Update window_peer_menu.cpp

Authored by Neurotoxin001 on 2025-06-26 20:58:22 +03:00, committed by GitHub
parent 941103de3e
commit 640e68271d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

@@ -748,28 +748,28 @@ void Filler::addClearHistory() {
 }
 void Filler::addDeleteMyMessages() {
-	if (_topic) {
-		return;
-	}
-	const auto isGroup = _peer - > isChat() || _peer - > isMegagroup();
-	if (!isGroup) {
-		return;
-	}
-	if (const auto chat = _peer - > asChat()) {
-		if (!chat - > amIn() || chat - > amCreator() || chat - > hasAdminRights()) {
-			return;
-		}
-	} else if (const auto channel = _peer - > asChannel()) {
-		if (!channel - > isMegagroup() || !channel - > amIn() || channel - > amCreator() || channel - > hasAdminRights()) {
-			return;
-		}
-	} else {
-		return;
-	}
-	_addAction(
-		tr::lng_profile_delete_my_messages(tr::now),
-		DeleteMyMessagesHandler(_controller, _peer), &
-		st::menuIconDelete);
+	if (_topic) {
+		return;
+	}
+	const auto isGroup = _peer->isChat() || _peer->isMegagroup();
+	if (!isGroup) {
+		return;
+	}
+	if (const auto chat = _peer->asChat()) {
+		if (!chat->amIn() || chat->amCreator() || chat->hasAdminRights()) {
+			return;
+		}
+	} else if (const auto channel = _peer->asChannel()) {
+		if (!channel->isMegagroup() || !channel->amIn() || channel->amCreator() || channel->hasAdminRights()) {
+			return;
+		}
+	} else {
+		return;
+	}
+	_addAction(
+		tr::lng_profile_delete_my_messages(tr::now),
+		DeleteMyMessagesHandler(_controller, _peer),
+		&st::menuIconDelete);
 }
 void Filler::addDeleteChat() {
@@ -3208,113 +3208,107 @@ Fn<void()> ClearHistoryHandler(
 	};
 }
-void DeleteMyMessagesAfterConfirm(not_null < PeerData * > peer) {
-	const auto session = & peer - > session();
+void DeleteMyMessagesAfterConfirm(not_null<PeerData*> peer) {
+	const auto session = &peer->session();
-	auto collected = std::make_shared < std::vector < MsgId >> ();
+	auto collected = std::make_shared<std::vector<MsgId>>();
-	const auto removeNext = std::make_shared < Fn < void(int) >> ();
-	const auto requestNext = std::make_shared < Fn < void(MsgId) >> ();
+	const auto removeNext = std::make_shared<Fn<void(int)>>();
+	const auto requestNext = std::make_shared<Fn<void(MsgId)>>();
-	* removeNext = [ = ](int index) {
-		if (index >= int(collected - > size())) {
-			DEBUG_LOG(("Deleted all %1 my messages in this chat").arg(collected - > size()));
-			return;
-		}
+	*removeNext = [=](int index) {
+		if (index >= int(collected->size())) {
+			DEBUG_LOG(("Deleted all %1 my messages in this chat").arg(collected->size()));
+			return;
+		}
-		QVector < MTPint > ids;
-		ids.reserve(std::min < int > (100, collected - > size() - index));
-		for (auto i = 0; i < 100 && (index + i) < int(collected - > size()); ++i) {
-			ids.push_back(MTP_int(( * collected)[index + i].bare));
-		}
+		QVector<MTPint> ids;
+		ids.reserve(std::min<int>(100, collected->size() - index));
+		for (auto i = 0; i < 100 && (index + i) < int(collected->size()); ++i) {
+			ids.push_back(MTP_int((*collected)[index + i].bare));
+		}
-		const auto batch = index / 100 + 1;
-		const auto done = [ = ](const MTPmessages_AffectedMessages & result) {
-			session - > api().applyAffectedMessages(peer, result);
-			if (peer - > isChannel()) {
-				session - > data().processMessagesDeleted(peer - > id, ids);
-			} else {
-				session - > data().processNonChannelMessagesDeleted(ids);
-			}
-			const auto deleted = index + ids.size();
-			DEBUG_LOG(("Deleted batch %1, total deleted %2/%3").arg(batch).arg(deleted).arg(collected - > size()));
-			const auto delay = crl::time(500 + base::RandomValue < int > () % 500);
-			base::call_delayed(delay, [ = ] {
-				( * removeNext)(deleted);
-			});
-		};
-		const auto fail = [ = ](const MTP::Error & error) {
-			DEBUG_LOG(("Delete batch failed: %1").arg(error.type()));
-			const auto delay = crl::time(1000);
-			base::call_delayed(delay, [ = ] {
-				( * removeNext)(index);
-			});
-		};
+		const auto batch = index / 100 + 1;
+		const auto done = [=](const MTPmessages_AffectedMessages &result) {
+			session->api().applyAffectedMessages(peer, result);
+			if (peer->isChannel()) {
+				session->data().processMessagesDeleted(peer->id, ids);
+			} else {
+				session->data().processNonChannelMessagesDeleted(ids);
+			}
+			const auto deleted = index + ids.size();
+			DEBUG_LOG(("Deleted batch %1, total deleted %2/%3").arg(batch).arg(deleted).arg(collected->size()));
+			const auto delay = crl::time(500 + base::RandomValue<int>() % 500);
+			base::call_delayed(delay, [=] { (*removeNext)(deleted); });
+		};
+		const auto fail = [=](const MTP::Error &error) {
+			DEBUG_LOG(("Delete batch failed: %1").arg(error.type()));
+			const auto delay = crl::time(1000);
+			base::call_delayed(delay, [=] { (*removeNext)(index); });
+		};
-		if (const auto channel = peer - > asChannel()) {
-			session - > api()
-				.request(MTPchannels_DeleteMessages(channel - > inputChannel, MTP_vector < MTPint > (ids)))
-				.done(done)
-				.fail(fail)
-				.handleFloodErrors()
-				.send();
-		} else {
-			using Flag = MTPmessages_DeleteMessages::Flag;
-			session - > api()
-				.request(MTPmessages_DeleteMessages(MTP_flags(Flag::f_revoke), MTP_vector < MTPint > (ids)))
-				.done(done)
-				.fail(fail)
-				.handleFloodErrors()
-				.send();
-		}
-	};
+		if (const auto channel = peer->asChannel()) {
+			session->api()
+				.request(MTPchannels_DeleteMessages(channel->inputChannel, MTP_vector<MTPint>(ids)))
+				.done(done)
+				.fail(fail)
+				.handleFloodErrors()
+				.send();
+		} else {
+			using Flag = MTPmessages_DeleteMessages::Flag;
+			session->api()
+				.request(MTPmessages_DeleteMessages(MTP_flags(Flag::f_revoke), MTP_vector<MTPint>(ids)))
+				.done(done)
+				.fail(fail)
+				.handleFloodErrors()
+				.send();
+		}
+	};
-	* requestNext = [ = ](MsgId from) {
-		using Flag = MTPmessages_Search::Flag;
-		auto request = MTPmessages_Search(
-			MTP_flags(Flag::f_from_id),
-			peer - > input,
-			MTP_string(),
-			MTP_inputPeerSelf(),
-			MTPInputPeer(),
-			MTPVector < MTPReaction > (),
-			MTP_int(0), // top_msg_id
-			MTP_inputMessagesFilterEmpty(),
-			MTP_int(0), // min_date
-			MTP_int(0), // max_date
-			MTP_int(from.bare),
-			MTP_int(0), // add_offset
-			MTP_int(100),
-			MTP_int(0), // max_id
-			MTP_int(0), // min_id
-			MTP_long(0)); // hash
+	*requestNext = [=](MsgId from) {
+		using Flag = MTPmessages_Search::Flag;
+		auto request = MTPmessages_Search(
+			MTP_flags(Flag::f_from_id),
+			peer->input,
+			MTP_string(),
+			MTP_inputPeerSelf(),
+			MTPInputPeer(),
+			MTPVector<MTPReaction>(),
+			MTP_int(0), // top_msg_id
+			MTP_inputMessagesFilterEmpty(),
+			MTP_int(0), // min_date
+			MTP_int(0), // max_date
+			MTP_int(from.bare),
+			MTP_int(0), // add_offset
+			MTP_int(100),
+			MTP_int(0), // max_id
+			MTP_int(0), // min_id
+			MTP_long(0)); // hash
-		session - > api()
-			.request(std::move(request))
-			.done([ = ](const Api::HistoryRequestResult & result) {
-				auto parsed = Api::ParseHistoryResult(peer, from, Data::LoadDirection::Before, result);
-				MsgId minId;
-				int batchCount = 0;
-				for (const auto & id: parsed.messageIds) {
-					if (!minId || id < minId) minId = id;
-					collected - > push_back(id);
-					++batchCount;
-				}
-				DEBUG_LOG(("Batch found %1 my messages, total %2").arg(batchCount).arg(collected - > size()));
-				if (parsed.messageIds.size() == 100 && minId) {
-					( * requestNext)(minId - MsgId(1));
-				} else {
-					DEBUG_LOG(("Found %1 my messages in this chat (SEARCH)").arg(collected - > size()));
-					( * removeNext)(0);
-				}
-			})
-			.fail([ = ](const MTP::Error & error) {
-				DEBUG_LOG(("History fetch failed: %1").arg(error.type()));
-			})
-			.send();
-	};
+		session->api()
+			.request(std::move(request))
+			.done([=](const Api::HistoryRequestResult &result) {
+				auto parsed = Api::ParseHistoryResult(peer, from, Data::LoadDirection::Before, result);
+				MsgId minId;
+				int batchCount = 0;
+				for (const auto &id : parsed.messageIds) {
+					if (!minId || id < minId) minId = id;
+					collected->push_back(id);
+					++batchCount;
+				}
+				DEBUG_LOG(("Batch found %1 my messages, total %2").arg(batchCount).arg(collected->size()));
+				if (parsed.messageIds.size() == 100 && minId) {
+					(*requestNext)(minId - MsgId(1));
+				} else {
+					DEBUG_LOG(("Found %1 my messages in this chat (SEARCH)").arg(collected->size()));
+					(*removeNext)(0);
+				}
+			})
+			.fail([=](const MTP::Error &error) { DEBUG_LOG(("History fetch failed: %1").arg(error.type())); })
+			.send();
+	};
-	( * requestNext)(MsgId(0));
+	(*requestNext)(MsgId(0));
 }
 Fn<void()> DeleteMyMessagesHandler(not_null<Window::SessionController *> controller, not_null<PeerData *> peer) {