2016-10-25 19:51:31 +02:00
|
|
|
from datetime import datetime, timedelta
|
2020-05-07 14:24:45 +02:00
|
|
|
from typing import Any, Dict, List, Optional, Tuple
|
2019-06-13 17:09:35 +02:00
|
|
|
from unittest import mock
|
2016-10-25 19:51:31 +02:00
|
|
|
|
2024-05-08 03:10:56 +02:00
|
|
|
import time_machine
|
2019-06-25 20:01:05 +02:00
|
|
|
from django.conf import settings
|
2017-04-15 04:03:56 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2023-10-12 19:43:45 +02:00
|
|
|
from typing_extensions import override
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2022-04-14 23:58:15 +02:00
|
|
|
from zerver.actions.create_realm import do_create_realm
|
2022-07-17 13:00:21 +02:00
|
|
|
from zerver.actions.message_delete import do_delete_messages
|
2022-04-14 23:50:10 +02:00
|
|
|
from zerver.actions.message_send import internal_send_private_message
|
2022-04-14 23:57:15 +02:00
|
|
|
from zerver.actions.realm_settings import do_set_realm_property
|
2023-07-28 16:46:24 +02:00
|
|
|
from zerver.actions.scheduled_messages import check_schedule_message, delete_scheduled_message
|
2022-04-14 23:31:40 +02:00
|
|
|
from zerver.actions.submessage import do_add_submessage
|
2016-11-01 11:26:38 +01:00
|
|
|
from zerver.lib.retention import (
|
|
|
|
archive_messages,
|
2019-06-25 20:01:05 +02:00
|
|
|
clean_archived_data,
|
2020-05-07 14:24:45 +02:00
|
|
|
get_realms_and_streams_for_archiving,
|
2020-06-11 00:54:34 +02:00
|
|
|
move_messages_to_archive,
|
|
|
|
restore_all_data_from_archive,
|
2020-06-22 11:26:06 +02:00
|
|
|
restore_retention_policy_deletions_for_stream,
|
2020-06-11 00:54:34 +02:00
|
|
|
)
|
|
|
|
from zerver.lib.test_classes import ZulipTestCase
|
2022-10-15 22:47:40 +02:00
|
|
|
from zerver.lib.test_helpers import zulip_reaction_info
|
2024-06-20 18:12:58 +02:00
|
|
|
from zerver.lib.upload import create_attachment
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
|
|
|
ArchivedAttachment,
|
|
|
|
ArchivedMessage,
|
|
|
|
ArchivedReaction,
|
|
|
|
ArchivedSubMessage,
|
|
|
|
ArchivedUserMessage,
|
|
|
|
ArchiveTransaction,
|
|
|
|
Attachment,
|
|
|
|
Message,
|
|
|
|
Reaction,
|
|
|
|
Realm,
|
|
|
|
Stream,
|
|
|
|
SubMessage,
|
|
|
|
UserMessage,
|
2016-11-01 11:26:38 +01:00
|
|
|
)
|
2023-12-15 04:33:19 +01:00
|
|
|
from zerver.models.clients import get_client
|
2023-12-15 02:14:24 +01:00
|
|
|
from zerver.models.realms import get_realm
|
2023-12-15 03:57:04 +01:00
|
|
|
from zerver.models.streams import get_stream
|
2023-12-15 01:16:00 +01:00
|
|
|
from zerver.models.users import get_system_bot
|
2020-09-02 05:21:28 +02:00
|
|
|
|
2019-05-30 13:01:06 +02:00
|
|
|
# Class with helper functions useful for testing archiving of reactions:
|
2020-08-10 18:40:38 +02:00
|
|
|
from zerver.tornado.django_api import send_event
|
2019-05-30 13:01:06 +02:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
# Message retention periods (in days) configured on the two test realms;
# messages older than these thresholds count as "expired" in the tests below.
ZULIP_REALM_DAYS = 30
MIT_REALM_DAYS = 100
|
2016-10-25 19:51:31 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-05-30 12:35:44 +02:00
|
|
|
class RetentionTestingBase(ZulipTestCase):
    """Shared assertion helpers for checking the state of the live message
    tables versus the archive tables after archiving/restoring."""

    def _get_usermessage_ids(self, message_ids: List[int]) -> List[int]:
        """Return the ids of every UserMessage row attached to the given messages."""
        usermessage_rows = UserMessage.objects.filter(message_id__in=message_ids)
        return list(usermessage_rows.values_list("id", flat=True))

    def _verify_archive_data(
        self, expected_message_ids: List[int], expected_usermessage_ids: List[int]
    ) -> None:
        """Assert that exactly the expected rows live in the archive tables,
        and that those rows no longer exist in the normal tables."""
        archived_message_ids = set(ArchivedMessage.objects.values_list("id", flat=True))
        self.assertEqual(archived_message_ids, set(expected_message_ids))

        archived_usermessage_ids = set(ArchivedUserMessage.objects.values_list("id", flat=True))
        self.assertEqual(archived_usermessage_ids, set(expected_usermessage_ids))

        # Archived Messages and UserMessages should have been removed from the normal tables:
        self.assertEqual(Message.objects.filter(id__in=expected_message_ids).count(), 0)
        self.assertEqual(UserMessage.objects.filter(id__in=expected_usermessage_ids).count(), 0)

    def _verify_restored_data(
        self, expected_message_ids: List[int], expected_usermessage_ids: List[int]
    ) -> None:
        """Assert that the expected rows were restored into the normal tables,
        while still remaining present in the archive."""
        # Check that the data was restored:
        restored_message_ids = set(
            Message.objects.filter(id__in=expected_message_ids).values_list("id", flat=True)
        )
        self.assertEqual(restored_message_ids, set(expected_message_ids))

        restored_usermessage_ids = set(
            UserMessage.objects.filter(id__in=expected_usermessage_ids).values_list(
                "id", flat=True
            )
        )
        self.assertEqual(restored_usermessage_ids, set(expected_usermessage_ids))

        # The Messages and UserMessages should still be in the archive - we don't delete them.
        self.assertEqual(
            set(ArchivedMessage.objects.values_list("id", flat=True)),
            set(expected_message_ids),
        )
        self.assertEqual(
            set(ArchivedUserMessage.objects.values_list("id", flat=True)),
            set(expected_usermessage_ids),
        )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-06-21 19:40:57 +02:00
|
|
|
class ArchiveMessagesTestingBase(RetentionTestingBase):
    """Base class providing fixtures for the archiving tests: realms with
    retention policies and helpers to create expired/non-expired messages."""

    @override
    def setUp(self) -> None:
        super().setUp()
        self.zulip_realm = get_realm("zulip")
        self.mit_realm = get_realm("zephyr")
        self._set_realm_message_retention_value(self.zulip_realm, ZULIP_REALM_DAYS)
        self._set_realm_message_retention_value(self.mit_realm, MIT_REALM_DAYS)

        # Set publication date of all existing messages to "now", so that we have full
        # control over what's expired and what isn't.
        Message.objects.all().update(date_sent=timezone_now())

    def _set_realm_message_retention_value(self, realm: Realm, retention_period: int) -> None:
        """Set and persist the realm-level message retention policy (in days)."""
        realm.message_retention_days = retention_period
        realm.save()

    def _set_stream_message_retention_value(
        self, stream: Stream, retention_period: Optional[int]
    ) -> None:
        """Set and persist the stream-level retention override (None = inherit)."""
        stream.message_retention_days = retention_period
        stream.save()

    def _change_messages_date_sent(self, msgs_ids: List[int], date_sent: datetime) -> None:
        """Backdate (or forward-date) the given messages in bulk."""
        Message.objects.filter(id__in=msgs_ids).update(date_sent=date_sent)

    def _make_mit_messages(self, message_quantity: int, date_sent: datetime) -> Any:
        # Send personal messages within the mit.edu realm, then rewrite
        # their date_sent so the test controls their age.
        mit_sender = self.mit_user("espuser")
        mit_recipient = self.mit_user("starnine")
        msg_ids = [
            self.send_personal_message(mit_sender, mit_recipient)
            for _ in range(message_quantity)
        ]

        self._change_messages_date_sent(msg_ids, date_sent)
        return msg_ids

    def _send_cross_realm_personal_message(self) -> int:
        # Cross-realm direct message: system bot -> regular user.
        bot_email = "notification-bot@zulip.com"
        internal_realm = get_realm(settings.SYSTEM_BOT_REALM)
        zulip_user = self.example_user("hamlet")
        msg_id = internal_send_private_message(
            sender=get_system_bot(bot_email, internal_realm.id),
            recipient_user=zulip_user,
            content="test message",
        )
        assert msg_id is not None
        return msg_id

    def _send_personal_message_to_cross_realm_bot(self) -> int:
        # Cross-realm direct message in the other direction: regular user -> system bot.
        bot_email = "notification-bot@zulip.com"
        internal_realm = get_realm(settings.SYSTEM_BOT_REALM)
        zulip_user = self.example_user("hamlet")
        msg_id = internal_send_private_message(
            sender=zulip_user,
            recipient_user=get_system_bot(bot_email, internal_realm.id),
            content="test message",
        )
        assert msg_id is not None
        return msg_id

    def _make_expired_zulip_messages(self, message_quantity: int) -> List[int]:
        """Pick message_quantity existing Zulip-realm messages and backdate
        them just past the realm's retention window."""
        selected_ids = list(
            Message.objects.order_by("id")
            .filter(realm=self.zulip_realm)
            .values_list("id", flat=True)[3 : 3 + message_quantity]
        )
        self._change_messages_date_sent(
            selected_ids,
            timezone_now() - timedelta(days=ZULIP_REALM_DAYS + 1),
        )

        return selected_ids

    def _send_messages_with_attachments(self) -> Dict[str, int]:
        """Create three attachments and three messages referencing them; one
        message is backdated past retention. Returns the message ids keyed by
        their role in the attachment-archiving tests."""
        user_profile = self.example_user("hamlet")
        sample_size = 10
        host = user_profile.realm.host
        realm_id = get_realm("zulip").id
        dummy_files = [
            ("zulip.txt", f"{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt", sample_size),
            ("temp_file.py", f"{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py", sample_size),
            ("abc.py", f"{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py", sample_size),
        ]

        for file_name, path_id, size in dummy_files:
            create_attachment(file_name, path_id, user_profile, user_profile.realm, size)

        self.subscribe(user_profile, "Denmark")
        # Message body referencing all three uploaded files:
        body = (
            "Some files here ..."
            f" [zulip.txt](http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt)"
            f" http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py.... Some"
            f" more.... http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py"
        )

        expired_message_id = self.send_stream_message(user_profile, "Denmark", body)
        actual_message_id = self.send_stream_message(user_profile, "Denmark", body)

        othello = self.example_user("othello")
        other_message_id = self.send_stream_message(othello, "Denmark", body)
        self._change_messages_date_sent(
            [expired_message_id], timezone_now() - timedelta(days=MIT_REALM_DAYS + 1)
        )
        return {
            "expired_message_id": expired_message_id,
            "actual_message_id": actual_message_id,
            "other_user_message_id": other_message_id,
        }
|
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2019-06-21 19:40:57 +02:00
|
|
|
class TestArchiveMessagesGeneral(ArchiveMessagesTestingBase):
|
2016-11-01 11:26:38 +01:00
|
|
|
def test_no_expired_messages(self) -> None:
|
2019-06-05 20:22:08 +02:00
|
|
|
archive_messages()
|
2019-05-24 12:41:04 +02:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
self.assertEqual(ArchivedUserMessage.objects.count(), 0)
|
|
|
|
self.assertEqual(ArchivedMessage.objects.count(), 0)
|
|
|
|
|
|
|
|
def test_expired_messages_in_each_realm(self) -> None:
|
|
|
|
"""General test for archiving expired messages properly with
|
|
|
|
multiple realms involved"""
|
2019-05-25 13:07:01 +02:00
|
|
|
# Make some expired messages in MIT:
|
|
|
|
expired_mit_msg_ids = self._make_mit_messages(
|
|
|
|
5,
|
2021-02-12 08:19:30 +01:00
|
|
|
timezone_now() - timedelta(days=MIT_REALM_DAYS + 1),
|
2019-05-25 13:07:01 +02:00
|
|
|
)
|
|
|
|
# Make some non-expired messages in MIT:
|
2021-02-12 08:19:30 +01:00
|
|
|
self._make_mit_messages(4, timezone_now() - timedelta(days=MIT_REALM_DAYS - 1))
|
2019-05-25 13:07:01 +02:00
|
|
|
|
|
|
|
# Change some Zulip messages to be expired:
|
2021-02-12 08:19:30 +01:00
|
|
|
expired_zulip_msg_ids = list(
|
2021-02-12 08:20:45 +01:00
|
|
|
Message.objects.order_by("id")
|
2023-08-30 23:45:34 +02:00
|
|
|
.filter(realm=self.zulip_realm)
|
2021-02-12 08:20:45 +01:00
|
|
|
.values_list("id", flat=True)[3:10]
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2019-08-28 02:43:19 +02:00
|
|
|
self._change_messages_date_sent(
|
2019-05-25 13:07:01 +02:00
|
|
|
expired_zulip_msg_ids,
|
2023-11-28 19:33:10 +01:00
|
|
|
timezone_now() - timedelta(days=ZULIP_REALM_DAYS + 1),
|
2019-05-25 13:07:01 +02:00
|
|
|
)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2019-05-25 13:07:01 +02:00
|
|
|
expired_msg_ids = expired_mit_msg_ids + expired_zulip_msg_ids
|
2019-06-05 20:22:08 +02:00
|
|
|
expired_usermsg_ids = self._get_usermessage_ids(expired_msg_ids)
|
2019-05-24 12:41:04 +02:00
|
|
|
|
2019-06-05 20:22:08 +02:00
|
|
|
archive_messages()
|
|
|
|
self._verify_archive_data(expired_msg_ids, expired_usermsg_ids)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2019-06-24 17:19:22 +02:00
|
|
|
restore_all_data_from_archive()
|
|
|
|
self._verify_restored_data(expired_msg_ids, expired_usermsg_ids)
|
|
|
|
|
2017-11-19 04:02:03 +01:00
|
|
|
def test_expired_messages_in_one_realm(self) -> None:
|
2016-11-01 11:26:38 +01:00
|
|
|
"""Test with a retention policy set for only the MIT realm"""
|
2020-06-24 13:02:07 +02:00
|
|
|
self._set_realm_message_retention_value(self.zulip_realm, -1)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2019-05-25 13:07:01 +02:00
|
|
|
# Make some expired messages in MIT:
|
|
|
|
expired_mit_msg_ids = self._make_mit_messages(
|
|
|
|
5,
|
2021-02-12 08:19:30 +01:00
|
|
|
timezone_now() - timedelta(days=MIT_REALM_DAYS + 1),
|
2019-05-25 13:07:01 +02:00
|
|
|
)
|
|
|
|
# Make some non-expired messages in MIT:
|
2021-02-12 08:19:30 +01:00
|
|
|
self._make_mit_messages(4, timezone_now() - timedelta(days=MIT_REALM_DAYS - 1))
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2019-08-28 02:43:19 +02:00
|
|
|
# Change some Zulip messages date_sent, but the realm has no retention policy,
|
2019-05-25 13:07:01 +02:00
|
|
|
# so they shouldn't get archived
|
2021-02-12 08:19:30 +01:00
|
|
|
zulip_msg_ids = list(
|
2021-02-12 08:20:45 +01:00
|
|
|
Message.objects.order_by("id")
|
2023-08-30 23:45:34 +02:00
|
|
|
.filter(realm=self.zulip_realm)
|
2021-02-12 08:20:45 +01:00
|
|
|
.values_list("id", flat=True)[3:10]
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2019-08-28 02:43:19 +02:00
|
|
|
self._change_messages_date_sent(
|
2019-05-25 13:07:01 +02:00
|
|
|
zulip_msg_ids,
|
2023-11-28 19:33:10 +01:00
|
|
|
timezone_now() - timedelta(days=ZULIP_REALM_DAYS + 1),
|
2016-10-25 19:51:31 +02:00
|
|
|
)
|
2019-05-25 13:07:01 +02:00
|
|
|
|
|
|
|
# Only MIT has a retention policy:
|
|
|
|
expired_msg_ids = expired_mit_msg_ids
|
2019-06-05 20:22:08 +02:00
|
|
|
expired_usermsg_ids = self._get_usermessage_ids(expired_msg_ids)
|
2019-05-25 13:07:01 +02:00
|
|
|
|
2019-06-05 20:22:08 +02:00
|
|
|
archive_messages()
|
|
|
|
self._verify_archive_data(expired_msg_ids, expired_usermsg_ids)
|
2019-05-25 13:07:01 +02:00
|
|
|
|
2019-06-24 17:19:22 +02:00
|
|
|
restore_all_data_from_archive()
|
|
|
|
self._verify_restored_data(expired_msg_ids, expired_usermsg_ids)
|
|
|
|
|
2019-05-25 13:07:01 +02:00
|
|
|
self._set_realm_message_retention_value(self.zulip_realm, ZULIP_REALM_DAYS)
|
|
|
|
|
2019-06-06 15:10:39 +02:00
|
|
|
def test_different_stream_realm_policies(self) -> None:
|
|
|
|
verona = get_stream("Verona", self.zulip_realm)
|
2020-03-07 11:43:05 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2019-06-06 15:10:39 +02:00
|
|
|
|
|
|
|
msg_id = self.send_stream_message(hamlet, "Verona", "test")
|
|
|
|
usermsg_ids = self._get_usermessage_ids([msg_id])
|
2019-08-28 02:43:19 +02:00
|
|
|
self._change_messages_date_sent([msg_id], timezone_now() - timedelta(days=2))
|
2019-06-06 15:10:39 +02:00
|
|
|
|
|
|
|
# Don't archive if stream's retention policy set to -1:
|
|
|
|
self._set_realm_message_retention_value(self.zulip_realm, 1)
|
|
|
|
self._set_stream_message_retention_value(verona, -1)
|
|
|
|
archive_messages()
|
|
|
|
self._verify_archive_data([], [])
|
|
|
|
|
|
|
|
# Don't archive if stream and realm have no retention policy:
|
2020-06-24 13:02:07 +02:00
|
|
|
self._set_realm_message_retention_value(self.zulip_realm, -1)
|
2019-06-06 15:10:39 +02:00
|
|
|
self._set_stream_message_retention_value(verona, None)
|
|
|
|
archive_messages()
|
|
|
|
self._verify_archive_data([], [])
|
|
|
|
|
|
|
|
# Archive if stream has a retention policy set:
|
2020-06-24 13:02:07 +02:00
|
|
|
self._set_realm_message_retention_value(self.zulip_realm, -1)
|
2019-06-06 15:10:39 +02:00
|
|
|
self._set_stream_message_retention_value(verona, 1)
|
|
|
|
archive_messages()
|
|
|
|
self._verify_archive_data([msg_id], usermsg_ids)
|
|
|
|
|
2019-06-28 01:50:40 +02:00
|
|
|
def test_cross_realm_personal_message_archiving(self) -> None:
|
2021-05-08 02:36:30 +02:00
|
|
|
"""Check that cross-realm personal messages get correctly archived."""
|
2022-10-28 22:06:01 +02:00
|
|
|
|
|
|
|
# We want to test on a set of cross-realm messages of both kinds -
|
|
|
|
# from a bot to a user, and from a user to a bot.
|
2019-06-28 01:50:40 +02:00
|
|
|
msg_ids = [self._send_cross_realm_personal_message() for i in range(1, 7)]
|
2022-10-28 22:06:01 +02:00
|
|
|
msg_ids += [self._send_personal_message_to_cross_realm_bot() for i in range(1, 7)]
|
2019-06-28 01:50:40 +02:00
|
|
|
usermsg_ids = self._get_usermessage_ids(msg_ids)
|
2022-10-28 22:06:01 +02:00
|
|
|
# Make the message expired in the Zulip realm.:
|
2023-11-28 19:33:10 +01:00
|
|
|
self._change_messages_date_sent(
|
|
|
|
msg_ids, timezone_now() - timedelta(days=ZULIP_REALM_DAYS + 1)
|
|
|
|
)
|
2019-05-25 13:07:01 +02:00
|
|
|
|
2019-06-05 20:22:08 +02:00
|
|
|
archive_messages()
|
2019-06-28 01:50:40 +02:00
|
|
|
self._verify_archive_data(msg_ids, usermsg_ids)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2019-06-13 17:09:35 +02:00
|
|
|
def test_archiving_interrupted(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
"""Check that queries get rolled back to a consistent state
|
|
|
|
if archiving gets interrupted in the middle of processing a chunk."""
|
2019-06-13 17:09:35 +02:00
|
|
|
expired_msg_ids = self._make_expired_zulip_messages(7)
|
|
|
|
expired_usermsg_ids = self._get_usermessage_ids(expired_msg_ids)
|
|
|
|
|
|
|
|
# Insert an exception near the end of the archiving process of a chunk:
|
2022-10-30 00:16:32 +02:00
|
|
|
with mock.patch(
|
|
|
|
"zerver.lib.retention.delete_messages", side_effect=Exception("delete_messages error")
|
|
|
|
):
|
|
|
|
with self.assertRaisesRegex(Exception, r"^delete_messages error$"):
|
2020-09-02 02:50:08 +02:00
|
|
|
# Specify large chunk_size to ensure things happen in a single batch
|
|
|
|
archive_messages(chunk_size=1000)
|
2019-06-13 17:09:35 +02:00
|
|
|
|
|
|
|
# Archiving code has been executed, but because we got an exception, things should have been rolled back:
|
|
|
|
self._verify_archive_data([], [])
|
|
|
|
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
set(Message.objects.filter(id__in=expired_msg_ids).values_list("id", flat=True)),
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
set(expired_msg_ids),
|
2019-06-13 17:09:35 +02:00
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-02-12 08:19:30 +01:00
|
|
|
set(
|
|
|
|
UserMessage.objects.filter(id__in=expired_usermsg_ids).values_list(
|
2021-02-12 08:20:45 +01:00
|
|
|
"id", flat=True
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
),
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
set(expired_usermsg_ids),
|
2019-06-13 17:09:35 +02:00
|
|
|
)
|
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
def test_archive_message_tool(self) -> None:
|
|
|
|
"""End-to-end test of the archiving tool, directly calling
|
|
|
|
archive_messages."""
|
2019-05-25 13:07:01 +02:00
|
|
|
# Make some expired messages in MIT:
|
|
|
|
expired_mit_msg_ids = self._make_mit_messages(
|
|
|
|
5,
|
2021-02-12 08:19:30 +01:00
|
|
|
timezone_now() - timedelta(days=MIT_REALM_DAYS + 1),
|
2019-05-25 13:07:01 +02:00
|
|
|
)
|
|
|
|
# Make some non-expired messages in MIT:
|
2021-02-12 08:19:30 +01:00
|
|
|
self._make_mit_messages(4, timezone_now() - timedelta(days=MIT_REALM_DAYS - 1))
|
2019-05-25 13:07:01 +02:00
|
|
|
|
|
|
|
# Change some Zulip messages to be expired:
|
2019-05-30 12:08:23 +02:00
|
|
|
expired_zulip_msg_ids = self._make_expired_zulip_messages(7)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2019-06-28 01:50:40 +02:00
|
|
|
expired_crossrealm_msg_id = self._send_cross_realm_personal_message()
|
|
|
|
# Make the message expired in the recipient's realm:
|
2019-08-28 02:43:19 +02:00
|
|
|
self._change_messages_date_sent(
|
2019-05-25 13:07:01 +02:00
|
|
|
[expired_crossrealm_msg_id],
|
2023-11-28 19:33:10 +01:00
|
|
|
timezone_now() - timedelta(days=ZULIP_REALM_DAYS + 1),
|
2016-11-01 11:26:38 +01:00
|
|
|
)
|
|
|
|
|
2020-09-02 06:59:07 +02:00
|
|
|
expired_msg_ids = [*expired_mit_msg_ids, *expired_zulip_msg_ids, expired_crossrealm_msg_id]
|
2019-06-05 20:22:08 +02:00
|
|
|
expired_usermsg_ids = self._get_usermessage_ids(expired_msg_ids)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2019-06-10 18:09:50 +02:00
|
|
|
archive_messages(chunk_size=2) # Specify low chunk_size to test batching.
|
2020-03-28 01:25:56 +01:00
|
|
|
# Make sure we archived what needed:
|
2019-06-05 20:22:08 +02:00
|
|
|
self._verify_archive_data(expired_msg_ids, expired_usermsg_ids)
|
2019-05-25 13:07:01 +02:00
|
|
|
|
2019-06-24 17:19:22 +02:00
|
|
|
restore_all_data_from_archive()
|
|
|
|
self._verify_restored_data(expired_msg_ids, expired_usermsg_ids)
|
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
def test_archiving_attachments(self) -> None:
|
|
|
|
"""End-to-end test for the logic for archiving attachments. This test
|
|
|
|
is hard to read without first reading _send_messages_with_attachments"""
|
|
|
|
msgs_ids = self._send_messages_with_attachments()
|
|
|
|
|
|
|
|
# First, confirm deleting the oldest message
|
|
|
|
# (`expired_message_id`) creates ArchivedAttachment objects
|
|
|
|
# and associates that message ID with them, but does not
|
|
|
|
# delete the Attachment object.
|
|
|
|
archive_messages()
|
2019-05-25 13:07:01 +02:00
|
|
|
self.assertEqual(ArchivedAttachment.objects.count(), 3)
|
2016-11-01 11:26:38 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:19:30 +01:00
|
|
|
list(
|
2021-02-12 08:20:45 +01:00
|
|
|
ArchivedAttachment.objects.distinct("messages__id").values_list(
|
|
|
|
"messages__id", flat=True
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
),
|
2021-02-12 08:20:45 +01:00
|
|
|
[msgs_ids["expired_message_id"]],
|
2019-05-25 13:07:01 +02:00
|
|
|
)
|
|
|
|
self.assertEqual(Attachment.objects.count(), 3)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
# Now make `actual_message_id` expired too. We still don't
|
|
|
|
# delete the Attachment objects.
|
2021-02-12 08:19:30 +01:00
|
|
|
self._change_messages_date_sent(
|
2021-02-12 08:20:45 +01:00
|
|
|
[msgs_ids["actual_message_id"]], timezone_now() - timedelta(days=MIT_REALM_DAYS + 1)
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2016-11-01 11:26:38 +01:00
|
|
|
archive_messages()
|
2019-05-25 13:07:01 +02:00
|
|
|
self.assertEqual(Attachment.objects.count(), 3)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
# Finally, make the last message mentioning those attachments
|
|
|
|
# expired. We should now delete the Attachment objects and
|
|
|
|
# each ArchivedAttachment object should list all 3 messages.
|
2021-02-12 08:19:30 +01:00
|
|
|
self._change_messages_date_sent(
|
2021-02-12 08:20:45 +01:00
|
|
|
[msgs_ids["other_user_message_id"]], timezone_now() - timedelta(days=MIT_REALM_DAYS + 1)
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
archive_messages()
|
2019-05-25 13:07:01 +02:00
|
|
|
self.assertEqual(Attachment.objects.count(), 0)
|
|
|
|
self.assertEqual(ArchivedAttachment.objects.count(), 3)
|
2016-11-01 11:26:38 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:19:30 +01:00
|
|
|
list(
|
2021-02-12 08:20:45 +01:00
|
|
|
ArchivedAttachment.objects.distinct("messages__id")
|
|
|
|
.order_by("messages__id")
|
|
|
|
.values_list("messages__id", flat=True)
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
sorted(msgs_ids.values()),
|
2019-05-25 13:07:01 +02:00
|
|
|
)
|
2017-05-14 21:14:26 +02:00
|
|
|
|
2019-06-24 17:19:22 +02:00
|
|
|
restore_all_data_from_archive()
|
|
|
|
# Attachments should have been restored:
|
|
|
|
self.assertEqual(Attachment.objects.count(), 3)
|
2020-09-02 02:50:08 +02:00
|
|
|
# Archived data doesn't get deleted by restoring.
|
|
|
|
self.assertEqual(ArchivedAttachment.objects.count(), 3)
|
2019-06-24 17:19:22 +02:00
|
|
|
self.assertEqual(
|
2021-02-12 08:19:30 +01:00
|
|
|
list(
|
2021-02-12 08:20:45 +01:00
|
|
|
Attachment.objects.distinct("messages__id")
|
|
|
|
.order_by("messages__id")
|
|
|
|
.values_list("messages__id", flat=True)
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
sorted(msgs_ids.values()),
|
2019-06-24 17:19:22 +02:00
|
|
|
)
|
|
|
|
|
2019-06-24 22:25:02 +02:00
|
|
|
def test_restoring_and_rearchiving(self) -> None:
|
|
|
|
expired_msg_ids = self._make_mit_messages(
|
|
|
|
7,
|
2021-02-12 08:19:30 +01:00
|
|
|
timezone_now() - timedelta(days=MIT_REALM_DAYS + 1),
|
2019-06-24 22:25:02 +02:00
|
|
|
)
|
|
|
|
expired_usermsg_ids = self._get_usermessage_ids(expired_msg_ids)
|
|
|
|
|
|
|
|
archive_messages(chunk_size=4)
|
|
|
|
self._verify_archive_data(expired_msg_ids, expired_usermsg_ids)
|
|
|
|
|
|
|
|
transactions = ArchiveTransaction.objects.all()
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(transactions, 2) # With chunk_size 4, there should be 2 transactions
|
2019-06-24 22:25:02 +02:00
|
|
|
|
2024-05-08 03:10:56 +02:00
|
|
|
now = timezone_now()
|
|
|
|
with time_machine.travel(now, tick=False):
|
|
|
|
restore_all_data_from_archive()
|
2019-06-24 22:25:02 +02:00
|
|
|
transactions[0].refresh_from_db()
|
|
|
|
transactions[1].refresh_from_db()
|
|
|
|
self.assertTrue(transactions[0].restored)
|
|
|
|
self.assertTrue(transactions[1].restored)
|
2024-05-08 03:10:56 +02:00
|
|
|
self.assertEqual(transactions[0].restored_timestamp, now)
|
|
|
|
self.assertEqual(transactions[1].restored_timestamp, now)
|
2019-06-24 22:25:02 +02:00
|
|
|
|
|
|
|
archive_messages(chunk_size=10)
|
|
|
|
self._verify_archive_data(expired_msg_ids, expired_usermsg_ids)
|
|
|
|
|
|
|
|
transactions = ArchiveTransaction.objects.order_by("id")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(transactions, 3)
|
2019-06-24 22:25:02 +02:00
|
|
|
|
|
|
|
archived_messages = ArchivedMessage.objects.filter(id__in=expired_msg_ids)
|
|
|
|
# Check that the re-archived messages are correctly assigned to the new transaction:
|
|
|
|
for message in archived_messages:
|
|
|
|
self.assertEqual(message.archive_transaction_id, transactions[2].id)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-06-21 19:40:57 +02:00
|
|
|
class TestArchivingSubMessages(ArchiveMessagesTestingBase):
    def test_archiving_submessages(self) -> None:
        """Submessages of archived messages move to ArchivedSubMessage and
        come back when the archive is restored."""
        expired_msg_ids = self._make_expired_zulip_messages(2)
        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")

        # (sender, target message id, content) for each submessage we attach:
        # two on the first expired message, one on the second.
        submessage_specs = [
            (cordelia, expired_msg_ids[0], '{"name": "alice", "salary": 20}'),
            (hamlet, expired_msg_ids[0], '{"name": "john", "salary": 30}'),
            (cordelia, expired_msg_ids[1], '{"name": "jack", "salary": 10}'),
        ]
        for sender, message_id, content in submessage_specs:
            do_add_submessage(
                realm=self.zulip_realm,
                sender_id=sender.id,
                message_id=message_id,
                msg_type="whatever",
                content=content,
            )

        submessage_ids = list(
            SubMessage.objects.filter(message_id__in=expired_msg_ids).values_list("id", flat=True),
        )

        self.assert_length(submessage_ids, 3)
        self.assertEqual(SubMessage.objects.filter(id__in=submessage_ids).count(), 3)
        archive_messages()
        # Archiving removes the live rows...
        self.assertEqual(SubMessage.objects.filter(id__in=submessage_ids).count(), 0)

        # ...and creates archived copies under the same ids.
        archived_ids = ArchivedSubMessage.objects.filter(id__in=submessage_ids).values_list(
            "id", flat=True
        )
        self.assertEqual(set(archived_ids), set(submessage_ids))

        restore_all_data_from_archive()
        self.assertEqual(
            set(SubMessage.objects.filter(id__in=submessage_ids).values_list("id", flat=True)),
            set(submessage_ids),
        )
2020-07-16 18:08:15 +02:00
|
|
|
class TestArchivingReactions(ArchiveMessagesTestingBase):
    def test_archiving_reactions(self) -> None:
        """Reactions on archived messages move to ArchivedReaction and are
        restored together with the messages."""
        expired_msg_ids = self._make_expired_zulip_messages(2)

        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")

        # Two reactions on the first expired message, one on the second.
        for reacting_user in [hamlet, cordelia]:
            self.api_post(
                reacting_user,
                f"/api/v1/messages/{expired_msg_ids[0]}/reactions",
                zulip_reaction_info(),
            )

        self.api_post(
            hamlet,
            f"/api/v1/messages/{expired_msg_ids[1]}/reactions",
            zulip_reaction_info(),
        )

        reaction_ids = list(
            Reaction.objects.filter(message_id__in=expired_msg_ids).values_list("id", flat=True),
        )

        self.assert_length(reaction_ids, 3)
        self.assertEqual(Reaction.objects.filter(id__in=reaction_ids).count(), 3)
        archive_messages()
        # Archiving removes the live rows...
        self.assertEqual(Reaction.objects.filter(id__in=reaction_ids).count(), 0)

        # ...and creates archived copies under the same ids.
        archived_ids = ArchivedReaction.objects.filter(id__in=reaction_ids).values_list(
            "id", flat=True
        )
        self.assertEqual(set(archived_ids), set(reaction_ids))

        restore_all_data_from_archive()
        restored_ids = Reaction.objects.filter(id__in=reaction_ids).values_list("id", flat=True)
        self.assertEqual(set(restored_ids), set(reaction_ids))
2019-06-21 19:40:57 +02:00
|
|
|
class MoveMessageToArchiveBase(RetentionTestingBase):
    @override
    def setUp(self) -> None:
        super().setUp()
        self.sender = self.example_user("hamlet")
        self.recipient = self.example_user("cordelia")

    def _create_attachments(self) -> None:
        # Register a handful of dummy attachments, all owned by hamlet and
        # all sharing one upload path prefix.
        sample_size = 10
        realm_id = get_realm("zulip").id
        path_prefix = f"{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc"
        user_profile = self.example_user("hamlet")
        for file_name in ["zulip.txt", "temp_file.py", "abc.py", "hello.txt", "new.py"]:
            create_attachment(
                file_name,
                f"{path_prefix}/{file_name}",
                user_profile,
                user_profile.realm,
                sample_size,
            )

    def _assert_archive_empty(self) -> None:
        # Sanity check: no archive rows of any kind exist yet.
        self.assertFalse(ArchivedUserMessage.objects.exists())
        self.assertFalse(ArchivedMessage.objects.exists())
        self.assertFalse(ArchivedAttachment.objects.exists())
|
2019-05-30 15:06:45 +02:00
|
|
|
class MoveMessageToArchiveGeneral(MoveMessageToArchiveBase):
|
2018-09-13 16:29:12 +02:00
|
|
|
def test_personal_messages_archiving(self) -> None:
|
2023-09-01 23:05:08 +02:00
|
|
|
msg_ids = [self.send_personal_message(self.sender, self.recipient) for i in range(3)]
|
2019-06-21 19:40:57 +02:00
|
|
|
usermsg_ids = self._get_usermessage_ids(msg_ids)
|
2019-05-25 13:07:01 +02:00
|
|
|
|
|
|
|
self._assert_archive_empty()
|
2018-09-13 16:29:12 +02:00
|
|
|
move_messages_to_archive(message_ids=msg_ids)
|
2019-06-21 19:40:57 +02:00
|
|
|
self._verify_archive_data(msg_ids, usermsg_ids)
|
2018-09-13 16:29:12 +02:00
|
|
|
|
2019-06-24 17:19:22 +02:00
|
|
|
restore_all_data_from_archive()
|
|
|
|
self._verify_restored_data(msg_ids, usermsg_ids)
|
|
|
|
|
2020-05-16 20:30:11 +02:00
|
|
|
def test_move_messages_to_archive_with_realm_argument(self) -> None:
|
|
|
|
realm = get_realm("zulip")
|
2023-09-01 23:05:08 +02:00
|
|
|
msg_ids = [self.send_personal_message(self.sender, self.recipient) for i in range(3)]
|
2020-05-16 20:30:11 +02:00
|
|
|
usermsg_ids = self._get_usermessage_ids(msg_ids)
|
|
|
|
|
|
|
|
self._assert_archive_empty()
|
|
|
|
move_messages_to_archive(message_ids=msg_ids, realm=realm)
|
|
|
|
self._verify_archive_data(msg_ids, usermsg_ids)
|
|
|
|
|
|
|
|
archive_transaction = ArchiveTransaction.objects.last()
|
2021-07-24 16:56:39 +02:00
|
|
|
assert archive_transaction is not None
|
2020-05-16 20:30:11 +02:00
|
|
|
self.assertEqual(archive_transaction.realm, realm)
|
|
|
|
|
2018-09-13 16:29:12 +02:00
|
|
|
def test_stream_messages_archiving(self) -> None:
|
2023-09-01 23:05:08 +02:00
|
|
|
msg_ids = [self.send_stream_message(self.sender, "Verona") for i in range(3)]
|
2019-06-21 19:40:57 +02:00
|
|
|
usermsg_ids = self._get_usermessage_ids(msg_ids)
|
2019-05-25 13:07:01 +02:00
|
|
|
|
|
|
|
self._assert_archive_empty()
|
2018-09-13 16:29:12 +02:00
|
|
|
move_messages_to_archive(message_ids=msg_ids)
|
2019-06-21 19:40:57 +02:00
|
|
|
self._verify_archive_data(msg_ids, usermsg_ids)
|
2018-09-13 16:29:12 +02:00
|
|
|
|
2019-06-24 17:19:22 +02:00
|
|
|
restore_all_data_from_archive()
|
|
|
|
self._verify_restored_data(msg_ids, usermsg_ids)
|
|
|
|
|
2018-09-13 16:29:12 +02:00
|
|
|
def test_archiving_messages_second_time(self) -> None:
|
2023-09-01 23:05:08 +02:00
|
|
|
msg_ids = [self.send_stream_message(self.sender, "Verona") for i in range(3)]
|
2019-06-21 19:40:57 +02:00
|
|
|
usermsg_ids = self._get_usermessage_ids(msg_ids)
|
2019-05-25 13:07:01 +02:00
|
|
|
|
|
|
|
self._assert_archive_empty()
|
2018-09-13 16:29:12 +02:00
|
|
|
move_messages_to_archive(message_ids=msg_ids)
|
2019-06-21 19:40:57 +02:00
|
|
|
self._verify_archive_data(msg_ids, usermsg_ids)
|
2019-05-25 13:07:01 +02:00
|
|
|
|
2017-05-14 21:14:26 +02:00
|
|
|
with self.assertRaises(Message.DoesNotExist):
|
2018-09-13 16:29:12 +02:00
|
|
|
move_messages_to_archive(message_ids=msg_ids)
|
2017-05-14 21:14:26 +02:00
|
|
|
|
2020-05-16 20:30:11 +02:00
|
|
|
def test_archiving_messages_multiple_realms(self) -> None:
|
|
|
|
"""
|
|
|
|
Verifies that move_messages_to_archive works correctly
|
|
|
|
if called on messages in multiple realms.
|
|
|
|
"""
|
|
|
|
iago = self.example_user("iago")
|
|
|
|
othello = self.example_user("othello")
|
|
|
|
|
|
|
|
cordelia = self.lear_user("cordelia")
|
|
|
|
king = self.lear_user("king")
|
|
|
|
|
2023-09-01 23:05:08 +02:00
|
|
|
zulip_msg_ids = [self.send_personal_message(iago, othello) for i in range(3)]
|
|
|
|
lear_msg_ids = [self.send_personal_message(cordelia, king) for i in range(3)]
|
2023-01-02 20:50:23 +01:00
|
|
|
msg_ids = zulip_msg_ids + lear_msg_ids
|
2020-05-16 20:30:11 +02:00
|
|
|
usermsg_ids = self._get_usermessage_ids(msg_ids)
|
|
|
|
|
|
|
|
self._assert_archive_empty()
|
|
|
|
move_messages_to_archive(message_ids=msg_ids)
|
|
|
|
self._verify_archive_data(msg_ids, usermsg_ids)
|
|
|
|
|
|
|
|
restore_all_data_from_archive()
|
|
|
|
self._verify_restored_data(msg_ids, usermsg_ids)
|
|
|
|
|
2018-09-13 16:29:12 +02:00
|
|
|
    def test_archiving_messages_with_attachment(self) -> None:
        """Archiving messages with attachments should move the Attachment
        rows to ArchivedAttachment (preserving ids and message links), and
        restoring should recreate them exactly."""
        self._create_attachments()
        realm_id = get_realm("zulip").id
        host = get_realm("zulip").host
        # Two messages whose bodies link to overlapping sets of uploads;
        # zulip.txt is referenced by both.
        body1 = f"""Some files here ...[zulip.txt](
    http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt)
    http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py ....
    Some more.... http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py
    """
        body2 = f"""Some files here
    http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt ...
    http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/hello.txt ....
    http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/new.py ....
    """

        msg_ids = [
            self.send_personal_message(self.sender, self.recipient, body1),
            self.send_personal_message(self.sender, self.recipient, body2),
        ]

        # Snapshot, before archiving, which message ids each attachment is
        # linked to, so we can verify the links survive the round trip.
        attachment_id_to_message_ids: Dict[int, List[int]] = {}
        attachment_ids = list(
            Attachment.objects.filter(messages__id__in=msg_ids).values_list("id", flat=True),
        )
        for attachment_id in attachment_ids:
            attachment_id_to_message_ids[attachment_id] = list(
                Message.objects.filter(realm_id=realm_id, attachment__id=attachment_id).values_list(
                    "id", flat=True
                ),
            )

        usermsg_ids = self._get_usermessage_ids(msg_ids)

        self._assert_archive_empty()
        move_messages_to_archive(message_ids=msg_ids)
        self._verify_archive_data(msg_ids, usermsg_ids)

        # Both messages are gone, so no live Attachment rows should remain;
        # they should all have moved to ArchivedAttachment under the same ids.
        self.assertFalse(Attachment.objects.exists())
        archived_attachment_ids = list(
            ArchivedAttachment.objects.filter(messages__id__in=msg_ids).values_list(
                "id", flat=True
            ),
        )

        self.assertEqual(set(attachment_ids), set(archived_attachment_ids))
        # Each archived attachment must be linked to exactly the archived
        # copies of the messages it was originally attached to.
        for attachment_id in archived_attachment_ids:
            self.assertEqual(
                set(attachment_id_to_message_ids[attachment_id]),
                set(
                    ArchivedMessage.objects.filter(attachment__id=attachment_id).values_list(
                        "id", flat=True
                    )
                ),
            )

        restore_all_data_from_archive()
        self._verify_restored_data(msg_ids, usermsg_ids)

        restored_attachment_ids = list(
            Attachment.objects.filter(messages__id__in=msg_ids).values_list("id", flat=True),
        )

        # Restoring must bring back the same attachment ids with the same
        # message links as before archiving.
        self.assertEqual(set(attachment_ids), set(restored_attachment_ids))
        for attachment_id in restored_attachment_ids:
            self.assertEqual(
                set(attachment_id_to_message_ids[attachment_id]),
                set(
                    Message.objects.filter(
                        realm_id=realm_id, attachment__id=attachment_id
                    ).values_list("id", flat=True)
                ),
            )
|
2017-11-19 04:02:03 +01:00
|
|
|
def test_archiving_message_with_shared_attachment(self) -> None:
    """Attachments referenced by a second, still-live message must survive
    archiving of the first message, and only be deleted once no live
    message links to them."""
    self._create_attachments()
    realm = get_realm("zulip")
    realm_id = realm.id
    host = realm.host
    body = f"""Some files here ...[zulip.txt](
        http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt)
        http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py ....
        Some more.... http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py ...
        http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/new.py ....
        http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/hello.txt ....
    """

    msg_id = self.send_personal_message(self.sender, self.recipient, body)
    # A reply with identical contents, so both messages share the Attachments.
    reply_msg_id = self.send_personal_message(
        from_user=self.recipient,
        to_user=self.sender,
        content=body,
    )

    usermsg_ids = self._get_usermessage_ids([msg_id])
    attachment_ids = list(
        Attachment.objects.filter(messages__id=msg_id).values_list("id", flat=True),
    )

    self._assert_archive_empty()
    # Archive only the first of the two messages:
    move_messages_to_archive(message_ids=[msg_id])
    self._verify_archive_data([msg_id], usermsg_ids)
    # The reply still links to the attachments, so none may be deleted:
    self.assertEqual(Attachment.objects.count(), 5)

    self.assertEqual(
        set(
            ArchivedAttachment.objects.filter(messages__id=msg_id).values_list("id", flat=True)
        ),
        set(attachment_ids),
    )

    # Restore the first message:
    restore_all_data_from_archive()
    # Archive the second:
    move_messages_to_archive(message_ids=[reply_msg_id])
    # The restored message links to the Attachments, so they shouldn't be deleted:
    self.assertEqual(Attachment.objects.count(), 5)

    # Archive the first message again:
    move_messages_to_archive(message_ids=[msg_id])
    # With no live message referencing them, the attachments are now gone:
    self.assertEqual(Attachment.objects.count(), 0)

    # Restore everything:
    restore_all_data_from_archive()
    self.assertEqual(
        set(Attachment.objects.filter(messages__id=msg_id).values_list("id", flat=True)),
        set(attachment_ids),
    )
|
2023-07-28 16:46:24 +02:00
|
|
|
def test_archiving_message_with_scheduled_message(self) -> None:
    """Attachments referenced by a ScheduledMessage must survive archiving
    of the message that originally uploaded them, and remain (orphaned but
    present) after the scheduled message itself is deleted."""
    # Make sure that attachments referenced by scheduled messages don't get deleted
    self._create_attachments()
    realm_id = get_realm("zulip").id
    host = get_realm("zulip").host
    body = f"""Some files here ...[zulip.txt](
        http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt)
        http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py ....
        Some more.... http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py ...
        http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/new.py ....
        http://{host}/user_uploads/{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/hello.txt ....
    """

    msg_id = self.send_personal_message(self.sender, self.recipient, body)

    # Schedule a message with the same contents
    scheduled_msg_id = check_schedule_message(
        sender=self.sender,
        client=get_client("website"),
        recipient_type_name="private",
        message_to=[self.recipient.id],
        topic_name=None,
        message_content=body,
        deliver_at=timezone_now() + timedelta(hours=1),
    )

    usermsg_ids = self._get_usermessage_ids([msg_id])
    attachment_ids = list(
        Attachment.objects.filter(messages__id=msg_id).values_list("id", flat=True),
    )

    self._assert_archive_empty()
    # Archive one of the messages:
    move_messages_to_archive(message_ids=[msg_id])
    self._verify_archive_data([msg_id], usermsg_ids)
    # Attachments shouldn't have been deleted, as the scheduled message links to them:
    self.assertEqual(Attachment.objects.count(), 5)

    self.assertEqual(
        set(
            ArchivedAttachment.objects.filter(messages__id=msg_id).values_list("id", flat=True)
        ),
        set(attachment_ids),
    )

    # Delete the ScheduledMessage
    delete_scheduled_message(self.sender, scheduled_msg_id)

    # The Attachment object exists, with no message or scheduled message attached
    self.assertEqual(Attachment.objects.count(), 5)
    self.assertEqual(
        Attachment.objects.filter(messages=None, scheduled_messages=None).count(), 5
    )

    # There is also the ArchivedAttachment for each of them
    self.assertEqual(
        set(
            ArchivedAttachment.objects.filter(messages__id=msg_id).values_list("id", flat=True)
        ),
        set(attachment_ids),
    )
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-05-30 15:24:18 +02:00
|
|
|
class MoveMessageToArchiveWithSubMessages(MoveMessageToArchiveBase):
    def test_archiving_message_with_submessages(self) -> None:
        """SubMessages must be archived together with their message and come
        back intact when the archive is restored."""
        msg_id = self.send_stream_message(self.sender, "Verona")
        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")

        # Attach two submessages, from two different senders.
        for submitter_id, payload in [
            (cordelia.id, '{"name": "alice", "salary": 20}'),
            (hamlet.id, '{"name": "john", "salary": 30}'),
        ]:
            do_add_submessage(
                realm=get_realm("zulip"),
                sender_id=submitter_id,
                message_id=msg_id,
                msg_type="whatever",
                content=payload,
            )

        submessage_ids = list(
            SubMessage.objects.filter(message_id=msg_id).values_list("id", flat=True),
        )

        self.assertEqual(SubMessage.objects.filter(id__in=submessage_ids).count(), 2)
        move_messages_to_archive(message_ids=[msg_id])

        # Archiving moved the submessages into the archive tables:
        self.assertEqual(
            set(ArchivedSubMessage.objects.filter(message_id=msg_id).values_list("id", flat=True)),
            set(submessage_ids),
        )
        self.assertEqual(SubMessage.objects.filter(id__in=submessage_ids).count(), 0)

        restore_all_data_from_archive()
        # Restoring brings them back with their original ids:
        self.assertEqual(
            set(SubMessage.objects.filter(id__in=submessage_ids).values_list("id", flat=True)),
            set(submessage_ids),
        )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-16 18:08:15 +02:00
|
|
|
class MoveMessageToArchiveWithReactions(MoveMessageToArchiveBase):
    def test_archiving_message_with_reactions(self) -> None:
        """Reactions must be archived together with their message and come
        back intact when the archive is restored."""
        msg_id = self.send_stream_message(self.sender, "Verona")

        # Two users react to the message via the API.
        for name in ("hamlet", "cordelia"):
            self.api_post(
                self.example_user(name),
                f"/api/v1/messages/{msg_id}/reactions",
                zulip_reaction_info(),
            )

        reaction_ids = list(
            Reaction.objects.filter(message_id=msg_id).values_list("id", flat=True),
        )

        self.assertEqual(Reaction.objects.filter(id__in=reaction_ids).count(), 2)
        move_messages_to_archive(message_ids=[msg_id])

        # Archiving moved the reactions into the archive tables:
        self.assertEqual(
            set(ArchivedReaction.objects.filter(message_id=msg_id).values_list("id", flat=True)),
            set(reaction_ids),
        )
        self.assertEqual(Reaction.objects.filter(id__in=reaction_ids).count(), 0)

        restore_all_data_from_archive()
        # Restoring brings them back with their original ids:
        self.assertEqual(
            set(Reaction.objects.filter(id__in=reaction_ids).values_list("id", flat=True)),
            set(reaction_ids),
        )
|
2019-06-25 20:01:05 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-06-25 20:01:05 +02:00
|
|
|
class TestCleaningArchive(ArchiveMessagesTestingBase):
    def test_clean_archived_data(self) -> None:
        """clean_archived_data() should permanently delete only the archive
        transactions older than ARCHIVED_DATA_VACUUMING_DELAY_DAYS, together
        with their ArchivedMessage/ArchivedUserMessage rows."""
        self._make_expired_zulip_messages(7)
        archive_messages(chunk_size=2)  # Small chunk size to have multiple transactions

        transactions = list(ArchiveTransaction.objects.all())
        # Age every transaction except the last one past the vacuuming delay.
        for old_transaction in transactions[:-1]:
            old_transaction.timestamp = timezone_now() - timedelta(
                days=settings.ARCHIVED_DATA_VACUUMING_DELAY_DAYS + 1
            )
            old_transaction.save()

        message_ids_to_clean = list(
            ArchivedMessage.objects.filter(archive_transaction__in=transactions[:-1]).values_list(
                "id", flat=True
            )
        )

        clean_archived_data()
        remaining_transactions = list(ArchiveTransaction.objects.all())
        self.assert_length(remaining_transactions, 1)
        # All transactions except the last one were deleted:
        self.assertEqual(remaining_transactions[0].id, transactions[-1].id)
        # And corresponding ArchivedMessages should have been deleted:
        self.assertFalse(ArchivedMessage.objects.filter(id__in=message_ids_to_clean).exists())
        self.assertFalse(
            ArchivedUserMessage.objects.filter(message_id__in=message_ids_to_clean).exists()
        )

        # Whatever archived messages remain must all belong to the surviving transaction.
        for message in ArchivedMessage.objects.all():
            self.assertEqual(message.archive_transaction_id, remaining_transactions[0].id)
|
2020-02-19 18:28:12 +01:00
|
|
|
|
2020-05-07 14:24:45 +02:00
|
|
|
|
|
|
|
class TestGetRealmAndStreamsForArchiving(ZulipTestCase):
    def fix_ordering_of_result(self, result: List[Tuple[Realm, List[Stream]]]) -> None:
        """
        Helper that puts the structure returned by
        get_realms_and_streams_for_archiving into a consistent ordering,
        since the function itself makes no ordering guarantees.
        """
        # Order the (realm, streams) tuples by realm id:
        result.sort(key=lambda entry: entry[0].id)

        # And order each realm's stream list by stream id:
        for _realm, streams_list in result:
            streams_list.sort(key=lambda stream: stream.id)

    def simple_get_realms_and_streams_for_archiving(self) -> List[Tuple[Realm, List[Stream]]]:
        """
        An obvious, unoptimized re-implementation of the function under test.
        Comparing its output against the optimized implementation gives us
        extra verification of correctness.
        """

        result = []
        for realm in Realm.objects.all():
            if realm.message_retention_days != -1:
                # Realm-level retention applies; only streams that explicitly
                # disable retention are excluded.
                streams = Stream.objects.filter(realm=realm).exclude(message_retention_days=-1)
                result.append((realm, list(streams)))
            else:
                # Realm disables retention; include only streams that opt in
                # with their own finite retention policy.
                streams = (
                    Stream.objects.filter(realm=realm)
                    .exclude(message_retention_days__isnull=True)
                    .exclude(message_retention_days=-1)
                )
                if streams.exists():
                    result.append((realm, list(streams)))

        return result

    def test_get_realms_and_streams_for_archiving(self) -> None:
        zulip_realm = get_realm("zulip")
        zulip_realm.message_retention_days = 10
        zulip_realm.save()

        verona = get_stream("Verona", zulip_realm)
        verona.message_retention_days = -1  # Block archiving for this stream
        verona.save()
        denmark = get_stream("Denmark", zulip_realm)
        denmark.message_retention_days = 1
        denmark.save()

        zephyr_realm = get_realm("zephyr")
        zephyr_realm.message_retention_days = -1
        zephyr_realm.save()
        self.make_stream("normal stream", realm=zephyr_realm)

        archiving_blocked_zephyr_stream = self.make_stream("no archiving", realm=zephyr_realm)
        archiving_blocked_zephyr_stream.message_retention_days = -1
        archiving_blocked_zephyr_stream.save()

        archiving_enabled_zephyr_stream = self.make_stream("with archiving", realm=zephyr_realm)
        archiving_enabled_zephyr_stream.message_retention_days = 1
        archiving_enabled_zephyr_stream.save()

        no_archiving_realm = do_create_realm(string_id="no_archiving", name="no_archiving")
        do_set_realm_property(no_archiving_realm, "invite_required", False, acting_user=None)
        do_set_realm_property(no_archiving_realm, "message_retention_days", -1, acting_user=None)

        # Realm for testing the edge case where it has a default retention policy,
        # but all streams disable it.
        realm_all_streams_archiving_disabled = do_create_realm(
            string_id="with_archiving", name="with_archiving"
        )
        do_set_realm_property(
            realm_all_streams_archiving_disabled, "invite_required", False, acting_user=None
        )
        do_set_realm_property(
            realm_all_streams_archiving_disabled, "message_retention_days", 1, acting_user=None
        )
        Stream.objects.filter(realm=realm_all_streams_archiving_disabled).update(
            message_retention_days=-1
        )

        # We construct a list representing how the result of get_realms_and_streams_for_archiving should be.
        # One nuisance is that the ordering of the elements in the result structure is not deterministic,
        # so we use a helper to order both structures in a consistent manner. This wouldn't be necessary
        # if python had a true "unordered list" data structure. Set doesn't do the job, because it requires
        # elements to be hashable.
        expected_result: List[Tuple[Realm, List[Stream]]] = [
            (zulip_realm, list(Stream.objects.filter(realm=zulip_realm).exclude(id=verona.id))),
            (zephyr_realm, [archiving_enabled_zephyr_stream]),
            (realm_all_streams_archiving_disabled, []),
        ]
        self.fix_ordering_of_result(expected_result)

        simple_algorithm_result = self.simple_get_realms_and_streams_for_archiving()
        self.fix_ordering_of_result(simple_algorithm_result)

        result = get_realms_and_streams_for_archiving()
        self.fix_ordering_of_result(result)

        # The optimized implementation must match both the hand-built
        # expectation and the naive reference implementation.
        self.assert_length(result, len(expected_result))
        self.assertEqual(result, expected_result)

        self.assert_length(result, len(simple_algorithm_result))
        self.assertEqual(result, simple_algorithm_result)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-22 11:26:06 +02:00
|
|
|
class TestRestoreStreamMessages(ArchiveMessagesTestingBase):
    def test_restore_retention_policy_deletions_for_stream(self) -> None:
        """Restoring a stream's retention-policy deletions must bring back
        only messages archived by the policy, not those archived via manual
        deletion."""
        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")

        realm = get_realm("zulip")
        stream_name = "Verona"
        stream = get_stream(stream_name, realm)

        # Two messages we will archive by hand...
        message_ids_to_archive_manually = [
            self.send_stream_message(cordelia, stream_name, str(i)) for i in range(2)
        ]
        usermessage_ids_to_archive_manually = self._get_usermessage_ids(
            message_ids_to_archive_manually
        )
        # ...and two that the retention policy will pick up.
        message_ids_to_archive_by_policy = [
            self.send_stream_message(hamlet, stream_name, str(i)) for i in range(2)
        ]
        usermessage_ids_to_archive_by_policy = self._get_usermessage_ids(
            message_ids_to_archive_by_policy
        )

        expected_archived_message_ids = (
            message_ids_to_archive_manually + message_ids_to_archive_by_policy
        )
        expected_archived_usermessage_ids = (
            usermessage_ids_to_archive_manually + usermessage_ids_to_archive_by_policy
        )

        # Give the stream a 5-day policy and back-date the policy batch past it.
        self._set_stream_message_retention_value(stream, 5)
        self._change_messages_date_sent(
            message_ids_to_archive_by_policy, timezone_now() - timedelta(days=6)
        )

        move_messages_to_archive(message_ids_to_archive_manually)
        archive_messages()

        self._verify_archive_data(expected_archived_message_ids, expected_archived_usermessage_ids)

        restore_retention_policy_deletions_for_stream(stream)

        # Verify that we restore the stream messages that were archived due to retention policy,
        # but not the ones manually deleted.
        self.assert_length(
            Message.objects.filter(id__in=message_ids_to_archive_by_policy),
            len(message_ids_to_archive_by_policy),
        )
        self.assertFalse(Message.objects.filter(id__in=message_ids_to_archive_manually))
|
|
|
|
|
|
|
|
|
2020-02-19 18:28:12 +01:00
|
|
|
class TestDoDeleteMessages(ZulipTestCase):
|
|
|
|
def test_do_delete_messages_multiple(self) -> None:
|
|
|
|
realm = get_realm("zulip")
|
2021-02-12 08:20:45 +01:00
|
|
|
cordelia = self.example_user("cordelia")
|
2023-09-01 23:05:08 +02:00
|
|
|
message_ids = [self.send_stream_message(cordelia, "Verona", str(i)) for i in range(10)]
|
2020-02-19 18:28:12 +01:00
|
|
|
messages = Message.objects.filter(id__in=message_ids)
|
|
|
|
|
2024-04-27 16:51:14 +02:00
|
|
|
with self.assert_database_query_count(21):
|
2020-02-19 18:28:12 +01:00
|
|
|
do_delete_messages(realm, messages)
|
|
|
|
self.assertFalse(Message.objects.filter(id__in=message_ids).exists())
|
|
|
|
|
|
|
|
archived_messages = ArchivedMessage.objects.filter(id__in=message_ids)
|
|
|
|
self.assertEqual(archived_messages.count(), len(message_ids))
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length({message.archive_transaction_id for message in archived_messages}, 1)
|
2020-03-01 19:59:20 +01:00
|
|
|
|
|
|
|
    def test_old_event_format_processed_correctly(self) -> None:
        """
        do_delete_messages used to send events with users in dict format {"id": <int>}.
        We have a block in process_notification to deal with that old format, that should be
        deleted in a later release. This test is meant to ensure correctness of that block.
        """
        realm = get_realm("zulip")
        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")
        # A 1:1 private message between the two users gives us a concrete
        # message row to build the legacy-format delete event around.
        message_id = self.send_personal_message(cordelia, hamlet)
        message = Message.objects.get(id=message_id)

        # Hand-construct the delete_message event in the legacy shape.
        event = {
            "type": "delete_message",
            "sender": message.sender.email,
            "sender_id": message.sender_id,
            "message_id": message.id,
            "message_type": "private",
            "recipient_id": message.recipient_id,
        }
        # Archive the message first, mirroring what do_delete_messages does
        # before notifying clients.
        move_messages_to_archive([message_id])
        # We only send the event to see no exception is thrown - as it would be if the block
        # in process_notification to handle this old format of "users to notify" wasn't correct.
        send_event(realm, event, [{"id": cordelia.id}, {"id": hamlet.id}])
|