2020-12-28 11:30:07 +01:00
|
|
|
from datetime import datetime
|
2020-06-11 00:54:34 +02:00
|
|
|
from typing import Any, Dict, List, Optional, Tuple
|
2018-11-01 15:16:26 +01:00
|
|
|
|
2020-12-28 11:30:07 +01:00
|
|
|
import orjson
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.db import connection
|
2023-03-04 01:52:14 +01:00
|
|
|
from django.db.models import Q, QuerySet
|
2020-11-16 22:52:27 +01:00
|
|
|
from sqlalchemy.sql import ColumnElement, column, func, literal
|
2021-08-21 01:07:28 +02:00
|
|
|
from sqlalchemy.types import Boolean, Text
|
2018-11-01 21:48:49 +01:00
|
|
|
|
2018-11-09 18:35:34 +01:00
|
|
|
from zerver.lib.request import REQ
|
2022-03-02 00:49:18 +01:00
|
|
|
from zerver.lib.types import EditHistoryEvent
|
2020-10-16 17:45:21 +02:00
|
|
|
from zerver.models import Message, Stream, UserMessage, UserProfile
|
2018-11-01 15:16:26 +01:00
|
|
|
|
2018-11-01 18:26:20 +01:00
|
|
|
# Only use these constants for events.
# These are the wire-format field names used in events/API payloads;
# several keep the legacy "subject" spelling for compatibility.
ORIG_TOPIC = "orig_subject"
TOPIC_NAME = "subject"
TOPIC_LINKS = "topic_links"
MATCH_TOPIC = "match_subject"

# Prefix used to mark a topic as resolved.
RESOLVED_TOPIC_PREFIX = "✔ "

# This constant is pretty closely coupled to the
# database, but it's the JSON field.
EXPORT_TOPIC_NAME = "subject"


"""
The following functions are for user-facing APIs
where we'll want to support "subject" for a while.
"""
|
2018-11-10 22:50:28 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-11-10 22:50:28 +01:00
|
|
|
def get_topic_from_message_info(message_info: Dict[str, Any]) -> str:
    """
    Use this where you are getting dicts that are based off of messages
    that may come from the outside world, especially from third party
    APIs and bots.

    We prefer 'topic' to 'subject' here. We expect at least one field
    to be present (or the caller must know how to handle KeyError).
    """
    try:
        return message_info["topic"]
    except KeyError:
        # Fall back to the legacy field name; a missing "subject"
        # deliberately raises KeyError for the caller to handle.
        return message_info["subject"]
|
2018-11-10 22:50:28 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-11-09 18:35:34 +01:00
|
|
|
def REQ_topic() -> Optional[str]:
    """Declare a request parameter for a message topic.

    Accepts "topic" with the legacy alias "subject"; leading/trailing
    whitespace is stripped from the value, and the parameter is
    optional (defaults to None).
    """

    # Named converter instead of an inline lambda; same signature
    # (var_name, value) that REQ expects.
    def strip_topic(var_name: str, value: str) -> str:
        return value.strip()

    # REQ handlers really return a REQ, but we
    # lie to make the rest of the type matching work.
    return REQ(
        whence="topic",
        aliases=["subject"],
        converter=strip_topic,
        default=None,
    )
|
2018-11-09 18:35:34 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
"""
|
2018-11-10 23:01:45 +01:00
|
|
|
TRY TO KEEP THIS DIVIDING LINE.
|
|
|
|
|
|
|
|
Below this line we want to make it so that functions are only
|
|
|
|
using "subject" in the DB sense, and nothing customer facing.
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
"""
|
2018-11-10 23:01:45 +01:00
|
|
|
|
|
|
|
# This is used in low-level message functions in
|
|
|
|
# zerver/lib/message.py, and it's not user facing.
|
|
|
|
DB_TOPIC_NAME = "subject"
|
2021-02-12 08:20:45 +01:00
|
|
|
MESSAGE__TOPIC = "message__subject"
|
2018-11-10 23:01:45 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-26 10:03:34 +02:00
|
|
|
def topic_match_sa(topic_name: str) -> ColumnElement[Boolean]:
    """Return a SQLAlchemy condition matching the topic case-insensitively."""
    # _sa is short for SQLAlchemy, which we use mostly for
    # queries that search messages.  Compare both sides upper-cased
    # so the match ignores case.
    lhs = func.upper(column("subject", Text))
    rhs = func.upper(literal(topic_name))
    return lhs == rhs
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-26 10:03:34 +02:00
|
|
|
def get_resolved_topic_condition_sa() -> ColumnElement[Boolean]:
    """Return a SQLAlchemy condition: the stored topic name carries the
    resolved-topic prefix."""
    return column("subject", Text).startswith(RESOLVED_TOPIC_PREFIX)
|
|
|
|
|
|
|
|
|
2022-06-26 10:03:34 +02:00
|
|
|
def topic_column_sa() -> ColumnElement[Text]:
    """Return the SQLAlchemy column for the topic (stored as "subject")."""
    topic_col = column("subject", Text)
    return topic_col
|
2018-11-09 17:06:00 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-23 20:01:38 +02:00
|
|
|
def filter_by_exact_message_topic(query: QuerySet[Message], message: Message) -> QuerySet[Message]:
    """Narrow query to messages whose topic exactly matches message's topic.

    Note this is a case-sensitive (exact) match, unlike the iexact
    helpers below.
    """
    return query.filter(subject=message.topic_name())
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-23 20:01:38 +02:00
|
|
|
def filter_by_topic_name_via_message(
    query: QuerySet[UserMessage], topic_name: str
) -> QuerySet[UserMessage]:
    """Narrow a UserMessage queryset to rows whose message's topic
    matches topic_name case-insensitively."""
    narrowed = query.filter(message__subject__iexact=topic_name)
    return narrowed
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-23 20:01:38 +02:00
|
|
|
def messages_for_topic(stream_recipient_id: int, topic_name: str) -> QuerySet[Message]:
    """All messages for the given stream recipient whose topic matches
    topic_name case-insensitively."""
    topic_filter = Q(recipient_id=stream_recipient_id) & Q(subject__iexact=topic_name)
    return Message.objects.filter(topic_filter)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-11-01 20:12:59 +01:00
|
|
|
def save_message_for_edit_use_case(message: Message) -> None:
    """Persist exactly the set of fields that a message edit can touch."""
    edited_fields = [
        TOPIC_NAME,
        "content",
        "rendered_content",
        "rendered_content_version",
        "last_edit_time",
        "edit_history",
        "has_attachment",
        "has_image",
        "has_link",
        "recipient_id",
    ]
    message.save(update_fields=edited_fields)
|
2019-09-24 21:10:56 +02:00
|
|
|
|
2018-11-01 20:12:59 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def user_message_exists_for_topic(
    user_profile: UserProfile, recipient_id: int, topic_name: str
) -> bool:
    """Whether the user has received at least one message in this
    recipient (stream) under the given topic (case-insensitive)."""
    matching = UserMessage.objects.filter(
        user_profile=user_profile,
        message__recipient_id=recipient_id,
        message__subject__iexact=topic_name,
    )
    return matching.exists()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-04-22 07:02:04 +02:00
|
|
|
def update_edit_history(
    message: Message, last_edit_time: datetime, edit_history_event: EditHistoryEvent
) -> None:
    """Prepend edit_history_event to message's serialized edit history
    and stamp last_edit_time.

    The edit history is stored as a JSON-encoded string on the message;
    newest events come first.  Does not save the message.
    """
    message.last_edit_time = last_edit_time
    if message.edit_history is None:
        events: List[EditHistoryEvent] = [edit_history_event]
    else:
        # Deserialize the existing history and put the new event first.
        events = [edit_history_event, *orjson.loads(message.edit_history)]
    message.edit_history = orjson.dumps(events).decode()
|
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def update_messages_for_topic_edit(
    acting_user: UserProfile,
    edited_message: Message,
    propagate_mode: str,
    orig_topic_name: str,
    topic_name: Optional[str],
    new_stream: Optional[Stream],
    old_stream: Stream,
    edit_history_event: EditHistoryEvent,
    last_edit_time: datetime,
) -> List[Message]:
    """Apply a topic rename and/or stream move to the messages it
    propagates to, per propagate_mode.

    The code below special-cases the "change_all" and "change_later"
    modes; any other mode (presumably "change_one" — confirm against
    callers) matches only via the base query.  Updated objects are
    mutated in memory and persisted with one bulk_update; the list of
    updated Message objects is returned so the caller can refresh
    caches and notify clients.
    """
    # Candidate messages: same stream recipient, case-insensitive
    # topic match against the pre-edit topic name.
    propagate_query = Q(recipient_id=old_stream.recipient_id, subject__iexact=orig_topic_name)
    if propagate_mode == "change_all":
        # Exclude the edited message itself (handled separately by the caller).
        propagate_query = propagate_query & ~Q(id=edited_message.id)
    if propagate_mode == "change_later":
        # Only messages sent after the edited one.
        propagate_query = propagate_query & Q(id__gt=edited_message.id)

    messages = Message.objects.filter(propagate_query).select_related()

    update_fields = ["edit_history", "last_edit_time"]

    if new_stream is not None:
        # If we're moving the messages between streams, only move
        # messages that the acting user can access, so that one cannot
        # gain access to messages through moving them.
        from zerver.lib.message import bulk_access_messages

        messages_list = bulk_access_messages(acting_user, messages, stream=old_stream)
    else:
        # For single-message edits or topic moves within a stream, we
        # allow moving history the user may not have access in order
        # to keep topics together.
        messages_list = list(messages)

    # The cached ORM objects are not changed by the upcoming
    # messages.update(), and the remote cache update (done by the
    # caller) requires the new value, so we manually update the
    # objects in addition to sending a bulk query to the database.
    if new_stream is not None:
        update_fields.append("recipient")
        for m in messages_list:
            assert new_stream.recipient is not None
            m.recipient = new_stream.recipient
    if topic_name is not None:
        update_fields.append("subject")
        for m in messages_list:
            m.set_topic_name(topic_name)

    # Record the edit event (newest-first) on every affected message.
    for message in messages_list:
        update_edit_history(message, last_edit_time, edit_history_event)

    Message.objects.bulk_update(messages_list, update_fields)

    return messages_list
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-11-01 15:16:26 +01:00
|
|
|
def generate_topic_history_from_db_rows(rows: List[Tuple[str, int]]) -> List[Dict[str, Any]]:
    """Collapse (topic_name, max_message_id) rows into a newest-first
    topic history, deduplicating topics that differ only by case.

    For each case-insensitive topic, the casing from the row with the
    highest max_message_id wins.
    """
    latest_by_canonical: Dict[str, Tuple[int, str]] = {}

    # Walk rows in ascending max_message_id order so that if a topic
    # has many different casings, the most recent row's casing is the
    # one we keep.
    for name, max_id in sorted(rows, key=lambda row: row[1]):
        latest_by_canonical[name.lower()] = (max_id, name)

    history = [
        {"name": name, "max_id": max_id}
        for (max_id, name) in latest_by_canonical.values()
    ]
    history.sort(key=lambda entry: entry["max_id"], reverse=True)
    return history
|
2018-11-01 15:16:26 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-10-16 17:45:21 +02:00
|
|
|
def get_topic_history_for_public_stream(recipient_id: int) -> List[Dict[str, Any]]:
    """Return the topic history (newest-first, case-insensitively
    deduplicated) for a public stream's recipient, without any
    per-user visibility filtering."""
    query = """
    SELECT
        "zerver_message"."subject" as topic,
        max("zerver_message".id) as max_message_id
    FROM "zerver_message"
    WHERE (
        "zerver_message"."recipient_id" = %s
    )
    GROUP BY (
        "zerver_message"."subject"
    )
    ORDER BY max("zerver_message".id) DESC
    """
    # Use the cursor as a context manager so it is closed even if
    # execute()/fetchall() raises; the previous code leaked the cursor
    # on any database error.
    with connection.cursor() as cursor:
        cursor.execute(query, [recipient_id])
        rows = cursor.fetchall()

    return generate_topic_history_from_db_rows(rows)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_topic_history_for_stream(
    user_profile: UserProfile, recipient_id: int, public_history: bool
) -> List[Dict[str, Any]]:
    """Return the topic history (newest-first, case-insensitively
    deduplicated) for a stream's recipient.

    With public_history, all messages count; otherwise only messages
    the user received (has a UserMessage row for) are considered.
    """
    if public_history:
        return get_topic_history_for_public_stream(recipient_id)

    query = """
    SELECT
        "zerver_message"."subject" as topic,
        max("zerver_message".id) as max_message_id
    FROM "zerver_message"
    INNER JOIN "zerver_usermessage" ON (
        "zerver_usermessage"."message_id" = "zerver_message"."id"
    )
    WHERE (
        "zerver_usermessage"."user_profile_id" = %s AND
        "zerver_message"."recipient_id" = %s
    )
    GROUP BY (
        "zerver_message"."subject"
    )
    ORDER BY max("zerver_message".id) DESC
    """
    # Use the cursor as a context manager so it is closed even if
    # execute()/fetchall() raises; the previous code leaked the cursor
    # on any database error.
    with connection.cursor() as cursor:
        cursor.execute(query, [user_profile.id, recipient_id])
        rows = cursor.fetchall()

    return generate_topic_history_from_db_rows(rows)
|
2022-10-28 00:25:31 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_topic_resolution_and_bare_name(stored_name: str) -> Tuple[bool, str]:
    """
    Resolved topics are denoted only by a title change, not by a boolean toggle in a database column. This
    method inspects the topic name and returns a tuple of:

    - Whether the topic has been resolved
    - The topic name with the resolution prefix, if present in stored_name, removed
    """
    is_resolved = stored_name.startswith(RESOLVED_TOPIC_PREFIX)
    if not is_resolved:
        return (False, stored_name)
    bare_name = stored_name[len(RESOLVED_TOPIC_PREFIX) :]
    return (True, bare_name)
|