import datetime

from django.db import connection
from django.db.models.query import QuerySet, Q
from django.utils.timezone import now as timezone_now

from sqlalchemy.sql import (
    column,
    literal,
    func,
)

from zerver.lib.request import REQ
from zerver.models import (
    Message,
    Recipient,
    UserMessage,
    UserProfile,
)

from typing import Any, Dict, List, Optional, Tuple

# Only use these constants for events.
ORIG_TOPIC = "orig_subject"
TOPIC_NAME = "subject"
TOPIC_LINKS = "subject_links"
MATCH_TOPIC = "match_subject"

# This constant is actually embedded into
# the JSON data for message edit history,
# so we'll always need to handle legacy data
# unless we do a pretty tricky migration.
LEGACY_PREV_TOPIC = "prev_subject"

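# For illustration, a legacy entry in a message's edit_history JSON might
# look roughly like the following (the surrounding fields are illustrative;
# LEGACY_PREV_TOPIC only refers to the "prev_subject" key):
#
#     {"timestamp": 1541100000, "user_id": 123, "prev_subject": "old topic"}
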
# This is used in low-level message functions in
# zerver/lib/message.py, and it's not user facing.
DB_TOPIC_NAME = "subject"
MESSAGE__TOPIC = 'message__subject'

def REQ_topic() -> Optional[str]:
    # REQ handlers really return a REQ, but we
    # lie to make the rest of the type matching work.
    return REQ(
        whence='topic',
        aliases=['subject'],
        converter=lambda x: x.strip(),
        default=None,
    )  # type: ignore # see comment above

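# A minimal usage sketch (the view name and the other parameters are
# illustrative, not taken from this module): a view can accept an optional
# topic argument by using REQ_topic() as its default, e.g.
#
#     @has_request_variables
#     def update_message_backend(request, user_profile,
#                                topic_name: Optional[str]=REQ_topic()) -> HttpResponse:
#         ...
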
def topic_match_sa(topic_name: str) -> Any:
    # _sa is short for Sql Alchemy, which we use mostly for
    # queries that search messages
    topic_cond = func.upper(column("subject")) == func.upper(literal(topic_name))
    return topic_cond

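# A minimal sketch of how this condition can be combined into a larger
# SQLAlchemy query (the table and the selected column are illustrative):
#
#     from sqlalchemy.sql import select, table
#     zerver_message = table("zerver_message", column("id"), column("subject"))
#     query = (select([column("id")])
#              .select_from(zerver_message)
#              .where(topic_match_sa("hello")))
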
def topic_column_sa() -> Any:
    return column("subject")

def filter_by_exact_message_topic(query: QuerySet, message: Message) -> QuerySet:
    topic_name = message.topic_name()
    return query.filter(subject=topic_name)

def filter_by_topic_name_via_message(query: QuerySet, topic_name: str) -> QuerySet:
    return query.filter(message__subject__iexact=topic_name)

def messages_for_topic(stream_id: int, topic_name: str) -> QuerySet:
    # It might be the case that we really want subject__contains
    # here. This code is used for the archive.
    return Message.objects.filter(
        recipient__type_id=stream_id,
        subject=topic_name,
    )

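# For example, archive code can fetch a topic's messages with something
# like the following (the ordering shown is illustrative):
#
#     messages_for_topic(stream.id, topic_name).order_by('id')
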
def save_message_for_edit_use_case(message: Message) -> None:
    message.save(update_fields=["subject", "content", "rendered_content",
                                "rendered_content_version", "last_edit_time",
                                "edit_history"])

def user_message_exists_for_topic(user_profile: UserProfile,
                                  recipient: Recipient,
                                  topic_name: str) -> bool:
    return UserMessage.objects.filter(
        user_profile=user_profile,
        message__recipient=recipient,
        message__subject__iexact=topic_name,
    ).exists()

def update_messages_for_topic_edit(message: Message,
                                   propagate_mode: str,
                                   orig_topic_name: str,
                                   topic_name: str) -> List[Message]:
    propagate_query = Q(recipient=message.recipient, subject=orig_topic_name)
    # We only change messages up to 2 days in the past, to avoid hammering our
    # DB by changing an unbounded number of messages.
    if propagate_mode == 'change_all':
        before_bound = timezone_now() - datetime.timedelta(days=2)

        propagate_query = (propagate_query & ~Q(id=message.id) &
                           Q(pub_date__range=(before_bound, timezone_now())))
    if propagate_mode == 'change_later':
        propagate_query = propagate_query & Q(id__gt=message.id)
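
    # To summarize the query construction above: the base filter matches
    # every message in the same recipient/topic; 'change_all' additionally
    # excludes the edited message itself and bounds the search to the last
    # two days of messages, while 'change_later' only keeps messages with a
    # larger id (i.e. messages sent after the edited one).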

    messages = Message.objects.filter(propagate_query).select_related()

    # Evaluate the query before running the update
    messages_list = list(messages)
    messages.update(subject=topic_name)

    for m in messages_list:
        # The cached ORM object is not changed by messages.update()
        # and the remote cache update requires the new value
        m.set_topic_name(topic_name)

    return messages_list

def generate_topic_history_from_db_rows(rows: List[Tuple[str, int]]) -> List[Dict[str, Any]]:
    canonical_topic_names = {}  # type: Dict[str, Tuple[int, str]]

    # Sort rows by max_message_id so that if a topic
    # has many different casings, we use the most
    # recent row.
    rows = sorted(rows, key=lambda tup: tup[1])

    for (topic_name, max_message_id) in rows:
        canonical_name = topic_name.lower()
        canonical_topic_names[canonical_name] = (max_message_id, topic_name)

    history = []
    for canonical_topic, (max_message_id, topic_name) in canonical_topic_names.items():
        history.append(dict(
            name=topic_name,
            max_id=max_message_id,
        ))
    return sorted(history, key=lambda x: -x['max_id'])

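# For example (values are illustrative),
#
#     generate_topic_history_from_db_rows([("Hello", 10), ("hello", 15), ("Other", 12)])
#
# returns
#
#     [{'name': 'hello', 'max_id': 15}, {'name': 'Other', 'max_id': 12}]
#
# because the casing from the most recent message ("hello", with id 15) wins.
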
def get_topic_history_for_stream(user_profile: UserProfile,
                                 recipient: Recipient,
                                 public_history: bool) -> List[Dict[str, Any]]:
    cursor = connection.cursor()
    if public_history:
        query = '''
        SELECT
            "zerver_message"."subject" as topic,
            max("zerver_message".id) as max_message_id
        FROM "zerver_message"
        WHERE (
            "zerver_message"."recipient_id" = %s
        )
        GROUP BY (
            "zerver_message"."subject"
        )
        ORDER BY max("zerver_message".id) DESC
        '''
        cursor.execute(query, [recipient.id])
    else:
        query = '''
        SELECT
            "zerver_message"."subject" as topic,
            max("zerver_message".id) as max_message_id
        FROM "zerver_message"
        INNER JOIN "zerver_usermessage" ON (
            "zerver_usermessage"."message_id" = "zerver_message"."id"
        )
        WHERE (
            "zerver_usermessage"."user_profile_id" = %s AND
            "zerver_message"."recipient_id" = %s
        )
        GROUP BY (
            "zerver_message"."subject"
        )
        ORDER BY max("zerver_message".id) DESC
        '''
        cursor.execute(query, [user_profile.id, recipient.id])
    rows = cursor.fetchall()
    cursor.close()

    return generate_topic_history_from_db_rows(rows)

def get_topic_history_for_web_public_stream(recipient: Recipient) -> List[Dict[str, Any]]:
    cursor = connection.cursor()
    query = '''
    SELECT
        "zerver_message"."subject" as topic,
        max("zerver_message".id) as max_message_id
    FROM "zerver_message"
    WHERE (
        "zerver_message"."recipient_id" = %s
    )
    GROUP BY (
        "zerver_message"."subject"
    )
    ORDER BY max("zerver_message".id) DESC
    '''
    cursor.execute(query, [recipient.id])
    rows = cursor.fetchall()
    cursor.close()

    return generate_topic_history_from_db_rows(rows)