from typing import Annotated

from django.http import HttpRequest, HttpResponse
from django.utils.translation import gettext as _
from pydantic import Json, NonNegativeInt

from zerver.actions.message_flags import (
    do_mark_all_as_read,
    do_mark_stream_messages_as_read,
    do_update_message_flags,
)
from zerver.lib.exceptions import JsonableError
from zerver.lib.narrow import NarrowParameter, fetch_messages, parse_anchor_value
from zerver.lib.request import RequestNotes
from zerver.lib.response import json_success
from zerver.lib.streams import access_stream_by_id
from zerver.lib.topic import user_message_exists_for_topic
from zerver.lib.typed_endpoint import (
    ApiParamConfig,
    typed_endpoint,
    typed_endpoint_without_parameters,
)
from zerver.models import UserActivity, UserProfile


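# Returns the user's most recent UserActivity row for either of the two
# flag-update endpoints below; the NOTE comments on those endpoints keep this
# query list in sync with their function names.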
def get_latest_update_message_flag_activity(user_profile: UserProfile) -> UserActivity | None:
    return (
        UserActivity.objects.filter(
            user_profile=user_profile,
            query__in=["update_message_flags", "update_message_flags_for_narrow"],
        )
        .order_by("last_visit")
        .last()
    )


# NOTE: If this function name is changed, add the new name to the
# query in get_latest_update_message_flag_activity
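# Updates the given flag (per the `op` parameter) on an explicit list of
# message IDs.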
@typed_endpoint
def update_message_flags(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    messages: Json[list[int]],
    operation: Annotated[str, ApiParamConfig("op")],
    flag: str,
) -> HttpResponse:
    request_notes = RequestNotes.get_notes(request)
    assert request_notes.log_data is not None

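    # do_update_message_flags returns how many messages were actually updated,
    # which may differ from the number requested; the request log records both.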
    count = do_update_message_flags(user_profile, operation, flag, messages)

    target_count_str = str(len(messages))
    log_data_str = f"[{operation} {flag}/{target_count_str}] actually {count}"
    request_notes.log_data["extra"] = log_data_str

    return json_success(
        request,
        data={
            "messages": messages,  # Useless, but included for backwards compatibility.
        },
    )


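# Upper bound on how many messages a single update_message_flags_for_narrow
# request will process; num_before/num_after are clamped below to respect it.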
MAX_MESSAGES_PER_UPDATE = 5000


# NOTE: If this function name is changed, add the new name to the
# query in get_latest_update_message_flag_activity
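# Like update_message_flags, but applies the flag change to the messages
# matching a narrow, in an anchor-based batch; the response reports how far
# the batch reached so clients can page through the rest of the narrow.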
@typed_endpoint
def update_message_flags_for_narrow(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    anchor_val: Annotated[str, ApiParamConfig("anchor")],
    include_anchor: Json[bool] = True,
    num_before: Json[NonNegativeInt],
    num_after: Json[NonNegativeInt],
    narrow: Json[list[NarrowParameter] | None],
    operation: Annotated[str, ApiParamConfig("op")],
    flag: str,
) -> HttpResponse:
    anchor = parse_anchor_value(anchor_val, use_first_unread_anchor=False)

    if num_before > 0 and num_after > 0 and not include_anchor:
        raise JsonableError(_("The anchor can only be excluded at an end of the range"))

    # Clamp such that num_before + num_after <= MAX_MESSAGES_PER_UPDATE.
    num_before = min(
        num_before, max(MAX_MESSAGES_PER_UPDATE - num_after, MAX_MESSAGES_PER_UPDATE // 2)
    )
    num_after = min(num_after, MAX_MESSAGES_PER_UPDATE - num_before)
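    # For example, with MAX_MESSAGES_PER_UPDATE = 5000, a request for
    # num_before=4000, num_after=3000 is clamped to 2500/2500: num_before is
    # never forced below MAX_MESSAGES_PER_UPDATE // 2 (unless less was
    # requested), and the combined total never exceeds the cap.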

    query_info = fetch_messages(
        narrow=narrow,
        user_profile=user_profile,
        realm=user_profile.realm,
        is_web_public_query=False,
        anchor=anchor,
        include_anchor=include_anchor,
        num_before=num_before,
        num_after=num_after,
    )

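    # Each row returned by fetch_messages carries the message ID in its first
    # column, which is all we need here.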
    messages = [row[0] for row in query_info.rows]
    updated_count = do_update_message_flags(user_profile, operation, flag, messages)

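    # The counts and boundary IDs below describe how far this batch got;
    # found_oldest/found_newest indicate whether the narrow was exhausted, so a
    # client can issue a follow-up request anchored at last_processed_id if not.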
    return json_success(
        request,
        data={
            "processed_count": len(messages),
            "updated_count": updated_count,
            "first_processed_id": messages[0] if messages else None,
            "last_processed_id": messages[-1] if messages else None,
            "found_oldest": query_info.found_oldest,
            "found_newest": query_info.found_newest,
        },
    )


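# Marks every message as read for the user. The bulk update runs with a
# timeout; if it does not finish in time, the response reports complete=False
# so the client can retry until complete=True.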
@typed_endpoint_without_parameters
def mark_all_as_read(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
    request_notes = RequestNotes.get_notes(request)
    count = do_mark_all_as_read(user_profile, timeout=50)
    if count is None:
        return json_success(request, data={"complete": False})

    log_data_str = f"[{count} updated]"
    assert request_notes.log_data is not None
    request_notes.log_data["extra"] = log_data_str

    return json_success(request, data={"complete": True})


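# Marks all messages in the given stream as read for the user;
# access_stream_by_id enforces that the user can access the stream.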
@typed_endpoint
def mark_stream_as_read(
    request: HttpRequest, user_profile: UserProfile, *, stream_id: Json[int]
) -> HttpResponse:
    stream, sub = access_stream_by_id(user_profile, stream_id)
    assert stream.recipient_id is not None
    count = do_mark_stream_messages_as_read(user_profile, stream.recipient_id)

    log_data_str = f"[{count} updated]"
    log_data = RequestNotes.get_notes(request).log_data
    assert log_data is not None
    log_data["extra"] = log_data_str

    return json_success(request)


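# Marks all messages in a single topic of the given stream as read for the
# user, after checking that the topic exists from the user's point of view.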
@typed_endpoint
def mark_topic_as_read(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    stream_id: Json[int],
    topic_name: str,
) -> HttpResponse:
    stream, sub = access_stream_by_id(user_profile, stream_id)
    assert stream.recipient_id is not None

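    # For a non-empty topic name, reject the request unless the user actually
    # has at least one message in that topic, rather than silently marking
    # nothing as read.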
    if topic_name:
        topic_exists = user_message_exists_for_topic(
            user_profile=user_profile,
            recipient_id=stream.recipient_id,
            topic_name=topic_name,
        )

        if not topic_exists:
            raise JsonableError(_("No such topic '{topic}'").format(topic=topic_name))

    count = do_mark_stream_messages_as_read(user_profile, stream.recipient_id, topic_name)

    log_data_str = f"[{count} updated]"
    log_data = RequestNotes.get_notes(request).log_data
    assert log_data is not None
    log_data["extra"] = log_data_str

    return json_success(request)