from typing import Dict, Iterable, List, Optional, Tuple, Union

from django.contrib.auth.models import AnonymousUser
from django.http import HttpRequest, HttpResponse
from django.utils.html import escape as escape_html
from django.utils.translation import gettext as _
from sqlalchemy.sql import and_, column, join, literal, literal_column, select, table
from sqlalchemy.types import Integer, Text

from zerver.context_processors import get_valid_realm_from_request
from zerver.lib.exceptions import JsonableError, MissingAuthenticationError
from zerver.lib.message import get_first_visible_message_id, messages_for_ids
from zerver.lib.narrow import (
    NarrowBuilder,
    OptionalNarrowListT,
    fetch_messages,
    is_spectator_compatible,
    is_web_public_narrow,
    narrow_parameter,
    parse_anchor_value,
)
from zerver.lib.request import REQ, RequestNotes, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.sqlalchemy_utils import get_sqlalchemy_connection
from zerver.lib.topic import DB_TOPIC_NAME, MATCH_TOPIC, topic_column_sa
from zerver.lib.utils import statsd
from zerver.lib.validator import check_bool, check_int, check_list, to_non_negative_int
from zerver.models import Realm, UserMessage, UserProfile

MAX_MESSAGES_PER_FETCH = 5000


def highlight_string(text: str, locs: Iterable[Tuple[int, int]]) -> str:
    highlight_start = '<span class="highlight">'
    highlight_stop = "</span>"
    pos = 0
    result = ""
    in_tag = False

    for loc in locs:
        (offset, length) = loc

        prefix_start = pos
        prefix_end = offset
        match_start = offset
        match_end = offset + length

        prefix = text[prefix_start:prefix_end]
        match = text[match_start:match_end]

        for character in prefix + match:
            if character == "<":
                in_tag = True
            elif character == ">":
                in_tag = False
        if in_tag:
            result += prefix
            result += match
        else:
            result += prefix
            result += highlight_start
            result += match
            result += highlight_stop
        pos = match_end

    result += text[pos:]
    return result
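
# For example, highlight_string("foo bar", [(4, 3)]) returns
#     'foo <span class="highlight">bar</span>'
# while a match that lands inside an HTML tag is copied through without the
# highlight span, so existing markup is never corrupted.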


def get_search_fields(
    rendered_content: str,
    topic_name: str,
    content_matches: Iterable[Tuple[int, int]],
    topic_matches: Iterable[Tuple[int, int]],
) -> Dict[str, str]:
    return {
        "match_content": highlight_string(rendered_content, content_matches),
        MATCH_TOPIC: highlight_string(escape_html(topic_name), topic_matches),
    }
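
# For example (illustrative values), a search for "zulip" might return:
#     {
#         "match_content": '<p><span class="highlight">Zulip</span> 5.0 is out</p>',
#         MATCH_TOPIC: 'release <span class="highlight">zulip</span>',
#     }
# The topic is escaped before highlighting because topics are stored as plain
# text, whereas rendered_content is already HTML.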


@has_request_variables
def get_messages_backend(
    request: HttpRequest,
    maybe_user_profile: Union[UserProfile, AnonymousUser],
    anchor_val: Optional[str] = REQ("anchor", default=None),
    include_anchor: bool = REQ(json_validator=check_bool, default=True),
    num_before: int = REQ(converter=to_non_negative_int),
    num_after: int = REQ(converter=to_non_negative_int),
    narrow: OptionalNarrowListT = REQ("narrow", converter=narrow_parameter, default=None),
    use_first_unread_anchor_val: bool = REQ(
        "use_first_unread_anchor", json_validator=check_bool, default=False
    ),
    client_gravatar: bool = REQ(json_validator=check_bool, default=True),
    apply_markdown: bool = REQ(json_validator=check_bool, default=True),
) -> HttpResponse:
    anchor = parse_anchor_value(anchor_val, use_first_unread_anchor_val)
    if num_before + num_after > MAX_MESSAGES_PER_FETCH:
        raise JsonableError(
            _("Too many messages requested (maximum {}).").format(
                MAX_MESSAGES_PER_FETCH,
            )
        )
    if num_before > 0 and num_after > 0 and not include_anchor:
        raise JsonableError(_("The anchor can only be excluded at an end of the range"))
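
    # Illustrative client usage (not enforced here): paginating back through
    # history uses a numeric anchor with num_before > 0 and num_after = 0,
    # while an initial fetch typically passes anchor="first_unread" with both
    # num_before and num_after positive.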

    realm = get_valid_realm_from_request(request)
    if not maybe_user_profile.is_authenticated:
        # If the user is not authenticated, clients must include
        # `streams:web-public` in their narrow query to indicate that
        # this is a web-public query.  This helps differentiate between
        # web-public queries (where we should return only the web-public
        # results) and clients with buggy authentication code (where we
        # should return an auth error).
        #
        # The GetOldMessagesTest.test_unauthenticated_* tests ensure
        # that we are not leaking any secure data (private messages and
        # non-web-public stream messages) via this code path.
        if not realm.allow_web_public_streams_access():
            raise MissingAuthenticationError
        if not is_web_public_narrow(narrow):
            raise MissingAuthenticationError
        assert narrow is not None
        if not is_spectator_compatible(narrow):
            raise MissingAuthenticationError
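
        # For reference, a minimal spectator-compatible narrow looks like
        # (stream name illustrative):
        #     [{"operator": "streams", "operand": "web-public"},
        #      {"operator": "stream", "operand": "design"}]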

        # We use None to indicate unauthenticated requests as it's more
        # readable than using AnonymousUser, and the lack of Django
        # stubs means that mypy can't check AnonymousUser well.
        user_profile: Optional[UserProfile] = None
        is_web_public_query = True
    else:
        assert isinstance(maybe_user_profile, UserProfile)
        user_profile = maybe_user_profile
        assert user_profile is not None
        is_web_public_query = False

    assert realm is not None

    if (
        is_web_public_query
        or realm.email_address_visibility != Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE
    ):
        # If email addresses are only available to administrators,
        # clients cannot compute gravatars, so we force-set it to false.
        client_gravatar = False

    if narrow is not None:
        # Add some metadata to our logging data for narrows
        verbose_operators = []
        for term in narrow:
            if term["operator"] == "is":
                verbose_operators.append("is:" + term["operand"])
            else:
                verbose_operators.append(term["operator"])
        log_data = RequestNotes.get_notes(request).log_data
        assert log_data is not None
        log_data["extra"] = "[{}]".format(",".join(verbose_operators))
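
    # For example, narrow=[{"operator": "is", "operand": "starred"},
    # {"operator": "stream", "operand": "design"}] is logged as
    # "[is:starred,stream]"; operands of non-"is" terms are omitted.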

    query_info = fetch_messages(
        narrow=narrow,
        user_profile=user_profile,
        realm=realm,
        is_web_public_query=is_web_public_query,
        anchor=anchor,
        include_anchor=include_anchor,
        num_before=num_before,
        num_after=num_after,
    )

    anchor = query_info.anchor
    include_history = query_info.include_history
    is_search = query_info.is_search
    rows = query_info.rows
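
    # Each row starts with the message id; for a logged-in, non-history
    # query the second column is the UserMessage flags bitmask, and for
    # search queries the last four columns carry the topic/content match
    # data unpacked below.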

    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history.  The
    # 'user_message_flags' dictionary maps each message id to the flags of
    # the user's UserMessage object for that message, which we will attach
    # to the rendered message dict before returning it.  We attempt to
    # bulk-fetch rendered message dicts from remote cache using the
    # 'messages' list.
    message_ids: List[int] = []
    user_message_flags: Dict[int, List[str]] = {}
    if is_web_public_query:
        # For spectators, we treat all historical messages as read.
        for row in rows:
            message_id = row[0]
            message_ids.append(message_id)
            user_message_flags[message_id] = ["read"]
    elif include_history:
        assert user_profile is not None
        message_ids = [row[0] for row in rows]

        # TODO: This could be done with an outer join instead of two queries
        um_rows = UserMessage.objects.filter(user_profile=user_profile, message_id__in=message_ids)
        user_message_flags = {um.message_id: um.flags_list() for um in um_rows}

        for message_id in message_ids:
            if message_id not in user_message_flags:
                user_message_flags[message_id] = ["read", "historical"]
    else:
        for row in rows:
            message_id = row[0]
            flags = row[1]
            user_message_flags[message_id] = UserMessage.flags_list_for_flags(flags)
            message_ids.append(message_id)
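
    # At this point, every id in message_ids has an entry in
    # user_message_flags; messages with no UserMessage row that are
    # visible via public stream history carry the "historical" flag.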

    search_fields: Dict[int, Dict[str, str]] = {}
    if is_search:
        for row in rows:
            message_id = row[0]
            (topic_name, rendered_content, content_matches, topic_matches) = row[-4:]
            search_fields[message_id] = get_search_fields(
                rendered_content, topic_name, content_matches, topic_matches
            )

    message_list = messages_for_ids(
        message_ids=message_ids,
        user_message_flags=user_message_flags,
        search_fields=search_fields,
        apply_markdown=apply_markdown,
        client_gravatar=client_gravatar,
        allow_edit_history=realm.allow_edit_history,
    )

    statsd.incr("loaded_old_messages", len(message_list))

    ret = dict(
        messages=message_list,
        result="success",
        msg="",
        found_anchor=query_info.found_anchor,
        found_oldest=query_info.found_oldest,
        found_newest=query_info.found_newest,
        history_limited=query_info.history_limited,
        anchor=anchor,
    )
    return json_success(request, data=ret)
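
# An abbreviated success payload for get_messages_backend (illustrative
# values):
#     {
#         "result": "success",
#         "msg": "",
#         "messages": [...],
#         "found_anchor": true,
#         "found_oldest": false,
#         "found_newest": false,
#         "history_limited": false,
#         "anchor": 1000,
#     }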


@has_request_variables
def messages_in_narrow_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    msg_ids: List[int] = REQ(json_validator=check_list(check_int)),
    narrow: OptionalNarrowListT = REQ(converter=narrow_parameter),
) -> HttpResponse:
    first_visible_message_id = get_first_visible_message_id(user_profile.realm)
    msg_ids = [message_id for message_id in msg_ids if message_id >= first_visible_message_id]

    # This query is limited to messages the user has access to because they
    # actually received them, as reflected in `zerver_usermessage`.
    query = (
        select(column("message_id", Integer), topic_column_sa(), column("rendered_content", Text))
        .where(
            and_(
                column("user_profile_id", Integer) == literal(user_profile.id),
                column("message_id", Integer).in_(msg_ids),
            )
        )
        .select_from(
            join(
                table("zerver_usermessage"),
                table("zerver_message"),
                literal_column("zerver_usermessage.message_id", Integer)
                == literal_column("zerver_message.id", Integer),
            )
        )
    )

    builder = NarrowBuilder(user_profile, column("message_id", Integer), user_profile.realm)
    if narrow is not None:
        for term in narrow:
            query = builder.add_term(query, term)
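
    # Each narrow term contributes a filter to the query; a search term such
    # as {"operator": "search", "operand": "keyword"} (illustrative) is what
    # adds the content_matches/topic_matches columns consumed below.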

    search_fields = {}
    with get_sqlalchemy_connection() as sa_conn:
        for row in sa_conn.execute(query).fetchall():
            message_id = row._mapping["message_id"]
            topic_name = row._mapping[DB_TOPIC_NAME]
            rendered_content = row._mapping["rendered_content"]
            if "content_matches" in row._mapping:
                content_matches = row._mapping["content_matches"]
                topic_matches = row._mapping["topic_matches"]
            else:
                content_matches = topic_matches = []
            search_fields[str(message_id)] = get_search_fields(
                rendered_content,
                topic_name,
                content_matches,
                topic_matches,
            )

    return json_success(request, data={"messages": search_fields})
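
# An illustrative response, keyed by stringified message id (assuming
# MATCH_TOPIC resolves to the "match_subject" key):
#     {"messages": {"1234": {"match_content": "<p>...</p>",
#                            "match_subject": "..."}}}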