# https://github.com/typeddjango/django-stubs/issues/1698
# mypy: disable-error-code="explicit-override"

import hashlib
import secrets
import time
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, TypedDict, TypeVar, Union

import orjson
import uri_template
from bitfield import BitField
from bitfield.types import Bit, BitHandler
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.postgres.indexes import GinIndex
from django.contrib.postgres.search import SearchVectorField
from django.core.exceptions import ValidationError
from django.core.serializers.json import DjangoJSONEncoder
from django.core.validators import RegexValidator
from django.db import models, transaction
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.models import CASCADE, Exists, F, OuterRef, Q, QuerySet
from django.db.models.functions import Lower, Upper
from django.db.models.signals import post_delete, post_save
from django.db.models.sql.compiler import SQLCompiler
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
from django_stubs_ext import StrPromise, ValuesQuerySet
from typing_extensions import override

from confirmation import settings as confirmation_settings
from zerver.lib import cache
from zerver.lib.cache import (
    cache_delete,
    cache_with_key,
    flush_message,
    flush_muting_users_cache,
    flush_stream,
    flush_submessage,
    flush_used_upload_space_cache,
    realm_alert_words_automaton_cache_key,
    realm_alert_words_cache_key,
)
from zerver.lib.exceptions import RateLimitedError
from zerver.lib.per_request_cache import return_same_value_during_entire_request
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.lib.types import (
    DefaultStreamDict,
    ExtendedFieldElement,
    ExtendedValidator,
    FieldElement,
    GroupPermissionSetting,
    ProfileDataElementBase,
    ProfileDataElementValue,
    RealmPlaygroundDict,
    RealmUserValidator,
    UnspecifiedValue,
    UserDisplayRecipient,
    UserFieldElement,
    Validator,
)
from zerver.lib.validator import (
    check_date,
    check_int,
    check_list,
    check_long_string,
    check_short_string,
    check_url,
    validate_select_field,
)
from zerver.models.constants import MAX_LANGUAGE_ID_LENGTH, MAX_TOPIC_NAME_LENGTH
from zerver.models.groups import GroupGroupMembership as GroupGroupMembership
from zerver.models.groups import SystemGroups
from zerver.models.groups import UserGroup as UserGroup
from zerver.models.groups import UserGroupMembership as UserGroupMembership
from zerver.models.linkifiers import RealmFilter as RealmFilter
from zerver.models.linkifiers import url_template_validator
from zerver.models.realm_emoji import RealmEmoji as RealmEmoji
from zerver.models.realms import Realm as Realm
from zerver.models.realms import RealmAuthenticationMethod as RealmAuthenticationMethod
from zerver.models.realms import RealmDomain as RealmDomain
from zerver.models.users import RealmUserDefault as RealmUserDefault
from zerver.models.users import UserBaseSettings as UserBaseSettings
from zerver.models.users import UserProfile as UserProfile
from zerver.models.users import get_user_profile_by_id_in_realm

@models.Field.register_lookup
class AndZero(models.Lookup[int]):
    lookup_name = "andz"

    @override
    def as_sql(
        self, compiler: SQLCompiler, connection: BaseDatabaseWrapper
    ) -> Tuple[str, List[Union[str, int]]]:  # nocoverage # currently only used in migrations
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.process_rhs(compiler, connection)
        return f"{lhs} & {rhs} = 0", lhs_params + rhs_params


@models.Field.register_lookup
class AndNonZero(models.Lookup[int]):
    lookup_name = "andnz"

    @override
    def as_sql(
        self, compiler: SQLCompiler, connection: BaseDatabaseWrapper
    ) -> Tuple[str, List[Union[str, int]]]:  # nocoverage # currently only used in migrations
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.process_rhs(compiler, connection)
        return f"{lhs} & {rhs} != 0", lhs_params + rhs_params

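# Illustrative sketch of how these lookups read at a call site (the model and
# field names here are hypothetical): `Example.objects.filter(flags__andz=mask)`
# matches rows where `flags & mask = 0`, and `flags__andnz=mask` matches rows
# where `flags & mask != 0`, e.g. for testing bits of a BitField in migrations.
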
ModelT = TypeVar("ModelT", bound=models.Model)
RowT = TypeVar("RowT")

def query_for_ids(
    query: ValuesQuerySet[ModelT, RowT],
    user_ids: List[int],
    field: str,
) -> ValuesQuerySet[ModelT, RowT]:
    """
    This function optimizes searches of the form
    `user_profile_id in (1, 2, 3, 4)` by quickly
    building the where clauses. Profiling shows significant
    speedups over the normal Django-based approach.

    Use this very carefully!  Also, the caller should
    guard against empty lists of user_ids.
    """
    assert user_ids
    clause = f"{field} IN %s"
    query = query.extra(
        where=[clause],
        params=(tuple(user_ids),),
    )
    return query

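# Illustrative call (the variable names are hypothetical): narrowing a
# values() queryset to a set of user IDs via the raw IN clause above, rather
# than a normal Django filter:
#
#     rows = UserProfile.objects.values("id", "full_name")
#     rows = query_for_ids(rows, user_ids, "zerver_userprofile.id")
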
@return_same_value_during_entire_request
def get_display_recipient_by_id(
    recipient_id: int, recipient_type: int, recipient_type_id: Optional[int]
) -> List[UserDisplayRecipient]:
    """
    returns: an object describing the recipient (using a cache).
    If the type is a stream, the type_id must be an int; a string is returned.
    Otherwise, type_id may be None; an array of recipient dicts is returned.
    """
    # Have to import here, to avoid circular dependency.
    from zerver.lib.display_recipient import get_display_recipient_remote_cache

    return get_display_recipient_remote_cache(recipient_id, recipient_type, recipient_type_id)

def get_display_recipient(recipient: "Recipient") -> List[UserDisplayRecipient]:
    return get_display_recipient_by_id(
        recipient.id,
        recipient.type,
        recipient.type_id,
    )

def get_recipient_ids(
    recipient: Optional["Recipient"], user_profile_id: int
) -> Tuple[List[int], str]:
    if recipient is None:
        recipient_type_str = ""
        to = []
    elif recipient.type == Recipient.STREAM:
        recipient_type_str = "stream"
        to = [recipient.type_id]
    else:
        recipient_type_str = "private"
        if recipient.type == Recipient.PERSONAL:
            to = [recipient.type_id]
        else:
            to = []
            for r in get_display_recipient(recipient):
                assert not isinstance(r, str)  # It will only be a string for streams
                if r["id"] != user_profile_id:
                    to.append(r["id"])
    return to, recipient_type_str

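# For example (with hypothetical IDs): for a stream Recipient this returns
# ([stream_id], "stream"); for a direct message Recipient it returns the other
# participants' user IDs (excluding user_profile_id) together with "private".
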
class RealmPlayground(models.Model):
    """Server side storage model to store playground information needed by our
    'view code in playground' feature in code blocks.
    """

    MAX_PYGMENTS_LANGUAGE_LENGTH = 40

    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    url_template = models.TextField(validators=[url_template_validator])

    # User-visible display name used when configuring playgrounds in the settings page and
    # when displaying them in the playground links popover.
    name = models.TextField(db_index=True)

    # This stores the pygments lexer subclass names and not the aliases themselves.
    pygments_language = models.CharField(
        db_index=True,
        max_length=MAX_PYGMENTS_LANGUAGE_LENGTH,
        # We validate to see if this conforms to the character set allowed for a
        # language in the code block.
        validators=[
            RegexValidator(
                regex=r"^[ a-zA-Z0-9_+-./#]*$", message=_("Invalid characters in pygments language")
            )
        ],
    )

    class Meta:
        unique_together = (("realm", "pygments_language", "name"),)

    @override
    def __str__(self) -> str:
        return f"{self.realm.string_id}: {self.pygments_language} {self.name}"

    @override
    def clean(self) -> None:
        """Validate whether the URL template is valid for the playground,
        ensuring that "code" is the sole variable present in it.

        Django's `full_clean` calls `clean_fields` followed by `clean` method
        and stores all ValidationErrors from all stages to return as JSON.
        """

        # Do not continue the check if the url template is invalid to begin
        # with. The ValidationError for invalid template will only be raised by
        # the validator set on the url_template field instead of here to avoid
        # duplicates.
        if not uri_template.validate(self.url_template):
            return

        # Extract variables used in the URL template.
        template_variables = set(uri_template.URITemplate(self.url_template).variable_names)

        if "code" not in template_variables:
            raise ValidationError(_('Missing the required variable "code" in the URL template'))

        # The URL template should only contain a single variable, which is "code".
        if len(template_variables) != 1:
            raise ValidationError(
                _('"code" should be the only variable present in the URL template'),
            )

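# For instance, a playground entry might use a URL template along the lines of
# "https://play.example.com/run?code={code}" (hypothetical host); clean() above
# accepts it because "code" is the only variable present in the template.
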
def get_realm_playgrounds(realm: Realm) -> List[RealmPlaygroundDict]:
    return [
        RealmPlaygroundDict(
            id=playground.id,
            name=playground.name,
            pygments_language=playground.pygments_language,
            url_template=playground.url_template,
        )
        for playground in RealmPlayground.objects.filter(realm=realm).all()
    ]

class Recipient(models.Model):
    """Represents an audience that can potentially receive messages in Zulip.

    This table essentially functions as a generic foreign key that
    allows Message.recipient_id to be a simple ForeignKey representing
    the audience for a message, while supporting the different types
    of audiences Zulip supports for a message.

    Recipient has just two attributes: The enum type, and a type_id,
    which is the ID of the UserProfile/Stream/Huddle object containing
    all the metadata for the audience. There are 3 recipient types:

    1. 1:1 direct message: The type_id is the ID of the UserProfile
       who will receive any message to this Recipient. The sender
       of such a message is represented separately.
    2. Stream message: The type_id is the ID of the associated Stream.
    3. Group direct message: In Zulip, group direct messages are
       represented by Huddle objects, which encode the set of users
       in the conversation. The type_id is the ID of the associated Huddle
       object; the set of users is usually retrieved via the Subscription
       table. See the Huddle model for details.

    See also the Subscription model, which stores which UserProfile
    objects are subscribed to which Recipient objects.
    """

    type_id = models.IntegerField(db_index=True)
    type = models.PositiveSmallIntegerField(db_index=True)
    # Valid types are {personal, stream, huddle}

    # The type for 1:1 direct messages.
    PERSONAL = 1
    # The type for stream messages.
    STREAM = 2
    # The type for group direct messages.
    HUDDLE = 3

    class Meta:
        unique_together = ("type", "type_id")

    # N.B. If we used Django's choice=... we would get this for free (kinda)
    _type_names = {PERSONAL: "personal", STREAM: "stream", HUDDLE: "huddle"}

    @override
    def __str__(self) -> str:
        return f"{self.label()} ({self.type_id}, {self.type})"

    def label(self) -> str:
        if self.type == Recipient.STREAM:
            return Stream.objects.get(id=self.type_id).name
        else:
            return str(get_display_recipient(self))

    def type_name(self) -> str:
        # Raises KeyError if invalid
        return self._type_names[self.type]

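# As an example (hypothetical IDs): a Recipient row with type=Recipient.STREAM
# and type_id=7 is the audience of the Stream whose ID is 7, and its
# type_name() is "stream"; a row with type=Recipient.HUDDLE points at a Huddle
# object via type_id in the same way.
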
class PreregistrationRealm(models.Model):
    """Data on a partially created realm entered by a user who has
    completed the "new organization" form. Used to transfer the user's
    selections from the pre-confirmation "new organization" form to
    the post-confirmation user registration form.

    Note that the values stored here may not match those of the
    created realm (in the event the user creates a realm at all),
    because we allow the user to edit these values in the registration
    form (and in fact the user will be required to do so if the
    `string_id` is claimed by another realm before registration is
    completed).
    """

    name = models.CharField(max_length=Realm.MAX_REALM_NAME_LENGTH)
    org_type = models.PositiveSmallIntegerField(
        default=Realm.ORG_TYPES["unspecified"]["id"],
        choices=[(t["id"], t["name"]) for t in Realm.ORG_TYPES.values()],
    )
    default_language = models.CharField(
        default="en",
        max_length=MAX_LANGUAGE_ID_LENGTH,
    )
    string_id = models.CharField(max_length=Realm.MAX_REALM_SUBDOMAIN_LENGTH)
    email = models.EmailField()

    confirmation = GenericRelation("confirmation.Confirmation", related_query_name="prereg_realm")
    status = models.IntegerField(default=0)

    # The Realm created upon completion of the registration
    # for this PreregistrationRealm
    created_realm = models.ForeignKey(Realm, null=True, related_name="+", on_delete=models.SET_NULL)

    # The UserProfile created upon completion of the registration
    # for this PreregistrationRealm
    created_user = models.ForeignKey(
        UserProfile, null=True, related_name="+", on_delete=models.SET_NULL
    )

class PreregistrationUser(models.Model):
    # Data on a partially created user, before the completion of
    # registration.  This is used in at least three major code paths:
    # * Realm creation, in which case realm is None.
    #
    # * Invitations, in which case referred_by will always be set.
    #
    # * Social authentication signup, where it's used to store data
    #   from the authentication step and pass it to the registration
    #   form.

    email = models.EmailField()

    confirmation = GenericRelation("confirmation.Confirmation", related_query_name="prereg_user")
    # If the pre-registration process provides a suggested full name for this user,
    # store it here to use it to prepopulate the full name field in the registration form:
    full_name = models.CharField(max_length=UserProfile.MAX_NAME_LENGTH, null=True)
    full_name_validated = models.BooleanField(default=False)
    referred_by = models.ForeignKey(UserProfile, null=True, on_delete=CASCADE)
    streams = models.ManyToManyField("zerver.Stream")
    invited_at = models.DateTimeField(auto_now=True)
    realm_creation = models.BooleanField(default=False)
    # Indicates whether the user needs a password.  Users who were
    # created via SSO style auth (e.g. GitHub/Google) generally do not.
    password_required = models.BooleanField(default=True)

    # status: whether an object has been confirmed.
    #   if confirmed, set to confirmation.settings.STATUS_USED
    status = models.IntegerField(default=0)

    # The realm should only ever be None for PreregistrationUser
    # objects created as part of realm creation.
    realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE)

    # These values should be consistent with the values
    # in settings_config.user_role_values.
    INVITE_AS = dict(
        REALM_OWNER=100,
        REALM_ADMIN=200,
        MODERATOR=300,
        MEMBER=400,
        GUEST_USER=600,
    )
    invited_as = models.PositiveSmallIntegerField(default=INVITE_AS["MEMBER"])

    multiuse_invite = models.ForeignKey("MultiuseInvite", null=True, on_delete=models.SET_NULL)

    # The UserProfile created upon completion of the registration
    # for this PreregistrationUser
    created_user = models.ForeignKey(
        UserProfile, null=True, related_name="+", on_delete=models.SET_NULL
    )

    class Meta:
        indexes = [
            models.Index(Upper("email"), name="upper_preregistration_email_idx"),
        ]

def filter_to_valid_prereg_users(
    query: QuerySet[PreregistrationUser],
    invite_expires_in_minutes: Union[Optional[int], UnspecifiedValue] = UnspecifiedValue(),
) -> QuerySet[PreregistrationUser]:
    """
    If invite_expires_in_minutes is specified, we return only those PreregistrationUser
    objects that were created at most that many minutes in the past.
    """
    used_value = confirmation_settings.STATUS_USED
    revoked_value = confirmation_settings.STATUS_REVOKED

    query = query.exclude(status__in=[used_value, revoked_value])
    if invite_expires_in_minutes is None:
        # Since invite_expires_in_minutes is None, the invitation will never
        # expire, so we do not need to check anything else and can simply return
        # after excluding objects with used and revoked status.
        return query

    assert invite_expires_in_minutes is not None
    if not isinstance(invite_expires_in_minutes, UnspecifiedValue):
        lowest_datetime = timezone_now() - timedelta(minutes=invite_expires_in_minutes)
        return query.filter(invited_at__gte=lowest_datetime)
    else:
        return query.filter(
            Q(confirmation__expiry_date=None) | Q(confirmation__expiry_date__gte=timezone_now())
        )

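# Typical call sites (a sketch; the variable names are illustrative) pass a
# pending-invite queryset plus the configured invite expiry, e.g.:
#
#     prereg_users = filter_to_valid_prereg_users(
#         PreregistrationUser.objects.filter(referred_by=user_profile),
#         invite_expires_in_minutes,
#     )
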
class MultiuseInvite(models.Model):
    referred_by = models.ForeignKey(UserProfile, on_delete=CASCADE)
    streams = models.ManyToManyField("zerver.Stream")
    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    invited_as = models.PositiveSmallIntegerField(default=PreregistrationUser.INVITE_AS["MEMBER"])

    # status for tracking whether the invite has been revoked.
    # If revoked, set to confirmation.settings.STATUS_REVOKED.
    # STATUS_USED is not supported, because these objects are supposed
    # to be usable multiple times.
    status = models.IntegerField(default=0)

class EmailChangeStatus(models.Model):
    new_email = models.EmailField()
    old_email = models.EmailField()
    updated_at = models.DateTimeField(auto_now=True)
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)

    # status: whether an object has been confirmed.
    #   if confirmed, set to confirmation.settings.STATUS_USED
    status = models.IntegerField(default=0)

    realm = models.ForeignKey(Realm, on_delete=CASCADE)

class RealmReactivationStatus(models.Model):
    # status: whether an object has been confirmed.
    #   if confirmed, set to confirmation.settings.STATUS_USED
    status = models.IntegerField(default=0)

    realm = models.ForeignKey(Realm, on_delete=CASCADE)

class AbstractPushDeviceToken(models.Model):
    APNS = 1
    GCM = 2

    KINDS = (
        (APNS, "apns"),
        (GCM, "gcm"),
    )

    kind = models.PositiveSmallIntegerField(choices=KINDS)

    # The token is a unique device-specific token that is
    # sent to us from each device:
    #   - APNS token if kind == APNS
    #   - GCM registration id if kind == GCM
    token = models.CharField(max_length=4096, db_index=True)

    # TODO: last_updated should be renamed date_created, since it is
    # no longer maintained as a last_updated value.
    last_updated = models.DateTimeField(auto_now=True)

    # [optional] Contains the app id of the device if it is an iOS device
    ios_app_id = models.TextField(null=True)

    class Meta:
        abstract = True

class PushDeviceToken(AbstractPushDeviceToken):
    # The user whose device this is
    user = models.ForeignKey(UserProfile, db_index=True, on_delete=CASCADE)

    class Meta:
        unique_together = ("user", "kind", "token")

def generate_email_token_for_stream() -> str:
    return secrets.token_hex(16)

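# Note: secrets.token_hex(16) yields 32 hexadecimal characters, which is why
# Stream.email_token below is declared with max_length=32.
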
class Stream(models.Model):
    MAX_NAME_LENGTH = 60
    MAX_DESCRIPTION_LENGTH = 1024

    name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True)
    realm = models.ForeignKey(Realm, db_index=True, on_delete=CASCADE)
    date_created = models.DateTimeField(default=timezone_now)
    deactivated = models.BooleanField(default=False)
    description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH, default="")
    rendered_description = models.TextField(default="")

    # Foreign key to the Recipient object for STREAM type messages to this stream.
    recipient = models.ForeignKey(Recipient, null=True, on_delete=models.SET_NULL)

    # Various permission policy configurations
    PERMISSION_POLICIES: Dict[str, Dict[str, Any]] = {
        "web_public": {
            "invite_only": False,
            "history_public_to_subscribers": True,
            "is_web_public": True,
            "policy_name": gettext_lazy("Web-public"),
        },
        "public": {
            "invite_only": False,
            "history_public_to_subscribers": True,
            "is_web_public": False,
            "policy_name": gettext_lazy("Public"),
        },
        "private_shared_history": {
            "invite_only": True,
            "history_public_to_subscribers": True,
            "is_web_public": False,
            "policy_name": gettext_lazy("Private, shared history"),
        },
        "private_protected_history": {
            "invite_only": True,
            "history_public_to_subscribers": False,
            "is_web_public": False,
            "policy_name": gettext_lazy("Private, protected history"),
        },
        # Public streams with protected history are currently only
        # available in Zephyr realms
        "public_protected_history": {
            "invite_only": False,
            "history_public_to_subscribers": False,
            "is_web_public": False,
            "policy_name": gettext_lazy("Public, protected history"),
        },
    }
    invite_only = models.BooleanField(default=False)
    history_public_to_subscribers = models.BooleanField(default=True)

    # Whether this stream's content should be published by the web-public archive features
    is_web_public = models.BooleanField(default=False)

    STREAM_POST_POLICY_EVERYONE = 1
    STREAM_POST_POLICY_ADMINS = 2
    STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS = 3
    STREAM_POST_POLICY_MODERATORS = 4
    # TODO: Implement policy to restrict posting to a user group or admins.

    # Who in the organization has permission to send messages to this stream.
    stream_post_policy = models.PositiveSmallIntegerField(default=STREAM_POST_POLICY_EVERYONE)
    POST_POLICIES: Dict[int, StrPromise] = {
        # These strings should match the strings in the
        # stream_post_policy_values object in stream_data.js.
        STREAM_POST_POLICY_EVERYONE: gettext_lazy("All stream members can post"),
        STREAM_POST_POLICY_ADMINS: gettext_lazy("Only organization administrators can post"),
        STREAM_POST_POLICY_MODERATORS: gettext_lazy(
            "Only organization administrators and moderators can post"
        ),
        STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS: gettext_lazy(
            "Only organization full members can post"
        ),
    }
    STREAM_POST_POLICY_TYPES = list(POST_POLICIES.keys())

    # The unique thing about Zephyr public streams is that we never list their
    # users.  We may try to generalize this concept later, but for now
    # we just use a concrete field.  (Zephyr public streams aren't exactly like
    # invite-only streams--while both are private in terms of listing users,
    # for Zephyr we don't even list users to stream members, yet membership
    # is more public in the sense that you don't need a Zulip invite to join.
    # This field is populated directly from UserProfile.is_zephyr_mirror_realm,
    # and the reason for denormalizing the field is performance.
    is_in_zephyr_realm = models.BooleanField(default=False)

    # Used by the e-mail forwarder. The e-mail RFC specifies a maximum
    # e-mail length of 254, and our max stream name length is 60, so we
    # have plenty of room for the token.
    email_token = models.CharField(
        max_length=32,
        default=generate_email_token_for_stream,
        unique=True,
    )

    # For old messages being automatically deleted.
    # Value NULL means "use retention policy of the realm".
    # Value -1 means "disable retention policy for this stream unconditionally".
    # Non-negative values have the natural meaning of "archive messages older than <value> days".
    MESSAGE_RETENTION_SPECIAL_VALUES_MAP = {
        "unlimited": -1,
        "realm_default": None,
    }
    message_retention_days = models.IntegerField(null=True, default=None)

    # on_delete field here is set to RESTRICT because we don't want to allow
    # deleting a user group in case it is referenced by this setting.
    # We are not using PROTECT since we want to allow deletion of user groups
    # when realm itself is deleted.
    can_remove_subscribers_group = models.ForeignKey(UserGroup, on_delete=models.RESTRICT)

    # The very first message ID in the stream.  Used to help clients
    # determine whether they might need to display "more topics" for a
    # stream based on what messages they have cached.
    first_message_id = models.IntegerField(null=True, db_index=True)

    stream_permission_group_settings = {
        "can_remove_subscribers_group": GroupPermissionSetting(
            require_system_group=True,
            allow_internet_group=False,
            allow_owners_group=False,
            allow_nobody_group=False,
            allow_everyone_group=True,
            default_group_name=SystemGroups.ADMINISTRATORS,
            id_field_name="can_remove_subscribers_group_id",
        ),
    }

    class Meta:
        indexes = [
            models.Index(Upper("name"), name="upper_stream_name_idx"),
        ]

    @override
    def __str__(self) -> str:
        return self.name

    def is_public(self) -> bool:
        # All streams are private in Zephyr mirroring realms.
        return not self.invite_only and not self.is_in_zephyr_realm

    def is_history_realm_public(self) -> bool:
        return self.is_public()

    def is_history_public_to_subscribers(self) -> bool:
        return self.history_public_to_subscribers

    # Stream fields included whenever a Stream object is provided to
    # Zulip clients via the API.  A few details worth noting:
    # * "id" is represented as "stream_id" in most API interfaces.
    # * "email_token" is not realm-public and thus is not included here.
    # * is_in_zephyr_realm is a backend-only optimization.
    # * "deactivated" streams are filtered from the API entirely.
    # * "realm" and "recipient" are not exposed to clients via the API.
    API_FIELDS = [
        "date_created",
        "description",
        "first_message_id",
        "history_public_to_subscribers",
        "id",
        "invite_only",
        "is_web_public",
        "message_retention_days",
        "name",
        "rendered_description",
        "stream_post_policy",
        "can_remove_subscribers_group_id",
    ]

    def to_dict(self) -> DefaultStreamDict:
        return DefaultStreamDict(
            can_remove_subscribers_group=self.can_remove_subscribers_group_id,
            date_created=datetime_to_timestamp(self.date_created),
            description=self.description,
            first_message_id=self.first_message_id,
            history_public_to_subscribers=self.history_public_to_subscribers,
            invite_only=self.invite_only,
            is_web_public=self.is_web_public,
            message_retention_days=self.message_retention_days,
            name=self.name,
            rendered_description=self.rendered_description,
            stream_id=self.id,
            stream_post_policy=self.stream_post_policy,
            is_announcement_only=self.stream_post_policy == Stream.STREAM_POST_POLICY_ADMINS,
        )

post_save.connect(flush_stream, sender=Stream)
post_delete.connect(flush_stream, sender=Stream)

class UserTopic(models.Model):
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    stream = models.ForeignKey(Stream, on_delete=CASCADE)
    recipient = models.ForeignKey(Recipient, on_delete=CASCADE)
    topic_name = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH)
    # The default value for last_updated is a few weeks before tracking
    # of when topics were muted was first introduced.  It's designed
    # to be obviously incorrect so that one can tell it's backfilled data.
    last_updated = models.DateTimeField(default=datetime(2020, 1, 1, 0, 0, tzinfo=timezone.utc))

    class VisibilityPolicy(models.IntegerChoices):
        # A normal muted topic. No notifications and unreads hidden.
        MUTED = 1, "Muted topic"

        # This topic will behave like an unmuted topic in an unmuted stream even if it
        # belongs to a muted stream.
        UNMUTED = 2, "Unmuted topic in muted stream"

        # This topic will behave like `UNMUTED`, plus some additional
        # display and/or notifications priority that is TBD and likely to
        # be configurable; see #6027. Not yet implemented.
        FOLLOWED = 3, "Followed topic"

        # Implicitly, if a UserTopic does not exist, the (user, topic)
        # pair should have normal behavior for that (user, stream) pair.

        # We use this in our code to represent the condition in the comment above.
        INHERIT = 0, "User's default policy for the stream."

    visibility_policy = models.SmallIntegerField(
        choices=VisibilityPolicy.choices, default=VisibilityPolicy.MUTED
    )

    class Meta:
        constraints = [
            models.UniqueConstraint(
                "user_profile",
                "stream",
                Lower("topic_name"),
                name="usertopic_case_insensitive_topic_uniq",
            ),
        ]

        indexes = [
            models.Index("stream", Upper("topic_name"), name="zerver_mutedtopic_stream_topic"),
            # This index is designed to optimize queries fetching the
            # set of users who have special policy for a stream,
            # e.g. for the send-message code paths.
            models.Index(
                fields=("stream", "topic_name", "visibility_policy", "user_profile"),
                name="zerver_usertopic_stream_topic_user_visibility_idx",
            ),
            # This index is useful for handling API requests fetching the
            # muted topics for a given user or user/stream pair.
            models.Index(
                fields=("user_profile", "visibility_policy", "stream", "topic_name"),
                name="zerver_usertopic_user_visibility_idx",
            ),
        ]

    @override
    def __str__(self) -> str:
        return f"({self.user_profile.email}, {self.stream.name}, {self.topic_name}, {self.last_updated})"

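# A sketch of how the visibility policies read in practice (values are
# illustrative): a UserTopic row with
# visibility_policy=UserTopic.VisibilityPolicy.MUTED hides that topic's unreads
# for that user, while having no row at all (or INHERIT) means the user's
# stream-level settings apply unchanged.
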
class MutedUser(models.Model):
    user_profile = models.ForeignKey(UserProfile, related_name="muter", on_delete=CASCADE)
    muted_user = models.ForeignKey(UserProfile, related_name="muted", on_delete=CASCADE)
    date_muted = models.DateTimeField(default=timezone_now)

    class Meta:
        unique_together = ("user_profile", "muted_user")

    @override
    def __str__(self) -> str:
        return f"{self.user_profile.email} -> {self.muted_user.email}"

post_save.connect(flush_muting_users_cache, sender=MutedUser)
post_delete.connect(flush_muting_users_cache, sender=MutedUser)

class Client(models.Model):
    MAX_NAME_LENGTH = 30
    name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True, unique=True)

    @override
    def __str__(self) -> str:
        return self.name

    def default_read_by_sender(self) -> bool:
        """Used to determine whether a message was sent by a full Zulip UI
        style client (and thus whether the message should be treated
        as sent by a human and automatically marked as read for the
        sender).  The purpose of this distinction is to ensure that
        messages sent to the user by e.g. a Google Calendar integration
        using the user's own API key don't get marked as read
        automatically.
        """
        sending_client = self.name.lower()

        return (
            sending_client
            in (
                "zulipandroid",
                "zulipios",
                "zulipdesktop",
                "zulipmobile",
                "zulipelectron",
                "zulipterminal",
                "snipe",
                "website",
                "ios",
                "android",
            )
            or "desktop app" in sending_client
            # Since the vast majority of messages are sent by humans
            # in Zulip, treat test suite messages as such.
            or (sending_client == "test suite" and settings.TEST_SUITE)
        )

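# For example, Client(name="website").default_read_by_sender() is True, while a
# client registered by an integration (say, a hypothetical "ZulipExampleWebhook")
# lowercases to a name outside the tuple above and so returns False.
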
get_client_cache: Dict[str, Client] = {}


def clear_client_cache() -> None:  # nocoverage
    global get_client_cache
    get_client_cache = {}

def get_client(name: str) -> Client:
    # Accessing KEY_PREFIX through the module is necessary
    # because we need the updated value of the variable.
    cache_name = cache.KEY_PREFIX + name[0 : Client.MAX_NAME_LENGTH]
    if cache_name not in get_client_cache:
        result = get_client_remote_cache(name)
        get_client_cache[cache_name] = result
    return get_client_cache[cache_name]


def get_client_cache_key(name: str) -> str:
    return f"get_client:{hashlib.sha1(name.encode()).hexdigest()}"

@cache_with_key(get_client_cache_key, timeout=3600 * 24 * 7)
def get_client_remote_cache(name: str) -> Client:
    (client, _) = Client.objects.get_or_create(name=name[0 : Client.MAX_NAME_LENGTH])
    return client

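# Sketch of the lookup layering above: get_client("website") first consults the
# per-process get_client_cache dict, then (via @cache_with_key, with a 7-day
# timeout) the remote cache keyed by get_client_cache_key, and finally falls
# back to Client.objects.get_or_create in the database.
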
def get_realm_stream(stream_name: str, realm_id: int) -> Stream:
    return Stream.objects.get(name__iexact=stream_name.strip(), realm_id=realm_id)

def get_active_streams(realm: Realm) -> QuerySet[Stream]:
    """
    Return all streams (including invite-only streams) that have not been deactivated.
    """
    return Stream.objects.filter(realm=realm, deactivated=False)

def get_linkable_streams(realm_id: int) -> QuerySet[Stream]:
    """
    This returns the streams that we are allowed to linkify using
    something like "#frontend" in our markup. For now the business
    rule is that you can link any stream in the realm that hasn't
    been deactivated (similar to how get_active_streams works).
    """
    return Stream.objects.filter(realm_id=realm_id, deactivated=False)

def get_stream(stream_name: str, realm: Realm) -> Stream:
    """
    Callers that don't have a Realm object already available should use
    get_realm_stream directly, to avoid unnecessarily fetching the
    Realm object.
    """
    return get_realm_stream(stream_name, realm.id)

def get_stream_by_id_in_realm(stream_id: int, realm: Realm) -> Stream:
|
2023-07-21 13:11:48 +02:00
|
|
|
return Stream.objects.select_related("realm", "recipient").get(id=stream_id, realm=realm)
|
2019-01-28 05:28:29 +01:00
|
|
|
|
2013-06-27 22:52:05 +02:00
|
|
|
|
2023-07-11 14:24:06 +02:00
|
|
|
def bulk_get_streams(realm: Realm, stream_names: Set[str]) -> Dict[str, Any]:
|
|
|
|
def fetch_streams_by_name(stream_names: Set[str]) -> QuerySet[Stream]:
|
2016-06-04 09:02:05 +02:00
|
|
|
#
|
2013-06-27 22:52:05 +02:00
|
|
|
# This should be just
|
|
|
|
#
|
2019-12-12 21:13:07 +01:00
|
|
|
# Stream.objects.select_related().filter(name__iexact__in=stream_names,
|
|
|
|
# realm_id=realm_id)
|
2013-06-27 22:52:05 +02:00
|
|
|
#
|
|
|
|
# But chaining __in and __iexact doesn't work with Django's
|
|
|
|
# ORM, so we have the following hack to construct the relevant where clause
|
2021-02-12 08:19:30 +01:00
|
|
|
where_clause = (
|
|
|
|
"upper(zerver_stream.name::text) IN (SELECT upper(name) FROM unnest(%s) AS name)"
|
|
|
|
)
|
2023-07-11 14:24:06 +02:00
|
|
|
return get_active_streams(realm).extra(where=[where_clause], params=(list(stream_names),))
|
2013-06-27 22:52:05 +02:00
|
|
|
|
2023-07-11 14:24:06 +02:00
|
|
|
if not stream_names:
|
|
|
|
return {}
|
|
|
|
streams = list(fetch_streams_by_name(stream_names))
|
|
|
|
return {stream.name.lower(): stream for stream in streams}
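# Illustrative example (hypothetical stream names): lookups are
# case-insensitive and the result is keyed by the lowercased stream name, so
#
#   streams = bulk_get_streams(realm, {"Design", "DEVEL", "no-such-stream"})
#
# might return {"design": <Stream: Design>, "devel": <Stream: devel>};
# names that do not match an active stream are simply absent from the dict.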
|
2013-06-27 22:52:05 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-09-19 21:48:53 +02:00
|
|
|
def get_huddle_user_ids(recipient: Recipient) -> ValuesQuerySet["Subscription", int]:
|
2021-02-12 08:19:30 +01:00
|
|
|
assert recipient.type == Recipient.HUDDLE
|
|
|
|
|
|
|
|
return (
|
|
|
|
Subscription.objects.filter(
|
|
|
|
recipient=recipient,
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
.order_by("user_profile_id")
|
|
|
|
.values_list("user_profile_id", flat=True)
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-10-29 17:03:51 +01:00
|
|
|
|
|
|
|
|
2023-07-16 19:08:55 +02:00
|
|
|
def bulk_get_huddle_user_ids(recipient_ids: List[int]) -> Dict[int, Set[int]]:
|
2019-08-13 23:05:47 +02:00
|
|
|
"""
|
2023-07-16 19:08:55 +02:00
|
|
|
Takes a list of huddle-type recipient_ids, returns a dict
|
2019-08-13 23:05:47 +02:00
|
|
|
mapping recipient id to the set of user ids in the huddle.
|
2023-07-16 19:08:55 +02:00
|
|
|
|
|
|
|
We rely on our caller to pass us recipient_ids that correspond
|
|
|
|
to huddles, but technically this function is valid for any type
|
|
|
|
of subscription.
|
2019-08-13 23:05:47 +02:00
|
|
|
"""
|
2023-07-16 19:08:55 +02:00
|
|
|
if not recipient_ids:
|
2019-08-13 23:05:47 +02:00
|
|
|
return {}
|
|
|
|
|
|
|
|
subscriptions = Subscription.objects.filter(
|
2023-07-16 19:08:55 +02:00
|
|
|
recipient_id__in=recipient_ids,
|
2023-07-16 18:29:58 +02:00
|
|
|
).only("user_profile_id", "recipient_id")
|
|
|
|
|
|
|
|
result_dict: Dict[int, Set[int]] = defaultdict(set)
|
|
|
|
for subscription in subscriptions:
|
|
|
|
result_dict[subscription.recipient_id].add(subscription.user_profile_id)
|
2019-08-13 23:05:47 +02:00
|
|
|
|
|
|
|
return result_dict
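# Illustrative example (hypothetical ids): for huddle recipient ids 7 and 9,
#
#   bulk_get_huddle_user_ids([7, 9])
#
# might return {7: {1, 2, 3}, 9: {2, 5}}; recipient ids without any
# Subscription rows are simply absent from the returned dict.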
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class AbstractMessage(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
sender = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
2022-09-13 20:36:47 +02:00
|
|
|
|
|
|
|
# The target of the message is signified by the Recipient object.
|
|
|
|
# See the Recipient class for details.
|
2022-08-15 19:10:58 +02:00
|
|
|
recipient = models.ForeignKey(Recipient, on_delete=CASCADE)
|
2022-09-13 20:36:47 +02:00
|
|
|
|
2023-09-27 20:11:48 +02:00
|
|
|
# The realm containing the message. Usually this will be the same
|
|
|
|
# as the realm of the message's sender; the exception to that is
|
|
|
|
# cross-realm bot users.
|
|
|
|
#
|
|
|
|
# Important for efficient indexes and sharding in multi-realm servers.
|
2022-09-28 16:19:07 +02:00
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
2022-09-27 21:42:31 +02:00
|
|
|
|
2018-07-25 06:35:48 +02:00
|
|
|
# The message's topic.
|
|
|
|
#
|
|
|
|
# Early versions of Zulip called this concept a "subject", as in an email
|
|
|
|
# "subject line", before changing to "topic" in 2013 (commit dac5a46fa).
|
|
|
|
# UI and user documentation now consistently say "topic". New APIs and
|
|
|
|
# new code should generally also say "topic".
|
|
|
|
#
|
|
|
|
# See also the `topic_name` method on `Message`.
|
2022-08-15 19:10:58 +02:00
|
|
|
subject = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH, db_index=True)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2023-09-27 20:11:48 +02:00
|
|
|
# The raw Markdown-format text (e.g., what the user typed into the compose box).
|
2022-08-15 19:10:58 +02:00
|
|
|
content = models.TextField()
|
2023-09-27 20:11:48 +02:00
|
|
|
|
|
|
|
# The HTML rendered content resulting from rendering the content
|
|
|
|
# with the Markdown processor.
|
2022-08-15 19:10:58 +02:00
|
|
|
rendered_content = models.TextField(null=True)
|
2023-09-27 20:11:48 +02:00
|
|
|
# A rarely-incremented version number, theoretically useful for
|
|
|
|
# tracking which messages have been already rerendered when making
|
|
|
|
# major changes to the markup rendering process.
|
2022-08-15 19:10:58 +02:00
|
|
|
rendered_content_version = models.IntegerField(null=True)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
date_sent = models.DateTimeField("date sent", db_index=True)
|
2023-09-27 20:11:48 +02:00
|
|
|
|
|
|
|
# A Client object indicating what type of Zulip client sent this message.
|
2022-08-15 19:10:58 +02:00
|
|
|
sending_client = models.ForeignKey(Client, on_delete=CASCADE)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2023-09-27 20:11:48 +02:00
|
|
|
# The last time the message was modified by message editing or moving.
|
2022-08-15 19:10:58 +02:00
|
|
|
last_edit_time = models.DateTimeField(null=True)
|
2018-07-25 06:35:48 +02:00
|
|
|
|
|
|
|
# A JSON-encoded list of objects describing any past edits to this
|
|
|
|
# message, oldest first.
|
2022-08-15 19:10:58 +02:00
|
|
|
edit_history = models.TextField(null=True)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2023-09-27 20:11:48 +02:00
|
|
|
# Whether the message contains (a link to) an uploaded file.
|
2022-08-15 19:10:58 +02:00
|
|
|
has_attachment = models.BooleanField(default=False, db_index=True)
|
2023-09-27 20:11:48 +02:00
|
|
|
# Whether the message contains a visible image element.
|
2022-08-15 19:10:58 +02:00
|
|
|
has_image = models.BooleanField(default=False, db_index=True)
|
2023-09-27 20:11:48 +02:00
|
|
|
# Whether the message contains a link.
|
2022-08-15 19:10:58 +02:00
|
|
|
has_link = models.BooleanField(default=False, db_index=True)
|
2014-02-21 17:44:48 +01:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2016-11-01 11:26:38 +01:00
|
|
|
abstract = True
|
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2018-05-11 02:24:34 +02:00
|
|
|
def __str__(self) -> str:
|
2023-07-16 14:21:27 +02:00
|
|
|
return f"{self.recipient.label()} / {self.subject} / {self.sender!r}"
|
2017-05-17 05:59:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-06-18 19:54:09 +02:00
|
|
|
class ArchiveTransaction(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
timestamp = models.DateTimeField(default=timezone_now, db_index=True)
|
2019-06-18 19:54:09 +02:00
|
|
|
# Marks if the data archived in this transaction has been restored:
|
2022-08-15 19:10:58 +02:00
|
|
|
restored = models.BooleanField(default=False, db_index=True)
|
2019-06-18 19:54:09 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
type = models.PositiveSmallIntegerField(db_index=True)
|
2019-06-18 19:54:09 +02:00
|
|
|
# Valid types:
|
|
|
|
RETENTION_POLICY_BASED = 1 # Archiving was executed due to automated retention policies
|
|
|
|
MANUAL = 2 # Archiving was run manually, via move_messages_to_archive function
|
|
|
|
|
|
|
|
# ForeignKey to the realm with which objects archived in this transaction are associated.
|
|
|
|
# If type is set to MANUAL, this should be null.
|
2022-08-15 19:10:58 +02:00
|
|
|
realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2019-06-24 16:34:54 +02:00
|
|
|
def __str__(self) -> str:
|
2023-03-08 22:18:59 +01:00
|
|
|
return "id: {id}, type: {type}, realm: {realm}, timestamp: {timestamp}".format(
|
2019-06-24 16:34:54 +02:00
|
|
|
id=self.id,
|
|
|
|
type="MANUAL" if self.type == self.MANUAL else "RETENTION_POLICY_BASED",
|
|
|
|
realm=self.realm.string_id if self.realm else None,
|
2020-04-10 05:23:40 +02:00
|
|
|
timestamp=self.timestamp,
|
2019-06-24 16:34:54 +02:00
|
|
|
)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
class ArchivedMessage(AbstractMessage):
|
2018-07-25 00:29:05 +02:00
|
|
|
"""Used as a temporary holding place for deleted messages before they
|
|
|
|
are permanently deleted. This is an important part of a robust
|
|
|
|
'message retention' feature.
|
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
archive_transaction = models.ForeignKey(ArchiveTransaction, on_delete=CASCADE)
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class Message(AbstractMessage):
|
2023-04-17 17:02:07 +02:00
|
|
|
# Recipient types used when a Message object is provided to
|
|
|
|
# Zulip clients via the API.
|
|
|
|
#
|
|
|
|
# A detail worth noting:
|
|
|
|
# * "direct" was introduced in 2023 with the goal of
|
|
|
|
# deprecating the original "private" and becoming the
|
|
|
|
# preferred way to indicate a personal or huddle
|
|
|
|
# Recipient type via the API.
|
|
|
|
API_RECIPIENT_TYPES = ["direct", "private", "stream"]
|
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
search_tsvector = SearchVectorField(null=True)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2023-08-23 08:44:02 +02:00
|
|
|
DEFAULT_SELECT_RELATED = ["sender", "realm", "recipient", "sending_client"]
|
2023-08-01 16:12:18 +02:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def topic_name(self) -> str:
|
2016-07-14 17:48:11 +02:00
|
|
|
"""
|
|
|
|
Please start using this helper to facilitate an
|
|
|
|
eventual switch over to a separate topic table.
|
|
|
|
"""
|
|
|
|
return self.subject
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2018-11-01 16:05:30 +01:00
|
|
|
def set_topic_name(self, topic_name: str) -> None:
|
|
|
|
self.subject = topic_name
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def is_stream_message(self) -> bool:
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-10-28 21:53:47 +02:00
|
|
|
Find out whether a message is a stream message by
|
|
|
|
looking up its recipient.type. TODO: Make this
|
|
|
|
an easier operation by denormalizing the message
|
2020-03-28 01:25:56 +01:00
|
|
|
type onto Message, either explicitly (message.type)
|
2017-10-28 21:53:47 +02:00
|
|
|
or implicitly (message.stream_id is not None).
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-10-28 21:53:47 +02:00
|
|
|
return self.recipient.type == Recipient.STREAM
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_realm(self) -> Realm:
|
2023-08-10 05:59:25 +02:00
|
|
|
return self.realm
|
2013-08-22 16:56:37 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def save_rendered_content(self) -> None:
|
2013-09-20 21:25:51 +02:00
|
|
|
self.save(update_fields=["rendered_content", "rendered_content_version"])
|
|
|
|
|
2013-09-21 16:46:28 +02:00
|
|
|
@staticmethod
|
2021-02-12 08:19:30 +01:00
|
|
|
def need_to_render_content(
|
|
|
|
rendered_content: Optional[str],
|
|
|
|
rendered_content_version: Optional[int],
|
|
|
|
markdown_version: int,
|
|
|
|
) -> bool:
|
|
|
|
return (
|
|
|
|
rendered_content is None
|
|
|
|
or rendered_content_version is None
|
|
|
|
or rendered_content_version < markdown_version
|
|
|
|
)
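# Worked example: content rendered with an older processor version needs a
# re-render once markdown_version is bumped (values here are illustrative).
#
#   Message.need_to_render_content("<p>hi</p>", 3, markdown_version=4)  # True
#   Message.need_to_render_content("<p>hi</p>", 4, markdown_version=4)  # False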
|
2012-08-30 19:56:15 +02:00
|
|
|
|
2016-07-10 22:58:46 +02:00
|
|
|
@staticmethod
|
2018-05-11 02:24:34 +02:00
|
|
|
def is_status_message(content: str, rendered_content: str) -> bool:
|
2016-07-10 22:58:46 +02:00
|
|
|
"""
|
2019-12-03 15:29:44 +01:00
|
|
|
"status messages" start with /me and have special rendering:
|
|
|
|
/me loves chocolate -> Full Name loves chocolate
|
2016-07-10 22:58:46 +02:00
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
if content.startswith("/me "):
|
2019-12-03 15:29:44 +01:00
|
|
|
return True
|
2016-07-10 22:58:46 +02:00
|
|
|
return False
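# Examples (here `rendered` stands for the already-rendered HTML; the check
# only looks at the raw content prefix):
#
#   Message.is_status_message("/me loves chocolate", rendered)   # True
#   Message.is_status_message("looks good /me thinks", rendered)  # False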
|
|
|
|
|
2022-02-23 06:21:17 +01:00
|
|
|
class Meta:
|
|
|
|
indexes = [
|
2022-02-23 08:14:01 +01:00
|
|
|
GinIndex("search_tsvector", fastupdate=False, name="zerver_message_search_tsvector"),
|
2023-08-30 21:38:59 +02:00
|
|
|
models.Index(
|
|
|
|
# For moving messages between streams or marking
|
|
|
|
# streams as read. The "id" at the end makes it easy
|
|
|
|
# to scan the resulting messages in order, and perform
|
|
|
|
# batching.
|
|
|
|
"realm_id",
|
|
|
|
"recipient_id",
|
|
|
|
"id",
|
|
|
|
name="zerver_message_realm_recipient_id",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# For generating digest emails and message archiving,
|
|
|
|
# which both group by stream.
|
|
|
|
"realm_id",
|
|
|
|
"recipient_id",
|
|
|
|
"date_sent",
|
|
|
|
name="zerver_message_realm_recipient_date_sent",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# For exports, which want to limit both sender and
|
|
|
|
# receiver. The prefix of this index (realm_id,
|
|
|
|
# sender_id) can be used for scrubbing users and/or
|
|
|
|
# deleting users' messages.
|
|
|
|
"realm_id",
|
|
|
|
"sender_id",
|
|
|
|
"recipient_id",
|
|
|
|
name="zerver_message_realm_sender_recipient",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# For analytics queries
|
|
|
|
"realm_id",
|
|
|
|
"date_sent",
|
|
|
|
name="zerver_message_realm_date_sent",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# For users searching by topic (but not stream), which
|
|
|
|
# is done case-insensitively
|
|
|
|
"realm_id",
|
|
|
|
Upper("subject"),
|
|
|
|
F("id").desc(nulls_last=True),
|
|
|
|
name="zerver_message_realm_upper_subject",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# Most stream/topic searches are case-insensitive by
|
|
|
|
# topic name (e.g. messages_for_topic). The "id" at
|
|
|
|
# the end makes it easy to scan the resulting messages
|
|
|
|
# in order, and perform batching.
|
|
|
|
"realm_id",
|
|
|
|
"recipient_id",
|
|
|
|
Upper("subject"),
|
|
|
|
F("id").desc(nulls_last=True),
|
|
|
|
name="zerver_message_realm_recipient_upper_subject",
|
|
|
|
),
|
|
|
|
models.Index(
|
2023-09-27 18:17:41 +02:00
|
|
|
# Used by already_sent_mirrored_message_id, and when
|
|
|
|
# determining recent topics (we post-process to merge
|
|
|
|
# and show the most recent case)
|
2023-08-30 21:38:59 +02:00
|
|
|
"realm_id",
|
|
|
|
"recipient_id",
|
|
|
|
"subject",
|
|
|
|
F("id").desc(nulls_last=True),
|
|
|
|
name="zerver_message_realm_recipient_subject",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# Only used by update_first_visible_message_id
|
|
|
|
"realm_id",
|
|
|
|
F("id").desc(nulls_last=True),
|
|
|
|
name="zerver_message_realm_id",
|
|
|
|
),
|
2022-02-23 06:21:17 +01:00
|
|
|
]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-23 22:47:50 +02:00
|
|
|
def get_context_for_message(message: Message) -> QuerySet[Message]:
|
2014-07-15 21:03:51 +02:00
|
|
|
return Message.objects.filter(
|
2023-08-30 21:19:37 +02:00
|
|
|
# Uses index: zerver_message_realm_recipient_upper_subject
|
|
|
|
realm_id=message.realm_id,
|
2014-07-15 21:03:51 +02:00
|
|
|
recipient_id=message.recipient_id,
|
2023-02-07 17:43:35 +01:00
|
|
|
subject__iexact=message.subject,
|
2014-07-15 21:03:51 +02:00
|
|
|
id__lt=message.id,
|
2019-08-28 02:43:19 +02:00
|
|
|
date_sent__gt=message.date_sent - timedelta(minutes=15),
|
2021-02-12 08:20:45 +01:00
|
|
|
).order_by("-id")[:10]
|
2014-07-15 21:03:51 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-07-08 02:25:55 +02:00
|
|
|
post_save.connect(flush_message, sender=Message)
|
2014-07-15 21:03:51 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-05-29 16:01:34 +02:00
|
|
|
class AbstractSubMessage(models.Model):
|
2018-02-11 14:08:01 +01:00
|
|
|
# We can send little text messages that are associated with a regular
|
|
|
|
# Zulip message. These can be used for experimental widgets like embedded
|
|
|
|
# games, surveys, mini threads, etc. These are designed to be pretty
|
|
|
|
# generic in purpose.
|
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
sender = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
msg_type = models.TextField()
|
|
|
|
content = models.TextField()
|
2018-02-11 14:08:01 +01:00
|
|
|
|
2019-05-29 16:01:34 +02:00
|
|
|
class Meta:
|
|
|
|
abstract = True
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-05-29 16:01:34 +02:00
|
|
|
class SubMessage(AbstractSubMessage):
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE)
|
2019-05-29 16:01:34 +02:00
|
|
|
|
2018-02-11 14:08:01 +01:00
|
|
|
@staticmethod
|
|
|
|
def get_raw_db_rows(needed_ids: List[int]) -> List[Dict[str, Any]]:
|
2021-02-12 08:20:45 +01:00
|
|
|
fields = ["id", "message_id", "sender_id", "msg_type", "content"]
|
2018-02-11 14:08:01 +01:00
|
|
|
query = SubMessage.objects.filter(message_id__in=needed_ids).values(*fields)
|
2021-02-12 08:20:45 +01:00
|
|
|
query = query.order_by("message_id", "id")
|
2018-02-11 14:08:01 +01:00
|
|
|
return list(query)
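# Each returned row is a plain dict, ordered by (message_id, id), e.g.
# (illustrative values):
#
#   {"id": 5, "message_id": 99, "sender_id": 12, "msg_type": "widget", "content": "{...}"}
#
# so clients can replay the submessages for each message in the order they
# were created.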
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-05-29 16:01:34 +02:00
|
|
|
class ArchivedSubMessage(AbstractSubMessage):
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE)
|
2019-05-29 16:01:34 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-02-11 14:09:17 +01:00
|
|
|
post_save.connect(flush_submessage, sender=SubMessage)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-23 19:24:22 +02:00
|
|
|
class Draft(models.Model):
|
2021-02-12 08:19:30 +01:00
|
|
|
"""Server-side storage model for storing drafts so that drafts can be synced across
|
2020-07-23 19:24:22 +02:00
|
|
|
multiple clients/devices.
|
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
|
|
|
|
recipient = models.ForeignKey(Recipient, null=True, on_delete=models.SET_NULL)
|
|
|
|
topic = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH, db_index=True)
|
|
|
|
content = models.TextField() # Length should not exceed MAX_MESSAGE_LENGTH
|
|
|
|
last_edit_time = models.DateTimeField(db_index=True)
|
2020-07-23 19:24:22 +02:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2020-07-23 19:24:22 +02:00
|
|
|
def __str__(self) -> str:
|
2023-03-08 22:18:59 +01:00
|
|
|
return f"{self.user_profile.email} / {self.id} / {self.last_edit_time}"
|
2020-07-23 19:24:22 +02:00
|
|
|
|
2020-08-04 05:31:41 +02:00
|
|
|
def to_dict(self) -> Dict[str, Any]:
|
2023-04-14 21:07:57 +02:00
|
|
|
to, recipient_type_str = get_recipient_ids(self.recipient, self.user_profile_id)
|
2020-07-23 19:24:22 +02:00
|
|
|
return {
|
2020-08-06 08:10:35 +02:00
|
|
|
"id": self.id,
|
2023-04-14 21:06:51 +02:00
|
|
|
"type": recipient_type_str,
|
2020-07-23 19:24:22 +02:00
|
|
|
"to": to,
|
|
|
|
"topic": self.topic,
|
|
|
|
"content": self.content,
|
2020-08-04 05:31:41 +02:00
|
|
|
"timestamp": int(self.last_edit_time.timestamp()),
|
2020-07-23 19:24:22 +02:00
|
|
|
}
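# Illustrative return value for a stream message draft (all values made up):
#
#   {
#       "id": 17,
#       "type": "stream",
#       "to": [123],               # recipient ids from get_recipient_ids
#       "topic": "lunch",
#       "content": "Anyone up for pizza?",
#       "timestamp": 1690000000,   # seconds, from last_edit_time
#   }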
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-23 22:11:21 +02:00
|
|
|
class AbstractEmoji(models.Model):
|
2018-07-25 05:57:10 +02:00
|
|
|
"""For emoji reactions to messages (and potentially future reaction types).
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2018-07-25 05:57:10 +02:00
|
|
|
Emoji are surprisingly complicated to implement correctly. For details
|
|
|
|
on how this subsystem works, see:
|
2018-07-25 00:29:05 +02:00
|
|
|
https://zulip.readthedocs.io/en/latest/subsystems/emoji.html
|
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2018-07-25 05:57:10 +02:00
|
|
|
# The user-facing name for an emoji reaction. With emoji aliases,
|
|
|
|
# there may be multiple accepted names for a given emoji; this
|
|
|
|
# field encodes which one the user selected.
|
2022-08-15 19:10:58 +02:00
|
|
|
emoji_name = models.TextField()
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
UNICODE_EMOJI = "unicode_emoji"
|
|
|
|
REALM_EMOJI = "realm_emoji"
|
|
|
|
ZULIP_EXTRA_EMOJI = "zulip_extra_emoji"
|
2021-02-12 08:19:30 +01:00
|
|
|
REACTION_TYPES = (
|
2021-04-16 00:57:30 +02:00
|
|
|
(UNICODE_EMOJI, gettext_lazy("Unicode emoji")),
|
|
|
|
(REALM_EMOJI, gettext_lazy("Custom emoji")),
|
|
|
|
(ZULIP_EXTRA_EMOJI, gettext_lazy("Zulip extra emoji")),
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2022-08-15 19:10:58 +02:00
|
|
|
reaction_type = models.CharField(default=UNICODE_EMOJI, choices=REACTION_TYPES, max_length=30)
|
2016-11-03 18:49:00 +01:00
|
|
|
|
2021-12-06 20:14:49 +01:00
|
|
|
# A string with the property that (realm, reaction_type,
|
|
|
|
# emoji_code) uniquely determines the emoji glyph.
|
|
|
|
#
|
|
|
|
# We cannot use `emoji_name` for this purpose, since the
|
|
|
|
# name-to-glyph mappings for unicode emoji change with time as we
|
|
|
|
# update our emoji database, and multiple custom emoji can have
|
|
|
|
# the same `emoji_name` in a realm (at most one can have
|
|
|
|
# `deactivated=False`). The format for `emoji_code` varies by
|
|
|
|
# `reaction_type`:
|
2018-07-25 05:57:10 +02:00
|
|
|
#
|
|
|
|
# * For Unicode emoji, a dash-separated hex encoding of the sequence of
|
|
|
|
# Unicode codepoints that define this emoji in the Unicode
|
|
|
|
# specification. For examples, see "non_qualified" or "unified" in the
|
|
|
|
# following data, with "non_qualified" taking precedence when both present:
|
|
|
|
# https://raw.githubusercontent.com/iamcal/emoji-data/master/emoji_pretty.json
|
|
|
|
#
|
2021-12-06 20:14:49 +01:00
|
|
|
# * For user uploaded custom emoji (`reaction_type="realm_emoji"`), the stringified ID
|
|
|
|
# of the RealmEmoji object, computed as `str(realm_emoji.id)`.
|
2018-07-25 05:57:10 +02:00
|
|
|
#
|
2021-12-06 20:14:49 +01:00
|
|
|
# * For "Zulip extra emoji" (like :zulip:), the name of the emoji (e.g. "zulip").
|
2022-08-15 19:10:58 +02:00
|
|
|
emoji_code = models.TextField()
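# Illustrative (reaction_type, emoji_code) pairs matching the rules above:
#
#   ("unicode_emoji", "1f44d")        # single Unicode codepoint, hex-encoded
#   ("unicode_emoji", "1f1e6-1f1e8")  # codepoint sequence, dash-separated
#   ("realm_emoji", "42")             # str(realm_emoji.id) of a custom emoji
#   ("zulip_extra_emoji", "zulip")    # the emoji's name, e.g. :zulip: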
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2021-07-23 22:11:21 +02:00
|
|
|
class Meta:
|
|
|
|
abstract = True
|
|
|
|
|
|
|
|
|
|
|
|
class AbstractReaction(AbstractEmoji):
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2019-05-29 15:52:57 +02:00
|
|
|
abstract = True
|
2023-02-25 02:29:12 +01:00
|
|
|
unique_together = ("user_profile", "message", "reaction_type", "emoji_code")
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-11-03 18:49:00 +01:00
|
|
|
|
2019-05-29 15:52:57 +02:00
|
|
|
class Reaction(AbstractReaction):
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE)
|
2019-05-29 15:52:57 +02:00
|
|
|
|
2016-12-06 07:19:34 +01:00
|
|
|
@staticmethod
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_raw_db_rows(needed_ids: List[int]) -> List[Dict[str, Any]]:
|
2021-02-12 08:19:30 +01:00
|
|
|
fields = [
|
2021-02-12 08:20:45 +01:00
|
|
|
"message_id",
|
|
|
|
"emoji_name",
|
|
|
|
"emoji_code",
|
|
|
|
"reaction_type",
|
|
|
|
"user_profile__email",
|
2021-04-22 16:23:09 +02:00
|
|
|
"user_profile_id",
|
2021-02-12 08:20:45 +01:00
|
|
|
"user_profile__full_name",
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
2021-11-30 01:47:09 +01:00
|
|
|
# The ordering is important here, as it makes it convenient
|
|
|
|
# for clients to display reactions in order without
|
|
|
|
# client-side sorting code.
|
|
|
|
return Reaction.objects.filter(message_id__in=needed_ids).values(*fields).order_by("id")
|
2016-12-06 07:19:34 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2019-01-03 22:25:04 +01:00
|
|
|
def __str__(self) -> str:
|
2020-06-10 06:41:04 +02:00
|
|
|
return f"{self.user_profile.email} / {self.message.id} / {self.emoji_name}"
|
2019-01-03 22:25:04 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-05-29 15:52:57 +02:00
|
|
|
class ArchivedReaction(AbstractReaction):
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE)
|
2019-05-29 15:52:57 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-06-08 02:04:09 +02:00
|
|
|
# Whenever a message is sent, for each user subscribed to the
|
2021-05-14 19:48:00 +02:00
|
|
|
# corresponding Recipient object (that is not long-term idle), we add
|
|
|
|
# a row to the UserMessage table indicating that that user received
|
|
|
|
# that message. This table allows us to quickly query any user's last
|
|
|
|
# 1000 messages to generate the home view and search exactly the
|
|
|
|
# user's message history.
|
2016-04-01 08:42:38 +02:00
|
|
|
#
|
2021-05-14 19:48:00 +02:00
|
|
|
# The long-term idle optimization is extremely important for large,
|
|
|
|
# open organizations, and is described in detail here:
|
|
|
|
# https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html#soft-deactivation
|
2016-04-01 08:42:38 +02:00
|
|
|
#
|
2021-05-14 19:48:00 +02:00
|
|
|
# In particular, new messages to public streams will only generate
|
|
|
|
# UserMessage rows for Members who are long_term_idle if they would
|
|
|
|
# have nonzero flags for the message (e.g., a mention, alert word, or
|
|
|
|
# mobile push notification).
|
|
|
|
#
|
|
|
|
# The flags field stores metadata like whether the user has read the
|
|
|
|
# message, starred or collapsed the message, was mentioned in the
|
|
|
|
# message, etc. We use postgres partial indexes on flags to make
|
|
|
|
# queries for "User X's messages with flag Y" extremely fast without
|
|
|
|
# consuming much storage space.
|
|
|
|
#
|
|
|
|
# UserMessage is the largest table in many Zulip installations, even
|
2016-04-01 08:42:38 +02:00
|
|
|
# though each row is only 4 integers.
|
2017-10-27 08:42:27 +02:00
|
|
|
class AbstractUserMessage(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
id = models.BigAutoField(primary_key=True)
|
2019-08-24 01:34:36 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
2019-06-04 09:26:45 +02:00
|
|
|
# The order here is important! It's the order of fields in the bitfield.
|
|
|
|
ALL_FLAGS = [
|
2021-02-12 08:20:45 +01:00
|
|
|
"read",
|
|
|
|
"starred",
|
|
|
|
"collapsed",
|
|
|
|
"mentioned",
|
2023-11-03 15:20:44 +01:00
|
|
|
"stream_wildcard_mentioned",
|
2023-10-19 10:17:09 +02:00
|
|
|
"topic_wildcard_mentioned",
|
|
|
|
"group_mentioned",
|
|
|
|
# These next 2 flags are from features that have since been removed.
|
2023-11-03 12:07:53 +01:00
|
|
|
# We've cleared these 2 flags in migration 0486.
|
2021-02-12 08:20:45 +01:00
|
|
|
"force_expand",
|
|
|
|
"force_collapse",
|
2019-06-04 09:26:45 +02:00
|
|
|
# Whether the message contains any of the user's alert words.
|
2021-02-12 08:20:45 +01:00
|
|
|
"has_alert_word",
|
2019-06-04 09:26:45 +02:00
|
|
|
# The historical flag is used to mark messages which the user
|
|
|
|
# did not receive when they were sent, but later added to
|
|
|
|
# their history via e.g. starring the message. This is
|
|
|
|
# important for accounting for the "Subscribed to stream" dividers.
|
2021-02-12 08:20:45 +01:00
|
|
|
"historical",
|
2023-06-19 16:42:11 +02:00
|
|
|
# Whether the message is a direct message; this flag is a
|
2019-06-04 09:26:45 +02:00
|
|
|
# denormalization of message.recipient.type to support an
|
2023-06-19 16:42:11 +02:00
|
|
|
# efficient index on UserMessage for a user's direct messages.
|
2021-02-12 08:20:45 +01:00
|
|
|
"is_private",
|
2019-06-04 09:26:45 +02:00
|
|
|
# Whether we've sent a push notification to the user's mobile
|
|
|
|
# devices for this message that has not been revoked.
|
2021-02-12 08:20:45 +01:00
|
|
|
"active_mobile_push_notification",
|
2019-06-04 09:26:45 +02:00
|
|
|
]
|
2018-08-08 11:18:44 +02:00
|
|
|
# Certain flags are used only for internal accounting within the
|
2019-06-04 09:26:45 +02:00
|
|
|
# Zulip backend, and don't make sense to expose to the API.
|
2018-08-09 22:57:36 +02:00
|
|
|
NON_API_FLAGS = {"is_private", "active_mobile_push_notification"}
|
2019-06-04 09:26:45 +02:00
|
|
|
# Certain additional flags are just set once when the UserMessage
|
|
|
|
# row is created.
|
|
|
|
NON_EDITABLE_FLAGS = {
|
|
|
|
# These flags are bookkeeping and don't make sense to edit.
|
|
|
|
"has_alert_word",
|
|
|
|
"mentioned",
|
2023-11-03 15:20:44 +01:00
|
|
|
"stream_wildcard_mentioned",
|
2023-10-19 10:17:09 +02:00
|
|
|
"topic_wildcard_mentioned",
|
|
|
|
"group_mentioned",
|
2019-06-04 09:26:45 +02:00
|
|
|
"historical",
|
|
|
|
# Unused flags can't be edited.
|
|
|
|
"force_expand",
|
|
|
|
"force_collapse",
|
|
|
|
}
|
2020-04-22 01:09:50 +02:00
|
|
|
flags: BitHandler = BitField(flags=ALL_FLAGS, default=0)
|
2012-09-07 17:04:41 +02:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2016-11-01 11:26:38 +01:00
|
|
|
abstract = True
|
2012-11-08 21:08:13 +01:00
|
|
|
unique_together = ("user_profile", "message")
|
|
|
|
|
2017-05-23 03:02:01 +02:00
|
|
|
@staticmethod
|
2022-06-27 08:00:09 +02:00
|
|
|
def where_flag_is_present(flagattr: Bit) -> str:
|
2018-08-22 00:03:00 +02:00
|
|
|
# Use this for Django ORM queries to access starred messages.
|
|
|
|
# This custom SQL plays nice with our partial indexes. Grep
|
|
|
|
# the code for example usage.
|
|
|
|
#
|
|
|
|
# The key detail is that e.g.
|
|
|
|
# UserMessage.objects.filter(user_profile=user_profile, flags=UserMessage.flags.starred)
|
|
|
|
# will generate a query involving `flags & 2 = 2`, which doesn't match our index.
|
2022-06-27 08:00:09 +02:00
|
|
|
return f"flags & {1 << flagattr.number} <> 0"
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def where_flag_is_absent(flagattr: Bit) -> str:
|
|
|
|
return f"flags & {1 << flagattr.number} = 0"
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def where_unread() -> str:
|
2022-10-08 06:10:17 +02:00
|
|
|
return AbstractUserMessage.where_flag_is_absent(AbstractUserMessage.flags.read)
|
2022-06-27 08:00:09 +02:00
|
|
|
|
2021-06-16 21:15:47 +02:00
|
|
|
@staticmethod
|
|
|
|
def where_read() -> str:
|
2022-10-08 06:10:17 +02:00
|
|
|
return AbstractUserMessage.where_flag_is_present(AbstractUserMessage.flags.read)
|
2021-06-16 21:15:47 +02:00
|
|
|
|
2022-06-27 08:00:09 +02:00
|
|
|
@staticmethod
|
|
|
|
def where_starred() -> str:
|
2022-10-08 06:10:17 +02:00
|
|
|
return AbstractUserMessage.where_flag_is_present(AbstractUserMessage.flags.starred)
|
2018-08-22 00:03:00 +02:00
|
|
|
|
2018-08-22 00:08:41 +02:00
|
|
|
@staticmethod
|
|
|
|
def where_active_push_notification() -> str:
|
2022-06-27 08:00:09 +02:00
|
|
|
return AbstractUserMessage.where_flag_is_present(
|
2022-10-08 06:10:17 +02:00
|
|
|
AbstractUserMessage.flags.active_mobile_push_notification
|
2022-06-27 08:00:09 +02:00
|
|
|
)
|
2018-08-22 00:08:41 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def flags_list(self) -> List[str]:
|
2017-09-10 21:36:23 +02:00
|
|
|
flags = int(self.flags)
|
|
|
|
return self.flags_list_for_flags(flags)
|
|
|
|
|
|
|
|
@staticmethod
|
2017-11-27 07:33:05 +01:00
|
|
|
def flags_list_for_flags(val: int) -> List[str]:
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-09-09 19:47:38 +02:00
|
|
|
This function is highly optimized, because it actually slows down
|
|
|
|
sending messages in a naive implementation.
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-11-07 18:40:39 +01:00
|
|
|
flags = []
|
|
|
|
mask = 1
|
|
|
|
for flag in UserMessage.ALL_FLAGS:
|
2018-08-08 11:18:44 +02:00
|
|
|
if (val & mask) and flag not in AbstractUserMessage.NON_API_FLAGS:
|
2017-11-07 18:40:39 +01:00
|
|
|
flags.append(flag)
|
|
|
|
mask <<= 1
|
|
|
|
return flags
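# Worked example: 0b1001 has the "read" (bit 0) and "mentioned" (bit 3)
# bits set, so
#
#   UserMessage.flags_list_for_flags(0b1001)  # -> ["read", "mentioned"]
#
# flags listed in NON_API_FLAGS are filtered out even when their bits are set.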
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
|
2018-07-25 00:29:05 +02:00
|
|
|
class UserMessage(AbstractUserMessage):
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2022-02-23 07:15:28 +01:00
|
|
|
class Meta(AbstractUserMessage.Meta):
|
|
|
|
indexes = [
|
2022-02-23 07:47:46 +01:00
|
|
|
models.Index(
|
|
|
|
"user_profile",
|
|
|
|
"message",
|
|
|
|
condition=Q(flags__andnz=AbstractUserMessage.flags.starred.mask),
|
|
|
|
name="zerver_usermessage_starred_message_id",
|
|
|
|
),
|
2022-02-23 07:45:57 +01:00
|
|
|
models.Index(
|
|
|
|
"user_profile",
|
|
|
|
"message",
|
|
|
|
condition=Q(flags__andnz=AbstractUserMessage.flags.mentioned.mask),
|
|
|
|
name="zerver_usermessage_mentioned_message_id",
|
|
|
|
),
|
2022-02-23 07:44:03 +01:00
|
|
|
models.Index(
|
|
|
|
"user_profile",
|
|
|
|
"message",
|
|
|
|
condition=Q(flags__andz=AbstractUserMessage.flags.read.mask),
|
|
|
|
name="zerver_usermessage_unread_message_id",
|
|
|
|
),
|
2022-02-23 07:41:32 +01:00
|
|
|
models.Index(
|
|
|
|
"user_profile",
|
|
|
|
"message",
|
|
|
|
condition=Q(flags__andnz=AbstractUserMessage.flags.has_alert_word.mask),
|
|
|
|
name="zerver_usermessage_has_alert_word_message_id",
|
|
|
|
),
|
2022-02-23 07:38:38 +01:00
|
|
|
models.Index(
|
|
|
|
"user_profile",
|
|
|
|
"message",
|
|
|
|
condition=Q(flags__andnz=AbstractUserMessage.flags.mentioned.mask)
|
2023-11-03 15:20:44 +01:00
|
|
|
| Q(flags__andnz=AbstractUserMessage.flags.stream_wildcard_mentioned.mask),
|
2022-02-23 07:38:38 +01:00
|
|
|
name="zerver_usermessage_wildcard_mentioned_message_id",
|
|
|
|
),
|
2023-10-19 10:17:09 +02:00
|
|
|
models.Index(
|
|
|
|
"user_profile",
|
|
|
|
"message",
|
|
|
|
condition=Q(
|
|
|
|
flags__andnz=AbstractUserMessage.flags.mentioned.mask
|
2023-11-03 15:20:44 +01:00
|
|
|
| AbstractUserMessage.flags.stream_wildcard_mentioned.mask
|
2023-10-19 10:17:09 +02:00
|
|
|
| AbstractUserMessage.flags.topic_wildcard_mentioned.mask
|
|
|
|
| AbstractUserMessage.flags.group_mentioned.mask
|
|
|
|
),
|
|
|
|
name="zerver_usermessage_any_mentioned_message_id",
|
|
|
|
),
|
2022-02-23 07:15:28 +01:00
|
|
|
models.Index(
|
|
|
|
"user_profile",
|
|
|
|
"message",
|
2022-02-23 07:28:51 +01:00
|
|
|
condition=Q(flags__andnz=AbstractUserMessage.flags.is_private.mask),
|
|
|
|
name="zerver_usermessage_is_private_message_id",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
"user_profile",
|
|
|
|
"message",
|
2022-02-23 07:15:28 +01:00
|
|
|
condition=Q(
|
|
|
|
flags__andnz=AbstractUserMessage.flags.active_mobile_push_notification.mask
|
|
|
|
),
|
|
|
|
name="zerver_usermessage_active_mobile_push_notification_id",
|
|
|
|
),
|
|
|
|
]
|
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2022-08-01 17:16:53 +02:00
|
|
|
def __str__(self) -> str:
|
2023-07-16 14:21:27 +02:00
|
|
|
recipient_string = self.message.recipient.label()
|
|
|
|
return f"{recipient_string} / {self.user_profile.email} ({self.flags_list()})"
|
2022-08-01 17:16:53 +02:00
|
|
|
|
2022-08-14 12:02:05 +02:00
|
|
|
@staticmethod
|
|
|
|
def select_for_update_query() -> QuerySet["UserMessage"]:
|
|
|
|
"""This SELECT FOR UPDATE query ensures consistent ordering on
|
|
|
|
the row locks acquired by a bulk update operation to modify
|
|
|
|
message flags using bitand/bitor.
|
|
|
|
|
2023-06-15 19:51:26 +02:00
|
|
|
This consistent ordering is important to prevent deadlocks when
|
|
|
|
2 or more bulk updates to the same rows in the UserMessage table
|
|
|
|
race against each other (For example, if a client submits
|
|
|
|
simultaneous duplicate API requests to mark a certain set of
|
|
|
|
messages as read).
|
2022-08-14 12:02:05 +02:00
|
|
|
"""
|
|
|
|
return UserMessage.objects.select_for_update().order_by("message_id")
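# Minimal usage sketch (hypothetical; the real bulk-update call sites live
# in the actions code): evaluate the locking query first so rows are locked
# in message_id order, then apply the bit operation.
#
#   with transaction.atomic(savepoint=False):
#       locked_ids = list(
#           UserMessage.select_for_update_query()
#           .filter(user_profile=user_profile, message_id__in=message_ids)
#           .values_list("id", flat=True)
#       )
#       UserMessage.objects.filter(id__in=locked_ids).update(
#           flags=F("flags").bitor(UserMessage.flags.read)
#       )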
|
|
|
|
|
2023-11-04 14:05:38 +01:00
|
|
|
@staticmethod
|
|
|
|
def has_any_mentions(user_profile_id: int, message_id: int) -> bool:
|
|
|
|
# The query uses the 'zerver_usermessage_any_mentioned_message_id' index.
|
|
|
|
return UserMessage.objects.filter(
|
|
|
|
Q(
|
|
|
|
flags__andnz=UserMessage.flags.mentioned.mask
|
2023-11-03 15:20:44 +01:00
|
|
|
| UserMessage.flags.stream_wildcard_mentioned.mask
|
2023-11-04 14:05:38 +01:00
|
|
|
| UserMessage.flags.topic_wildcard_mentioned.mask
|
|
|
|
| UserMessage.flags.group_mentioned.mask
|
|
|
|
),
|
|
|
|
user_profile_id=user_profile_id,
|
|
|
|
message_id=message_id,
|
|
|
|
).exists()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_usermessage_by_message_id(
|
|
|
|
user_profile: UserProfile, message_id: int
|
|
|
|
) -> Optional[UserMessage]:
|
2018-07-27 11:47:07 +02:00
|
|
|
try:
|
2021-02-12 08:19:30 +01:00
|
|
|
return UserMessage.objects.select_related().get(
|
2021-04-22 16:23:09 +02:00
|
|
|
user_profile=user_profile, message_id=message_id
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2018-07-27 11:47:07 +02:00
|
|
|
except UserMessage.DoesNotExist:
|
|
|
|
return None
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
class ArchivedUserMessage(AbstractUserMessage):
|
2018-07-25 00:29:05 +02:00
|
|
|
"""Used as a temporary holding place for deleted UserMessages objects
|
|
|
|
before they are permanently deleted. This is an important part of
|
|
|
|
a robust 'message retention' feature.
|
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2022-08-01 17:16:53 +02:00
|
|
|
def __str__(self) -> str:
|
2023-07-16 14:21:27 +02:00
|
|
|
recipient_string = self.message.recipient.label()
|
|
|
|
return f"{recipient_string} / {self.user_profile.email} ({self.flags_list()})"
|
2022-08-01 17:16:53 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class AbstractAttachment(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
file_name = models.TextField(db_index=True)
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2016-03-24 20:24:01 +01:00
|
|
|
# path_id is a storage location agnostic representation of the path of the file.
|
|
|
|
# If the path of a file is http://localhost:9991/user_uploads/a/b/abc/temp_file.py
|
|
|
|
# then its path_id will be a/b/abc/temp_file.py.
|
2022-08-15 19:10:58 +02:00
|
|
|
path_id = models.TextField(db_index=True, unique=True)
|
|
|
|
owner = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
create_time = models.DateTimeField(
|
2021-02-12 08:19:30 +01:00
|
|
|
default=timezone_now,
|
|
|
|
db_index=True,
|
2020-04-22 01:09:50 +02:00
|
|
|
)
|
2020-06-20 21:58:35 +02:00
|
|
|
# Size of the uploaded file, in bytes
|
2022-08-15 19:10:58 +02:00
|
|
|
size = models.IntegerField()
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2022-03-23 05:09:26 +01:00
|
|
|
# The two fields below serve as caches to let us avoid looking up
|
|
|
|
# the corresponding messages/streams to check permissions before
|
|
|
|
# serving these files.
|
|
|
|
#
|
|
|
|
# For both fields, the `null` state is used when a change in
|
|
|
|
# message permissions means that we need to determine their proper
|
|
|
|
# value.
|
2020-08-01 03:17:21 +02:00
|
|
|
|
2018-07-25 00:29:05 +02:00
|
|
|
# Whether this attachment has been posted to a public stream, and
|
|
|
|
# thus should be available to all non-guest users in the
|
|
|
|
# organization (even if they weren't a recipient of a message
|
2020-08-01 03:17:21 +02:00
|
|
|
# linking to it).
|
2022-08-15 19:10:58 +02:00
|
|
|
is_realm_public = models.BooleanField(default=False, null=True)
|
2020-08-01 03:17:21 +02:00
|
|
|
# Whether this attachment has been posted to a web-public stream,
|
|
|
|
# and thus should be available to everyone on the internet, even
|
|
|
|
# if the person isn't logged in.
|
2022-08-15 19:10:58 +02:00
|
|
|
is_web_public = models.BooleanField(default=False, null=True)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2016-11-01 11:26:38 +01:00
|
|
|
abstract = True
|
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2018-05-11 02:24:34 +02:00
|
|
|
def __str__(self) -> str:
|
2023-03-08 22:18:59 +01:00
|
|
|
return self.file_name
|
2017-05-17 05:59:50 +02:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class ArchivedAttachment(AbstractAttachment):
|
2018-07-25 00:29:05 +02:00
|
|
|
"""Used as a temporary holding place for deleted Attachment objects
|
|
|
|
before they are permanently deleted. This is an important part of
|
|
|
|
a robust 'message retention' feature.
|
2022-06-08 10:14:11 +02:00
|
|
|
|
|
|
|
Unlike the similar archive tables, ArchivedAttachment does not
|
|
|
|
have an ArchiveTransaction foreign key, and thus will not be
|
|
|
|
directly deleted by clean_archived_data. Instead, attachments that
|
|
|
|
were only referenced by now fully deleted messages will leave
|
|
|
|
ArchivedAttachment objects with empty `.messages`.
|
|
|
|
|
|
|
|
A second step, delete_old_unclaimed_attachments, will delete the
|
|
|
|
resulting orphaned ArchivedAttachment objects, along with removing
|
|
|
|
the associated uploaded files from storage.
|
2018-07-25 00:29:05 +02:00
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
messages = models.ManyToManyField(
|
2022-03-25 01:20:58 +01:00
|
|
|
ArchivedMessage, related_name="attachment_set", related_query_name="attachment"
|
|
|
|
)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
class Attachment(AbstractAttachment):
|
2022-08-15 19:10:58 +02:00
|
|
|
messages = models.ManyToManyField(Message)
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2023-05-07 20:04:37 +02:00
|
|
|
# This is only present for Attachment and not ArchivedAttachment,
|
|
|
|
# because ScheduledMessage is not subject to archiving.
|
2023-11-14 00:38:06 +01:00
|
|
|
scheduled_messages = models.ManyToManyField("zerver.ScheduledMessage")
|
2023-05-07 20:04:37 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def is_claimed(self) -> bool:
|
2023-09-07 18:22:41 +02:00
|
|
|
return self.messages.exists() or self.scheduled_messages.exists()
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def to_dict(self) -> Dict[str, Any]:
|
2016-12-28 14:46:42 +01:00
|
|
|
return {
|
2021-02-12 08:20:45 +01:00
|
|
|
"id": self.id,
|
|
|
|
"name": self.file_name,
|
|
|
|
"path_id": self.path_id,
|
|
|
|
"size": self.size,
|
2017-09-15 01:17:38 +02:00
|
|
|
# convert to JavaScript-style UNIX timestamp so we can take
|
2022-02-24 21:15:43 +01:00
|
|
|
# advantage of client time zones.
|
2021-02-12 08:20:45 +01:00
|
|
|
"create_time": int(time.mktime(self.create_time.timetuple()) * 1000),
|
|
|
|
"messages": [
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"id": m.id,
|
|
|
|
"date_sent": int(time.mktime(m.date_sent.timetuple()) * 1000),
|
2021-02-12 08:19:30 +01:00
|
|
|
}
|
|
|
|
for m in self.messages.all()
|
|
|
|
],
|
2016-12-28 14:46:42 +01:00
|
|
|
}
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-14 07:46:31 +01:00
|
|
|
post_save.connect(flush_used_upload_space_cache, sender=Attachment)
|
|
|
|
post_delete.connect(flush_used_upload_space_cache, sender=Attachment)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-03-23 05:09:26 +01:00
|
|
|
def validate_attachment_request_for_spectator_access(
|
|
|
|
realm: Realm, attachment: Attachment
|
|
|
|
) -> Optional[bool]:
|
|
|
|
if attachment.realm != realm:
|
|
|
|
return False
|
|
|
|
|
|
|
|
# Update cached is_web_public property, if necessary.
|
|
|
|
if attachment.is_web_public is None:
|
|
|
|
# Fill the cache in a single query. This is important to avoid
|
|
|
|
# a potential race condition between checking and setting,
|
|
|
|
# where the attachment could have been moved again.
|
|
|
|
Attachment.objects.filter(id=attachment.id, is_web_public__isnull=True).update(
|
|
|
|
is_web_public=Exists(
|
|
|
|
Message.objects.filter(
|
2023-08-30 21:19:37 +02:00
|
|
|
# Uses index: zerver_attachment_messages_attachment_id_message_id_key
|
|
|
|
realm_id=realm.id,
|
2022-03-23 05:09:26 +01:00
|
|
|
attachment=OuterRef("id"),
|
|
|
|
recipient__stream__invite_only=False,
|
|
|
|
recipient__stream__is_web_public=True,
|
|
|
|
),
|
|
|
|
),
|
|
|
|
)
|
|
|
|
attachment.refresh_from_db()
|
|
|
|
|
2021-11-02 15:42:58 +01:00
|
|
|
if not attachment.is_web_public:
|
|
|
|
return False
|
|
|
|
|
|
|
|
if settings.RATE_LIMITING:
|
|
|
|
try:
|
|
|
|
from zerver.lib.rate_limiter import rate_limit_spectator_attachment_access_by_file
|
|
|
|
|
|
|
|
rate_limit_spectator_attachment_access_by_file(attachment.path_id)
|
2022-11-17 09:30:48 +01:00
|
|
|
except RateLimitedError:
|
2021-11-02 15:42:58 +01:00
|
|
|
return False
|
2022-03-23 05:09:26 +01:00
|
|
|
|
2021-11-02 15:42:58 +01:00
|
|
|
return True
|
2022-03-23 05:09:26 +01:00
|
|
|
|
2021-11-02 15:42:58 +01:00
|
|
|
|
|
|
|
def validate_attachment_request(
|
|
|
|
maybe_user_profile: Union[UserProfile, AnonymousUser],
|
|
|
|
path_id: str,
|
|
|
|
realm: Optional[Realm] = None,
|
|
|
|
) -> Optional[bool]:
|
2016-06-17 19:48:17 +02:00
|
|
|
try:
|
|
|
|
attachment = Attachment.objects.get(path_id=path_id)
|
|
|
|
except Attachment.DoesNotExist:
|
|
|
|
return None
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2021-11-02 15:42:58 +01:00
|
|
|
if isinstance(maybe_user_profile, AnonymousUser):
|
|
|
|
assert realm is not None
|
|
|
|
return validate_attachment_request_for_spectator_access(realm, attachment)
|
|
|
|
|
|
|
|
user_profile = maybe_user_profile
|
|
|
|
assert isinstance(user_profile, UserProfile)
|
|
|
|
|
2022-03-23 05:09:26 +01:00
|
|
|
# Update cached is_realm_public property, if necessary.
|
|
|
|
if attachment.is_realm_public is None:
|
|
|
|
# Fill the cache in a single query. This is important to avoid
|
|
|
|
# a potential race condition between checking and setting,
|
|
|
|
# where the attachment could have been moved again.
|
|
|
|
Attachment.objects.filter(id=attachment.id, is_realm_public__isnull=True).update(
|
|
|
|
is_realm_public=Exists(
|
|
|
|
Message.objects.filter(
|
2023-08-30 21:19:37 +02:00
|
|
|
# Uses index: zerver_attachment_messages_attachment_id_message_id_key
|
|
|
|
realm_id=user_profile.realm_id,
|
2022-03-23 05:09:26 +01:00
|
|
|
attachment=OuterRef("id"),
|
|
|
|
recipient__stream__invite_only=False,
|
|
|
|
),
|
|
|
|
),
|
|
|
|
)
|
|
|
|
attachment.refresh_from_db()
|
|
|
|
|
2018-06-05 21:02:02 +02:00
|
|
|
if user_profile == attachment.owner:
|
|
|
|
# If you own the file, you can access it.
|
|
|
|
return True
|
2021-02-12 08:19:30 +01:00
|
|
|
if (
|
|
|
|
attachment.is_realm_public
|
|
|
|
and attachment.realm == user_profile.realm
|
|
|
|
and user_profile.can_access_public_streams()
|
|
|
|
):
|
2018-06-05 21:02:02 +02:00
|
|
|
# Any user in the realm can access realm-public files
|
|
|
|
return True
|
|
|
|
|
|
|
|
messages = attachment.messages.all()
|
|
|
|
if UserMessage.objects.filter(user_profile=user_profile, message__in=messages).exists():
|
2023-06-19 16:42:11 +02:00
|
|
|
# If it was sent in a direct message or private stream
|
2018-06-05 21:02:02 +02:00
|
|
|
# message, then anyone who received that message can access it.
|
|
|
|
return True
|
|
|
|
|
2018-06-05 21:12:28 +02:00
|
|
|
# The user didn't receive any of the messages that included this
|
|
|
|
# attachment. But they might still have access to it, if it was
|
|
|
|
# sent to a stream they are on where history is public to
|
|
|
|
# subscribers.
|
|
|
|
|
|
|
|
# These are subscriptions to a stream one of the messages was sent to
|
|
|
|
relevant_stream_ids = Subscription.objects.filter(
|
|
|
|
user_profile=user_profile,
|
|
|
|
active=True,
|
|
|
|
recipient__type=Recipient.STREAM,
|
2021-02-12 08:19:30 +01:00
|
|
|
recipient__in=[m.recipient_id for m in messages],
|
|
|
|
).values_list("recipient__type_id", flat=True)
|
2018-06-05 21:12:28 +02:00
|
|
|
if len(relevant_stream_ids) == 0:
|
|
|
|
return False
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
return Stream.objects.filter(
|
|
|
|
id__in=relevant_stream_ids, history_public_to_subscribers=True
|
|
|
|
).exists()
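# Illustrative sketch (not part of Zulip's models): one way a caller might
# interpret the Optional[bool] returned by validate_attachment_request, where
# None means "no such attachment" and False means "access denied". The helper
# name below is hypothetical.
def _example_attachment_access_status(
    maybe_user_profile: Union[UserProfile, AnonymousUser],
    path_id: str,
    realm: Optional[Realm] = None,
) -> int:
    result = validate_attachment_request(maybe_user_profile, path_id, realm)
    if result is None:
        return 404  # no Attachment row matches this path_id
    if not result:
        return 403  # the requester may not access this upload
    return 200  # OK to serve the file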
|
|
|
|
|
2018-06-05 21:02:02 +02:00
|
|
|
|
2022-05-18 22:07:15 +02:00
|
|
|
def get_old_unclaimed_attachments(
|
|
|
|
weeks_ago: int,
|
|
|
|
) -> Tuple[QuerySet[Attachment], QuerySet[ArchivedAttachment]]:
|
2022-05-18 22:29:01 +02:00
|
|
|
"""
|
|
|
|
The logic in this function is fairly tricky. The essence is that
|
|
|
|
a file should be cleaned up if and only if it is not referenced by any
|
2023-05-07 20:04:37 +02:00
|
|
|
Message, ScheduledMessage or ArchivedMessage. The way to find that out is through the
|
2022-05-18 22:29:01 +02:00
|
|
|
Attachment and ArchivedAttachment tables.
|
|
|
|
The queries are complicated by the fact that an uploaded file
|
|
|
|
may have either only an Attachment row, only an ArchivedAttachment row,
|
|
|
|
or both - depending on whether some, all or none of the messages
|
|
|
|
linking to it have been archived.
|
|
|
|
"""
|
2023-11-19 19:45:19 +01:00
|
|
|
delta_weeks_ago = timezone_now() - timedelta(weeks=weeks_ago)
|
2023-05-07 20:04:37 +02:00
|
|
|
|
|
|
|
# The Attachment vs ArchivedAttachment queries are asymmetric because only
|
|
|
|
# Attachment has the scheduled_messages relation.
|
2022-05-18 22:07:15 +02:00
|
|
|
old_attachments = Attachment.objects.annotate(
|
|
|
|
has_other_messages=Exists(
|
|
|
|
ArchivedAttachment.objects.filter(id=OuterRef("id")).exclude(messages=None)
|
|
|
|
)
|
2023-05-07 20:04:37 +02:00
|
|
|
).filter(
|
|
|
|
messages=None,
|
|
|
|
scheduled_messages=None,
|
|
|
|
create_time__lt=delta_weeks_ago,
|
|
|
|
has_other_messages=False,
|
|
|
|
)
|
2022-05-18 22:07:15 +02:00
|
|
|
old_archived_attachments = ArchivedAttachment.objects.annotate(
|
|
|
|
has_other_messages=Exists(
|
retention: Prevent deletion of partially-archived messages.
Previously, this code:
```python3
old_archived_attachments = ArchivedAttachment.objects.annotate(
has_other_messages=Exists(
Attachment.objects.filter(id=OuterRef("id"))
.exclude(messages=None)
.exclude(scheduled_messages=None)
)
).filter(messages=None, create_time__lt=delta_weeks_ago, has_other_messages=False)
```
...protected from removal any ArchivedAttachment objects where there
was an Attachment which had _both_ a message _and_ a scheduled
message, instead of _either_ a message _or_ a scheduled message.
Since files are removed from disk when the ArchivedAttachment rows are
deleted, this meant that if an upload was referenced in two messages,
and one was deleted, the file was permanently deleted when the
ArchivedMessage and ArchivedAttachment were cleaned up, despite being
still referenced in live Messages and Attachments.
Switch from `.exclude(messages=None).exclude(scheduled_messages=None)`
to `.exclude(messages=None, scheduled_messages=None)` which "OR"s
those conditions appropriately.
Pull the relevant test into its own file, and expand it significantly
to cover this, and other, corner cases.
2023-07-28 20:53:07 +02:00
|
|
|
Attachment.objects.filter(id=OuterRef("id")).exclude(
|
|
|
|
messages=None, scheduled_messages=None
|
|
|
|
)
|
2022-05-18 22:07:15 +02:00
|
|
|
)
|
|
|
|
).filter(messages=None, create_time__lt=delta_weeks_ago, has_other_messages=False)
|
|
|
|
|
|
|
|
return old_attachments, old_archived_attachments
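# Illustrative sketch (not part of Zulip's models): a retention job could
# consume the two querysets returned above. Nothing is deleted here; the
# hypothetical helper only counts uploads that would be eligible for cleanup.
def _example_count_unclaimed_uploads(weeks_ago: int = 1) -> Tuple[int, int]:
    old_attachments, old_archived_attachments = get_old_unclaimed_attachments(weeks_ago)
    # Both querysets are lazy; counting them is a cheap way to inspect what the
    # actual cleanup (which also removes files from storage) would touch.
    return old_attachments.count(), old_archived_attachments.count()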
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class Subscription(models.Model):
|
2022-09-13 20:36:47 +02:00
|
|
|
"""Keeps track of which users are part of the
|
|
|
|
audience for a given Recipient object.
|
|
|
|
|
2023-06-19 16:42:11 +02:00
|
|
|
For 1:1 and group direct message Recipient objects, only the
|
2022-09-13 20:36:47 +02:00
|
|
|
user_profile and recipient fields have any meaning, defining the
|
|
|
|
immutable set of users who are in the audience for that Recipient.
|
|
|
|
|
|
|
|
For Recipient objects associated with a Stream, the remaining
|
|
|
|
fields in this model describe the user's subscription to that stream.
|
|
|
|
"""
|
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
recipient = models.ForeignKey(Recipient, on_delete=CASCADE)
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2018-07-25 01:03:19 +02:00
|
|
|
# Whether the user has since unsubscribed. We mark Subscription
|
|
|
|
# objects as inactive, rather than deleting them, when a user
|
2020-03-28 01:25:56 +01:00
|
|
|
# unsubscribes, so we can preserve user customizations like
|
2018-07-25 01:03:19 +02:00
|
|
|
# notification settings, stream color, etc., if the user later
|
|
|
|
# resubscribes.
|
2022-08-15 19:10:58 +02:00
|
|
|
active = models.BooleanField(default=True)
|
2021-02-14 00:03:40 +01:00
|
|
|
# This is a denormalization designed to improve the performance of
|
|
|
|
# bulk queries of Subscription objects. Whether the subscribed user
|
|
|
|
# is active tends to be a key condition in those queries.
|
|
|
|
# We intentionally don't specify a default value to promote thinking
|
|
|
|
# about this explicitly, as in some special cases, such as data import,
|
|
|
|
# we may be creating Subscription objects for a user that's deactivated.
|
2022-08-15 19:10:58 +02:00
|
|
|
is_user_active = models.BooleanField()
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2018-08-02 23:46:05 +02:00
|
|
|
# Whether this user has muted this stream.
|
2022-08-15 19:10:58 +02:00
|
|
|
is_muted = models.BooleanField(default=False)
|
2012-08-29 17:50:36 +02:00
|
|
|
|
2020-04-09 21:51:58 +02:00
|
|
|
DEFAULT_STREAM_COLOR = "#c2c2c2"
|
2022-08-15 19:10:58 +02:00
|
|
|
color = models.CharField(max_length=10, default=DEFAULT_STREAM_COLOR)
|
|
|
|
pin_to_top = models.BooleanField(default=False)
|
2014-02-05 23:00:46 +01:00
|
|
|
|
2019-02-13 10:22:16 +01:00
|
|
|
# These fields are stream-level overrides for the user's default
|
|
|
|
# configuration for notification, configured in UserProfile. The
|
|
|
|
# default, None, means we just inherit the user-level default.
|
2022-08-15 19:10:58 +02:00
|
|
|
desktop_notifications = models.BooleanField(null=True, default=None)
|
|
|
|
audible_notifications = models.BooleanField(null=True, default=None)
|
|
|
|
push_notifications = models.BooleanField(null=True, default=None)
|
|
|
|
email_notifications = models.BooleanField(null=True, default=None)
|
|
|
|
wildcard_mentions_notify = models.BooleanField(null=True, default=None)
|
2014-02-05 23:00:46 +01:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2012-11-07 22:33:38 +01:00
|
|
|
unique_together = ("user_profile", "recipient")
|
2021-02-14 00:10:37 +01:00
|
|
|
indexes = [
|
|
|
|
models.Index(
|
|
|
|
fields=("recipient", "user_profile"),
|
|
|
|
name="zerver_subscription_recipient_id_user_profile_id_idx",
|
|
|
|
condition=Q(active=True, is_user_active=True),
|
|
|
|
),
|
|
|
|
]
|
2012-11-07 22:33:38 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2018-05-11 02:24:34 +02:00
|
|
|
def __str__(self) -> str:
|
2023-03-08 22:18:59 +01:00
|
|
|
return f"{self.user_profile!r} -> {self.recipient!r}"
|
2012-08-28 22:56:21 +02:00
|
|
|
|
2020-02-03 03:21:07 +01:00
|
|
|
# Subscription fields included whenever a Subscription object is provided to
|
|
|
|
# Zulip clients via the API. A few details worth noting:
|
|
|
|
# * These fields will generally be merged with Stream.API_FIELDS
|
|
|
|
# data about the stream.
|
|
|
|
# * "user_profile" is usually implied as full API access to Subscription
|
|
|
|
# is primarily done for the current user; API access to other users'
|
|
|
|
# subscriptions is generally limited to boolean yes/no.
|
|
|
|
# * "id" and "recipient_id" are not included as they are not used
|
|
|
|
# in the Zulip API; they are internal implementation details.
|
|
|
|
# Subscription objects are always looked up in the API via
|
|
|
|
# (user_profile, stream) pairs.
|
|
|
|
# * "active" is often excluded in API use cases where it is implied.
|
|
|
|
# * "is_muted" often needs to be copied to not "in_home_view" for
|
|
|
|
# backwards-compatibility.
|
|
|
|
API_FIELDS = [
|
|
|
|
"audible_notifications",
|
2022-03-12 11:44:34 +01:00
|
|
|
"color",
|
2020-02-03 03:21:07 +01:00
|
|
|
"desktop_notifications",
|
|
|
|
"email_notifications",
|
2022-03-12 11:44:34 +01:00
|
|
|
"is_muted",
|
|
|
|
"pin_to_top",
|
2020-02-03 03:21:07 +01:00
|
|
|
"push_notifications",
|
2022-03-12 11:44:34 +01:00
|
|
|
"wildcard_mentions_notify",
|
2020-02-03 03:21:07 +01:00
|
|
|
]
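# Illustrative sketch (not part of Zulip's models): API_FIELDS is the sort of
# list that can be passed to QuerySet.values() when building per-subscription
# payloads; merging in Stream.API_FIELDS, as described above, would be done by
# the caller. The helper name is hypothetical.
def _example_subscription_payload(user_profile: UserProfile) -> List[Dict[str, Any]]:
    return list(
        Subscription.objects.filter(user_profile=user_profile, active=True).values(
            *Subscription.API_FIELDS
        )
    )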
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2012-09-04 23:20:21 +02:00
|
|
|
class Huddle(models.Model):
|
2022-09-13 20:36:47 +02:00
|
|
|
"""
|
|
|
|
Represents a group of individuals who may have a
|
2023-06-19 16:42:11 +02:00
|
|
|
group direct message conversation together.
|
2022-09-13 20:36:47 +02:00
|
|
|
|
|
|
|
The membership of the Huddle is stored in the Subscription table just like with
|
|
|
|
Streams - for each user in the Huddle, there is a Subscription object
|
|
|
|
tied to the UserProfile and the Huddle's recipient object.
|
|
|
|
|
|
|
|
A hash of the list of user IDs is stored in the huddle_hash field
|
|
|
|
below, to support efficiently mapping from a set of users to the
|
|
|
|
corresponding Huddle object.
|
|
|
|
"""
|
|
|
|
|
2012-09-07 20:14:13 +02:00
|
|
|
# TODO: We should consider whether using
|
|
|
|
# CommaSeparatedIntegerField would be better.
|
2022-08-15 19:10:58 +02:00
|
|
|
huddle_hash = models.CharField(max_length=40, db_index=True, unique=True)
|
2020-03-15 19:05:27 +01:00
|
|
|
# Foreign key to the Recipient object for this Huddle.
|
|
|
|
recipient = models.ForeignKey(Recipient, null=True, on_delete=models.SET_NULL)
|
2012-09-04 23:20:21 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_huddle_hash(id_list: List[int]) -> str:
|
2012-09-05 17:38:09 +02:00
|
|
|
id_list = sorted(set(id_list))
|
2012-09-05 17:41:53 +02:00
|
|
|
hash_key = ",".join(str(x) for x in id_list)
|
2023-07-19 00:44:51 +02:00
|
|
|
return hashlib.sha1(hash_key.encode()).hexdigest()
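# Illustrative sketch (not part of Zulip's models): the hash ignores ordering
# and duplicate IDs, so any set of user IDs maps to one stable 40-character
# hex SHA-1 key.
def _example_huddle_hash_properties() -> None:
    assert get_huddle_hash([3, 1, 2]) == get_huddle_hash([1, 2, 2, 3])
    assert len(get_huddle_hash([3, 1, 2])) == 40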
|
2012-10-20 18:02:58 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-09-27 18:43:07 +02:00
|
|
|
def get_or_create_huddle(id_list: List[int]) -> Huddle:
|
2022-09-13 20:36:47 +02:00
|
|
|
"""
|
|
|
|
Takes a list of user IDs and returns the Huddle object for the
|
|
|
|
group consisting of these users. If the Huddle object does not
|
|
|
|
yet exist, it will be transparently created.
|
|
|
|
"""
|
2012-10-20 18:02:58 +02:00
|
|
|
huddle_hash = get_huddle_hash(id_list)
|
2017-01-06 17:29:41 +01:00
|
|
|
with transaction.atomic():
|
2023-08-09 16:22:16 +02:00
|
|
|
(huddle, created) = Huddle.objects.get_or_create(huddle_hash=huddle_hash)
|
2017-01-06 17:29:41 +01:00
|
|
|
if created:
|
2021-02-12 08:19:30 +01:00
|
|
|
recipient = Recipient.objects.create(type_id=huddle.id, type=Recipient.HUDDLE)
|
2020-03-15 19:05:27 +01:00
|
|
|
huddle.recipient = recipient
|
|
|
|
huddle.save(update_fields=["recipient"])
|
2021-02-12 08:19:30 +01:00
|
|
|
subs_to_create = [
|
2021-02-14 00:03:40 +01:00
|
|
|
Subscription(
|
|
|
|
recipient=recipient,
|
|
|
|
user_profile_id=user_profile_id,
|
|
|
|
is_user_active=is_active,
|
|
|
|
)
|
|
|
|
for user_profile_id, is_active in UserProfile.objects.filter(id__in=id_list)
|
|
|
|
.distinct("id")
|
|
|
|
.values_list("id", "is_active")
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
2013-03-26 18:51:55 +01:00
|
|
|
Subscription.objects.bulk_create(subs_to_create)
|
2017-01-06 17:29:41 +01:00
|
|
|
return huddle
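# Illustrative sketch (not part of Zulip's models): callers pass the full set of
# participant user IDs; repeated calls with the same set, in any order, return
# the same Huddle row, keyed by its huddle_hash.
def _example_direct_message_group(user_ids: List[int]) -> Huddle:
    huddle = get_or_create_huddle(user_ids)
    assert huddle.huddle_hash == get_huddle_hash(user_ids)
    return huddle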
|
2012-09-04 23:20:21 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2012-11-08 23:02:16 +01:00
|
|
|
class UserActivity(models.Model):
|
2021-02-27 14:21:59 +01:00
|
|
|
"""Data table recording the last time each user hit Zulip endpoints
|
|
|
|
via which Clients; unlike UserPresence, these data are not exposed
|
|
|
|
to users via the Zulip API.
|
|
|
|
|
|
|
|
Useful for debugging as well as to answer analytics questions like
|
|
|
|
"How many users have accessed the Zulip mobile app in the last
|
|
|
|
month?" or "Which users/organizations have recently used API
|
|
|
|
endpoint X that is about to be desupported" for communications
|
|
|
|
and database migration purposes.
|
|
|
|
"""
|
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
client = models.ForeignKey(Client, on_delete=CASCADE)
|
|
|
|
query = models.CharField(max_length=50, db_index=True)
|
2012-11-08 23:02:16 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
count = models.IntegerField()
|
|
|
|
last_visit = models.DateTimeField("last visit")
|
2012-11-08 23:02:16 +01:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2012-11-08 23:02:16 +01:00
|
|
|
unique_together = ("user_profile", "client", "query")
|
2012-11-27 18:26:51 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2013-09-06 21:52:12 +02:00
|
|
|
class UserActivityInterval(models.Model):
|
2023-11-19 19:45:19 +01:00
|
|
|
MIN_INTERVAL_LENGTH = timedelta(minutes=15)
|
2017-04-15 07:20:16 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
start = models.DateTimeField("start time", db_index=True)
|
|
|
|
end = models.DateTimeField("end time", db_index=True)
|
2013-09-06 21:52:12 +02:00
|
|
|
|
migrations: Add a (profile,end) index on useractivityinterval.
The `user_activity_interval` worker calls:
```python3
last = UserActivityInterval.objects.filter(user_profile=user_profile).order_by("-end")[0]
```
Which results in a query like:
```sql
SELECT "zerver_useractivityinterval"."id", "zerver_useractivityinterval"."user_profile_id", "zerver_useractivityinterval"."start", "zerver_useractivityinterval"."end" FROM "zerver_useractivityinterval" WHERE "zerver_useractivityinterval"."user_profile_id" = 12345 ORDER BY "zerver_useractivityinterval"."end" DESC LIMIT 1
```
For users which have at least one matching row, this results in a
query plan like:
```
Limit (cost=0.56..711.38 rows=1 width=24) (actual time=0.078..0.078 rows=1 loops=1)
-> Index Scan Backward using zerver_useractivityinterval_7f021a14 on zerver_useractivityinterval (cost=0.56..1031399.46 rows=1451 width=24) (actual time=0.077..0.078 rows=1 loops=1)
Filter: (user_profile_id = 12345)
Rows Removed by Filter: 98
Planning Time: 0.059 ms
Execution Time: 0.088 ms
```
But for users that have just been created, with no matching rows, this
is considerably more expensive:
```
Limit (cost=0.56..711.38 rows=1 width=24) (actual time=10798.146..10798.146 rows=0 loops=1)
-> Index Scan Backward using zerver_useractivityinterval_7f021a14 on zerver_useractivityinterval (cost=0.56..1031399.46 rows=1451 width=24) (actual time=10798.145..10798.145 rows=0 loops=1)
Filter: (user_profile_id = 12345)
Rows Removed by Filter: (count of every single row in the table, redacted)
Planning Time: 0.053 ms
Execution Time: 10798.158 ms
```
Regular vacuuming can force the use of the index on `user_profile_id`
as long as there are few enough users, which is fast -- however, at
some point, the query planner decides that it is insufficiently specific
and always chooses the effectively whole-table scan.
Add an index on `(user_profile_id, end)`, which is expected to be
sufficiently specific that it is used even with large numbers of user
profiles.
Ref #19250.
2021-08-31 20:27:52 +02:00
|
|
|
class Meta:
|
2023-06-11 21:22:36 +02:00
|
|
|
indexes = [
|
|
|
|
models.Index(
|
|
|
|
fields=["user_profile", "end"],
|
|
|
|
name="zerver_useractivityinterval_user_profile_id_end_bb3bfc37_idx",
|
|
|
|
),
|
2021-08-31 20:27:52 +02:00
|
|
|
]
|
|
|
|
|
2017-04-15 07:20:16 +02:00
|
|
|
|
2013-02-08 23:44:15 +01:00
|
|
|
class UserPresence(models.Model):
|
2018-07-25 05:57:10 +02:00
|
|
|
"""A record from the last time we heard from a given user on a given client.
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2021-02-27 14:21:59 +01:00
|
|
|
NOTE: Users can disable updates to this table (see UserProfile.presence_enabled),
|
|
|
|
so this cannot be used to determine if a user was recently active on Zulip.
|
|
|
|
The UserActivity table is recommended for that purpose.
|
|
|
|
|
2018-07-25 05:57:10 +02:00
|
|
|
This is a tricky subsystem, because it is highly optimized. See the docs:
|
2018-07-25 00:29:05 +02:00
|
|
|
https://zulip.readthedocs.io/en/latest/subsystems/presence.html
|
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-11 16:03:47 +02:00
|
|
|
user_profile = models.OneToOneField(UserProfile, on_delete=CASCADE, unique=True)
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2020-06-11 16:03:47 +02:00
|
|
|
# Realm is just here as denormalization to optimize database
|
|
|
|
# queries to fetch all presence data for a given realm.
|
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2020-06-11 16:03:47 +02:00
|
|
|
# The last time the user had a client connected to Zulip,
|
|
|
|
# including idle clients where the user hasn't interacted with the
|
|
|
|
# system recently (and thus might be AFK).
|
2023-04-08 15:52:48 +02:00
|
|
|
last_connected_time = models.DateTimeField(default=timezone_now, db_index=True, null=True)
|
2020-06-11 16:03:47 +02:00
|
|
|
# The last time a client connected to Zulip reported that the user
|
|
|
|
# was actually present (e.g. by focusing a browser window or
|
|
|
|
# interacting with a computer running the desktop app)
|
2023-04-08 15:52:48 +02:00
|
|
|
last_active_time = models.DateTimeField(default=timezone_now, db_index=True, null=True)
|
2020-06-11 16:03:47 +02:00
|
|
|
|
|
|
|
# The following constants are used in the presence API for
|
|
|
|
# communicating whether a user is active (last_active_time recent)
|
|
|
|
# or idle (last_connected_time recent) or offline (neither
|
|
|
|
# recent). They're no longer part of the data model.
|
|
|
|
LEGACY_STATUS_ACTIVE = "active"
|
|
|
|
LEGACY_STATUS_IDLE = "idle"
|
|
|
|
LEGACY_STATUS_ACTIVE_INT = 1
|
|
|
|
LEGACY_STATUS_IDLE_INT = 2
|
2013-02-08 23:44:15 +01:00
|
|
|
|
2023-04-12 22:40:35 +02:00
|
|
|
class Meta:
|
2023-06-11 21:22:36 +02:00
|
|
|
indexes = [
|
|
|
|
models.Index(
|
|
|
|
fields=["realm", "last_active_time"],
|
|
|
|
name="zerver_userpresence_realm_id_last_active_time_1c5aa9a2_idx",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
fields=["realm", "last_connected_time"],
|
|
|
|
name="zerver_userpresence_realm_id_last_connected_time_98d2fc9f_idx",
|
|
|
|
),
|
2023-04-12 22:40:35 +02:00
|
|
|
]
|
|
|
|
|
2013-04-03 22:00:02 +02:00
|
|
|
@staticmethod
|
2018-11-27 20:21:55 +01:00
|
|
|
def status_from_string(status: str) -> Optional[int]:
|
2021-02-12 08:20:45 +01:00
|
|
|
if status == "active":
|
2020-06-11 16:03:47 +02:00
|
|
|
return UserPresence.LEGACY_STATUS_ACTIVE_INT
|
2021-02-12 08:20:45 +01:00
|
|
|
elif status == "idle":
|
2020-06-11 16:03:47 +02:00
|
|
|
return UserPresence.LEGACY_STATUS_IDLE_INT
|
2013-04-03 22:00:02 +02:00
|
|
|
|
2020-06-11 16:03:47 +02:00
|
|
|
return None
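# Illustrative sketch (not part of Zulip's models): how the legacy string
# statuses map onto the integer constants; any other string yields None.
def _example_status_from_string() -> None:
    assert UserPresence.status_from_string("active") == UserPresence.LEGACY_STATUS_ACTIVE_INT
    assert UserPresence.status_from_string("idle") == UserPresence.LEGACY_STATUS_IDLE_INT
    assert UserPresence.status_from_string("offline") is None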
|
2013-04-03 22:00:02 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-23 22:11:21 +02:00
|
|
|
class UserStatus(AbstractEmoji):
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.OneToOneField(UserProfile, on_delete=CASCADE)
|
2018-12-17 16:19:18 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
timestamp = models.DateTimeField()
|
|
|
|
client = models.ForeignKey(Client, on_delete=CASCADE)
|
2018-12-17 16:19:18 +01:00
|
|
|
|
2021-07-23 22:11:21 +02:00
|
|
|
# Override the emoji_name and emoji_code fields of AbstractEmoji to accept a
|
|
|
|
# default value.
|
2022-08-15 19:10:58 +02:00
|
|
|
emoji_name = models.TextField(default="")
|
|
|
|
emoji_code = models.TextField(default="")
|
2018-12-17 16:19:18 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
status_text = models.CharField(max_length=255, default="")
|
2018-12-17 16:19:18 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2012-11-27 18:26:51 +01:00
|
|
|
class DefaultStream(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
|
|
|
stream = models.ForeignKey(Stream, on_delete=CASCADE)
|
2012-11-27 18:26:51 +01:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2012-11-27 18:26:51 +01:00
|
|
|
unique_together = ("realm", "stream")
|
2012-12-01 04:35:59 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-10-12 19:35:14 +02:00
|
|
|
class DefaultStreamGroup(models.Model):
|
|
|
|
MAX_NAME_LENGTH = 60
|
2020-07-02 03:13:26 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True)
|
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
2023-11-14 00:38:06 +01:00
|
|
|
streams = models.ManyToManyField("zerver.Stream")
|
2022-08-15 19:10:58 +02:00
|
|
|
description = models.CharField(max_length=1024, default="")
|
2017-10-12 19:35:14 +02:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2017-10-12 19:35:14 +02:00
|
|
|
unique_together = ("realm", "name")
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def to_dict(self) -> Dict[str, Any]:
|
2021-02-12 08:19:30 +01:00
|
|
|
return dict(
|
|
|
|
name=self.name,
|
|
|
|
id=self.id,
|
|
|
|
description=self.description,
|
2021-03-25 09:37:47 +01:00
|
|
|
streams=[stream.to_dict() for stream in self.streams.all().order_by("name")],
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
|
2017-10-12 19:35:14 +02:00
|
|
|
|
2022-06-23 22:47:50 +02:00
|
|
|
def get_default_stream_groups(realm: Realm) -> QuerySet[DefaultStreamGroup]:
|
2017-10-12 19:35:14 +02:00
|
|
|
return DefaultStreamGroup.objects.filter(realm=realm)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-07-02 21:10:41 +02:00
|
|
|
class AbstractScheduledJob(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
scheduled_timestamp = models.DateTimeField(db_index=True)
|
2017-07-02 21:10:41 +02:00
|
|
|
# JSON representation of arguments to consumer
|
2022-08-15 19:10:58 +02:00
|
|
|
data = models.TextField()
|
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
2017-07-02 21:10:41 +02:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2017-07-02 21:10:41 +02:00
|
|
|
abstract = True
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-07-02 21:10:41 +02:00
|
|
|
class ScheduledEmail(AbstractScheduledJob):
|
2019-01-04 01:50:21 +01:00
|
|
|
# Exactly one of users or address should be set. These are
|
|
|
|
# duplicate values, used to efficiently filter the set of
|
|
|
|
# ScheduledEmails for use in clear_scheduled_emails; the
|
|
|
|
# recipients used for actually sending messages are stored in the
|
|
|
|
# data field of AbstractScheduledJob.
|
2022-08-15 19:10:58 +02:00
|
|
|
users = models.ManyToManyField(UserProfile)
|
2017-07-02 21:10:41 +02:00
|
|
|
# Just the address part of a full "name <address>" email address
|
2022-08-15 19:10:58 +02:00
|
|
|
address = models.EmailField(null=True, db_index=True)
|
2017-07-02 21:10:41 +02:00
|
|
|
|
|
|
|
# Valid types are below
|
|
|
|
WELCOME = 1
|
|
|
|
DIGEST = 2
|
|
|
|
INVITATION_REMINDER = 3
|
2022-08-15 19:10:58 +02:00
|
|
|
type = models.PositiveSmallIntegerField()
|
2013-11-06 00:47:59 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2018-05-11 02:24:34 +02:00
|
|
|
def __str__(self) -> str:
|
2023-03-08 22:18:59 +01:00
|
|
|
return f"{self.type} {self.address or list(self.users.all())} {self.scheduled_timestamp}"
|
2017-09-21 14:58:49 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-12-25 22:28:05 +01:00
|
|
|
class MissedMessageEmailAddress(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE)
|
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
email_token = models.CharField(max_length=34, unique=True, db_index=True)
|
2019-12-25 22:28:05 +01:00
|
|
|
|
|
|
|
# Timestamp of when the missed message address was generated.
|
2022-08-15 19:10:58 +02:00
|
|
|
timestamp = models.DateTimeField(db_index=True, default=timezone_now)
|
2022-02-23 05:32:17 +01:00
|
|
|
# Number of times the missed message address has been used.
|
2022-08-15 19:10:58 +02:00
|
|
|
times_used = models.PositiveIntegerField(default=0, db_index=True)
|
2019-12-25 22:28:05 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2019-12-25 22:28:05 +01:00
|
|
|
def __str__(self) -> str:
|
|
|
|
return settings.EMAIL_GATEWAY_PATTERN % (self.email_token,)
|
|
|
|
|
2019-12-26 13:46:55 +01:00
|
|
|
def increment_times_used(self) -> None:
|
|
|
|
self.times_used += 1
|
|
|
|
self.save(update_fields=["times_used"])
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-09 13:38:12 +02:00
|
|
|
class NotificationTriggers:
|
2023-08-04 19:54:41 +02:00
|
|
|
# "direct_message" is for 1:1 direct messages as well as huddles
|
|
|
|
DIRECT_MESSAGE = "direct_message"
|
2021-07-09 13:38:12 +02:00
|
|
|
MENTION = "mentioned"
|
2023-06-07 19:19:33 +02:00
|
|
|
TOPIC_WILDCARD_MENTION = "topic_wildcard_mentioned"
|
2023-06-03 16:51:38 +02:00
|
|
|
STREAM_WILDCARD_MENTION = "stream_wildcard_mentioned"
|
2021-07-09 13:38:12 +02:00
|
|
|
STREAM_PUSH = "stream_push_notify"
|
|
|
|
STREAM_EMAIL = "stream_email_notify"
|
2023-05-28 17:03:04 +02:00
|
|
|
FOLLOWED_TOPIC_PUSH = "followed_topic_push_notify"
|
2023-05-17 16:01:16 +02:00
|
|
|
FOLLOWED_TOPIC_EMAIL = "followed_topic_email_notify"
|
2023-06-07 19:19:33 +02:00
|
|
|
TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC = "topic_wildcard_mentioned_in_followed_topic"
|
2023-06-03 16:51:38 +02:00
|
|
|
STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC = "stream_wildcard_mentioned_in_followed_topic"
|
2021-07-09 13:38:12 +02:00
|
|
|
|
|
|
|
|
2021-07-07 16:55:25 +02:00
|
|
|
class ScheduledMessageNotificationEmail(models.Model):
|
|
|
|
"""Stores planned outgoing message notification emails. They may be
|
|
|
|
processed earlier should Zulip choose to batch multiple messages
|
|
|
|
in a single email, but typically will be processed just after
|
|
|
|
scheduled_timestamp.
|
|
|
|
"""
|
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE)
|
2021-07-07 16:55:25 +02:00
|
|
|
|
|
|
|
EMAIL_NOTIFICATION_TRIGGER_CHOICES = [
|
2023-08-04 19:54:41 +02:00
|
|
|
(NotificationTriggers.DIRECT_MESSAGE, "Direct message"),
|
2021-07-07 16:55:25 +02:00
|
|
|
(NotificationTriggers.MENTION, "Mention"),
|
2023-06-07 19:19:33 +02:00
|
|
|
(NotificationTriggers.TOPIC_WILDCARD_MENTION, "Topic wildcard mention"),
|
2023-06-03 16:51:38 +02:00
|
|
|
(NotificationTriggers.STREAM_WILDCARD_MENTION, "Stream wildcard mention"),
|
2021-07-07 16:55:25 +02:00
|
|
|
(NotificationTriggers.STREAM_EMAIL, "Stream notifications enabled"),
|
2023-05-17 16:01:16 +02:00
|
|
|
(NotificationTriggers.FOLLOWED_TOPIC_EMAIL, "Followed topic notifications enabled"),
|
2023-06-07 19:19:33 +02:00
|
|
|
(
|
|
|
|
NotificationTriggers.TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC,
|
|
|
|
"Topic wildcard mention in followed topic",
|
|
|
|
),
|
2023-06-03 16:51:38 +02:00
|
|
|
(
|
|
|
|
NotificationTriggers.STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC,
|
|
|
|
"Stream wildcard mention in followed topic",
|
|
|
|
),
|
2021-07-07 16:55:25 +02:00
|
|
|
]
|
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
trigger = models.TextField(choices=EMAIL_NOTIFICATION_TRIGGER_CHOICES)
|
|
|
|
mentioned_user_group = models.ForeignKey(UserGroup, null=True, on_delete=CASCADE)
|
2021-07-07 16:55:25 +02:00
|
|
|
|
|
|
|
# Timestamp for when the notification should be processed and sent.
|
|
|
|
# Calculated from the time the event was received and the batching period.
|
2022-08-15 19:10:58 +02:00
|
|
|
scheduled_timestamp = models.DateTimeField(db_index=True)
|
2021-07-07 16:55:25 +02:00
|
|
|
|
|
|
|
|
2023-05-09 14:36:52 +02:00
|
|
|
class APIScheduledStreamMessageDict(TypedDict):
|
2023-04-20 04:11:24 +02:00
|
|
|
scheduled_message_id: int
|
2023-04-28 17:42:23 +02:00
|
|
|
to: int
|
2023-04-20 04:11:24 +02:00
|
|
|
type: str
|
|
|
|
content: str
|
|
|
|
rendered_content: str
|
|
|
|
topic: str
|
2023-04-20 04:26:41 +02:00
|
|
|
scheduled_delivery_timestamp: int
|
2023-05-11 19:31:13 +02:00
|
|
|
failed: bool
|
2023-04-20 04:11:24 +02:00
|
|
|
|
|
|
|
|
2023-05-09 14:36:52 +02:00
|
|
|
class APIScheduledDirectMessageDict(TypedDict):
|
2023-04-28 17:42:23 +02:00
|
|
|
scheduled_message_id: int
|
|
|
|
to: List[int]
|
|
|
|
type: str
|
|
|
|
content: str
|
|
|
|
rendered_content: str
|
2023-04-20 04:26:41 +02:00
|
|
|
scheduled_delivery_timestamp: int
|
2023-05-11 19:31:13 +02:00
|
|
|
failed: bool
|
2023-04-28 17:42:23 +02:00
|
|
|
|
|
|
|
|
2018-01-01 20:41:24 +01:00
|
|
|
class ScheduledMessage(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
sender = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
recipient = models.ForeignKey(Recipient, on_delete=CASCADE)
|
|
|
|
subject = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH)
|
|
|
|
content = models.TextField()
|
2023-04-14 21:04:19 +02:00
|
|
|
rendered_content = models.TextField()
|
2022-08-15 19:10:58 +02:00
|
|
|
sending_client = models.ForeignKey(Client, on_delete=CASCADE)
|
|
|
|
stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE)
|
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
|
|
|
scheduled_timestamp = models.DateTimeField(db_index=True)
|
2023-12-14 07:41:50 +01:00
|
|
|
read_by_sender = models.BooleanField()
|
2022-08-15 19:10:58 +02:00
|
|
|
delivered = models.BooleanField(default=False)
|
2023-05-09 03:47:07 +02:00
|
|
|
delivered_message = models.ForeignKey(Message, null=True, on_delete=CASCADE)
|
2023-05-07 20:04:37 +02:00
|
|
|
has_attachment = models.BooleanField(default=False, db_index=True)
|
2018-01-01 20:41:24 +01:00
|
|
|
|
2023-05-05 01:23:04 +02:00
|
|
|
# Metadata for messages that failed to send when their scheduled
|
|
|
|
# moment arrived.
|
|
|
|
failed = models.BooleanField(default=False)
|
|
|
|
failure_message = models.TextField(null=True)
|
|
|
|
|
2018-01-12 12:02:47 +01:00
|
|
|
SEND_LATER = 1
|
|
|
|
REMIND = 2
|
|
|
|
|
|
|
|
DELIVERY_TYPES = (
|
2021-02-12 08:20:45 +01:00
|
|
|
(SEND_LATER, "send_later"),
|
|
|
|
(REMIND, "remind"),
|
2018-01-12 12:02:47 +01:00
|
|
|
)
|
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
delivery_type = models.PositiveSmallIntegerField(
|
2021-02-12 08:19:30 +01:00
|
|
|
choices=DELIVERY_TYPES,
|
|
|
|
default=SEND_LATER,
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
)
|
2018-01-12 12:02:47 +01:00
|
|
|
|
2023-05-05 02:38:55 +02:00
|
|
|
class Meta:
|
|
|
|
indexes = [
|
|
|
|
# We expect a large number of delivered scheduled messages
|
|
|
|
# to accumulate over time. This first index is for the
|
|
|
|
# deliver_scheduled_messages worker.
|
|
|
|
models.Index(
|
|
|
|
name="zerver_unsent_scheduled_messages_by_time",
|
|
|
|
fields=["scheduled_timestamp"],
|
|
|
|
condition=Q(
|
|
|
|
delivered=False,
|
|
|
|
failed=False,
|
|
|
|
),
|
|
|
|
),
|
|
|
|
# This index is for displaying scheduled messages to the
|
|
|
|
# user themself via the API; we don't filter failed
|
|
|
|
# messages since we will want to display those so that
|
|
|
|
# failures don't just disappear into a black hole.
|
2023-08-30 21:38:59 +02:00
|
|
|
models.Index(
|
|
|
|
name="zerver_realm_unsent_scheduled_messages_by_user",
|
|
|
|
fields=["realm_id", "sender", "delivery_type", "scheduled_timestamp"],
|
|
|
|
condition=Q(
|
|
|
|
delivered=False,
|
|
|
|
),
|
|
|
|
),
|
2023-05-05 02:38:55 +02:00
|
|
|
]
|
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2023-04-12 22:40:35 +02:00
|
|
|
def __str__(self) -> str:
|
2023-07-16 14:21:27 +02:00
|
|
|
return f"{self.recipient.label()} {self.subject} {self.sender!r} {self.scheduled_timestamp}"
|
2023-04-12 22:40:35 +02:00
|
|
|
|
2018-11-01 15:31:55 +01:00
|
|
|
def topic_name(self) -> str:
|
|
|
|
return self.subject
|
|
|
|
|
|
|
|
def set_topic_name(self, topic_name: str) -> None:
|
|
|
|
self.subject = topic_name
|
|
|
|
|
2023-05-07 20:04:37 +02:00
|
|
|
def is_stream_message(self) -> bool:
|
|
|
|
return self.recipient.type == Recipient.STREAM
|
|
|
|
|
2023-05-09 14:36:52 +02:00
|
|
|
def to_dict(self) -> Union[APIScheduledStreamMessageDict, APIScheduledDirectMessageDict]:
|
2023-04-20 04:11:24 +02:00
|
|
|
recipient, recipient_type_str = get_recipient_ids(self.recipient, self.sender.id)
|
|
|
|
|
2023-04-28 17:42:23 +02:00
|
|
|
if recipient_type_str == "private":
|
|
|
|
# The topic for direct messages should always be an empty string.
|
|
|
|
assert self.topic_name() == ""
|
|
|
|
|
2023-05-09 14:36:52 +02:00
|
|
|
return APIScheduledDirectMessageDict(
|
2023-04-28 17:42:23 +02:00
|
|
|
scheduled_message_id=self.id,
|
|
|
|
to=recipient,
|
|
|
|
type=recipient_type_str,
|
|
|
|
content=self.content,
|
|
|
|
rendered_content=self.rendered_content,
|
2023-04-20 04:26:41 +02:00
|
|
|
scheduled_delivery_timestamp=datetime_to_timestamp(self.scheduled_timestamp),
|
2023-05-11 19:31:13 +02:00
|
|
|
failed=self.failed,
|
2023-04-28 17:42:23 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
# The recipient for stream messages should always just be the unique stream ID.
|
|
|
|
assert len(recipient) == 1
|
|
|
|
|
2023-05-09 14:36:52 +02:00
|
|
|
return APIScheduledStreamMessageDict(
|
2023-04-20 04:11:24 +02:00
|
|
|
scheduled_message_id=self.id,
|
2023-04-28 17:42:23 +02:00
|
|
|
to=recipient[0],
|
2023-04-20 04:11:24 +02:00
|
|
|
type=recipient_type_str,
|
|
|
|
content=self.content,
|
|
|
|
rendered_content=self.rendered_content,
|
|
|
|
topic=self.topic_name(),
|
2023-04-20 04:26:41 +02:00
|
|
|
scheduled_delivery_timestamp=datetime_to_timestamp(self.scheduled_timestamp),
|
2023-05-11 19:31:13 +02:00
|
|
|
failed=self.failed,
|
2023-04-20 04:11:24 +02:00
|
|
|
)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-07-02 21:10:41 +02:00
|
|
|
EMAIL_TYPES = {
|
2023-07-18 11:44:27 +02:00
|
|
|
"account_registered": ScheduledEmail.WELCOME,
|
2023-07-18 11:50:12 +02:00
|
|
|
"onboarding_zulip_topics": ScheduledEmail.WELCOME,
|
2023-03-15 20:18:09 +01:00
|
|
|
"onboarding_zulip_guide": ScheduledEmail.WELCOME,
|
2023-04-24 17:46:49 +02:00
|
|
|
"onboarding_team_to_zulip": ScheduledEmail.WELCOME,
|
2021-02-12 08:20:45 +01:00
|
|
|
"digest": ScheduledEmail.DIGEST,
|
|
|
|
"invitation_reminder": ScheduledEmail.INVITATION_REMINDER,
|
2017-07-02 21:10:41 +02:00
|
|
|
}
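# Illustrative sketch (not part of Zulip's models): EMAIL_TYPES maps email
# template names used by the scheduling code onto ScheduledEmail type
# constants; note that several onboarding templates share the WELCOME type.
def _example_email_type_lookup() -> None:
    assert EMAIL_TYPES["digest"] == ScheduledEmail.DIGEST
    assert EMAIL_TYPES["account_registered"] == ScheduledEmail.WELCOME
    assert EMAIL_TYPES["onboarding_zulip_guide"] == ScheduledEmail.WELCOME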
|
2017-02-15 04:35:10 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-10-03 02:01:36 +02:00
|
|
|
class AbstractRealmAuditLog(models.Model):
|
|
|
|
"""Defines fields common to RealmAuditLog and RemoteRealmAuditLog."""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
event_time = models.DateTimeField(db_index=True)
|
2017-03-30 05:20:36 +02:00
|
|
|
# If True, event_time is an overestimate of the true time. Can be used
|
|
|
|
# by migrations when introducing a new event_type.
|
2022-08-15 19:10:58 +02:00
|
|
|
backfilled = models.BooleanField(default=False)
|
2019-10-05 02:36:16 +02:00
|
|
|
|
|
|
|
# Keys within extra_data, when extra_data is a json dict. Keys are strings because
|
|
|
|
# json keys must always be strings.
|
2021-02-12 08:20:45 +01:00
|
|
|
OLD_VALUE = "1"
|
|
|
|
NEW_VALUE = "2"
|
|
|
|
ROLE_COUNT = "10"
|
|
|
|
ROLE_COUNT_HUMANS = "11"
|
|
|
|
ROLE_COUNT_BOTS = "12"
|
2017-01-24 01:48:35 +01:00
|
|
|
|
2023-07-13 19:46:06 +02:00
|
|
|
extra_data = models.JSONField(default=dict, encoder=DjangoJSONEncoder)
|
2019-09-26 03:20:56 +02:00
|
|
|
|
2019-10-08 06:05:22 +02:00
|
|
|
# Event types
|
2019-09-26 03:20:56 +02:00
|
|
|
USER_CREATED = 101
|
|
|
|
USER_ACTIVATED = 102
|
|
|
|
USER_DEACTIVATED = 103
|
|
|
|
USER_REACTIVATED = 104
|
2019-10-05 02:36:16 +02:00
|
|
|
USER_ROLE_CHANGED = 105
|
2021-12-29 19:14:34 +01:00
|
|
|
USER_DELETED = 106
|
2022-04-16 23:58:44 +02:00
|
|
|
USER_DELETED_PRESERVING_MESSAGES = 107
|
2019-09-26 03:20:56 +02:00
|
|
|
|
|
|
|
USER_SOFT_ACTIVATED = 120
|
|
|
|
USER_SOFT_DEACTIVATED = 121
|
|
|
|
USER_PASSWORD_CHANGED = 122
|
|
|
|
USER_AVATAR_SOURCE_CHANGED = 123
|
|
|
|
USER_FULL_NAME_CHANGED = 124
|
|
|
|
USER_EMAIL_CHANGED = 125
|
2021-12-07 02:23:24 +01:00
|
|
|
USER_TERMS_OF_SERVICE_VERSION_CHANGED = 126
|
2019-09-26 03:20:56 +02:00
|
|
|
USER_API_KEY_CHANGED = 127
|
|
|
|
USER_BOT_OWNER_CHANGED = 128
|
2020-07-12 23:45:50 +02:00
|
|
|
USER_DEFAULT_SENDING_STREAM_CHANGED = 129
|
|
|
|
USER_DEFAULT_REGISTER_STREAM_CHANGED = 130
|
|
|
|
USER_DEFAULT_ALL_PUBLIC_STREAMS_CHANGED = 131
|
2021-09-07 19:33:26 +02:00
|
|
|
USER_SETTING_CHANGED = 132
|
2020-11-12 12:11:35 +01:00
|
|
|
USER_DIGEST_EMAIL_CREATED = 133
|
2019-09-26 03:20:56 +02:00
|
|
|
|
|
|
|
REALM_DEACTIVATED = 201
|
|
|
|
REALM_REACTIVATED = 202
|
|
|
|
REALM_SCRUBBED = 203
|
|
|
|
REALM_PLAN_TYPE_CHANGED = 204
|
|
|
|
REALM_LOGO_CHANGED = 205
|
|
|
|
REALM_EXPORTED = 206
|
2020-06-29 15:34:19 +02:00
|
|
|
REALM_PROPERTY_CHANGED = 207
|
2020-07-11 20:59:52 +02:00
|
|
|
REALM_ICON_SOURCE_CHANGED = 208
|
2020-12-04 11:08:10 +01:00
|
|
|
REALM_DISCOUNT_CHANGED = 209
|
2020-12-04 11:16:33 +01:00
|
|
|
REALM_SPONSORSHIP_APPROVED = 210
|
2023-12-01 13:19:04 +01:00
|
|
|
REALM_BILLING_MODALITY_CHANGED = 211
|
2020-12-04 11:46:51 +01:00
|
|
|
REALM_REACTIVATION_EMAIL_SENT = 212
|
2020-12-04 12:14:51 +01:00
|
|
|
REALM_SPONSORSHIP_PENDING_STATUS_CHANGED = 213
|
2020-12-06 20:04:33 +01:00
|
|
|
REALM_SUBDOMAIN_CHANGED = 214
|
2021-04-20 12:29:19 +02:00
|
|
|
REALM_CREATED = 215
|
2021-07-21 13:40:46 +02:00
|
|
|
REALM_DEFAULT_USER_SETTINGS_CHANGED = 216
|
2021-10-07 21:30:54 +02:00
|
|
|
REALM_ORG_TYPE_CHANGED = 217
|
2022-03-07 14:49:16 +01:00
|
|
|
REALM_DOMAIN_ADDED = 218
|
2022-03-07 15:19:13 +01:00
|
|
|
REALM_DOMAIN_CHANGED = 219
|
2022-03-07 15:35:17 +01:00
|
|
|
REALM_DOMAIN_REMOVED = 220
|
2022-03-11 15:16:04 +01:00
|
|
|
REALM_PLAYGROUND_ADDED = 221
|
2022-03-11 15:40:42 +01:00
|
|
|
REALM_PLAYGROUND_REMOVED = 222
|
2022-03-14 11:50:24 +01:00
|
|
|
REALM_LINKIFIER_ADDED = 223
|
2022-03-14 12:10:25 +01:00
|
|
|
REALM_LINKIFIER_CHANGED = 224
|
2022-03-14 14:50:55 +01:00
|
|
|
REALM_LINKIFIER_REMOVED = 225
|
2022-04-06 18:34:07 +02:00
|
|
|
REALM_EMOJI_ADDED = 226
|
2022-04-07 12:24:30 +02:00
|
|
|
REALM_EMOJI_REMOVED = 227
|
2023-08-10 04:09:25 +02:00
|
|
|
REALM_LINKIFIERS_REORDERED = 228
|
2023-12-11 23:26:38 +01:00
|
|
|
REALM_IMPORTED = 229
|
2019-09-26 03:20:56 +02:00
|
|
|
|
|
|
|
SUBSCRIPTION_CREATED = 301
|
|
|
|
SUBSCRIPTION_ACTIVATED = 302
|
|
|
|
SUBSCRIPTION_DEACTIVATED = 303
|
2020-07-12 23:48:08 +02:00
|
|
|
SUBSCRIPTION_PROPERTY_CHANGED = 304
|
2019-09-26 03:20:56 +02:00
|
|
|
|
2021-04-10 19:32:22 +02:00
|
|
|
USER_MUTED = 350
|
|
|
|
USER_UNMUTED = 351
|
|
|
|
|
2019-09-26 03:20:56 +02:00
|
|
|
STRIPE_CUSTOMER_CREATED = 401
|
|
|
|
STRIPE_CARD_CHANGED = 402
|
|
|
|
STRIPE_PLAN_CHANGED = 403
|
|
|
|
STRIPE_PLAN_QUANTITY_RESET = 404
|
|
|
|
|
|
|
|
CUSTOMER_CREATED = 501
|
|
|
|
CUSTOMER_PLAN_CREATED = 502
|
2020-06-15 20:09:24 +02:00
|
|
|
CUSTOMER_SWITCHED_FROM_MONTHLY_TO_ANNUAL_PLAN = 503
|
2023-11-20 13:01:25 +01:00
|
|
|
CUSTOMER_SWITCHED_FROM_ANNUAL_TO_MONTHLY_PLAN = 504
|
2019-09-26 03:20:56 +02:00
|
|
|
|
2020-06-29 23:31:25 +02:00
|
|
|
STREAM_CREATED = 601
|
2020-06-29 15:02:07 +02:00
|
|
|
STREAM_DEACTIVATED = 602
|
2020-07-12 23:39:54 +02:00
|
|
|
STREAM_NAME_CHANGED = 603
|
2021-10-21 12:32:07 +02:00
|
|
|
STREAM_REACTIVATED = 604
|
2021-12-07 20:47:49 +01:00
|
|
|
STREAM_MESSAGE_RETENTION_DAYS_CHANGED = 605
|
2021-12-14 21:00:45 +01:00
|
|
|
STREAM_PROPERTY_CHANGED = 607
|
2023-02-17 12:46:14 +01:00
|
|
|
STREAM_GROUP_BASED_SETTING_CHANGED = 608
|
2020-06-29 23:31:25 +02:00
|
|
|
|
2022-11-21 04:48:09 +01:00
|
|
|
USER_GROUP_CREATED = 701
|
|
|
|
USER_GROUP_DELETED = 702
|
|
|
|
USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED = 703
|
|
|
|
USER_GROUP_DIRECT_USER_MEMBERSHIP_REMOVED = 704
|
|
|
|
USER_GROUP_DIRECT_SUBGROUP_MEMBERSHIP_ADDED = 705
|
|
|
|
USER_GROUP_DIRECT_SUBGROUP_MEMBERSHIP_REMOVED = 706
|
|
|
|
USER_GROUP_DIRECT_SUPERGROUP_MEMBERSHIP_ADDED = 707
|
|
|
|
USER_GROUP_DIRECT_SUPERGROUP_MEMBERSHIP_REMOVED = 708
|
|
|
|
# 709 to 719 reserved for membership changes
|
|
|
|
USER_GROUP_NAME_CHANGED = 720
|
|
|
|
USER_GROUP_DESCRIPTION_CHANGED = 721
|
2023-06-05 22:34:52 +02:00
|
|
|
USER_GROUP_GROUP_BASED_SETTING_CHANGED = 722
|
2022-11-21 04:48:09 +01:00
|
|
|
|
2021-12-01 17:31:08 +01:00
|
|
|
# The following values are only for RemoteZulipServerAuditLog
|
2021-12-15 18:53:58 +01:00
|
|
|
# Values should be exactly 10000 greater than the corresponding
|
|
|
|
# value used for the same purpose in RealmAuditLog (e.g.
|
|
|
|
# REALM_DEACTIVATED = 201, and REMOTE_SERVER_DEACTIVATED = 10201).
|
|
|
|
REMOTE_SERVER_DEACTIVATED = 10201
|
2023-11-09 20:40:42 +01:00
|
|
|
REMOTE_SERVER_PLAN_TYPE_CHANGED = 10204
|
|
|
|
REMOTE_SERVER_DISCOUNT_CHANGED = 10209
|
|
|
|
REMOTE_SERVER_SPONSORSHIP_APPROVED = 10210
|
2023-12-01 13:19:04 +01:00
|
|
|
REMOTE_SERVER_BILLING_MODALITY_CHANGED = 10211
|
2023-11-09 20:40:42 +01:00
|
|
|
REMOTE_SERVER_SPONSORSHIP_PENDING_STATUS_CHANGED = 10213
|
|
|
|
REMOTE_SERVER_CREATED = 10215
|
2021-12-01 17:31:08 +01:00
|
|
|
|
2023-11-08 20:02:10 +01:00
|
|
|
# This value is for RemoteRealmAuditLog entries tracking changes to the
|
|
|
|
# RemoteRealm model resulting from modified realm information sent to us
|
2023-12-11 14:24:13 +01:00
|
|
|
# via send_server_data_to_push_bouncer.
|
2023-11-08 20:02:10 +01:00
|
|
|
REMOTE_REALM_VALUE_UPDATED = 20001
|
2023-12-11 18:00:42 +01:00
|
|
|
REMOTE_PLAN_TRANSFERRED_SERVER_TO_REALM = 20002
|
2023-12-15 16:01:04 +01:00
|
|
|
REMOTE_REALM_LOCALLY_DELETED = 20003
|
2023-11-08 20:02:10 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
event_type = models.PositiveSmallIntegerField()
|
2018-06-28 00:48:51 +02:00
|
|
|
|
2020-06-09 00:58:42 +02:00
|
|
|
# event_types synced from on-prem installations to Zulip Cloud when
|
2019-10-08 06:05:22 +02:00
|
|
|
# billing for mobile push notifications is enabled. Every billing
|
|
|
|
# event_type should have ROLE_COUNT populated in extra_data.
|
|
|
|
SYNCED_BILLING_EVENTS = [
|
2021-02-12 08:19:30 +01:00
|
|
|
USER_CREATED,
|
|
|
|
USER_ACTIVATED,
|
|
|
|
USER_DEACTIVATED,
|
|
|
|
USER_REACTIVATED,
|
|
|
|
USER_ROLE_CHANGED,
|
|
|
|
REALM_DEACTIVATED,
|
|
|
|
REALM_REACTIVATED,
|
2023-12-11 23:26:38 +01:00
|
|
|
REALM_IMPORTED,
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
2019-10-08 06:05:22 +02:00
|
|
|
|
2019-10-03 02:01:36 +02:00
|
|
|
class Meta:
|
|
|
|
abstract = True
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-10-03 02:01:36 +02:00
|
|
|
class RealmAuditLog(AbstractRealmAuditLog):
|
|
|
|
"""
|
|
|
|
RealmAuditLog tracks important changes to users, streams, and
|
|
|
|
realms in Zulip. It is intended to support both
|
|
|
|
debugging/introspection (e.g. determining when a user left a
|
|
|
|
given stream) as well as help with some database migrations where
|
|
|
|
we might be able to do a better data backfill with it. Here are a
|
|
|
|
few key details about how this works:
|
|
|
|
|
|
|
|
* acting_user is the user who initiated the state change
|
|
|
|
* modified_user (if present) is the user being modified
|
|
|
|
* modified_stream (if present) is the stream being modified
|
2022-11-21 04:48:09 +01:00
|
|
|
* modified_user_group (if present) is the user group being modified
|
2019-10-03 02:01:36 +02:00
|
|
|
|
|
|
|
For example:
|
|
|
|
* When a user subscribes another user to a stream, modified_user,
|
|
|
|
acting_user, and modified_stream will all be present and different.
|
|
|
|
* When an administrator changes an organization's realm icon,
|
2022-11-21 04:48:09 +01:00
|
|
|
acting_user is that administrator and modified_user,
|
|
|
|
modified_stream and modified_user_group will be None.
|
2019-10-03 02:01:36 +02:00
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
|
|
|
acting_user = models.ForeignKey(
|
2021-02-12 08:19:30 +01:00
|
|
|
UserProfile,
|
|
|
|
null=True,
|
|
|
|
related_name="+",
|
|
|
|
on_delete=CASCADE,
|
2020-04-22 01:09:50 +02:00
|
|
|
)
|
2022-08-15 19:10:58 +02:00
|
|
|
modified_user = models.ForeignKey(
|
2021-02-12 08:19:30 +01:00
|
|
|
UserProfile,
|
|
|
|
null=True,
|
|
|
|
related_name="+",
|
|
|
|
on_delete=CASCADE,
|
2020-04-22 01:09:50 +02:00
|
|
|
)
|
2022-08-15 19:10:58 +02:00
|
|
|
modified_stream = models.ForeignKey(
|
2021-02-12 08:19:30 +01:00
|
|
|
Stream,
|
|
|
|
null=True,
|
|
|
|
on_delete=CASCADE,
|
2020-04-22 01:09:50 +02:00
|
|
|
)
|
2022-11-21 04:48:09 +01:00
|
|
|
modified_user_group = models.ForeignKey(
|
|
|
|
UserGroup,
|
|
|
|
null=True,
|
|
|
|
on_delete=CASCADE,
|
|
|
|
)
|
2021-07-17 12:25:08 +02:00
|
|
|
event_last_message_id = models.IntegerField(null=True)
|
2019-10-03 02:01:36 +02:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2017-11-27 07:33:05 +01:00
|
|
|
def __str__(self) -> str:
|
2017-09-22 16:09:37 +02:00
|
|
|
if self.modified_user is not None:
|
2023-03-08 22:18:59 +01:00
|
|
|
return f"{self.modified_user!r} {self.event_type} {self.event_time} {self.id}"
|
2017-09-22 16:09:37 +02:00
|
|
|
if self.modified_stream is not None:
|
2023-03-08 22:18:59 +01:00
|
|
|
return f"{self.modified_stream!r} {self.event_type} {self.event_time} {self.id}"
|
2022-11-21 04:48:09 +01:00
|
|
|
if self.modified_user_group is not None:
|
|
|
|
return f"{self.modified_user_group!r} {self.event_type} {self.event_time} {self.id}"
|
2023-03-08 22:18:59 +01:00
|
|
|
return f"{self.realm!r} {self.event_type} {self.event_time} {self.id}"
|
2017-09-22 16:09:37 +02:00
|
|
|
|
2023-04-28 21:08:21 +02:00
|
|
|
class Meta:
|
|
|
|
indexes = [
|
|
|
|
models.Index(
|
|
|
|
name="zerver_realmauditlog_user_subscriptions_idx",
|
|
|
|
fields=["modified_user", "modified_stream"],
|
|
|
|
condition=Q(
|
|
|
|
event_type__in=[
|
|
|
|
AbstractRealmAuditLog.SUBSCRIPTION_CREATED,
|
|
|
|
AbstractRealmAuditLog.SUBSCRIPTION_ACTIVATED,
|
|
|
|
AbstractRealmAuditLog.SUBSCRIPTION_DEACTIVATED,
|
|
|
|
]
|
|
|
|
),
|
|
|
|
)
|
|
|
|
]
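
    # Illustrative note (not part of the original module): the partial index
    # above is meant to serve subscription-history lookups of roughly this
    # shape, assuming the usual query pattern against this table:
    #
    #   RealmAuditLog.objects.filter(
    #       modified_user=user_profile,
    #       modified_stream=stream,
    #       event_type__in=[
    #           AbstractRealmAuditLog.SUBSCRIPTION_CREATED,
    #           AbstractRealmAuditLog.SUBSCRIPTION_ACTIVATED,
    #           AbstractRealmAuditLog.SUBSCRIPTION_DEACTIVATED,
    #       ],
    #   )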


class OnboardingStep(models.Model):
    user = models.ForeignKey(UserProfile, on_delete=CASCADE)
    onboarding_step = models.CharField(max_length=30)
    timestamp = models.DateTimeField(default=timezone_now)

    class Meta:
        unique_together = ("user", "onboarding_step")


def check_valid_user_ids(realm_id: int, val: object, allow_deactivated: bool = False) -> List[int]:
    user_ids = check_list(check_int)("User IDs", val)
    realm = Realm.objects.get(id=realm_id)
    for user_id in user_ids:
        # TODO: Structurally, we should be doing a bulk fetch query to
        # get the users here, not doing these in a loop. But because
        # this is a rarely used feature and likely to never have more
        # than a handful of users, it's probably mostly OK.
        try:
            user_profile = get_user_profile_by_id_in_realm(user_id, realm)
        except UserProfile.DoesNotExist:
            raise ValidationError(_("Invalid user ID: {user_id}").format(user_id=user_id))

        if not allow_deactivated and not user_profile.is_active:
            raise ValidationError(
                _("User with ID {user_id} is deactivated").format(user_id=user_id)
            )

        if user_profile.is_bot:
            raise ValidationError(_("User with ID {user_id} is a bot").format(user_id=user_id))

    return user_ids
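

# Illustrative sketch (not part of the original module): the bulk-fetch
# approach suggested by the TODO in check_valid_user_ids could look roughly
# like the following, assuming the same validation rules:
#
#     profiles_by_id = {
#         profile.id: profile
#         for profile in UserProfile.objects.filter(id__in=user_ids, realm=realm)
#     }
#     for user_id in user_ids:
#         user_profile = profiles_by_id.get(user_id)
#         if user_profile is None:
#             raise ValidationError(_("Invalid user ID: {user_id}").format(user_id=user_id))
#         ...  # then the same deactivated/bot checks as in the loop above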


class CustomProfileField(models.Model):
    """Defines a form field for the per-realm custom profile fields feature.

    See CustomProfileFieldValue for an individual user's values for one of
    these fields.
    """

    HINT_MAX_LENGTH = 80
    NAME_MAX_LENGTH = 40
    MAX_DISPLAY_IN_PROFILE_SUMMARY_FIELDS = 2

    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    name = models.CharField(max_length=NAME_MAX_LENGTH)
    hint = models.CharField(max_length=HINT_MAX_LENGTH, default="")

    # Sort order for display of custom profile fields.
    order = models.IntegerField(default=0)

    # Whether the field should be displayed in smaller summary
    # sections of a page displaying custom profile fields.
    display_in_profile_summary = models.BooleanField(default=False)

    SHORT_TEXT = 1
    LONG_TEXT = 2
    SELECT = 3
    DATE = 4
    URL = 5
    USER = 6
    EXTERNAL_ACCOUNT = 7
    PRONOUNS = 8

    # These are the field types whose validators require more than the
    # var_name and value arguments: SELECT also requires field_data, and
    # USER also requires realm.
    SELECT_FIELD_TYPE_DATA: List[ExtendedFieldElement] = [
        (SELECT, gettext_lazy("List of options"), validate_select_field, str, "SELECT"),
    ]
    USER_FIELD_TYPE_DATA: List[UserFieldElement] = [
        (USER, gettext_lazy("Person picker"), check_valid_user_ids, orjson.loads, "USER"),
    ]

    SELECT_FIELD_VALIDATORS: Dict[int, ExtendedValidator] = {
        item[0]: item[2] for item in SELECT_FIELD_TYPE_DATA
    }
    USER_FIELD_VALIDATORS: Dict[int, RealmUserValidator] = {
        item[0]: item[2] for item in USER_FIELD_TYPE_DATA
    }

    FIELD_TYPE_DATA: List[FieldElement] = [
        # Type, display name, validator, converter, keyword
        (SHORT_TEXT, gettext_lazy("Short text"), check_short_string, str, "SHORT_TEXT"),
        (LONG_TEXT, gettext_lazy("Long text"), check_long_string, str, "LONG_TEXT"),
        (DATE, gettext_lazy("Date picker"), check_date, str, "DATE"),
        (URL, gettext_lazy("Link"), check_url, str, "URL"),
        (
            EXTERNAL_ACCOUNT,
            gettext_lazy("External account"),
            check_short_string,
            str,
            "EXTERNAL_ACCOUNT",
        ),
        (PRONOUNS, gettext_lazy("Pronouns"), check_short_string, str, "PRONOUNS"),
    ]

    ALL_FIELD_TYPES = [*FIELD_TYPE_DATA, *SELECT_FIELD_TYPE_DATA, *USER_FIELD_TYPE_DATA]

    FIELD_VALIDATORS: Dict[int, Validator[ProfileDataElementValue]] = {
        item[0]: item[2] for item in FIELD_TYPE_DATA
    }
    FIELD_CONVERTERS: Dict[int, Callable[[Any], Any]] = {
        item[0]: item[3] for item in ALL_FIELD_TYPES
    }
    FIELD_TYPE_CHOICES: List[Tuple[int, StrPromise]] = [
        (item[0], item[1]) for item in ALL_FIELD_TYPES
    ]
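
    # Illustrative usage (not part of the original module): validators and
    # converters are dispatched by field type, e.g. for a SHORT_TEXT field:
    #
    #   validator = CustomProfileField.FIELD_VALIDATORS[CustomProfileField.SHORT_TEXT]
    #   validator("field value", raw_value)  # validates raw_value (assumed to raise on error)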

    field_type = models.PositiveSmallIntegerField(
        choices=FIELD_TYPE_CHOICES,
        default=SHORT_TEXT,
    )

    # A JSON blob of any additional data needed to define the field beyond
    # type/name/hint.
    #
    # The format depends on the type. Field types SHORT_TEXT, LONG_TEXT,
    # DATE, URL, and USER leave this empty. Fields of type SELECT store the
    # choices' descriptions.
    #
    # Note: There is no performance overhead of using TextField in PostgreSQL.
    # See https://www.postgresql.org/docs/9.0/static/datatype-character.html
    field_data = models.TextField(default="")
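
    # Illustrative example (assumed shape, not part of the original module):
    # for a SELECT field, field_data might contain a JSON object along the
    # lines of:
    #
    #   {"0": {"text": "Vim", "order": "1"}, "1": {"text": "Emacs", "order": "2"}}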

    class Meta:
        unique_together = ("realm", "name")

    @override
    def __str__(self) -> str:
        return f"{self.realm!r} {self.name} {self.field_type} {self.order}"

    def as_dict(self) -> ProfileDataElementBase:
        data_as_dict: ProfileDataElementBase = {
            "id": self.id,
            "name": self.name,
            "type": self.field_type,
            "hint": self.hint,
            "field_data": self.field_data,
            "order": self.order,
        }
        if self.display_in_profile_summary:
            data_as_dict["display_in_profile_summary"] = True

        return data_as_dict

    def is_renderable(self) -> bool:
        if self.field_type in [CustomProfileField.SHORT_TEXT, CustomProfileField.LONG_TEXT]:
            return True
        return False


def custom_profile_fields_for_realm(realm_id: int) -> QuerySet[CustomProfileField]:
    return CustomProfileField.objects.filter(realm=realm_id).order_by("order")


class CustomProfileFieldValue(models.Model):
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    field = models.ForeignKey(CustomProfileField, on_delete=CASCADE)
    value = models.TextField()
    rendered_value = models.TextField(null=True, default=None)

    class Meta:
        unique_together = ("user_profile", "field")

    @override
    def __str__(self) -> str:
        return f"{self.user_profile!r} {self.field!r} {self.value}"


# Interfaces for services.
#
# These provide additional functionality, such as parsing a message to
# determine the query URL and the data to send to that URL, and parsing
# the response.
GENERIC_INTERFACE = "GenericService"
SLACK_INTERFACE = "SlackOutgoingWebhookService"


# A Service corresponds to either an outgoing webhook bot or an embedded bot.
# The type of Service is determined by the bot_type field of the referenced
# UserProfile.
#
# If the Service is an outgoing webhook bot:
# - name is any human-readable identifier for the Service
# - base_url is the address of the third-party site
# - token is used for authentication with the third-party site
#
# If the Service is an embedded bot:
# - name is the canonical name for the type of bot (e.g. 'xkcd' for an instance
#   of the xkcd bot); multiple embedded bots can have the same name, but all
#   embedded bots with the same name will run the same code
# - base_url and token are currently unused
class Service(models.Model):
    name = models.CharField(max_length=UserProfile.MAX_NAME_LENGTH)
    # Bot user corresponding to the Service. The bot_type of this user
    # determines the type of service. If non-bot services are added later,
    # user_profile can also represent the owner of the Service.
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    base_url = models.TextField()
    token = models.TextField()
    # Interface / API version of the service.
    interface = models.PositiveSmallIntegerField(default=1)

    # Valid interfaces are {generic, zulip_bot_service, slack}
    GENERIC = 1
    SLACK = 2

    ALLOWED_INTERFACE_TYPES = [
        GENERIC,
        SLACK,
    ]
    # N.B. If we used Django's choice=... we would get this for free (kinda)
    _interfaces: Dict[int, str] = {
        GENERIC: GENERIC_INTERFACE,
        SLACK: SLACK_INTERFACE,
    }

    def interface_name(self) -> str:
        # Raises KeyError if invalid
        return self._interfaces[self.interface]
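
    # Illustrative usage (not part of the original module), assuming a Service
    # row configured with the GENERIC interface; "my-service" and
    # bot_profile_id are hypothetical:
    #
    #   service = Service.objects.get(user_profile_id=bot_profile_id, name="my-service")
    #   service.interface_name()  # -> "GenericService"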


def get_bot_services(user_profile_id: int) -> List[Service]:
    return list(Service.objects.filter(user_profile_id=user_profile_id))


def get_service_profile(user_profile_id: int, service_name: str) -> Service:
    return Service.objects.get(user_profile_id=user_profile_id, name=service_name)


class BotStorageData(models.Model):
    bot_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    key = models.TextField(db_index=True)
    value = models.TextField()

    class Meta:
        unique_together = ("bot_profile", "key")


class BotConfigData(models.Model):
    bot_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    key = models.TextField(db_index=True)
    value = models.TextField()

    class Meta:
        unique_together = ("bot_profile", "key")


class AlertWord(models.Model):
    # Realm isn't necessary, but it's a nice denormalization. Users
    # never move to another realm, so it's static, and having Realm
    # here optimizes the main query on this table, which is fetching
    # all the alert words in a realm.
    realm = models.ForeignKey(Realm, db_index=True, on_delete=CASCADE)
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    # Case-insensitive name for the alert word.
    word = models.TextField()

    class Meta:
        unique_together = ("user_profile", "word")


def flush_realm_alert_words(realm_id: int) -> None:
    cache_delete(realm_alert_words_cache_key(realm_id))
    cache_delete(realm_alert_words_automaton_cache_key(realm_id))


def flush_alert_word(*, instance: AlertWord, **kwargs: object) -> None:
    realm_id = instance.realm_id
    flush_realm_alert_words(realm_id)


post_save.connect(flush_alert_word, sender=AlertWord)
post_delete.connect(flush_alert_word, sender=AlertWord)