# https://github.com/typeddjango/django-stubs/issues/1698
# mypy: disable-error-code="explicit-override"

import hashlib
import secrets
import time
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from email.headerregistry import Address
from enum import Enum
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    List,
    Optional,
    Pattern,
    Set,
    Tuple,
    TypedDict,
    TypeVar,
    Union,
)
from uuid import uuid4

import django.contrib.auth
import orjson
import re2
import uri_template
from bitfield import BitField
from bitfield.types import Bit, BitHandler
from django.conf import settings
from django.contrib.auth.models import (
    AbstractBaseUser,
    AnonymousUser,
    PermissionsMixin,
    UserManager,
)
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.postgres.indexes import GinIndex
from django.contrib.postgres.search import SearchVectorField
from django.core.exceptions import ValidationError
from django.core.serializers.json import DjangoJSONEncoder
from django.core.validators import MinLengthValidator, RegexValidator, validate_email
from django.db import models, transaction
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.models import CASCADE, Exists, F, OuterRef, Q, QuerySet, Sum
from django.db.models.functions import Lower, Upper
from django.db.models.signals import post_delete, post_save, pre_delete
from django.db.models.sql.compiler import SQLCompiler
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
from django_cte import CTEManager
from django_stubs_ext import StrPromise, ValuesQuerySet
from typing_extensions import override

from confirmation import settings as confirmation_settings
from zerver.lib import cache
from zerver.lib.cache import (
    active_non_guest_user_ids_cache_key,
    active_user_ids_cache_key,
    bot_dict_fields,
    bot_dicts_in_realm_cache_key,
    bot_profile_cache_key,
    cache_delete,
    cache_set,
    cache_with_key,
    flush_message,
    flush_muting_users_cache,
    flush_realm,
    flush_stream,
    flush_submessage,
    flush_used_upload_space_cache,
    flush_user_profile,
    get_realm_used_upload_space_cache_key,
    realm_alert_words_automaton_cache_key,
    realm_alert_words_cache_key,
    realm_user_dict_fields,
    realm_user_dicts_cache_key,
    user_profile_by_api_key_cache_key,
    user_profile_by_id_cache_key,
    user_profile_cache_key,
)
from zerver.lib.exceptions import JsonableError, RateLimitedError
from zerver.lib.per_request_cache import (
    flush_per_request_cache,
    return_same_value_during_entire_request,
)
from zerver.lib.pysa import mark_sanitized
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.lib.types import (
    DefaultStreamDict,
    ExtendedFieldElement,
    ExtendedValidator,
    FieldElement,
    GroupPermissionSetting,
    LinkifierDict,
    ProfileData,
    ProfileDataElementBase,
    ProfileDataElementValue,
    RawUserDict,
    RealmPlaygroundDict,
    RealmUserValidator,
    UnspecifiedValue,
    UserDisplayRecipient,
    UserFieldElement,
    Validator,
)
from zerver.lib.utils import generate_api_key
from zerver.lib.validator import (
    check_date,
    check_int,
    check_list,
    check_long_string,
    check_short_string,
    check_url,
    validate_select_field,
)

MAX_TOPIC_NAME_LENGTH = 60

MAX_LANGUAGE_ID_LENGTH: int = 50

SECONDS_PER_DAY = 86400

if TYPE_CHECKING:
    # We use ModelBackend only for typing. Importing it otherwise causes circular dependency.
    from django.contrib.auth.backends import ModelBackend


class EmojiInfo(TypedDict):
    id: str
    name: str
    source_url: str
    deactivated: bool
    author_id: Optional[int]
    still_url: Optional[str]


@models.Field.register_lookup
class AndZero(models.Lookup[int]):
    lookup_name = "andz"

    @override
    def as_sql(
        self, compiler: SQLCompiler, connection: BaseDatabaseWrapper
    ) -> Tuple[str, List[Union[str, int]]]:  # nocoverage # currently only used in migrations
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.process_rhs(compiler, connection)
        return f"{lhs} & {rhs} = 0", lhs_params + rhs_params


@models.Field.register_lookup
class AndNonZero(models.Lookup[int]):
    lookup_name = "andnz"

    @override
    def as_sql(
        self, compiler: SQLCompiler, connection: BaseDatabaseWrapper
    ) -> Tuple[str, List[Union[str, int]]]:  # nocoverage # currently only used in migrations
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.process_rhs(compiler, connection)
        return f"{lhs} & {rhs} != 0", lhs_params + rhs_params
ModelT = TypeVar("ModelT", bound=models.Model)
RowT = TypeVar("RowT")


def query_for_ids(
    query: ValuesQuerySet[ModelT, RowT],
    user_ids: List[int],
    field: str,
) -> ValuesQuerySet[ModelT, RowT]:
    """
    This function optimizes searches of the form
    `user_profile_id in (1, 2, 3, 4)` by quickly
    building the where clauses. Profiling shows significant
    speedups over the normal Django-based approach.

    Use this very carefully! Also, the caller should
    guard against empty lists of user_ids.
    """
    assert user_ids
    clause = f"{field} IN %s"
    query = query.extra(
        where=[clause],
        params=(tuple(user_ids),),
    )
    return query
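
# Illustrative usage sketch, not part of the original module; `SomeModel` and the
# field/column names below are placeholders:
#     rows = query_for_ids(
#         query=SomeModel.objects.values("id", "user_profile_id"),
#         user_ids=[1, 2, 3, 4],
#         field="user_profile_id",
#     )
# The caller must ensure user_ids is non-empty, per the assertion above.

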
@return_same_value_during_entire_request
def get_display_recipient_by_id(
    recipient_id: int, recipient_type: int, recipient_type_id: Optional[int]
) -> List[UserDisplayRecipient]:
    """
    returns: an object describing the recipient (using a cache).
    If the type is a stream, the type_id must be an int; a string is returned.
    Otherwise, type_id may be None; an array of recipient dicts is returned.
    """
    # Have to import here, to avoid circular dependency.
    from zerver.lib.display_recipient import get_display_recipient_remote_cache

    return get_display_recipient_remote_cache(recipient_id, recipient_type, recipient_type_id)


def get_display_recipient(recipient: "Recipient") -> List[UserDisplayRecipient]:
    return get_display_recipient_by_id(
        recipient.id,
        recipient.type,
        recipient.type_id,
    )


def get_recipient_ids(
    recipient: Optional["Recipient"], user_profile_id: int
) -> Tuple[List[int], str]:
    if recipient is None:
        recipient_type_str = ""
        to = []
    elif recipient.type == Recipient.STREAM:
        recipient_type_str = "stream"
        to = [recipient.type_id]
    else:
        recipient_type_str = "private"
        if recipient.type == Recipient.PERSONAL:
            to = [recipient.type_id]
        else:
            to = []
            for r in get_display_recipient(recipient):
                assert not isinstance(r, str)  # It will only be a string for streams
                if r["id"] != user_profile_id:
                    to.append(r["id"])
    return to, recipient_type_str
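
# Illustrative examples of the return value, not part of the original module:
#   stream recipient:      ([<stream id>], "stream")
#   1:1 direct message:    ([<other user's id>], "private")
#   group direct message:  (ids of the other participants, "private")
#   recipient is None:     ([], "")

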
def get_all_custom_emoji_for_realm_cache_key(realm_id: int) -> str:
    return f"realm_emoji:{realm_id}"


# This simple call-once caching saves ~500us in auth_enabled_helper,
# which is a significant optimization for common_context. Note that
# these values cannot change in a running production system, but do
# regularly change within unit tests; we address the latter by calling
# clear_supported_auth_backends_cache in our standard tearDown code.
supported_backends: Optional[List["ModelBackend"]] = None


def supported_auth_backends() -> List["ModelBackend"]:
    global supported_backends
    # Caching temporarily disabled for debugging
    supported_backends = django.contrib.auth.get_backends()
    assert supported_backends is not None
    return supported_backends


def clear_supported_auth_backends_cache() -> None:
    global supported_backends
    supported_backends = None


class SystemGroups:
    FULL_MEMBERS = "role:fullmembers"
    EVERYONE_ON_INTERNET = "role:internet"
    OWNERS = "role:owners"
    ADMINISTRATORS = "role:administrators"
    MODERATORS = "role:moderators"
    MEMBERS = "role:members"
    EVERYONE = "role:everyone"
    NOBODY = "role:nobody"


class RealmAuthenticationMethod(models.Model):
    """
    Tracks which authentication backends are enabled for a realm.
    An enabled backend is represented in this table by a row with the appropriate
    .realm value and a .name matching the name of the target backend in the
    AUTH_BACKEND_NAME_MAP dict.
    """

    realm = models.ForeignKey("Realm", on_delete=CASCADE, db_index=True)
    name = models.CharField(max_length=80)

    class Meta:
        unique_together = ("realm", "name")


def generate_realm_uuid_owner_secret() -> str:
    token = generate_api_key()

    # We include a prefix to facilitate scanning for accidental
    # disclosure of secrets e.g. in Github commit pushes.
    return f"zuliprealm_{token}"
class OrgTypeEnum(Enum):
    Unspecified = 0
    Business = 10
    OpenSource = 20
    EducationNonProfit = 30
    Education = 35
    Research = 40
    Event = 50
    NonProfit = 60
    Government = 70
    PoliticalGroup = 80
    Community = 90
    Personal = 100
    Other = 1000


class OrgTypeDict(TypedDict):
    name: str
    id: int
    hidden: bool
    display_order: int
    onboarding_zulip_guide_url: Optional[str]


class Realm(models.Model):  # type: ignore[django-manager-missing] # django-stubs cannot resolve the custom CTEManager yet https://github.com/typeddjango/django-stubs/issues/1023
    MAX_REALM_NAME_LENGTH = 40
    MAX_REALM_DESCRIPTION_LENGTH = 1000
    MAX_REALM_SUBDOMAIN_LENGTH = 40
    MAX_REALM_REDIRECT_URL_LENGTH = 128

    INVITES_STANDARD_REALM_DAILY_MAX = 3000
    MESSAGE_VISIBILITY_LIMITED = 10000
    SUBDOMAIN_FOR_ROOT_DOMAIN = ""
    WILDCARD_MENTION_THRESHOLD = 15

    # User-visible display name and description used on e.g. the organization homepage
    name = models.CharField(max_length=MAX_REALM_NAME_LENGTH)
    description = models.TextField(default="")

    # A short, identifier-like name for the organization. Used in subdomains;
    # e.g. on a server at example.com, an org with string_id `foo` is reached
    # at `foo.example.com`.
    string_id = models.CharField(max_length=MAX_REALM_SUBDOMAIN_LENGTH, unique=True)

    # uuid and a secret for the sake of per-realm authentication with the push notification
    # bouncer.
    uuid = models.UUIDField(default=uuid4, unique=True)
    uuid_owner_secret = models.TextField(default=generate_realm_uuid_owner_secret)
    # Whether push notifications are working for this realm, and
    # whether there is a specific date at which we expect that to
    # cease to be the case.
    push_notifications_enabled = models.BooleanField(default=False, db_index=True)
    push_notifications_enabled_end_timestamp = models.DateTimeField(default=None, null=True)

    date_created = models.DateTimeField(default=timezone_now)
    demo_organization_scheduled_deletion_date = models.DateTimeField(default=None, null=True)
    deactivated = models.BooleanField(default=False)

    # Redirect URL if the Realm has moved to another server
    deactivated_redirect = models.URLField(max_length=MAX_REALM_REDIRECT_URL_LENGTH, null=True)

    # See RealmDomain for the domains that apply for a given organization.
    emails_restricted_to_domains = models.BooleanField(default=False)

    invite_required = models.BooleanField(default=True)

    _max_invites = models.IntegerField(null=True, db_column="max_invites")
    disallow_disposable_email_addresses = models.BooleanField(default=True)

    # Allow users to access web-public streams without login. This
    # setting also controls API access of web-public streams.
    enable_spectator_access = models.BooleanField(default=False)

    # Whether the organization has given permission to be advertised in the
    # Zulip communities directory.
    want_advertise_in_communities_directory = models.BooleanField(default=False, db_index=True)

    # Whether the organization has enabled inline image and URL previews.
    inline_image_preview = models.BooleanField(default=True)
    inline_url_embed_preview = models.BooleanField(default=False)

    # Whether digest emails are enabled for the organization.
    digest_emails_enabled = models.BooleanField(default=False)
    # Day of the week on which the digest is sent (default: Tuesday).
    digest_weekday = models.SmallIntegerField(default=1)

    send_welcome_emails = models.BooleanField(default=True)
    message_content_allowed_in_email_notifications = models.BooleanField(default=True)

    mandatory_topics = models.BooleanField(default=False)

    name_changes_disabled = models.BooleanField(default=False)
    email_changes_disabled = models.BooleanField(default=False)
    avatar_changes_disabled = models.BooleanField(default=False)

    POLICY_MEMBERS_ONLY = 1
    POLICY_ADMINS_ONLY = 2
    POLICY_FULL_MEMBERS_ONLY = 3
    POLICY_MODERATORS_ONLY = 4
    POLICY_EVERYONE = 5
    POLICY_NOBODY = 6
    POLICY_OWNERS_ONLY = 7

    COMMON_POLICY_TYPES = [
        POLICY_MEMBERS_ONLY,
        POLICY_ADMINS_ONLY,
        POLICY_FULL_MEMBERS_ONLY,
        POLICY_MODERATORS_ONLY,
    ]

    COMMON_MESSAGE_POLICY_TYPES = [
        POLICY_MEMBERS_ONLY,
        POLICY_ADMINS_ONLY,
        POLICY_FULL_MEMBERS_ONLY,
        POLICY_MODERATORS_ONLY,
        POLICY_EVERYONE,
    ]

    INVITE_TO_REALM_POLICY_TYPES = [
        POLICY_MEMBERS_ONLY,
        POLICY_ADMINS_ONLY,
        POLICY_FULL_MEMBERS_ONLY,
        POLICY_MODERATORS_ONLY,
        POLICY_NOBODY,
    ]

    # We don't allow granting roles less than Moderator access to
    # create web-public streams, since it's a sensitive feature that
    # can be used to send spam.
    CREATE_WEB_PUBLIC_STREAM_POLICY_TYPES = [
        POLICY_ADMINS_ONLY,
        POLICY_MODERATORS_ONLY,
        POLICY_OWNERS_ONLY,
        POLICY_NOBODY,
    ]

    EDIT_TOPIC_POLICY_TYPES = [
        POLICY_MEMBERS_ONLY,
        POLICY_ADMINS_ONLY,
        POLICY_FULL_MEMBERS_ONLY,
        POLICY_MODERATORS_ONLY,
        POLICY_EVERYONE,
        POLICY_NOBODY,
    ]

    MOVE_MESSAGES_BETWEEN_STREAMS_POLICY_TYPES = INVITE_TO_REALM_POLICY_TYPES

    DEFAULT_MOVE_MESSAGE_LIMIT_SECONDS = 7 * SECONDS_PER_DAY

    move_messages_within_stream_limit_seconds = models.PositiveIntegerField(
        default=DEFAULT_MOVE_MESSAGE_LIMIT_SECONDS, null=True
    )

    move_messages_between_streams_limit_seconds = models.PositiveIntegerField(
        default=DEFAULT_MOVE_MESSAGE_LIMIT_SECONDS, null=True
    )

    # Who in the organization is allowed to add custom emojis.
    add_custom_emoji_policy = models.PositiveSmallIntegerField(default=POLICY_MEMBERS_ONLY)

    # Who in the organization is allowed to create streams.
    create_public_stream_policy = models.PositiveSmallIntegerField(default=POLICY_MEMBERS_ONLY)
    create_private_stream_policy = models.PositiveSmallIntegerField(default=POLICY_MEMBERS_ONLY)
    create_web_public_stream_policy = models.PositiveSmallIntegerField(default=POLICY_OWNERS_ONLY)

    # Who in the organization is allowed to delete messages they themselves sent.
    delete_own_message_policy = models.PositiveSmallIntegerField(default=POLICY_ADMINS_ONLY)

    # Who in the organization is allowed to edit topics of any message.
    edit_topic_policy = models.PositiveSmallIntegerField(default=POLICY_EVERYONE)

    # Who in the organization is allowed to invite other users to the organization.
    invite_to_realm_policy = models.PositiveSmallIntegerField(default=POLICY_MEMBERS_ONLY)

    # UserGroup whose members are allowed to create invite links.
    create_multiuse_invite_group = models.ForeignKey(
        "UserGroup", on_delete=models.RESTRICT, related_name="+"
    )

    # on_delete field here is set to RESTRICT because we don't want to allow
    # deleting a user group in case it is referenced by this setting.
    # We are not using PROTECT since we want to allow deletion of user groups
    # when the realm itself is deleted.
    can_access_all_users_group = models.ForeignKey(
        "UserGroup", on_delete=models.RESTRICT, related_name="+"
    )

    # Who in the organization is allowed to invite other users to streams.
    invite_to_stream_policy = models.PositiveSmallIntegerField(default=POLICY_MEMBERS_ONLY)

    # Who in the organization is allowed to move messages between streams.
    move_messages_between_streams_policy = models.PositiveSmallIntegerField(
        default=POLICY_ADMINS_ONLY
    )

    user_group_edit_policy = models.PositiveSmallIntegerField(default=POLICY_MEMBERS_ONLY)

    PRIVATE_MESSAGE_POLICY_UNLIMITED = 1
    PRIVATE_MESSAGE_POLICY_DISABLED = 2
    private_message_policy = models.PositiveSmallIntegerField(
        default=PRIVATE_MESSAGE_POLICY_UNLIMITED
    )
    PRIVATE_MESSAGE_POLICY_TYPES = [
        PRIVATE_MESSAGE_POLICY_UNLIMITED,
        PRIVATE_MESSAGE_POLICY_DISABLED,
    ]

    # Global policy for who is allowed to use wildcard mentions in
    # streams with a large number of subscribers. Anyone can use
    # wildcard mentions in small streams regardless of this setting.
    WILDCARD_MENTION_POLICY_EVERYONE = 1
    WILDCARD_MENTION_POLICY_MEMBERS = 2
    WILDCARD_MENTION_POLICY_FULL_MEMBERS = 3
    WILDCARD_MENTION_POLICY_ADMINS = 5
    WILDCARD_MENTION_POLICY_NOBODY = 6
    WILDCARD_MENTION_POLICY_MODERATORS = 7
    wildcard_mention_policy = models.PositiveSmallIntegerField(
        default=WILDCARD_MENTION_POLICY_ADMINS,
    )
    WILDCARD_MENTION_POLICY_TYPES = [
        WILDCARD_MENTION_POLICY_EVERYONE,
        WILDCARD_MENTION_POLICY_MEMBERS,
        WILDCARD_MENTION_POLICY_FULL_MEMBERS,
        WILDCARD_MENTION_POLICY_ADMINS,
        WILDCARD_MENTION_POLICY_NOBODY,
        WILDCARD_MENTION_POLICY_MODERATORS,
    ]

    # Threshold in days for new users to create streams, and potentially take
    # some other actions.
    waiting_period_threshold = models.PositiveIntegerField(default=0)

    DEFAULT_MESSAGE_CONTENT_DELETE_LIMIT_SECONDS = (
        600  # if changed, also change in admin.js, setting_org.js
    )
    MESSAGE_TIME_LIMIT_SETTING_SPECIAL_VALUES_MAP = {
        "unlimited": None,
    }
    message_content_delete_limit_seconds = models.PositiveIntegerField(
        default=DEFAULT_MESSAGE_CONTENT_DELETE_LIMIT_SECONDS, null=True
    )

    allow_message_editing = models.BooleanField(default=True)
    DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS = (
        600  # if changed, also change in admin.js, setting_org.js
    )
    message_content_edit_limit_seconds = models.PositiveIntegerField(
        default=DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS, null=True
    )

    # Whether users have access to message edit history
    allow_edit_history = models.BooleanField(default=True)

    # Defaults for new users
    default_language = models.CharField(default="en", max_length=MAX_LANGUAGE_ID_LENGTH)

    DEFAULT_NOTIFICATION_STREAM_NAME = "general"
    INITIAL_PRIVATE_STREAM_NAME = "core team"
    STREAM_EVENTS_NOTIFICATION_TOPIC = gettext_lazy("stream events")
    notifications_stream = models.ForeignKey(
        "Stream",
        related_name="+",
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
    )
    signup_notifications_stream = models.ForeignKey(
        "Stream",
        related_name="+",
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
    )

    MESSAGE_RETENTION_SPECIAL_VALUES_MAP = {
        "unlimited": -1,
    }
    # For old messages being automatically deleted
    message_retention_days = models.IntegerField(null=False, default=-1)

    # When non-null, all but the latest this many messages in the organization
    # are inaccessible to users (but not deleted).
    message_visibility_limit = models.IntegerField(null=True)

    # Messages older than this message ID in the organization are inaccessible.
    first_visible_message_id = models.IntegerField(default=0)

    # Valid org types
    ORG_TYPES: Dict[str, OrgTypeDict] = {
        "unspecified": {
            "name": "Unspecified",
            "id": OrgTypeEnum.Unspecified.value,
            "hidden": True,
            "display_order": 0,
            "onboarding_zulip_guide_url": None,
        },
        "business": {
            "name": "Business",
            "id": OrgTypeEnum.Business.value,
            "hidden": False,
            "display_order": 1,
            "onboarding_zulip_guide_url": "https://zulip.com/for/business/",
        },
        "opensource": {
            "name": "Open-source project",
            "id": OrgTypeEnum.OpenSource.value,
            "hidden": False,
            "display_order": 2,
            "onboarding_zulip_guide_url": "https://zulip.com/for/open-source/",
        },
        "education_nonprofit": {
            "name": "Education (non-profit)",
            "id": OrgTypeEnum.EducationNonProfit.value,
            "hidden": False,
            "display_order": 3,
            "onboarding_zulip_guide_url": "https://zulip.com/for/education/",
        },
        "education": {
            "name": "Education (for-profit)",
            "id": OrgTypeEnum.Education.value,
            "hidden": False,
            "display_order": 4,
            "onboarding_zulip_guide_url": "https://zulip.com/for/education/",
        },
"research": {
|
|
|
|
"name": "Research",
|
2023-11-27 02:06:23 +01:00
|
|
|
"id": OrgTypeEnum.Research.value,
|
2021-06-24 20:05:06 +02:00
|
|
|
"hidden": False,
|
2021-07-19 03:36:52 +02:00
|
|
|
"display_order": 5,
|
2023-03-15 20:18:09 +01:00
|
|
|
"onboarding_zulip_guide_url": "https://zulip.com/for/research/",
|
2021-06-24 20:05:06 +02:00
|
|
|
},
|
|
|
|
"event": {
|
|
|
|
"name": "Event or conference",
|
2023-11-27 02:06:23 +01:00
|
|
|
"id": OrgTypeEnum.Event.value,
|
2021-06-24 20:05:06 +02:00
|
|
|
"hidden": False,
|
2021-07-19 03:36:52 +02:00
|
|
|
"display_order": 6,
|
2023-03-15 20:18:09 +01:00
|
|
|
"onboarding_zulip_guide_url": "https://zulip.com/for/events/",
|
2021-06-24 20:05:06 +02:00
|
|
|
},
|
|
|
|
"nonprofit": {
|
|
|
|
"name": "Non-profit (registered)",
|
2023-11-27 02:06:23 +01:00
|
|
|
"id": OrgTypeEnum.NonProfit.value,
|
2021-06-24 20:05:06 +02:00
|
|
|
"hidden": False,
|
2021-07-19 03:36:52 +02:00
|
|
|
"display_order": 7,
|
2023-03-15 20:18:09 +01:00
|
|
|
"onboarding_zulip_guide_url": "https://zulip.com/for/communities/",
|
2021-06-24 20:05:06 +02:00
|
|
|
},
|
|
|
|
"government": {
|
|
|
|
"name": "Government",
|
2023-11-27 02:06:23 +01:00
|
|
|
"id": OrgTypeEnum.Government.value,
|
2021-06-24 20:05:06 +02:00
|
|
|
"hidden": False,
|
2021-07-19 03:36:52 +02:00
|
|
|
"display_order": 8,
|
2023-03-15 20:18:09 +01:00
|
|
|
"onboarding_zulip_guide_url": None,
|
2021-06-24 20:05:06 +02:00
|
|
|
},
|
|
|
|
"political_group": {
|
|
|
|
"name": "Political group",
|
2023-11-27 02:06:23 +01:00
|
|
|
"id": OrgTypeEnum.PoliticalGroup.value,
|
2021-06-24 20:05:06 +02:00
|
|
|
"hidden": False,
|
2021-07-19 03:36:52 +02:00
|
|
|
"display_order": 9,
|
2023-03-15 20:18:09 +01:00
|
|
|
"onboarding_zulip_guide_url": None,
|
2021-06-24 20:05:06 +02:00
|
|
|
},
|
|
|
|
"community": {
|
|
|
|
"name": "Community",
|
2023-11-27 02:06:23 +01:00
|
|
|
"id": OrgTypeEnum.Community.value,
|
2021-06-24 20:05:06 +02:00
|
|
|
"hidden": False,
|
2021-07-19 03:36:52 +02:00
|
|
|
"display_order": 10,
|
2023-03-15 20:18:09 +01:00
|
|
|
"onboarding_zulip_guide_url": "https://zulip.com/for/communities/",
|
2021-06-24 20:05:06 +02:00
|
|
|
},
|
|
|
|
"personal": {
|
|
|
|
"name": "Personal",
|
2023-11-27 02:06:23 +01:00
|
|
|
"id": OrgTypeEnum.Personal.value,
|
2021-06-24 20:05:06 +02:00
|
|
|
"hidden": False,
|
|
|
|
"display_order": 100,
|
2023-03-15 20:18:09 +01:00
|
|
|
"onboarding_zulip_guide_url": None,
|
2021-06-24 20:05:06 +02:00
|
|
|
},
|
|
|
|
"other": {
|
|
|
|
"name": "Other",
|
2023-11-27 02:06:23 +01:00
|
|
|
"id": OrgTypeEnum.Other.value,
|
2021-06-24 20:05:06 +02:00
|
|
|
"hidden": False,
|
|
|
|
"display_order": 1000,
|
2023-03-15 20:18:09 +01:00
|
|
|
"onboarding_zulip_guide_url": None,
|
2021-06-24 20:05:06 +02:00
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2023-04-26 12:13:23 +02:00
|
|
|
    ORG_TYPE_IDS: List[int] = [t["id"] for t in ORG_TYPES.values()]

    org_type = models.PositiveSmallIntegerField(
        default=ORG_TYPES["unspecified"]["id"],
        choices=[(t["id"], t["name"]) for t in ORG_TYPES.values()],
    )

    UPGRADE_TEXT_STANDARD = gettext_lazy("Available on Zulip Cloud Standard. Upgrade to access.")
    UPGRADE_TEXT_PLUS = gettext_lazy("Available on Zulip Cloud Plus. Upgrade to access.")
    # plan_type controls various features around resource/feature
    # limitations for a Zulip organization on multi-tenant installations
    # like Zulip Cloud.
    PLAN_TYPE_SELF_HOSTED = 1
    PLAN_TYPE_LIMITED = 2
    PLAN_TYPE_STANDARD = 3
    PLAN_TYPE_STANDARD_FREE = 4
    PLAN_TYPE_PLUS = 10

    # Used for creating realms with different plan types.
    ALL_PLAN_TYPES = {
        PLAN_TYPE_SELF_HOSTED: "self-hosted-plan",
        PLAN_TYPE_LIMITED: "limited-plan",
        PLAN_TYPE_STANDARD: "standard-plan",
        PLAN_TYPE_STANDARD_FREE: "standard-free-plan",
        PLAN_TYPE_PLUS: "plus-plan",
    }
    plan_type = models.PositiveSmallIntegerField(default=PLAN_TYPE_SELF_HOSTED)

    # This value is also being used in web/src/settings_bots.bot_creation_policy_values.
    # On updating it here, update it there as well.
    BOT_CREATION_EVERYONE = 1
    BOT_CREATION_LIMIT_GENERIC_BOTS = 2
    BOT_CREATION_ADMINS_ONLY = 3
    bot_creation_policy = models.PositiveSmallIntegerField(default=BOT_CREATION_EVERYONE)
    BOT_CREATION_POLICY_TYPES = [
        BOT_CREATION_EVERYONE,
        BOT_CREATION_LIMIT_GENERIC_BOTS,
        BOT_CREATION_ADMINS_ONLY,
    ]

    # See upload_quota_bytes; don't interpret upload_quota_gb directly.
    UPLOAD_QUOTA_LIMITED = 5
    UPLOAD_QUOTA_STANDARD = 50
    upload_quota_gb = models.IntegerField(null=True)

    VIDEO_CHAT_PROVIDERS = {
        "disabled": {
            "name": "None",
            "id": 0,
        },
        "jitsi_meet": {
            "name": "Jitsi Meet",
            "id": 1,
        },
        # ID 2 was used for the now-deleted Google Hangouts.
        # ID 3 reserved for optional Zoom, see below.
        # ID 4 reserved for optional BigBlueButton, see below.
    }

    if settings.VIDEO_ZOOM_CLIENT_ID is not None and settings.VIDEO_ZOOM_CLIENT_SECRET is not None:
        VIDEO_CHAT_PROVIDERS["zoom"] = {
            "name": "Zoom",
            "id": 3,
        }

    if settings.BIG_BLUE_BUTTON_SECRET is not None and settings.BIG_BLUE_BUTTON_URL is not None:
        VIDEO_CHAT_PROVIDERS["big_blue_button"] = {"name": "BigBlueButton", "id": 4}

    video_chat_provider = models.PositiveSmallIntegerField(
        default=VIDEO_CHAT_PROVIDERS["jitsi_meet"]["id"]
    )

JITSI_SERVER_SPECIAL_VALUES_MAP = {"default": None}
|
|
|
|
jitsi_server_url = models.URLField(null=True, default=None)
|
|
|
|
|
2023-03-02 16:00:49 +01:00
|
|
|
# Please access this via get_giphy_rating_options.
|
2021-03-31 13:10:46 +02:00
|
|
|
GIPHY_RATING_OPTIONS = {
|
|
|
|
"disabled": {
|
2023-03-02 16:00:49 +01:00
|
|
|
"name": gettext_lazy("GIPHY integration disabled"),
|
2021-03-31 13:10:46 +02:00
|
|
|
"id": 0,
|
|
|
|
},
|
|
|
|
# Source: https://github.com/Giphy/giphy-js/blob/master/packages/fetch-api/README.md#shared-options
|
|
|
|
"y": {
|
2023-03-02 16:00:49 +01:00
|
|
|
"name": gettext_lazy("Allow GIFs rated Y (Very young audience)"),
|
2021-03-31 13:10:46 +02:00
|
|
|
"id": 1,
|
|
|
|
},
|
|
|
|
"g": {
|
2023-03-02 16:00:49 +01:00
|
|
|
"name": gettext_lazy("Allow GIFs rated G (General audience)"),
|
2021-03-31 13:10:46 +02:00
|
|
|
"id": 2,
|
|
|
|
},
|
|
|
|
"pg": {
|
2023-03-02 16:00:49 +01:00
|
|
|
"name": gettext_lazy("Allow GIFs rated PG (Parental guidance)"),
|
2021-03-31 13:10:46 +02:00
|
|
|
"id": 3,
|
|
|
|
},
|
|
|
|
"pg-13": {
|
2023-03-06 15:32:59 +01:00
|
|
|
"name": gettext_lazy("Allow GIFs rated PG-13 (Parental guidance - under 13)"),
|
2021-03-31 13:10:46 +02:00
|
|
|
"id": 4,
|
|
|
|
},
|
|
|
|
"r": {
|
2023-03-02 16:00:49 +01:00
|
|
|
"name": gettext_lazy("Allow GIFs rated R (Restricted)"),
|
2021-03-31 13:10:46 +02:00
|
|
|
"id": 5,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
# maximum rating of the GIFs that will be retrieved from GIPHY
|
2022-08-15 19:10:58 +02:00
|
|
|
giphy_rating = models.PositiveSmallIntegerField(default=GIPHY_RATING_OPTIONS["g"]["id"])
|
2021-03-31 13:10:46 +02:00
|
|
|
|
2023-07-17 13:25:24 +02:00
|
|
|
default_code_block_language = models.TextField(default="")
|
2020-03-31 15:21:27 +02:00
|
|
|
|
2022-08-04 09:50:57 +02:00
|
|
|
# Whether read receipts are enabled in the organization. If disabled,
|
|
|
|
# they will not be available regardless of users' personal settings.
|
2022-08-15 19:10:58 +02:00
|
|
|
enable_read_receipts = models.BooleanField(default=False)
|
2022-08-04 09:50:57 +02:00
|
|
|
|
2023-09-13 13:17:00 +02:00
|
|
|
# Whether clients should display "(guest)" after names of guest users.
|
|
|
|
enable_guest_user_indicator = models.BooleanField(default=True)
|
|
|
|
|
2017-03-24 01:44:29 +01:00
|
|
|
# Define the types of the various automatically managed properties
|
2020-04-22 01:09:50 +02:00
|
|
|
property_types: Dict[str, Union[type, Tuple[type, ...]]] = dict(
|
2021-05-04 19:02:24 +02:00
|
|
|
add_custom_emoji_policy=int,
|
2017-07-16 11:00:44 +02:00
|
|
|
allow_edit_history=bool,
|
2022-09-22 10:53:37 +02:00
|
|
|
allow_message_editing=bool,
|
2021-11-16 09:09:21 +01:00
|
|
|
avatar_changes_disabled=bool,
|
2018-01-29 16:10:54 +01:00
|
|
|
bot_creation_policy=int,
|
2021-03-27 05:48:37 +01:00
|
|
|
create_private_stream_policy=int,
|
2021-11-16 09:09:21 +01:00
|
|
|
create_public_stream_policy=int,
|
2021-10-04 08:33:31 +02:00
|
|
|
create_web_public_stream_policy=int,
|
2023-07-17 13:25:24 +02:00
|
|
|
default_code_block_language=str,
|
2018-05-11 02:24:34 +02:00
|
|
|
default_language=str,
|
2021-11-16 09:09:21 +01:00
|
|
|
delete_own_message_policy=int,
|
2018-05-11 02:24:34 +02:00
|
|
|
description=str,
|
2019-04-06 06:34:49 +02:00
|
|
|
digest_emails_enabled=bool,
|
2021-11-16 09:09:21 +01:00
|
|
|
digest_weekday=int,
|
2018-03-05 20:19:07 +01:00
|
|
|
disallow_disposable_email_addresses=bool,
|
2022-09-22 10:53:37 +02:00
|
|
|
edit_topic_policy=int,
|
2017-03-24 01:44:29 +01:00
|
|
|
email_changes_disabled=bool,
|
2021-11-16 09:09:21 +01:00
|
|
|
emails_restricted_to_domains=bool,
|
2023-09-13 13:17:00 +02:00
|
|
|
enable_guest_user_indicator=bool,
|
2022-08-04 11:43:59 +02:00
|
|
|
enable_read_receipts=bool,
|
2021-10-03 14:16:07 +02:00
|
|
|
enable_spectator_access=bool,
|
2021-03-31 13:10:46 +02:00
|
|
|
giphy_rating=int,
|
2017-03-24 01:44:29 +01:00
|
|
|
inline_image_preview=bool,
|
|
|
|
inline_url_embed_preview=bool,
|
2021-11-16 09:09:21 +01:00
|
|
|
invite_required=bool,
|
|
|
|
invite_to_realm_policy=int,
|
|
|
|
invite_to_stream_policy=int,
|
2023-09-19 19:03:08 +02:00
|
|
|
jitsi_server_url=(str, type(None)),
|
2017-07-04 20:04:27 +02:00
|
|
|
mandatory_topics=bool,
|
2021-11-16 09:09:21 +01:00
|
|
|
message_content_allowed_in_email_notifications=bool,
|
2022-09-22 10:53:37 +02:00
|
|
|
message_content_edit_limit_seconds=(int, type(None)),
|
2021-11-16 09:09:21 +01:00
|
|
|
message_content_delete_limit_seconds=(int, type(None)),
|
2022-10-11 13:19:49 +02:00
|
|
|
move_messages_between_streams_limit_seconds=(int, type(None)),
|
2023-01-26 12:53:27 +01:00
|
|
|
move_messages_within_stream_limit_seconds=(int, type(None)),
|
2017-04-09 00:35:41 +02:00
|
|
|
message_retention_days=(int, type(None)),
|
2021-11-16 09:09:21 +01:00
|
|
|
move_messages_between_streams_policy=int,
|
2018-05-11 02:24:34 +02:00
|
|
|
name=str,
|
2017-03-24 01:44:29 +01:00
|
|
|
name_changes_disabled=bool,
|
2021-11-16 09:09:21 +01:00
|
|
|
private_message_policy=int,
|
2023-11-23 22:07:41 +01:00
|
|
|
push_notifications_enabled=bool,
|
2018-02-18 09:34:54 +01:00
|
|
|
send_welcome_emails=bool,
|
2021-11-16 09:09:21 +01:00
|
|
|
user_group_edit_policy=int,
|
2019-05-09 09:54:38 +02:00
|
|
|
video_chat_provider=int,
|
2017-03-24 01:44:29 +01:00
|
|
|
waiting_period_threshold=int,
|
2022-04-22 18:45:30 +02:00
|
|
|
want_advertise_in_communities_directory=bool,
|
2020-09-04 18:53:22 +02:00
|
|
|
wildcard_mention_policy=int,
|
2020-04-22 01:09:50 +02:00
|
|
|
)
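# Note on the structure above (clarifying comment, not from the original
# source): each value is either a single expected type or a tuple of accepted
# types, e.g. message_retention_days=(int, type(None)) marks a setting whose
# value may be an integer or None.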
|
2017-03-24 01:44:29 +01:00
|
|
|
|
2023-08-09 15:06:56 +02:00
|
|
|
REALM_PERMISSION_GROUP_SETTINGS: Dict[str, GroupPermissionSetting] = dict(
|
|
|
|
create_multiuse_invite_group=GroupPermissionSetting(
|
|
|
|
require_system_group=True,
|
|
|
|
allow_internet_group=False,
|
|
|
|
allow_owners_group=False,
|
|
|
|
allow_nobody_group=True,
|
2023-09-07 02:06:51 +02:00
|
|
|
allow_everyone_group=False,
|
2023-09-21 13:06:39 +02:00
|
|
|
default_group_name=SystemGroups.ADMINISTRATORS,
|
2023-08-09 15:06:56 +02:00
|
|
|
id_field_name="create_multiuse_invite_group_id",
|
|
|
|
),
|
2023-03-23 15:42:00 +01:00
|
|
|
can_access_all_users_group=GroupPermissionSetting(
|
|
|
|
require_system_group=True,
|
|
|
|
allow_internet_group=False,
|
|
|
|
allow_owners_group=False,
|
|
|
|
allow_nobody_group=False,
|
|
|
|
allow_everyone_group=True,
|
|
|
|
default_group_name=SystemGroups.EVERYONE,
|
|
|
|
id_field_name="can_access_all_users_group_id",
|
2023-11-23 10:14:35 +01:00
|
|
|
allowed_system_groups=[SystemGroups.EVERYONE, SystemGroups.MEMBERS],
|
2023-03-23 15:42:00 +01:00
|
|
|
),
|
2023-08-09 15:06:56 +02:00
|
|
|
)
|
|
|
|
|
2019-11-16 19:16:34 +01:00
|
|
|
DIGEST_WEEKDAY_VALUES = [0, 1, 2, 3, 4, 5, 6]
|
|
|
|
|
2018-08-16 01:26:55 +02:00
|
|
|
# Icon is the square mobile icon.
|
2021-02-12 08:20:45 +01:00
|
|
|
ICON_FROM_GRAVATAR = "G"
|
|
|
|
ICON_UPLOADED = "U"
|
2017-02-21 03:41:20 +01:00
|
|
|
ICON_SOURCES = (
|
2021-02-12 08:20:45 +01:00
|
|
|
(ICON_FROM_GRAVATAR, "Hosted by Gravatar"),
|
|
|
|
(ICON_UPLOADED, "Uploaded by administrator"),
|
2017-02-21 03:41:20 +01:00
|
|
|
)
|
2022-08-15 19:10:58 +02:00
|
|
|
icon_source = models.CharField(
|
2021-02-12 08:19:30 +01:00
|
|
|
default=ICON_FROM_GRAVATAR,
|
|
|
|
choices=ICON_SOURCES,
|
|
|
|
max_length=1,
|
2020-04-22 01:09:50 +02:00
|
|
|
)
|
2022-08-15 19:10:58 +02:00
|
|
|
icon_version = models.PositiveSmallIntegerField(default=1)
|
2017-02-21 03:41:20 +01:00
|
|
|
|
2021-05-14 00:16:30 +02:00
|
|
|
# Logo is the horizontal logo we show in top-left of web app navbar UI.
|
2021-02-12 08:20:45 +01:00
|
|
|
LOGO_DEFAULT = "D"
|
|
|
|
LOGO_UPLOADED = "U"
|
2018-08-16 01:26:55 +02:00
|
|
|
LOGO_SOURCES = (
|
2021-02-12 08:20:45 +01:00
|
|
|
(LOGO_DEFAULT, "Default to Zulip"),
|
|
|
|
(LOGO_UPLOADED, "Uploaded by administrator"),
|
2018-08-16 01:26:55 +02:00
|
|
|
)
|
2022-08-15 19:10:58 +02:00
|
|
|
logo_source = models.CharField(
|
2021-02-12 08:19:30 +01:00
|
|
|
default=LOGO_DEFAULT,
|
|
|
|
choices=LOGO_SOURCES,
|
|
|
|
max_length=1,
|
2020-04-22 01:09:50 +02:00
|
|
|
)
|
2022-08-15 19:10:58 +02:00
|
|
|
logo_version = models.PositiveSmallIntegerField(default=1)
|
2018-08-16 01:26:55 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
night_logo_source = models.CharField(
|
2021-02-12 08:19:30 +01:00
|
|
|
default=LOGO_DEFAULT,
|
|
|
|
choices=LOGO_SOURCES,
|
|
|
|
max_length=1,
|
2020-04-22 01:09:50 +02:00
|
|
|
)
|
2022-08-15 19:10:58 +02:00
|
|
|
night_logo_version = models.PositiveSmallIntegerField(default=1)
|
2019-01-27 08:25:10 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2023-04-12 22:40:35 +02:00
|
|
|
def __str__(self) -> str:
|
|
|
|
return f"{self.string_id} {self.id}"
|
|
|
|
|
|
|
|
def get_giphy_rating_options(self) -> Dict[str, Dict[str, object]]:
|
|
|
|
"""Wrapper function for GIPHY_RATING_OPTIONS that ensures evaluation
|
|
|
|
of the lazily evaluated `name` field without modifying the original."""
|
|
|
|
return {
|
|
|
|
rating_type: {"name": str(rating["name"]), "id": rating["id"]}
|
|
|
|
for rating_type, rating in self.GIPHY_RATING_OPTIONS.items()
|
|
|
|
}
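# Illustrative sketch (not part of the model; output shown for an English
# locale): the str() call above forces the lazily translated names, so one
# would expect roughly:
#
#     realm.get_giphy_rating_options()["disabled"]
#     # -> {"name": "GIPHY integration disabled", "id": 0}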
|
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def authentication_methods_dict(self) -> Dict[str, bool]:
|
2021-10-22 22:44:26 +02:00
|
|
|
"""Returns the mapping from authentication flags to their status,
|
2016-11-02 21:41:10 +01:00
|
|
|
showing only those authentication flags that are supported on
|
|
|
|
the current server (i.e. if EmailAuthBackend is not configured
|
|
|
|
on the server, this will not return an entry for "Email")."""
|
|
|
|
# This mapping needs to be imported from here due to the cyclic
|
|
|
|
# dependency.
|
2023-04-16 21:53:22 +02:00
|
|
|
from zproject.backends import AUTH_BACKEND_NAME_MAP, all_implemented_backend_names
|
2016-11-02 21:41:10 +01:00
|
|
|
|
2020-04-22 01:09:50 +02:00
|
|
|
ret: Dict[str, bool] = {}
|
2022-10-08 07:35:48 +02:00
|
|
|
supported_backends = [type(backend) for backend in supported_auth_backends()]
|
2023-04-16 21:53:22 +02:00
|
|
|
|
|
|
|
for backend_name in all_implemented_backend_names():
|
|
|
|
backend_class = AUTH_BACKEND_NAME_MAP[backend_name]
|
|
|
|
if backend_class in supported_backends:
|
|
|
|
ret[backend_name] = False
|
|
|
|
for realm_authentication_method in RealmAuthenticationMethod.objects.filter(
|
|
|
|
realm_id=self.id
|
|
|
|
):
|
|
|
|
backend_class = AUTH_BACKEND_NAME_MAP[realm_authentication_method.name]
|
|
|
|
if backend_class in supported_backends:
|
|
|
|
ret[realm_authentication_method.name] = True
|
2016-11-02 21:41:10 +01:00
|
|
|
return ret
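# Hedged example of the returned shape (the exact backend names depend on
# which backends this server supports and the realm enables):
#
#     realm.authentication_methods_dict()
#     # -> {"Email": True, "GitHub": False, ...}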
|
|
|
|
|
2021-03-19 20:06:41 +01:00
|
|
|
def get_admin_users_and_bots(
|
|
|
|
self, include_realm_owners: bool = True
|
2022-06-23 22:47:50 +02:00
|
|
|
) -> QuerySet["UserProfile"]:
|
2019-06-20 23:36:15 +02:00
|
|
|
"""Use this in contexts where we want administrative users as well as
|
|
|
|
bots with administrator privileges, like send_event calls for
|
|
|
|
notifications to all administrator users.
|
|
|
|
"""
|
2021-03-19 20:06:41 +01:00
|
|
|
if include_realm_owners:
|
|
|
|
roles = [UserProfile.ROLE_REALM_ADMINISTRATOR, UserProfile.ROLE_REALM_OWNER]
|
|
|
|
else:
|
|
|
|
roles = [UserProfile.ROLE_REALM_ADMINISTRATOR]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
return UserProfile.objects.filter(
|
|
|
|
realm=self,
|
|
|
|
is_active=True,
|
2021-03-19 20:06:41 +01:00
|
|
|
role__in=roles,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2013-11-02 15:36:17 +01:00
|
|
|
|
2022-06-23 22:47:50 +02:00
|
|
|
def get_human_admin_users(self, include_realm_owners: bool = True) -> QuerySet["UserProfile"]:
|
2019-06-20 23:36:15 +02:00
|
|
|
"""Use this in contexts where we want only human users with
|
|
|
|
administrative privileges, like sending an email to all of a
|
|
|
|
realm's administrators (bots don't have real email addresses).
|
|
|
|
"""
|
2021-03-19 20:09:39 +01:00
|
|
|
if include_realm_owners:
|
|
|
|
roles = [UserProfile.ROLE_REALM_ADMINISTRATOR, UserProfile.ROLE_REALM_OWNER]
|
|
|
|
else:
|
|
|
|
roles = [UserProfile.ROLE_REALM_ADMINISTRATOR]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
return UserProfile.objects.filter(
|
|
|
|
realm=self,
|
|
|
|
is_bot=False,
|
|
|
|
is_active=True,
|
2021-03-19 20:09:39 +01:00
|
|
|
role__in=roles,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2019-06-20 23:26:54 +02:00
|
|
|
|
2022-06-23 22:47:50 +02:00
|
|
|
def get_human_billing_admin_and_realm_owner_users(self) -> QuerySet["UserProfile"]:
|
2021-02-12 08:19:30 +01:00
|
|
|
return UserProfile.objects.filter(
|
|
|
|
Q(role=UserProfile.ROLE_REALM_OWNER) | Q(is_billing_admin=True),
|
|
|
|
realm=self,
|
|
|
|
is_bot=False,
|
|
|
|
is_active=True,
|
|
|
|
)
|
2020-07-17 12:56:06 +02:00
|
|
|
|
2022-06-23 22:47:50 +02:00
|
|
|
def get_active_users(self) -> QuerySet["UserProfile"]:
|
2023-07-13 13:02:12 +02:00
|
|
|
return UserProfile.objects.filter(realm=self, is_active=True)
|
2014-01-28 17:29:00 +01:00
|
|
|
|
2021-04-16 20:09:08 +02:00
|
|
|
def get_first_human_user(self) -> Optional["UserProfile"]:
|
|
|
|
"""A useful value for communications with newly created realms.
|
|
|
|
Has a few fundamental limitations:
|
|
|
|
|
|
|
|
* Its value will be effectively random for realms imported from Slack or
|
|
|
|
other third-party tools.
|
|
|
|
* The user may be deactivated, etc., so it's not something that's useful
|
|
|
|
for features, permissions, etc.
|
|
|
|
"""
|
|
|
|
return UserProfile.objects.filter(realm=self, is_bot=False).order_by("id").first()
|
|
|
|
|
2022-06-23 22:47:50 +02:00
|
|
|
def get_human_owner_users(self) -> QuerySet["UserProfile"]:
|
2021-02-12 08:19:30 +01:00
|
|
|
return UserProfile.objects.filter(
|
|
|
|
realm=self, is_bot=False, role=UserProfile.ROLE_REALM_OWNER, is_active=True
|
|
|
|
)
|
2020-05-16 21:06:43 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_bot_domain(self) -> str:
|
2023-11-07 08:12:19 +01:00
|
|
|
return get_fake_email_domain(self.host)
|
2017-03-05 04:17:12 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
def get_notifications_stream(self) -> Optional["Stream"]:
|
2017-08-24 00:36:29 +02:00
|
|
|
if self.notifications_stream is not None and not self.notifications_stream.deactivated:
|
|
|
|
return self.notifications_stream
|
|
|
|
return None
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
def get_signup_notifications_stream(self) -> Optional["Stream"]:
|
2021-02-12 08:19:30 +01:00
|
|
|
if (
|
|
|
|
self.signup_notifications_stream is not None
|
|
|
|
and not self.signup_notifications_stream.deactivated
|
|
|
|
):
|
2017-11-15 23:43:01 +01:00
|
|
|
return self.signup_notifications_stream
|
|
|
|
return None
|
|
|
|
|
2017-12-07 06:24:40 +01:00
|
|
|
@property
|
|
|
|
def max_invites(self) -> int:
|
|
|
|
if self._max_invites is None:
|
|
|
|
return settings.INVITES_DEFAULT_REALM_DAILY_MAX
|
|
|
|
return self._max_invites
|
|
|
|
|
|
|
|
@max_invites.setter
|
2020-02-25 08:17:46 +01:00
|
|
|
def max_invites(self, value: Optional[int]) -> None:
|
2017-12-07 06:24:40 +01:00
|
|
|
self._max_invites = value
|
|
|
|
|
2017-12-20 23:33:17 +01:00
|
|
|
def upload_quota_bytes(self) -> Optional[int]:
|
|
|
|
if self.upload_quota_gb is None:
|
|
|
|
return None
|
|
|
|
# We describe the quota to users in "GB" or "gigabytes", but actually apply
|
|
|
|
# it as gibibytes (GiB) to be a bit more generous in case of confusion.
|
|
|
|
return self.upload_quota_gb << 30
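# Worked example: upload_quota_gb=5 yields 5 << 30 == 5_368_709_120 bytes
# (5 GiB), slightly more than the 5 * 10**9 bytes a strict "GB" would be.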
|
|
|
|
|
2022-04-13 05:42:12 +02:00
|
|
|
# `realm` instead of `self` here to make sure the parameters of the cache key
|
|
|
|
# function match those of the original method.
|
2023-06-08 21:46:38 +02:00
|
|
|
@cache_with_key(
|
|
|
|
lambda realm: get_realm_used_upload_space_cache_key(realm.id), timeout=3600 * 24 * 7
|
|
|
|
)
|
2022-11-16 06:32:50 +01:00
|
|
|
def currently_used_upload_space_bytes(realm) -> int: # noqa: N805
|
2022-04-13 05:42:12 +02:00
|
|
|
used_space = Attachment.objects.filter(realm=realm).aggregate(Sum("size"))["size__sum"]
|
2019-01-11 13:41:52 +01:00
|
|
|
if used_space is None:
|
|
|
|
return 0
|
|
|
|
return used_space
|
|
|
|
|
2020-05-19 14:38:43 +02:00
|
|
|
def ensure_not_on_limited_plan(self) -> None:
|
2021-10-18 23:28:17 +02:00
|
|
|
if self.plan_type == Realm.PLAN_TYPE_LIMITED:
|
2022-08-08 19:53:11 +02:00
|
|
|
raise JsonableError(str(self.UPGRADE_TEXT_STANDARD))
|
2020-05-19 14:38:43 +02:00
|
|
|
|
2023-11-22 12:33:48 +01:00
|
|
|
def can_enable_restricted_user_access_for_guests(self) -> None:
|
|
|
|
if self.plan_type not in [Realm.PLAN_TYPE_PLUS, Realm.PLAN_TYPE_SELF_HOSTED]:
|
|
|
|
raise JsonableError(str(self.UPGRADE_TEXT_PLUS))
|
|
|
|
|
2016-10-26 18:13:43 +02:00
|
|
|
@property
|
2018-05-11 02:24:34 +02:00
|
|
|
def subdomain(self) -> str:
|
2017-10-02 08:32:09 +02:00
|
|
|
return self.string_id
|
2016-10-26 18:13:43 +02:00
|
|
|
|
2017-10-19 08:51:29 +02:00
|
|
|
@property
|
2018-05-11 02:24:34 +02:00
|
|
|
def display_subdomain(self) -> str:
|
2017-10-19 08:51:29 +02:00
|
|
|
"""Likely to be temporary function to avoid signup messages being sent
|
|
|
|
to an empty topic"""
|
|
|
|
if self.string_id == "":
|
|
|
|
return "."
|
|
|
|
return self.string_id
|
|
|
|
|
2016-08-14 00:57:45 +02:00
|
|
|
@property
|
2017-11-27 07:33:05 +01:00
|
|
|
def uri(self) -> str:
|
2017-10-20 04:40:20 +02:00
|
|
|
return settings.EXTERNAL_URI_SCHEME + self.host
|
2016-08-14 00:57:45 +02:00
|
|
|
|
2016-08-19 03:48:40 +02:00
|
|
|
@property
|
2017-11-27 07:33:05 +01:00
|
|
|
def host(self) -> str:
|
2019-12-20 00:00:45 +01:00
|
|
|
# Use mark sanitized to prevent false positives from Pysa thinking that
|
|
|
|
# the host is user controlled.
|
|
|
|
return mark_sanitized(self.host_for_subdomain(self.subdomain))
|
2017-10-27 03:27:29 +02:00
|
|
|
|
|
|
|
@staticmethod
|
2017-11-27 07:33:05 +01:00
|
|
|
def host_for_subdomain(subdomain: str) -> str:
|
2017-10-20 06:36:50 +02:00
|
|
|
if subdomain == Realm.SUBDOMAIN_FOR_ROOT_DOMAIN:
|
|
|
|
return settings.EXTERNAL_HOST
|
2020-06-10 06:41:04 +02:00
|
|
|
default_host = f"{subdomain}.{settings.EXTERNAL_HOST}"
|
2017-10-20 06:36:50 +02:00
|
|
|
return settings.REALM_HOSTS.get(subdomain, default_host)
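# Sketch of the mapping (hostnames are illustrative), assuming
# EXTERNAL_HOST = "zulip.example.com" and no REALM_HOSTS override:
#
#     Realm.host_for_subdomain("acme")
#     # -> "acme.zulip.example.com"
#     Realm.host_for_subdomain(Realm.SUBDOMAIN_FOR_ROOT_DOMAIN)
#     # -> "zulip.example.com"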
|
2016-08-19 03:48:40 +02:00
|
|
|
|
2016-07-27 02:09:10 +02:00
|
|
|
@property
|
2017-11-27 07:33:05 +01:00
|
|
|
def is_zephyr_mirror_realm(self) -> bool:
|
2017-03-04 09:19:37 +01:00
|
|
|
return self.string_id == "zephyr"
|
2016-07-27 02:09:10 +02:00
|
|
|
|
2016-07-27 01:45:29 +02:00
|
|
|
@property
|
2017-11-27 07:33:05 +01:00
|
|
|
def webathena_enabled(self) -> bool:
|
2016-07-27 01:45:29 +02:00
|
|
|
return self.is_zephyr_mirror_realm
|
|
|
|
|
|
|
|
@property
|
2017-11-27 07:33:05 +01:00
|
|
|
def presence_disabled(self) -> bool:
|
2016-07-27 01:45:29 +02:00
|
|
|
return self.is_zephyr_mirror_realm
|
|
|
|
|
2022-05-16 20:35:54 +02:00
|
|
|
def web_public_streams_enabled(self) -> bool:
|
2021-09-21 19:49:12 +02:00
|
|
|
if not settings.WEB_PUBLIC_STREAMS_ENABLED:
|
|
|
|
# To help protect against accidental web-public streams in
|
|
|
|
# self-hosted servers, we require the feature to be enabled at
|
|
|
|
# the server level before it is available to users.
|
|
|
|
return False
|
|
|
|
|
2021-10-18 23:28:17 +02:00
|
|
|
if self.plan_type == Realm.PLAN_TYPE_LIMITED:
|
2021-09-21 19:49:12 +02:00
|
|
|
# In Zulip Cloud, we also require a paid or sponsored
|
|
|
|
# plan, to protect against the spam/abuse attacks that
|
|
|
|
# target every open Internet service that can host files.
|
|
|
|
return False
|
|
|
|
|
2021-11-23 12:23:48 +01:00
|
|
|
if not self.enable_spectator_access:
|
|
|
|
return False
|
|
|
|
|
2021-09-21 19:49:12 +02:00
|
|
|
return True
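# Restating the checks above: web-public streams are available only when
# settings.WEB_PUBLIC_STREAMS_ENABLED is set, the realm is not on the
# limited plan, and enable_spectator_access is enabled for the realm.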
|
|
|
|
|
2020-12-24 12:39:27 +01:00
|
|
|
def has_web_public_streams(self) -> bool:
|
2021-09-21 19:49:12 +02:00
|
|
|
if not self.web_public_streams_enabled():
|
2020-12-24 12:39:27 +01:00
|
|
|
return False
|
|
|
|
|
2021-09-28 01:10:40 +02:00
|
|
|
from zerver.lib.streams import get_web_public_streams_queryset
|
|
|
|
|
|
|
|
return get_web_public_streams_queryset(self).exists()
|
2020-12-24 12:39:27 +01:00
|
|
|
|
2021-10-03 14:16:07 +02:00
|
|
|
def allow_web_public_streams_access(self) -> bool:
|
|
|
|
"""
|
|
|
|
If any of the streams in the realm is web
|
2021-11-23 12:23:48 +01:00
|
|
|
public, and both `enable_spectator_access` and
|
|
|
|
settings.WEB_PUBLIC_STREAMS_ENABLED are True,
|
2022-01-29 00:54:13 +01:00
|
|
|
then the Realm is web-public.
|
2021-10-03 14:16:07 +02:00
|
|
|
"""
|
2021-11-23 12:23:48 +01:00
|
|
|
return self.has_web_public_streams()
|
2021-10-03 14:16:07 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-08-04 16:47:36 +02:00
|
|
|
post_save.connect(flush_realm, sender=Realm)
|
|
|
|
|
|
|
|
|
|
|
|
# We register realm cache flushing in a duplicate way, to be run at both
|
|
|
|
# pre_delete and post_delete on purpose:
|
|
|
|
# 1. pre_delete is needed because flush_realm wants to flush the UserProfile caches,
|
|
|
|
# and UserProfile objects are deleted via on_delete=CASCADE before the post_delete handler
|
|
|
|
# is called, which results in the `flush_realm` logic not having access to the details
|
|
|
|
# for the deleted users if called at that time.
|
|
|
|
# 2. post_delete is run as a precaution to reduce the risk of races where items might be
|
|
|
|
# added to the cache after the pre_delete handler runs but before the deletion completes.
|
|
|
|
# Note that it does not eliminate this risk, not least because it only flushes
|
|
|
|
# the realm cache, and not the user caches, for the reasons explained above.
|
|
|
|
def realm_pre_and_post_delete_handler(*, instance: Realm, **kwargs: object) -> None:
|
2020-10-30 17:18:05 +01:00
|
|
|
# This would be better as a functools.partial, but for some reason
|
|
|
|
# Django doesn't call it even when it's registered as a post_delete handler.
|
2021-07-16 00:45:17 +02:00
|
|
|
flush_realm(instance=instance, from_deletion=True)
|
2020-10-30 17:18:05 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-08-04 16:47:36 +02:00
|
|
|
pre_delete.connect(realm_pre_and_post_delete_handler, sender=Realm)
|
|
|
|
post_delete.connect(realm_pre_and_post_delete_handler, sender=Realm)
|
2014-01-28 18:18:19 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_realm(string_id: str) -> Realm:
|
2019-05-04 04:47:44 +02:00
|
|
|
return Realm.objects.get(string_id=string_id)
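# Usage sketch (the string_id is illustrative): get_realm("acme") returns the
# matching Realm or raises Realm.DoesNotExist, per standard QuerySet.get()
# semantics.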
|
2016-11-11 19:32:15 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-11-16 19:33:10 +01:00
|
|
|
def get_realm_by_id(realm_id: int) -> Realm:
|
|
|
|
return Realm.objects.get(id=realm_id)
|
|
|
|
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def name_changes_disabled(realm: Optional[Realm]) -> bool:
|
2016-11-11 19:32:15 +01:00
|
|
|
if realm is None:
|
|
|
|
return settings.NAME_CHANGES_DISABLED
|
|
|
|
return settings.NAME_CHANGES_DISABLED or realm.name_changes_disabled
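# I.e. the server-wide NAME_CHANGES_DISABLED setting always wins; the
# per-realm flag can only further restrict name changes, never re-enable them.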
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-04-23 04:51:04 +02:00
|
|
|
def avatar_changes_disabled(realm: Realm) -> bool:
|
2019-04-29 08:41:00 +02:00
|
|
|
return settings.AVATAR_CHANGES_DISABLED or realm.avatar_changes_disabled
|
2019-04-23 04:51:04 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-02 20:34:00 +02:00
|
|
|
def get_org_type_display_name(org_type: int) -> str:
|
2023-07-31 22:16:30 +02:00
|
|
|
for realm_type_details in Realm.ORG_TYPES.values():
|
2021-07-02 20:34:00 +02:00
|
|
|
if realm_type_details["id"] == org_type:
|
|
|
|
return realm_type_details["name"]
|
|
|
|
|
|
|
|
return ""
|
|
|
|
|
|
|
|
|
2017-03-31 16:20:07 +02:00
|
|
|
class RealmDomain(models.Model):
|
2018-07-27 23:26:29 +02:00
|
|
|
"""For an organization with emails_restricted_to_domains enabled, the list of
|
2018-07-25 00:29:05 +02:00
|
|
|
allowed domains"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
2016-09-28 00:03:13 +02:00
|
|
|
# should always be stored lowercase
|
2022-08-15 19:10:58 +02:00
|
|
|
domain = models.CharField(max_length=80, db_index=True)
|
|
|
|
allow_subdomains = models.BooleanField(default=False)
|
2017-01-21 08:19:03 +01:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2017-01-21 08:19:03 +01:00
|
|
|
unique_together = ("realm", "domain")
|
2016-10-29 04:58:44 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-03-14 12:54:05 +01:00
|
|
|
class DomainNotAllowedForRealmError(Exception):
|
|
|
|
pass
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-03-14 13:25:26 +01:00
|
|
|
class DisposableEmailError(Exception):
|
|
|
|
pass
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-06-20 13:08:07 +02:00
|
|
|
class EmailContainsPlusError(Exception):
|
|
|
|
pass
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-23 20:04:16 +02:00
|
|
|
class RealmDomainDict(TypedDict):
|
|
|
|
domain: str
|
|
|
|
allow_subdomains: bool
|
|
|
|
|
|
|
|
|
|
|
|
def get_realm_domains(realm: Realm) -> List[RealmDomainDict]:
|
2021-02-12 08:20:45 +01:00
|
|
|
return list(realm.realmdomain_set.values("domain", "allow_subdomains"))
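# Example return value (the domain is illustrative):
#
#     [{"domain": "example.com", "allow_subdomains": False}]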
|
2016-09-28 00:08:36 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class RealmEmoji(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
author = models.ForeignKey(
|
2021-02-12 08:19:30 +01:00
|
|
|
"UserProfile",
|
|
|
|
blank=True,
|
|
|
|
null=True,
|
|
|
|
on_delete=CASCADE,
|
2020-04-22 01:09:50 +02:00
|
|
|
)
|
2022-08-15 19:10:58 +02:00
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
|
|
|
name = models.TextField(
|
2021-02-12 08:19:30 +01:00
|
|
|
validators=[
|
|
|
|
MinLengthValidator(1),
|
|
|
|
# The second part of the regex (negative lookbehind) disallows names
|
|
|
|
# ending with one of the punctuation characters.
|
|
|
|
RegexValidator(
|
2021-02-12 08:20:45 +01:00
|
|
|
regex=r"^[0-9a-z.\-_]+(?<![.\-_])$",
|
2021-04-16 00:57:30 +02:00
|
|
|
message=gettext_lazy("Invalid characters in emoji name"),
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
|
|
|
]
|
|
|
|
)
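# For example (illustrative names), "octo-cat" and "thumbs_up" satisfy the
# validators above, while "ThumbsUp" (uppercase) and "thumbs_up_" (trailing
# punctuation) do not.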
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2018-07-25 00:29:05 +02:00
|
|
|
# The basename of the custom emoji's filename; see PATH_ID_TEMPLATE for the full path.
|
2022-08-15 19:10:58 +02:00
|
|
|
file_name = models.TextField(db_index=True, null=True, blank=True)
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2021-08-12 10:19:53 +02:00
|
|
|
# Whether this custom emoji is an animated image.
|
2022-08-15 19:10:58 +02:00
|
|
|
is_animated = models.BooleanField(default=False)
|
2021-08-12 10:19:53 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
deactivated = models.BooleanField(default=False)
|
2017-03-13 05:45:50 +01:00
|
|
|
|
2018-03-11 18:55:20 +01:00
|
|
|
PATH_ID_TEMPLATE = "{realm_id}/emoji/images/{emoji_file_name}"
|
2021-08-12 10:19:53 +02:00
|
|
|
STILL_PATH_ID_TEMPLATE = "{realm_id}/emoji/images/still/{emoji_filename_without_extension}.png"
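# Example expansion (IDs and filenames are illustrative):
#
#     RealmEmoji.PATH_ID_TEMPLATE.format(realm_id=2, emoji_file_name="42.png")
#     # -> "2/emoji/images/42.png"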
|
2013-08-22 16:56:37 +02:00
|
|
|
|
2021-12-03 03:11:54 +01:00
|
|
|
class Meta:
|
|
|
|
constraints = [
|
|
|
|
models.UniqueConstraint(
|
|
|
|
fields=["realm", "name"],
|
|
|
|
condition=Q(deactivated=False),
|
|
|
|
name="unique_realm_emoji_when_false_deactivated",
|
|
|
|
),
|
|
|
|
]
|
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2023-04-12 22:40:35 +02:00
|
|
|
def __str__(self) -> str:
|
|
|
|
return f"{self.realm.string_id}: {self.id} {self.name} {self.deactivated} {self.file_name}"
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-14 12:37:29 +02:00
|
|
|
def get_all_custom_emoji_for_realm_uncached(realm_id: int) -> Dict[str, EmojiInfo]:
|
2022-08-09 20:41:50 +02:00
|
|
|
# RealmEmoji objects with file_name=None are still in the process
|
|
|
|
# of being uploaded, and we expect them to be cleaned up by a
|
|
|
|
# try/finally block if the upload fails, so it's correct to
|
|
|
|
# exclude them.
|
2023-07-14 11:54:22 +02:00
|
|
|
query = RealmEmoji.objects.filter(realm_id=realm_id).exclude(
|
|
|
|
file_name=None,
|
2022-08-09 20:41:50 +02:00
|
|
|
)
|
2013-08-22 16:56:37 +02:00
|
|
|
d = {}
|
2017-03-13 05:45:50 +01:00
|
|
|
from zerver.lib.emoji import get_emoji_url
|
2018-03-18 16:30:58 +01:00
|
|
|
|
|
|
|
for realm_emoji in query.all():
|
2023-07-14 11:54:22 +02:00
|
|
|
author_id = realm_emoji.author_id
|
2022-08-05 23:10:04 +02:00
|
|
|
assert realm_emoji.file_name is not None
|
2018-03-18 16:30:58 +01:00
|
|
|
emoji_url = get_emoji_url(realm_emoji.file_name, realm_emoji.realm_id)
|
2021-08-12 10:19:53 +02:00
|
|
|
|
2021-12-29 16:16:15 +01:00
|
|
|
emoji_dict: EmojiInfo = dict(
|
2021-02-12 08:19:30 +01:00
|
|
|
id=str(realm_emoji.id),
|
|
|
|
name=realm_emoji.name,
|
|
|
|
source_url=emoji_url,
|
|
|
|
deactivated=realm_emoji.deactivated,
|
|
|
|
author_id=author_id,
|
2021-12-29 16:16:15 +01:00
|
|
|
still_url=None,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2021-08-12 10:19:53 +02:00
|
|
|
|
|
|
|
if realm_emoji.is_animated:
|
|
|
|
# For animated emoji, we include still_url with a static
|
|
|
|
# version of the image, so that clients can display the
|
|
|
|
# emoji in a less distracting (not animated) fashion when
|
|
|
|
# desired.
|
|
|
|
emoji_dict["still_url"] = get_emoji_url(
|
|
|
|
realm_emoji.file_name, realm_emoji.realm_id, still=True
|
|
|
|
)
|
|
|
|
|
|
|
|
d[str(realm_emoji.id)] = emoji_dict
|
|
|
|
|
2013-08-22 16:56:37 +02:00
|
|
|
return d
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-14 12:37:29 +02:00
|
|
|
@cache_with_key(get_all_custom_emoji_for_realm_cache_key, timeout=3600 * 24 * 7)
|
|
|
|
def get_all_custom_emoji_for_realm(realm_id: int) -> Dict[str, EmojiInfo]:
|
|
|
|
return get_all_custom_emoji_for_realm_uncached(realm_id)
|
2018-03-18 16:30:58 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-14 12:37:29 +02:00
|
|
|
def get_name_keyed_dict_for_active_realm_emoji(realm_id: int) -> Dict[str, EmojiInfo]:
|
|
|
|
# It's important to use the cached version here.
|
|
|
|
realm_emojis = get_all_custom_emoji_for_realm(realm_id)
|
|
|
|
return {row["name"]: row for row in realm_emojis.values() if not row["deactivated"]}
|
2018-03-11 18:48:56 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-16 00:45:17 +02:00
|
|
|
def flush_realm_emoji(*, instance: RealmEmoji, **kwargs: object) -> None:
|
2022-08-05 23:10:04 +02:00
|
|
|
if instance.file_name is None:
|
|
|
|
# Because we construct RealmEmoji.file_name using the ID for
|
|
|
|
# the RealmEmoji object, it is always created with file_name=None,
|
|
|
|
# and then it'll be updated with the actual filename as soon
|
|
|
|
# as the upload completes successfully.
|
|
|
|
#
|
|
|
|
# Doing nothing when file_name=None is the best option, since
|
|
|
|
# such an object shouldn't have been cached yet, and this
|
|
|
|
# function will be called again when file_name is set.
|
|
|
|
return
|
2023-06-08 21:36:07 +02:00
|
|
|
realm_id = instance.realm_id
|
2021-02-12 08:19:30 +01:00
|
|
|
cache_set(
|
2023-07-14 12:37:29 +02:00
|
|
|
get_all_custom_emoji_for_realm_cache_key(realm_id),
|
|
|
|
get_all_custom_emoji_for_realm_uncached(realm_id),
|
2021-02-12 08:19:30 +01:00
|
|
|
timeout=3600 * 24 * 7,
|
|
|
|
)
|
|
|
|
|
2013-08-22 16:56:37 +02:00
|
|
|
|
2014-01-28 20:53:57 +01:00
|
|
|
post_save.connect(flush_realm_emoji, sender=RealmEmoji)
|
|
|
|
post_delete.connect(flush_realm_emoji, sender=RealmEmoji)
|
2013-08-22 16:56:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-06-07 11:38:57 +02:00
|
|
|
def filter_pattern_validator(value: str) -> Pattern[str]:
|
2016-02-13 19:17:15 +01:00
|
|
|
try:
|
2021-09-29 01:27:54 +02:00
|
|
|
# Do not write errors to stderr (this still raises exceptions)
|
|
|
|
options = re2.Options()
|
|
|
|
options.log_errors = False
|
|
|
|
|
|
|
|
regex = re2.compile(value, options=options)
|
|
|
|
except re2.error as e:
|
|
|
|
if len(e.args) >= 1:
|
|
|
|
if isinstance(e.args[0], str): # nocoverage
|
2023-07-17 22:40:33 +02:00
|
|
|
raise ValidationError(_("Bad regular expression: {regex}").format(regex=e.args[0]))
|
2021-09-29 01:27:54 +02:00
|
|
|
if isinstance(e.args[0], bytes):
|
2023-07-17 22:40:33 +02:00
|
|
|
raise ValidationError(
|
|
|
|
_("Bad regular expression: {regex}").format(regex=e.args[0].decode())
|
|
|
|
)
|
2021-09-29 01:27:54 +02:00
|
|
|
raise ValidationError(_("Unknown regular expression error")) # nocoverage
|
|
|
|
|
|
|
|
return regex
|
2021-06-07 11:38:57 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-10-02 22:52:31 +02:00
|
|
|
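# Illustrative sketch (not part of the original module) of how the pattern validator
# above is expected to behave: a pattern using only re2-supported syntax with a named
# group compiles and is returned, while a pattern using syntax re2 rejects (for
# example, a lookahead) raises ValidationError instead.
#
#     filter_pattern_validator(r"#(?P<id>[0-9]+)")  # returns a compiled re2 pattern
#     filter_pattern_validator(r"(?!foo)bar")       # raises ValidationError

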
def url_template_validator(value: str) -> None:
    """Validate as a URL template"""
    if not uri_template.validate(value):
        raise ValidationError(_("Invalid URL template."))


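# Illustrative sketch (assumption, not part of the original module): templates that
# pass and fail url_template_validator above, per the RFC 6570 syntax checked by
# uri_template.validate.
#
#     url_template_validator("https://example.com/ticket/{id}")  # passes silently
#     url_template_validator("https://example.com/ticket/{id")   # raises ValidationError

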
class RealmFilter(models.Model):
    """Realm-specific regular expressions to automatically linkify certain
    strings inside the Markdown processor. See "Custom filters" in the settings UI.
    """

    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    pattern = models.TextField()
    url_template = models.TextField(validators=[url_template_validator])
    # Linkifiers are applied in a message/topic in order; the processing order
    # is important when there are overlapping patterns.
    order = models.IntegerField(default=0)

    class Meta:
        unique_together = ("realm", "pattern")

    @override
    def __str__(self) -> str:
        return f"{self.realm.string_id}: {self.pattern} {self.url_template}"

    @override
    def clean(self) -> None:
        """Validate whether the set of parameters in the URL template
        matches the set of parameters in the regular expression.

        Django's `full_clean` calls `clean_fields` followed by `clean` method
        and stores all ValidationErrors from all stages to return as JSON.
        """

        # Extract variables present in the pattern.
        pattern = filter_pattern_validator(self.pattern)
        group_set = set(pattern.groupindex.keys())

        # Do not continue the check if the url template is invalid to begin with.
        # The ValidationError for invalid template will only be raised by the validator
        # set on the url_template field instead of here to avoid duplicates.
        if not uri_template.validate(self.url_template):
            return

        # Extract variables used in the URL template.
        template_variables_set = set(uri_template.URITemplate(self.url_template).variable_names)

        # Report variables in the URL template that are missing from the linkifier pattern.
        missing_in_pattern_set = template_variables_set - group_set
        if len(missing_in_pattern_set) > 0:
            name = min(missing_in_pattern_set)
            raise ValidationError(
                _("Group %(name)r in URL template is not present in linkifier pattern."),
                params={"name": name},
            )

        # Report groups in the linkifier pattern that are missing from the URL template.
        missing_in_url_set = group_set - template_variables_set
        if len(missing_in_url_set) > 0:
            # We just report the first missing pattern here. Users can
            # incrementally resolve errors if there are multiple
            # missing patterns.
            name = min(missing_in_url_set)
            raise ValidationError(
                _("Group %(name)r in linkifier pattern is not present in URL template."),
                params={"name": name},
            )


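# Illustrative usage sketch (assumes an existing `realm` object; not part of the
# original module): creating a linkifier and letting full_clean run the field
# validators plus RealmFilter.clean before saving.
#
#     linkifier = RealmFilter(
#         realm=realm,
#         pattern=r"TICKET-(?P<id>[0-9]+)",
#         url_template="https://tracker.example.com/ticket/{id}",
#     )
#     linkifier.full_clean()  # validates pattern, template, and that their variables match
#     linkifier.save()

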
def get_linkifiers_cache_key(realm_id: int) -> str:
|
|
|
|
return f"{cache.KEY_PREFIX}:all_linkifiers_for_realm:{realm_id}"
|
2013-12-06 23:02:52 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-14 19:46:50 +02:00
|
|
|
@return_same_value_during_entire_request
|
2021-03-30 12:15:39 +02:00
|
|
|
@cache_with_key(get_linkifiers_cache_key, timeout=3600 * 24 * 7)
|
2023-07-14 19:46:50 +02:00
|
|
|
def linkifiers_for_realm(realm_id: int) -> List[LinkifierDict]:
|
2023-07-31 22:52:35 +02:00
|
|
|
return [
|
|
|
|
LinkifierDict(
|
|
|
|
pattern=linkifier.pattern,
|
|
|
|
url_template=linkifier.url_template,
|
|
|
|
id=linkifier.id,
|
2021-03-30 12:38:49 +02:00
|
|
|
)
|
2023-08-10 04:09:25 +02:00
|
|
|
for linkifier in RealmFilter.objects.filter(realm_id=realm_id).order_by("order")
|
2023-07-31 22:52:35 +02:00
|
|
|
]
|
2013-12-06 23:02:52 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-16 00:45:17 +02:00
|
|
|
def flush_linkifiers(*, instance: RealmFilter, **kwargs: object) -> None:
|
|
|
|
realm_id = instance.realm_id
|
2021-03-30 12:15:39 +02:00
|
|
|
cache_delete(get_linkifiers_cache_key(realm_id))
|
2023-07-14 19:46:50 +02:00
|
|
|
flush_per_request_cache("linkifiers_for_realm")
|
2013-12-06 23:02:52 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-03-30 12:15:39 +02:00
|
|
|
post_save.connect(flush_linkifiers, sender=RealmFilter)
|
|
|
|
post_delete.connect(flush_linkifiers, sender=RealmFilter)
|
2013-12-06 23:02:52 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-14 12:07:09 +01:00
|
|
|
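# Note on the cache flow above: saving or deleting a RealmFilter triggers
# flush_linkifiers via the post_save/post_delete signals, which drops both the
# realm's memcached entry (keyed by get_linkifiers_cache_key) and the per-request
# cache for linkifiers_for_realm, so the next call rebuilds the list ordered by
# the `order` field.

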
class RealmPlayground(models.Model):
    """Server side storage model to store playground information needed by our
    'view code in playground' feature in code blocks.
    """

    MAX_PYGMENTS_LANGUAGE_LENGTH = 40

    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    url_template = models.TextField(validators=[url_template_validator])

    # User-visible display name used when configuring playgrounds in the settings page and
    # when displaying them in the playground links popover.
    name = models.TextField(db_index=True)

    # This stores the pygments lexer subclass names and not the aliases themselves.
    pygments_language = models.CharField(
        db_index=True,
        max_length=MAX_PYGMENTS_LANGUAGE_LENGTH,
        # We validate to see if this conforms to the character set allowed for a
        # language in the code block.
        validators=[
            RegexValidator(
                regex=r"^[ a-zA-Z0-9_+-./#]*$", message=_("Invalid characters in pygments language")
            )
        ],
    )

    class Meta:
        unique_together = (("realm", "pygments_language", "name"),)

    @override
    def __str__(self) -> str:
        return f"{self.realm.string_id}: {self.pygments_language} {self.name}"

    @override
    def clean(self) -> None:
        """Validate whether the URL template is valid for the playground,
        ensuring that "code" is the sole variable present in it.

        Django's `full_clean` calls `clean_fields` followed by `clean` method
        and stores all ValidationErrors from all stages to return as JSON.
        """

        # Do not continue the check if the url template is invalid to begin
        # with. The ValidationError for invalid template will only be raised by
        # the validator set on the url_template field instead of here to avoid
        # duplicates.
        if not uri_template.validate(self.url_template):
            return

        # Extract variables used in the URL template.
        template_variables = set(uri_template.URITemplate(self.url_template).variable_names)

        if "code" not in template_variables:
            raise ValidationError(_('Missing the required variable "code" in the URL template'))

        # The URL template should only contain a single variable, which is "code".
        if len(template_variables) != 1:
            raise ValidationError(
                _('"code" should be the only variable present in the URL template'),
            )


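# Illustrative sketch (assumption, not part of the original module): URL templates
# accepted and rejected by RealmPlayground.clean above.
#
#     "https://play.example.com/run?code={code}"               # valid: "code" is the only variable
#     "https://play.example.com/run?lang={lang}&code={code}"   # rejected: extra variable "lang"
#     "https://play.example.com/run"                           # rejected: missing "code"

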
def get_realm_playgrounds(realm: Realm) -> List[RealmPlaygroundDict]:
    return [
        RealmPlaygroundDict(
            id=playground.id,
            name=playground.name,
            pygments_language=playground.pygments_language,
            url_template=playground.url_template,
        )
        for playground in RealmPlayground.objects.filter(realm=realm).all()
    ]


class Recipient(models.Model):
    """Represents an audience that can potentially receive messages in Zulip.

    This table essentially functions as a generic foreign key that
    allows Message.recipient_id to be a simple ForeignKey representing
    the audience for a message, while supporting the different types
    of audiences Zulip supports for a message.

    Recipient has just two attributes: The enum type, and a type_id,
    which is the ID of the UserProfile/Stream/Huddle object containing
    all the metadata for the audience. There are 3 recipient types:

    1. 1:1 direct message: The type_id is the ID of the UserProfile
       who will receive any message to this Recipient. The sender
       of such a message is represented separately.
    2. Stream message: The type_id is the ID of the associated Stream.
    3. Group direct message: In Zulip, group direct messages are
       represented by Huddle objects, which encode the set of users
       in the conversation. The type_id is the ID of the associated Huddle
       object; the set of users is usually retrieved via the Subscription
       table. See the Huddle model for details.

    See also the Subscription model, which stores which UserProfile
    objects are subscribed to which Recipient objects.
    """

    type_id = models.IntegerField(db_index=True)
    type = models.PositiveSmallIntegerField(db_index=True)
    # Valid types are {personal, stream, huddle}

    # The type for 1:1 direct messages.
    PERSONAL = 1
    # The type for stream messages.
    STREAM = 2
    # The type for group direct messages.
    HUDDLE = 3

    class Meta:
        unique_together = ("type", "type_id")

    # N.B. If we used Django's choice=... we would get this for free (kinda)
    _type_names = {PERSONAL: "personal", STREAM: "stream", HUDDLE: "huddle"}

    @override
    def __str__(self) -> str:
        return f"{self.label()} ({self.type_id}, {self.type})"

    def label(self) -> str:
        if self.type == Recipient.STREAM:
            return Stream.objects.get(id=self.type_id).name
        else:
            return str(get_display_recipient(self))

    def type_name(self) -> str:
        # Raises KeyError if invalid
        return self._type_names[self.type]


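# Illustrative sketch (assumption, not part of the original module): how the three
# recipient types above map through type_name().
#
#     Recipient(type=Recipient.PERSONAL, type_id=user_profile.id).type_name()  # "personal"
#     Recipient(type=Recipient.STREAM, type_id=stream.id).type_name()          # "stream"
#     Recipient(type=Recipient.HUDDLE, type_id=huddle.id).type_name()          # "huddle"

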
class UserBaseSettings(models.Model):
    """This abstract class is the container for all preferences/personal
    settings for users that control the behavior of the application.

    It was extracted from UserProfile to support the RealmUserDefault
    model (i.e. allow individual realms to configure the default
    values of these preferences for new users in their organization).

    Changing the default value for a field declared here likely
    requires a migration to update all RealmUserDefault rows that had
    the old default value to have the new default value. Otherwise,
    the default change will only affect new users joining Realms
    created after the change.
    """

    ### Generic UI settings
    enter_sends = models.BooleanField(default=False)

    ### Preferences. ###
    # left_side_userlist was removed from the UI in Zulip 6.0; the
    # database model is being temporarily preserved in case we want to
    # restore a version of the setting, preserving who had it enabled.
    left_side_userlist = models.BooleanField(default=False)
    default_language = models.CharField(default="en", max_length=MAX_LANGUAGE_ID_LENGTH)
    # This setting controls which view is rendered first when Zulip loads.
    # Values for it are URL suffix after `#`.
    web_home_view = models.TextField(default="inbox")
    web_escape_navigates_to_home_view = models.BooleanField(default=True)
    dense_mode = models.BooleanField(default=True)
    fluid_layout_width = models.BooleanField(default=False)
    high_contrast_mode = models.BooleanField(default=False)
    translate_emoticons = models.BooleanField(default=False)
    display_emoji_reaction_users = models.BooleanField(default=True)
    twenty_four_hour_time = models.BooleanField(default=False)
    starred_message_counts = models.BooleanField(default=True)
    COLOR_SCHEME_AUTOMATIC = 1
    COLOR_SCHEME_NIGHT = 2
    COLOR_SCHEME_LIGHT = 3
    COLOR_SCHEME_CHOICES = [COLOR_SCHEME_AUTOMATIC, COLOR_SCHEME_NIGHT, COLOR_SCHEME_LIGHT]
    color_scheme = models.PositiveSmallIntegerField(default=COLOR_SCHEME_AUTOMATIC)

    # UI setting controlling Zulip's behavior of demoting in the sort
    # order and graying out streams with no recent traffic. The
    # default behavior, automatic, enables this behavior once a user
    # is subscribed to 30+ streams in the web app.
    DEMOTE_STREAMS_AUTOMATIC = 1
    DEMOTE_STREAMS_ALWAYS = 2
    DEMOTE_STREAMS_NEVER = 3
    DEMOTE_STREAMS_CHOICES = [
        DEMOTE_STREAMS_AUTOMATIC,
        DEMOTE_STREAMS_ALWAYS,
        DEMOTE_STREAMS_NEVER,
    ]
    demote_inactive_streams = models.PositiveSmallIntegerField(default=DEMOTE_STREAMS_AUTOMATIC)

    # UI setting controlling whether or not the Zulip web app will
    # mark messages as read as it scrolls through the feed.

    MARK_READ_ON_SCROLL_ALWAYS = 1
    MARK_READ_ON_SCROLL_CONVERSATION_ONLY = 2
    MARK_READ_ON_SCROLL_NEVER = 3

    WEB_MARK_READ_ON_SCROLL_POLICY_CHOICES = [
        MARK_READ_ON_SCROLL_ALWAYS,
        MARK_READ_ON_SCROLL_CONVERSATION_ONLY,
        MARK_READ_ON_SCROLL_NEVER,
    ]

    web_mark_read_on_scroll_policy = models.SmallIntegerField(default=MARK_READ_ON_SCROLL_ALWAYS)

    # Emoji sets
    GOOGLE_EMOJISET = "google"
    GOOGLE_BLOB_EMOJISET = "google-blob"
    TEXT_EMOJISET = "text"
    TWITTER_EMOJISET = "twitter"
    EMOJISET_CHOICES = (
        (GOOGLE_EMOJISET, "Google"),
        (TWITTER_EMOJISET, "Twitter"),
        (TEXT_EMOJISET, "Plain text"),
        (GOOGLE_BLOB_EMOJISET, "Google blobs"),
    )
    emojiset = models.CharField(default=GOOGLE_EMOJISET, choices=EMOJISET_CHOICES, max_length=20)

    # User list style
    USER_LIST_STYLE_COMPACT = 1
    USER_LIST_STYLE_WITH_STATUS = 2
    USER_LIST_STYLE_WITH_AVATAR = 3
    USER_LIST_STYLE_CHOICES = [
        USER_LIST_STYLE_COMPACT,
        USER_LIST_STYLE_WITH_STATUS,
        USER_LIST_STYLE_WITH_AVATAR,
    ]
    user_list_style = models.PositiveSmallIntegerField(default=USER_LIST_STYLE_WITH_STATUS)

    # Show unread counts for
    WEB_STREAM_UNREADS_COUNT_DISPLAY_POLICY_ALL_STREAMS = 1
    WEB_STREAM_UNREADS_COUNT_DISPLAY_POLICY_UNMUTED_STREAMS = 2
    WEB_STREAM_UNREADS_COUNT_DISPLAY_POLICY_NO_STREAMS = 3
    WEB_STREAM_UNREADS_COUNT_DISPLAY_POLICY_CHOICES = [
        WEB_STREAM_UNREADS_COUNT_DISPLAY_POLICY_ALL_STREAMS,
        WEB_STREAM_UNREADS_COUNT_DISPLAY_POLICY_UNMUTED_STREAMS,
        WEB_STREAM_UNREADS_COUNT_DISPLAY_POLICY_NO_STREAMS,
    ]
    web_stream_unreads_count_display_policy = models.PositiveSmallIntegerField(
        default=WEB_STREAM_UNREADS_COUNT_DISPLAY_POLICY_UNMUTED_STREAMS
    )

    ### Notifications settings. ###

    email_notifications_batching_period_seconds = models.IntegerField(default=120)

    # Stream notifications.
    enable_stream_desktop_notifications = models.BooleanField(default=False)
    enable_stream_email_notifications = models.BooleanField(default=False)
    enable_stream_push_notifications = models.BooleanField(default=False)
    enable_stream_audible_notifications = models.BooleanField(default=False)
    notification_sound = models.CharField(max_length=20, default="zulip")
    wildcard_mentions_notify = models.BooleanField(default=True)

    # Followed Topics notifications.
    enable_followed_topic_desktop_notifications = models.BooleanField(default=True)
    enable_followed_topic_email_notifications = models.BooleanField(default=True)
    enable_followed_topic_push_notifications = models.BooleanField(default=True)
    enable_followed_topic_audible_notifications = models.BooleanField(default=True)
    enable_followed_topic_wildcard_mentions_notify = models.BooleanField(default=True)

    # Direct message + @-mention notifications.
    enable_desktop_notifications = models.BooleanField(default=True)
    pm_content_in_desktop_notifications = models.BooleanField(default=True)
    enable_sounds = models.BooleanField(default=True)
    enable_offline_email_notifications = models.BooleanField(default=True)
    message_content_in_email_notifications = models.BooleanField(default=True)
    enable_offline_push_notifications = models.BooleanField(default=True)
    enable_online_push_notifications = models.BooleanField(default=True)

    DESKTOP_ICON_COUNT_DISPLAY_MESSAGES = 1
    DESKTOP_ICON_COUNT_DISPLAY_DM_MENTION_FOLLOWED_TOPIC = 2
    DESKTOP_ICON_COUNT_DISPLAY_DM_MENTION = 3
    DESKTOP_ICON_COUNT_DISPLAY_NONE = 4
    DESKTOP_ICON_COUNT_DISPLAY_CHOICES = [
        DESKTOP_ICON_COUNT_DISPLAY_MESSAGES,
        DESKTOP_ICON_COUNT_DISPLAY_DM_MENTION,
        DESKTOP_ICON_COUNT_DISPLAY_DM_MENTION_FOLLOWED_TOPIC,
        DESKTOP_ICON_COUNT_DISPLAY_NONE,
    ]
    desktop_icon_count_display = models.PositiveSmallIntegerField(
        default=DESKTOP_ICON_COUNT_DISPLAY_MESSAGES
    )

    enable_digest_emails = models.BooleanField(default=True)
    enable_login_emails = models.BooleanField(default=True)
    enable_marketing_emails = models.BooleanField(default=True)
    presence_enabled = models.BooleanField(default=True)

    REALM_NAME_IN_EMAIL_NOTIFICATIONS_POLICY_AUTOMATIC = 1
    REALM_NAME_IN_EMAIL_NOTIFICATIONS_POLICY_ALWAYS = 2
    REALM_NAME_IN_EMAIL_NOTIFICATIONS_POLICY_NEVER = 3
    REALM_NAME_IN_EMAIL_NOTIFICATIONS_POLICY_CHOICES = [
        REALM_NAME_IN_EMAIL_NOTIFICATIONS_POLICY_AUTOMATIC,
        REALM_NAME_IN_EMAIL_NOTIFICATIONS_POLICY_ALWAYS,
        REALM_NAME_IN_EMAIL_NOTIFICATIONS_POLICY_NEVER,
    ]
    realm_name_in_email_notifications_policy = models.PositiveSmallIntegerField(
        default=REALM_NAME_IN_EMAIL_NOTIFICATIONS_POLICY_AUTOMATIC
    )

    # The following two settings control which topics to automatically
    # 'follow' or 'unmute in a muted stream', respectively.
    # Follow or unmute a topic automatically on:
    # - PARTICIPATION: Send a message, React to a message, Participate in a poll or Edit a TO-DO list.
    # - SEND: Send a message.
    # - INITIATION: Send the first message in the topic.
    # - NEVER: Never automatically follow or unmute a topic.
    AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_PARTICIPATION = 1
    AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_SEND = 2
    AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_INITIATION = 3
    AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_NEVER = 4
    AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_CHOICES = [
        AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_PARTICIPATION,
        AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_SEND,
        AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_INITIATION,
        AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_NEVER,
    ]
    automatically_follow_topics_policy = models.PositiveSmallIntegerField(
        default=AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_INITIATION,
    )
    automatically_unmute_topics_in_muted_streams_policy = models.PositiveSmallIntegerField(
        default=AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_SEND,
    )
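
    # Restating the defaults above for clarity: with these defaults, a user who starts
    # a new topic automatically follows it (ON_INITIATION), and sending any message in
    # a topic of a muted stream automatically unmutes that topic (ON_SEND).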

    # Whether or not the user wants to sync their drafts.
    enable_drafts_synchronization = models.BooleanField(default=True)

    # Privacy settings
    send_stream_typing_notifications = models.BooleanField(default=True)
    send_private_typing_notifications = models.BooleanField(default=True)
    send_read_receipts = models.BooleanField(default=True)

    # Who in the organization has access to users' actual email
    # addresses. Controls whether the UserProfile.email field is
    # the same as UserProfile.delivery_email, or is instead a fake
    # generated value encoding the user ID and realm hostname.
    EMAIL_ADDRESS_VISIBILITY_EVERYONE = 1
    EMAIL_ADDRESS_VISIBILITY_MEMBERS = 2
    EMAIL_ADDRESS_VISIBILITY_ADMINS = 3
    EMAIL_ADDRESS_VISIBILITY_NOBODY = 4
    EMAIL_ADDRESS_VISIBILITY_MODERATORS = 5
    email_address_visibility = models.PositiveSmallIntegerField(
        default=EMAIL_ADDRESS_VISIBILITY_EVERYONE,
    )

    EMAIL_ADDRESS_VISIBILITY_ID_TO_NAME_MAP = {
        EMAIL_ADDRESS_VISIBILITY_EVERYONE: gettext_lazy("Admins, moderators, members and guests"),
        EMAIL_ADDRESS_VISIBILITY_MEMBERS: gettext_lazy("Admins, moderators and members"),
        EMAIL_ADDRESS_VISIBILITY_MODERATORS: gettext_lazy("Admins and moderators"),
        EMAIL_ADDRESS_VISIBILITY_ADMINS: gettext_lazy("Admins only"),
        EMAIL_ADDRESS_VISIBILITY_NOBODY: gettext_lazy("Nobody"),
    }

    EMAIL_ADDRESS_VISIBILITY_TYPES = list(EMAIL_ADDRESS_VISIBILITY_ID_TO_NAME_MAP.keys())

    display_settings_legacy = dict(
        # Don't add anything new to this legacy dict.
        # Instead, see `modern_settings` below.
        color_scheme=int,
        default_language=str,
        web_home_view=str,
        demote_inactive_streams=int,
        dense_mode=bool,
        emojiset=str,
        enable_drafts_synchronization=bool,
        enter_sends=bool,
        fluid_layout_width=bool,
        high_contrast_mode=bool,
        left_side_userlist=bool,
        starred_message_counts=bool,
        translate_emoticons=bool,
        twenty_four_hour_time=bool,
    )

    notification_settings_legacy = dict(
        # Don't add anything new to this legacy dict.
        # Instead, see `modern_notification_settings` below.
        desktop_icon_count_display=int,
        email_notifications_batching_period_seconds=int,
        enable_desktop_notifications=bool,
        enable_digest_emails=bool,
        enable_login_emails=bool,
        enable_marketing_emails=bool,
        enable_offline_email_notifications=bool,
        enable_offline_push_notifications=bool,
        enable_online_push_notifications=bool,
        enable_sounds=bool,
        enable_stream_audible_notifications=bool,
        enable_stream_desktop_notifications=bool,
        enable_stream_email_notifications=bool,
        enable_stream_push_notifications=bool,
        message_content_in_email_notifications=bool,
        notification_sound=str,
        pm_content_in_desktop_notifications=bool,
        presence_enabled=bool,
        realm_name_in_email_notifications_policy=int,
        wildcard_mentions_notify=bool,
    )

    modern_settings = dict(
        # Add new general settings here.
        display_emoji_reaction_users=bool,
        email_address_visibility=int,
        web_escape_navigates_to_home_view=bool,
        send_private_typing_notifications=bool,
        send_read_receipts=bool,
        send_stream_typing_notifications=bool,
        web_mark_read_on_scroll_policy=int,
        user_list_style=int,
        web_stream_unreads_count_display_policy=int,
    )

    modern_notification_settings: Dict[str, Any] = dict(
        # Add new notification settings here.
        enable_followed_topic_desktop_notifications=bool,
        enable_followed_topic_email_notifications=bool,
        enable_followed_topic_push_notifications=bool,
        enable_followed_topic_audible_notifications=bool,
        enable_followed_topic_wildcard_mentions_notify=bool,
        automatically_follow_topics_policy=int,
        automatically_unmute_topics_in_muted_streams_policy=int,
    )

    notification_setting_types = {
        **notification_settings_legacy,
        **modern_notification_settings,
    }

    # Define the types of the various automatically managed properties
    property_types = {
        **display_settings_legacy,
        **notification_setting_types,
        **modern_settings,
    }
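
    # Illustrative note (assumption, not part of the original module): property_types
    # maps each syncable setting to its expected Python type, e.g.
    #
    #     property_types["color_scheme"] is int
    #     property_types["send_read_receipts"] is bool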

    class Meta:
        abstract = True

    @staticmethod
    def emojiset_choices() -> List[Dict[str, str]]:
        return [
            dict(key=emojiset[0], text=emojiset[1]) for emojiset in UserProfile.EMOJISET_CHOICES
        ]


class RealmUserDefault(UserBaseSettings):
    """This table stores realm-level default values for user preferences
    like notification settings, used when creating a new user account.
    """

    realm = models.OneToOneField(Realm, on_delete=CASCADE)


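# Illustrative sketch (assumes an existing `realm` object; not part of the original
# module): reading a realm's default for one of the inherited UserBaseSettings fields.
#
#     realm_default = RealmUserDefault.objects.get(realm=realm)
#     realm_default.enable_drafts_synchronization  # the boolean default applied to new users

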
class UserProfile(AbstractBaseUser, PermissionsMixin, UserBaseSettings):
    USERNAME_FIELD = "email"
    MAX_NAME_LENGTH = 100
    MIN_NAME_LENGTH = 2
    API_KEY_LENGTH = 32
    NAME_INVALID_CHARS = ["*", "`", "\\", ">", '"', "@"]

    DEFAULT_BOT = 1
    """
    Incoming webhook bots are limited to only sending messages via webhooks.
    Thus, it is less of a security risk to expose their API keys to third-party services,
    since they can't be used to read messages.
    """
    INCOMING_WEBHOOK_BOT = 2
    # This value is also being used in web/src/settings_bots.js.
    # On updating it here, update it there as well.
    OUTGOING_WEBHOOK_BOT = 3
    """
    Embedded bots run within the Zulip server itself; events are added to the
    embedded_bots queue and then handled by a QueueProcessingWorker.
    """
    EMBEDDED_BOT = 4

    BOT_TYPES = {
        DEFAULT_BOT: "Generic bot",
        INCOMING_WEBHOOK_BOT: "Incoming webhook",
        OUTGOING_WEBHOOK_BOT: "Outgoing webhook",
        EMBEDDED_BOT: "Embedded bot",
    }

    SERVICE_BOT_TYPES = [
        OUTGOING_WEBHOOK_BOT,
        EMBEDDED_BOT,
    ]
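
    # Illustrative note (assumption, not from the original file): SERVICE_BOT_TYPES
    # above covers the bot types whose messages are handed off to a service queue
    # (outgoing webhook / embedded bot workers) rather than only delivered directly.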

    # For historical reasons, Zulip has two email fields. The
    # `delivery_email` field is the user's email address, where all
    # email notifications will be sent, and is used for all
    # authentication use cases.
    #
    # The `email` field is the same as delivery_email in organizations
    # with EMAIL_ADDRESS_VISIBILITY_EVERYONE. For other
    # organizations, it will be a unique value of the form
    # user1234@example.com. This field exists for backwards
    # compatibility in Zulip APIs where users are referred to by their
    # email address, not their ID; it should be used in all API use cases.
    #
    # Both fields are unique within a realm (in a case-insensitive
    # fashion). Since Django's unique_together is case sensitive, this
    # is enforced via SQL indexes created by
    # zerver/migrations/0295_case_insensitive_email_indexes.py.
    delivery_email = models.EmailField(blank=False, db_index=True)
    email = models.EmailField(blank=False, db_index=True)

    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    # Foreign key to the Recipient object for PERSONAL type messages to this user.
    recipient = models.ForeignKey(Recipient, null=True, on_delete=models.SET_NULL)

    INACCESSIBLE_USER_NAME = gettext_lazy("Unknown user")
    # The user's name. We prefer the model of a full_name
    # over first+last because cultures vary on how many
    # names one has, whether the family name is first or last, etc.
    # It also allows organizations to encode a bit of non-name data in
    # the "name" attribute if desired, like gender pronouns,
    # graduation year, etc.
    full_name = models.CharField(max_length=MAX_NAME_LENGTH)

    date_joined = models.DateTimeField(default=timezone_now)

    # Terms of Service version number that this user has accepted. We
    # use the special value TOS_VERSION_BEFORE_FIRST_LOGIN for users
    # whose account was created without direct user interaction (via
    # the API or a data import), and null for users whose account is
    # fully created on servers that do not have a configured ToS.
    TOS_VERSION_BEFORE_FIRST_LOGIN = "-1"
    tos_version = models.CharField(null=True, max_length=10)
    api_key = models.CharField(max_length=API_KEY_LENGTH, default=generate_api_key, unique=True)

    # A UUID generated on user creation. Introduced primarily to
    # provide a unique key for a user for the mobile push
    # notifications bouncer that will not have collisions after doing
    # a data export and then import.
    uuid = models.UUIDField(default=uuid4, unique=True)

    # Whether the user has access to server-level administrator pages, like /activity
    is_staff = models.BooleanField(default=False)

    # For a normal user, this is True unless the user or an admin has
    # deactivated their account. The name comes from Django; this field
    # isn't related to presence or to whether the user has recently used Zulip.
    #
    # See also `long_term_idle`.
    is_active = models.BooleanField(default=True, db_index=True)

    is_billing_admin = models.BooleanField(default=False, db_index=True)

    is_bot = models.BooleanField(default=False, db_index=True)
    bot_type = models.PositiveSmallIntegerField(null=True, db_index=True)
    bot_owner = models.ForeignKey("self", null=True, on_delete=models.SET_NULL)

    # Each role has a superset of the permissions of the next higher
    # numbered role. When adding new roles, leave enough space for
    # future roles to be inserted between currently adjacent
    # roles. These constants appear in RealmAuditLog.extra_data, so
    # changes to them will require a migration of RealmAuditLog.
    ROLE_REALM_OWNER = 100
    ROLE_REALM_ADMINISTRATOR = 200
    ROLE_MODERATOR = 300
    ROLE_MEMBER = 400
    ROLE_GUEST = 600
    role = models.PositiveSmallIntegerField(default=ROLE_MEMBER, db_index=True)

    ROLE_TYPES = [
        ROLE_REALM_OWNER,
        ROLE_REALM_ADMINISTRATOR,
        ROLE_MODERATOR,
        ROLE_MEMBER,
        ROLE_GUEST,
    ]
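
    # Illustrative note (assumption, not from the original file): because a smaller
    # role number means more privileges, an "at least moderator" check can be written
    # as `role <= UserProfile.ROLE_MODERATOR`.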
# Whether the user has been "soft-deactivated" due to weeks of inactivity.
|
|
|
|
# For these users we avoid doing UserMessage table work, as an optimization
|
|
|
|
# for large Zulip organizations with lots of single-visit users.
|
2022-08-15 19:10:58 +02:00
|
|
|
long_term_idle = models.BooleanField(default=False, db_index=True)
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2018-07-25 00:29:05 +02:00
|
|
|
# When we last added basic UserMessage rows for a long_term_idle user.
|
2022-08-15 19:10:58 +02:00
|
|
|
last_active_message_id = models.IntegerField(null=True)
|
2014-02-05 21:31:30 +01:00
|
|
|
|
2018-07-25 00:29:05 +02:00
|
|
|
# Mirror dummies are fake (!is_active) users used to provide
|
|
|
|
# message senders in our cross-protocol Zephyr<->Zulip content
|
|
|
|
# mirroring integration, so that we can display mirrored content
|
|
|
|
# like native Zulip messages (with a name + avatar, etc.).
|
2022-08-15 19:10:58 +02:00
|
|
|
is_mirror_dummy = models.BooleanField(default=False)
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2020-12-20 14:21:42 +01:00
|
|
|
# Users with this flag set are allowed to forge messages as sent by another
|
2019-07-14 04:47:24 +02:00
|
|
|
# user and to send to private streams; also used for Zephyr/Jabber mirroring.
|
2022-08-15 19:10:58 +02:00
|
|
|
can_forge_sender = models.BooleanField(default=False, db_index=True)
|
2020-12-14 22:02:22 +01:00
|
|
|
# Users with this flag set can create other users via API.
|
2022-08-15 19:10:58 +02:00
|
|
|
can_create_users = models.BooleanField(default=False, db_index=True)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
|
|
|
# Used for rate-limiting certain automated messages generated by bots
|
2022-08-15 19:10:58 +02:00
|
|
|
last_reminder = models.DateTimeField(default=None, null=True)
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2018-07-25 00:29:05 +02:00
|
|
|
# Minutes to wait before warning a bot owner that their bot sent a message
|
|
|
|
# to a nonexistent stream
|
|
|
|
BOT_OWNER_STREAM_ALERT_WAITPERIOD = 1
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2018-07-25 00:29:05 +02:00
|
|
|
# API rate limits, formatted as a comma-separated list of range:max pairs
|
2022-08-15 19:10:58 +02:00
|
|
|
rate_limits = models.CharField(default="", max_length=100)
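    # For example (values here are illustrative), a rate_limits string of
    # "60:100,3600:5000" would describe two rules: at most 100 requests per
    # 60-second window and at most 5000 requests per 3600-second window.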

    # Default streams for some deprecated/legacy classes of bot users.
    default_sending_stream = models.ForeignKey(
        "zerver.Stream",
        null=True,
        related_name="+",
        on_delete=models.SET_NULL,
    )
    default_events_register_stream = models.ForeignKey(
        "zerver.Stream",
        null=True,
        related_name="+",
        on_delete=models.SET_NULL,
    )
    default_all_public_streams = models.BooleanField(default=False)

    # A time zone name from the `tzdata` database, as found in zoneinfo.available_timezones().
    #
    # The longest existing name is 32 characters long, so max_length=40 seems
    # like a safe choice.
    #
    # In Django, the convention is to use an empty string instead of NULL/None
    # for text-based fields. For more information, see
    # https://docs.djangoproject.com/en/3.2/ref/models/fields/#django.db.models.Field.null.
    timezone = models.CharField(max_length=40, default="")

    AVATAR_FROM_GRAVATAR = "G"
    AVATAR_FROM_USER = "U"
    AVATAR_SOURCES = (
        (AVATAR_FROM_GRAVATAR, "Hosted by Gravatar"),
        (AVATAR_FROM_USER, "Uploaded by user"),
    )
    avatar_source = models.CharField(
        default=AVATAR_FROM_GRAVATAR, choices=AVATAR_SOURCES, max_length=1
    )
    avatar_version = models.PositiveSmallIntegerField(default=1)
    avatar_hash = models.CharField(null=True, max_length=64)

    # TODO: TUTORIAL_STATUS was originally an optimization designed to
    # allow us to skip querying the OnboardingStep table when loading
    # /. This optimization is no longer effective, so it's possible we
    # should delete it.
    TUTORIAL_WAITING = "W"
    TUTORIAL_STARTED = "S"
    TUTORIAL_FINISHED = "F"
    TUTORIAL_STATES = (
        (TUTORIAL_WAITING, "Waiting"),
        (TUTORIAL_STARTED, "Started"),
        (TUTORIAL_FINISHED, "Finished"),
    )
    tutorial_status = models.CharField(
        default=TUTORIAL_WAITING, choices=TUTORIAL_STATES, max_length=1
    )

    # Contains serialized JSON of the form:
    # [("step 1", true), ("step 2", false)]
    # where the second element of each tuple is whether the step has been
    # completed.
    onboarding_steps = models.TextField(default="[]")

    zoom_token = models.JSONField(default=None, null=True)

    objects = UserManager()

    ROLE_ID_TO_NAME_MAP = {
        ROLE_REALM_OWNER: gettext_lazy("Organization owner"),
        ROLE_REALM_ADMINISTRATOR: gettext_lazy("Organization administrator"),
        ROLE_MODERATOR: gettext_lazy("Moderator"),
        ROLE_MEMBER: gettext_lazy("Member"),
        ROLE_GUEST: gettext_lazy("Guest"),
    }

    def get_role_name(self) -> str:
        return str(self.ROLE_ID_TO_NAME_MAP[self.role])

    def profile_data(self) -> ProfileData:
        values = CustomProfileFieldValue.objects.filter(user_profile=self)
        user_data = {
            v.field_id: {"value": v.value, "rendered_value": v.rendered_value} for v in values
        }
        data: ProfileData = []
        for field in custom_profile_fields_for_realm(self.realm_id):
            field_values = user_data.get(field.id, None)
            if field_values:
                value, rendered_value = field_values.get("value"), field_values.get(
                    "rendered_value"
                )
            else:
                value, rendered_value = None, None
            field_type = field.field_type
            if value is not None:
                converter = field.FIELD_CONVERTERS[field_type]
                value = converter(value)

            field_data = field.as_dict()
            data.append(
                {
                    "id": field_data["id"],
                    "name": field_data["name"],
                    "type": field_data["type"],
                    "hint": field_data["hint"],
                    "field_data": field_data["field_data"],
                    "order": field_data["order"],
                    "value": value,
                    "rendered_value": rendered_value,
                }
            )

        return data

    def can_admin_user(self, target_user: "UserProfile") -> bool:
        """Returns whether this user has permission to modify target_user"""
        if target_user.bot_owner_id == self.id:
            return True
        elif self.is_realm_admin and self.realm == target_user.realm:
            return True
        else:
            return False

    @override
    def __str__(self) -> str:
        return f"{self.email} {self.realm!r}"

    @property
    def is_provisional_member(self) -> bool:
        if self.is_moderator:
            return False
        diff = (timezone_now() - self.date_joined).days
        if diff < self.realm.waiting_period_threshold:
            return True
        return False

    @property
    def is_realm_admin(self) -> bool:
        return self.role in (UserProfile.ROLE_REALM_ADMINISTRATOR, UserProfile.ROLE_REALM_OWNER)

    @is_realm_admin.setter
    def is_realm_admin(self, value: bool) -> None:
        if value:
            self.role = UserProfile.ROLE_REALM_ADMINISTRATOR
        elif self.role == UserProfile.ROLE_REALM_ADMINISTRATOR:
            # We need to be careful to not accidentally change
            # ROLE_GUEST to ROLE_MEMBER here.
            self.role = UserProfile.ROLE_MEMBER

    @property
    def has_billing_access(self) -> bool:
        return self.is_realm_owner or self.is_billing_admin

    @property
    def is_realm_owner(self) -> bool:
        return self.role == UserProfile.ROLE_REALM_OWNER

    @is_realm_owner.setter
    def is_realm_owner(self, value: bool) -> None:
        if value:
            self.role = UserProfile.ROLE_REALM_OWNER
        elif self.role == UserProfile.ROLE_REALM_OWNER:
            # We need to be careful to not accidentally change
            # ROLE_GUEST to ROLE_MEMBER here.
            self.role = UserProfile.ROLE_MEMBER

    @property
    def is_guest(self) -> bool:
        return self.role == UserProfile.ROLE_GUEST

    @is_guest.setter
    def is_guest(self, value: bool) -> None:
        if value:
            self.role = UserProfile.ROLE_GUEST
        elif self.role == UserProfile.ROLE_GUEST:
            # We need to be careful to not accidentally change
            # ROLE_REALM_ADMINISTRATOR to ROLE_MEMBER here.
            self.role = UserProfile.ROLE_MEMBER

    @property
    def is_moderator(self) -> bool:
        return self.role == UserProfile.ROLE_MODERATOR

    @is_moderator.setter
    def is_moderator(self, value: bool) -> None:
        if value:
            self.role = UserProfile.ROLE_MODERATOR
        elif self.role == UserProfile.ROLE_MODERATOR:
            # We need to be careful to not accidentally change
            # ROLE_GUEST to ROLE_MEMBER here.
            self.role = UserProfile.ROLE_MEMBER

    @property
    def is_incoming_webhook(self) -> bool:
        return self.bot_type == UserProfile.INCOMING_WEBHOOK_BOT

    @property
    def allowed_bot_types(self) -> List[int]:
        allowed_bot_types = []
        if (
            self.is_realm_admin
            or self.realm.bot_creation_policy != Realm.BOT_CREATION_LIMIT_GENERIC_BOTS
        ):
            allowed_bot_types.append(UserProfile.DEFAULT_BOT)
        allowed_bot_types += [
            UserProfile.INCOMING_WEBHOOK_BOT,
            UserProfile.OUTGOING_WEBHOOK_BOT,
        ]
        if settings.EMBEDDED_BOTS_ENABLED:
            allowed_bot_types.append(UserProfile.EMBEDDED_BOT)
        return allowed_bot_types

    def email_address_is_realm_public(self) -> bool:
        # Bots always have EMAIL_ADDRESS_VISIBILITY_EVERYONE.
        if self.email_address_visibility == UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
            return True
        return False

    def has_permission(self, policy_name: str) -> bool:
        from zerver.lib.user_groups import is_user_in_group

        if policy_name not in [
            "add_custom_emoji_policy",
            "create_multiuse_invite_group",
            "create_private_stream_policy",
            "create_public_stream_policy",
            "create_web_public_stream_policy",
            "delete_own_message_policy",
            "edit_topic_policy",
            "invite_to_stream_policy",
            "invite_to_realm_policy",
            "move_messages_between_streams_policy",
            "user_group_edit_policy",
        ]:
            raise AssertionError("Invalid policy")

        if policy_name in Realm.REALM_PERMISSION_GROUP_SETTINGS:
            allowed_user_group = getattr(self.realm, policy_name)
            return is_user_in_group(allowed_user_group, self)

        policy_value = getattr(self.realm, policy_name)
        if policy_value == Realm.POLICY_NOBODY:
            return False

        if policy_value == Realm.POLICY_EVERYONE:
            return True

        if self.is_realm_owner:
            return True

        if policy_value == Realm.POLICY_OWNERS_ONLY:
            return False

        if self.is_realm_admin:
            return True

        if policy_value == Realm.POLICY_ADMINS_ONLY:
            return False

        if self.is_moderator:
            return True

        if policy_value == Realm.POLICY_MODERATORS_ONLY:
            return False

        if self.is_guest:
            return False

        if policy_value == Realm.POLICY_MEMBERS_ONLY:
            return True

        assert policy_value == Realm.POLICY_FULL_MEMBERS_ONLY
        return not self.is_provisional_member
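
    # For example (an illustrative walkthrough of the checks above, not
    # additional behavior): with the realm's invite_to_realm_policy set to
    # Realm.POLICY_MODERATORS_ONLY, has_permission("invite_to_realm_policy")
    # returns True for owners, administrators, and moderators, and False for
    # members and guests.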

    def can_create_public_streams(self) -> bool:
        return self.has_permission("create_public_stream_policy")

    def can_create_private_streams(self) -> bool:
        return self.has_permission("create_private_stream_policy")

    def can_create_web_public_streams(self) -> bool:
        if not self.realm.web_public_streams_enabled():
            return False
        return self.has_permission("create_web_public_stream_policy")

    def can_subscribe_other_users(self) -> bool:
        return self.has_permission("invite_to_stream_policy")

    def can_invite_users_by_email(self) -> bool:
        return self.has_permission("invite_to_realm_policy")

    def can_create_multiuse_invite_to_realm(self) -> bool:
        return self.has_permission("create_multiuse_invite_group")

    def can_move_messages_between_streams(self) -> bool:
        return self.has_permission("move_messages_between_streams_policy")

    def can_edit_user_groups(self) -> bool:
        return self.has_permission("user_group_edit_policy")

    def can_move_messages_to_another_topic(self) -> bool:
        return self.has_permission("edit_topic_policy")

    def can_add_custom_emoji(self) -> bool:
        return self.has_permission("add_custom_emoji_policy")

    def can_delete_own_message(self) -> bool:
        return self.has_permission("delete_own_message_policy")

    def can_access_public_streams(self) -> bool:
        return not (self.is_guest or self.realm.is_zephyr_mirror_realm)

    def major_tos_version(self) -> int:
        if self.tos_version is not None:
            return int(self.tos_version.split(".")[0])
        else:
            return -1
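
    # For example, a stored tos_version of "2.1" has major version 2, while an
    # unset tos_version (None) is reported as -1, the same major version that
    # TOS_VERSION_BEFORE_FIRST_LOGIN ("-1") parses to.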

    def format_requester_for_logs(self) -> str:
        return "{}@{}".format(self.id, self.realm.string_id or "root")

    @override
    def set_password(self, password: Optional[str]) -> None:
        if password is None:
            self.set_unusable_password()
            return

        from zproject.backends import check_password_strength

        if not check_password_strength(password):
            raise PasswordTooWeakError

        super().set_password(password)

    class Meta:
        indexes = [
            models.Index(Upper("email"), name="upper_userprofile_email_idx"),
        ]


class PasswordTooWeakError(Exception):
    pass


class UserGroup(models.Model):  # type: ignore[django-manager-missing] # django-stubs cannot resolve the custom CTEManager yet https://github.com/typeddjango/django-stubs/issues/1023
    MAX_NAME_LENGTH = 100
    INVALID_NAME_PREFIXES = ["@", "role:", "user:", "stream:", "channel:"]

    objects: CTEManager = CTEManager()
    name = models.CharField(max_length=MAX_NAME_LENGTH)
    direct_members = models.ManyToManyField(
        UserProfile, through="zerver.UserGroupMembership", related_name="direct_groups"
    )
    direct_subgroups = models.ManyToManyField(
        "self",
        symmetrical=False,
        through="zerver.GroupGroupMembership",
        through_fields=("supergroup", "subgroup"),
        related_name="direct_supergroups",
    )
    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    description = models.TextField(default="")
    is_system_group = models.BooleanField(default=False)

    can_mention_group = models.ForeignKey("self", on_delete=models.RESTRICT)

    # We do not have "Full members" and "Everyone on the internet"
    # groups here since there isn't a separate role value for full
    # members and spectators.
    SYSTEM_USER_GROUP_ROLE_MAP = {
        UserProfile.ROLE_REALM_OWNER: {
            "name": SystemGroups.OWNERS,
            "description": "Owners of this organization",
        },
        UserProfile.ROLE_REALM_ADMINISTRATOR: {
            "name": SystemGroups.ADMINISTRATORS,
            "description": "Administrators of this organization, including owners",
        },
        UserProfile.ROLE_MODERATOR: {
            "name": SystemGroups.MODERATORS,
            "description": "Moderators of this organization, including administrators",
        },
        UserProfile.ROLE_MEMBER: {
            "name": SystemGroups.MEMBERS,
            "description": "Members of this organization, not including guests",
        },
        UserProfile.ROLE_GUEST: {
            "name": SystemGroups.EVERYONE,
            "description": "Everyone in this organization, including guests",
        },
    }

    GROUP_PERMISSION_SETTINGS = {
        "can_mention_group": GroupPermissionSetting(
            require_system_group=False,
            allow_internet_group=False,
            allow_owners_group=False,
            allow_nobody_group=True,
            allow_everyone_group=True,
            default_group_name=SystemGroups.EVERYONE,
            default_for_system_groups=SystemGroups.NOBODY,
            id_field_name="can_mention_group_id",
        ),
    }

    class Meta:
        unique_together = (("realm", "name"),)


class UserGroupMembership(models.Model):
    user_group = models.ForeignKey(UserGroup, on_delete=CASCADE, related_name="+")
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE, related_name="+")

    class Meta:
        unique_together = (("user_group", "user_profile"),)


class GroupGroupMembership(models.Model):
    supergroup = models.ForeignKey(UserGroup, on_delete=CASCADE, related_name="+")
    subgroup = models.ForeignKey(UserGroup, on_delete=CASCADE, related_name="+")

    class Meta:
        constraints = [
            models.UniqueConstraint(
                fields=["supergroup", "subgroup"], name="zerver_groupgroupmembership_uniq"
            )
        ]


def remote_user_to_email(remote_user: str) -> str:
    if settings.SSO_APPEND_DOMAIN is not None:
        return Address(username=remote_user, domain=settings.SSO_APPEND_DOMAIN).addr_spec
    return remote_user
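
# For example, with SSO_APPEND_DOMAIN set to "example.com" (an illustrative
# value), remote_user_to_email("alice") returns "alice@example.com"; when
# SSO_APPEND_DOMAIN is None, the remote username is returned unchanged.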


# Make sure we flush the UserProfile object from our remote cache
# whenever we save it.
post_save.connect(flush_user_profile, sender=UserProfile)


class PreregistrationRealm(models.Model):
    """Data on a partially created realm entered by a user who has
    completed the "new organization" form. Used to transfer the user's
    selections from the pre-confirmation "new organization" form to
    the post-confirmation user registration form.

    Note that the values stored here may not match those of the
    created realm (in the event the user creates a realm at all),
    because we allow the user to edit these values in the registration
    form (and in fact the user will be required to do so if the
    `string_id` is claimed by another realm before registration is
    completed).
    """

    name = models.CharField(max_length=Realm.MAX_REALM_NAME_LENGTH)
    org_type = models.PositiveSmallIntegerField(
        default=Realm.ORG_TYPES["unspecified"]["id"],
        choices=[(t["id"], t["name"]) for t in Realm.ORG_TYPES.values()],
    )
    default_language = models.CharField(
        default="en",
        max_length=MAX_LANGUAGE_ID_LENGTH,
    )
    string_id = models.CharField(max_length=Realm.MAX_REALM_SUBDOMAIN_LENGTH)
    email = models.EmailField()

    confirmation = GenericRelation("confirmation.Confirmation", related_query_name="prereg_realm")
    status = models.IntegerField(default=0)

    # The Realm created upon completion of the registration
    # for this PreregistrationRealm
    created_realm = models.ForeignKey(Realm, null=True, related_name="+", on_delete=models.SET_NULL)

    # The UserProfile created upon completion of the registration
    # for this PreregistrationRealm
    created_user = models.ForeignKey(
        UserProfile, null=True, related_name="+", on_delete=models.SET_NULL
    )


class PreregistrationUser(models.Model):
    # Data on a partially created user, before the completion of
    # registration. This is used in at least three major code paths:
    # * Realm creation, in which case realm is None.
    #
    # * Invitations, in which case referred_by will always be set.
    #
    # * Social authentication signup, where it's used to store data
    #   from the authentication step and pass it to the registration
    #   form.

    email = models.EmailField()

    confirmation = GenericRelation("confirmation.Confirmation", related_query_name="prereg_user")
    # If the pre-registration process provides a suggested full name for this user,
    # store it here to use it to prepopulate the full name field in the registration form:
    full_name = models.CharField(max_length=UserProfile.MAX_NAME_LENGTH, null=True)
    full_name_validated = models.BooleanField(default=False)
    referred_by = models.ForeignKey(UserProfile, null=True, on_delete=CASCADE)
    streams = models.ManyToManyField("zerver.Stream")
    invited_at = models.DateTimeField(auto_now=True)
    realm_creation = models.BooleanField(default=False)
    # Indicates whether the user needs a password. Users who were
    # created via SSO style auth (e.g. GitHub/Google) generally do not.
    password_required = models.BooleanField(default=True)

    # status: whether an object has been confirmed.
    # if confirmed, set to confirmation.settings.STATUS_USED
    status = models.IntegerField(default=0)

    # The realm should only ever be None for PreregistrationUser
    # objects created as part of realm creation.
    realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE)

    # These values should be consistent with the values
    # in settings_config.user_role_values.
    INVITE_AS = dict(
        REALM_OWNER=100,
        REALM_ADMIN=200,
        MODERATOR=300,
        MEMBER=400,
        GUEST_USER=600,
    )
    invited_as = models.PositiveSmallIntegerField(default=INVITE_AS["MEMBER"])

    multiuse_invite = models.ForeignKey("MultiuseInvite", null=True, on_delete=models.SET_NULL)

    # The UserProfile created upon completion of the registration
    # for this PreregistrationUser
    created_user = models.ForeignKey(
        UserProfile, null=True, related_name="+", on_delete=models.SET_NULL
    )

    class Meta:
        indexes = [
            models.Index(Upper("email"), name="upper_preregistration_email_idx"),
        ]


def filter_to_valid_prereg_users(
    query: QuerySet[PreregistrationUser],
    invite_expires_in_minutes: Union[Optional[int], UnspecifiedValue] = UnspecifiedValue(),
) -> QuerySet[PreregistrationUser]:
    """
    If invite_expires_in_minutes is specified, we return only those
    PreregistrationUser objects that were created at most that many
    minutes in the past.
    """
    used_value = confirmation_settings.STATUS_USED
    revoked_value = confirmation_settings.STATUS_REVOKED

    query = query.exclude(status__in=[used_value, revoked_value])
    if invite_expires_in_minutes is None:
        # Since invite_expires_in_minutes is None, the invitation will never
        # expire, so we do not need to check anything else and can simply
        # return after excluding objects with used and revoked status.
        return query

    assert invite_expires_in_minutes is not None
    if not isinstance(invite_expires_in_minutes, UnspecifiedValue):
        lowest_datetime = timezone_now() - timedelta(minutes=invite_expires_in_minutes)
        return query.filter(invited_at__gte=lowest_datetime)
    else:
        return query.filter(
            Q(confirmation__expiry_date=None) | Q(confirmation__expiry_date__gte=timezone_now())
        )
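
# For example (illustrative values), calling
# filter_to_valid_prereg_users(query, invite_expires_in_minutes=60) keeps only
# unconfirmed, unrevoked invitations created within the last hour, while
# passing invite_expires_in_minutes=None keeps all unconfirmed, unrevoked
# invitations regardless of age.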


class MultiuseInvite(models.Model):
    referred_by = models.ForeignKey(UserProfile, on_delete=CASCADE)
    streams = models.ManyToManyField("zerver.Stream")
    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    invited_as = models.PositiveSmallIntegerField(default=PreregistrationUser.INVITE_AS["MEMBER"])

    # status for tracking whether the invite has been revoked.
    # If revoked, set to confirmation.settings.STATUS_REVOKED.
    # STATUS_USED is not supported, because these objects are supposed
    # to be usable multiple times.
    status = models.IntegerField(default=0)


class EmailChangeStatus(models.Model):
    new_email = models.EmailField()
    old_email = models.EmailField()
    updated_at = models.DateTimeField(auto_now=True)
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)

    # status: whether an object has been confirmed.
    # if confirmed, set to confirmation.settings.STATUS_USED
    status = models.IntegerField(default=0)

    realm = models.ForeignKey(Realm, on_delete=CASCADE)


class RealmReactivationStatus(models.Model):
    # status: whether an object has been confirmed.
    # if confirmed, set to confirmation.settings.STATUS_USED
    status = models.IntegerField(default=0)

    realm = models.ForeignKey(Realm, on_delete=CASCADE)


class AbstractPushDeviceToken(models.Model):
    APNS = 1
    GCM = 2

    KINDS = (
        (APNS, "apns"),
        (GCM, "gcm"),
    )

    kind = models.PositiveSmallIntegerField(choices=KINDS)

    # The token is a unique device-specific token that is
    # sent to us from each device:
    # - APNS token if kind == APNS
    # - GCM registration id if kind == GCM
    token = models.CharField(max_length=4096, db_index=True)

    # TODO: last_updated should be renamed date_created, since it is
    # no longer maintained as a last_updated value.
    last_updated = models.DateTimeField(auto_now=True)

    # [optional] Contains the app id of the device if it is an iOS device
    ios_app_id = models.TextField(null=True)

    class Meta:
        abstract = True


class PushDeviceToken(AbstractPushDeviceToken):
    # The user whose device this is
    user = models.ForeignKey(UserProfile, db_index=True, on_delete=CASCADE)

    class Meta:
        unique_together = ("user", "kind", "token")


def generate_email_token_for_stream() -> str:
    return secrets.token_hex(16)
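
# Note that secrets.token_hex(16) returns a 32-character hexadecimal string
# (16 random bytes), which matches the max_length=32 of Stream.email_token below.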


class Stream(models.Model):
    MAX_NAME_LENGTH = 60
    MAX_DESCRIPTION_LENGTH = 1024

    name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True)
    realm = models.ForeignKey(Realm, db_index=True, on_delete=CASCADE)
    date_created = models.DateTimeField(default=timezone_now)
    deactivated = models.BooleanField(default=False)
    description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH, default="")
    rendered_description = models.TextField(default="")

    # Foreign key to the Recipient object for STREAM type messages to this stream.
    recipient = models.ForeignKey(Recipient, null=True, on_delete=models.SET_NULL)

    # Various permission policy configurations
    PERMISSION_POLICIES: Dict[str, Dict[str, Any]] = {
        "web_public": {
            "invite_only": False,
            "history_public_to_subscribers": True,
            "is_web_public": True,
            "policy_name": gettext_lazy("Web-public"),
        },
        "public": {
            "invite_only": False,
            "history_public_to_subscribers": True,
            "is_web_public": False,
            "policy_name": gettext_lazy("Public"),
        },
        "private_shared_history": {
            "invite_only": True,
            "history_public_to_subscribers": True,
            "is_web_public": False,
            "policy_name": gettext_lazy("Private, shared history"),
        },
        "private_protected_history": {
            "invite_only": True,
            "history_public_to_subscribers": False,
            "is_web_public": False,
            "policy_name": gettext_lazy("Private, protected history"),
        },
        # Public streams with protected history are currently only
        # available in Zephyr realms
        "public_protected_history": {
            "invite_only": False,
            "history_public_to_subscribers": False,
            "is_web_public": False,
            "policy_name": gettext_lazy("Public, protected history"),
        },
    }
    invite_only = models.BooleanField(default=False)
    history_public_to_subscribers = models.BooleanField(default=True)

    # Whether this stream's content should be published by the web-public archive features
    is_web_public = models.BooleanField(default=False)

    STREAM_POST_POLICY_EVERYONE = 1
    STREAM_POST_POLICY_ADMINS = 2
    STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS = 3
    STREAM_POST_POLICY_MODERATORS = 4
    # TODO: Implement policy to restrict posting to a user group or admins.

    # Who in the organization has permission to send messages to this stream.
    stream_post_policy = models.PositiveSmallIntegerField(default=STREAM_POST_POLICY_EVERYONE)
    POST_POLICIES: Dict[int, StrPromise] = {
        # These strings should match the strings in the
        # stream_post_policy_values object in stream_data.js.
        STREAM_POST_POLICY_EVERYONE: gettext_lazy("All stream members can post"),
        STREAM_POST_POLICY_ADMINS: gettext_lazy("Only organization administrators can post"),
        STREAM_POST_POLICY_MODERATORS: gettext_lazy(
            "Only organization administrators and moderators can post"
        ),
        STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS: gettext_lazy(
            "Only organization full members can post"
        ),
    }
    STREAM_POST_POLICY_TYPES = list(POST_POLICIES.keys())

    # The unique thing about Zephyr public streams is that we never list their
    # users. We may try to generalize this concept later, but for now
    # we just use a concrete field. (Zephyr public streams aren't exactly like
    # invite-only streams--while both are private in terms of listing users,
    # for Zephyr we don't even list users to stream members, yet membership
    # is more public in the sense that you don't need a Zulip invite to join.)
    # This field is populated directly from UserProfile.is_zephyr_mirror_realm,
    # and the reason for denormalizing the field is performance.
    is_in_zephyr_realm = models.BooleanField(default=False)

    # Used by the e-mail forwarder. The e-mail RFC specifies a maximum
    # e-mail length of 254, and our max stream name length is 60, so we
    # have plenty of room for the token.
    email_token = models.CharField(
        max_length=32,
        default=generate_email_token_for_stream,
        unique=True,
    )

    # For old messages being automatically deleted.
    # Value NULL means "use retention policy of the realm".
    # Value -1 means "disable retention policy for this stream unconditionally".
    # Non-negative values have the natural meaning of "archive messages older than <value> days".
    MESSAGE_RETENTION_SPECIAL_VALUES_MAP = {
        "unlimited": -1,
        "realm_default": None,
    }
    message_retention_days = models.IntegerField(null=True, default=None)
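    # For example, message_retention_days=30 archives messages in this stream
    # once they are more than 30 days old, while the special values above map
    # the "unlimited" and "realm_default" options to -1 and NULL respectively.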
|
2019-06-05 19:46:35 +02:00
|
|
|
|
2022-06-24 16:18:55 +02:00
|
|
|
# on_delete field here is set to RESTRICT because we don't want to allow
|
2023-10-09 20:41:12 +02:00
|
|
|
# deleting a user group in case it is referenced by this setting.
|
2022-06-24 16:18:55 +02:00
|
|
|
# We are not using PROTECT since we want to allow deletion of user groups
|
|
|
|
# when realm itself is deleted.
|
2022-08-15 19:10:58 +02:00
|
|
|
can_remove_subscribers_group = models.ForeignKey(UserGroup, on_delete=models.RESTRICT)
|
2022-06-24 16:18:55 +02:00
|
|
|
|
2019-03-04 17:50:49 +01:00
|
|
|
# The very first message ID in the stream. Used to help clients
|
|
|
|
# determine whether they might need to display "more topics" for a
|
|
|
|
# stream based on what messages they have cached.
|
2022-08-15 19:10:58 +02:00
|
|
|
first_message_id = models.IntegerField(null=True, db_index=True)
|
2019-03-04 17:50:49 +01:00
|
|
|
|
2023-02-17 12:46:14 +01:00
|
|
|
stream_permission_group_settings = {
|
|
|
|
"can_remove_subscribers_group": GroupPermissionSetting(
|
|
|
|
require_system_group=True,
|
|
|
|
allow_internet_group=False,
|
|
|
|
allow_owners_group=False,
|
2023-04-06 08:12:41 +02:00
|
|
|
allow_nobody_group=False,
|
2023-09-07 02:06:51 +02:00
|
|
|
allow_everyone_group=True,
|
2023-09-21 13:06:39 +02:00
|
|
|
default_group_name=SystemGroups.ADMINISTRATORS,
|
2023-07-07 12:16:40 +02:00
|
|
|
id_field_name="can_remove_subscribers_group_id",
|
2023-02-17 12:46:14 +01:00
|
|
|
),
|
|
|
|
}
|
|
|
|
|
2023-04-12 22:40:35 +02:00
|
|
|
class Meta:
|
|
|
|
indexes = [
|
|
|
|
models.Index(Upper("name"), name="upper_stream_name_idx"),
|
|
|
|
]
|
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2018-05-11 02:24:34 +02:00
|
|
|
def __str__(self) -> str:
|
2023-03-08 22:18:59 +01:00
|
|
|
return self.name
|
Give our models meaningful reprs.
>>> from zephyr.models import UserProfile, Recipient, Zephyr, ZephyrClass
>>> for klass in [UserProfile, Recipient, Zephyr, ZephyrClass]:
... print klass.objects.all()[:2]
...
[<UserProfile: othello>, <UserProfile: iago>]
[<Recipient: Verona (1, class)>, <Recipient: Denmark (2, class)>]
[<Zephyr: Scotland / Scotland3 / <UserProfile: prospero>>, <Zephyr: Venice / Venice3 / <UserProfile: iago>>]
[<ZephyrClass: Verona>, <ZephyrClass: Denmark>]
(imported from commit 9998ffe40800213a5425990d6e85f5c5a43a5355)
2012-08-29 16:15:06 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def is_public(self) -> bool:
|
2016-07-27 01:45:29 +02:00
|
|
|
# All streams are private in Zephyr mirroring realms.
|
2017-10-08 21:16:51 +02:00
|
|
|
return not self.invite_only and not self.is_in_zephyr_realm
|
2013-01-15 21:10:50 +01:00
|
|
|
|
2018-04-05 00:28:14 +02:00
|
|
|
def is_history_realm_public(self) -> bool:
|
|
|
|
return self.is_public()
|
|
|
|
|
|
|
|
def is_history_public_to_subscribers(self) -> bool:
|
2018-04-27 01:00:26 +02:00
|
|
|
return self.history_public_to_subscribers
|
2018-04-05 00:28:14 +02:00
|
|
|
|
2020-02-03 03:21:07 +01:00
|
|
|
# Stream fields included whenever a Stream object is provided to
|
|
|
|
# Zulip clients via the API. A few details worth noting:
|
|
|
|
# * "id" is represented as "stream_id" in most API interfaces.
|
|
|
|
# * "email_token" is not realm-public and thus is not included here.
|
|
|
|
# * is_in_zephyr_realm is a backend-only optimization.
|
|
|
|
# * "deactivated" streams are filtered from the API entirely.
|
2020-02-29 21:51:06 +01:00
|
|
|
# * "realm" and "recipient" are not exposed to clients via the API.
|
2020-02-03 03:21:07 +01:00
|
|
|
API_FIELDS = [
|
2022-03-12 11:44:34 +01:00
|
|
|
"date_created",
|
2020-02-03 03:21:07 +01:00
|
|
|
"description",
|
2022-03-12 11:44:34 +01:00
|
|
|
"first_message_id",
|
|
|
|
"history_public_to_subscribers",
|
|
|
|
"id",
|
2020-02-03 03:21:07 +01:00
|
|
|
"invite_only",
|
|
|
|
"is_web_public",
|
2020-07-25 05:44:12 +02:00
|
|
|
"message_retention_days",
|
2022-03-12 11:44:34 +01:00
|
|
|
"name",
|
|
|
|
"rendered_description",
|
|
|
|
"stream_post_policy",
|
2022-06-27 18:39:33 +02:00
|
|
|
"can_remove_subscribers_group_id",
|
2020-02-03 03:21:07 +01:00
|
|
|
]
|
|
|
|
|
2023-07-27 16:42:21 +02:00
|
|
|
def to_dict(self) -> DefaultStreamDict:
|
|
|
|
return DefaultStreamDict(
|
2023-07-12 12:57:57 +02:00
|
|
|
can_remove_subscribers_group=self.can_remove_subscribers_group_id,
|
2022-05-26 02:51:35 +02:00
|
|
|
date_created=datetime_to_timestamp(self.date_created),
|
|
|
|
description=self.description,
|
|
|
|
first_message_id=self.first_message_id,
|
|
|
|
history_public_to_subscribers=self.history_public_to_subscribers,
|
|
|
|
invite_only=self.invite_only,
|
|
|
|
is_web_public=self.is_web_public,
|
|
|
|
message_retention_days=self.message_retention_days,
|
|
|
|
name=self.name,
|
|
|
|
rendered_description=self.rendered_description,
|
|
|
|
stream_id=self.id,
|
|
|
|
stream_post_policy=self.stream_post_policy,
|
|
|
|
is_announcement_only=self.stream_post_policy == Stream.STREAM_POST_POLICY_ADMINS,
|
|
|
|
)
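# Note (added for clarity, not in the original): the keys produced by to_dict
# mirror API_FIELDS, with two renames visible above -- the model's "id" is
# exposed as "stream_id", and "can_remove_subscribers_group_id" is exposed as
# "can_remove_subscribers_group" -- plus the computed "is_announcement_only"
# compatibility field.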
|
2014-03-02 06:46:54 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2014-01-28 20:49:55 +01:00
|
|
|
post_save.connect(flush_stream, sender=Stream)
|
|
|
|
post_delete.connect(flush_stream, sender=Stream)
|
2014-01-15 22:48:27 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-23 15:15:54 +02:00
|
|
|
class UserTopic(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
stream = models.ForeignKey(Stream, on_delete=CASCADE)
|
|
|
|
recipient = models.ForeignKey(Recipient, on_delete=CASCADE)
|
|
|
|
topic_name = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH)
|
2021-07-29 15:15:42 +02:00
|
|
|
# The default value for last_updated is a few weeks before tracking
|
2020-01-17 16:01:00 +01:00
|
|
|
# of when topics were muted was first introduced. It's designed
|
2021-07-29 15:15:42 +02:00
|
|
|
# to be obviously incorrect so that one can tell it's backfilled data.
|
2023-11-19 19:45:19 +01:00
|
|
|
last_updated = models.DateTimeField(default=datetime(2020, 1, 1, 0, 0, tzinfo=timezone.utc))
|
2017-08-30 02:19:34 +02:00
|
|
|
|
2023-03-12 16:19:42 +01:00
|
|
|
class VisibilityPolicy(models.IntegerChoices):
|
|
|
|
# A normal muted topic. No notifications and unreads hidden.
|
|
|
|
MUTED = 1, "Muted topic"
|
2021-08-02 14:08:44 +02:00
|
|
|
|
2023-03-12 16:19:42 +01:00
|
|
|
# This topic will behave like an unmuted topic in an unmuted stream even if it
|
|
|
|
# belongs to a muted stream.
|
|
|
|
UNMUTED = 2, "Unmuted topic in muted stream"
|
2022-02-25 21:48:56 +01:00
|
|
|
|
2023-03-12 16:19:42 +01:00
|
|
|
# This topic will behave like `UNMUTED`, plus some additional
|
|
|
|
# display and/or notifications priority that is TBD and likely to
|
|
|
|
# be configurable; see #6027. Not yet implemented.
|
|
|
|
FOLLOWED = 3, "Followed topic"
|
2021-08-02 09:46:56 +02:00
|
|
|
|
2023-03-12 16:19:42 +01:00
|
|
|
# Implicitly, if a UserTopic does not exist, the (user, topic)
|
|
|
|
# pair should have normal behavior for that (user, stream) pair.
|
2021-08-02 09:46:56 +02:00
|
|
|
|
2023-03-12 16:19:42 +01:00
|
|
|
# We use this in our code to represent the condition in the comment above.
|
|
|
|
INHERIT = 0, "User's default policy for the stream."
|
2021-08-02 09:46:56 +02:00
|
|
|
|
2023-03-12 16:19:42 +01:00
|
|
|
visibility_policy = models.SmallIntegerField(
|
|
|
|
choices=VisibilityPolicy.choices, default=VisibilityPolicy.MUTED
|
2021-08-02 09:46:56 +02:00
|
|
|
)
|
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2023-02-10 19:10:19 +01:00
|
|
|
constraints = [
|
|
|
|
models.UniqueConstraint(
|
|
|
|
"user_profile",
|
|
|
|
"stream",
|
|
|
|
Lower("topic_name"),
|
|
|
|
name="usertopic_case_insensitive_topic_uniq",
|
|
|
|
),
|
|
|
|
]
|
2017-08-30 02:19:34 +02:00
|
|
|
|
2021-08-02 14:08:44 +02:00
|
|
|
indexes = [
|
2022-02-23 07:33:55 +01:00
|
|
|
models.Index("stream", Upper("topic_name"), name="zerver_mutedtopic_stream_topic"),
|
2021-08-02 14:08:44 +02:00
|
|
|
# This index is designed to optimize queries fetching the
|
|
|
|
# set of users who have a special policy for a stream,
|
|
|
|
# e.g. for the send-message code paths.
|
|
|
|
models.Index(
|
|
|
|
fields=("stream", "topic_name", "visibility_policy", "user_profile"),
|
|
|
|
name="zerver_usertopic_stream_topic_user_visibility_idx",
|
|
|
|
),
|
|
|
|
# This index is useful for handling API requests fetching the
|
|
|
|
# muted topics for a given user or user/stream pair.
|
|
|
|
models.Index(
|
|
|
|
fields=("user_profile", "visibility_policy", "stream", "topic_name"),
|
|
|
|
name="zerver_usertopic_user_visibility_idx",
|
|
|
|
),
|
|
|
|
]
|
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2018-05-11 02:24:34 +02:00
|
|
|
def __str__(self) -> str:
|
2023-03-08 22:18:59 +01:00
|
|
|
return f"({self.user_profile.email}, {self.stream.name}, {self.topic_name}, {self.last_updated})"
|
2021-07-23 15:15:54 +02:00
|
|
|
|
|
|
|
|
2021-03-27 11:43:03 +01:00
|
|
|
class MutedUser(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, related_name="muter", on_delete=CASCADE)
|
|
|
|
muted_user = models.ForeignKey(UserProfile, related_name="muted", on_delete=CASCADE)
|
|
|
|
date_muted = models.DateTimeField(default=timezone_now)
|
2021-03-27 11:43:03 +01:00
|
|
|
|
|
|
|
class Meta:
|
2021-03-27 12:23:32 +01:00
|
|
|
unique_together = ("user_profile", "muted_user")
|
2021-03-27 11:43:03 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2021-03-27 11:43:03 +01:00
|
|
|
def __str__(self) -> str:
|
2023-03-08 22:18:59 +01:00
|
|
|
return f"{self.user_profile.email} -> {self.muted_user.email}"
|
2021-03-27 11:43:03 +01:00
|
|
|
|
|
|
|
|
2021-03-27 13:31:26 +01:00
|
|
|
post_save.connect(flush_muting_users_cache, sender=MutedUser)
|
|
|
|
post_delete.connect(flush_muting_users_cache, sender=MutedUser)
|
|
|
|
|
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class Client(models.Model):
|
2023-02-14 22:45:00 +01:00
|
|
|
MAX_NAME_LENGTH = 30
|
|
|
|
name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True, unique=True)
|
2012-10-19 21:30:42 +02:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2018-05-11 02:24:34 +02:00
|
|
|
def __str__(self) -> str:
|
2023-03-08 22:18:59 +01:00
|
|
|
return self.name
|
2016-04-21 00:26:45 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-04-22 01:09:50 +02:00
|
|
|
get_client_cache: Dict[str, Client] = {}
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
|
2021-03-24 12:04:41 +01:00
|
|
|
def clear_client_cache() -> None: # nocoverage
|
|
|
|
global get_client_cache
|
|
|
|
get_client_cache = {}
|
|
|
|
|
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_client(name: str) -> Client:
|
2016-11-14 09:23:03 +01:00
|
|
|
# Accessing KEY_PREFIX through the module is necessary
|
|
|
|
# because we need the updated value of the variable.
|
2023-02-14 22:45:00 +01:00
|
|
|
cache_name = cache.KEY_PREFIX + name[0 : Client.MAX_NAME_LENGTH]
|
2016-11-14 09:23:03 +01:00
|
|
|
if cache_name not in get_client_cache:
|
2016-03-31 03:30:33 +02:00
|
|
|
result = get_client_remote_cache(name)
|
2016-11-14 09:23:03 +01:00
|
|
|
get_client_cache[cache_name] = result
|
|
|
|
return get_client_cache[cache_name]
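# Note on the caching above (added for clarity, not in the original):
# get_client consults a per-process dict first and only falls back to
# get_client_remote_cache (memoized via cache_with_key) on a miss. The
# in-process key embeds cache.KEY_PREFIX, so changing the prefix makes old
# entries unreachable without needing clear_client_cache().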
|
2013-11-20 22:16:48 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_client_cache_key(name: str) -> str:
|
2023-07-19 00:44:51 +02:00
|
|
|
return f"get_client:{hashlib.sha1(name.encode()).hexdigest()}"
|
2013-03-26 17:47:52 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
@cache_with_key(get_client_cache_key, timeout=3600 * 24 * 7)
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_client_remote_cache(name: str) -> Client:
|
2023-02-14 22:45:00 +01:00
|
|
|
(client, _) = Client.objects.get_or_create(name=name[0 : Client.MAX_NAME_LENGTH])
|
2012-10-19 21:30:42 +02:00
|
|
|
return client
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_realm_stream(stream_name: str, realm_id: int) -> Stream:
|
2023-07-09 22:24:32 +02:00
|
|
|
return Stream.objects.get(name__iexact=stream_name.strip(), realm_id=realm_id)
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2013-03-19 13:05:19 +01:00
|
|
|
|
2022-06-23 22:47:50 +02:00
|
|
|
def get_active_streams(realm: Realm) -> QuerySet[Stream]:
|
2014-01-24 23:30:53 +01:00
|
|
|
"""
|
|
|
|
Return all streams (including invite-only streams) that have not been deactivated.
|
|
|
|
"""
|
|
|
|
return Stream.objects.filter(realm=realm, deactivated=False)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-23 22:47:50 +02:00
|
|
|
def get_linkable_streams(realm_id: int) -> QuerySet[Stream]:
|
2021-12-30 13:59:30 +01:00
|
|
|
"""
|
|
|
|
This returns the streams that we are allowed to linkify using
|
|
|
|
something like "#frontend" in our markup. For now the business
|
|
|
|
rule is that you can link any stream in the realm that hasn't
|
|
|
|
been deactivated (similar to how get_active_streams works).
|
|
|
|
"""
|
|
|
|
return Stream.objects.filter(realm_id=realm_id, deactivated=False)
|
|
|
|
|
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_stream(stream_name: str, realm: Realm) -> Stream:
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-10-22 01:48:16 +02:00
|
|
|
Callers that don't have a Realm object already available should use
|
|
|
|
get_realm_stream directly, to avoid unnecessarily fetching the
|
|
|
|
Realm object.
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-10-22 01:48:16 +02:00
|
|
|
return get_realm_stream(stream_name, realm.id)
|
2013-01-17 22:16:39 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-28 05:28:29 +01:00
|
|
|
def get_stream_by_id_in_realm(stream_id: int, realm: Realm) -> Stream:
|
2023-07-21 13:11:48 +02:00
|
|
|
return Stream.objects.select_related("realm", "recipient").get(id=stream_id, realm=realm)
|
2019-01-28 05:28:29 +01:00
|
|
|
|
2013-06-27 22:52:05 +02:00
|
|
|
|
2023-07-11 14:24:06 +02:00
|
|
|
def bulk_get_streams(realm: Realm, stream_names: Set[str]) -> Dict[str, Any]:
|
|
|
|
def fetch_streams_by_name(stream_names: Set[str]) -> QuerySet[Stream]:
|
2016-06-04 09:02:05 +02:00
|
|
|
#
|
2013-06-27 22:52:05 +02:00
|
|
|
# This should be just
|
|
|
|
#
|
2019-12-12 21:13:07 +01:00
|
|
|
# Stream.objects.select_related().filter(name__iexact__in=stream_names,
|
|
|
|
# realm_id=realm_id)
|
2013-06-27 22:52:05 +02:00
|
|
|
#
|
|
|
|
# But chaining __in and __iexact doesn't work with Django's
|
|
|
|
# ORM, so we have the following hack to construct the relevant where clause
|
2021-02-12 08:19:30 +01:00
|
|
|
where_clause = (
|
|
|
|
"upper(zerver_stream.name::text) IN (SELECT upper(name) FROM unnest(%s) AS name)"
|
|
|
|
)
|
2023-07-11 14:24:06 +02:00
|
|
|
return get_active_streams(realm).extra(where=[where_clause], params=(list(stream_names),))
|
2013-06-27 22:52:05 +02:00
|
|
|
|
2023-07-11 14:24:06 +02:00
|
|
|
if not stream_names:
|
|
|
|
return {}
|
|
|
|
streams = list(fetch_streams_by_name(stream_names))
|
|
|
|
return {stream.name.lower(): stream for stream in streams}
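# Illustrative usage sketch (hypothetical helper, not part of the original
# module): callers must lower-case the requested name when reading the dict
# returned by bulk_get_streams, since its keys are normalized with
# str.lower() while the SQL match itself is case-insensitive.
def _example_lookup_stream_by_name(realm: Realm, stream_name: str) -> Optional[Stream]:
    streams = bulk_get_streams(realm, {stream_name})
    return streams.get(stream_name.lower())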
|
2013-06-27 22:52:05 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-09-19 21:48:53 +02:00
|
|
|
def get_huddle_user_ids(recipient: Recipient) -> ValuesQuerySet["Subscription", int]:
|
2021-02-12 08:19:30 +01:00
|
|
|
assert recipient.type == Recipient.HUDDLE
|
|
|
|
|
|
|
|
return (
|
|
|
|
Subscription.objects.filter(
|
|
|
|
recipient=recipient,
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
.order_by("user_profile_id")
|
|
|
|
.values_list("user_profile_id", flat=True)
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-10-29 17:03:51 +01:00
|
|
|
|
|
|
|
|
2023-07-16 19:08:55 +02:00
|
|
|
def bulk_get_huddle_user_ids(recipient_ids: List[int]) -> Dict[int, Set[int]]:
|
2019-08-13 23:05:47 +02:00
|
|
|
"""
|
2023-07-16 19:08:55 +02:00
|
|
|
Takes a list of huddle-type recipient_ids, returns a dict
|
2019-08-13 23:05:47 +02:00
|
|
|
mapping recipient id to the set of user ids in the huddle.
|
2023-07-16 19:08:55 +02:00
|
|
|
|
|
|
|
We rely on our caller to pass us recipient_ids that correspond
|
|
|
|
to huddles, but technically this function is valid for any type
|
|
|
|
of subscription.
|
2019-08-13 23:05:47 +02:00
|
|
|
"""
|
2023-07-16 19:08:55 +02:00
|
|
|
if not recipient_ids:
|
2019-08-13 23:05:47 +02:00
|
|
|
return {}
|
|
|
|
|
|
|
|
subscriptions = Subscription.objects.filter(
|
2023-07-16 19:08:55 +02:00
|
|
|
recipient_id__in=recipient_ids,
|
2023-07-16 18:29:58 +02:00
|
|
|
).only("user_profile_id", "recipient_id")
|
|
|
|
|
|
|
|
result_dict: Dict[int, Set[int]] = defaultdict(set)
|
|
|
|
for subscription in subscriptions:
|
|
|
|
result_dict[subscription.recipient_id].add(subscription.user_profile_id)
|
2019-08-13 23:05:47 +02:00
|
|
|
|
|
|
|
return result_dict
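# Example of the returned shape (added for clarity, not in the original): for
# two huddle recipients 17 and 42, the result might look like
# {17: {4, 9, 12}, 42: {4, 31}} -- i.e. each recipient id maps to the set of
# subscribed user ids.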
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class AbstractMessage(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
sender = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
2022-09-13 20:36:47 +02:00
|
|
|
|
|
|
|
# The target of the message is signified by the Recipient object.
|
|
|
|
# See the Recipient class for details.
|
2022-08-15 19:10:58 +02:00
|
|
|
recipient = models.ForeignKey(Recipient, on_delete=CASCADE)
|
2022-09-13 20:36:47 +02:00
|
|
|
|
2023-09-27 20:11:48 +02:00
|
|
|
# The realm containing the message. Usually this will be the same
|
|
|
|
# as the realm of the message's sender; the exception to that is
|
|
|
|
# cross-realm bot users.
|
|
|
|
#
|
|
|
|
# Important for efficient indexes and sharding in multi-realm servers.
|
2022-09-28 16:19:07 +02:00
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
2022-09-27 21:42:31 +02:00
|
|
|
|
2018-07-25 06:35:48 +02:00
|
|
|
# The message's topic.
|
|
|
|
#
|
|
|
|
# Early versions of Zulip called this concept a "subject", as in an email
|
|
|
|
# "subject line", before changing to "topic" in 2013 (commit dac5a46fa).
|
|
|
|
# UI and user documentation now consistently say "topic". New APIs and
|
|
|
|
# new code should generally also say "topic".
|
|
|
|
#
|
|
|
|
# See also the `topic_name` method on `Message`.
|
2022-08-15 19:10:58 +02:00
|
|
|
subject = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH, db_index=True)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2023-09-27 20:11:48 +02:00
|
|
|
# The raw Markdown-format text (e.g., what the user typed into the compose box).
|
2022-08-15 19:10:58 +02:00
|
|
|
content = models.TextField()
|
2023-09-27 20:11:48 +02:00
|
|
|
|
|
|
|
# The HTML rendered content resulting from rendering the content
|
|
|
|
# with the Markdown processor.
|
2022-08-15 19:10:58 +02:00
|
|
|
rendered_content = models.TextField(null=True)
|
2023-09-27 20:11:48 +02:00
|
|
|
# A rarely-incremented version number, theoretically useful for
|
|
|
|
# tracking which messages have been already rerendered when making
|
|
|
|
# major changes to the markup rendering process.
|
2022-08-15 19:10:58 +02:00
|
|
|
rendered_content_version = models.IntegerField(null=True)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
date_sent = models.DateTimeField("date sent", db_index=True)
|
2023-09-27 20:11:48 +02:00
|
|
|
|
|
|
|
# A Client object indicating what type of Zulip client sent this message.
|
2022-08-15 19:10:58 +02:00
|
|
|
sending_client = models.ForeignKey(Client, on_delete=CASCADE)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2023-09-27 20:11:48 +02:00
|
|
|
# The last time the message was modified by message editing or moving.
|
2022-08-15 19:10:58 +02:00
|
|
|
last_edit_time = models.DateTimeField(null=True)
|
2018-07-25 06:35:48 +02:00
|
|
|
|
|
|
|
# A JSON-encoded list of objects describing any past edits to this
|
|
|
|
# message, oldest first.
|
2022-08-15 19:10:58 +02:00
|
|
|
edit_history = models.TextField(null=True)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2023-09-27 20:11:48 +02:00
|
|
|
# Whether the message contains a (link to) an uploaded file.
|
2022-08-15 19:10:58 +02:00
|
|
|
has_attachment = models.BooleanField(default=False, db_index=True)
|
2023-09-27 20:11:48 +02:00
|
|
|
# Whether the message contains a visible image element.
|
2022-08-15 19:10:58 +02:00
|
|
|
has_image = models.BooleanField(default=False, db_index=True)
|
2023-09-27 20:11:48 +02:00
|
|
|
# Whether the message contains a link.
|
2022-08-15 19:10:58 +02:00
|
|
|
has_link = models.BooleanField(default=False, db_index=True)
|
2014-02-21 17:44:48 +01:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2016-11-01 11:26:38 +01:00
|
|
|
abstract = True
|
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2018-05-11 02:24:34 +02:00
|
|
|
def __str__(self) -> str:
|
2023-07-16 14:21:27 +02:00
|
|
|
return f"{self.recipient.label()} / {self.subject} / {self.sender!r}"
|
2017-05-17 05:59:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-06-18 19:54:09 +02:00
|
|
|
class ArchiveTransaction(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
timestamp = models.DateTimeField(default=timezone_now, db_index=True)
|
2019-06-18 19:54:09 +02:00
|
|
|
# Marks if the data archived in this transaction has been restored:
|
2022-08-15 19:10:58 +02:00
|
|
|
restored = models.BooleanField(default=False, db_index=True)
|
2019-06-18 19:54:09 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
type = models.PositiveSmallIntegerField(db_index=True)
|
2019-06-18 19:54:09 +02:00
|
|
|
# Valid types:
|
|
|
|
RETENTION_POLICY_BASED = 1 # Archiving was executed due to automated retention policies
|
|
|
|
MANUAL = 2 # Archiving was run manually, via move_messages_to_archive function
|
|
|
|
|
|
|
|
# ForeignKey to the realm with which objects archived in this transaction are associated.
|
|
|
|
# If type is set to MANUAL, this should be null.
|
2022-08-15 19:10:58 +02:00
|
|
|
realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2019-06-24 16:34:54 +02:00
|
|
|
def __str__(self) -> str:
|
2023-03-08 22:18:59 +01:00
|
|
|
return "id: {id}, type: {type}, realm: {realm}, timestamp: {timestamp}".format(
|
2019-06-24 16:34:54 +02:00
|
|
|
id=self.id,
|
|
|
|
type="MANUAL" if self.type == self.MANUAL else "RETENTION_POLICY_BASED",
|
|
|
|
realm=self.realm.string_id if self.realm else None,
|
2020-04-10 05:23:40 +02:00
|
|
|
timestamp=self.timestamp,
|
2019-06-24 16:34:54 +02:00
|
|
|
)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
class ArchivedMessage(AbstractMessage):
|
2018-07-25 00:29:05 +02:00
|
|
|
"""Used as a temporary holding place for deleted messages before they
|
|
|
|
are permanently deleted. This is an important part of a robust
|
|
|
|
'message retention' feature.
|
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
archive_transaction = models.ForeignKey(ArchiveTransaction, on_delete=CASCADE)
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class Message(AbstractMessage):
|
2023-04-17 17:02:07 +02:00
|
|
|
# Recipient types used when a Message object is provided to
|
|
|
|
# Zulip clients via the API.
|
|
|
|
#
|
|
|
|
# A detail worth noting:
|
|
|
|
# * "direct" was introduced in 2023 with the goal of
|
|
|
|
# deprecating the original "private" and becoming the
|
|
|
|
# preferred way to indicate a personal or huddle
|
|
|
|
# Recipient type via the API.
|
|
|
|
API_RECIPIENT_TYPES = ["direct", "private", "stream"]
|
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
search_tsvector = SearchVectorField(null=True)
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2023-08-23 08:44:02 +02:00
|
|
|
DEFAULT_SELECT_RELATED = ["sender", "realm", "recipient", "sending_client"]
|
2023-08-01 16:12:18 +02:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def topic_name(self) -> str:
|
2016-07-14 17:48:11 +02:00
|
|
|
"""
|
|
|
|
Please start using this helper to facilitate an
|
|
|
|
eventual switch over to a separate topic table.
|
|
|
|
"""
|
|
|
|
return self.subject
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2018-11-01 16:05:30 +01:00
|
|
|
def set_topic_name(self, topic_name: str) -> None:
|
|
|
|
self.subject = topic_name
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def is_stream_message(self) -> bool:
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-10-28 21:53:47 +02:00
|
|
|
Find out whether a message is a stream message by
|
|
|
|
looking up its recipient.type. TODO: Make this
|
|
|
|
an easier operation by denormalizing the message
|
2020-03-28 01:25:56 +01:00
|
|
|
type onto Message, either explicitly (message.type)
|
2017-10-28 21:53:47 +02:00
|
|
|
or implicitly (message.stream_id is not None).
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-10-28 21:53:47 +02:00
|
|
|
return self.recipient.type == Recipient.STREAM
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_realm(self) -> Realm:
|
2023-08-10 05:59:25 +02:00
|
|
|
return self.realm
|
2013-08-22 16:56:37 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def save_rendered_content(self) -> None:
|
2013-09-20 21:25:51 +02:00
|
|
|
self.save(update_fields=["rendered_content", "rendered_content_version"])
|
|
|
|
|
2013-09-21 16:46:28 +02:00
|
|
|
@staticmethod
|
2021-02-12 08:19:30 +01:00
|
|
|
def need_to_render_content(
|
|
|
|
rendered_content: Optional[str],
|
|
|
|
rendered_content_version: Optional[int],
|
|
|
|
markdown_version: int,
|
|
|
|
) -> bool:
|
|
|
|
return (
|
|
|
|
rendered_content is None
|
|
|
|
or rendered_content_version is None
|
|
|
|
or rendered_content_version < markdown_version
|
|
|
|
)
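# Worked example for need_to_render_content (added for clarity, not in the
# original): a message stored with rendered_content_version=2 needs
# re-rendering once the Markdown processor's version is bumped to 3, and a
# message with rendered_content=None always does.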
|
2012-08-30 19:56:15 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def sent_by_human(self) -> bool:
|
2018-07-22 17:28:28 +02:00
|
|
|
"""Used to determine whether a message was sent by a full Zulip UI
|
|
|
|
style client (and thus whether the message should be treated
|
|
|
|
as sent by a human and automatically marked as read for the
|
|
|
|
sender). The purpose of this distinction is to ensure that
|
|
|
|
# messages sent to the user by e.g. a Google Calendar integration
|
|
|
|
using the user's own API key don't get marked as read
|
|
|
|
automatically.
|
|
|
|
"""
|
2013-12-31 22:42:38 +01:00
|
|
|
sending_client = self.sending_client.name.lower()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
return (
|
2023-09-30 12:40:39 +02:00
|
|
|
(
|
|
|
|
sending_client
|
|
|
|
in (
|
|
|
|
"zulipandroid",
|
|
|
|
"zulipios",
|
|
|
|
"zulipdesktop",
|
|
|
|
"zulipmobile",
|
|
|
|
"zulipelectron",
|
|
|
|
"zulipterminal",
|
|
|
|
"snipe",
|
|
|
|
"website",
|
|
|
|
"ios",
|
|
|
|
"android",
|
|
|
|
)
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2023-09-30 12:40:39 +02:00
|
|
|
or ("desktop app" in sending_client)
|
|
|
|
# Since the vast majority of messages are sent by humans
|
|
|
|
# in Zulip, treat test suite messages as such.
|
|
|
|
or (sending_client == "test suite" and settings.TEST_SUITE)
|
|
|
|
)
|
2013-12-31 22:42:38 +01:00
|
|
|
|
2016-07-10 22:58:46 +02:00
|
|
|
@staticmethod
|
2018-05-11 02:24:34 +02:00
|
|
|
def is_status_message(content: str, rendered_content: str) -> bool:
|
2016-07-10 22:58:46 +02:00
|
|
|
"""
|
2019-12-03 15:29:44 +01:00
|
|
|
"status messages" start with /me and have special rendering:
|
|
|
|
/me loves chocolate -> Full Name loves chocolate
|
2016-07-10 22:58:46 +02:00
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
if content.startswith("/me "):
|
2019-12-03 15:29:44 +01:00
|
|
|
return True
|
2016-07-10 22:58:46 +02:00
|
|
|
return False
|
|
|
|
|
2022-02-23 06:21:17 +01:00
|
|
|
class Meta:
|
|
|
|
indexes = [
|
2022-02-23 08:14:01 +01:00
|
|
|
GinIndex("search_tsvector", fastupdate=False, name="zerver_message_search_tsvector"),
|
2023-08-30 21:38:59 +02:00
|
|
|
models.Index(
|
|
|
|
# For moving messages between streams or marking
|
|
|
|
# streams as read. The "id" at the end makes it easy
|
|
|
|
# to scan the resulting messages in order, and perform
|
|
|
|
# batching.
|
|
|
|
"realm_id",
|
|
|
|
"recipient_id",
|
|
|
|
"id",
|
|
|
|
name="zerver_message_realm_recipient_id",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# For generating digest emails and message archiving,
|
|
|
|
# which both group by stream.
|
|
|
|
"realm_id",
|
|
|
|
"recipient_id",
|
|
|
|
"date_sent",
|
|
|
|
name="zerver_message_realm_recipient_date_sent",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# For exports, which want to limit both sender and
|
|
|
|
# receiver. The prefix of this index (realm_id,
|
|
|
|
# sender_id) can be used for scrubbing users and/or
|
|
|
|
# deleting users' messages.
|
|
|
|
"realm_id",
|
|
|
|
"sender_id",
|
|
|
|
"recipient_id",
|
|
|
|
name="zerver_message_realm_sender_recipient",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# For analytics queries
|
|
|
|
"realm_id",
|
|
|
|
"date_sent",
|
|
|
|
name="zerver_message_realm_date_sent",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# For users searching by topic (but not stream), which
|
|
|
|
# is done case-insensitively
|
|
|
|
"realm_id",
|
|
|
|
Upper("subject"),
|
|
|
|
F("id").desc(nulls_last=True),
|
|
|
|
name="zerver_message_realm_upper_subject",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# Most stream/topic searches are case-insensitive by
|
|
|
|
# topic name (e.g. messages_for_topic). The "id" at
|
|
|
|
# the end makes it easy to scan the resulting messages
|
|
|
|
# in order, and perform batching.
|
|
|
|
"realm_id",
|
|
|
|
"recipient_id",
|
|
|
|
Upper("subject"),
|
|
|
|
F("id").desc(nulls_last=True),
|
|
|
|
name="zerver_message_realm_recipient_upper_subject",
|
|
|
|
),
|
|
|
|
models.Index(
|
2023-09-27 18:17:41 +02:00
|
|
|
# Used by already_sent_mirrored_message_id, and when
|
|
|
|
# determining recent topics (we post-process to merge
|
|
|
|
# and show the most recent case)
|
2023-08-30 21:38:59 +02:00
|
|
|
"realm_id",
|
|
|
|
"recipient_id",
|
|
|
|
"subject",
|
|
|
|
F("id").desc(nulls_last=True),
|
|
|
|
name="zerver_message_realm_recipient_subject",
|
|
|
|
),
|
|
|
|
models.Index(
|
|
|
|
# Only used by update_first_visible_message_id
|
|
|
|
"realm_id",
|
|
|
|
F("id").desc(nulls_last=True),
|
|
|
|
name="zerver_message_realm_id",
|
|
|
|
),
|
2022-02-23 06:21:17 +01:00
|
|
|
]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-23 22:47:50 +02:00
|
|
|
def get_context_for_message(message: Message) -> QuerySet[Message]:
|
2014-07-15 21:03:51 +02:00
|
|
|
return Message.objects.filter(
|
2023-08-30 21:19:37 +02:00
|
|
|
# Uses index: zerver_message_realm_recipient_upper_subject
|
|
|
|
realm_id=message.realm_id,
|
2014-07-15 21:03:51 +02:00
|
|
|
recipient_id=message.recipient_id,
|
2023-02-07 17:43:35 +01:00
|
|
|
subject__iexact=message.subject,
|
2014-07-15 21:03:51 +02:00
|
|
|
id__lt=message.id,
|
2019-08-28 02:43:19 +02:00
|
|
|
date_sent__gt=message.date_sent - timedelta(minutes=15),
|
2021-02-12 08:20:45 +01:00
|
|
|
).order_by("-id")[:10]
|
2014-07-15 21:03:51 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-07-08 02:25:55 +02:00
|
|
|
post_save.connect(flush_message, sender=Message)
|
2014-07-15 21:03:51 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-05-29 16:01:34 +02:00
|
|
|
class AbstractSubMessage(models.Model):
|
2018-02-11 14:08:01 +01:00
|
|
|
# We can send little text messages that are associated with a regular
|
|
|
|
# Zulip message. These can be used for experimental widgets like embedded
|
|
|
|
# games, surveys, mini threads, etc. These are designed to be pretty
|
|
|
|
# generic in purpose.
|
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
sender = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
msg_type = models.TextField()
|
|
|
|
content = models.TextField()
|
2018-02-11 14:08:01 +01:00
|
|
|
|
2019-05-29 16:01:34 +02:00
|
|
|
class Meta:
|
|
|
|
abstract = True
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-05-29 16:01:34 +02:00
|
|
|
class SubMessage(AbstractSubMessage):
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE)
|
2019-05-29 16:01:34 +02:00
|
|
|
|
2018-02-11 14:08:01 +01:00
|
|
|
@staticmethod
|
|
|
|
def get_raw_db_rows(needed_ids: List[int]) -> List[Dict[str, Any]]:
|
2021-02-12 08:20:45 +01:00
|
|
|
fields = ["id", "message_id", "sender_id", "msg_type", "content"]
|
2018-02-11 14:08:01 +01:00
|
|
|
query = SubMessage.objects.filter(message_id__in=needed_ids).values(*fields)
|
2021-02-12 08:20:45 +01:00
|
|
|
query = query.order_by("message_id", "id")
|
2018-02-11 14:08:01 +01:00
|
|
|
return list(query)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-05-29 16:01:34 +02:00
|
|
|
class ArchivedSubMessage(AbstractSubMessage):
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE)
|
2019-05-29 16:01:34 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-02-11 14:09:17 +01:00
|
|
|
post_save.connect(flush_submessage, sender=SubMessage)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-23 19:24:22 +02:00
|
|
|
class Draft(models.Model):
|
2021-02-12 08:19:30 +01:00
|
|
|
"""Server-side storage model for storing drafts so that drafts can be synced across
|
2020-07-23 19:24:22 +02:00
|
|
|
multiple clients/devices.
|
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
|
|
|
|
recipient = models.ForeignKey(Recipient, null=True, on_delete=models.SET_NULL)
|
|
|
|
topic = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH, db_index=True)
|
|
|
|
content = models.TextField() # Length should not exceed MAX_MESSAGE_LENGTH
|
|
|
|
last_edit_time = models.DateTimeField(db_index=True)
|
2020-07-23 19:24:22 +02:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2020-07-23 19:24:22 +02:00
|
|
|
def __str__(self) -> str:
|
2023-03-08 22:18:59 +01:00
|
|
|
return f"{self.user_profile.email} / {self.id} / {self.last_edit_time}"
|
2020-07-23 19:24:22 +02:00
|
|
|
|
2020-08-04 05:31:41 +02:00
|
|
|
def to_dict(self) -> Dict[str, Any]:
|
2023-04-14 21:07:57 +02:00
|
|
|
to, recipient_type_str = get_recipient_ids(self.recipient, self.user_profile_id)
|
2020-07-23 19:24:22 +02:00
|
|
|
return {
|
2020-08-06 08:10:35 +02:00
|
|
|
"id": self.id,
|
2023-04-14 21:06:51 +02:00
|
|
|
"type": recipient_type_str,
|
2020-07-23 19:24:22 +02:00
|
|
|
"to": to,
|
|
|
|
"topic": self.topic,
|
|
|
|
"content": self.content,
|
2020-08-04 05:31:41 +02:00
|
|
|
"timestamp": int(self.last_edit_time.timestamp()),
|
2020-07-23 19:24:22 +02:00
|
|
|
}
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-23 22:11:21 +02:00
|
|
|
class AbstractEmoji(models.Model):
|
2018-07-25 05:57:10 +02:00
|
|
|
"""For emoji reactions to messages (and potentially future reaction types).
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2018-07-25 05:57:10 +02:00
|
|
|
Emoji are surprisingly complicated to implement correctly. For details
|
|
|
|
on how this subsystem works, see:
|
2018-07-25 00:29:05 +02:00
|
|
|
https://zulip.readthedocs.io/en/latest/subsystems/emoji.html
|
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2018-07-25 05:57:10 +02:00
|
|
|
# The user-facing name for an emoji reaction. With emoji aliases,
|
|
|
|
# there may be multiple accepted names for a given emoji; this
|
|
|
|
# field encodes which one the user selected.
|
2022-08-15 19:10:58 +02:00
|
|
|
emoji_name = models.TextField()
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
UNICODE_EMOJI = "unicode_emoji"
|
|
|
|
REALM_EMOJI = "realm_emoji"
|
|
|
|
ZULIP_EXTRA_EMOJI = "zulip_extra_emoji"
|
2021-02-12 08:19:30 +01:00
|
|
|
REACTION_TYPES = (
|
2021-04-16 00:57:30 +02:00
|
|
|
(UNICODE_EMOJI, gettext_lazy("Unicode emoji")),
|
|
|
|
(REALM_EMOJI, gettext_lazy("Custom emoji")),
|
|
|
|
(ZULIP_EXTRA_EMOJI, gettext_lazy("Zulip extra emoji")),
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2022-08-15 19:10:58 +02:00
|
|
|
reaction_type = models.CharField(default=UNICODE_EMOJI, choices=REACTION_TYPES, max_length=30)
|
2016-11-03 18:49:00 +01:00
|
|
|
|
2021-12-06 20:14:49 +01:00
|
|
|
# A string with the property that (realm, reaction_type,
|
|
|
|
# emoji_code) uniquely determines the emoji glyph.
|
|
|
|
#
|
|
|
|
# We cannot use `emoji_name` for this purpose, since the
|
|
|
|
# name-to-glyph mappings for unicode emoji change with time as we
|
|
|
|
# update our emoji database, and multiple custom emoji can have
|
|
|
|
# the same `emoji_name` in a realm (at most one can have
|
|
|
|
# `deactivated=False`). The format for `emoji_code` varies by
|
|
|
|
# `reaction_type`:
|
2018-07-25 05:57:10 +02:00
|
|
|
#
|
|
|
|
# * For Unicode emoji, a dash-separated hex encoding of the sequence of
|
|
|
|
# Unicode codepoints that define this emoji in the Unicode
|
|
|
|
# specification. For examples, see "non_qualified" or "unified" in the
|
|
|
|
# following data, with "non_qualified" taking precedence when both present:
|
|
|
|
# https://raw.githubusercontent.com/iamcal/emoji-data/master/emoji_pretty.json
|
|
|
|
#
|
2021-12-06 20:14:49 +01:00
|
|
|
# * For user uploaded custom emoji (`reaction_type="realm_emoji"`), the stringified ID
|
|
|
|
# of the RealmEmoji object, computed as `str(realm_emoji.id)`.
|
2018-07-25 05:57:10 +02:00
|
|
|
#
|
2021-12-06 20:14:49 +01:00
|
|
|
# * For "Zulip extra emoji" (like :zulip:), the name of the emoji (e.g. "zulip").
|
2022-08-15 19:10:58 +02:00
|
|
|
emoji_code = models.TextField()
|
2018-07-25 05:57:10 +02:00
|
|
|
|
2021-07-23 22:11:21 +02:00
|
|
|
class Meta:
|
|
|
|
abstract = True
|
|
|
|
|
|
|
|
|
|
|
|
class AbstractReaction(AbstractEmoji):
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2019-05-29 15:52:57 +02:00
|
|
|
abstract = True
|
2023-02-25 02:29:12 +01:00
|
|
|
unique_together = ("user_profile", "message", "reaction_type", "emoji_code")
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-11-03 18:49:00 +01:00
|
|
|
|
2019-05-29 15:52:57 +02:00
|
|
|
class Reaction(AbstractReaction):
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE)
|
2019-05-29 15:52:57 +02:00
|
|
|
|
2016-12-06 07:19:34 +01:00
|
|
|
@staticmethod
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_raw_db_rows(needed_ids: List[int]) -> List[Dict[str, Any]]:
|
2021-02-12 08:19:30 +01:00
|
|
|
fields = [
|
2021-02-12 08:20:45 +01:00
|
|
|
"message_id",
|
|
|
|
"emoji_name",
|
|
|
|
"emoji_code",
|
|
|
|
"reaction_type",
|
|
|
|
"user_profile__email",
|
2021-04-22 16:23:09 +02:00
|
|
|
"user_profile_id",
|
2021-02-12 08:20:45 +01:00
|
|
|
"user_profile__full_name",
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
2021-11-30 01:47:09 +01:00
|
|
|
# The ordering is important here, as it makes it convenient
|
|
|
|
# for clients to display reactions in order without
|
|
|
|
# client-side sorting code.
|
|
|
|
return Reaction.objects.filter(message_id__in=needed_ids).values(*fields).order_by("id")
|
2016-12-06 07:19:34 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2019-01-03 22:25:04 +01:00
|
|
|
def __str__(self) -> str:
|
2020-06-10 06:41:04 +02:00
|
|
|
return f"{self.user_profile.email} / {self.message.id} / {self.emoji_name}"
|
2019-01-03 22:25:04 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-05-29 15:52:57 +02:00
|
|
|
class ArchivedReaction(AbstractReaction):
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE)
|
2019-05-29 15:52:57 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-06-08 02:04:09 +02:00
|
|
|
# Whenever a message is sent, for each user subscribed to the
|
2021-05-14 19:48:00 +02:00
|
|
|
# corresponding Recipient object (that is not long-term idle), we add
|
|
|
|
# a row to the UserMessage table indicating that that user received
|
|
|
|
# that message. This table allows us to quickly query any user's last
|
|
|
|
# 1000 messages to generate the home view and search exactly the
|
|
|
|
# user's message history.
|
2016-04-01 08:42:38 +02:00
|
|
|
#
|
2021-05-14 19:48:00 +02:00
|
|
|
# The long-term idle optimization is extremely important for large,
|
|
|
|
# open organizations, and is described in detail here:
|
|
|
|
# https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html#soft-deactivation
|
2016-04-01 08:42:38 +02:00
|
|
|
#
|
2021-05-14 19:48:00 +02:00
|
|
|
# In particular, new messages to public streams will only generate
|
|
|
|
# UserMessage rows for Members who are long_term_idle if they would
|
|
|
|
# have nonzero flags for the message (e.g. a mention, alert word, or
|
|
|
|
# mobile push notification).
|
|
|
|
#
|
|
|
|
# The flags field stores metadata like whether the user has read the
|
|
|
|
# message, starred or collapsed the message, was mentioned in the
|
|
|
|
# message, etc. We make use of postgres partial indexes on flags to make
|
|
|
|
# queries for "User X's messages with flag Y" extremely fast without
|
|
|
|
# consuming much storage space.
|
|
|
|
#
|
|
|
|
# UserMessage is the largest table in many Zulip installations, even
|
2016-04-01 08:42:38 +02:00
|
|
|
# though each row is only 4 integers.
|
2017-10-27 08:42:27 +02:00
|
|
|
class AbstractUserMessage(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
id = models.BigAutoField(primary_key=True)
|
2019-08-24 01:34:36 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
2019-06-04 09:26:45 +02:00
|
|
|
# The order here is important! It's the order of fields in the bitfield.
|
|
|
|
ALL_FLAGS = [
|
2021-02-12 08:20:45 +01:00
|
|
|
"read",
|
|
|
|
"starred",
|
|
|
|
"collapsed",
|
|
|
|
"mentioned",
|
2023-11-03 15:20:44 +01:00
|
|
|
"stream_wildcard_mentioned",
|
2023-10-19 10:17:09 +02:00
|
|
|
"topic_wildcard_mentioned",
|
|
|
|
"group_mentioned",
|
|
|
|
# These next 2 flags are from features that have since been removed.
|
2023-11-03 12:07:53 +01:00
|
|
|
# We've cleared these 2 flags in migration 0486.
|
2021-02-12 08:20:45 +01:00
|
|
|
"force_expand",
|
|
|
|
"force_collapse",
|
2019-06-04 09:26:45 +02:00
|
|
|
# Whether the message contains any of the user's alert words.
|
2021-02-12 08:20:45 +01:00
|
|
|
"has_alert_word",
|
2019-06-04 09:26:45 +02:00
|
|
|
# The historical flag is used to mark messages which the user
|
|
|
|
# did not receive when they were sent, but later added to
|
|
|
|
# their history via e.g. starring the message. This is
|
|
|
|
# important accounting for the "Subscribed to stream" dividers.
|
2021-02-12 08:20:45 +01:00
|
|
|
"historical",
|
2023-06-19 16:42:11 +02:00
|
|
|
# Whether the message is a direct message; this flag is a
|
2019-06-04 09:26:45 +02:00
|
|
|
# denormalization of message.recipient.type to support an
|
2023-06-19 16:42:11 +02:00
|
|
|
# efficient index on UserMessage for a user's direct messages.
|
2021-02-12 08:20:45 +01:00
|
|
|
"is_private",
|
2019-06-04 09:26:45 +02:00
|
|
|
# Whether we've sent a push notification to the user's mobile
|
|
|
|
# devices for this message that has not been revoked.
|
2021-02-12 08:20:45 +01:00
|
|
|
"active_mobile_push_notification",
|
2019-06-04 09:26:45 +02:00
|
|
|
]
|
2018-08-08 11:18:44 +02:00
|
|
|
# Certain flags are used only for internal accounting within the
|
2019-06-04 09:26:45 +02:00
|
|
|
# Zulip backend, and don't make sense to expose to the API.
|
2018-08-09 22:57:36 +02:00
|
|
|
NON_API_FLAGS = {"is_private", "active_mobile_push_notification"}
|
2019-06-04 09:26:45 +02:00
|
|
|
# Certain additional flags are just set once when the UserMessage
|
|
|
|
# row is created.
|
|
|
|
NON_EDITABLE_FLAGS = {
|
|
|
|
# These flags are bookkeeping and don't make sense to edit.
|
|
|
|
"has_alert_word",
|
|
|
|
"mentioned",
|
2023-11-03 15:20:44 +01:00
|
|
|
"stream_wildcard_mentioned",
|
2023-10-19 10:17:09 +02:00
|
|
|
"topic_wildcard_mentioned",
|
|
|
|
"group_mentioned",
|
2019-06-04 09:26:45 +02:00
|
|
|
"historical",
|
|
|
|
# Unused flags can't be edited.
|
|
|
|
"force_expand",
|
|
|
|
"force_collapse",
|
|
|
|
}
|
2020-04-22 01:09:50 +02:00
|
|
|
flags: BitHandler = BitField(flags=ALL_FLAGS, default=0)
|
2012-09-07 17:04:41 +02:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2016-11-01 11:26:38 +01:00
|
|
|
abstract = True
|
2012-11-08 21:08:13 +01:00
|
|
|
unique_together = ("user_profile", "message")
|
|
|
|
|
2017-05-23 03:02:01 +02:00
|
|
|
@staticmethod
|
2022-06-27 08:00:09 +02:00
|
|
|
def where_flag_is_present(flagattr: Bit) -> str:
|
2018-08-22 00:03:00 +02:00
|
|
|
# Use this for Django ORM queries that access messages by flag (e.g. starred messages).
|
|
|
|
# This custom SQL plays nice with our partial indexes. Grep
|
|
|
|
# the code for example usage.
|
|
|
|
#
|
|
|
|
# The key detail is that e.g.
|
|
|
|
# UserMessage.objects.filter(user_profile=user_profile, flags=UserMessage.flags.starred)
|
|
|
|
# will generate a query involving `flags & 2 = 2`, which doesn't match our index.
|
2022-06-27 08:00:09 +02:00
|
|
|
return f"flags & {1 << flagattr.number} <> 0"
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def where_flag_is_absent(flagattr: Bit) -> str:
|
|
|
|
return f"flags & {1 << flagattr.number} = 0"
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def where_unread() -> str:
|
2022-10-08 06:10:17 +02:00
|
|
|
return AbstractUserMessage.where_flag_is_absent(AbstractUserMessage.flags.read)
|
2022-06-27 08:00:09 +02:00
|
|
|
|
2021-06-16 21:15:47 +02:00
|
|
|
@staticmethod
|
|
|
|
def where_read() -> str:
|
2022-10-08 06:10:17 +02:00
|
|
|
return AbstractUserMessage.where_flag_is_present(AbstractUserMessage.flags.read)
|
2021-06-16 21:15:47 +02:00
|
|
|
|
2022-06-27 08:00:09 +02:00
|
|
|
@staticmethod
|
|
|
|
def where_starred() -> str:
|
2022-10-08 06:10:17 +02:00
|
|
|
return AbstractUserMessage.where_flag_is_present(AbstractUserMessage.flags.starred)
|
2018-08-22 00:03:00 +02:00
|
|
|
|
2018-08-22 00:08:41 +02:00
|
|
|
@staticmethod
|
|
|
|
def where_active_push_notification() -> str:
|
2022-06-27 08:00:09 +02:00
|
|
|
return AbstractUserMessage.where_flag_is_present(
|
2022-10-08 06:10:17 +02:00
|
|
|
AbstractUserMessage.flags.active_mobile_push_notification
|
2022-06-27 08:00:09 +02:00
|
|
|
)
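# Illustrative usage sketch (an assumption, not from this module): these
# helpers are intended to be passed to QuerySet.extra() so the generated SQL
# matches the partial indexes, e.g.
#
#     UserMessage.objects.filter(user_profile=user_profile).extra(
#         where=[UserMessage.where_starred()]
#     )
#
# which emits "flags & 2 <> 0" rather than the "flags & 2 = 2" form produced
# by filter(flags=UserMessage.flags.starred).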
|
2018-08-22 00:08:41 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def flags_list(self) -> List[str]:
|
2017-09-10 21:36:23 +02:00
|
|
|
flags = int(self.flags)
|
|
|
|
return self.flags_list_for_flags(flags)
|
|
|
|
|
|
|
|
@staticmethod
|
2017-11-27 07:33:05 +01:00
|
|
|
def flags_list_for_flags(val: int) -> List[str]:
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-09-09 19:47:38 +02:00
|
|
|
This function is highly optimized, because it actually slows down
|
|
|
|
sending messages in a naive implementation.
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-11-07 18:40:39 +01:00
|
|
|
flags = []
|
|
|
|
mask = 1
|
|
|
|
for flag in UserMessage.ALL_FLAGS:
|
2018-08-08 11:18:44 +02:00
|
|
|
if (val & mask) and flag not in AbstractUserMessage.NON_API_FLAGS:
|
2017-11-07 18:40:39 +01:00
|
|
|
flags.append(flag)
|
|
|
|
mask <<= 1
|
|
|
|
return flags
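# Worked example (added for clarity, not in the original): val=9 sets bits 0
# and 3, so flags_list_for_flags(9) == ["read", "mentioned"]; val=2049 sets
# "read" plus "is_private", but the latter is in NON_API_FLAGS and is
# filtered out, leaving just ["read"].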
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
|
2018-07-25 00:29:05 +02:00
|
|
|
class UserMessage(AbstractUserMessage):
    message = models.ForeignKey(Message, on_delete=CASCADE)

    class Meta(AbstractUserMessage.Meta):
        indexes = [
            models.Index(
                "user_profile",
                "message",
                condition=Q(flags__andnz=AbstractUserMessage.flags.starred.mask),
                name="zerver_usermessage_starred_message_id",
            ),
            models.Index(
                "user_profile",
                "message",
                condition=Q(flags__andnz=AbstractUserMessage.flags.mentioned.mask),
                name="zerver_usermessage_mentioned_message_id",
            ),
            models.Index(
                "user_profile",
                "message",
                condition=Q(flags__andz=AbstractUserMessage.flags.read.mask),
                name="zerver_usermessage_unread_message_id",
            ),
            models.Index(
                "user_profile",
                "message",
                condition=Q(flags__andnz=AbstractUserMessage.flags.has_alert_word.mask),
                name="zerver_usermessage_has_alert_word_message_id",
            ),
            models.Index(
                "user_profile",
                "message",
                condition=Q(flags__andnz=AbstractUserMessage.flags.mentioned.mask)
                | Q(flags__andnz=AbstractUserMessage.flags.stream_wildcard_mentioned.mask),
                name="zerver_usermessage_wildcard_mentioned_message_id",
            ),
            models.Index(
                "user_profile",
                "message",
                condition=Q(
                    flags__andnz=AbstractUserMessage.flags.mentioned.mask
                    | AbstractUserMessage.flags.stream_wildcard_mentioned.mask
                    | AbstractUserMessage.flags.topic_wildcard_mentioned.mask
                    | AbstractUserMessage.flags.group_mentioned.mask
                ),
                name="zerver_usermessage_any_mentioned_message_id",
            ),
            models.Index(
                "user_profile",
                "message",
                condition=Q(flags__andnz=AbstractUserMessage.flags.is_private.mask),
                name="zerver_usermessage_is_private_message_id",
            ),
            models.Index(
                "user_profile",
                "message",
                condition=Q(
                    flags__andnz=AbstractUserMessage.flags.active_mobile_push_notification.mask
                ),
                name="zerver_usermessage_active_mobile_push_notification_id",
            ),
        ]

    @override
    def __str__(self) -> str:
        recipient_string = self.message.recipient.label()
        return f"{recipient_string} / {self.user_profile.email} ({self.flags_list()})"

    @staticmethod
    def select_for_update_query() -> QuerySet["UserMessage"]:
        """This SELECT FOR UPDATE query ensures consistent ordering on
        the row locks acquired by a bulk update operation to modify
        message flags using bitand/bitor.

        This consistent ordering is important to prevent deadlocks when
        2 or more bulk updates to the same rows in the UserMessage table
        race against each other (For example, if a client submits
        simultaneous duplicate API requests to mark a certain set of
        messages as read).
        """
        return UserMessage.objects.select_for_update().order_by("message_id")

    @staticmethod
    def has_any_mentions(user_profile_id: int, message_id: int) -> bool:
        # The query uses the 'zerver_usermessage_any_mentioned_message_id' index.
        return UserMessage.objects.filter(
            Q(
                flags__andnz=UserMessage.flags.mentioned.mask
                | UserMessage.flags.stream_wildcard_mentioned.mask
                | UserMessage.flags.topic_wildcard_mentioned.mask
                | UserMessage.flags.group_mentioned.mask
            ),
            user_profile_id=user_profile_id,
            message_id=message_id,
        ).exists()


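# Illustrative sketch, not the exact upstream code path: taking row locks in
# message_id order via select_for_update_query() before a bitwise flag update,
# so concurrent bulk updates cannot deadlock. The helper name and parameters
# are assumptions for this example only.
def _example_mark_messages_read(user_profile: UserProfile, message_ids: List[int]) -> int:
    from django.db.models import F  # local import keeps the sketch self-contained

    with transaction.atomic():
        # Evaluating the locked queryset acquires the row locks, in ascending
        # message_id order for every caller.
        locked_ids = list(
            UserMessage.select_for_update_query()
            .filter(user_profile=user_profile, message_id__in=message_ids)
            .values_list("id", flat=True)
        )
        # bitor() sets the read bit without clobbering the other flags.
        return UserMessage.objects.filter(id__in=locked_ids).update(
            flags=F("flags").bitor(UserMessage.flags.read)
        )

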
def get_usermessage_by_message_id(
    user_profile: UserProfile, message_id: int
) -> Optional[UserMessage]:
    try:
        return UserMessage.objects.select_related().get(
            user_profile=user_profile, message_id=message_id
        )
    except UserMessage.DoesNotExist:
        return None


class ArchivedUserMessage(AbstractUserMessage):
    """Used as a temporary holding place for deleted UserMessage objects
    before they are permanently deleted. This is an important part of
    a robust 'message retention' feature.
    """

    message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE)

    @override
    def __str__(self) -> str:
        recipient_string = self.message.recipient.label()
        return f"{recipient_string} / {self.user_profile.email} ({self.flags_list()})"


class AbstractAttachment(models.Model):
    file_name = models.TextField(db_index=True)

    # path_id is a storage location agnostic representation of the path of the file.
    # If the path of a file is http://localhost:9991/user_uploads/a/b/abc/temp_file.py
    # then its path_id will be a/b/abc/temp_file.py.
    path_id = models.TextField(db_index=True, unique=True)
    owner = models.ForeignKey(UserProfile, on_delete=CASCADE)
    realm = models.ForeignKey(Realm, on_delete=CASCADE)

    create_time = models.DateTimeField(
        default=timezone_now,
        db_index=True,
    )
    # Size of the uploaded file, in bytes
    size = models.IntegerField()

    # The two fields below serve as caches to let us avoid looking up
    # the corresponding messages/streams to check permissions before
    # serving these files.
    #
    # For both fields, the `null` state is used when a change in
    # message permissions means that we need to determine their proper
    # value.

    # Whether this attachment has been posted to a public stream, and
    # thus should be available to all non-guest users in the
    # organization (even if they weren't a recipient of a message
    # linking to it).
    is_realm_public = models.BooleanField(default=False, null=True)
    # Whether this attachment has been posted to a web-public stream,
    # and thus should be available to everyone on the internet, even
    # if the person isn't logged in.
    is_web_public = models.BooleanField(default=False, null=True)

    class Meta:
        abstract = True

    @override
    def __str__(self) -> str:
        return self.file_name


class ArchivedAttachment(AbstractAttachment):
    """Used as a temporary holding place for deleted Attachment objects
    before they are permanently deleted. This is an important part of
    a robust 'message retention' feature.

    Unlike the similar archive tables, ArchivedAttachment does not
    have an ArchiveTransaction foreign key, and thus will not be
    directly deleted by clean_archived_data. Instead, attachments that
    were only referenced by now fully deleted messages will leave
    ArchivedAttachment objects with empty `.messages`.

    A second step, delete_old_unclaimed_attachments, will delete the
    resulting orphaned ArchivedAttachment objects, along with removing
    the associated uploaded files from storage.
    """

    messages = models.ManyToManyField(
        ArchivedMessage, related_name="attachment_set", related_query_name="attachment"
    )


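# Illustrative query, not upstream code: per the docstring above, the
# candidates that delete_old_unclaimed_attachments eventually removes are the
# ArchivedAttachment rows whose archived messages have all been deleted.
def _example_orphaned_archived_attachments() -> QuerySet[ArchivedAttachment]:
    return ArchivedAttachment.objects.filter(messages=None)

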
class Attachment(AbstractAttachment):
    messages = models.ManyToManyField(Message)

    # This is only present for Attachment and not ArchivedAttachment,
    # because ScheduledMessage is not subject to archiving.
    scheduled_messages = models.ManyToManyField("zerver.ScheduledMessage")

    def is_claimed(self) -> bool:
        return self.messages.exists() or self.scheduled_messages.exists()

    def to_dict(self) -> Dict[str, Any]:
        return {
            "id": self.id,
            "name": self.file_name,
            "path_id": self.path_id,
            "size": self.size,
            # convert to JavaScript-style UNIX timestamp so we can take
            # advantage of client time zones.
            "create_time": int(time.mktime(self.create_time.timetuple()) * 1000),
            "messages": [
                {
                    "id": m.id,
                    "date_sent": int(time.mktime(m.date_sent.timetuple()) * 1000),
                }
                for m in self.messages.all()
            ],
        }


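# Illustrative check, not upstream code: to_dict() above exposes
# millisecond-precision JavaScript-style timestamps; the sample datetime is an
# arbitrary assumption for the example.
def _example_js_timestamp() -> int:
    sample = datetime(2023, 1, 1, 12, 0, 0)
    # time.mktime() yields whole seconds for a local-time struct_time;
    # multiplying by 1000 matches the "create_time"/"date_sent" payload values.
    return int(time.mktime(sample.timetuple()) * 1000)

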
post_save.connect(flush_used_upload_space_cache, sender=Attachment)
post_delete.connect(flush_used_upload_space_cache, sender=Attachment)


def validate_attachment_request_for_spectator_access(
    realm: Realm, attachment: Attachment
) -> Optional[bool]:
    if attachment.realm != realm:
        return False

    # Update cached is_web_public property, if necessary.
    if attachment.is_web_public is None:
        # Fill the cache in a single query. This is important to avoid
        # a potential race condition between checking and setting,
        # where the attachment could have been moved again.
        Attachment.objects.filter(id=attachment.id, is_web_public__isnull=True).update(
            is_web_public=Exists(
                Message.objects.filter(
                    # Uses index: zerver_attachment_messages_attachment_id_message_id_key
                    realm_id=realm.id,
                    attachment=OuterRef("id"),
                    recipient__stream__invite_only=False,
                    recipient__stream__is_web_public=True,
                ),
            ),
        )
        attachment.refresh_from_db()

    if not attachment.is_web_public:
        return False

    if settings.RATE_LIMITING:
        try:
            from zerver.lib.rate_limiter import rate_limit_spectator_attachment_access_by_file

            rate_limit_spectator_attachment_access_by_file(attachment.path_id)
        except RateLimitedError:
            return False

    return True


def validate_attachment_request(
    maybe_user_profile: Union[UserProfile, AnonymousUser],
    path_id: str,
    realm: Optional[Realm] = None,
) -> Optional[bool]:
    try:
        attachment = Attachment.objects.get(path_id=path_id)
    except Attachment.DoesNotExist:
        return None

    if isinstance(maybe_user_profile, AnonymousUser):
        assert realm is not None
        return validate_attachment_request_for_spectator_access(realm, attachment)

    user_profile = maybe_user_profile
    assert isinstance(user_profile, UserProfile)

    # Update cached is_realm_public property, if necessary.
    if attachment.is_realm_public is None:
        # Fill the cache in a single query. This is important to avoid
        # a potential race condition between checking and setting,
        # where the attachment could have been moved again.
        Attachment.objects.filter(id=attachment.id, is_realm_public__isnull=True).update(
            is_realm_public=Exists(
                Message.objects.filter(
                    # Uses index: zerver_attachment_messages_attachment_id_message_id_key
                    realm_id=user_profile.realm_id,
                    attachment=OuterRef("id"),
                    recipient__stream__invite_only=False,
                ),
            ),
        )
        attachment.refresh_from_db()

    if user_profile == attachment.owner:
        # If you own the file, you can access it.
        return True
    if (
        attachment.is_realm_public
        and attachment.realm == user_profile.realm
        and user_profile.can_access_public_streams()
    ):
        # Any user in the realm can access realm-public files
        return True

    messages = attachment.messages.all()
    if UserMessage.objects.filter(user_profile=user_profile, message__in=messages).exists():
        # If it was sent in a direct message or private stream
        # message, then anyone who received that message can access it.
        return True

    # The user didn't receive any of the messages that included this
    # attachment. But they might still have access to it, if it was
    # sent to a stream they are on where history is public to
    # subscribers.

    # These are subscriptions to a stream one of the messages was sent to
    relevant_stream_ids = Subscription.objects.filter(
        user_profile=user_profile,
        active=True,
        recipient__type=Recipient.STREAM,
        recipient__in=[m.recipient_id for m in messages],
    ).values_list("recipient__type_id", flat=True)
    if len(relevant_stream_ids) == 0:
        return False

    return Stream.objects.filter(
        id__in=relevant_stream_ids, history_public_to_subscribers=True
    ).exists()


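# Illustrative sketch of an assumed caller, not upstream code: how a download
# code path might interpret the tri-state result of validate_attachment_request.
def _example_can_serve_file(
    maybe_user: Union[UserProfile, AnonymousUser], path_id: str, realm: Realm
) -> bool:
    result = validate_attachment_request(maybe_user, path_id, realm)
    if result is None:
        # Unknown path_id: treat as "not found" rather than "forbidden".
        return False
    return result

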
def get_old_unclaimed_attachments(
    weeks_ago: int,
) -> Tuple[QuerySet[Attachment], QuerySet[ArchivedAttachment]]:
    """
    The logic in this function is fairly tricky. The essence is that
    a file should be cleaned up if and only if it is not referenced by any
    Message, ScheduledMessage or ArchivedMessage. The way to find that out is through the
    Attachment and ArchivedAttachment tables.
    The queries are complicated by the fact that an uploaded file
    may have either only an Attachment row, only an ArchivedAttachment row,
    or both - depending on whether some, all or none of the messages
    linking to it have been archived.
    """
    delta_weeks_ago = timezone_now() - timedelta(weeks=weeks_ago)

    # The Attachment vs ArchivedAttachment queries are asymmetric because only
    # Attachment has the scheduled_messages relation.
    old_attachments = Attachment.objects.annotate(
        has_other_messages=Exists(
            ArchivedAttachment.objects.filter(id=OuterRef("id")).exclude(messages=None)
        )
    ).filter(
        messages=None,
        scheduled_messages=None,
        create_time__lt=delta_weeks_ago,
        has_other_messages=False,
    )
    old_archived_attachments = ArchivedAttachment.objects.annotate(
        has_other_messages=Exists(
            Attachment.objects.filter(id=OuterRef("id")).exclude(
                messages=None, scheduled_messages=None
            )
        )
    ).filter(messages=None, create_time__lt=delta_weeks_ago, has_other_messages=False)

    return old_attachments, old_archived_attachments


class Subscription(models.Model):
    """Keeps track of which users are part of the
    audience for a given Recipient object.

    For 1:1 and group direct message Recipient objects, only the
    user_profile and recipient fields have any meaning, defining the
    immutable set of users who are in the audience for that Recipient.

    For Recipient objects associated with a Stream, the remaining
    fields in this model describe the user's subscription to that stream.
    """

    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    recipient = models.ForeignKey(Recipient, on_delete=CASCADE)

    # Whether the user has since unsubscribed. We mark Subscription
    # objects as inactive, rather than deleting them, when a user
    # unsubscribes, so we can preserve user customizations like
    # notification settings, stream color, etc., if the user later
    # resubscribes.
    active = models.BooleanField(default=True)
    # This is a denormalization designed to improve the performance of
    # bulk queries of Subscription objects. Whether the subscribed user
    # is active tends to be a key condition in those queries.
    # We intentionally don't specify a default value to promote thinking
    # about this explicitly, as in some special cases, such as data import,
    # we may be creating Subscription objects for a user that's deactivated.
    is_user_active = models.BooleanField()

    # Whether this user has muted this stream.
    is_muted = models.BooleanField(default=False)

    DEFAULT_STREAM_COLOR = "#c2c2c2"
    color = models.CharField(max_length=10, default=DEFAULT_STREAM_COLOR)
    pin_to_top = models.BooleanField(default=False)

    # These fields are stream-level overrides for the user's default
    # configuration for notifications, configured in UserProfile. The
    # default, None, means we just inherit the user-level default.
    desktop_notifications = models.BooleanField(null=True, default=None)
    audible_notifications = models.BooleanField(null=True, default=None)
    push_notifications = models.BooleanField(null=True, default=None)
    email_notifications = models.BooleanField(null=True, default=None)
    wildcard_mentions_notify = models.BooleanField(null=True, default=None)

    class Meta:
        unique_together = ("user_profile", "recipient")
        indexes = [
            models.Index(
                fields=("recipient", "user_profile"),
                name="zerver_subscription_recipient_id_user_profile_id_idx",
                condition=Q(active=True, is_user_active=True),
            ),
        ]

    @override
    def __str__(self) -> str:
        return f"{self.user_profile!r} -> {self.recipient!r}"

    # Subscription fields included whenever a Subscription object is provided to
    # Zulip clients via the API. A few details worth noting:
    # * These fields will generally be merged with Stream.API_FIELDS
    #   data about the stream.
    # * "user_profile" is usually implied, as full API access to Subscription
    #   is primarily done for the current user; API access to other users'
    #   subscriptions is generally limited to boolean yes/no.
    # * "id" and "recipient_id" are not included as they are not used
    #   in the Zulip API; they're an internal implementation detail.
    #   Subscription objects are always looked up in the API via
    #   (user_profile, stream) pairs.
    # * "active" is often excluded in API use cases where it is implied.
    # * "is_muted" often needs to be copied into its inverse, the legacy
    #   "in_home_view" field, for backwards-compatibility.
    API_FIELDS = [
        "audible_notifications",
        "color",
        "desktop_notifications",
        "email_notifications",
        "is_muted",
        "pin_to_top",
        "push_notifications",
        "wildcard_mentions_notify",
    ]


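# Illustrative sketch of an assumed helper, not upstream code: how API_FIELDS
# is typically consumed - fetch the per-subscription settings with .values()
# and translate is_muted into the legacy in_home_view field for old clients.
def _example_subscription_payload(user_profile: UserProfile) -> List[Dict[str, Any]]:
    rows = Subscription.objects.filter(
        user_profile=user_profile,
        active=True,
        recipient__type=Recipient.STREAM,
    ).values("recipient__type_id", *Subscription.API_FIELDS)
    payload = []
    for row in rows:
        entry = dict(row)
        # Legacy clients expect in_home_view, the inverse of is_muted.
        entry["in_home_view"] = not entry["is_muted"]
        payload.append(entry)
    return payload

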
@cache_with_key(user_profile_by_id_cache_key, timeout=3600 * 24 * 7)
def get_user_profile_by_id(user_profile_id: int) -> UserProfile:
    return UserProfile.objects.select_related(
        "realm", "realm__can_access_all_users_group", "bot_owner"
    ).get(id=user_profile_id)


def get_user_profile_by_email(email: str) -> UserProfile:
    """This function is intended to be used for
    manual manage.py shell work; robust code must use get_user or
    get_user_by_delivery_email instead, because Zulip supports
    multiple users with a given (delivery) email address existing on a
    single server (in different realms).
    """
    return UserProfile.objects.select_related("realm").get(delivery_email__iexact=email.strip())


@cache_with_key(user_profile_by_api_key_cache_key, timeout=3600 * 24 * 7)
def maybe_get_user_profile_by_api_key(api_key: str) -> Optional[UserProfile]:
    try:
        return UserProfile.objects.select_related(
            "realm", "realm__can_access_all_users_group", "bot_owner"
        ).get(api_key=api_key)
    except UserProfile.DoesNotExist:
        # We will cache failed lookups with None. The
        # use case here is that broken API clients may
        # continually ask for the same wrong API key, and
        # we want to handle that as quickly as possible.
        return None


def get_user_profile_by_api_key(api_key: str) -> UserProfile:
    user_profile = maybe_get_user_profile_by_api_key(api_key)
    if user_profile is None:
        raise UserProfile.DoesNotExist

    return user_profile


def get_user_by_delivery_email(email: str, realm: Realm) -> UserProfile:
    """Fetches a user given their delivery email. For use in
    authentication/registration contexts. Do not use for user-facing
    views (e.g. Zulip API endpoints) as doing so would violate the
    EMAIL_ADDRESS_VISIBILITY_ADMINS security model. Use get_user in
    those code paths.
    """
    return UserProfile.objects.select_related(
        "realm", "realm__can_access_all_users_group", "bot_owner"
    ).get(delivery_email__iexact=email.strip(), realm=realm)


def get_users_by_delivery_email(emails: Set[str], realm: Realm) -> QuerySet[UserProfile]:
    """This is similar to get_user_by_delivery_email, and
    it has the same security caveats. It gets multiple
    users and returns a QuerySet, since most callers
    will only need two or three fields.

    If you are using this to get large UserProfile objects, you are
    probably making a mistake, but if you must,
    then use `select_related`.
    """

    """
    Django doesn't support delivery_email__iexact__in, so
    we simply OR all the filters that we'd do for the
    one-email case.
    """
    email_filter = Q()
    for email in emails:
        email_filter |= Q(delivery_email__iexact=email.strip())

    return UserProfile.objects.filter(realm=realm).filter(email_filter)


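# Illustrative usage, not upstream code (the emails are made up): fetch just
# the couple of fields most callers need instead of whole UserProfile objects.
def _example_bulk_email_lookup(realm: Realm) -> Dict[str, int]:
    rows = get_users_by_delivery_email(
        {"iago@example.com", "Hamlet@Example.com"}, realm
    ).values_list("delivery_email", "id")
    return {email.lower(): user_id for email, user_id in rows}

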
@cache_with_key(user_profile_cache_key, timeout=3600 * 24 * 7)
def get_user(email: str, realm: Realm) -> UserProfile:
    """Fetches the user by its visible-to-other users username (in the
    `email` field). For use in API contexts; do not use in
    authentication/registration contexts as doing so will break
    authentication in organizations using
    EMAIL_ADDRESS_VISIBILITY_ADMINS. In those code paths, use
    get_user_by_delivery_email.
    """
    return UserProfile.objects.select_related(
        "realm", "realm__can_access_all_users_group", "bot_owner"
    ).get(email__iexact=email.strip(), realm=realm)


def get_active_user(email: str, realm: Realm) -> UserProfile:
    """Variant of get_user that excludes deactivated users.
    See the get_user docstring for important usage notes."""
    user_profile = get_user(email, realm)
    if not user_profile.is_active:
        raise UserProfile.DoesNotExist
    return user_profile


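# Illustrative contrast, not upstream code (emails are made up): the same
# account is reached through different keys depending on the code path, since
# the user-facing `email` and the login `delivery_email` can differ under
# EMAIL_ADDRESS_VISIBILITY_ADMINS.
def _example_user_lookup(realm: Realm) -> Tuple[UserProfile, UserProfile]:
    # API/display code paths key on the visible `email` field ...
    by_visible_email = get_user("user13@zulip.example.com", realm)
    # ... while login/registration code paths key on the delivery email.
    by_delivery_email = get_user_by_delivery_email("hamlet@corp.example.com", realm)
    return by_visible_email, by_delivery_email

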
def get_user_profile_by_id_in_realm(uid: int, realm: Realm) -> UserProfile:
    return UserProfile.objects.select_related(
        "realm", "realm__can_access_all_users_group", "bot_owner"
    ).get(id=uid, realm=realm)


def get_active_user_profile_by_id_in_realm(uid: int, realm: Realm) -> UserProfile:
    user_profile = get_user_profile_by_id_in_realm(uid, realm)
    if not user_profile.is_active:
        raise UserProfile.DoesNotExist
    return user_profile


def get_user_including_cross_realm(email: str, realm: Realm) -> UserProfile:
    if is_cross_realm_bot_email(email):
        return get_system_bot(email, realm.id)
    assert realm is not None
    return get_user(email, realm)


@cache_with_key(bot_profile_cache_key, timeout=3600 * 24 * 7)
def get_system_bot(email: str, realm_id: int) -> UserProfile:
    """
    This function doesn't use the realm_id argument yet, but requires
    passing it as preparation for adding system bots to each realm instead
    of having them all in a separate system bot realm.
    If you're calling this function, use the id of the realm in which the system
    bot will be after that migration. If the bot is supposed to send a message,
    the same realm as the one *to* which the message will be sent should be used - because
    cross-realm messages will be eliminated as part of the migration.
    """
    return UserProfile.objects.select_related("realm").get(email__iexact=email.strip())


def get_user_by_id_in_realm_including_cross_realm(
    uid: int,
    realm: Optional[Realm],
) -> UserProfile:
    user_profile = get_user_profile_by_id(uid)
    if user_profile.realm == realm:
        return user_profile

    # Note: This doesn't validate whether the `realm` passed in is
    # None/invalid for the is_cross_realm_bot_email case.
    if is_cross_realm_bot_email(user_profile.delivery_email):
        return user_profile

    raise UserProfile.DoesNotExist


@cache_with_key(realm_user_dicts_cache_key, timeout=3600 * 24 * 7)
def get_realm_user_dicts(realm_id: int) -> List[RawUserDict]:
    return list(
        UserProfile.objects.filter(
            realm_id=realm_id,
        ).values(*realm_user_dict_fields)
    )


@cache_with_key(active_user_ids_cache_key, timeout=3600 * 24 * 7)
def active_user_ids(realm_id: int) -> List[int]:
    query = UserProfile.objects.filter(
        realm_id=realm_id,
        is_active=True,
    ).values_list("id", flat=True)
    return list(query)


@cache_with_key(active_non_guest_user_ids_cache_key, timeout=3600 * 24 * 7)
def active_non_guest_user_ids(realm_id: int) -> List[int]:
    query = (
        UserProfile.objects.filter(
            realm_id=realm_id,
            is_active=True,
        )
        .exclude(
            role=UserProfile.ROLE_GUEST,
        )
        .values_list("id", flat=True)
    )
    return list(query)


def bot_owner_user_ids(user_profile: UserProfile) -> Set[int]:
    is_private_bot = (
        user_profile.default_sending_stream
        and user_profile.default_sending_stream.invite_only
        or user_profile.default_events_register_stream
        and user_profile.default_events_register_stream.invite_only
    )
    assert user_profile.bot_owner_id is not None
    if is_private_bot:
        return {user_profile.bot_owner_id}
    else:
        users = {user.id for user in user_profile.realm.get_human_admin_users()}
        users.add(user_profile.bot_owner_id)
        return users


def get_source_profile(email: str, realm_id: int) -> Optional[UserProfile]:
    try:
        return get_user_by_delivery_email(email, get_realm_by_id(realm_id))
    except (Realm.DoesNotExist, UserProfile.DoesNotExist):
        return None


@cache_with_key(lambda realm: bot_dicts_in_realm_cache_key(realm.id), timeout=3600 * 24 * 7)
def get_bot_dicts_in_realm(realm: Realm) -> List[Dict[str, Any]]:
    return list(UserProfile.objects.filter(realm=realm, is_bot=True).values(*bot_dict_fields))


def is_cross_realm_bot_email(email: str) -> bool:
    return email.lower() in settings.CROSS_REALM_BOT_EMAILS


class Huddle(models.Model):
    """
    Represents a group of individuals who may have a
    group direct message conversation together.

    The membership of the Huddle is stored in the Subscription table just like with
    Streams - for each user in the Huddle, there is a Subscription object
    tied to the UserProfile and the Huddle's recipient object.

    A hash of the list of user IDs is stored in the huddle_hash field
    below, to support efficiently mapping from a set of users to the
    corresponding Huddle object.
    """

    # TODO: We should consider whether using
    # CommaSeparatedIntegerField would be better.
    huddle_hash = models.CharField(max_length=40, db_index=True, unique=True)
    # Foreign key to the Recipient object for this Huddle.
    recipient = models.ForeignKey(Recipient, null=True, on_delete=models.SET_NULL)


def get_huddle_hash(id_list: List[int]) -> str:
    id_list = sorted(set(id_list))
    hash_key = ",".join(str(x) for x in id_list)
    return hashlib.sha1(hash_key.encode()).hexdigest()


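# Illustrative check grounded in the function above: the hash ignores ordering
# and duplicates, so any permutation of the same user IDs maps to one Huddle.
def _example_huddle_hash() -> None:
    assert get_huddle_hash([3, 1, 2, 3]) == get_huddle_hash([1, 2, 3])
    assert get_huddle_hash([1, 2, 3]) == hashlib.sha1(b"1,2,3").hexdigest()

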
def get_or_create_huddle(id_list: List[int]) -> Huddle:
    """
    Takes a list of user IDs and returns the Huddle object for the
    group consisting of these users. If the Huddle object does not
    yet exist, it will be transparently created.
    """
    huddle_hash = get_huddle_hash(id_list)
    with transaction.atomic():
        (huddle, created) = Huddle.objects.get_or_create(huddle_hash=huddle_hash)
        if created:
            recipient = Recipient.objects.create(type_id=huddle.id, type=Recipient.HUDDLE)
            huddle.recipient = recipient
            huddle.save(update_fields=["recipient"])
            subs_to_create = [
                Subscription(
                    recipient=recipient,
                    user_profile_id=user_profile_id,
                    is_user_active=is_active,
                )
                for user_profile_id, is_active in UserProfile.objects.filter(id__in=id_list)
                .distinct("id")
                .values_list("id", "is_active")
            ]
            Subscription.objects.bulk_create(subs_to_create)
        return huddle


class UserActivity(models.Model):
    """Data table recording the last time each user hit Zulip endpoints
    via which Clients; unlike UserPresence, these data are not exposed
    to users via the Zulip API.

    Useful for debugging as well as to answer analytics questions like
    "How many users have accessed the Zulip mobile app in the last
    month?" or "Which users/organizations have recently used API
    endpoint X that is about to be desupported" for communications
    and database migration purposes.
    """

    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    client = models.ForeignKey(Client, on_delete=CASCADE)
    query = models.CharField(max_length=50, db_index=True)

    count = models.IntegerField()
    last_visit = models.DateTimeField("last visit")

    class Meta:
        unique_together = ("user_profile", "client", "query")


class UserActivityInterval(models.Model):
    MIN_INTERVAL_LENGTH = timedelta(minutes=15)

    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    start = models.DateTimeField("start time", db_index=True)
    end = models.DateTimeField("end time", db_index=True)

    class Meta:
        indexes = [
            models.Index(
                fields=["user_profile", "end"],
                name="zerver_useractivityinterval_user_profile_id_end_bb3bfc37_idx",
            ),
        ]


class UserPresence(models.Model):
    """A record from the last time we heard from a given user on a given client.

    NOTE: Users can disable updates to this table (see UserProfile.presence_enabled),
    so this cannot be used to determine if a user was recently active on Zulip.
    The UserActivity table is recommended for that purpose.

    This is a tricky subsystem, because it is highly optimized. See the docs:
    https://zulip.readthedocs.io/en/latest/subsystems/presence.html
    """

    user_profile = models.OneToOneField(UserProfile, on_delete=CASCADE, unique=True)

    # Realm is just here as denormalization to optimize database
    # queries to fetch all presence data for a given realm.
    realm = models.ForeignKey(Realm, on_delete=CASCADE)

    # The last time the user had a client connected to Zulip,
    # including idle clients where the user hasn't interacted with the
    # system recently (and thus might be AFK).
    last_connected_time = models.DateTimeField(default=timezone_now, db_index=True, null=True)
    # The last time a client connected to Zulip reported that the user
    # was actually present (E.g. via focusing a browser window or
    # interacting with a computer running the desktop app)
    last_active_time = models.DateTimeField(default=timezone_now, db_index=True, null=True)

    # The following constants are used in the presence API for
    # communicating whether a user is active (last_active_time recent)
    # or idle (last_connected_time recent) or offline (neither
    # recent). They're no longer part of the data model.
    LEGACY_STATUS_ACTIVE = "active"
    LEGACY_STATUS_IDLE = "idle"
    LEGACY_STATUS_ACTIVE_INT = 1
    LEGACY_STATUS_IDLE_INT = 2

    class Meta:
        indexes = [
            models.Index(
                fields=["realm", "last_active_time"],
                name="zerver_userpresence_realm_id_last_active_time_1c5aa9a2_idx",
            ),
            models.Index(
                fields=["realm", "last_connected_time"],
                name="zerver_userpresence_realm_id_last_connected_time_98d2fc9f_idx",
            ),
        ]

    @staticmethod
    def status_from_string(status: str) -> Optional[int]:
        if status == "active":
            return UserPresence.LEGACY_STATUS_ACTIVE_INT
        elif status == "idle":
            return UserPresence.LEGACY_STATUS_IDLE_INT

        return None


class UserStatus(AbstractEmoji):
    user_profile = models.OneToOneField(UserProfile, on_delete=CASCADE)

    timestamp = models.DateTimeField()
    client = models.ForeignKey(Client, on_delete=CASCADE)

    # Override the emoji_name and emoji_code fields inherited from the
    # AbstractEmoji model to accept a default value.
    emoji_name = models.TextField(default="")
    emoji_code = models.TextField(default="")

    status_text = models.CharField(max_length=255, default="")


class DefaultStream(models.Model):
    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    stream = models.ForeignKey(Stream, on_delete=CASCADE)

    class Meta:
        unique_together = ("realm", "stream")


class DefaultStreamGroup(models.Model):
    MAX_NAME_LENGTH = 60

    name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True)
    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    streams = models.ManyToManyField("zerver.Stream")
    description = models.CharField(max_length=1024, default="")

    class Meta:
        unique_together = ("realm", "name")

    def to_dict(self) -> Dict[str, Any]:
        return dict(
            name=self.name,
            id=self.id,
            description=self.description,
            streams=[stream.to_dict() for stream in self.streams.all().order_by("name")],
        )


def get_default_stream_groups(realm: Realm) -> QuerySet[DefaultStreamGroup]:
    return DefaultStreamGroup.objects.filter(realm=realm)


class AbstractScheduledJob(models.Model):
    scheduled_timestamp = models.DateTimeField(db_index=True)
    # JSON representation of arguments to consumer
    data = models.TextField()
    realm = models.ForeignKey(Realm, on_delete=CASCADE)

    class Meta:
        abstract = True


class ScheduledEmail(AbstractScheduledJob):
    # Exactly one of users or address should be set. These are
    # duplicate values, used to efficiently filter the set of
    # ScheduledEmails for use in clear_scheduled_emails; the
    # recipients used for actually sending messages are stored in the
    # data field of AbstractScheduledJob.
    users = models.ManyToManyField(UserProfile)
    # Just the address part of a full "name <address>" email address
    address = models.EmailField(null=True, db_index=True)

    # Valid types are below
    WELCOME = 1
    DIGEST = 2
    INVITATION_REMINDER = 3
    type = models.PositiveSmallIntegerField()

    @override
    def __str__(self) -> str:
        return f"{self.type} {self.address or list(self.users.all())} {self.scheduled_timestamp}"


class MissedMessageEmailAddress(models.Model):
|
2022-08-15 19:10:58 +02:00
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE)
|
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
email_token = models.CharField(max_length=34, unique=True, db_index=True)
|
2019-12-25 22:28:05 +01:00
|
|
|
|
|
|
|
# Timestamp of when the missed message address generated.
|
2022-08-15 19:10:58 +02:00
|
|
|
timestamp = models.DateTimeField(db_index=True, default=timezone_now)
|
2022-02-23 05:32:17 +01:00
|
|
|
# Number of times the missed message address has been used.
|
2022-08-15 19:10:58 +02:00
|
|
|
times_used = models.PositiveIntegerField(default=0, db_index=True)
|
2019-12-25 22:28:05 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2019-12-25 22:28:05 +01:00
|
|
|
def __str__(self) -> str:
|
|
|
|
return settings.EMAIL_GATEWAY_PATTERN % (self.email_token,)
|
|
|
|
|
2019-12-26 13:46:55 +01:00
|
|
|
def increment_times_used(self) -> None:
|
|
|
|
self.times_used += 1
|
|
|
|
self.save(update_fields=["times_used"])
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
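
# Illustrative sketch only (hypothetical helper, not upstream API): shows how a
# reply address is rendered from EMAIL_GATEWAY_PATTERN via __str__ and how the
# usage counter is bumped when an incoming reply is processed.
def _example_record_missed_message_reply(mm_address: MissedMessageEmailAddress) -> str:
    reply_to = str(mm_address)  # the token substituted into EMAIL_GATEWAY_PATTERN
    mm_address.increment_times_used()
    return reply_to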


class NotificationTriggers:
    # "direct_message" is for 1:1 direct messages as well as huddles
    DIRECT_MESSAGE = "direct_message"
    MENTION = "mentioned"
    TOPIC_WILDCARD_MENTION = "topic_wildcard_mentioned"
    STREAM_WILDCARD_MENTION = "stream_wildcard_mentioned"
    STREAM_PUSH = "stream_push_notify"
    STREAM_EMAIL = "stream_email_notify"
    FOLLOWED_TOPIC_PUSH = "followed_topic_push_notify"
    FOLLOWED_TOPIC_EMAIL = "followed_topic_email_notify"
    TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC = "topic_wildcard_mentioned_in_followed_topic"
    STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC = "stream_wildcard_mentioned_in_followed_topic"
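
# Illustrative sketch only (hypothetical helper, not upstream API): the trigger
# strings above include a followed-topic family; grouping them by simple
# membership tests is how a caller might branch on them.
def _example_is_followed_topic_trigger(trigger: str) -> bool:
    return trigger in (
        NotificationTriggers.FOLLOWED_TOPIC_PUSH,
        NotificationTriggers.FOLLOWED_TOPIC_EMAIL,
        NotificationTriggers.TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC,
        NotificationTriggers.STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC,
    )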


class ScheduledMessageNotificationEmail(models.Model):
    """Stores planned outgoing message notification emails. They may be
    processed earlier should Zulip choose to batch multiple messages
    in a single email, but typically will be processed just after
    scheduled_timestamp.
    """

    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    message = models.ForeignKey(Message, on_delete=CASCADE)

    EMAIL_NOTIFICATION_TRIGGER_CHOICES = [
        (NotificationTriggers.DIRECT_MESSAGE, "Direct message"),
        (NotificationTriggers.MENTION, "Mention"),
        (NotificationTriggers.TOPIC_WILDCARD_MENTION, "Topic wildcard mention"),
        (NotificationTriggers.STREAM_WILDCARD_MENTION, "Stream wildcard mention"),
        (NotificationTriggers.STREAM_EMAIL, "Stream notifications enabled"),
        (NotificationTriggers.FOLLOWED_TOPIC_EMAIL, "Followed topic notifications enabled"),
        (
            NotificationTriggers.TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC,
            "Topic wildcard mention in followed topic",
        ),
        (
            NotificationTriggers.STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC,
            "Stream wildcard mention in followed topic",
        ),
    ]

    trigger = models.TextField(choices=EMAIL_NOTIFICATION_TRIGGER_CHOICES)
    mentioned_user_group = models.ForeignKey(UserGroup, null=True, on_delete=CASCADE)

    # Timestamp for when the notification should be processed and sent.
    # Calculated from the time the event was received and the batching period.
    scheduled_timestamp = models.DateTimeField(db_index=True)
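
# Illustrative sketch only (hypothetical helper, not upstream API): the indexed
# scheduled_timestamp above supports a worker that periodically collects the
# rows whose batching window has elapsed.
def _example_due_notification_rows(now: datetime) -> QuerySet[ScheduledMessageNotificationEmail]:
    return ScheduledMessageNotificationEmail.objects.filter(
        scheduled_timestamp__lte=now
    ).order_by("user_profile_id", "scheduled_timestamp")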


class APIScheduledStreamMessageDict(TypedDict):
    scheduled_message_id: int
    to: int
    type: str
    content: str
    rendered_content: str
    topic: str
    scheduled_delivery_timestamp: int
    failed: bool


class APIScheduledDirectMessageDict(TypedDict):
    scheduled_message_id: int
    to: List[int]
    type: str
    content: str
    rendered_content: str
    scheduled_delivery_timestamp: int
    failed: bool


class ScheduledMessage(models.Model):
    sender = models.ForeignKey(UserProfile, on_delete=CASCADE)
    recipient = models.ForeignKey(Recipient, on_delete=CASCADE)
    subject = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH)
    content = models.TextField()
    rendered_content = models.TextField()
    sending_client = models.ForeignKey(Client, on_delete=CASCADE)
    stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE)
    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    scheduled_timestamp = models.DateTimeField(db_index=True)
    delivered = models.BooleanField(default=False)
    delivered_message = models.ForeignKey(Message, null=True, on_delete=CASCADE)
    has_attachment = models.BooleanField(default=False, db_index=True)

    # Metadata for messages that failed to send when their scheduled
    # moment arrived.
    failed = models.BooleanField(default=False)
    failure_message = models.TextField(null=True)

    SEND_LATER = 1
    REMIND = 2

    DELIVERY_TYPES = (
        (SEND_LATER, "send_later"),
        (REMIND, "remind"),
    )

    delivery_type = models.PositiveSmallIntegerField(
        choices=DELIVERY_TYPES,
        default=SEND_LATER,
    )

    class Meta:
        indexes = [
            # We expect a large number of delivered scheduled messages
            # to accumulate over time. This first index is for the
            # deliver_scheduled_messages worker.
            models.Index(
                name="zerver_unsent_scheduled_messages_by_time",
                fields=["scheduled_timestamp"],
                condition=Q(
                    delivered=False,
                    failed=False,
                ),
            ),
            # This index is for displaying scheduled messages to the
            # user themself via the API; we don't filter failed
            # messages since we will want to display those so that
            # failures don't just disappear into a black hole.
            models.Index(
                name="zerver_realm_unsent_scheduled_messages_by_user",
                fields=["realm_id", "sender", "delivery_type", "scheduled_timestamp"],
                condition=Q(
                    delivered=False,
                ),
            ),
        ]

    @override
    def __str__(self) -> str:
        return f"{self.recipient.label()} {self.subject} {self.sender!r} {self.scheduled_timestamp}"

    def topic_name(self) -> str:
        return self.subject

    def set_topic_name(self, topic_name: str) -> None:
        self.subject = topic_name

    def is_stream_message(self) -> bool:
        return self.recipient.type == Recipient.STREAM

    def to_dict(self) -> Union[APIScheduledStreamMessageDict, APIScheduledDirectMessageDict]:
        recipient, recipient_type_str = get_recipient_ids(self.recipient, self.sender.id)

        if recipient_type_str == "private":
            # The topic for direct messages should always be an empty string.
            assert self.topic_name() == ""

            return APIScheduledDirectMessageDict(
                scheduled_message_id=self.id,
                to=recipient,
                type=recipient_type_str,
                content=self.content,
                rendered_content=self.rendered_content,
                scheduled_delivery_timestamp=datetime_to_timestamp(self.scheduled_timestamp),
                failed=self.failed,
            )

        # The recipient for stream messages should always just be the unique stream ID.
        assert len(recipient) == 1

        return APIScheduledStreamMessageDict(
            scheduled_message_id=self.id,
            to=recipient[0],
            type=recipient_type_str,
            content=self.content,
            rendered_content=self.rendered_content,
            topic=self.topic_name(),
            scheduled_delivery_timestamp=datetime_to_timestamp(self.scheduled_timestamp),
            failed=self.failed,
        )
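
# Illustrative sketch only (hypothetical helper, not upstream API): mirrors the
# filter behind the zerver_realm_unsent_scheduled_messages_by_user partial
# index above, producing API payloads for a user's own undelivered scheduled
# messages via to_dict().
def _example_undelivered_scheduled_messages(
    user: UserProfile,
) -> List[Union[APIScheduledStreamMessageDict, APIScheduledDirectMessageDict]]:
    rows = ScheduledMessage.objects.filter(
        realm=user.realm, sender=user, delivered=False
    ).order_by("scheduled_timestamp")
    return [row.to_dict() for row in rows]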


EMAIL_TYPES = {
    "account_registered": ScheduledEmail.WELCOME,
    "onboarding_zulip_topics": ScheduledEmail.WELCOME,
    "onboarding_zulip_guide": ScheduledEmail.WELCOME,
    "onboarding_team_to_zulip": ScheduledEmail.WELCOME,
    "digest": ScheduledEmail.DIGEST,
    "invitation_reminder": ScheduledEmail.INVITATION_REMINDER,
}
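
# Illustrative sketch only (hypothetical helper, not upstream API): EMAIL_TYPES
# maps an email template name to the ScheduledEmail.type value used when that
# email is queued; unknown template names raise KeyError.
def _example_scheduled_email_type(template_name: str) -> int:
    # For instance, _example_scheduled_email_type("digest") == ScheduledEmail.DIGEST.
    return EMAIL_TYPES[template_name]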


class AbstractRealmAuditLog(models.Model):
    """Defines fields common to RealmAuditLog and RemoteRealmAuditLog."""

    event_time = models.DateTimeField(db_index=True)
    # If True, event_time is an overestimate of the true time. Can be used
    # by migrations when introducing a new event_type.
    backfilled = models.BooleanField(default=False)

    # Keys within extra_data, when extra_data is a json dict. Keys are strings because
    # json keys must always be strings.
    OLD_VALUE = "1"
    NEW_VALUE = "2"
    ROLE_COUNT = "10"
    ROLE_COUNT_HUMANS = "11"
    ROLE_COUNT_BOTS = "12"

    extra_data = models.JSONField(default=dict, encoder=DjangoJSONEncoder)

    # Event types
    USER_CREATED = 101
    USER_ACTIVATED = 102
    USER_DEACTIVATED = 103
    USER_REACTIVATED = 104
    USER_ROLE_CHANGED = 105
    USER_DELETED = 106
    USER_DELETED_PRESERVING_MESSAGES = 107

    USER_SOFT_ACTIVATED = 120
    USER_SOFT_DEACTIVATED = 121
    USER_PASSWORD_CHANGED = 122
    USER_AVATAR_SOURCE_CHANGED = 123
    USER_FULL_NAME_CHANGED = 124
    USER_EMAIL_CHANGED = 125
    USER_TERMS_OF_SERVICE_VERSION_CHANGED = 126
    USER_API_KEY_CHANGED = 127
    USER_BOT_OWNER_CHANGED = 128
    USER_DEFAULT_SENDING_STREAM_CHANGED = 129
    USER_DEFAULT_REGISTER_STREAM_CHANGED = 130
    USER_DEFAULT_ALL_PUBLIC_STREAMS_CHANGED = 131
    USER_SETTING_CHANGED = 132
    USER_DIGEST_EMAIL_CREATED = 133

    REALM_DEACTIVATED = 201
    REALM_REACTIVATED = 202
    REALM_SCRUBBED = 203
    REALM_PLAN_TYPE_CHANGED = 204
    REALM_LOGO_CHANGED = 205
    REALM_EXPORTED = 206
    REALM_PROPERTY_CHANGED = 207
    REALM_ICON_SOURCE_CHANGED = 208
    REALM_DISCOUNT_CHANGED = 209
    REALM_SPONSORSHIP_APPROVED = 210
    REALM_BILLING_MODALITY_CHANGED = 211
    REALM_REACTIVATION_EMAIL_SENT = 212
    REALM_SPONSORSHIP_PENDING_STATUS_CHANGED = 213
    REALM_SUBDOMAIN_CHANGED = 214
    REALM_CREATED = 215
    REALM_DEFAULT_USER_SETTINGS_CHANGED = 216
    REALM_ORG_TYPE_CHANGED = 217
    REALM_DOMAIN_ADDED = 218
    REALM_DOMAIN_CHANGED = 219
    REALM_DOMAIN_REMOVED = 220
    REALM_PLAYGROUND_ADDED = 221
    REALM_PLAYGROUND_REMOVED = 222
    REALM_LINKIFIER_ADDED = 223
    REALM_LINKIFIER_CHANGED = 224
    REALM_LINKIFIER_REMOVED = 225
    REALM_EMOJI_ADDED = 226
    REALM_EMOJI_REMOVED = 227
    REALM_LINKIFIERS_REORDERED = 228

    SUBSCRIPTION_CREATED = 301
    SUBSCRIPTION_ACTIVATED = 302
    SUBSCRIPTION_DEACTIVATED = 303
    SUBSCRIPTION_PROPERTY_CHANGED = 304

    USER_MUTED = 350
    USER_UNMUTED = 351

    STRIPE_CUSTOMER_CREATED = 401
    STRIPE_CARD_CHANGED = 402
    STRIPE_PLAN_CHANGED = 403
    STRIPE_PLAN_QUANTITY_RESET = 404

    CUSTOMER_CREATED = 501
    CUSTOMER_PLAN_CREATED = 502
    CUSTOMER_SWITCHED_FROM_MONTHLY_TO_ANNUAL_PLAN = 503
    CUSTOMER_SWITCHED_FROM_ANNUAL_TO_MONTHLY_PLAN = 504

    STREAM_CREATED = 601
    STREAM_DEACTIVATED = 602
    STREAM_NAME_CHANGED = 603
    STREAM_REACTIVATED = 604
    STREAM_MESSAGE_RETENTION_DAYS_CHANGED = 605
    STREAM_PROPERTY_CHANGED = 607
    STREAM_GROUP_BASED_SETTING_CHANGED = 608

    USER_GROUP_CREATED = 701
    USER_GROUP_DELETED = 702
    USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED = 703
    USER_GROUP_DIRECT_USER_MEMBERSHIP_REMOVED = 704
    USER_GROUP_DIRECT_SUBGROUP_MEMBERSHIP_ADDED = 705
    USER_GROUP_DIRECT_SUBGROUP_MEMBERSHIP_REMOVED = 706
    USER_GROUP_DIRECT_SUPERGROUP_MEMBERSHIP_ADDED = 707
    USER_GROUP_DIRECT_SUPERGROUP_MEMBERSHIP_REMOVED = 708
    # 709 to 719 reserved for membership changes
    USER_GROUP_NAME_CHANGED = 720
    USER_GROUP_DESCRIPTION_CHANGED = 721
    USER_GROUP_GROUP_BASED_SETTING_CHANGED = 722

    # The following values are only for RemoteZulipServerAuditLog
    # Values should be exactly 10000 greater than the corresponding
    # value used for the same purpose in RealmAuditLog (e.g.
    # REALM_DEACTIVATED = 201, and REMOTE_SERVER_DEACTIVATED = 10201).
    REMOTE_SERVER_DEACTIVATED = 10201
    REMOTE_SERVER_PLAN_TYPE_CHANGED = 10204
    REMOTE_SERVER_DISCOUNT_CHANGED = 10209
    REMOTE_SERVER_SPONSORSHIP_APPROVED = 10210
    REMOTE_SERVER_BILLING_MODALITY_CHANGED = 10211
    REMOTE_SERVER_SPONSORSHIP_PENDING_STATUS_CHANGED = 10213
    REMOTE_SERVER_CREATED = 10215

    # This value is for RemoteRealmAuditLog entries tracking changes to the
    # RemoteRealm model resulting from modified realm information sent to us
    # via send_analytics_to_push_bouncer.
    REMOTE_REALM_VALUE_UPDATED = 20001

    event_type = models.PositiveSmallIntegerField()

    # event_types synced from on-prem installations to Zulip Cloud when
    # billing for mobile push notifications is enabled. Every billing
    # event_type should have ROLE_COUNT populated in extra_data.
    SYNCED_BILLING_EVENTS = [
        USER_CREATED,
        USER_ACTIVATED,
        USER_DEACTIVATED,
        USER_REACTIVATED,
        USER_ROLE_CHANGED,
        REALM_DEACTIVATED,
        REALM_REACTIVATED,
    ]

    class Meta:
        abstract = True
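
# Illustrative sketch only (hypothetical helper, not upstream API): shows the
# intended reading of SYNCED_BILLING_EVENTS above -- whether a given audit log
# row is one of the event types mirrored to Zulip Cloud for billing purposes.
def _example_is_synced_billing_event(entry: "RealmAuditLog") -> bool:
    return entry.event_type in AbstractRealmAuditLog.SYNCED_BILLING_EVENTS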


class RealmAuditLog(AbstractRealmAuditLog):
    """
    RealmAuditLog tracks important changes to users, streams, and
    realms in Zulip. It is intended to support both
    debugging/introspection (e.g. determining when a user left a
    given stream) as well as to help with some database migrations
    where we might be able to do a better data backfill with it.
    Here are a few key details about how this works:

    * acting_user is the user who initiated the state change
    * modified_user (if present) is the user being modified
    * modified_stream (if present) is the stream being modified
    * modified_user_group (if present) is the user group being modified

    For example:
    * When a user subscribes another user to a stream, modified_user,
      acting_user, and modified_stream will all be present and different.
    * When an administrator changes an organization's realm icon,
      acting_user is that administrator and modified_user,
      modified_stream and modified_user_group will be None.
    """

    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    acting_user = models.ForeignKey(
        UserProfile,
        null=True,
        related_name="+",
        on_delete=CASCADE,
    )
    modified_user = models.ForeignKey(
        UserProfile,
        null=True,
        related_name="+",
        on_delete=CASCADE,
    )
    modified_stream = models.ForeignKey(
        Stream,
        null=True,
        on_delete=CASCADE,
    )
    modified_user_group = models.ForeignKey(
        UserGroup,
        null=True,
        on_delete=CASCADE,
    )
    event_last_message_id = models.IntegerField(null=True)

    @override
    def __str__(self) -> str:
        if self.modified_user is not None:
            return f"{self.modified_user!r} {self.event_type} {self.event_time} {self.id}"
        if self.modified_stream is not None:
            return f"{self.modified_stream!r} {self.event_type} {self.event_time} {self.id}"
        if self.modified_user_group is not None:
            return f"{self.modified_user_group!r} {self.event_type} {self.event_time} {self.id}"
        return f"{self.realm!r} {self.event_type} {self.event_time} {self.id}"

    class Meta:
        indexes = [
            models.Index(
                name="zerver_realmauditlog_user_subscriptions_idx",
                fields=["modified_user", "modified_stream"],
                condition=Q(
                    event_type__in=[
                        AbstractRealmAuditLog.SUBSCRIPTION_CREATED,
                        AbstractRealmAuditLog.SUBSCRIPTION_ACTIVATED,
                        AbstractRealmAuditLog.SUBSCRIPTION_DEACTIVATED,
                    ]
                ),
            )
        ]
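
# Illustrative sketch only (hypothetical helper and field values, not upstream
# API): a minimal example of recording a role change, using the
# OLD_VALUE/NEW_VALUE extra_data keys defined on AbstractRealmAuditLog.
def _example_log_role_change(
    acting_user: UserProfile, modified_user: UserProfile, old_role: int, new_role: int
) -> RealmAuditLog:
    return RealmAuditLog.objects.create(
        realm=modified_user.realm,
        acting_user=acting_user,
        modified_user=modified_user,
        event_type=RealmAuditLog.USER_ROLE_CHANGED,
        event_time=timezone_now(),
        extra_data={
            RealmAuditLog.OLD_VALUE: old_role,
            RealmAuditLog.NEW_VALUE: new_role,
        },
    )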


class OnboardingStep(models.Model):
    user = models.ForeignKey(UserProfile, on_delete=CASCADE)
    onboarding_step = models.CharField(max_length=30)
    timestamp = models.DateTimeField(default=timezone_now)

    class Meta:
        unique_together = ("user", "onboarding_step")
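
# Illustrative sketch only (hypothetical helper, not upstream API): the
# ("user", "onboarding_step") unique constraint above makes recording a
# completed step idempotent via get_or_create.
def _example_mark_onboarding_step(user: UserProfile, step: str) -> None:
    OnboardingStep.objects.get_or_create(user=user, onboarding_step=step)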


def check_valid_user_ids(realm_id: int, val: object, allow_deactivated: bool = False) -> List[int]:
    user_ids = check_list(check_int)("User IDs", val)
    realm = Realm.objects.get(id=realm_id)
    for user_id in user_ids:
        # TODO: Structurally, we should be doing a bulk fetch query to
        # get the users here, not doing these in a loop. But because
        # this is a rarely used feature and likely to never have more
        # than a handful of users, it's probably mostly OK.
        try:
            user_profile = get_user_profile_by_id_in_realm(user_id, realm)
        except UserProfile.DoesNotExist:
            raise ValidationError(_("Invalid user ID: {user_id}").format(user_id=user_id))

        if not allow_deactivated and not user_profile.is_active:
            raise ValidationError(
                _("User with ID {user_id} is deactivated").format(user_id=user_id)
            )

        if user_profile.is_bot:
            raise ValidationError(_("User with ID {user_id} is a bot").format(user_id=user_id))

    return user_ids
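
# Illustrative sketch only (hypothetical helper, not upstream API): callers pass
# an untyped value (e.g. parsed JSON) and get back either the validated list of
# user IDs or the first ValidationError message.
def _example_validate_user_ids(realm_id: int, val: object) -> Tuple[List[int], Optional[str]]:
    try:
        return check_valid_user_ids(realm_id, val), None
    except ValidationError as e:
        return [], e.messages[0]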
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-03-17 10:07:22 +01:00
|
|
|
class CustomProfileField(models.Model):
|
2018-07-25 05:57:10 +02:00
|
|
|
"""Defines a form field for the per-realm custom profile fields feature.
|
|
|
|
|
|
|
|
See CustomProfileFieldValue for an individual user's values for one of
|
|
|
|
these fields.
|
2018-07-25 00:29:05 +02:00
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-03-31 07:30:24 +02:00
|
|
|
HINT_MAX_LENGTH = 80
|
2018-08-16 20:12:00 +02:00
|
|
|
NAME_MAX_LENGTH = 40
|
2022-07-12 21:04:47 +02:00
|
|
|
MAX_DISPLAY_IN_PROFILE_SUMMARY_FIELDS = 2
|
2018-03-31 07:30:24 +02:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
realm = models.ForeignKey(Realm, on_delete=CASCADE)
|
|
|
|
name = models.CharField(max_length=NAME_MAX_LENGTH)
|
|
|
|
hint = models.CharField(max_length=HINT_MAX_LENGTH, default="")
|
2022-07-12 21:04:47 +02:00
|
|
|
|
|
|
|
# Sort order for display of custom profile fields.
|
2022-08-15 19:10:58 +02:00
|
|
|
order = models.IntegerField(default=0)
|
2017-03-17 10:07:22 +01:00
|
|
|
|
2022-07-12 21:04:47 +02:00
|
|
|
# Whether the field should be displayed in smaller summary
|
|
|
|
# sections of a page displaying custom profile fields.
|
2022-08-15 19:10:58 +02:00
|
|
|
display_in_profile_summary = models.BooleanField(default=False)
|
2022-07-12 21:04:47 +02:00
|
|
|
|
2018-04-02 15:04:22 +02:00
|
|
|
SHORT_TEXT = 1
|
|
|
|
LONG_TEXT = 2
|
2021-03-20 11:39:22 +01:00
|
|
|
SELECT = 3
|
2018-04-03 18:06:13 +02:00
|
|
|
DATE = 4
|
2018-04-25 19:20:58 +02:00
|
|
|
URL = 5
|
2018-05-06 09:43:38 +02:00
|
|
|
USER = 6
|
2019-05-27 10:59:55 +02:00
|
|
|
EXTERNAL_ACCOUNT = 7
|
2022-10-01 12:16:11 +02:00
|
|
|
PRONOUNS = 8
|
2018-04-08 09:50:05 +02:00
|
|
|
|
2018-05-06 09:43:38 +02:00
|
|
|
# These are the fields whose validators require more than var_name
|
2021-03-20 11:39:22 +01:00
|
|
|
# and value argument. i.e. SELECT require field_data, USER require
|
2018-05-06 09:43:38 +02:00
|
|
|
# realm as argument.
|
2021-03-20 11:39:22 +01:00
|
|
|
SELECT_FIELD_TYPE_DATA: List[ExtendedFieldElement] = [
|
2021-04-16 00:57:30 +02:00
|
|
|
(SELECT, gettext_lazy("List of options"), validate_select_field, str, "SELECT"),
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
]
|
|
|
|
USER_FIELD_TYPE_DATA: List[UserFieldElement] = [
|
2021-09-19 20:11:34 +02:00
|
|
|
(USER, gettext_lazy("Person picker"), check_valid_user_ids, orjson.loads, "USER"),
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
]
|
2018-04-08 09:50:05 +02:00
|
|
|
|
2021-03-20 11:39:22 +01:00
|
|
|
SELECT_FIELD_VALIDATORS: Dict[int, ExtendedValidator] = {
|
|
|
|
item[0]: item[2] for item in SELECT_FIELD_TYPE_DATA
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
}
|
|
|
|
USER_FIELD_VALIDATORS: Dict[int, RealmUserValidator] = {
|
2018-05-06 09:43:38 +02:00
|
|
|
item[0]: item[2] for item in USER_FIELD_TYPE_DATA
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
}
|
2017-03-17 10:07:22 +01:00
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
FIELD_TYPE_DATA: List[FieldElement] = [
|
2021-05-10 07:02:14 +02:00
|
|
|
# Type, display name, validator, converter, keyword
|
2021-04-16 00:57:30 +02:00
|
|
|
(SHORT_TEXT, gettext_lazy("Short text"), check_short_string, str, "SHORT_TEXT"),
|
|
|
|
(LONG_TEXT, gettext_lazy("Long text"), check_long_string, str, "LONG_TEXT"),
|
|
|
|
(DATE, gettext_lazy("Date picker"), check_date, str, "DATE"),
|
|
|
|
(URL, gettext_lazy("Link"), check_url, str, "URL"),
|
2021-02-12 08:19:30 +01:00
|
|
|
(
|
|
|
|
EXTERNAL_ACCOUNT,
|
2021-04-16 00:57:30 +02:00
|
|
|
gettext_lazy("External account"),
|
2021-02-12 08:19:30 +01:00
|
|
|
check_short_string,
|
|
|
|
str,
|
|
|
|
"EXTERNAL_ACCOUNT",
|
|
|
|
),
|
2022-10-01 12:16:11 +02:00
|
|
|
(PRONOUNS, gettext_lazy("Pronouns"), check_short_string, str, "PRONOUNS"),
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
]
|
2018-04-08 09:50:05 +02:00
|
|
|
|
2021-03-20 11:39:22 +01:00
|
|
|
ALL_FIELD_TYPES = [*FIELD_TYPE_DATA, *SELECT_FIELD_TYPE_DATA, *USER_FIELD_TYPE_DATA]
|
2017-03-17 10:07:22 +01:00
|
|
|
|
2021-09-21 16:52:15 +02:00
|
|
|
FIELD_VALIDATORS: Dict[int, Validator[ProfileDataElementValue]] = {
|
2021-02-12 08:19:30 +01:00
|
|
|
item[0]: item[2] for item in FIELD_TYPE_DATA
|
|
|
|
}
|
|
|
|
FIELD_CONVERTERS: Dict[int, Callable[[Any], Any]] = {
|
|
|
|
item[0]: item[3] for item in ALL_FIELD_TYPES
|
|
|
|
}
|
2022-09-19 21:43:34 +02:00
|
|
|
FIELD_TYPE_CHOICES: List[Tuple[int, StrPromise]] = [
|
2022-08-08 19:53:11 +02:00
|
|
|
(item[0], item[1]) for item in ALL_FIELD_TYPES
|
|
|
|
]
|
2017-03-17 10:07:22 +01:00
|
|
|
|
2022-08-15 19:10:58 +02:00
|
|
|
field_type = models.PositiveSmallIntegerField(
|
2021-02-12 08:19:30 +01:00
|
|
|
choices=FIELD_TYPE_CHOICES,
|
|
|
|
default=SHORT_TEXT,
|
    )

    # A JSON blob of any additional data needed to define the field beyond
    # type/name/hint.
    #
    # The format depends on the type. Field types SHORT_TEXT, LONG_TEXT,
    # DATE, URL, and USER leave this empty. Fields of type SELECT store the
    # choices' descriptions.
    #
    # Note: There is no performance overhead of using TextField in PostgreSQL.
    # See https://www.postgresql.org/docs/9.0/static/datatype-character.html
    field_data = models.TextField(default="")
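    # For example (illustrative values only; the exact JSON keys are defined by
    # the SELECT field-data validators), field_data for a SELECT field might
    # look like:
    #   '{"0": {"text": "Vim", "order": "1"}, "1": {"text": "Emacs", "order": "2"}}'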

    class Meta:
        unique_together = ("realm", "name")

    @override
    def __str__(self) -> str:
        return f"{self.realm!r} {self.name} {self.field_type} {self.order}"

    def as_dict(self) -> ProfileDataElementBase:
        data_as_dict: ProfileDataElementBase = {
            "id": self.id,
            "name": self.name,
            "type": self.field_type,
            "hint": self.hint,
            "field_data": self.field_data,
            "order": self.order,
        }
        if self.display_in_profile_summary:
            data_as_dict["display_in_profile_summary"] = True

        return data_as_dict

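    # Whether values of this field get a rendered form (stored in
    # CustomProfileFieldValue.rendered_value below); presumably only the
    # free-text field types are run through the Markdown renderer.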
    def is_renderable(self) -> bool:
        if self.field_type in [CustomProfileField.SHORT_TEXT, CustomProfileField.LONG_TEXT]:
            return True
        return False


def custom_profile_fields_for_realm(realm_id: int) -> QuerySet[CustomProfileField]:
    return CustomProfileField.objects.filter(realm=realm_id).order_by("order")


class CustomProfileFieldValue(models.Model):
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    field = models.ForeignKey(CustomProfileField, on_delete=CASCADE)
    value = models.TextField()
    rendered_value = models.TextField(null=True, default=None)

    class Meta:
        unique_together = ("user_profile", "field")

    @override
    def __str__(self) -> str:
        return f"{self.user_profile!r} {self.field!r} {self.value}"


# Interfaces for services
# They provide additional functionality like parsing a message to obtain the
# query URL, the data to be sent to that URL, and parsing the response.
GENERIC_INTERFACE = "GenericService"
SLACK_INTERFACE = "SlackOutgoingWebhookService"


# A Service corresponds to either an outgoing webhook bot or an embedded bot.
# The type of Service is determined by the bot_type field of the referenced
# UserProfile.
#
# If the Service is an outgoing webhook bot:
# - name is any human-readable identifier for the Service
# - base_url is the address of the third-party site
# - token is used for authentication with the third-party site
#
# If the Service is an embedded bot:
# - name is the canonical name for the type of bot (e.g. 'xkcd' for an instance
#   of the xkcd bot); multiple embedded bots can have the same name, but all
#   embedded bots with the same name will run the same code
# - base_url and token are currently unused
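# For example (illustrative values only): an outgoing webhook bot's Service row
# might look like name="my-webhook", base_url="https://hooks.example.com/zulip",
# token="<secret>", interface=Service.GENERIC, while an embedded 'xkcd' bot's
# row would have name="xkcd" and leave base_url and token empty.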
class Service(models.Model):
    name = models.CharField(max_length=UserProfile.MAX_NAME_LENGTH)
    # Bot user corresponding to the Service. The bot_type of this user
    # determines the type of service. If non-bot services are added later,
    # user_profile can also represent the owner of the Service.
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    base_url = models.TextField()
    token = models.TextField()
    # Interface / API version of the service.
    interface = models.PositiveSmallIntegerField(default=1)

    # Valid interfaces are {generic, zulip_bot_service, slack}
    GENERIC = 1
    SLACK = 2

    ALLOWED_INTERFACE_TYPES = [
        GENERIC,
        SLACK,
    ]
    # N.B. If we used Django's choice=... we would get this for free (kinda)
    _interfaces: Dict[int, str] = {
        GENERIC: GENERIC_INTERFACE,
        SLACK: SLACK_INTERFACE,
    }

    def interface_name(self) -> str:
        # Raises KeyError if invalid
        return self._interfaces[self.interface]


def get_bot_services(user_profile_id: int) -> List[Service]:
    return list(Service.objects.filter(user_profile_id=user_profile_id))


def get_service_profile(user_profile_id: int, service_name: str) -> Service:
    return Service.objects.get(user_profile_id=user_profile_id, name=service_name)


class BotStorageData(models.Model):
    bot_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    key = models.TextField(db_index=True)
    value = models.TextField()

    class Meta:
        unique_together = ("bot_profile", "key")


class BotConfigData(models.Model):
    bot_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    key = models.TextField(db_index=True)
    value = models.TextField()

    class Meta:
        unique_together = ("bot_profile", "key")


class InvalidFakeEmailDomainError(Exception):
    pass


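# Best-effort choice of a domain for synthesizing placeholder email addresses
# (the checks below use a "bot" username): prefer the realm's own host when it
# forms valid addresses, otherwise fall back to settings.FAKE_EMAIL_DOMAIN.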
def get_fake_email_domain(realm_host: str) -> str:
    try:
        # Check that realm.host can be used to form valid email addresses.
        validate_email(Address(username="bot", domain=realm_host).addr_spec)
        return realm_host
    except ValidationError:
        pass

    try:
        # Check that the fake email domain can be used to form valid email addresses.
        validate_email(Address(username="bot", domain=settings.FAKE_EMAIL_DOMAIN).addr_spec)
    except ValidationError:
        raise InvalidFakeEmailDomainError(
            settings.FAKE_EMAIL_DOMAIN + " is not a valid domain. "
            "Consider setting the FAKE_EMAIL_DOMAIN setting."
        )

    return settings.FAKE_EMAIL_DOMAIN


class AlertWord(models.Model):
    # Realm isn't necessary, but it's a nice denormalization. Users
    # never move to another realm, so it's static, and having Realm
    # here optimizes the main query on this table, which is fetching
    # all the alert words in a realm.
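    # (Illustratively, that fetch can then be a single indexed query such as
    # AlertWord.objects.filter(realm=realm_id).)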
    realm = models.ForeignKey(Realm, db_index=True, on_delete=CASCADE)
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    # Case-insensitive name for the alert word.
    word = models.TextField()

    class Meta:
        unique_together = ("user_profile", "word")


def flush_realm_alert_words(realm_id: int) -> None:
    cache_delete(realm_alert_words_cache_key(realm_id))
    cache_delete(realm_alert_words_automaton_cache_key(realm_id))


def flush_alert_word(*, instance: AlertWord, **kwargs: object) -> None:
    realm_id = instance.realm_id
    flush_realm_alert_words(realm_id)


post_save.connect(flush_alert_word, sender=AlertWord)
post_delete.connect(flush_alert_word, sender=AlertWord)