2018-05-11 02:24:34 +02:00
|
|
|
from typing import Any, DefaultDict, Dict, List, Set, Tuple, TypeVar, \
|
2019-02-02 23:53:55 +01:00
|
|
|
Union, Optional, Sequence, AbstractSet, Callable, Iterable
|
2016-05-07 18:02:57 +02:00
|
|
|
from typing.re import Match
|
2013-04-23 18:51:17 +02:00
|
|
|
|
2012-08-28 18:44:51 +02:00
|
|
|
from django.db import models
|
2019-02-02 23:53:55 +01:00
|
|
|
from django.db.models.query import QuerySet
|
2019-01-11 13:41:52 +01:00
|
|
|
from django.db.models import Manager, Sum, CASCADE
|
2012-09-19 19:39:34 +02:00
|
|
|
from django.conf import settings
|
2013-06-24 17:51:10 +02:00
|
|
|
from django.contrib.auth.models import AbstractBaseUser, UserManager, \
|
|
|
|
PermissionsMixin
|
2016-11-02 21:41:10 +01:00
|
|
|
import django.contrib.auth
|
2017-07-07 20:35:31 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
2017-03-17 10:07:22 +01:00
|
|
|
from django.core.validators import URLValidator, MinLengthValidator, \
|
2019-08-30 00:21:36 +02:00
|
|
|
RegexValidator, validate_email
|
2014-02-21 21:18:38 +01:00
|
|
|
from django.dispatch import receiver
|
2014-01-28 18:18:19 +01:00
|
|
|
from zerver.lib.cache import cache_with_key, flush_user_profile, flush_realm, \
|
2018-06-03 19:11:52 +02:00
|
|
|
user_profile_by_api_key_cache_key, active_non_guest_user_ids_cache_key, \
|
2013-04-05 00:13:03 +02:00
|
|
|
user_profile_by_id_cache_key, user_profile_by_email_cache_key, \
|
2017-05-22 19:45:54 +02:00
|
|
|
user_profile_cache_key, generic_bulk_cached_fetch, cache_set, flush_stream, \
|
2019-08-15 00:44:33 +02:00
|
|
|
cache_delete, active_user_ids_cache_key, \
|
2017-10-21 18:20:49 +02:00
|
|
|
get_stream_cache_key, realm_user_dicts_cache_key, \
|
|
|
|
bot_dicts_in_realm_cache_key, realm_user_dict_fields, \
|
2019-01-14 07:46:31 +01:00
|
|
|
bot_dict_fields, flush_message, flush_submessage, bot_profile_cache_key, \
|
|
|
|
flush_used_upload_space_cache, get_realm_used_upload_space_cache_key
|
2013-08-08 16:51:18 +02:00
|
|
|
from zerver.lib.utils import make_safe_digest, generate_random_token
|
2016-02-12 21:08:56 +01:00
|
|
|
from django.db import transaction
|
2017-04-15 04:03:56 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2012-10-29 19:43:00 +01:00
|
|
|
from django.contrib.sessions.models import Session
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.timestamp import datetime_to_timestamp
|
2014-02-21 21:18:38 +01:00
|
|
|
from django.db.models.signals import pre_save, post_save, post_delete
|
2016-06-05 02:45:30 +02:00
|
|
|
from django.utils.translation import ugettext_lazy as _
|
2016-11-14 09:23:03 +01:00
|
|
|
from zerver.lib import cache
|
2019-02-02 23:53:55 +01:00
|
|
|
from zerver.lib.validator import check_int, \
|
2018-04-25 19:20:58 +02:00
|
|
|
check_short_string, check_long_string, validate_choice_field, check_date, \
|
2018-06-07 20:01:31 +02:00
|
|
|
check_url, check_list
|
2018-03-05 20:19:07 +01:00
|
|
|
from zerver.lib.name_restrictions import is_disposable_domain
|
2018-04-08 09:50:05 +02:00
|
|
|
from zerver.lib.types import Validator, ExtendedValidator, \
|
2019-04-16 07:14:51 +02:00
|
|
|
ProfileDataElement, ProfileData, RealmUserValidator, \
|
2019-08-18 00:24:46 +02:00
|
|
|
ExtendedFieldElement, UserFieldElement, FieldElement, \
|
2019-08-18 00:40:35 +02:00
|
|
|
DisplayRecipientT
|
2018-03-05 20:19:07 +01:00
|
|
|
|
2013-03-12 17:51:55 +01:00
|
|
|
from bitfield import BitField
|
2016-06-13 10:39:47 +02:00
|
|
|
from bitfield.types import BitHandler
|
2019-09-28 01:06:46 +02:00
|
|
|
from collections import defaultdict
|
2014-07-15 21:03:51 +02:00
|
|
|
from datetime import timedelta
|
2013-07-08 17:53:50 +02:00
|
|
|
import pylibmc
|
2014-02-21 21:18:38 +01:00
|
|
|
import re
|
2016-12-22 15:44:33 +01:00
|
|
|
import sre_constants
|
2016-03-24 20:24:01 +01:00
|
|
|
import time
|
|
|
|
import datetime
|
2013-03-12 17:51:55 +01:00
|
|
|
|
2018-11-01 21:23:48 +01:00
|
|
|
# NOTE(review): these limits appear to cap topic/message sizes; enforcement
# happens at the call sites, not here — confirm against usage.
MAX_TOPIC_NAME_LENGTH = 60
MAX_MESSAGE_LENGTH = 10000
# Maximum length of a language/locale identifier (used by default_language below).
MAX_LANGUAGE_ID_LENGTH = 50  # type: int

# Constrained TypeVar: a collection of stream names, either a sequence or a set of str.
STREAM_NAMES = TypeVar('STREAM_NAMES', Sequence[str], AbstractSet[str])
|
2016-05-07 18:02:57 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def query_for_ids(query: QuerySet, user_ids: List[int], field: str) -> QuerySet:
    """Attach a hand-built `<field> in (...)` WHERE clause to `query`.

    This optimizes searches of the form `user_profile_id in (1, 2, 3, 4)`
    by building the clause directly; profiling shows significant speedups
    over the normal Django-based approach.

    Use this very carefully!  Also, the caller should guard against empty
    lists of user_ids.
    """
    assert user_ids
    # Each id is forced through int(), so the interpolated SQL is safe.
    id_csv = ', '.join(str(int(uid)) for uid in user_ids)
    in_clause = '%s in (%s)' % (field, id_csv)
    return query.extra(where=[in_clause])
|
|
|
|
|
2016-03-31 03:30:33 +02:00
|
|
|
# Doing 1000 remote cache requests to get_display_recipient is quite slow,
# so add a local cache as well as the remote cache cache.
#
# This local cache has a lifetime of just a single request; it is
# cleared inside `flush_per_request_caches` in our middleware.  It
# could be replaced with smarter bulk-fetching logic that deduplicates
# queries for the same recipient; this is just a convenient way to
# write that code.
#
# Maps recipient_id -> display form (a stream name, or a list of user dicts;
# see get_display_recipient_by_id).
per_request_display_recipient_cache = {}  # type: Dict[int, DisplayRecipientT]
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_display_recipient_by_id(recipient_id: int, recipient_type: int,
                                recipient_type_id: Optional[int]) -> DisplayRecipientT:
    """Describe a recipient, memoizing the answer in the per-request cache.

    returns: for a stream recipient (type_id must be an int), the stream
    name as a str; otherwise (type_id may be None), a list of recipient
    dicts.
    """
    # Imported here, not at module level, to avoid a circular dependency.
    from zerver.lib.display_recipient import get_display_recipient_remote_cache

    if recipient_id not in per_request_display_recipient_cache:
        per_request_display_recipient_cache[recipient_id] = \
            get_display_recipient_remote_cache(recipient_id, recipient_type, recipient_type_id)
    return per_request_display_recipient_cache[recipient_id]
|
2013-09-21 15:35:12 +02:00
|
|
|
|
2019-08-18 00:40:35 +02:00
|
|
|
def get_display_recipient(recipient: 'Recipient') -> DisplayRecipientT:
    """Convenience wrapper: unpack a Recipient row into the cached by-id lookup."""
    return get_display_recipient_by_id(recipient.id, recipient.type, recipient.type_id)
|
2013-04-25 20:42:28 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def flush_per_request_caches() -> None:
    """Reset all caches whose lifetime is a single request.

    Called by our middleware between requests; rebinds (rather than
    mutates) the module-level dicts so stale references cannot leak data
    across requests.
    """
    global per_request_display_recipient_cache
    global per_request_realm_filters_cache
    per_request_display_recipient_cache = {}
    per_request_realm_filters_cache = {}
|
2013-08-22 17:45:15 +02:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_realm_emoji_cache_key(realm: 'Realm') -> str:
    """Cache key for the realm's full emoji dict (see Realm.get_emoji)."""
    return u'realm_emoji:{}'.format(realm.id)
|
2013-08-22 16:56:37 +02:00
|
|
|
|
2018-03-11 18:48:56 +01:00
|
|
|
def get_active_realm_emoji_cache_key(realm: 'Realm') -> str:
    """Cache key for the realm's active emoji dict (see Realm.get_active_emoji)."""
    return u'active_realm_emoji:{}'.format(realm.id)
|
|
|
|
|
2019-03-17 22:19:53 +01:00
|
|
|
# This simple call-once caching saves ~500us in auth_enabled_helper,
# which is a significant optimization for common_context.  Note that
# these values cannot change in a running production system, but do
# regularly change within unit tests; we address the latter by calling
# clear_supported_auth_backends_cache in our standard tearDown code.
supported_backends = None  # type: Optional[Set[type]]

def supported_auth_backends() -> Set[type]:
    """Return the authentication backends enabled on this server.

    Memoized per-process (see the comment above); unit tests reset the
    memo via clear_supported_auth_backends_cache().
    """
    global supported_backends
    # Fill the call-once cache on first use.  A "temporarily disabled for
    # debugging" leftover had made this recompute on every call, which
    # defeated the optimization described above; the memoization is
    # restored here.
    if supported_backends is None:
        supported_backends = django.contrib.auth.get_backends()
    return supported_backends
|
|
|
|
|
|
|
|
def clear_supported_auth_backends_cache() -> None:
    # Reset the call-once cache used by supported_auth_backends; per the
    # comment above, this is called from standard test tearDown so that
    # backend changes between unit tests are picked up.
    global supported_backends
    supported_backends = None
|
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class Realm(models.Model):
    """A Zulip organization.

    Fields listed in `property_types` below are the automatically managed
    simple settings (see the comment on that dict); the remaining fields
    are handled individually.
    """
    MAX_REALM_NAME_LENGTH = 40
    MAX_REALM_SUBDOMAIN_LENGTH = 40
    MAX_GOOGLE_HANGOUTS_DOMAIN_LENGTH = 255  # This is just the maximum domain length by RFC
    INVITES_STANDARD_REALM_DAILY_MAX = 3000
    MESSAGE_VISIBILITY_LIMITED = 10000
    AUTHENTICATION_FLAGS = [u'Google', u'Email', u'GitHub', u'LDAP', u'Dev',
                            u'RemoteUser', u'AzureAD', u'SAML']
    SUBDOMAIN_FOR_ROOT_DOMAIN = ''

    # User-visible display name and description used on e.g. the organization homepage
    name = models.CharField(max_length=MAX_REALM_NAME_LENGTH, null=True)  # type: Optional[str]
    description = models.TextField(default=u"")  # type: str

    # A short, identifier-like name for the organization.  Used in subdomains;
    # e.g. on a server at example.com, an org with string_id `foo` is reached
    # at `foo.example.com`.
    string_id = models.CharField(max_length=MAX_REALM_SUBDOMAIN_LENGTH, unique=True)  # type: str

    date_created = models.DateTimeField(default=timezone_now)  # type: datetime.datetime
    deactivated = models.BooleanField(default=False)  # type: bool

    # See RealmDomain for the domains that apply for a given organization.
    emails_restricted_to_domains = models.BooleanField(default=False)  # type: bool

    invite_required = models.BooleanField(default=True)  # type: bool
    invite_by_admins_only = models.BooleanField(default=False)  # type: bool
    _max_invites = models.IntegerField(null=True, db_column='max_invites')  # type: Optional[int]
    disallow_disposable_email_addresses = models.BooleanField(default=True)  # type: bool
    # Bitfield of enabled authentication methods; decoded by
    # authentication_methods_dict() below.
    authentication_methods = BitField(flags=AUTHENTICATION_FLAGS,
                                      default=2**31 - 1)  # type: BitHandler

    # Whether the organization has enabled inline image and URL previews.
    inline_image_preview = models.BooleanField(default=True)  # type: bool
    inline_url_embed_preview = models.BooleanField(default=False)  # type: bool

    # Whether digest emails are enabled for the organization.
    digest_emails_enabled = models.BooleanField(default=False)  # type: bool
    # Day of the week on which the digest is sent (default: Tuesday).
    digest_weekday = models.SmallIntegerField(default=1)  # type: int

    send_welcome_emails = models.BooleanField(default=True)  # type: bool
    message_content_allowed_in_email_notifications = models.BooleanField(default=True)  # type: bool

    mandatory_topics = models.BooleanField(default=False)  # type: bool
    add_emoji_by_admins_only = models.BooleanField(default=False)  # type: bool
    name_changes_disabled = models.BooleanField(default=False)  # type: bool
    email_changes_disabled = models.BooleanField(default=False)  # type: bool
    avatar_changes_disabled = models.BooleanField(default=False)  # type: bool

    # Who in the organization is allowed to create streams.
    CREATE_STREAM_POLICY_MEMBERS = 1
    CREATE_STREAM_POLICY_ADMINS = 2
    CREATE_STREAM_POLICY_WAITING_PERIOD = 3
    create_stream_policy = models.PositiveSmallIntegerField(
        default=CREATE_STREAM_POLICY_MEMBERS)  # type: int

    # Who in the organization is allowed to invite other users to streams.
    INVITE_TO_STREAM_POLICY_MEMBERS = 1
    INVITE_TO_STREAM_POLICY_ADMINS = 2
    INVITE_TO_STREAM_POLICY_WAITING_PERIOD = 3
    invite_to_stream_policy = models.PositiveSmallIntegerField(
        default=INVITE_TO_STREAM_POLICY_MEMBERS)  # type: int

    # Who in the organization has access to users' actual email
    # addresses.  Controls whether the UserProfile.email field is the
    # same as UserProfile.delivery_email, or is instead garbage.
    EMAIL_ADDRESS_VISIBILITY_EVERYONE = 1
    EMAIL_ADDRESS_VISIBILITY_MEMBERS = 2
    EMAIL_ADDRESS_VISIBILITY_ADMINS = 3
    email_address_visibility = models.PositiveSmallIntegerField(default=EMAIL_ADDRESS_VISIBILITY_EVERYONE)  # type: int
    EMAIL_ADDRESS_VISIBILITY_TYPES = [
        EMAIL_ADDRESS_VISIBILITY_EVERYONE,
        # The MEMBERS level is not yet implemented on the backend.
        ## EMAIL_ADDRESS_VISIBILITY_MEMBERS,
        EMAIL_ADDRESS_VISIBILITY_ADMINS,
    ]

    # Threshold in days for new users to create streams, and potentially take
    # some other actions.
    waiting_period_threshold = models.PositiveIntegerField(default=0)  # type: int

    allow_message_deleting = models.BooleanField(default=False)  # type: bool
    DEFAULT_MESSAGE_CONTENT_DELETE_LIMIT_SECONDS = 600  # if changed, also change in admin.js, setting_org.js
    message_content_delete_limit_seconds = models.IntegerField(default=DEFAULT_MESSAGE_CONTENT_DELETE_LIMIT_SECONDS)  # type: int

    allow_message_editing = models.BooleanField(default=True)  # type: bool
    DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS = 600  # if changed, also change in admin.js, setting_org.js
    message_content_edit_limit_seconds = models.IntegerField(default=DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS)  # type: int

    # Whether users have access to message edit history
    allow_edit_history = models.BooleanField(default=True)  # type: bool

    DEFAULT_COMMUNITY_TOPIC_EDITING_LIMIT_SECONDS = 86400
    allow_community_topic_editing = models.BooleanField(default=True)  # type: bool

    # Defaults for new users
    default_twenty_four_hour_time = models.BooleanField(default=False)  # type: bool
    default_language = models.CharField(default=u'en', max_length=MAX_LANGUAGE_ID_LENGTH)  # type: str

    DEFAULT_NOTIFICATION_STREAM_NAME = u'general'
    INITIAL_PRIVATE_STREAM_NAME = u'core team'
    STREAM_EVENTS_NOTIFICATION_TOPIC = _('stream events')
    notifications_stream = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)  # type: Optional[Stream]
    signup_notifications_stream = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)  # type: Optional[Stream]

    # For old messages being automatically deleted
    message_retention_days = models.IntegerField(null=True)  # type: Optional[int]

    # When non-null, all but the latest this many messages in the organization
    # are inaccessible to users (but not deleted).
    message_visibility_limit = models.IntegerField(null=True)  # type: Optional[int]

    # Messages older than this message ID in the organization are inaccessible.
    first_visible_message_id = models.IntegerField(default=0)  # type: int

    # Valid org_types are {CORPORATE, COMMUNITY}
    CORPORATE = 1
    COMMUNITY = 2
    org_type = models.PositiveSmallIntegerField(default=CORPORATE)  # type: int

    UPGRADE_TEXT_STANDARD = _("Available on Zulip Standard. Upgrade to access.")
    # plan_type controls various features around resource/feature
    # limitations for a Zulip organization on multi-tenant servers
    # like zulipchat.com.
    SELF_HOSTED = 1
    LIMITED = 2
    STANDARD = 3
    STANDARD_FREE = 4
    plan_type = models.PositiveSmallIntegerField(default=SELF_HOSTED)  # type: int

    # This value is also being used in static/js/settings_bots.bot_creation_policy_values.
    # On updating it here, update it there as well.
    BOT_CREATION_EVERYONE = 1
    BOT_CREATION_LIMIT_GENERIC_BOTS = 2
    BOT_CREATION_ADMINS_ONLY = 3
    bot_creation_policy = models.PositiveSmallIntegerField(default=BOT_CREATION_EVERYONE)  # type: int

    # See upload_quota_bytes; don't interpret upload_quota_gb directly.
    UPLOAD_QUOTA_LIMITED = 5
    UPLOAD_QUOTA_STANDARD = 50
    upload_quota_gb = models.IntegerField(null=True)  # type: Optional[int]

    VIDEO_CHAT_PROVIDERS = {
        'jitsi_meet': {
            'name': u"Jitsi Meet",
            'id': 1
        },
        'google_hangouts': {
            'name': u"Google Hangouts",
            'id': 2
        },
        'zoom': {
            'name': u"Zoom",
            'id': 3
        }
    }
    video_chat_provider = models.PositiveSmallIntegerField(default=VIDEO_CHAT_PROVIDERS['jitsi_meet']['id'])
    google_hangouts_domain = models.TextField(default="")
    zoom_user_id = models.TextField(default="")
    zoom_api_key = models.TextField(default="")
    zoom_api_secret = models.TextField(default="")

    # Define the types of the various automatically managed properties
    property_types = dict(
        add_emoji_by_admins_only=bool,
        allow_edit_history=bool,
        allow_message_deleting=bool,
        bot_creation_policy=int,
        create_stream_policy=int,
        invite_to_stream_policy=int,
        default_language=str,
        default_twenty_four_hour_time = bool,
        description=str,
        digest_emails_enabled=bool,
        disallow_disposable_email_addresses=bool,
        email_address_visibility=int,
        email_changes_disabled=bool,
        google_hangouts_domain=str,
        zoom_user_id=str,
        zoom_api_key=str,
        zoom_api_secret=str,
        invite_required=bool,
        invite_by_admins_only=bool,
        inline_image_preview=bool,
        inline_url_embed_preview=bool,
        mandatory_topics=bool,
        message_retention_days=(int, type(None)),
        name=str,
        name_changes_disabled=bool,
        avatar_changes_disabled=bool,
        emails_restricted_to_domains=bool,
        send_welcome_emails=bool,
        message_content_allowed_in_email_notifications=bool,
        video_chat_provider=int,
        waiting_period_threshold=int,
        digest_weekday=int,
    )  # type: Dict[str, Union[type, Tuple[type, ...]]]

    # Icon is the square mobile icon.
    ICON_FROM_GRAVATAR = u'G'
    ICON_UPLOADED = u'U'
    ICON_SOURCES = (
        (ICON_FROM_GRAVATAR, 'Hosted by Gravatar'),
        (ICON_UPLOADED, 'Uploaded by administrator'),
    )
    icon_source = models.CharField(default=ICON_FROM_GRAVATAR, choices=ICON_SOURCES,
                                   max_length=1)  # type: str
    icon_version = models.PositiveSmallIntegerField(default=1)  # type: int

    # Logo is the horizonal logo we show in top-left of webapp navbar UI.
    LOGO_DEFAULT = u'D'
    LOGO_UPLOADED = u'U'
    LOGO_SOURCES = (
        (LOGO_DEFAULT, 'Default to Zulip'),
        (LOGO_UPLOADED, 'Uploaded by administrator'),
    )
    logo_source = models.CharField(default=LOGO_DEFAULT, choices=LOGO_SOURCES,
                                   max_length=1)  # type: str
    logo_version = models.PositiveSmallIntegerField(default=1)  # type: int

    # Separate logo shown in the night (dark) theme.
    night_logo_source = models.CharField(default=LOGO_DEFAULT, choices=LOGO_SOURCES,
                                         max_length=1)  # type: str
    night_logo_version = models.PositiveSmallIntegerField(default=1)  # type: int

    BOT_CREATION_POLICY_TYPES = [
        BOT_CREATION_EVERYONE,
        BOT_CREATION_LIMIT_GENERIC_BOTS,
        BOT_CREATION_ADMINS_ONLY,
    ]

    def authentication_methods_dict(self) -> Dict[str, bool]:
        """Returns a mapping from authentication flags to their status,
        showing only those authentication flags that are supported on
        the current server (i.e. if EmailAuthBackend is not configured
        on the server, this will not return an entry for "Email")."""
        # This mapping needs to be imported from here due to the cyclic
        # dependency.
        from zproject.backends import AUTH_BACKEND_NAME_MAP

        ret = {}  # type: Dict[str, bool]
        supported_backends = [backend.__class__ for backend in supported_auth_backends()]
        for k, v in self.authentication_methods.iteritems():
            backend = AUTH_BACKEND_NAME_MAP[k]
            if backend in supported_backends:
                ret[k] = v
        return ret

    def __str__(self) -> str:
        return "<Realm: %s %s>" % (self.string_id, self.id)

    @cache_with_key(get_realm_emoji_cache_key, timeout=3600*24*7)
    def get_emoji(self) -> Dict[str, Dict[str, Iterable[str]]]:
        return get_realm_emoji_uncached(self)

    @cache_with_key(get_active_realm_emoji_cache_key, timeout=3600*24*7)
    def get_active_emoji(self) -> Dict[str, Dict[str, Iterable[str]]]:
        return get_active_realm_emoji_uncached(self)

    def get_admin_users_and_bots(self) -> Sequence['UserProfile']:
        """Use this in contexts where we want administrative users as well as
        bots with administrator privileges, like send_event calls for
        notifications to all administrator users.
        """
        # TODO: Change return type to QuerySet[UserProfile]
        return UserProfile.objects.filter(realm=self, role=UserProfile.ROLE_REALM_ADMINISTRATOR,
                                          is_active=True)

    def get_human_admin_users(self) -> Sequence['UserProfile']:
        """Use this in contexts where we want only human users with
        administrative privileges, like sending an email to all of a
        realm's administrators (bots don't have real email addresses).
        """
        # TODO: Change return type to QuerySet[UserProfile]
        return UserProfile.objects.filter(realm=self, is_bot=False,
                                          role=UserProfile.ROLE_REALM_ADMINISTRATOR,
                                          is_active=True)

    def get_active_users(self) -> Sequence['UserProfile']:
        # TODO: Change return type to QuerySet[UserProfile]
        return UserProfile.objects.filter(realm=self, is_active=True).select_related()

    def get_bot_domain(self) -> str:
        return get_fake_email_domain()

    def get_notifications_stream(self) -> Optional['Stream']:
        # Returns None rather than a deactivated stream.
        if self.notifications_stream is not None and not self.notifications_stream.deactivated:
            return self.notifications_stream
        return None

    def get_signup_notifications_stream(self) -> Optional['Stream']:
        # Returns None rather than a deactivated stream.
        if self.signup_notifications_stream is not None and not self.signup_notifications_stream.deactivated:
            return self.signup_notifications_stream
        return None

    @property
    def max_invites(self) -> int:
        # Falls back to the server-wide default when no per-realm
        # override (_max_invites) is set.
        if self._max_invites is None:
            return settings.INVITES_DEFAULT_REALM_DAILY_MAX
        return self._max_invites

    @max_invites.setter
    def max_invites(self, value: int) -> None:
        self._max_invites = value

    def upload_quota_bytes(self) -> Optional[int]:
        if self.upload_quota_gb is None:
            return None
        # We describe the quota to users in "GB" or "gigabytes", but actually apply
        # it as gibibytes (GiB) to be a bit more generous in case of confusion.
        return self.upload_quota_gb << 30

    @cache_with_key(get_realm_used_upload_space_cache_key, timeout=3600*24*7)
    def currently_used_upload_space_bytes(self) -> int:
        used_space = Attachment.objects.filter(realm=self).aggregate(Sum('size'))['size__sum']
        if used_space is None:
            return 0
        return used_space

    @property
    def subdomain(self) -> str:
        return self.string_id

    @property
    def display_subdomain(self) -> str:
        """Likely to be temporary function to avoid signup messages being sent
        to an empty topic"""
        if self.string_id == "":
            return "."
        return self.string_id

    @property
    def uri(self) -> str:
        return settings.EXTERNAL_URI_SCHEME + self.host

    @property
    def host(self) -> str:
        return self.host_for_subdomain(self.subdomain)

    @staticmethod
    def host_for_subdomain(subdomain: str) -> str:
        if subdomain == Realm.SUBDOMAIN_FOR_ROOT_DOMAIN:
            return settings.EXTERNAL_HOST
        # settings.REALM_HOSTS can map a subdomain to a custom hostname,
        # overriding the default `<subdomain>.<EXTERNAL_HOST>` form.
        default_host = "%s.%s" % (subdomain, settings.EXTERNAL_HOST)
        return settings.REALM_HOSTS.get(subdomain, default_host)

    @property
    def is_zephyr_mirror_realm(self) -> bool:
        return self.string_id == "zephyr"

    @property
    def webathena_enabled(self) -> bool:
        return self.is_zephyr_mirror_realm

    @property
    def presence_disabled(self) -> bool:
        return self.is_zephyr_mirror_realm

    class Meta:
        permissions = (
            ('administer', "Administer a realm"),
            ('api_super_user', "Can send messages as other users for mirroring"),
        )
|
|
|
|
|
2014-01-28 18:18:19 +01:00
|
|
|
# Run the flush_realm cache-invalidation hook (from zerver.lib.cache)
# after every Realm save.
post_save.connect(flush_realm, sender=Realm)
|
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_realm(string_id: str) -> Realm:
    """Fetch the Realm with the given subdomain (string_id).

    Raises Realm.DoesNotExist when there is no such realm (standard
    Django `Manager.get` behavior).
    """
    return Realm.objects.get(string_id=string_id)
|
2016-11-11 19:32:15 +01:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def name_changes_disabled(realm: Optional[Realm]) -> bool:
    """Whether users are prevented from changing their own full name.

    With a None realm (e.g. before signup completes), only the
    server-wide setting applies.
    """
    if realm is not None:
        return settings.NAME_CHANGES_DISABLED or realm.name_changes_disabled
    return settings.NAME_CHANGES_DISABLED
|
|
|
|
|
2019-04-23 04:51:04 +02:00
|
|
|
def avatar_changes_disabled(realm: Realm) -> bool:
    # Disabled either server-wide or by the realm's own setting.
    return settings.AVATAR_CHANGES_DISABLED or realm.avatar_changes_disabled
|
2019-04-23 04:51:04 +02:00
|
|
|
|
2017-03-31 16:20:07 +02:00
|
|
|
class RealmDomain(models.Model):
    """For an organization with emails_restricted_to_domains enabled, the list of
    allowed domains"""
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    # should always be stored lowercase
    domain = models.CharField(max_length=80, db_index=True)  # type: str
    # Whether subdomains of `domain` are also allowed (checked in
    # email_allowed_for_realm).
    allow_subdomains = models.BooleanField(default=False)

    class Meta:
        unique_together = ("realm", "domain")
|
2016-10-29 04:58:44 +02:00
|
|
|
|
2013-07-18 18:48:56 +02:00
|
|
|
# These functions should only be used on email addresses that have
|
|
|
|
# been validated via django.core.validators.validate_email
|
|
|
|
#
|
|
|
|
# Note that we need to use some care, since you can have multiple @-signs; e.g.
|
2013-07-24 23:41:24 +02:00
|
|
|
# "tabbott@test"@zulip.com
|
2013-07-18 18:48:56 +02:00
|
|
|
# is a valid email address
|
2018-05-11 02:24:34 +02:00
|
|
|
def email_to_username(email: str) -> str:
    """Return the local part (everything before the final @), lowercased.

    Returns "" when the address contains no @ at all.
    """
    local_part, _sep, _domain = email.rpartition("@")
    return local_part.lower()
|
2013-07-18 18:48:56 +02:00
|
|
|
|
2013-11-22 23:48:00 +01:00
|
|
|
# Returns the raw domain portion of the desired email address
|
2018-05-11 02:24:34 +02:00
|
|
|
def email_to_domain(email: str) -> str:
    """Return the domain portion (after the final @), lowercased.

    With no @ present, the whole (lowercased) string is returned.
    """
    return email.rpartition("@")[2].lower() if "@" in email else email.lower()
|
2013-07-18 18:48:56 +02:00
|
|
|
|
2018-03-14 12:54:05 +01:00
|
|
|
class DomainNotAllowedForRealmError(Exception):
    """The email's domain is not in the realm's allowed-domains list."""
    pass
|
|
|
|
|
2018-03-14 13:25:26 +01:00
|
|
|
class DisposableEmailError(Exception):
    """The email uses a disposable-email provider the realm disallows."""
    pass
|
|
|
|
|
2018-06-20 13:08:07 +02:00
|
|
|
class EmailContainsPlusError(Exception):
    """The email's username contains '+', disallowed in restricted realms."""
    pass
|
|
|
|
|
2016-01-12 16:24:34 +01:00
|
|
|
# Is a user with the given email address allowed to be in the given realm?
|
|
|
|
# (This function does not check whether the user has been invited to the realm.
|
|
|
|
# So for invite-only realms, this is the test for whether a user can be invited,
|
|
|
|
# not whether the user can sign up currently.)
|
2018-05-11 02:24:34 +02:00
|
|
|
def email_allowed_for_realm(email: str, realm: Realm) -> None:
    """Check whether a user with this email address may be in this realm.

    Returns None when allowed; otherwise raises DisposableEmailError,
    EmailContainsPlusError, or DomainNotAllowedForRealmError.  (This
    does not check whether the user has been invited.)
    """
    if not realm.emails_restricted_to_domains:
        # Open realm: any domain is acceptable, but disposable-email
        # providers may still be disallowed by realm policy.
        if realm.disallow_disposable_email_addresses and \
                is_disposable_domain(email_to_domain(email)):
            raise DisposableEmailError
        return
    elif '+' in email_to_username(email):
        # In restricted realms, '+' addressing is rejected (it would
        # let one person register multiple accounts under one mailbox).
        raise EmailContainsPlusError

    domain = email_to_domain(email)
    query = RealmDomain.objects.filter(realm=realm)
    if query.filter(domain=domain).exists():
        return
    else:
        # Walk up the domain one label at a time, checking whether any
        # parent domain is allowed with allow_subdomains=True.
        query = query.filter(allow_subdomains=True)
        while len(domain) > 0:
            subdomain, sep, domain = domain.partition('.')
            if query.filter(domain=domain).exists():
                return
    raise DomainNotAllowedForRealmError
|
2016-01-12 16:24:34 +01:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_realm_domains(realm: Realm) -> List[Dict[str, str]]:
    # Returns [{'domain': ..., 'allow_subdomains': ...}, ...] for the realm.
    return list(realm.realmdomain_set.values('domain', 'allow_subdomains'))
|
2016-09-28 00:08:36 +02:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class RealmEmoji(models.Model):
    """A custom emoji uploaded for use within a single realm."""
    # The uploading user; NULL when that account no longer exists.
    author = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)  # type: Optional[UserProfile]
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    name = models.TextField(validators=[
        MinLengthValidator(1),
        # The second part of the regex (negative lookbehind) disallows names
        # ending with one of the punctuation characters.
        RegexValidator(regex=r'^[0-9a-z.\-_]+(?<![.\-_])$',
                       message=_("Invalid characters in emoji name"))])  # type: str

    # The basename of the custom emoji's filename; see PATH_ID_TEMPLATE for the full path.
    file_name = models.TextField(db_index=True, null=True, blank=True)  # type: Optional[str]

    # Deactivated emoji are kept around (old messages may reference
    # them) but cannot be used in new messages.
    deactivated = models.BooleanField(default=False)  # type: bool

    PATH_ID_TEMPLATE = "{realm_id}/emoji/images/{emoji_file_name}"

    def __str__(self) -> str:
        return "<RealmEmoji(%s): %s %s %s %s>" % (self.realm.string_id,
                                                  self.id,
                                                  self.name,
                                                  self.deactivated,
                                                  self.file_name)
|
2013-08-22 16:56:37 +02:00
|
|
|
|
2018-03-18 16:30:58 +01:00
|
|
|
def get_realm_emoji_dicts(realm: Realm,
                          only_active_emojis: bool=False) -> Dict[str, Dict[str, Any]]:
    """Return the realm's custom emoji as a dict keyed by str(emoji id)."""
    from zerver.lib.emoji import get_emoji_url

    emoji_query = RealmEmoji.objects.filter(realm=realm).select_related('author')
    if only_active_emojis:
        emoji_query = emoji_query.filter(deactivated=False)

    result = {}
    for emoji in emoji_query.all():
        author_dict = None
        if emoji.author:
            author_dict = dict(
                id=emoji.author.id,
                email=emoji.author.email,
                full_name=emoji.author.full_name,
            )
        result[str(emoji.id)] = dict(
            id=str(emoji.id),
            name=emoji.name,
            source_url=get_emoji_url(emoji.file_name, emoji.realm_id),
            deactivated=emoji.deactivated,
            author=author_dict,
        )
    return result
|
|
|
|
|
2018-03-18 16:30:58 +01:00
|
|
|
def get_realm_emoji_uncached(realm: Realm) -> Dict[str, Dict[str, Any]]:
    # Cache-miss path: every realm emoji (active or not), keyed by id.
    return get_realm_emoji_dicts(realm)
|
|
|
|
|
2018-03-11 18:48:56 +01:00
|
|
|
def get_active_realm_emoji_uncached(realm: Realm) -> Dict[str, Dict[str, Any]]:
    """Return only active realm emoji, keyed by emoji name rather than id."""
    active_emojis = get_realm_emoji_dicts(realm, only_active_emojis=True)
    return {emoji_dict['name']: emoji_dict
            for emoji_dict in active_emojis.values()}
|
2018-03-11 18:48:56 +01:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def flush_realm_emoji(sender: Any, **kwargs: Any) -> None:
    """Signal handler: recompute both realm-emoji cache entries after a
    RealmEmoji is saved or deleted."""
    realm = kwargs['instance'].realm
    one_week = 3600 * 24 * 7
    cache_set(get_realm_emoji_cache_key(realm),
              get_realm_emoji_uncached(realm),
              timeout=one_week)
    cache_set(get_active_realm_emoji_cache_key(realm),
              get_active_realm_emoji_uncached(realm),
              timeout=one_week)
|
2013-08-22 16:56:37 +02:00
|
|
|
|
2014-01-28 20:53:57 +01:00
|
|
|
# Keep both realm-emoji cache entries current on every change.
post_save.connect(flush_realm_emoji, sender=RealmEmoji)
post_delete.connect(flush_realm_emoji, sender=RealmEmoji)
|
2013-08-22 16:56:37 +02:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def filter_pattern_validator(value: str) -> None:
    """Django validator for RealmFilter.pattern.

    Requires both the allowed-character/named-capture-group shape and
    that the pattern itself compiles as a regular expression; raises
    ValidationError otherwise.
    """
    regex = re.compile(r'^(?:(?:[\w\-#_= /:]*|[+]|[!])(\(\?P<\w+>.+\)))+$')
    error_msg = _('Invalid filter pattern. Valid characters are %s.') % (
        '[ a-zA-Z_#=/:+!-]',)

    if not regex.match(str(value)):
        raise ValidationError(error_msg)

    # The shape check above does not guarantee the pattern is a valid
    # regex; verify by compiling it.
    try:
        re.compile(value)
    except sre_constants.error:
        # Regex is invalid
        raise ValidationError(error_msg)
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def filter_format_validator(value: str) -> None:
    """Django validator for RealmFilter.url_format_string: a URL
    template containing at least one %(name)s substitution."""
    regex = re.compile(r'^([\.\/:a-zA-Z0-9#_?=&;-]+%\(([a-zA-Z0-9_-]+)\)s)+[/a-zA-Z0-9#_?=&;-]*$')

    if not regex.match(value):
        raise ValidationError(_('Invalid URL format string.'))
|
2016-02-13 19:17:15 +01:00
|
|
|
|
|
|
|
class RealmFilter(models.Model):
    """Realm-specific regular expressions to automatically linkify certain
    strings inside the markdown processor.  See "Custom filters" in the settings UI.
    """
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    pattern = models.TextField(validators=[filter_pattern_validator])  # type: str
    url_format_string = models.TextField(validators=[URLValidator(), filter_format_validator])  # type: str

    class Meta:
        unique_together = ("realm", "pattern")

    def __str__(self) -> str:
        return "<RealmFilter(%s): %s %s>" % (self.realm.string_id, self.pattern, self.url_format_string)
|
2013-12-06 23:02:52 +01:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_realm_filters_cache_key(realm_id: int) -> str:
    """Remote-cache key for a realm's linkifier list."""
    return '{}:all_realm_filters:{}'.format(cache.KEY_PREFIX, realm_id)
|
2013-12-06 23:02:52 +01:00
|
|
|
|
2016-03-31 03:30:33 +02:00
|
|
|
# We have a per-process cache to avoid doing 1000 remote cache queries during page load
#
# Maps realm_id -> [(pattern, url_format_string, filter_id), ...].
per_request_realm_filters_cache = {}  # type: Dict[int, List[Tuple[str, str, int]]]
|
2016-09-23 21:47:44 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def realm_in_local_realm_filters_cache(realm_id: int) -> bool:
    """Whether this process has already cached the realm's filters."""
    return realm_id in per_request_realm_filters_cache
|
2016-09-23 21:47:44 +02:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def realm_filters_for_realm(realm_id: int) -> List[Tuple[str, str, int]]:
    """Fetch a realm's filters via the per-process cache, falling back
    to the remote cache (and ultimately the database) on a miss."""
    cached = per_request_realm_filters_cache.get(realm_id)
    if cached is None:
        cached = realm_filters_for_realm_remote_cache(realm_id)
        per_request_realm_filters_cache[realm_id] = cached
    return cached
|
2013-12-18 23:01:11 +01:00
|
|
|
|
|
|
|
@cache_with_key(get_realm_filters_cache_key, timeout=3600*24*7)
def realm_filters_for_realm_remote_cache(realm_id: int) -> List[Tuple[str, str, int]]:
    """Load a realm's filters from the database as
    (pattern, url_format_string, id) tuples; cached for a week."""
    return [(row.pattern, row.url_format_string, row.id)
            for row in RealmFilter.objects.filter(realm_id=realm_id)]
|
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def all_realm_filters() -> Dict[int, List[Tuple[str, str, int]]]:
    """Group every RealmFilter row by realm_id as
    (pattern, url_format_string, id) tuples."""
    by_realm = defaultdict(list)  # type: DefaultDict[int, List[Tuple[str, str, int]]]
    for linkifier in RealmFilter.objects.all():
        entry = (linkifier.pattern, linkifier.url_format_string, linkifier.id)
        by_realm[linkifier.realm_id].append(entry)

    return by_realm
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def flush_realm_filter(sender: Any, **kwargs: Any) -> None:
    """Signal handler: invalidate both the remote cache entry and the
    per-process cache entry for the changed filter's realm."""
    realm_id = kwargs['instance'].realm_id
    cache_delete(get_realm_filters_cache_key(realm_id))
    # dict.pop with a default replaces the try/except KeyError dance;
    # it is a no-op when the realm was never cached in this process.
    per_request_realm_filters_cache.pop(realm_id, None)
|
2013-12-06 23:02:52 +01:00
|
|
|
|
2014-01-28 21:00:38 +01:00
|
|
|
# Invalidate both cache layers whenever a filter is added, edited, or removed.
post_save.connect(flush_realm_filter, sender=RealmFilter)
post_delete.connect(flush_realm_filter, sender=RealmFilter)
|
2013-12-06 23:02:52 +01:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class UserProfile(AbstractBaseUser, PermissionsMixin):
    # Django auth: users are identified by their email address.
    USERNAME_FIELD = 'email'
    MAX_NAME_LENGTH = 100
    MIN_NAME_LENGTH = 2
    API_KEY_LENGTH = 32
    NAME_INVALID_CHARS = ['*', '`', "\\", '>', '"', '@']

    DEFAULT_BOT = 1
    """
    Incoming webhook bots are limited to only sending messages via webhooks.
    Thus, it is less of a security risk to expose their API keys to third-party services,
    since they can't be used to read messages.
    """
    INCOMING_WEBHOOK_BOT = 2
    # This value is also being used in static/js/settings_bots.js.
    # On updating it here, update it there as well.
    OUTGOING_WEBHOOK_BOT = 3
    """
    Embedded bots run within the Zulip server itself; events are added to the
    embedded_bots queue and then handled by a QueueProcessingWorker.
    """
    EMBEDDED_BOT = 4

    # Human-readable names for the bot types above.
    BOT_TYPES = {
        DEFAULT_BOT: 'Generic bot',
        INCOMING_WEBHOOK_BOT: 'Incoming webhook',
        OUTGOING_WEBHOOK_BOT: 'Outgoing webhook',
        EMBEDDED_BOT: 'Embedded bot',
    }

    SERVICE_BOT_TYPES = [
        OUTGOING_WEBHOOK_BOT,
        EMBEDDED_BOT,
    ]

    # The display email address, used for Zulip APIs, etc.  This field
    # should never be used for actually emailing someone because it
    # will be invalid for various values of
    # Realm.email_address_visibility; for that, see delivery_email.
    email = models.EmailField(blank=False, db_index=True)  # type: str

    # delivery_email is just used for sending emails.  In almost all
    # organizations, it matches `email`; this field is part of our
    # transition towards supporting organizations where email
    # addresses are not public.
    delivery_email = models.EmailField(blank=False, db_index=True)  # type: str

    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm

    # The user's name.  We prefer the model of a full_name and
    # short_name over first+last because cultures vary on how many
    # names one has, whether the family name is first or last, etc.
    # It also allows organizations to encode a bit of non-name data in
    # the "name" attribute if desired, like gender pronouns,
    # graduation year, etc.  The short_name attribute is currently not
    # used anywhere, but the intent is that it would be used as the
    # shorter familiar name for addressing the user in the UI.
    full_name = models.CharField(max_length=MAX_NAME_LENGTH)  # type: str
    short_name = models.CharField(max_length=MAX_NAME_LENGTH)  # type: str

    date_joined = models.DateTimeField(default=timezone_now)  # type: datetime.datetime
    # Terms-of-service version last agreed to; None if never agreed.
    tos_version = models.CharField(null=True, max_length=10)  # type: Optional[str]
    api_key = models.CharField(max_length=API_KEY_LENGTH)  # type: str

    # pointer points to Message.id, NOT UserMessage.id.
    pointer = models.IntegerField()  # type: int

    last_pointer_updater = models.CharField(max_length=64)  # type: str

    # Whether the user has access to server-level administrator pages, like /activity
    is_staff = models.BooleanField(default=False)  # type: bool

    # For a normal user, this is True unless the user or an admin has
    # deactivated their account.  The name comes from Django; this field
    # isn't related to presence or to whether the user has recently used Zulip.
    #
    # See also `long_term_idle`.
    is_active = models.BooleanField(default=True, db_index=True)  # type: bool

    is_billing_admin = models.BooleanField(default=False, db_index=True)  # type: bool

    is_bot = models.BooleanField(default=False, db_index=True)  # type: bool
    # One of the BOT_TYPES constants above; None for human users.
    bot_type = models.PositiveSmallIntegerField(null=True, db_index=True)  # type: Optional[int]
    bot_owner = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)  # type: Optional[UserProfile]

    # Each role has a superset of the permissions of the next higher
    # numbered role.  When adding new roles, leave enough space for
    # future roles to be inserted between currently adjacent
    # roles.  These constants appear in RealmAuditLog.extra_data, so
    # changes to them will require a migration of RealmAuditLog.
    # ROLE_REALM_OWNER = 100
    ROLE_REALM_ADMINISTRATOR = 200
    # ROLE_MODERATOR = 300
    ROLE_MEMBER = 400
    ROLE_GUEST = 600
    role = models.PositiveSmallIntegerField(default=ROLE_MEMBER, db_index=True)  # type: int

    # Whether the user has been "soft-deactivated" due to weeks of inactivity.
    # For these users we avoid doing UserMessage table work, as an optimization
    # for large Zulip organizations with lots of single-visit users.
    long_term_idle = models.BooleanField(default=False, db_index=True)  # type: bool

    # When we last added basic UserMessage rows for a long_term_idle user.
    last_active_message_id = models.IntegerField(null=True)  # type: Optional[int]

    # Mirror dummies are fake (!is_active) users used to provide
    # message senders in our cross-protocol Zephyr<->Zulip content
    # mirroring integration, so that we can display mirrored content
    # like native Zulip messages (with a name + avatar, etc.).
    is_mirror_dummy = models.BooleanField(default=False)  # type: bool

    # API super users are allowed to forge messages as sent by another
    # user and to send to private streams; also used for Zephyr/Jabber mirroring.
    is_api_super_user = models.BooleanField(default=False, db_index=True)  # type: bool

    ### Notifications settings. ###

    # Stream notifications.
    enable_stream_desktop_notifications = models.BooleanField(default=False)  # type: bool
    enable_stream_email_notifications = models.BooleanField(default=False)  # type: bool
    enable_stream_push_notifications = models.BooleanField(default=False)  # type: bool
    enable_stream_audible_notifications = models.BooleanField(default=False)  # type: bool
    notification_sound = models.CharField(max_length=20, default='zulip')  # type: str

    # PM + @-mention notifications.
    enable_desktop_notifications = models.BooleanField(default=True)  # type: bool
    pm_content_in_desktop_notifications = models.BooleanField(default=True)  # type: bool
    enable_sounds = models.BooleanField(default=True)  # type: bool
    enable_offline_email_notifications = models.BooleanField(default=True)  # type: bool
    message_content_in_email_notifications = models.BooleanField(default=True)  # type: bool
    enable_offline_push_notifications = models.BooleanField(default=True)  # type: bool
    enable_online_push_notifications = models.BooleanField(default=False)  # type: bool

    # What unread-count badge (if any) to show on the desktop app icon.
    DESKTOP_ICON_COUNT_DISPLAY_MESSAGES = 1
    DESKTOP_ICON_COUNT_DISPLAY_NOTIFIABLE = 2
    DESKTOP_ICON_COUNT_DISPLAY_NONE = 3
    desktop_icon_count_display = models.PositiveSmallIntegerField(
        default=DESKTOP_ICON_COUNT_DISPLAY_MESSAGES)  # type: int

    enable_digest_emails = models.BooleanField(default=True)  # type: bool
    enable_login_emails = models.BooleanField(default=True)  # type: bool
    realm_name_in_notifications = models.BooleanField(default=False)  # type: bool

    # Words that trigger a mention for this user, formatted as a json-serialized list of strings
    alert_words = models.TextField(default=u'[]')  # type: str

    # Used for rate-limiting certain automated messages generated by bots
    last_reminder = models.DateTimeField(default=None, null=True)  # type: Optional[datetime.datetime]

    # Minutes to wait before warning a bot owner that their bot sent a message
    # to a nonexistent stream
    BOT_OWNER_STREAM_ALERT_WAITPERIOD = 1

    # API rate limits, formatted as a comma-separated list of range:max pairs
    rate_limits = models.CharField(default=u"", max_length=100)  # type: str

    # Hours to wait before sending another email to a user
    EMAIL_REMINDER_WAITPERIOD = 24

    # Default streams for some deprecated/legacy classes of bot users.
    default_sending_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)  # type: Optional[Stream]
    default_events_register_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)  # type: Optional[Stream]
    default_all_public_streams = models.BooleanField(default=False)  # type: bool

    # UI vars
    enter_sends = models.NullBooleanField(default=False)  # type: Optional[bool]
    left_side_userlist = models.BooleanField(default=False)  # type: bool

    # display settings
    default_language = models.CharField(default=u'en', max_length=MAX_LANGUAGE_ID_LENGTH)  # type: str
    dense_mode = models.BooleanField(default=True)  # type: bool
    fluid_layout_width = models.BooleanField(default=False)  # type: bool
    high_contrast_mode = models.BooleanField(default=False)  # type: bool
    night_mode = models.BooleanField(default=False)  # type: bool
    translate_emoticons = models.BooleanField(default=False)  # type: bool
    twenty_four_hour_time = models.BooleanField(default=False)  # type: bool
    starred_message_counts = models.BooleanField(default=False)  # type: bool

    # UI setting controlling Zulip's behavior of demoting in the sort
    # order and graying out streams with no recent traffic.  The
    # default behavior, automatic, enables this behavior once a user
    # is subscribed to 30+ streams in the webapp.
    DEMOTE_STREAMS_AUTOMATIC = 1
    DEMOTE_STREAMS_ALWAYS = 2
    DEMOTE_STREAMS_NEVER = 3
    DEMOTE_STREAMS_CHOICES = [
        DEMOTE_STREAMS_AUTOMATIC,
        DEMOTE_STREAMS_ALWAYS,
        DEMOTE_STREAMS_NEVER
    ]
    demote_inactive_streams = models.PositiveSmallIntegerField(default=DEMOTE_STREAMS_AUTOMATIC)

    # A timezone name from the `tzdata` database, as found in pytz.all_timezones.
    #
    # The longest existing name is 32 characters long, so max_length=40 seems
    # like a safe choice.
    #
    # In Django, the convention is to use an empty string instead of NULL/None
    # for text-based fields.  For more information, see
    # https://docs.djangoproject.com/en/1.10/ref/models/fields/#django.db.models.Field.null.
    timezone = models.CharField(max_length=40, default=u'')  # type: str

    # Emojisets
    GOOGLE_EMOJISET = 'google'
    GOOGLE_BLOB_EMOJISET = 'google-blob'
    TEXT_EMOJISET = 'text'
    TWITTER_EMOJISET = 'twitter'
    EMOJISET_CHOICES = ((GOOGLE_EMOJISET, "Google modern"),
                        (GOOGLE_BLOB_EMOJISET, "Google classic"),
                        (TWITTER_EMOJISET, "Twitter"),
                        (TEXT_EMOJISET, "Plain text"))
    emojiset = models.CharField(default=GOOGLE_BLOB_EMOJISET, choices=EMOJISET_CHOICES, max_length=20)  # type: str

    # Whether the avatar comes from Gravatar or was uploaded by the user.
    AVATAR_FROM_GRAVATAR = u'G'
    AVATAR_FROM_USER = u'U'
    AVATAR_SOURCES = (
        (AVATAR_FROM_GRAVATAR, 'Hosted by Gravatar'),
        (AVATAR_FROM_USER, 'Uploaded by user'),
    )
    avatar_source = models.CharField(default=AVATAR_FROM_GRAVATAR, choices=AVATAR_SOURCES, max_length=1)  # type: str
    avatar_version = models.PositiveSmallIntegerField(default=1)  # type: int
    avatar_hash = models.CharField(null=True, max_length=64)  # type: Optional[str]

    # Progress through the new-user tutorial.
    TUTORIAL_WAITING = u'W'
    TUTORIAL_STARTED = u'S'
    TUTORIAL_FINISHED = u'F'
    TUTORIAL_STATES = ((TUTORIAL_WAITING, "Waiting"),
                       (TUTORIAL_STARTED, "Started"),
                       (TUTORIAL_FINISHED, "Finished"))
    tutorial_status = models.CharField(default=TUTORIAL_WAITING, choices=TUTORIAL_STATES, max_length=1)  # type: str

    # Contains serialized JSON of the form:
    #    [("step 1", true), ("step 2", false)]
    # where the second element of each tuple is if the step has been
    # completed.
    onboarding_steps = models.TextField(default=u'[]')  # type: str

    objects = UserManager()  # type: UserManager

    # Define the types of the various automatically managed properties
    property_types = dict(
        default_language=str,
        demote_inactive_streams=int,
        dense_mode=bool,
        emojiset=str,
        fluid_layout_width=bool,
        high_contrast_mode=bool,
        left_side_userlist=bool,
        night_mode=bool,
        starred_message_counts=bool,
        timezone=str,
        translate_emoticons=bool,
        twenty_four_hour_time=bool,
    )

    # Types of the notification settings managed through the generic
    # settings code paths.
    notification_setting_types = dict(
        enable_desktop_notifications=bool,
        enable_digest_emails=bool,
        enable_login_emails=bool,
        enable_offline_email_notifications=bool,
        enable_offline_push_notifications=bool,
        enable_online_push_notifications=bool,
        enable_sounds=bool,
        enable_stream_desktop_notifications=bool,
        enable_stream_email_notifications=bool,
        enable_stream_push_notifications=bool,
        enable_stream_audible_notifications=bool,
        message_content_in_email_notifications=bool,
        notification_sound=str,
        pm_content_in_desktop_notifications=bool,
        desktop_icon_count_display=int,
        realm_name_in_notifications=bool,
    )

    class Meta:
        # A user is identified by (realm, email); the same email
        # address may exist in multiple realms.
        unique_together = (('realm', 'email'),)
|
|
|
|
|
2017-03-17 10:07:22 +01:00
|
|
|
    @property
    def profile_data(self) -> ProfileData:
        """Return this user's custom profile fields as a list of dicts,
        one per field configured in the realm.  For fields the user has
        not filled out, value and rendered_value are None."""
        values = CustomProfileFieldValue.objects.filter(user_profile=self)
        user_data = {v.field_id: {"value": v.value, "rendered_value": v.rendered_value} for v in values}
        data = []  # type: ProfileData
        for field in custom_profile_fields_for_realm(self.realm_id):
            field_values = user_data.get(field.id, None)
            if field_values:
                value, rendered_value = field_values.get("value"), field_values.get("rendered_value")
            else:
                value, rendered_value = None, None
            field_type = field.field_type
            if value is not None:
                # Convert the stored string into the field type's
                # native Python representation.
                converter = field.FIELD_CONVERTERS[field_type]
                value = converter(value)

            field_data = field.as_dict()
            field_data['value'] = value
            field_data['rendered_value'] = rendered_value
            data.append(field_data)

        return data
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def can_admin_user(self, target_user: 'UserProfile') -> bool:
    """Returns whether this user has permission to modify target_user"""
    # Bot owners may always manage their own bots; otherwise only
    # realm administrators may, and only within their own realm.
    if target_user.bot_owner == self:
        return True
    return self.is_realm_admin and self.realm == target_user.realm
|
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def __str__(self) -> str:
    # Debug-friendly representation showing email and realm.
    return "<UserProfile: %s %s>" % (self.email, self.realm)
|
2013-03-08 19:53:00 +01:00
|
|
|
|
2019-10-05 02:35:07 +02:00
|
|
|
@property
def is_realm_admin(self) -> bool:
    # True when this user's role is organization administrator.
    return self.role == UserProfile.ROLE_REALM_ADMINISTRATOR
|
|
|
|
|
|
|
|
@property
def is_guest(self) -> bool:
    # True when this user's role is guest (restricted access).
    return self.role == UserProfile.ROLE_GUEST
|
|
|
|
|
2016-05-19 23:44:58 +02:00
|
|
|
@property
def is_incoming_webhook(self) -> bool:
    # True when this account is an incoming-webhook-type bot.
    return self.bot_type == UserProfile.INCOMING_WEBHOOK_BOT
|
|
|
|
|
2017-11-24 16:24:24 +01:00
|
|
|
@property
def allowed_bot_types(self) -> List[int]:
    """Bot types this user may create, given the realm's bot-creation
    policy and server settings."""
    bot_types = []
    # Generic bots are allowed for admins, or whenever the realm is
    # not limited to generic bots only.
    may_create_generic = (
        self.is_realm_admin or
        self.realm.bot_creation_policy != Realm.BOT_CREATION_LIMIT_GENERIC_BOTS
    )
    if may_create_generic:
        bot_types.append(UserProfile.DEFAULT_BOT)

    bot_types.extend([
        UserProfile.INCOMING_WEBHOOK_BOT,
        UserProfile.OUTGOING_WEBHOOK_BOT,
    ])

    if settings.EMBEDDED_BOTS_ENABLED:
        bot_types.append(UserProfile.EMBEDDED_BOT)
    return bot_types
|
|
|
|
|
2017-04-01 17:28:44 +02:00
|
|
|
@staticmethod
def emojiset_choices() -> List[Dict[str, str]]:
    """Return the selectable emoji sets as dicts with 'key' and 'text'."""
    choices = []
    for key, text in UserProfile.EMOJISET_CHOICES:
        choices.append(dict(key=key, text=text))
    return choices
|
2017-04-01 17:28:44 +02:00
|
|
|
|
2013-10-20 21:10:03 +02:00
|
|
|
@staticmethod
def emails_from_ids(user_ids: Sequence[int]) -> Dict[int, str]:
    """Map each given user ID to that user's email address."""
    id_email_rows = UserProfile.objects.filter(id__in=user_ids).values('id', 'email')
    result = {}
    for row in id_email_rows:
        result[row['id']] = row['email']
    return result
|
|
|
|
|
2019-09-23 22:38:13 +02:00
|
|
|
def email_address_is_realm_public(self) -> bool:
    """Whether this user's real email address is visible realm-wide.

    True when the realm exposes everyone's addresses, or when this
    account is a bot (bot addresses are always public).
    """
    realm_exposes_emails = (
        self.realm.email_address_visibility == Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE
    )
    if realm_exposes_emails:
        return True
    return bool(self.is_bot)
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def can_create_streams(self) -> bool:
    """Whether this user may create new streams in their realm.

    Realm administrators always may; guests never may; otherwise the
    realm's create_stream_policy decides, with the waiting-period
    policy additionally requiring the account to be older than the
    realm's waiting_period_threshold (in days).
    """
    if self.is_realm_admin:
        return True
    if self.realm.create_stream_policy == Realm.CREATE_STREAM_POLICY_ADMINS:
        return False
    if self.is_guest:
        return False

    if self.realm.create_stream_policy == Realm.CREATE_STREAM_POLICY_MEMBERS:
        return True

    # Consistency with can_subscribe_other_users: the only remaining
    # policy is the waiting-period one, so make that assumption
    # explicit rather than silently applying waiting-period logic to
    # any unexpected policy value.
    assert self.realm.create_stream_policy == Realm.CREATE_STREAM_POLICY_WAITING_PERIOD
    diff = (timezone_now() - self.date_joined).days
    return diff >= self.realm.waiting_period_threshold
|
2014-01-15 22:31:38 +01:00
|
|
|
|
2018-07-30 01:25:13 +02:00
|
|
|
def can_subscribe_other_users(self) -> bool:
    """Whether this user may subscribe other users to streams.

    Admins always may; guests never may; otherwise governed by the
    realm's invite_to_stream_policy, with the waiting-period policy
    requiring a sufficiently old account.
    """
    if self.is_realm_admin:
        return True
    if self.realm.invite_to_stream_policy == Realm.INVITE_TO_STREAM_POLICY_ADMINS:
        return False
    if self.is_guest:
        return False

    if self.realm.invite_to_stream_policy == Realm.INVITE_TO_STREAM_POLICY_MEMBERS:
        return True

    # The only remaining policy is the waiting-period one.
    assert self.realm.invite_to_stream_policy == Realm.INVITE_TO_STREAM_POLICY_WAITING_PERIOD
    days_since_joining = (timezone_now() - self.date_joined).days
    return days_since_joining >= self.realm.waiting_period_threshold
|
|
|
|
|
2018-04-20 20:59:22 +02:00
|
|
|
def can_access_public_streams(self) -> bool:
    # Guests can't browse public streams, and Zephyr-mirror realms
    # treat all streams as effectively private.
    return not (self.is_guest or self.realm.is_zephyr_mirror_realm)
|
2018-04-20 20:59:22 +02:00
|
|
|
|
2018-06-02 15:45:27 +02:00
|
|
|
def can_access_all_realm_members(self) -> bool:
    # Guests and users in Zephyr-mirror realms can only see a subset
    # of the realm's users.
    return not (self.realm.is_zephyr_mirror_realm or self.is_guest)
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def major_tos_version(self) -> int:
    """Return the major component of the Terms of Service version the
    user agreed to, or -1 if they have not agreed to any version."""
    if self.tos_version is None:
        return -1
    major, _, _ = self.tos_version.partition('.')
    return int(major)
|
|
|
|
|
2017-09-25 09:47:15 +02:00
|
|
|
class UserGroup(models.Model):
    # A named collection of users within a realm.
    name = models.CharField(max_length=100)
    # Membership is tracked through the explicit UserGroupMembership
    # join model below.
    members = models.ManyToManyField(UserProfile, through='UserGroupMembership')
    realm = models.ForeignKey(Realm, on_delete=CASCADE)
    description = models.TextField(default=u'') # type: str

    class Meta:
        # Group names are unique per realm, not globally.
        unique_together = (('realm', 'name'),)
|
|
|
|
|
|
|
|
class UserGroupMembership(models.Model):
    # Join table linking users to the user groups they belong to.
    user_group = models.ForeignKey(UserGroup, on_delete=CASCADE)
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)

    class Meta:
        # A user may appear at most once per group.
        unique_together = (('user_group', 'user_profile'),)
|
|
|
|
|
2017-11-29 02:49:11 +01:00
|
|
|
def receives_offline_push_notifications(user_profile: UserProfile) -> bool:
    """True if this (human) user has enabled push notifications for
    messages received while offline; bots never receive them."""
    if user_profile.is_bot:
        return False
    return user_profile.enable_offline_push_notifications
|
|
|
|
|
|
|
|
def receives_offline_email_notifications(user_profile: UserProfile) -> bool:
    """True if this (human) user has enabled email notifications for
    messages received while offline; bots never receive them."""
    if user_profile.is_bot:
        return False
    return user_profile.enable_offline_email_notifications
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def receives_online_notifications(user_profile: UserProfile) -> bool:
    """True if this (human) user has enabled push notifications even
    while online; bots never receive them."""
    if user_profile.is_bot:
        return False
    return user_profile.enable_online_push_notifications
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def receives_stream_notifications(user_profile: UserProfile) -> bool:
    """True if this (human) user has enabled push notifications for
    stream messages; bots never receive them."""
    if user_profile.is_bot:
        return False
    return user_profile.enable_stream_push_notifications
|
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def remote_user_to_email(remote_user: str) -> str:
    """Convert an SSO remote username into an email address, appending
    SSO_APPEND_DOMAIN when that setting is configured."""
    if settings.SSO_APPEND_DOMAIN is None:
        return remote_user
    return "%s@%s" % (remote_user, settings.SSO_APPEND_DOMAIN)
|
|
|
|
|
2016-03-31 03:30:33 +02:00
|
|
|
# Make sure we flush the UserProfile object from our remote cache
# whenever we save it.
post_save.connect(flush_user_profile, sender=UserProfile)
|
2013-03-15 21:17:32 +01:00
|
|
|
|
2012-09-28 22:47:05 +02:00
|
|
|
class PreregistrationUser(models.Model):
    # Data on a partially created user, before the completion of
    # registration.  This is used in at least three major code paths:
    # * Realm creation, in which case realm is None.
    #
    # * Invitations, in which case referred_by will always be set.
    #
    # * Social authentication signup, where it's used to store data
    #   from the authentication step and pass it to the registration
    #   form.

    email = models.EmailField() # type: str
    # The user who sent the invitation, when this record came from an
    # invitation flow.
    referred_by = models.ForeignKey(UserProfile, null=True, on_delete=CASCADE) # type: Optional[UserProfile]
    # Streams the new user should be subscribed to upon registration.
    streams = models.ManyToManyField('Stream') # type: Manager
    invited_at = models.DateTimeField(auto_now=True) # type: datetime.datetime
    realm_creation = models.BooleanField(default=False)
    # Indicates whether the user needs a password.  Users who were
    # created via SSO style auth (e.g. GitHub/Google) generally do not.
    password_required = models.BooleanField(default=True)

    # status: whether an object has been confirmed.
    # if confirmed, set to confirmation.settings.STATUS_ACTIVE
    status = models.IntegerField(default=0) # type: int

    # The realm should only ever be None for PreregistrationUser
    # objects created as part of realm creation.
    realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE) # type: Optional[Realm]

    # Changes to INVITED_AS should also be reflected in
    # settings_invites.invited_as_values in
    # static/js/settings_invites.js
    INVITE_AS = dict(
        MEMBER = 1,
        REALM_ADMIN = 2,
        GUEST_USER = 3,
    )
    # Role the invited user will be granted when they register.
    invited_as = models.PositiveSmallIntegerField(default=INVITE_AS['MEMBER']) # type: int
|
2018-12-28 12:05:44 +01:00
|
|
|
|
2017-08-10 22:27:57 +02:00
|
|
|
class MultiuseInvite(models.Model):
    # An invitation link that can be used by any number of users to
    # join the associated realm and streams.
    # NOTE(review): the original annotation comment here was the
    # malformed "# Optional[UserProfile]"; the field has no null=True,
    # so the correct type is UserProfile.
    referred_by = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
    streams = models.ManyToManyField('Stream') # type: Manager
    realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm
    # Role granted to users who register via this invite.
    invited_as = models.PositiveSmallIntegerField(default=PreregistrationUser.INVITE_AS['MEMBER']) # type: int
|
2017-08-10 22:27:57 +02:00
|
|
|
|
2017-01-20 12:27:38 +01:00
|
|
|
class EmailChangeStatus(models.Model):
    # Tracks a pending request to change a user's email address.
    new_email = models.EmailField() # type: str
    old_email = models.EmailField() # type: str
    updated_at = models.DateTimeField(auto_now=True) # type: datetime.datetime
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile

    # status: whether an object has been confirmed.
    # if confirmed, set to confirmation.settings.STATUS_ACTIVE
    status = models.IntegerField(default=0) # type: int

    realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm
|
2017-01-20 12:27:38 +01:00
|
|
|
|
2017-04-19 07:22:54 +02:00
|
|
|
class AbstractPushDeviceToken(models.Model):
    # Supported mobile push notification services.
    APNS = 1
    GCM = 2

    KINDS = (
        (APNS, 'apns'),
        (GCM, 'gcm'),
    )

    kind = models.PositiveSmallIntegerField(choices=KINDS) # type: int

    # The token is a unique device-specific token that is
    # sent to us from each device:
    # - APNS token if kind == APNS
    # - GCM registration id if kind == GCM
    # NOTE(review): CharField values are str at runtime; the previous
    # "# type: bytes" annotation here was incorrect.
    token = models.CharField(max_length=4096, db_index=True) # type: str

    # TODO: last_updated should be renamed date_created, since it is
    # no longer maintained as a last_updated value.
    last_updated = models.DateTimeField(auto_now=True) # type: datetime.datetime

    # [optional] Contains the app id of the device if it is an iOS device
    ios_app_id = models.TextField(null=True) # type: Optional[str]

    class Meta:
        abstract = True
|
|
|
|
|
|
|
|
class PushDeviceToken(AbstractPushDeviceToken):
    # The user whose device this is
    user = models.ForeignKey(UserProfile, db_index=True, on_delete=CASCADE) # type: UserProfile

    class Meta:
        # A device token is registered at most once per user and service.
        unique_together = ("user", "kind", "token")
|
2017-04-19 07:22:54 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def generate_email_token_for_stream() -> str:
    # Random 32-character token; used as the default for
    # Stream.email_token (the e-mail forwarder address).
    return generate_random_token(32)
|
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class Stream(models.Model):
    MAX_NAME_LENGTH = 60
    MAX_DESCRIPTION_LENGTH = 1024

    name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True) # type: str
    realm = models.ForeignKey(Realm, db_index=True, on_delete=CASCADE) # type: Realm
    date_created = models.DateTimeField(default=timezone_now) # type: datetime.datetime
    deactivated = models.BooleanField(default=False) # type: bool
    description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH, default=u'') # type: str
    rendered_description = models.TextField(default=u'') # type: str

    invite_only = models.NullBooleanField(default=False) # type: Optional[bool]
    history_public_to_subscribers = models.BooleanField(default=False) # type: bool

    # Whether this stream's content should be published by the web-public archive features
    is_web_public = models.BooleanField(default=False) # type: bool

    # Whether only organization administrators can send messages to this stream
    is_announcement_only = models.BooleanField(default=False) # type: bool

    # The unique thing about Zephyr public streams is that we never list their
    # users. We may try to generalize this concept later, but for now
    # we just use a concrete field. (Zephyr public streams aren't exactly like
    # invite-only streams--while both are private in terms of listing users,
    # for Zephyr we don't even list users to stream members, yet membership
    # is more public in the sense that you don't need a Zulip invite to join.
    # This field is populated directly from UserProfile.is_zephyr_mirror_realm,
    # and the reason for denormalizing field is performance.
    is_in_zephyr_realm = models.BooleanField(default=False) # type: bool

    # Used by the e-mail forwarder. The e-mail RFC specifies a maximum
    # e-mail length of 254, and our max stream length is 60 (see
    # MAX_NAME_LENGTH above), so we have plenty of room for the token.
    email_token = models.CharField(
        max_length=32, default=generate_email_token_for_stream, unique=True) # type: str

    # For old messages being automatically deleted
    message_retention_days = models.IntegerField(null=True, default=None) # type: Optional[int]

    # The very first message ID in the stream.  Used to help clients
    # determine whether they might need to display "more topics" for a
    # stream based on what messages they have cached.
    first_message_id = models.IntegerField(null=True, db_index=True) # type: Optional[int]

    def __str__(self) -> str:
        # Debug-friendly representation showing the stream name.
        return "<Stream: %s>" % (self.name,)

    def is_public(self) -> bool:
        # All streams are private in Zephyr mirroring realms.
        return not self.invite_only and not self.is_in_zephyr_realm

    def is_history_realm_public(self) -> bool:
        # Whether the stream's message history is visible realm-wide.
        return self.is_public()

    def is_history_public_to_subscribers(self) -> bool:
        return self.history_public_to_subscribers

    class Meta:
        # Stream names are unique per realm (case-sensitivity handled
        # at lookup time via name__iexact queries).
        unique_together = ("name", "realm")

    # This is stream information that is sent to clients
    def to_dict(self) -> Dict[str, Any]:
        return dict(
            name=self.name,
            stream_id=self.id,
            description=self.description,
            rendered_description=self.rendered_description,
            invite_only=self.invite_only,
            is_web_public=self.is_web_public,
            is_announcement_only=self.is_announcement_only,
            history_public_to_subscribers=self.history_public_to_subscribers,
            first_message_id=self.first_message_id,
        )
|
2014-03-02 06:46:54 +01:00
|
|
|
|
2014-01-28 20:49:55 +01:00
|
|
|
# Keep the stream cache in sync on both save and delete.
post_save.connect(flush_stream, sender=Stream)
post_delete.connect(flush_stream, sender=Stream)
|
2014-01-15 22:48:27 +01:00
|
|
|
|
2016-04-01 08:42:38 +02:00
|
|
|
# The Recipient table is used to map Messages to the set of users who
# received the message.  It is implemented as a set of triples (id,
# type_id, type). We have 3 types of recipients: Huddles (for group
# private messages), UserProfiles (for 1:1 private messages), and
# Streams. The recipient table maps a globally unique recipient id
# (used by the Message table) to the type-specific unique id (the
# stream id, user_profile id, or huddle id).
class Recipient(models.Model):
    type_id = models.IntegerField(db_index=True) # type: int
    type = models.PositiveSmallIntegerField(db_index=True) # type: int
    # Valid types are {personal, stream, huddle}
    PERSONAL = 1
    STREAM = 2
    HUDDLE = 3

    class Meta:
        # One Recipient row per (type, type_id) pair.
        unique_together = ("type", "type_id")

    # N.B. If we used Django's choice=... we would get this for free (kinda)
    _type_names = {
        PERSONAL: 'personal',
        STREAM: 'stream',
        HUDDLE: 'huddle'}

    def type_name(self) -> str:
        # Raises KeyError if invalid
        return self._type_names[self.type]

    def __str__(self) -> str:
        # Debug-friendly representation including the resolved
        # display name of the recipient.
        display_recipient = get_display_recipient(self)
        return "<Recipient: %s (%d, %s)>" % (display_recipient, self.type_id, self.type)
2012-08-29 16:15:06 +02:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class MutedTopic(models.Model):
    # Records that a user has muted a particular topic in a stream.
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
    stream = models.ForeignKey(Stream, on_delete=CASCADE)
    recipient = models.ForeignKey(Recipient, on_delete=CASCADE)
    topic_name = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH)

    class Meta:
        # A user mutes a given (stream, topic) at most once.
        unique_together = ('user_profile', 'stream', 'topic_name')

    def __str__(self) -> str:
        return "<MutedTopic: (%s, %s, %s)>" % (self.user_profile.email, self.stream.name, self.topic_name)
|
2017-08-30 02:19:34 +02:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class Client(models.Model):
    # A name identifying the client software that sent a message or
    # API request.
    name = models.CharField(max_length=30, db_index=True, unique=True) # type: str

    def __str__(self) -> str:
        return "<Client: %s>" % (self.name,)
|
2016-04-21 00:26:45 +02:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
# Per-process memoization dict sitting in front of the remote cache;
# keyed by KEY_PREFIX + client name.
get_client_cache = {} # type: Dict[str, Client]

def get_client(name: str) -> Client:
    """Fetch (creating if needed) the Client with this name, using an
    in-process dict cache in front of the remote cache."""
    # Accessing KEY_PREFIX through the module is necessary
    # because we need the updated value of the variable.
    cache_name = cache.KEY_PREFIX + name
    if cache_name not in get_client_cache:
        result = get_client_remote_cache(name)
        get_client_cache[cache_name] = result
    return get_client_cache[cache_name]
|
2013-11-20 22:16:48 +01:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_client_cache_key(name: str) -> str:
    # Hash the client name so arbitrary names are safe as cache keys.
    return u'get_client:%s' % (make_safe_digest(name),)
|
2013-03-26 17:47:52 +01:00
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(get_client_cache_key, timeout=3600*24*7)
def get_client_remote_cache(name: str) -> Client:
    # get_or_create so previously unseen client names are registered
    # on first use; result is cached for a week.
    (client, _) = Client.objects.get_or_create(name=name)
    return client
|
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(get_stream_cache_key, timeout=3600*24*7)
def get_realm_stream(stream_name: str, realm_id: int) -> Stream:
    # Case-insensitive lookup (whitespace-stripped); raises
    # Stream.DoesNotExist when no such stream exists in the realm.
    return Stream.objects.select_related("realm").get(
        name__iexact=stream_name.strip(), realm_id=realm_id)
|
2013-03-19 13:05:19 +01:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def stream_name_in_use(stream_name: str, realm_id: int) -> bool:
    """Case-insensitively check whether a stream with this name exists
    in the given realm (leading/trailing whitespace ignored)."""
    candidates = Stream.objects.filter(
        name__iexact=stream_name.strip(),
        realm_id=realm_id,
    )
    return candidates.exists()
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_active_streams(realm: Optional[Realm]) -> QuerySet:
    # TODO: Change return type to QuerySet[Stream]
    # NOTE: Return value is used as a QuerySet, so cannot currently be Sequence[QuerySet]
    """
    Return all streams (including invite-only streams) that have not been deactivated.
    """
    return Stream.objects.filter(realm=realm, deactivated=False)
|
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_stream(stream_name: str, realm: Realm) -> Stream:
    '''
    Callers that don't have a Realm object already available should use
    get_realm_stream directly, to avoid unnecessarily fetching the
    Realm object.
    '''
    return get_realm_stream(stream_name, realm.id)
|
2013-01-17 22:16:39 +01:00
|
|
|
|
2019-01-28 05:28:29 +01:00
|
|
|
def get_stream_by_id_in_realm(stream_id: int, realm: Realm) -> Stream:
    # Raises Stream.DoesNotExist if the stream is not in this realm.
    return Stream.objects.select_related().get(id=stream_id, realm=realm)
|
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def bulk_get_streams(realm: Realm, stream_names: STREAM_NAMES) -> Dict[str, Any]:
    # Fetch multiple streams by name in one cached bulk operation;
    # the returned dict is keyed by lowercased stream name.

    def fetch_streams_by_name(stream_names: List[str]) -> Sequence[Stream]:
        #
        # This should be just
        #
        # Stream.objects.select_related("realm").filter(name__iexact__in=stream_names,
        #                                               realm_id=realm_id)
        #
        # But chaining __in and __iexact doesn't work with Django's
        # ORM, so we have the following hack to construct the relevant where clause
        upper_list = ", ".join(["UPPER(%s)"] * len(stream_names))
        where_clause = "UPPER(zerver_stream.name::text) IN (%s)" % (upper_list,)
        return get_active_streams(realm.id).select_related("realm").extra(
            where=[where_clause],
            params=stream_names)

    def stream_name_to_cache_key(stream_name: str) -> str:
        # Closure over realm: cache keys are realm-scoped.
        return get_stream_cache_key(stream_name, realm.id)

    def stream_to_lower_name(stream: Stream) -> str:
        return stream.name.lower()

    return generic_bulk_cached_fetch(stream_name_to_cache_key,
                                     fetch_streams_by_name,
                                     [stream_name.lower() for stream_name in stream_names],
                                     id_fetcher=stream_to_lower_name)
|
2013-06-27 22:52:05 +02:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_recipient_cache_key(type: int, type_id: int) -> str:
    # Include KEY_PREFIX so entries are namespaced per cache generation.
    return u"%s:get_recipient:%s:%s" % (cache.KEY_PREFIX, type, type_id,)
|
2013-03-26 17:10:44 +01:00
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(get_recipient_cache_key, timeout=3600*24*7)
def get_recipient(type: int, type_id: int) -> Recipient:
    # Cached fetch of the Recipient row for (type, type_id).
    return Recipient.objects.get(type_id=type_id, type=type)
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_stream_recipient(stream_id: int) -> Recipient:
    # Convenience wrapper around the cached get_recipient.
    return get_recipient(Recipient.STREAM, stream_id)
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_personal_recipient(user_profile_id: int) -> Recipient:
    # Convenience wrapper around the cached get_recipient.
    return get_recipient(Recipient.PERSONAL, user_profile_id)
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_huddle_recipient(user_profile_ids: Set[int]) -> Recipient:
    # The caller should ensure that user_profile_ids includes
    # the sender.  Note that get_huddle hits the cache, and then
    # we hit another cache to get the recipient.  We may want to
    # unify our caching strategy here.
    huddle = get_huddle(list(user_profile_ids))
    return get_recipient(Recipient.HUDDLE, huddle.id)
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_huddle_user_ids(recipient: Recipient) -> List[int]:
    """Return the user IDs in a huddle, ordered by user_profile_id."""
    assert(recipient.type == Recipient.HUDDLE)

    huddle_subs = Subscription.objects.filter(recipient=recipient)
    return huddle_subs.order_by('user_profile_id').values_list('user_profile_id', flat=True)
|
|
|
|
|
2019-08-13 23:05:47 +02:00
|
|
|
def bulk_get_huddle_user_ids(recipients: List[Recipient]) -> Dict[int, List[int]]:
    """
    Takes a list of huddle-type recipients, returns a dict
    mapping recipient id to list of user ids in the huddle.
    """
    assert all(recipient.type == Recipient.HUDDLE for recipient in recipients)
    if not recipients:
        return {}

    subscriptions = Subscription.objects.filter(
        recipient__in=recipients
    ).order_by('user_profile_id')

    # Group subscriptions by recipient in a single pass rather than
    # re-scanning the whole subscription list once per recipient,
    # which was O(len(recipients) * len(subscriptions)).  Pre-seeding
    # with empty lists preserves the previous behavior of returning an
    # entry for every requested recipient, and iterating the ordered
    # queryset keeps each list sorted by user_profile_id.
    result_dict = {recipient.id: [] for recipient in recipients}  # type: Dict[int, List[int]]
    for subscription in subscriptions:
        result_dict[subscription.recipient_id].append(subscription.user_profile_id)

    return result_dict
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def bulk_get_recipients(type: int, type_ids: List[int]) -> Dict[int, Any]:
    # Cached bulk lookup of Recipient rows of a single type; the
    # returned dict is keyed by type_id.
    def cache_key_function(type_id: int) -> str:
        return get_recipient_cache_key(type, type_id)

    def query_function(type_ids: List[int]) -> Sequence[Recipient]:
        # TODO: Change return type to QuerySet[Recipient]
        return Recipient.objects.filter(type=type, type_id__in=type_ids)

    def recipient_to_type_id(recipient: Recipient) -> int:
        return recipient.type_id

    return generic_bulk_cached_fetch(cache_key_function, query_function, type_ids,
                                     id_fetcher=recipient_to_type_id)
|
2013-06-25 19:26:58 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_stream_recipients(stream_ids: List[int]) -> List[Recipient]:
    '''
    Fetch the stream-type Recipient rows for these stream ids.

    We could call bulk_get_recipients(...).values() here, but it actually
    leads to an extra query in test mode.
    '''
    return Recipient.objects.filter(type=Recipient.STREAM, type_id__in=stream_ids)
|
2016-12-06 07:19:34 +01:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class AbstractMessage(models.Model):
    """Fields shared by Message and ArchivedMessage (see subclasses below)."""
    sender = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    # Who the message was sent to (stream, huddle, or personal).
    recipient = models.ForeignKey(Recipient, on_delete=CASCADE)  # type: Recipient

    # The message's topic.
    #
    # Early versions of Zulip called this concept a "subject", as in an email
    # "subject line", before changing to "topic" in 2013 (commit dac5a46fa).
    # UI and user documentation now consistently say "topic". New APIs and
    # new code should generally also say "topic".
    #
    # See also the `topic_name` method on `Message`.
    subject = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH, db_index=True)  # type: str

    # The raw message body as the sender wrote it.
    content = models.TextField()  # type: str
    # Cached rendering of `content`; rendered_content_version records which
    # renderer version produced it (see Message.need_to_render_content).
    rendered_content = models.TextField(null=True)  # type: Optional[str]
    rendered_content_version = models.IntegerField(null=True)  # type: Optional[int]

    date_sent = models.DateTimeField('date sent', db_index=True)  # type: datetime.datetime
    sending_client = models.ForeignKey(Client, on_delete=CASCADE)  # type: Client

    # None if the message has never been edited.
    last_edit_time = models.DateTimeField(null=True)  # type: Optional[datetime.datetime]

    # A JSON-encoded list of objects describing any past edits to this
    # message, oldest first.
    edit_history = models.TextField(null=True)  # type: Optional[str]

    # Denormalized content properties, recomputed from `content` by
    # Message.update_calculated_fields (via the pre_save_message signal).
    has_attachment = models.BooleanField(default=False, db_index=True)  # type: bool
    has_image = models.BooleanField(default=False, db_index=True)  # type: bool
    has_link = models.BooleanField(default=False, db_index=True)  # type: bool

    class Meta:
        abstract = True

    def __str__(self) -> str:
        display_recipient = get_display_recipient(self.recipient)
        return "<%s: %s / %s / %s>" % (self.__class__.__name__, display_recipient,
                                       self.subject, self.sender)
|
2017-05-17 05:59:50 +02:00
|
|
|
|
2019-06-18 19:54:09 +02:00
|
|
|
class ArchiveTransaction(models.Model):
    """Groups rows archived together in one run, so they can be restored
    (or permanently deleted) as a unit; see `restored` below and
    ArchivedMessage.archive_transaction.
    """
    timestamp = models.DateTimeField(default=timezone_now, db_index=True)  # type: datetime.datetime
    # Marks if the data archived in this transaction has been restored:
    restored = models.BooleanField(default=False, db_index=True)  # type: bool

    type = models.PositiveSmallIntegerField(db_index=True)  # type: int
    # Valid types:
    RETENTION_POLICY_BASED = 1  # Archiving was executed due to automated retention policies
    MANUAL = 2  # Archiving was run manually, via move_messages_to_archive function

    # ForeignKey to the realm with which objects archived in this transaction are associated.
    # If type is set to MANUAL, this should be null.
    realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE)  # type: Optional[Realm]

    def __str__(self) -> str:
        return "ArchiveTransaction id: {id}, type: {type}, realm: {realm}, timestamp: {timestamp}".format(
            id=self.id,
            type="MANUAL" if self.type == self.MANUAL else "RETENTION_POLICY_BASED",
            realm=self.realm.string_id if self.realm else None,
            timestamp=self.timestamp
        )
|
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
class ArchivedMessage(AbstractMessage):
    """Used as a temporary holding place for deleted messages before they
    are permanently deleted.  This is an important part of a robust
    'message retention' feature.
    """
    # The archiving run this row belongs to; lets a whole batch be
    # restored or purged together.
    archive_transaction = models.ForeignKey(ArchiveTransaction, on_delete=CASCADE)  # type: ArchiveTransaction
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class Message(AbstractMessage):
    """A live (non-archived) Zulip message; fields live on AbstractMessage."""

    def topic_name(self) -> str:
        """
        Please start using this helper to facilitate an
        eventual switch over to a separate topic table.
        """
        return self.subject

    def set_topic_name(self, topic_name: str) -> None:
        # Counterpart of topic_name(); hides the legacy `subject` field name.
        self.subject = topic_name

    def is_stream_message(self) -> bool:
        '''
        Find out whether a message is a stream message by
        looking up its recipient.type.  TODO: Make this
        an easier operation by denormalizing the message
        type onto Message, either explicity (message.type)
        or implicitly (message.stream_id is not None).
        '''
        return self.recipient.type == Recipient.STREAM

    def get_realm(self) -> Realm:
        # A message's realm is defined as its sender's realm.
        return self.sender.realm

    def save_rendered_content(self) -> None:
        # Persist only the rendering-related columns.
        self.save(update_fields=["rendered_content", "rendered_content_version"])

    @staticmethod
    def need_to_render_content(rendered_content: Optional[str],
                               rendered_content_version: Optional[int],
                               bugdown_version: int) -> bool:
        """True if the cached rendering is missing or produced by an
        older renderer than bugdown_version, so content must be re-rendered."""
        return (rendered_content is None or
                rendered_content_version is None or
                rendered_content_version < bugdown_version)

    def to_log_dict(self) -> Dict[str, Any]:
        """Return a plain-dict snapshot of this message plus sender/client
        metadata (ids, names, recipient display, topic, content, timestamp)."""
        return dict(
            id = self.id,
            sender_id = self.sender.id,
            sender_email = self.sender.email,
            sender_realm_str = self.sender.realm.string_id,
            sender_full_name = self.sender.full_name,
            sender_short_name = self.sender.short_name,
            sending_client = self.sending_client.name,
            type = self.recipient.type_name(),
            recipient = get_display_recipient(self.recipient),
            subject = self.topic_name(),
            content = self.content,
            timestamp = datetime_to_timestamp(self.date_sent))

    def sent_by_human(self) -> bool:
        """Used to determine whether a message was sent by a full Zulip UI
        style client (and thus whether the message should be treated
        as sent by a human and automatically marked as read for the
        sender).  The purpose of this distinction is to ensure that
        message sent to the user by e.g. a Google Calendar integration
        using the user's own API key don't get marked as read
        automatically.
        """
        sending_client = self.sending_client.name.lower()

        return (sending_client in ('zulipandroid', 'zulipios', 'zulipdesktop',
                                   'zulipmobile', 'zulipelectron', 'zulipterminal', 'snipe',
                                   'website', 'ios', 'android')) or (
                                       'desktop app' in sending_client)

    @staticmethod
    def content_has_attachment(content: str) -> Optional[Match]:
        # re.search returns None when there is no match; callers wrap
        # the result in bool() (see update_calculated_fields).
        return re.search(r'[/\-]user[\-_]uploads[/\.-]', content)

    @staticmethod
    def content_has_image(content: str) -> bool:
        # An "image" is a user-uploads link ending in a known image extension.
        return bool(re.search(r'[/\-]user[\-_]uploads[/\.-]\S+\.(bmp|gif|jpg|jpeg|png|webp)',
                              content, re.IGNORECASE))

    @staticmethod
    def content_has_link(content: str) -> bool:
        # file:/// links only count when the server enables them.
        return ('http://' in content or
                'https://' in content or
                '/user_uploads' in content or
                (settings.ENABLE_FILE_LINKS and 'file:///' in content) or
                'bitcoin:' in content)

    @staticmethod
    def is_status_message(content: str, rendered_content: str) -> bool:
        """
        Returns True if content and rendered_content are from 'me_message'
        """
        if content.startswith('/me '):
            if rendered_content.startswith('<p>') and rendered_content.endswith('</p>'):
                return True
        return False

    def update_calculated_fields(self) -> None:
        """Recompute the denormalized has_attachment/has_image/has_link
        flags from the current content (called from pre_save_message)."""
        # TODO: rendered_content could also be considered a calculated field
        content = self.content
        self.has_attachment = bool(Message.content_has_attachment(content))
        self.has_image = bool(Message.content_has_image(content))
        self.has_link = bool(Message.content_has_link(content))
|
|
|
@receiver(pre_save, sender=Message)
def pre_save_message(sender: Any, **kwargs: Any) -> None:
    """Signal handler: recompute the message's derived content flags
    before saving, whenever the content may have changed."""
    update_fields = kwargs['update_fields']
    if update_fields is not None and "content" not in update_fields:
        # A targeted save that doesn't touch content; nothing to recompute.
        return
    kwargs['instance'].update_calculated_fields()
|
|
|
|
|
2018-03-15 00:02:39 +01:00
|
|
|
def get_context_for_message(message: Message) -> Sequence[Message]:
    """Up to 10 earlier messages in the same conversation (same recipient
    and topic) from the 15 minutes before this message, newest first."""
    # TODO: Change return type to QuerySet[Message]
    cutoff = message.date_sent - timedelta(minutes=15)
    context = Message.objects.filter(
        recipient_id=message.recipient_id,
        subject=message.subject,
        id__lt=message.id,
        date_sent__gt=cutoff,
    )
    return context.order_by('-id')[:10]
|
2014-07-15 21:03:51 +02:00
|
|
|
|
2016-07-08 02:25:55 +02:00
|
|
|
# Run the flush_message cache handler whenever a Message row is saved.
post_save.connect(flush_message, sender=Message)
|
2014-07-15 21:03:51 +02:00
|
|
|
|
2019-05-29 16:01:34 +02:00
|
|
|
class AbstractSubMessage(models.Model):
    # We can send little text messages that are associated with a regular
    # Zulip message.  These can be used for experimental widgets like embedded
    # games, surveys, mini threads, etc.  These are designed to be pretty
    # generic in purpose.

    sender = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    # NOTE(review): msg_type presumably tags which widget/feature this
    # submessage belongs to — confirm against the widget code that reads it.
    msg_type = models.TextField()  # type: str
    content = models.TextField()  # type: str

    class Meta:
        abstract = True
|
|
|
|
|
|
|
|
class SubMessage(AbstractSubMessage):
    message = models.ForeignKey(Message, on_delete=CASCADE)  # type: Message

    @staticmethod
    def get_raw_db_rows(needed_ids: List[int]) -> List[Dict[str, Any]]:
        """Return submessage rows for the given message ids as plain
        dicts, ordered by (message_id, id)."""
        columns = ['id', 'message_id', 'sender_id', 'msg_type', 'content']
        rows = SubMessage.objects.filter(message_id__in=needed_ids)
        return list(rows.values(*columns).order_by('message_id', 'id'))
|
|
|
|
|
2019-05-29 16:01:34 +02:00
|
|
|
class ArchivedSubMessage(AbstractSubMessage):
    # Archived counterpart of SubMessage; points at the archived message.
    message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE)  # type: ArchivedMessage
|
|
|
|
|
2018-02-11 14:09:17 +01:00
|
|
|
# Run the flush_submessage cache handler whenever a SubMessage row is saved.
post_save.connect(flush_submessage, sender=SubMessage)
|
|
|
|
|
2019-05-29 15:52:57 +02:00
|
|
|
class AbstractReaction(models.Model):
    """For emoji reactions to messages (and potentially future reaction types).

    Emoji are surprisingly complicated to implement correctly.  For details
    on how this subsystem works, see:
      https://zulip.readthedocs.io/en/latest/subsystems/emoji.html
    """
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile

    # The user-facing name for an emoji reaction.  With emoji aliases,
    # there may be multiple accepted names for a given emoji; this
    # field encodes which one the user selected.
    emoji_name = models.TextField()  # type: str

    # The kinds of emoji a reaction can use (see REACTION_TYPES choices).
    UNICODE_EMOJI = u'unicode_emoji'
    REALM_EMOJI = u'realm_emoji'
    ZULIP_EXTRA_EMOJI = u'zulip_extra_emoji'
    REACTION_TYPES = ((UNICODE_EMOJI, _("Unicode emoji")),
                      (REALM_EMOJI, _("Custom emoji")),
                      (ZULIP_EXTRA_EMOJI, _("Zulip extra emoji")))
    reaction_type = models.CharField(default=UNICODE_EMOJI, choices=REACTION_TYPES, max_length=30)  # type: str

    # A string that uniquely identifies a particular emoji.  The format varies
    # by type:
    #
    # * For Unicode emoji, a dash-separated hex encoding of the sequence of
    #   Unicode codepoints that define this emoji in the Unicode
    #   specification.  For examples, see "non_qualified" or "unified" in the
    #   following data, with "non_qualified" taking precedence when both present:
    #     https://raw.githubusercontent.com/iamcal/emoji-data/master/emoji_pretty.json
    #
    # * For realm emoji (aka user uploaded custom emoji), the ID
    #   (in ASCII decimal) of the RealmEmoji object.
    #
    # * For "Zulip extra emoji" (like :zulip:), the filename of the emoji.
    emoji_code = models.TextField()  # type: str

    class Meta:
        abstract = True
        # `message` is declared on the concrete subclasses
        # (Reaction/ArchivedReaction), but participates in uniqueness here.
        unique_together = ("user_profile", "message", "emoji_name")
|
|
|
|
|
2019-05-29 15:52:57 +02:00
|
|
|
class Reaction(AbstractReaction):
    # The message being reacted to.
    message = models.ForeignKey(Message, on_delete=CASCADE)  # type: Message

    @staticmethod
    def get_raw_db_rows(needed_ids: List[int]) -> List[Dict[str, Any]]:
        # Fetch reaction rows (plus sender identity fields) for these
        # message ids.
        #
        # NOTE(review): despite the List annotation, this returns a lazy
        # values() queryset (contrast SubMessage.get_raw_db_rows, which
        # wraps in list()); callers appear to rely only on iteration.
        fields = ['message_id', 'emoji_name', 'emoji_code', 'reaction_type',
                  'user_profile__email', 'user_profile__id', 'user_profile__full_name']
        return Reaction.objects.filter(message_id__in=needed_ids).values(*fields)

    def __str__(self) -> str:
        return "%s / %s / %s" % (self.user_profile.email, self.message.id, self.emoji_name)
|
|
|
|
|
2019-05-29 15:52:57 +02:00
|
|
|
class ArchivedReaction(AbstractReaction):
    # Archived counterpart of Reaction; points at the archived message.
    message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE)  # type: ArchivedMessage
|
|
|
|
|
2017-06-08 02:04:09 +02:00
|
|
|
# Whenever a message is sent, for each user subscribed to the
|
2016-04-01 08:42:38 +02:00
|
|
|
# corresponding Recipient object, we add a row to the UserMessage
|
2017-06-08 02:04:09 +02:00
|
|
|
# table indicating that that user received that message. This table
|
2016-04-01 08:42:38 +02:00
|
|
|
# allows us to quickly query any user's last 1000 messages to generate
|
|
|
|
# the home view.
|
|
|
|
#
|
|
|
|
# Additionally, the flags field stores metadata like whether the user
|
2017-06-08 02:04:09 +02:00
|
|
|
# has read the message, starred or collapsed the message, was
|
|
|
|
# mentioned in the message, etc.
|
2016-04-01 08:42:38 +02:00
|
|
|
#
|
|
|
|
# UserMessage is the largest table in a Zulip installation, even
|
|
|
|
# though each row is only 4 integers.
|
2017-10-27 08:42:27 +02:00
|
|
|
class AbstractUserMessage(models.Model):
    """Per-(user, message) state: delivery record plus per-user flags.

    See the comment block above for why this table exists and why it is
    the largest table in a Zulip installation.
    """
    # BigAutoField because this table can exceed 2**31 rows.
    id = models.BigAutoField(primary_key=True)  # type: int

    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    # The order here is important! It's the order of fields in the bitfield.
    ALL_FLAGS = [
        'read',
        'starred',
        'collapsed',
        'mentioned',
        'wildcard_mentioned',
        # These next 4 flags are from features that have since been removed.
        'summarize_in_home',
        'summarize_in_stream',
        'force_expand',
        'force_collapse',
        # Whether the message contains any of the user's alert words.
        'has_alert_word',
        # The historical flag is used to mark messages which the user
        # did not receive when they were sent, but later added to
        # their history via e.g. starring the message.  This is
        # important accounting for the "Subscribed to stream" dividers.
        'historical',
        # Whether the message is a private message; this flag is a
        # denormalization of message.recipient.type to support an
        # efficient index on UserMessage for a user's private messages.
        'is_private',
        # Whether we've sent a push notification to the user's mobile
        # devices for this message that has not been revoked.
        'active_mobile_push_notification',
    ]
    # Certain flags are used only for internal accounting within the
    # Zulip backend, and don't make sense to expose to the API.
    NON_API_FLAGS = {"is_private", "active_mobile_push_notification"}
    # Certain additional flags are just set once when the UserMessage
    # row is created.
    NON_EDITABLE_FLAGS = {
        # These flags are bookkeeping and don't make sense to edit.
        "has_alert_word",
        "mentioned",
        "wildcard_mentioned",
        "historical",
        # Unused flags can't be edited.
        "force_expand",
        "force_collapse",
        "summarize_in_home",
        "summarize_in_stream",
    }
    flags = BitField(flags=ALL_FLAGS, default=0)  # type: BitHandler

    class Meta:
        abstract = True
        unique_together = ("user_profile", "message")

    @staticmethod
    def where_unread() -> str:
        # Use this for Django ORM queries to access unread message.
        # This custom SQL plays nice with our partial indexes.  Grep
        # the code for example usage.
        #
        # Bit 1 is 'read' (index 0 in ALL_FLAGS).
        return 'flags & 1 = 0'

    @staticmethod
    def where_starred() -> str:
        # Use this for Django ORM queries to access starred messages.
        # This custom SQL plays nice with our partial indexes.  Grep
        # the code for example usage.
        #
        # The key detail is that e.g.
        # UserMessage.objects.filter(user_profile=user_profile, flags=UserMessage.flags.starred)
        # will generate a query involving `flags & 2 = 2`, which doesn't match our index.
        #
        # Bit 2 is 'starred' (index 1 in ALL_FLAGS).
        return 'flags & 2 <> 0'

    @staticmethod
    def where_active_push_notification() -> str:
        # See where_starred for documentation.
        #
        # 4096 == 1 << 12, the bit for 'active_mobile_push_notification'
        # (index 12 in ALL_FLAGS).
        return 'flags & 4096 <> 0'

    def flags_list(self) -> List[str]:
        """Names of the API-visible flags set on this row."""
        flags = int(self.flags)
        return self.flags_list_for_flags(flags)

    @staticmethod
    def flags_list_for_flags(val: int) -> List[str]:
        '''
        This function is highly optimized, because it actually slows down
        sending messages in a naive implementation.
        '''
        flags = []
        mask = 1
        for flag in UserMessage.ALL_FLAGS:
            if (val & mask) and flag not in AbstractUserMessage.NON_API_FLAGS:
                flags.append(flag)
            mask <<= 1
        return flags

    def __str__(self) -> str:
        display_recipient = get_display_recipient(self.message.recipient)
        return "<%s: %s / %s (%s)>" % (self.__class__.__name__, display_recipient,
                                       self.user_profile.email, self.flags_list())
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2018-07-25 00:29:05 +02:00
|
|
|
class UserMessage(AbstractUserMessage):
    # The live counterpart; see ArchivedUserMessage for the archived one.
    message = models.ForeignKey(Message, on_delete=CASCADE)  # type: Message
|
|
|
|
|
2018-07-27 11:47:07 +02:00
|
|
|
def get_usermessage_by_message_id(user_profile: UserProfile, message_id: int) -> Optional[UserMessage]:
    """Return the UserMessage row for (user_profile, message_id), with
    related rows prefetched, or None if the user has no such row."""
    try:
        return UserMessage.objects.select_related().get(user_profile=user_profile,
                                                        message__id=message_id)
    except UserMessage.DoesNotExist:
        return None
|
2018-07-25 00:29:05 +02:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
class ArchivedUserMessage(AbstractUserMessage):
    """Used as a temporary holding place for deleted UserMessages objects
    before they are permanently deleted.  This is an important part of
    a robust 'message retention' feature.
    """
    message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE)  # type: ArchivedMessage
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class AbstractAttachment(models.Model):
    """Metadata about one uploaded file; shared by Attachment and
    ArchivedAttachment."""
    file_name = models.TextField(db_index=True)  # type: str

    # path_id is a storage location agnostic representation of the path of the file.
    # If the path of a file is http://localhost:9991/user_uploads/a/b/abc/temp_file.py
    # then its path_id will be a/b/abc/temp_file.py.
    path_id = models.TextField(db_index=True, unique=True)  # type: str
    owner = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    realm = models.ForeignKey(Realm, blank=True, null=True, on_delete=CASCADE)  # type: Optional[Realm]

    create_time = models.DateTimeField(default=timezone_now,
                                       db_index=True)  # type: datetime.datetime
    # Size in bytes — TODO confirm unit against the upload code paths.
    size = models.IntegerField(null=True)  # type: Optional[int]

    # Whether this attachment has been posted to a public stream, and
    # thus should be available to all non-guest users in the
    # organization (even if they weren't a recipient of a message
    # linking to it).  This lets us avoid looking up the corresponding
    # messages/streams to check permissions before serving these files.
    is_realm_public = models.BooleanField(default=False)  # type: bool

    class Meta:
        abstract = True

    def __str__(self) -> str:
        return "<%s: %s>" % (self.__class__.__name__, self.file_name,)
|
2017-05-17 05:59:50 +02:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class ArchivedAttachment(AbstractAttachment):
    """Used as a temporary holding place for deleted Attachment objects
    before they are permanently deleted.  This is an important part of
    a robust 'message retention' feature.
    """
    messages = models.ManyToManyField(ArchivedMessage)  # type: Manager
|
|
|
|
|
|
|
|
class Attachment(AbstractAttachment):
    # All the messages whose content links to this upload.
    messages = models.ManyToManyField(Message)  # type: Manager

    def is_claimed(self) -> bool:
        """Whether at least one message links to this attachment."""
        # exists() lets the database stop at the first matching row,
        # rather than counting all of them as count() > 0 would.
        return self.messages.exists()

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this attachment, including the id and timestamp of
        every message that references it."""
        return {
            'id': self.id,
            'name': self.file_name,
            'path_id': self.path_id,
            'size': self.size,
            # convert to JavaScript-style UNIX timestamp so we can take
            # advantage of client timezones.
            'create_time': time.mktime(self.create_time.timetuple()) * 1000,
            'messages': [{
                'id': m.id,
                'name': time.mktime(m.date_sent.timetuple()) * 1000
            } for m in self.messages.all()]
        }
|
|
|
|
|
2019-01-14 07:46:31 +01:00
|
|
|
# Run the flush_used_upload_space_cache handler whenever an Attachment
# is created or deleted, so the cached per-realm upload-space total
# stays fresh.
post_save.connect(flush_used_upload_space_cache, sender=Attachment)
post_delete.connect(flush_used_upload_space_cache, sender=Attachment)
|
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def validate_attachment_request(user_profile: UserProfile, path_id: str) -> Optional[bool]:
    """Can user_profile access the attachment stored at path_id?

    Returns None if no such attachment exists; otherwise True/False for
    whether access is permitted.  The checks are ordered cheapest first.
    """
    try:
        attachment = Attachment.objects.get(path_id=path_id)
    except Attachment.DoesNotExist:
        return None

    if user_profile == attachment.owner:
        # If you own the file, you can access it.
        return True
    if (attachment.is_realm_public and attachment.realm == user_profile.realm and
            user_profile.can_access_public_streams()):
        # Any user in the realm can access realm-public files
        return True

    messages = attachment.messages.all()
    if UserMessage.objects.filter(user_profile=user_profile, message__in=messages).exists():
        # If it was sent in a private message or private stream
        # message, then anyone who received that message can access it.
        return True

    # The user didn't receive any of the messages that included this
    # attachment.  But they might still have access to it, if it was
    # sent to a stream they are on where history is public to
    # subscribers.

    # These are subscriptions to a stream one of the messages was sent to
    relevant_stream_ids = Subscription.objects.filter(
        user_profile=user_profile,
        active=True,
        recipient__type=Recipient.STREAM,
        recipient__in=[m.recipient_id for m in messages]).values_list("recipient__type_id", flat=True)
    if len(relevant_stream_ids) == 0:
        return False

    return Stream.objects.filter(id__in=relevant_stream_ids,
                                 history_public_to_subscribers=True).exists()
|
2018-06-05 21:02:02 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_old_unclaimed_attachments(weeks_ago: int) -> Sequence[Attachment]:
    """Attachments referenced by no message and created more than
    `weeks_ago` weeks before now."""
    # TODO: Change return type to QuerySet[Attachment]
    cutoff = timezone_now() - datetime.timedelta(weeks=weeks_ago)
    return Attachment.objects.filter(messages=None, create_time__lt=cutoff)
|
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class Subscription(models.Model):
    """Ties a UserProfile to a Recipient, recording per-user settings
    for that conversation target (color, muting, notifications)."""
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    recipient = models.ForeignKey(Recipient, on_delete=CASCADE)  # type: Recipient

    # Whether the user has since unsubscribed.  We mark Subscription
    # objects as inactive, rather than deleting them, when a user
    # unsubscribes, so we can preserve user customizations like
    # notification settings, stream color, etc., if the user later
    # resubscribes.
    active = models.BooleanField(default=True)  # type: bool

    # Whether this user had muted this stream.
    is_muted = models.NullBooleanField(default=False)  # type: Optional[bool]

    DEFAULT_STREAM_COLOR = u"#c2c2c2"
    color = models.CharField(max_length=10, default=DEFAULT_STREAM_COLOR)  # type: str
    pin_to_top = models.BooleanField(default=False)  # type: bool

    # These fields are stream-level overrides for the user's default
    # configuration for notification, configured in UserProfile.  The
    # default, None, means we just inherit the user-level default.
    desktop_notifications = models.NullBooleanField(default=None)  # type: Optional[bool]
    audible_notifications = models.NullBooleanField(default=None)  # type: Optional[bool]
    push_notifications = models.NullBooleanField(default=None)  # type: Optional[bool]
    email_notifications = models.NullBooleanField(default=None)  # type: Optional[bool]

    class Meta:
        unique_together = ("user_profile", "recipient")

    def __str__(self) -> str:
        return "<Subscription: %s -> %s>" % (self.user_profile, self.recipient)
|
2012-08-28 22:56:21 +02:00
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(user_profile_by_id_cache_key, timeout=3600*24*7)
def get_user_profile_by_id(uid: int) -> UserProfile:
    """Fetch a UserProfile by primary key, through a week-long cache."""
    return UserProfile.objects.select_related().get(id=uid)
|
|
|
|
|
2013-03-28 20:20:31 +01:00
|
|
|
@cache_with_key(user_profile_by_email_cache_key, timeout=3600*24*7)
def get_user_profile_by_email(email: str) -> UserProfile:
    """This should only be used by our unit tests and for manual manage.py
    shell work; robust code must use get_user instead, because Zulip
    supports multiple users with a given email address existing (in
    different realms).  Also, for many applications, we should prefer
    get_user_by_delivery_email.
    """
    # Lookup is case-insensitive (__iexact) on the stripped email.
    return UserProfile.objects.select_related().get(delivery_email__iexact=email.strip())
|
2013-03-28 20:20:31 +01:00
|
|
|
|
2017-08-25 07:43:38 +02:00
|
|
|
@cache_with_key(user_profile_by_api_key_cache_key, timeout=3600*24*7)
def get_user_profile_by_api_key(api_key: str) -> UserProfile:
    """Fetch the UserProfile owning the given API key (cached for a week)."""
    queryset = UserProfile.objects.select_related()
    return queryset.get(api_key=api_key)
|
|
|
|
|
2018-12-07 00:05:57 +01:00
|
|
|
def get_user_by_delivery_email(email: str, realm: Realm) -> UserProfile:
    """Fetch a user in `realm` by delivery_email.

    For use in authentication/registration contexts.  Do not use for
    user-facing views (e.g. Zulip API endpoints); for that, you want
    `get_user`, both because it does lookup by email (not
    delivery_email) and because it correctly handles Zulip's support
    for multiple users with the same email address in different realms.
    """
    cleaned = email.strip()
    return UserProfile.objects.select_related().get(
        delivery_email__iexact=cleaned, realm=realm)
|
|
|
|
|
2017-05-22 19:45:54 +02:00
|
|
|
@cache_with_key(user_profile_cache_key, timeout=3600*24*7)
def get_user(email: str, realm: Realm) -> UserProfile:
    """Fetch a user in `realm` by its visible-to-other-users username
    (the `email` field).

    For use in API contexts; do not use in authentication/registration
    contexts — for that, you need `get_user_by_delivery_email`.
    """
    cleaned = email.strip()
    return UserProfile.objects.select_related().get(
        email__iexact=cleaned, realm=realm)
|
|
|
|
|
2018-12-07 00:05:57 +01:00
|
|
|
def get_active_user_by_delivery_email(email: str, realm: Realm) -> UserProfile:
    """Like get_user_by_delivery_email, but rejects deactivated accounts
    by raising UserProfile.DoesNotExist."""
    user_profile = get_user_by_delivery_email(email, realm)
    if user_profile.is_active:
        return user_profile
    raise UserProfile.DoesNotExist()
|
|
|
|
|
2018-05-21 04:00:15 +02:00
|
|
|
def get_active_user(email: str, realm: Realm) -> UserProfile:
    """Like get_user, but rejects deactivated accounts by raising
    UserProfile.DoesNotExist."""
    user_profile = get_user(email, realm)
    if user_profile.is_active:
        return user_profile
    raise UserProfile.DoesNotExist()
|
|
|
|
|
2018-05-17 19:36:33 +02:00
|
|
|
def get_user_profile_by_id_in_realm(uid: int, realm: Realm) -> UserProfile:
    """Fetch a user by id, restricted to the given realm (uncached)."""
    queryset = UserProfile.objects.select_related()
    return queryset.get(id=uid, realm=realm)
|
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_user_including_cross_realm(email: str, realm: Optional[Realm]=None) -> UserProfile:
    """Fetch a user by email, handling system bots that live outside
    any normal realm.  `realm` is required for non-bot lookups."""
    if not is_cross_realm_bot_email(email):
        assert realm is not None
        return get_user(email, realm)
    return get_system_bot(email)
|
|
|
|
|
2017-05-22 23:37:15 +02:00
|
|
|
@cache_with_key(bot_profile_cache_key, timeout=3600*24*7)
def get_system_bot(email: str) -> UserProfile:
    """Fetch a cross-realm system bot by email (cached for a week)."""
    cleaned = email.strip()
    return UserProfile.objects.select_related().get(email__iexact=cleaned)
|
|
|
|
|
2018-09-01 22:39:29 +02:00
|
|
|
def get_user_by_id_in_realm_including_cross_realm(
        uid: int,
        realm: Optional[Realm]
) -> UserProfile:
    """Fetch a user by id if they belong to `realm`, or if they are a
    cross-realm system bot; raises UserProfile.DoesNotExist otherwise."""
    profile = get_user_profile_by_id(uid)
    # Note: This doesn't validate whether the `realm` passed in is
    # None/invalid for the CROSS_REALM_BOT_EMAILS case.
    is_cross_realm_bot = profile.email in settings.CROSS_REALM_BOT_EMAILS
    if profile.realm == realm or is_cross_realm_bot:
        return profile
    raise UserProfile.DoesNotExist()
|
|
|
|
|
2017-10-21 18:20:49 +02:00
|
|
|
@cache_with_key(realm_user_dicts_cache_key, timeout=3600*24*7)
def get_realm_user_dicts(realm_id: int) -> List[Dict[str, Any]]:
    """Cached list of per-user dicts (realm_user_dict_fields) for a realm."""
    realm_users = UserProfile.objects.filter(realm_id=realm_id)
    return realm_users.values(*realm_user_dict_fields)
|
2013-08-28 20:25:31 +02:00
|
|
|
|
2017-09-16 21:44:03 +02:00
|
|
|
@cache_with_key(active_user_ids_cache_key, timeout=3600*24*7)
def active_user_ids(realm_id: int) -> List[int]:
    """IDs of all active users in the realm (cached for a week)."""
    rows = UserProfile.objects.filter(
        realm_id=realm_id,
        is_active=True,
    ).values_list('id', flat=True)
    return list(rows)
|
|
|
|
|
2018-06-03 19:11:52 +02:00
|
|
|
@cache_with_key(active_non_guest_user_ids_cache_key, timeout=3600*24*7)
def active_non_guest_user_ids(realm_id: int) -> List[int]:
    """IDs of active, non-guest users in the realm (cached for a week)."""
    rows = UserProfile.objects.filter(
        realm_id=realm_id,
        is_active=True,
    ).exclude(
        role=UserProfile.ROLE_GUEST,
    ).values_list('id', flat=True)
    return list(rows)
|
|
|
|
|
2018-05-18 19:54:50 +02:00
|
|
|
def get_source_profile(email: str, string_id: str) -> Optional[UserProfile]:
    """Best-effort lookup of a user by delivery email in the realm with
    subdomain `string_id`; returns None if either is missing."""
    try:
        source_realm = get_realm(string_id)
        return get_user_by_delivery_email(email, source_realm)
    except (Realm.DoesNotExist, UserProfile.DoesNotExist):
        return None
|
|
|
|
|
2017-02-06 20:45:26 +01:00
|
|
|
@cache_with_key(bot_dicts_in_realm_cache_key, timeout=3600*24*7)
def get_bot_dicts_in_realm(realm: Realm) -> List[Dict[str, Any]]:
    """Cached list of per-bot dicts (bot_dict_fields) for a realm."""
    bots = UserProfile.objects.filter(realm=realm, is_bot=True)
    return bots.values(*bot_dict_fields)
|
2014-02-26 00:12:14 +01:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def is_cross_realm_bot_email(email: str) -> bool:
    """True if `email` belongs to one of the configured system bots."""
    lowered = email.lower()
    return lowered in settings.CROSS_REALM_BOT_EMAILS
|
|
|
|
|
2016-04-01 08:42:38 +02:00
|
|
|
# The Huddle class represents a group of individuals who have had a
# Group Private Message conversation together.  The actual membership
# of the Huddle is stored in the Subscription table just like with
# Streams, and a hash of that list is stored in the huddle_hash field
# below, to support efficiently mapping from a set of users to the
# corresponding Huddle object.
class Huddle(models.Model):
    # TODO: We should consider whether using
    # CommaSeparatedIntegerField would be better.
    #
    # SHA1-style 40-char digest of the sorted member-id list; see
    # get_huddle_hash below.
    huddle_hash = models.CharField(max_length=40, db_index=True, unique=True)  # type: str
|
2012-09-04 23:20:21 +02:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def get_huddle_hash(id_list: List[int]) -> str:
    """Digest identifying the set of users in `id_list`, independent of
    ordering and duplicates."""
    unique_ids = sorted(set(id_list))
    hash_key = ",".join(str(member_id) for member_id in unique_ids)
    return make_safe_digest(hash_key)
|
2012-10-20 18:02:58 +02:00
|
|
|
|
2018-05-11 02:24:34 +02:00
|
|
|
def huddle_hash_cache_key(huddle_hash: str) -> str:
    """Cache key under which the Huddle for `huddle_hash` is stored.

    Dropped the Python-2-era `u''` prefix: this file is Python-3-only
    (it uses function annotations throughout), where str literals are
    already unicode.
    """
    return "huddle_by_hash:%s" % (huddle_hash,)
|
2013-03-26 18:17:55 +01:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_huddle(id_list: List[int]) -> Huddle:
    """Fetch (creating if needed) the Huddle for this set of user ids."""
    return get_huddle_backend(get_huddle_hash(id_list), id_list)
|
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
# Cache by huddle_hash only; id_list is just needed on the miss path.
@cache_with_key(lambda huddle_hash, id_list: huddle_hash_cache_key(huddle_hash), timeout=3600*24*7)
def get_huddle_backend(huddle_hash: str, id_list: List[int]) -> Huddle:
    """Get or create the Huddle row for `huddle_hash`, creating its
    Recipient and member Subscriptions atomically on first use."""
    with transaction.atomic():
        (huddle, created) = Huddle.objects.get_or_create(huddle_hash=huddle_hash)
        if created:
            # First time we've seen this set of users: materialize the
            # Recipient and one Subscription per member.
            recipient = Recipient.objects.create(type_id=huddle.id,
                                                 type=Recipient.HUDDLE)
            subs_to_create = [Subscription(recipient=recipient,
                                           user_profile_id=user_profile_id)
                              for user_profile_id in id_list]
            Subscription.objects.bulk_create(subs_to_create)
        return huddle
|
2012-09-04 23:20:21 +02:00
|
|
|
|
2018-03-12 19:35:28 +01:00
|
|
|
def clear_database() -> None:  # nocoverage # Only used in populate_db
    """Destructively wipe cache and core tables; development tooling only."""
    # Flush memcached first so no stale cached rows survive the wipe.
    pylibmc.Client(['127.0.0.1']).flush_all()
    # Pre-declare the loop variable so mypy accepts the heterogeneous list.
    model = None  # type: Any
    for model in [Message, Stream, UserProfile, Recipient,
                  Realm, Subscription, Huddle, UserMessage, Client,
                  DefaultStream]:
        model.objects.all().delete()
    Session.objects.all().delete()
|
2012-11-08 23:02:16 +01:00
|
|
|
|
|
|
|
class UserActivity(models.Model):
    """Per-(user, client, query) activity counters.

    Tracks how many times, and most recently when, a user hit a given
    API query from a given client.
    """
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    client = models.ForeignKey(Client, on_delete=CASCADE)  # type: Client
    # Name of the API query/endpoint being counted.
    query = models.CharField(max_length=50, db_index=True)  # type: str

    # Total number of times this (user, client, query) tuple was seen.
    count = models.IntegerField()  # type: int
    last_visit = models.DateTimeField('last visit')  # type: datetime.datetime

    class Meta:
        unique_together = ("user_profile", "client", "query")
|
2012-11-27 18:26:51 +01:00
|
|
|
|
2013-09-06 21:52:12 +02:00
|
|
|
class UserActivityInterval(models.Model):
    """A contiguous time interval during which a user was active."""
    # Minimum length an interval is extended/recorded at — presumably to
    # coalesce nearby activity into one row; TODO confirm against callers.
    MIN_INTERVAL_LENGTH = datetime.timedelta(minutes=15)

    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    start = models.DateTimeField('start time', db_index=True)  # type: datetime.datetime
    end = models.DateTimeField('end time', db_index=True)  # type: datetime.datetime
|
2013-09-06 21:52:12 +02:00
|
|
|
|
2017-04-15 07:20:16 +02:00
|
|
|
|
2013-02-08 23:44:15 +01:00
|
|
|
class UserPresence(models.Model):
    """A record from the last time we heard from a given user on a given client.

    This is a tricky subsystem, because it is highly optimized.  See the docs:
      https://zulip.readthedocs.io/en/latest/subsystems/presence.html
    """
    class Meta:
        # One presence row per (user, client) pair; updates overwrite it.
        unique_together = ("user_profile", "client")

    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    client = models.ForeignKey(Client, on_delete=CASCADE)  # type: Client

    # The time we heard this update from the client.
    timestamp = models.DateTimeField('presence changed')  # type: datetime.datetime

    # The user was actively using this Zulip client as of `timestamp` (i.e.,
    # they had interacted with the client recently).  When the timestamp is
    # itself recent, this is the green "active" status in the webapp.
    ACTIVE = 1

    # There had been no user activity (keyboard/mouse/etc.) on this client
    # recently.  So the client was online at the specified time, but it
    # could be the user's desktop which they were away from.  Displayed as
    # orange/idle if the timestamp is current.
    IDLE = 2

    # Information from the client about the user's recent interaction with
    # that client, as of `timestamp`.  Possible values above.
    #
    # There is no "inactive" status, because that is encoded by the
    # timestamp being old.
    status = models.PositiveSmallIntegerField(default=ACTIVE)  # type: int
|
2013-02-08 23:44:15 +01:00
|
|
|
|
2013-09-13 23:33:11 +02:00
|
|
|
@staticmethod
|
2017-11-27 07:33:05 +01:00
|
|
|
def status_to_string(status: int) -> str:
|
2013-09-13 23:33:11 +02:00
|
|
|
if status == UserPresence.ACTIVE:
|
|
|
|
return 'active'
|
|
|
|
elif status == UserPresence.IDLE:
|
|
|
|
return 'idle'
|
2018-03-12 19:37:45 +01:00
|
|
|
else: # nocoverage # TODO: Add a presence test to cover this.
|
2017-03-03 20:30:49 +01:00
|
|
|
raise ValueError('Unknown status: %s' % (status,))
|
2013-09-13 23:33:11 +02:00
|
|
|
|
2017-02-11 08:38:16 +01:00
|
|
|
@staticmethod
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_status_dict_by_user(user_profile: UserProfile) -> Dict[str, Dict[str, Any]]:
|
2017-02-11 08:38:16 +01:00
|
|
|
query = UserPresence.objects.filter(user_profile=user_profile).values(
|
|
|
|
'client__name',
|
|
|
|
'status',
|
|
|
|
'timestamp',
|
|
|
|
'user_profile__email',
|
|
|
|
'user_profile__id',
|
|
|
|
'user_profile__enable_offline_push_notifications',
|
|
|
|
)
|
2017-09-13 16:43:02 +02:00
|
|
|
presence_rows = list(query)
|
2017-02-11 08:38:16 +01:00
|
|
|
|
2017-09-08 19:05:13 +02:00
|
|
|
mobile_user_ids = set() # type: Set[int]
|
2018-03-12 19:37:45 +01:00
|
|
|
if PushDeviceToken.objects.filter(user=user_profile).exists(): # nocoverage
|
|
|
|
# TODO: Add a test, though this is low priority, since we don't use mobile_user_ids yet.
|
2017-09-08 19:05:13 +02:00
|
|
|
mobile_user_ids.add(user_profile.id)
|
2017-02-11 08:38:16 +01:00
|
|
|
|
2017-09-13 16:43:02 +02:00
|
|
|
return UserPresence.get_status_dicts_for_rows(presence_rows, mobile_user_ids)
|
2017-02-11 08:38:16 +01:00
|
|
|
|
Optimize user presence/activity query.
The get_status_dict_by_realm helper gets called whenever our
realm user_presences cache expires, and it used to query these fields:
"zerver_userpresence"."id", "zerver_userpresence"."user_profile_id", "zerver_userpresence"."client_id", "zerver_userpresence"."timestamp", "zerver_userpresence"."status", "zerver_userprofile"."id", "zerver_userprofile"."password", "zerver_userprofile"."last_login", "zerver_userprofile"."is_superuser", "zerver_userprofile"."email", "zerver_userprofile"."is_staff", "zerver_userprofile"."is_active", "zerver_userprofile"."is_bot", "zerver_userprofile"."date_joined", "zerver_userprofile"."bot_owner_id", "zerver_userprofile"."full_name", "zerver_userprofile"."short_name", "zerver_userprofile"."pointer", "zerver_userprofile"."last_pointer_updater", "zerver_userprofile"."realm_id", "zerver_userprofile"."api_key", "zerver_userprofile"."enable_desktop_notifications", "zerver_userprofile"."enable_sounds", "zerver_userprofile"."enter_sends", "zerver_userprofile"."enable_offline_email_notifications", "zerver_userprofile"."last_reminder", "zerver_userprofile"."rate_limits", "zerver_userprofile"."avatar_source", "zerver_userprofile"."tutorial_status", "zerver_userprofile"."onboarding_steps", "zerver_userprofile"."invites_granted", "zerver_userprofile"."invites_used", "zerver_userprofile"."alert_words", "zerver_userprofile"."muted_topics", "zerver_client"."id", "zerver_client"."name"
Now it queries just the fields it needs:
"zerver_client"."name", "zerver_userpresence"."status", "zerver_userpresence"."timestamp", "zerver_userprofile"."email" FROM "zerver_userpresence"
Also, get_status_dict_by_realm is now namespaced under UserPresence as a static method.
(imported from commit be1266844b6bd28b6c615594796713c026a850a1)
2013-09-14 23:59:03 +02:00
|
|
|
    @staticmethod
    def get_status_dict_by_realm(realm_id: int) -> Dict[str, Dict[str, Any]]:
        """Presence info for every active human in the realm.

        Returns {email: {client_name: presence_info, 'aggregated': ...}},
        built by get_status_dicts_for_rows.  Only queries the narrow set
        of fields it needs, since this runs on a hot cache-refill path.
        """
        user_profile_ids = UserProfile.objects.filter(
            realm_id=realm_id,
            is_active=True,
            is_bot=False
        ).order_by('id').values_list('id', flat=True)

        user_profile_ids = list(user_profile_ids)
        if not user_profile_ids:  # nocoverage
            # This conditional is necessary because query_for_ids
            # throws an exception if passed an empty list.
            #
            # It's not clear this condition is actually possible,
            # though, because it shouldn't be possible to end up with
            # a realm with 0 active users.
            return {}

        # Ignore rows from clients we haven't heard from in two weeks.
        two_weeks_ago = timezone_now() - datetime.timedelta(weeks=2)
        query = UserPresence.objects.filter(
            timestamp__gte=two_weeks_ago
        ).values(
            'client__name',
            'status',
            'timestamp',
            'user_profile__email',
            'user_profile__id',
            'user_profile__enable_offline_push_notifications',
        )

        # Restrict to the realm's users (query_for_ids handles large
        # id lists efficiently).
        query = query_for_ids(
            query=query,
            user_ids=user_profile_ids,
            field='user_profile_id'
        )
        presence_rows = list(query)

        # Users with at least one registered mobile push device.
        mobile_query = PushDeviceToken.objects.distinct(
            'user_id'
        ).values_list(
            'user_id',
            flat=True
        )

        mobile_query = query_for_ids(
            query=mobile_query,
            user_ids=user_profile_ids,
            field='user_id'
        )
        mobile_user_ids = set(mobile_query)

        return UserPresence.get_status_dicts_for_rows(presence_rows, mobile_user_ids)
|
2017-02-11 07:49:27 +01:00
|
|
|
|
|
|
|
    @staticmethod
    def get_status_dicts_for_rows(presence_rows: List[Dict[str, Any]],
                                  mobile_user_ids: Set[int]) -> Dict[str, Dict[str, Any]]:
        """Convert flat presence rows into the per-user dict sent to clients.

        Returns {email: {client_name: info, ..., 'aggregated': info}},
        where 'aggregated' is the most recent client's info.
        """
        # Group the rows by user email.
        info_row_dct = defaultdict(list)  # type: DefaultDict[str, List[Dict[str, Any]]]
        for row in presence_rows:
            email = row['user_profile__email']
            client_name = row['client__name']
            status = UserPresence.status_to_string(row['status'])
            dt = row['timestamp']
            timestamp = datetime_to_timestamp(dt)
            push_enabled = row['user_profile__enable_offline_push_notifications']
            has_push_devices = row['user_profile__id'] in mobile_user_ids
            pushable = (push_enabled and has_push_devices)

            info = dict(
                client=client_name,
                status=status,
                dt=dt,
                timestamp=timestamp,
                pushable=pushable,
            )

            info_row_dct[email].append(info)

        user_statuses = dict()  # type: Dict[str, Dict[str, Any]]

        for email, info_rows in info_row_dct.items():
            # Note that datetime values have sub-second granularity, which is
            # mostly important for avoiding test flakes, but it's also technically
            # more precise for real users.
            by_time = lambda row: row['dt']
            most_recent_info = max(info_rows, key=by_time)

            # We don't send datetime values to the client.
            for r in info_rows:
                del r['dt']

            client_dict = {info['client']: info for info in info_rows}
            user_statuses[email] = client_dict

            # The word "aggegrated" here is possibly misleading.
            # It's really just the most recent client's info.
            user_statuses[email]['aggregated'] = dict(
                client=most_recent_info['client'],
                status=most_recent_info['status'],
                timestamp=most_recent_info['timestamp'],
            )

        return user_statuses
|
|
|
|
|
|
|
|
@staticmethod
|
2018-05-11 02:24:34 +02:00
|
|
|
def to_presence_dict(client_name: str, status: int, dt: datetime.datetime, push_enabled: bool=False,
|
2017-12-25 10:18:57 +01:00
|
|
|
has_push_devices: bool=False) -> Dict[str, Any]:
|
Optimize user presence/activity query.
The get_status_dict_by_realm helper gets called whenever our
realm user_presences cache expires, and it used to query these fields:
"zerver_userpresence"."id", "zerver_userpresence"."user_profile_id", "zerver_userpresence"."client_id", "zerver_userpresence"."timestamp", "zerver_userpresence"."status", "zerver_userprofile"."id", "zerver_userprofile"."password", "zerver_userprofile"."last_login", "zerver_userprofile"."is_superuser", "zerver_userprofile"."email", "zerver_userprofile"."is_staff", "zerver_userprofile"."is_active", "zerver_userprofile"."is_bot", "zerver_userprofile"."date_joined", "zerver_userprofile"."bot_owner_id", "zerver_userprofile"."full_name", "zerver_userprofile"."short_name", "zerver_userprofile"."pointer", "zerver_userprofile"."last_pointer_updater", "zerver_userprofile"."realm_id", "zerver_userprofile"."api_key", "zerver_userprofile"."enable_desktop_notifications", "zerver_userprofile"."enable_sounds", "zerver_userprofile"."enter_sends", "zerver_userprofile"."enable_offline_email_notifications", "zerver_userprofile"."last_reminder", "zerver_userprofile"."rate_limits", "zerver_userprofile"."avatar_source", "zerver_userprofile"."tutorial_status", "zerver_userprofile"."onboarding_steps", "zerver_userprofile"."invites_granted", "zerver_userprofile"."invites_used", "zerver_userprofile"."alert_words", "zerver_userprofile"."muted_topics", "zerver_client"."id", "zerver_client"."name"
Now it queries just the fields it needs:
"zerver_client"."name", "zerver_userpresence"."status", "zerver_userpresence"."timestamp", "zerver_userprofile"."email" FROM "zerver_userpresence"
Also, get_status_dict_by_realm is now namespaced under UserPresence as a static method.
(imported from commit be1266844b6bd28b6c615594796713c026a850a1)
2013-09-14 23:59:03 +02:00
|
|
|
presence_val = UserPresence.status_to_string(status)
|
2016-06-04 00:44:30 +02:00
|
|
|
|
|
|
|
timestamp = datetime_to_timestamp(dt)
|
Optimize user presence/activity query.
The get_status_dict_by_realm helper gets called whenever our
realm user_presences cache expires, and it used to query these fields:
"zerver_userpresence"."id", "zerver_userpresence"."user_profile_id", "zerver_userpresence"."client_id", "zerver_userpresence"."timestamp", "zerver_userpresence"."status", "zerver_userprofile"."id", "zerver_userprofile"."password", "zerver_userprofile"."last_login", "zerver_userprofile"."is_superuser", "zerver_userprofile"."email", "zerver_userprofile"."is_staff", "zerver_userprofile"."is_active", "zerver_userprofile"."is_bot", "zerver_userprofile"."date_joined", "zerver_userprofile"."bot_owner_id", "zerver_userprofile"."full_name", "zerver_userprofile"."short_name", "zerver_userprofile"."pointer", "zerver_userprofile"."last_pointer_updater", "zerver_userprofile"."realm_id", "zerver_userprofile"."api_key", "zerver_userprofile"."enable_desktop_notifications", "zerver_userprofile"."enable_sounds", "zerver_userprofile"."enter_sends", "zerver_userprofile"."enable_offline_email_notifications", "zerver_userprofile"."last_reminder", "zerver_userprofile"."rate_limits", "zerver_userprofile"."avatar_source", "zerver_userprofile"."tutorial_status", "zerver_userprofile"."onboarding_steps", "zerver_userprofile"."invites_granted", "zerver_userprofile"."invites_used", "zerver_userprofile"."alert_words", "zerver_userprofile"."muted_topics", "zerver_client"."id", "zerver_client"."name"
Now it queries just the fields it needs:
"zerver_client"."name", "zerver_userpresence"."status", "zerver_userpresence"."timestamp", "zerver_userprofile"."email" FROM "zerver_userpresence"
Also, get_status_dict_by_realm is now namespaced under UserPresence as a static method.
(imported from commit be1266844b6bd28b6c615594796713c026a850a1)
2013-09-14 23:59:03 +02:00
|
|
|
return dict(
|
2017-01-24 07:06:13 +01:00
|
|
|
client=client_name,
|
|
|
|
status=presence_val,
|
|
|
|
timestamp=timestamp,
|
|
|
|
pushable=(push_enabled and has_push_devices),
|
Optimize user presence/activity query.
The get_status_dict_by_realm helper gets called whenever our
realm user_presences cache expires, and it used to query these fields:
"zerver_userpresence"."id", "zerver_userpresence"."user_profile_id", "zerver_userpresence"."client_id", "zerver_userpresence"."timestamp", "zerver_userpresence"."status", "zerver_userprofile"."id", "zerver_userprofile"."password", "zerver_userprofile"."last_login", "zerver_userprofile"."is_superuser", "zerver_userprofile"."email", "zerver_userprofile"."is_staff", "zerver_userprofile"."is_active", "zerver_userprofile"."is_bot", "zerver_userprofile"."date_joined", "zerver_userprofile"."bot_owner_id", "zerver_userprofile"."full_name", "zerver_userprofile"."short_name", "zerver_userprofile"."pointer", "zerver_userprofile"."last_pointer_updater", "zerver_userprofile"."realm_id", "zerver_userprofile"."api_key", "zerver_userprofile"."enable_desktop_notifications", "zerver_userprofile"."enable_sounds", "zerver_userprofile"."enter_sends", "zerver_userprofile"."enable_offline_email_notifications", "zerver_userprofile"."last_reminder", "zerver_userprofile"."rate_limits", "zerver_userprofile"."avatar_source", "zerver_userprofile"."tutorial_status", "zerver_userprofile"."onboarding_steps", "zerver_userprofile"."invites_granted", "zerver_userprofile"."invites_used", "zerver_userprofile"."alert_words", "zerver_userprofile"."muted_topics", "zerver_client"."id", "zerver_client"."name"
Now it queries just the fields it needs:
"zerver_client"."name", "zerver_userpresence"."status", "zerver_userpresence"."timestamp", "zerver_userprofile"."email" FROM "zerver_userpresence"
Also, get_status_dict_by_realm is now namespaced under UserPresence as a static method.
(imported from commit be1266844b6bd28b6c615594796713c026a850a1)
2013-09-14 23:59:03 +02:00
|
|
|
)
|
2013-04-03 22:00:02 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def to_dict(self) -> Dict[str, Any]:
    # UserPresence method: serialize this presence row for the API,
    # delegating to the shared to_presence_dict formatter.
    return UserPresence.to_presence_dict(
        self.client.name,
        self.status,
        self.timestamp,
    )
|
2013-04-03 22:00:02 +02:00
|
|
|
|
|
|
|
@staticmethod
def status_from_string(status: str) -> Optional[int]:
    # Map the wire-format presence string to its numeric constant;
    # unrecognized strings map to None so callers can reject them.
    mapping = {
        'active': UserPresence.ACTIVE,
        'idle': UserPresence.IDLE,
    }
    return mapping.get(status)
|
|
|
|
|
2018-12-17 16:19:18 +01:00
|
|
|
class UserStatus(models.Model):
    """Per-user status: an away flag (NORMAL/AWAY) plus a short free-form
    status message, with the client and time of the last update."""
    user_profile = models.OneToOneField(UserProfile, on_delete=CASCADE)  # type: UserProfile

    # When and from which client the status was last changed.
    timestamp = models.DateTimeField()  # type: datetime.datetime
    client = models.ForeignKey(Client, on_delete=CASCADE)  # type: Client

    # Values for the status field.
    NORMAL = 0
    AWAY = 1

    status = models.PositiveSmallIntegerField(default=NORMAL)  # type: int
    status_text = models.CharField(max_length=255, default='')  # type: str
|
2018-12-17 16:19:18 +01:00
|
|
|
|
2012-11-27 18:26:51 +01:00
|
|
|
class DefaultStream(models.Model):
    """Marks a stream as one new users in the realm are subscribed to by
    default (one row per (realm, stream) pair)."""
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    stream = models.ForeignKey(Stream, on_delete=CASCADE)  # type: Stream

    class Meta:
        unique_together = ("realm", "stream")
|
2012-12-01 04:35:59 +01:00
|
|
|
|
2017-10-12 19:35:14 +02:00
|
|
|
class DefaultStreamGroup(models.Model):
    """A named, per-realm collection of streams (with a description) that
    can be offered as a unit."""
    MAX_NAME_LENGTH = 60

    name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True)  # type: str
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    streams = models.ManyToManyField('Stream')  # type: Manager
    description = models.CharField(max_length=1024, default='')  # type: str

    class Meta:
        unique_together = ("realm", "name")

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the group (including its streams) for the API."""
        return {
            'name': self.name,
            'id': self.id,
            'description': self.description,
            'streams': [stream.to_dict() for stream in self.streams.all()],
        }
|
2017-10-12 19:35:14 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_default_stream_groups(realm: Realm) -> QuerySet:
    """Return the DefaultStreamGroup rows for the given realm.

    Fix: the previous ``List[DefaultStreamGroup]`` annotation was wrong —
    ``.filter(...)`` returns a lazy Django QuerySet, not a materialized
    list.  Annotating it accurately matters for callers that chain further
    queryset operations.  (Annotation-only change; runtime behavior is
    identical.)
    """
    return DefaultStreamGroup.objects.filter(realm=realm)
|
|
|
|
|
2017-07-02 21:10:41 +02:00
|
|
|
class AbstractScheduledJob(models.Model):
    """Abstract base for work scheduled to run at a future time."""
    # When the job should be executed.
    scheduled_timestamp = models.DateTimeField(db_index=True)  # type: datetime.datetime

    # JSON representation of arguments to consumer
    data = models.TextField()  # type: str
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm

    class Meta:
        abstract = True
|
|
|
|
|
|
|
|
class ScheduledEmail(AbstractScheduledJob):
    """An email queued for future delivery (welcome series, digests,
    invitation reminders)."""
    # Exactly one of users or address should be set.  These are
    # duplicate values, used to efficiently filter the set of
    # ScheduledEmails for use in clear_scheduled_emails; the
    # recipients used for actually sending messages are stored in the
    # data field of AbstractScheduledJob.
    users = models.ManyToManyField(UserProfile)  # type: Manager
    # Just the address part of a full "name <address>" email address
    address = models.EmailField(null=True, db_index=True)  # type: Optional[str]

    # Valid types are below
    WELCOME = 1
    DIGEST = 2
    INVITATION_REMINDER = 3
    type = models.PositiveSmallIntegerField()  # type: int

    def __str__(self) -> str:
        recipients = self.address or list(self.users.all())
        return "<ScheduledEmail: %s %s %s>" % (self.type, recipients,
                                               self.scheduled_timestamp)
|
2017-09-21 14:58:49 +02:00
|
|
|
|
2018-01-01 20:41:24 +01:00
|
|
|
class ScheduledMessage(models.Model):
    """A message composed now but scheduled to be sent later (either an
    explicit send-later or a self-reminder)."""
    sender = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    recipient = models.ForeignKey(Recipient, on_delete=CASCADE)  # type: Recipient
    subject = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH)  # type: str
    content = models.TextField()  # type: str
    sending_client = models.ForeignKey(Client, on_delete=CASCADE)  # type: Client
    stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE)  # type: Optional[Stream]
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    scheduled_timestamp = models.DateTimeField(db_index=True)  # type: datetime.datetime
    # Set once the message has actually been sent.
    delivered = models.BooleanField(default=False)  # type: bool

    # Values for delivery_type.
    SEND_LATER = 1
    REMIND = 2

    DELIVERY_TYPES = (
        (SEND_LATER, 'send_later'),
        (REMIND, 'remind'),
    )

    delivery_type = models.PositiveSmallIntegerField(choices=DELIVERY_TYPES,
                                                     default=SEND_LATER)  # type: int

    def topic_name(self) -> str:
        # Accessor for the topic; the column is historically named "subject".
        return self.subject

    def set_topic_name(self, topic_name: str) -> None:
        # Mutator counterpart of topic_name(); does not save the model.
        self.subject = topic_name

    def __str__(self) -> str:
        display_recipient = get_display_recipient(self.recipient)
        return "<ScheduledMessage: %s %s %s %s>" % (display_recipient,
                                                    self.subject, self.sender,
                                                    self.scheduled_timestamp)
|
|
|
|
|
2017-07-02 21:10:41 +02:00
|
|
|
# Maps the email-template name used by the email system to the
# ScheduledEmail.type value stored in the database.
EMAIL_TYPES = dict(
    followup_day1=ScheduledEmail.WELCOME,
    followup_day2=ScheduledEmail.WELCOME,
    digest=ScheduledEmail.DIGEST,
    invitation_reminder=ScheduledEmail.INVITATION_REMINDER,
)
|
2017-02-15 04:35:10 +01:00
|
|
|
|
2019-10-03 02:01:36 +02:00
|
|
|
class AbstractRealmAuditLog(models.Model):
    """Defines fields common to RealmAuditLog and RemoteRealmAuditLog."""
    event_time = models.DateTimeField(db_index=True)  # type: datetime.datetime
    # If True, event_time is an overestimate of the true time. Can be used
    # by migrations when introducing a new event_type.
    backfilled = models.BooleanField(default=False)  # type: bool

    # Keys within extra_data, when extra_data is a json dict. Keys are strings because
    # json keys must always be strings.
    OLD_VALUE = '1'
    NEW_VALUE = '2'
    ROLE_COUNT = '10'
    ROLE_COUNT_HUMANS = '11'
    ROLE_COUNT_BOTS = '12'

    extra_data = models.TextField(null=True)  # type: Optional[str]

    # Event types, grouped by hundreds: 1xx user, 2xx realm,
    # 3xx subscription, 4xx Stripe, 5xx customer.
    USER_CREATED = 101
    USER_ACTIVATED = 102
    USER_DEACTIVATED = 103
    USER_REACTIVATED = 104
    USER_ROLE_CHANGED = 105

    USER_SOFT_ACTIVATED = 120
    USER_SOFT_DEACTIVATED = 121
    USER_PASSWORD_CHANGED = 122
    USER_AVATAR_SOURCE_CHANGED = 123
    USER_FULL_NAME_CHANGED = 124
    USER_EMAIL_CHANGED = 125
    USER_TOS_VERSION_CHANGED = 126
    USER_API_KEY_CHANGED = 127
    USER_BOT_OWNER_CHANGED = 128

    REALM_DEACTIVATED = 201
    REALM_REACTIVATED = 202
    REALM_SCRUBBED = 203
    REALM_PLAN_TYPE_CHANGED = 204
    REALM_LOGO_CHANGED = 205
    REALM_EXPORTED = 206

    SUBSCRIPTION_CREATED = 301
    SUBSCRIPTION_ACTIVATED = 302
    SUBSCRIPTION_DEACTIVATED = 303

    STRIPE_CUSTOMER_CREATED = 401
    STRIPE_CARD_CHANGED = 402
    STRIPE_PLAN_CHANGED = 403
    STRIPE_PLAN_QUANTITY_RESET = 404

    CUSTOMER_CREATED = 501
    CUSTOMER_PLAN_CREATED = 502

    event_type = models.PositiveSmallIntegerField()  # type: int

    # event_types synced from on-prem installations to zulipchat.com when
    # billing for mobile push notifications is enabled.  Every billing
    # event_type should have ROLE_COUNT populated in extra_data.
    SYNCED_BILLING_EVENTS = [
        USER_CREATED, USER_ACTIVATED, USER_DEACTIVATED, USER_REACTIVATED, USER_ROLE_CHANGED,
        REALM_DEACTIVATED, REALM_REACTIVATED]

    class Meta:
        abstract = True
|
|
|
|
|
|
|
|
class RealmAuditLog(AbstractRealmAuditLog):
    """
    RealmAuditLog tracks important changes to users, streams, and
    realms in Zulip.  It is intended to support both
    debugging/introspection (e.g. determining when a user's left a
    given stream?) as well as help with some database migrations where
    we might be able to do a better data backfill with it.  Here are a
    few key details about how this works:

    * acting_user is the user who initiated the state change
    * modified_user (if present) is the user being modified
    * modified_stream (if present) is the stream being modified

    For example:
    * When a user subscribes another user to a stream, modified_user,
      acting_user, and modified_stream will all be present and different.
    * When an administrator changes an organization's realm icon,
      acting_user is that administrator and both modified_user and
      modified_stream will be None.
    """
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    acting_user = models.ForeignKey(UserProfile, null=True, related_name='+', on_delete=CASCADE)  # type: Optional[UserProfile]
    modified_user = models.ForeignKey(UserProfile, null=True, related_name='+', on_delete=CASCADE)  # type: Optional[UserProfile]
    modified_stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE)  # type: Optional[Stream]
    event_last_message_id = models.IntegerField(null=True)  # type: Optional[int]

    def __str__(self) -> str:
        # Show the most specific modified object available, falling back
        # to the realm itself for realm-level events.
        if self.modified_user is not None:
            subject = self.modified_user
        elif self.modified_stream is not None:
            subject = self.modified_stream
        else:
            subject = self.realm
        return "<RealmAuditLog: %s %s %s %s>" % (
            subject, self.event_type, self.event_time, self.id)
|
2017-09-22 16:09:37 +02:00
|
|
|
|
2017-01-24 01:48:35 +01:00
|
|
|
class UserHotspot(models.Model):
    """Records that a given onboarding hotspot has been shown/acknowledged
    for a user (one row per (user, hotspot) — presumably "completed";
    confirm against callers)."""
    user = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    hotspot = models.CharField(max_length=30)  # type: str
    timestamp = models.DateTimeField(default=timezone_now)  # type: datetime.datetime

    class Meta:
        unique_together = ("user", "hotspot")
|
2017-03-17 10:07:22 +01:00
|
|
|
|
2018-06-07 20:01:31 +02:00
|
|
|
def check_valid_user_ids(realm_id: int, user_ids: List[int],
                         allow_deactivated: bool=False) -> Optional[str]:
    """Validate that user_ids are IDs of non-bot users in the given realm.

    Returns a human-readable error string on the first failure, or None
    if every ID is acceptable.  Deactivated users are rejected unless
    allow_deactivated is True.
    """
    error = check_list(check_int)("User IDs", user_ids)
    if error:
        return error

    realm = Realm.objects.get(id=realm_id)
    for user_id in user_ids:
        # TODO: Structurally, we should be doing a bulk fetch query to
        # get the users here, not doing these in a loop.  But because
        # this is a rarely used feature and likely to never have more
        # than a handful of users, it's probably mostly OK.
        try:
            user_profile = get_user_profile_by_id_in_realm(user_id, realm)
        except UserProfile.DoesNotExist:
            return _('Invalid user ID: %d') % (user_id,)

        if not allow_deactivated and not user_profile.is_active:
            return _('User with ID %d is deactivated') % (user_id,)

        if user_profile.is_bot:
            return _('User with ID %d is a bot') % (user_id,)

    return None
|
2018-05-08 13:54:40 +02:00
|
|
|
|
2017-03-17 10:07:22 +01:00
|
|
|
import ast  # stdlib; used for the safe USER-field converter below


class CustomProfileField(models.Model):
    """Defines a form field for the per-realm custom profile fields feature.

    See CustomProfileFieldValue for an individual user's values for one of
    these fields.
    """
    HINT_MAX_LENGTH = 80
    NAME_MAX_LENGTH = 40

    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    name = models.CharField(max_length=NAME_MAX_LENGTH)  # type: str
    hint = models.CharField(max_length=HINT_MAX_LENGTH, default='', null=True)  # type: Optional[str]
    # Position of this field when displaying a profile.
    order = models.IntegerField(default=0)  # type: int

    # Field type IDs.
    SHORT_TEXT = 1
    LONG_TEXT = 2
    CHOICE = 3
    DATE = 4
    URL = 5
    USER = 6
    EXTERNAL_ACCOUNT = 7

    # These are the fields whose validators require more than var_name
    # and value argument. i.e. CHOICE require field_data, USER require
    # realm as argument.
    CHOICE_FIELD_TYPE_DATA = [
        (CHOICE, str(_('List of options')), validate_choice_field, str, "CHOICE"),
    ]  # type: List[ExtendedFieldElement]
    USER_FIELD_TYPE_DATA = [
        # SECURITY: the converter was previously the builtin eval(), which
        # would execute arbitrary code contained in a submitted field
        # value.  ast.literal_eval parses only Python literals (e.g. a
        # list of ints such as "[1, 2]"), which is all this converter
        # legitimately needs, and raises ValueError on anything else.
        (USER, str(_('Person picker')), check_valid_user_ids, ast.literal_eval, "USER"),
    ]  # type: List[UserFieldElement]

    CHOICE_FIELD_VALIDATORS = {
        item[0]: item[2] for item in CHOICE_FIELD_TYPE_DATA
    }  # type: Dict[int, ExtendedValidator]
    USER_FIELD_VALIDATORS = {
        item[0]: item[2] for item in USER_FIELD_TYPE_DATA
    }  # type: Dict[int, RealmUserValidator]

    FIELD_TYPE_DATA = [
        # Type, Display Name, Validator, Converter, Keyword
        (SHORT_TEXT, str(_('Short text')), check_short_string, str, "SHORT_TEXT"),
        (LONG_TEXT, str(_('Long text')), check_long_string, str, "LONG_TEXT"),
        (DATE, str(_('Date picker')), check_date, str, "DATE"),
        (URL, str(_('Link')), check_url, str, "URL"),
        (EXTERNAL_ACCOUNT, str(_('External account')), check_short_string, str, "EXTERNAL_ACCOUNT"),
    ]  # type: List[FieldElement]

    ALL_FIELD_TYPES = [*FIELD_TYPE_DATA, *CHOICE_FIELD_TYPE_DATA, *USER_FIELD_TYPE_DATA]

    FIELD_VALIDATORS = {item[0]: item[2] for item in FIELD_TYPE_DATA}  # type: Dict[int, Validator]
    FIELD_CONVERTERS = {item[0]: item[3] for item in ALL_FIELD_TYPES}  # type: Dict[int, Callable[[Any], Any]]
    FIELD_TYPE_CHOICES = [(item[0], item[1]) for item in ALL_FIELD_TYPES]  # type: List[Tuple[int, str]]
    FIELD_TYPE_CHOICES_DICT = {
        item[4]: {"id": item[0], "name": item[1]} for item in ALL_FIELD_TYPES
    }  # type: Dict[str, Dict[str, Union[str, int]]]

    field_type = models.PositiveSmallIntegerField(choices=FIELD_TYPE_CHOICES,
                                                  default=SHORT_TEXT)  # type: int

    # A JSON blob of any additional data needed to define the field beyond
    # type/name/hint.
    #
    # The format depends on the type.  Field types SHORT_TEXT, LONG_TEXT,
    # DATE, URL, and USER leave this null.  Fields of type CHOICE store the
    # choices' descriptions.
    #
    # Note: There is no performance overhead of using TextField in PostgreSQL.
    # See https://www.postgresql.org/docs/9.0/static/datatype-character.html
    field_data = models.TextField(default='', null=True)  # type: Optional[str]

    class Meta:
        unique_together = ('realm', 'name')

    def as_dict(self) -> ProfileDataElement:
        """Serialize the field definition for the API."""
        return {
            'id': self.id,
            'name': self.name,
            'type': self.field_type,
            'hint': self.hint,
            'field_data': self.field_data,
            'order': self.order,
        }

    def is_renderable(self) -> bool:
        """Whether values of this field type are text that gets rendered."""
        if self.field_type in [CustomProfileField.SHORT_TEXT, CustomProfileField.LONG_TEXT]:
            return True
        return False

    def __str__(self) -> str:
        return "<CustomProfileField: %s %s %s %d>" % (self.realm, self.name, self.field_type, self.order)
|
2018-03-12 01:55:23 +01:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def custom_profile_fields_for_realm(realm_id: int) -> QuerySet:
    """Return the realm's CustomProfileField rows in display order.

    Fix: the previous ``List[CustomProfileField]`` annotation was wrong —
    this returns a lazy Django QuerySet (of CustomProfileField), not a
    list.  (Annotation-only change; runtime behavior is identical.)
    """
    return CustomProfileField.objects.filter(realm=realm_id).order_by('order')
|
2017-03-17 10:07:22 +01:00
|
|
|
|
|
|
|
class CustomProfileFieldValue(models.Model):
    """A single user's value for one CustomProfileField."""
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    field = models.ForeignKey(CustomProfileField, on_delete=CASCADE)  # type: CustomProfileField
    value = models.TextField()  # type: str
    # Rendered form of value — presumably only populated for field types
    # where CustomProfileField.is_renderable() is True; confirm in callers.
    rendered_value = models.TextField(null=True, default=None)  # type: Optional[str]

    class Meta:
        unique_together = ('user_profile', 'field')

    def __str__(self) -> str:
        parts = (self.user_profile, self.field, self.value)
        return "<CustomProfileFieldValue: %s %s %s>" % parts
|
|
|
|
|
2017-05-25 19:16:40 +02:00
|
|
|
# Interfaces for services
# They provide additional functionality like parsing message to obtain query url, data to be sent to url,
# and parsing the response.
GENERIC_INTERFACE = 'GenericService'
SLACK_INTERFACE = 'SlackOutgoingWebhookService'
|
2017-05-25 19:16:40 +02:00
|
|
|
|
2017-05-25 20:41:05 +02:00
|
|
|
# A Service corresponds to either an outgoing webhook bot or an embedded bot.
# The type of Service is determined by the bot_type field of the referenced
# UserProfile.
#
# If the Service is an outgoing webhook bot:
# - name is any human-readable identifier for the Service
# - base_url is the address of the third-party site
# - token is used for authentication with the third-party site
#
# If the Service is an embedded bot:
# - name is the canonical name for the type of bot (e.g. 'xkcd' for an instance
#   of the xkcd bot); multiple embedded bots can have the same name, but all
#   embedded bots with the same name will run the same code
# - base_url and token are currently unused
class Service(models.Model):
    name = models.CharField(max_length=UserProfile.MAX_NAME_LENGTH)  # type: str
    # Bot user corresponding to the Service.  The bot_type of this user
    # determines the type of service.  If non-bot services are added later,
    # user_profile can also represent the owner of the Service.
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    base_url = models.TextField()  # type: str
    token = models.TextField()  # type: str
    # Interface / API version of the service.
    interface = models.PositiveSmallIntegerField(default=1)  # type: int

    # Valid interface values (see _interfaces below for their names).
    GENERIC = 1
    SLACK = 2

    ALLOWED_INTERFACE_TYPES = [
        GENERIC,
        SLACK,
    ]
    # N.B. If we used Django's choice=... we would get this for free (kinda)
    _interfaces = {
        GENERIC: GENERIC_INTERFACE,
        SLACK: SLACK_INTERFACE,
    }  # type: Dict[int, str]

    def interface_name(self) -> str:
        """Return the symbolic name of this service's interface.

        Raises KeyError if the stored interface value is unknown.
        """
        return self._interfaces[self.interface]
|
|
|
|
|
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_bot_services(user_profile_id: int) -> List[Service]:
    """Return all Service rows attached to the given bot user.

    Fix: user_profile_id was annotated ``str``, but it is a numeric
    database ID (it is compared against ``user_profile__id``).
    (Annotation-only change; runtime behavior is identical.)
    """
    return list(Service.objects.filter(user_profile__id=user_profile_id))
|
2016-07-15 18:57:37 +02:00
|
|
|
|
2017-11-27 07:33:05 +01:00
|
|
|
def get_service_profile(user_profile_id: int, service_name: str) -> Service:
    """Return the Service with the given name owned by the given bot user.

    Fix: user_profile_id was annotated ``str``, but it is a numeric
    database ID (matched against ``user_profile__id``).  Raises
    Service.DoesNotExist when no such row exists.
    (Annotation-only change; runtime behavior is identical.)
    """
    return Service.objects.get(user_profile__id=user_profile_id, name=service_name)
|
2017-10-12 16:31:25 +02:00
|
|
|
|
|
|
|
|
2017-11-24 10:18:29 +01:00
|
|
|
class BotStorageData(models.Model):
    """Key-value storage belonging to an embedded bot, keyed by
    (bot_profile, key)."""
    bot_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    key = models.TextField(db_index=True)  # type: str
    value = models.TextField()  # type: str

    class Meta:
        unique_together = ("bot_profile", "key")
|
2017-11-01 20:51:12 +01:00
|
|
|
|
2018-01-06 11:01:22 +01:00
|
|
|
class BotConfigData(models.Model):
    """Key-value configuration for an embedded bot, keyed by
    (bot_profile, key)."""
    bot_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    key = models.TextField(db_index=True)  # type: str
    value = models.TextField()  # type: str

    # Fix: was the legacy Python-2 style ``class Meta(object):``,
    # inconsistent with every other ``class Meta:`` in this file.
    class Meta:
        unique_together = ("bot_profile", "key")
|
2019-08-30 00:21:36 +02:00
|
|
|
|
|
|
|
class InvalidFakeEmailDomain(Exception):
    """Raised when settings.FAKE_EMAIL_DOMAIN cannot form valid addresses."""
|
|
|
|
|
|
|
|
def get_fake_email_domain() -> str:
    """Return settings.FAKE_EMAIL_DOMAIN after verifying it can form
    syntactically valid email addresses.

    Raises InvalidFakeEmailDomain if "bot@<domain>" fails validation.
    """
    fake_domain = settings.FAKE_EMAIL_DOMAIN
    try:
        # Check that the fake email domain can be used to form valid email addresses.
        validate_email("bot@" + fake_domain)
    except ValidationError:
        raise InvalidFakeEmailDomain(fake_domain + ' is not a valid domain.')

    return fake_domain
|