from typing import Any, DefaultDict, Dict, List, Set, Tuple, TypeVar, Text, \
    Union, Optional, Sequence, AbstractSet, Pattern, AnyStr, Callable, Iterable
from typing.re import Match
from zerver.lib.str_utils import NonBinaryStr

from django.db import models
from django.db.models.query import QuerySet, F
from django.db.models import Manager, CASCADE, Sum
from django.db.models.functions import Length
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser, UserManager, \
    PermissionsMixin
import django.contrib.auth
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator, MinLengthValidator, \
    RegexValidator
from django.dispatch import receiver
from zerver.lib.cache import cache_with_key, flush_user_profile, flush_realm, \
    user_profile_by_api_key_cache_key, \
    user_profile_by_id_cache_key, user_profile_by_email_cache_key, \
    user_profile_cache_key, generic_bulk_cached_fetch, cache_set, flush_stream, \
    display_recipient_cache_key, cache_delete, active_user_ids_cache_key, \
    get_stream_cache_key, realm_user_dicts_cache_key, \
    bot_dicts_in_realm_cache_key, realm_user_dict_fields, \
    bot_dict_fields, flush_message, bot_profile_cache_key
from zerver.lib.utils import make_safe_digest, generate_random_token
from django.db import transaction
from django.utils.timezone import now as timezone_now
from django.contrib.sessions.models import Session
from zerver.lib.timestamp import datetime_to_timestamp
from django.db.models.signals import pre_save, post_save, post_delete
from django.utils.translation import ugettext_lazy as _
from zerver.lib import cache
from zerver.lib.validator import check_int, check_float, check_string, \
    check_short_string
from django.utils.encoding import force_text

from bitfield import BitField
from bitfield.types import BitHandler
from collections import defaultdict
from datetime import timedelta
import pylibmc
import re
import logging
import sre_constants
import time
import datetime
import sys

MAX_SUBJECT_LENGTH = 60
MAX_MESSAGE_LENGTH = 10000
MAX_LANGUAGE_ID_LENGTH = 50  # type: int

STREAM_NAMES = TypeVar('STREAM_NAMES', Sequence[Text], AbstractSet[Text])

def query_for_ids(query, user_ids, field):
    # type: (QuerySet, List[int], str) -> QuerySet
    '''
    This function optimizes searches of the form
    `user_profile_id in (1, 2, 3, 4)` by quickly
    building the where clauses.  Profiling shows significant
    speedups over the normal Django-based approach.

    Use this very carefully! Also, the caller should
    guard against empty lists of user_ids.
    '''
    assert(user_ids)
    value_list = ', '.join(str(int(user_id)) for user_id in user_ids)
    clause = '%s in (%s)' % (field, value_list)
    query = query.extra(
        where=[clause]
    )
    return query
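
# Illustrative use of query_for_ids (hypothetical ids and column name, not
# part of the original module): narrow a queryset to a known list of user ids
# via the raw SQL "in" clause built above.
#
#     users = UserProfile.objects.all()
#     users = query_for_ids(users, [1, 2, 3], 'zerver_userprofile.id')
#     # The generated WHERE clause contains: zerver_userprofile.id in (1, 2, 3)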

# Doing 1000 remote cache requests to get_display_recipient is quite slow,
# so add a local cache as well as the remote cache.
per_request_display_recipient_cache = {}  # type: Dict[int, Union[Text, List[Dict[str, Any]]]]

def get_display_recipient_by_id(recipient_id, recipient_type, recipient_type_id):
    # type: (int, int, Optional[int]) -> Union[Text, List[Dict[str, Any]]]
    """
    returns: an object describing the recipient (using a cache).
    If the type is a stream, the type_id must be an int; a string is returned.
    Otherwise, type_id may be None; an array of recipient dicts is returned.
    """
    if recipient_id not in per_request_display_recipient_cache:
        result = get_display_recipient_remote_cache(recipient_id, recipient_type, recipient_type_id)
        per_request_display_recipient_cache[recipient_id] = result
    return per_request_display_recipient_cache[recipient_id]

def get_display_recipient(recipient):
    # type: (Recipient) -> Union[Text, List[Dict[str, Any]]]
    return get_display_recipient_by_id(
        recipient.id,
        recipient.type,
        recipient.type_id
    )
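
# Illustrative flow (hypothetical Recipient row, not part of the original
# module): the first lookup for a recipient id goes through
# get_display_recipient_remote_cache() (remote cache or database); repeated
# lookups in the same request are served from
# per_request_display_recipient_cache.
#
#     name = get_display_recipient(recipient)  # populates the per-request cache
#     name = get_display_recipient(recipient)  # answered from the per-request cache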

def flush_per_request_caches():
    # type: () -> None
    global per_request_display_recipient_cache
    per_request_display_recipient_cache = {}
    global per_request_realm_filters_cache
    per_request_realm_filters_cache = {}
|
|
|
@cache_with_key(lambda *args: display_recipient_cache_key(args[0]),
|
2013-03-26 19:09:45 +01:00
|
|
|
timeout=3600*24*7)
|
2016-03-31 03:30:33 +02:00
|
|
|
def get_display_recipient_remote_cache(recipient_id, recipient_type, recipient_type_id):
|
2017-08-02 20:18:35 +02:00
|
|
|
# type: (int, int, Optional[int]) -> Union[Text, List[Dict[str, Any]]]
|
    """
    returns: an appropriate object describing the recipient.  For a
    stream this will be the stream name as a string.  For a huddle or
    personal, it will be an array of dicts about each recipient.
    """
    if recipient_type == Recipient.STREAM:
        assert recipient_type_id is not None
        stream = Stream.objects.get(id=recipient_type_id)
        return stream.name

    # The main priority for ordering here is being deterministic.
    # Right now, we order by ID, which matches the ordering of user
    # names in the left sidebar.
    user_profile_list = (UserProfile.objects.filter(subscription__recipient_id=recipient_id)
                         .select_related()
                         .order_by('id'))
    return [{'email': user_profile.email,
             'full_name': user_profile.full_name,
             'short_name': user_profile.short_name,
             'id': user_profile.id,
             'is_mirror_dummy': user_profile.is_mirror_dummy} for user_profile in user_profile_list]

def get_realm_emoji_cache_key(realm):
    # type: (Realm) -> Text
    return u'realm_emoji:%s' % (realm.id,)

class Realm(models.Model):
    MAX_REALM_NAME_LENGTH = 40
    MAX_REALM_SUBDOMAIN_LENGTH = 40
    AUTHENTICATION_FLAGS = [u'Google', u'Email', u'GitHub', u'LDAP', u'Dev', u'RemoteUser']
    SUBDOMAIN_FOR_ROOT_DOMAIN = ''

    name = models.CharField(max_length=MAX_REALM_NAME_LENGTH, null=True)  # type: Optional[Text]
    string_id = models.CharField(max_length=MAX_REALM_SUBDOMAIN_LENGTH, unique=True)  # type: Text
    restricted_to_domain = models.BooleanField(default=False)  # type: bool
    invite_required = models.BooleanField(default=True)  # type: bool
    invite_by_admins_only = models.BooleanField(default=False)  # type: bool
    inline_image_preview = models.BooleanField(default=True)  # type: bool
    inline_url_embed_preview = models.BooleanField(default=True)  # type: bool
    create_stream_by_admins_only = models.BooleanField(default=False)  # type: bool
    add_emoji_by_admins_only = models.BooleanField(default=False)  # type: bool
    mandatory_topics = models.BooleanField(default=False)  # type: bool
    show_digest_email = models.BooleanField(default=True)  # type: bool
    name_changes_disabled = models.BooleanField(default=False)  # type: bool
    email_changes_disabled = models.BooleanField(default=False)  # type: bool
    description = models.TextField(null=True)  # type: Optional[Text]

    allow_message_deleting = models.BooleanField(default=False)  # type: bool
    allow_message_editing = models.BooleanField(default=True)  # type: bool
    DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS = 600  # if changed, also change in admin.js
    message_content_edit_limit_seconds = models.IntegerField(default=DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS)  # type: int
    message_retention_days = models.IntegerField(null=True)  # type: Optional[int]
    allow_edit_history = models.BooleanField(default=True)  # type: bool

    # Valid org_types are {CORPORATE, COMMUNITY}
    CORPORATE = 1
    COMMUNITY = 2
    org_type = models.PositiveSmallIntegerField(default=CORPORATE)  # type: int

    date_created = models.DateTimeField(default=timezone_now)  # type: datetime.datetime
    notifications_stream = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)  # type: Optional[Stream]
    deactivated = models.BooleanField(default=False)  # type: bool
    default_language = models.CharField(default=u'en', max_length=MAX_LANGUAGE_ID_LENGTH)  # type: Text
    authentication_methods = BitField(flags=AUTHENTICATION_FLAGS,
                                      default=2**31 - 1)  # type: BitHandler
    waiting_period_threshold = models.PositiveIntegerField(default=0)  # type: int

    # Define the types of the various automatically managed properties
    property_types = dict(
        add_emoji_by_admins_only=bool,
        allow_edit_history=bool,
        allow_message_deleting=bool,
        create_stream_by_admins_only=bool,
        default_language=Text,
        description=Text,
        email_changes_disabled=bool,
        invite_required=bool,
        invite_by_admins_only=bool,
        inline_image_preview=bool,
        inline_url_embed_preview=bool,
        mandatory_topics=bool,
        message_retention_days=(int, type(None)),
        name=Text,
        name_changes_disabled=bool,
        restricted_to_domain=bool,
        waiting_period_threshold=int,
    )  # type: Dict[str, Union[type, Tuple[type, ...]]]

    ICON_FROM_GRAVATAR = u'G'
    ICON_UPLOADED = u'U'
    ICON_SOURCES = (
        (ICON_FROM_GRAVATAR, 'Hosted by Gravatar'),
        (ICON_UPLOADED, 'Uploaded by administrator'),
    )
    icon_source = models.CharField(default=ICON_FROM_GRAVATAR, choices=ICON_SOURCES,
                                   max_length=1)  # type: Text
    icon_version = models.PositiveSmallIntegerField(default=1)  # type: int

    DEFAULT_NOTIFICATION_STREAM_NAME = u'announce'

    def authentication_methods_dict(self):
        # type: () -> Dict[Text, bool]
        """Returns a mapping from authentication flags to their status,
        showing only those authentication flags that are supported on
        the current server (i.e. if EmailAuthBackend is not configured
        on the server, this will not return an entry for "Email")."""
        # This mapping needs to be imported from here due to the cyclic
        # dependency.
        from zproject.backends import AUTH_BACKEND_NAME_MAP

        ret = {}  # type: Dict[Text, bool]
        supported_backends = {backend.__class__ for backend in django.contrib.auth.get_backends()}
        for k, v in self.authentication_methods.iteritems():
            backend = AUTH_BACKEND_NAME_MAP[k]
            if backend in supported_backends:
                ret[k] = v
        return ret
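
    # Illustrative result (hypothetical server configuration, not part of the
    # original module): with only the email and GitHub backends enabled on the
    # server, a realm that allows both might see
    #
    #     realm.authentication_methods_dict()
    #     # => {u'Email': True, u'GitHub': True}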

    def __str__(self):
        # type: () -> Text
        return "<Realm: %s %s>" % (self.string_id, self.id)

    @cache_with_key(get_realm_emoji_cache_key, timeout=3600*24*7)
    def get_emoji(self):
        # type: () -> Dict[Text, Optional[Dict[str, Iterable[Text]]]]
        return get_realm_emoji_uncached(self)

    def get_admin_users(self):
        # type: () -> Sequence[UserProfile]
        # TODO: Change return type to QuerySet[UserProfile]
        return UserProfile.objects.filter(realm=self, is_realm_admin=True,
                                          is_active=True).select_related()

    def get_active_users(self):
        # type: () -> Sequence[UserProfile]
        # TODO: Change return type to QuerySet[UserProfile]
        return UserProfile.objects.filter(realm=self, is_active=True).select_related()

    def get_bot_domain(self):
        # type: () -> str
        # Remove the port. Mainly needed for development environment.
        return self.host.split(':')[0]

    def get_notifications_stream(self):
        # type: () -> Optional['Stream']
        if self.notifications_stream is not None and not self.notifications_stream.deactivated:
            return self.notifications_stream
        return None

    @property
    def subdomain(self):
        # type: () -> Text
        return self.string_id

    @property
    def display_subdomain(self):
        # type: () -> Text
        """Likely to be temporary function to avoid signup messages being sent
        to an empty topic"""
        if self.string_id == "":
            return "."
        return self.string_id

    @property
    def uri(self):
        # type: () -> str
        return settings.EXTERNAL_URI_SCHEME + self.host

    @property
    def host(self):
        # type: () -> str
        return self.host_for_subdomain(self.subdomain)

    @staticmethod
    def host_for_subdomain(subdomain):
        # type: (str) -> str
        if subdomain == Realm.SUBDOMAIN_FOR_ROOT_DOMAIN:
            return settings.EXTERNAL_HOST
        default_host = "%s.%s" % (subdomain, settings.EXTERNAL_HOST)
        return settings.REALM_HOSTS.get(subdomain, default_host)
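
    # Illustrative behavior (hypothetical settings, not part of the original
    # module): with EXTERNAL_HOST = 'chat.example.com' and no REALM_HOSTS
    # override, a realm whose string_id is 'engineering' is served from
    # 'engineering.chat.example.com', while the root-domain realm
    # (string_id == '') is served from 'chat.example.com' itself.
    #
    #     Realm.host_for_subdomain('engineering')  # => 'engineering.chat.example.com'
    #     Realm.host_for_subdomain('')             # => 'chat.example.com'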

    @property
    def is_zephyr_mirror_realm(self):
        # type: () -> bool
        return self.string_id == "zephyr"

    @property
    def webathena_enabled(self):
        # type: () -> bool
        return self.is_zephyr_mirror_realm

    @property
    def presence_disabled(self):
        # type: () -> bool
        return self.is_zephyr_mirror_realm

    class Meta:
        permissions = (
            ('administer', "Administer a realm"),
            ('api_super_user', "Can send messages as other users for mirroring"),
        )

post_save.connect(flush_realm, sender=Realm)

def get_realm(string_id):
    # type: (Text) -> Optional[Realm]
    return Realm.objects.filter(string_id=string_id).first()

def name_changes_disabled(realm):
    # type: (Optional[Realm]) -> bool
    if realm is None:
        return settings.NAME_CHANGES_DISABLED
    return settings.NAME_CHANGES_DISABLED or realm.name_changes_disabled

class RealmDomain(models.Model):
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    # should always be stored lowercase
    domain = models.CharField(max_length=80, db_index=True)  # type: Text
    allow_subdomains = models.BooleanField(default=False)

    class Meta:
        unique_together = ("realm", "domain")

# These functions should only be used on email addresses that have
# been validated via django.core.validators.validate_email
#
# Note that we need to use some care, since you can have multiple @-signs; e.g.
#     "tabbott@test"@zulip.com
# is a valid email address
def email_to_username(email):
    # type: (Text) -> Text
    return "@".join(email.split("@")[:-1]).lower()

# Returns the raw domain portion of the desired email address
def email_to_domain(email):
    # type: (Text) -> Text
    return email.split("@")[-1].lower()
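
# Illustrative split (example address only, not part of the original module):
# only the final @-sign separates the username from the domain.
#
#     email_to_username(u'"tabbott@test"@zulip.com')  # => u'"tabbott@test"'
#     email_to_domain(u'"tabbott@test"@zulip.com')    # => u'zulip.com'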

class GetRealmByDomainException(Exception):
    pass

# Is a user with the given email address allowed to be in the given realm?
# (This function does not check whether the user has been invited to the realm.
# So for invite-only realms, this is the test for whether a user can be invited,
# not whether the user can sign up currently.)
def email_allowed_for_realm(email, realm):
    # type: (Text, Realm) -> bool
    if not realm.restricted_to_domain:
        return True
    domain = email_to_domain(email)
    query = RealmDomain.objects.filter(realm=realm)
    if query.filter(domain=domain).exists():
        return True
    else:
        query = query.filter(allow_subdomains=True)
        while len(domain) > 0:
            subdomain, sep, domain = domain.partition('.')
            if query.filter(domain=domain).exists():
                return True
    return False
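
# Illustrative subdomain walk (hypothetical realm data, not part of the
# original module): for email_to_domain == 'a.b.example.com', the loop strips
# one label at a time and checks 'b.example.com', 'example.com', and 'com'
# against RealmDomain rows with allow_subdomains=True; an exact-domain match
# is accepted regardless of the allow_subdomains flag.
#
#     email_allowed_for_realm(u'user@a.b.example.com', realm)
#     # => True if the realm has a RealmDomain('example.com', allow_subdomains=True)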

def get_realm_domains(realm):
    # type: (Realm) -> List[Dict[str, Text]]
    return list(realm.realmdomain_set.values('domain', 'allow_subdomains'))

class RealmEmoji(models.Model):
    author = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    # Second part of the regex (negative lookbehind) disallows names ending with
    # one of the punctuation characters.
    name = models.TextField(validators=[MinLengthValidator(1),
                                        RegexValidator(regex=r'^[0-9a-z.\-_]+(?<![.\-_])$',
                                                       message=_("Invalid characters in emoji name"))])  # type: Text
    file_name = models.TextField(db_index=True, null=True)  # type: Optional[Text]
    deactivated = models.BooleanField(default=False)  # type: bool

    PATH_ID_TEMPLATE = "{realm_id}/emoji/{emoji_file_name}"

    class Meta:
        unique_together = ("realm", "name")

    def __str__(self):
        # type: () -> Text
        return "<RealmEmoji(%s): %s %s>" % (self.realm.string_id, self.name, self.file_name)

def get_realm_emoji_uncached(realm):
    # type: (Realm) -> Dict[Text, Dict[str, Any]]
    d = {}
    from zerver.lib.emoji import get_emoji_url
    for row in RealmEmoji.objects.filter(realm=realm).select_related('author'):
        author = None
        if row.author:
            author = {
                'id': row.author.id,
                'email': row.author.email,
                'full_name': row.author.full_name}
        d[row.name] = dict(source_url=get_emoji_url(row.file_name, row.realm_id),
                           deactivated=row.deactivated,
                           author=author)
    return d
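
# Illustrative return value (hypothetical emoji row and URL, not part of the
# original module): each realm emoji name maps to its serving URL, its
# deactivation flag, and a small author dict (or None if the author is unknown).
#
#     get_realm_emoji_uncached(realm)
#     # => {u'party_parrot': {'source_url': u'<url returned by get_emoji_url>',
#     #                       'deactivated': False,
#     #                       'author': {'id': 7, 'email': u'iago@zulip.com',
#     #                                  'full_name': u'Iago'}}}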

def flush_realm_emoji(sender, **kwargs):
    # type: (Any, **Any) -> None
    realm = kwargs['instance'].realm
    cache_set(get_realm_emoji_cache_key(realm),
              get_realm_emoji_uncached(realm),
              timeout=3600*24*7)

post_save.connect(flush_realm_emoji, sender=RealmEmoji)
post_delete.connect(flush_realm_emoji, sender=RealmEmoji)

def filter_pattern_validator(value):
    # type: (Text) -> None
    regex = re.compile(r'(?:[\w\-#]*)(\(\?P<\w+>.+\))')
    error_msg = 'Invalid filter pattern, you must use the following format OPTIONAL_PREFIX(?P<id>.+)'

    if not regex.match(str(value)):
        raise ValidationError(error_msg)

    try:
        re.compile(value)
    except sre_constants.error:
        # Regex is invalid
        raise ValidationError(error_msg)

def filter_format_validator(value):
    # type: (str) -> None
    regex = re.compile(r'^[\.\/:a-zA-Z0-9_?=-]+%\(([a-zA-Z0-9_-]+)\)s[a-zA-Z0-9_-]*$')

    if not regex.match(value):
        raise ValidationError('URL format string must be in the following format: '
                              '`https://example.com/%(\w+)s`')
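
# Illustrative linkifier pair (hypothetical values, not part of the original
# module): a pattern with one named group and a URL format string that both
# validators above accept.
#
#     pattern = u'#(?P<id>[0-9]+)'
#     url_format_string = u'https://github.com/zulip/zulip/issues/%(id)s'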

class RealmFilter(models.Model):
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    pattern = models.TextField(validators=[filter_pattern_validator])  # type: Text
    url_format_string = models.TextField(validators=[URLValidator(), filter_format_validator])  # type: Text

    class Meta:
        unique_together = ("realm", "pattern")

    def __str__(self):
        # type: () -> Text
        return "<RealmFilter(%s): %s %s>" % (self.realm.string_id, self.pattern, self.url_format_string)

def get_realm_filters_cache_key(realm_id):
    # type: (int) -> Text
    return u'all_realm_filters:%s' % (realm_id,)

# We have a per-process cache to avoid doing 1000 remote cache queries during page load
per_request_realm_filters_cache = {}  # type: Dict[int, List[Tuple[Text, Text, int]]]

def realm_in_local_realm_filters_cache(realm_id):
    # type: (int) -> bool
    return realm_id in per_request_realm_filters_cache

def realm_filters_for_realm(realm_id):
    # type: (int) -> List[Tuple[Text, Text, int]]
    if not realm_in_local_realm_filters_cache(realm_id):
        per_request_realm_filters_cache[realm_id] = realm_filters_for_realm_remote_cache(realm_id)
    return per_request_realm_filters_cache[realm_id]

@cache_with_key(get_realm_filters_cache_key, timeout=3600*24*7)
def realm_filters_for_realm_remote_cache(realm_id):
    # type: (int) -> List[Tuple[Text, Text, int]]
    filters = []
    for realm_filter in RealmFilter.objects.filter(realm_id=realm_id):
        filters.append((realm_filter.pattern, realm_filter.url_format_string, realm_filter.id))

    return filters

def all_realm_filters():
    # type: () -> Dict[int, List[Tuple[Text, Text, int]]]
    filters = defaultdict(list)  # type: DefaultDict[int, List[Tuple[Text, Text, int]]]
    for realm_filter in RealmFilter.objects.all():
        filters[realm_filter.realm_id].append((realm_filter.pattern,
                                               realm_filter.url_format_string,
                                               realm_filter.id))

    return filters

def flush_realm_filter(sender, **kwargs):
    # type: (Any, **Any) -> None
    realm_id = kwargs['instance'].realm_id
    cache_delete(get_realm_filters_cache_key(realm_id))
    try:
        per_request_realm_filters_cache.pop(realm_id)
    except KeyError:
        pass

post_save.connect(flush_realm_filter, sender=RealmFilter)
post_delete.connect(flush_realm_filter, sender=RealmFilter)

class UserProfile(AbstractBaseUser, PermissionsMixin):
    DEFAULT_BOT = 1
    """
    Incoming webhook bots are limited to only sending messages via webhooks.
    Thus, it is less of a security risk to expose their API keys to third-party services,
    since they can't be used to read messages.
    """
    INCOMING_WEBHOOK_BOT = 2
    # This value is also being used in static/js/settings_bots.js.
    # On updating it here, update it there as well.
    OUTGOING_WEBHOOK_BOT = 3
    """
    Embedded bots run within the Zulip server itself; events are added to the
    embedded_bots queue and then handled by a QueueProcessingWorker.
    """
    EMBEDDED_BOT = 4

    # For now, don't allow creating other bot types via the UI
    ALLOWED_BOT_TYPES = [
        DEFAULT_BOT,
        INCOMING_WEBHOOK_BOT,
        OUTGOING_WEBHOOK_BOT,
        EMBEDDED_BOT,
    ]

    SERVICE_BOT_TYPES = [
        OUTGOING_WEBHOOK_BOT,
        EMBEDDED_BOT
    ]

    # Fields from models.AbstractUser minus last_name and first_name,
    # which we don't use; email is modified to make it indexed and unique.
    email = models.EmailField(blank=False, db_index=True, unique=True)  # type: Text
    is_staff = models.BooleanField(default=False)  # type: bool
    is_active = models.BooleanField(default=True, db_index=True)  # type: bool
    is_realm_admin = models.BooleanField(default=False, db_index=True)  # type: bool
    is_bot = models.BooleanField(default=False, db_index=True)  # type: bool
    bot_type = models.PositiveSmallIntegerField(null=True, db_index=True)  # type: Optional[int]
    is_api_super_user = models.BooleanField(default=False, db_index=True)  # type: bool
    date_joined = models.DateTimeField(default=timezone_now)  # type: datetime.datetime
    is_mirror_dummy = models.BooleanField(default=False)  # type: bool
    bot_owner = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)  # type: Optional[UserProfile]
    long_term_idle = models.BooleanField(default=False, db_index=True)  # type: bool

    USERNAME_FIELD = 'email'
    MAX_NAME_LENGTH = 100
    MIN_NAME_LENGTH = 3
    API_KEY_LENGTH = 32
    NAME_INVALID_CHARS = ['*', '`', '>', '"', '@']

    # Our custom site-specific fields
    full_name = models.CharField(max_length=MAX_NAME_LENGTH)  # type: Text
    short_name = models.CharField(max_length=MAX_NAME_LENGTH)  # type: Text
    # pointer points to Message.id, NOT UserMessage.id.
    pointer = models.IntegerField()  # type: int
    last_pointer_updater = models.CharField(max_length=64)  # type: Text
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    api_key = models.CharField(max_length=API_KEY_LENGTH)  # type: Text
    tos_version = models.CharField(null=True, max_length=10)  # type: Optional[Text]
    last_active_message_id = models.IntegerField(null=True)  # type: int

    ### Notifications settings. ###

    # Stream notifications.
    enable_stream_desktop_notifications = models.BooleanField(default=False)  # type: bool
    enable_stream_push_notifications = models.BooleanField(default=False)  # type: bool
    enable_stream_sounds = models.BooleanField(default=False)  # type: bool

    # PM + @-mention notifications.
    enable_desktop_notifications = models.BooleanField(default=True)  # type: bool
    pm_content_in_desktop_notifications = models.BooleanField(default=True)  # type: bool
    enable_sounds = models.BooleanField(default=True)  # type: bool
    enable_offline_email_notifications = models.BooleanField(default=True)  # type: bool
    enable_offline_push_notifications = models.BooleanField(default=True)  # type: bool
    enable_online_push_notifications = models.BooleanField(default=False)  # type: bool

    enable_digest_emails = models.BooleanField(default=True)  # type: bool

    # Old notification field superseded by existence of stream notification
    # settings.
    default_desktop_notifications = models.BooleanField(default=True)  # type: bool

    ###

    last_reminder = models.DateTimeField(default=timezone_now, null=True)  # type: Optional[datetime.datetime]
    rate_limits = models.CharField(default=u"", max_length=100)  # type: Text # comma-separated list of range:max pairs

    # Default streams
    default_sending_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)  # type: Optional[Stream]
    default_events_register_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)  # type: Optional[Stream]
    default_all_public_streams = models.BooleanField(default=False)  # type: bool

    # UI vars
    enter_sends = models.NullBooleanField(default=False)  # type: Optional[bool]
    autoscroll_forever = models.BooleanField(default=False)  # type: bool
    left_side_userlist = models.BooleanField(default=False)  # type: bool
    emoji_alt_code = models.BooleanField(default=False)  # type: bool

    # display settings
    twenty_four_hour_time = models.BooleanField(default=False)  # type: bool
    default_language = models.CharField(default=u'en', max_length=MAX_LANGUAGE_ID_LENGTH)  # type: Text
    high_contrast_mode = models.BooleanField(default=False)  # type: bool
    night_mode = models.BooleanField(default=False)  # type: bool

    # Hours to wait before sending another email to a user
    EMAIL_REMINDER_WAITPERIOD = 24
    # Minutes to wait before warning a bot owner that their bot sent a message
    # to a nonexistent stream
    BOT_OWNER_STREAM_ALERT_WAITPERIOD = 1

    AVATAR_FROM_GRAVATAR = u'G'
    AVATAR_FROM_USER = u'U'
    AVATAR_SOURCES = (
        (AVATAR_FROM_GRAVATAR, 'Hosted by Gravatar'),
        (AVATAR_FROM_USER, 'Uploaded by user'),
    )
    avatar_source = models.CharField(default=AVATAR_FROM_GRAVATAR, choices=AVATAR_SOURCES, max_length=1)  # type: Text
    avatar_version = models.PositiveSmallIntegerField(default=1)  # type: int

    TUTORIAL_WAITING = u'W'
    TUTORIAL_STARTED = u'S'
    TUTORIAL_FINISHED = u'F'
    TUTORIAL_STATES = ((TUTORIAL_WAITING, "Waiting"),
                       (TUTORIAL_STARTED, "Started"),
                       (TUTORIAL_FINISHED, "Finished"))

    tutorial_status = models.CharField(default=TUTORIAL_WAITING, choices=TUTORIAL_STATES, max_length=1)  # type: Text
    # Contains serialized JSON of the form:
    #     [("step 1", true), ("step 2", false)]
    # where the second element of each tuple is whether the step has been
    # completed.
    onboarding_steps = models.TextField(default=u'[]')  # type: Text

    alert_words = models.TextField(default=u'[]')  # type: Text # json-serialized list of strings

    objects = UserManager()  # type: UserManager

    DEFAULT_UPLOADS_QUOTA = 1024*1024*1024

    quota = models.IntegerField(default=DEFAULT_UPLOADS_QUOTA)  # type: int
    # The maximum length of a timezone in pytz.all_timezones is 32.
    # Setting max_length=40 is a safe choice.
    # In Django, the convention is to use empty string instead of Null
    # for text based fields. For more information, see
    # https://docs.djangoproject.com/en/1.10/ref/models/fields/#django.db.models.Field.null.
    timezone = models.CharField(max_length=40, default=u'')  # type: Text

    # Emojisets
    APPLE_EMOJISET = u'apple'
    EMOJIONE_EMOJISET = u'emojione'
    GOOGLE_EMOJISET = u'google'
    TWITTER_EMOJISET = u'twitter'
    EMOJISET_CHOICES = ((APPLE_EMOJISET, "Apple"),
                        (EMOJIONE_EMOJISET, "Emoji One"),
                        (GOOGLE_EMOJISET, "Google"),
                        (TWITTER_EMOJISET, "Twitter"))
    emojiset = models.CharField(default=GOOGLE_EMOJISET, choices=EMOJISET_CHOICES, max_length=20)  # type: Text

    # Define the types of the various automatically managed properties
    property_types = dict(
        default_language=Text,
        emoji_alt_code=bool,
        emojiset=Text,
        left_side_userlist=bool,
        timezone=Text,
        twenty_four_hour_time=bool,
        high_contrast_mode=bool,
        night_mode=bool,
    )

    notification_setting_types = dict(
        enable_desktop_notifications=bool,
        enable_digest_emails=bool,
        enable_offline_email_notifications=bool,
        enable_offline_push_notifications=bool,
        enable_online_push_notifications=bool,
        enable_sounds=bool,
        enable_stream_desktop_notifications=bool,
        enable_stream_push_notifications=bool,
        enable_stream_sounds=bool,
        pm_content_in_desktop_notifications=bool,
    )

    @property
    def profile_data(self):
        # type: () -> List[Dict[str, Union[int, float, Text]]]
        values = CustomProfileFieldValue.objects.filter(user_profile=self)
        user_data = {v.field_id: v.value for v in values}
        data = []  # type: List[Dict[str, Union[int, float, Text]]]
        for field in custom_profile_fields_for_realm(self.realm_id):
            value = user_data.get(field.id, None)
            field_type = field.field_type
            if value is not None:
                converter = field.FIELD_CONVERTERS[field_type]
                value = converter(value)

            field_data = {}  # type: Dict[str, Union[int, float, Text]]
            for k, v in field.as_dict().items():
                field_data[k] = v
            field_data['value'] = value
            data.append(field_data)

        return data

    def can_admin_user(self, target_user):
        # type: (UserProfile) -> bool
        """Returns whether this user has permission to modify target_user"""
        if target_user.bot_owner == self:
            return True
        elif self.is_realm_admin and self.realm == target_user.realm:
            return True
        else:
            return False

    def __str__(self):
        # type: () -> Text
        return "<UserProfile: %s %s>" % (self.email, self.realm)

    @property
    def is_incoming_webhook(self):
        # type: () -> bool
        return self.bot_type == UserProfile.INCOMING_WEBHOOK_BOT

    @property
    def is_outgoing_webhook_bot(self):
        # type: () -> bool
        return self.bot_type == UserProfile.OUTGOING_WEBHOOK_BOT

    @property
    def is_embedded_bot(self):
        # type: () -> bool
        return self.bot_type == UserProfile.EMBEDDED_BOT

    @property
    def is_service_bot(self):
        # type: () -> bool
        return self.is_bot and self.bot_type in UserProfile.SERVICE_BOT_TYPES

    @staticmethod
    def emojiset_choices():
        # type: () -> Dict[Text, Text]
        return {emojiset[0]: force_text(emojiset[1]) for emojiset in UserProfile.EMOJISET_CHOICES}

    @staticmethod
    def emails_from_ids(user_ids):
        # type: (Sequence[int]) -> Dict[int, Text]
        rows = UserProfile.objects.filter(id__in=user_ids).values('id', 'email')
        return {row['id']: row['email'] for row in rows}

    def can_create_streams(self):
        # type: () -> bool
        diff = (timezone_now() - self.date_joined).days
        if self.is_realm_admin:
            return True
        elif self.realm.create_stream_by_admins_only:
            return False
        if diff >= self.realm.waiting_period_threshold:
            return True
        return False
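
    # Illustrative outcomes (hypothetical realm with waiting_period_threshold
    # set to 3 days and create_stream_by_admins_only=False; not part of the
    # original module):
    #
    #     new_member.can_create_streams()   # => False (joined 1 day ago)
    #     old_member.can_create_streams()   # => True  (joined 7 days ago)
    #     realm_admin.can_create_streams()  # => True  (admins always can)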

    def major_tos_version(self):
        # type: () -> int
        if self.tos_version is not None:
            return int(self.tos_version.split('.')[0])
        else:
            return -1
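
    # Illustrative values (example tos_version strings, not part of the
    # original module):
    #
    #     tos_version == u'1.0' -> major_tos_version() == 1
    #     tos_version is None   -> major_tos_version() == -1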

class UserGroup(models.Model):
    name = models.CharField(max_length=100)
    members = models.ManyToManyField(UserProfile, through='UserGroupMembership')
    realm = models.ForeignKey(Realm)
    description = models.CharField(max_length=1024, default=u'')  # type: Text

    class Meta:
        unique_together = (('realm', 'name'),)

class UserGroupMembership(models.Model):
    user_group = models.ForeignKey(UserGroup)
    user_profile = models.ForeignKey(UserProfile)

    class Meta:
        unique_together = (('user_group', 'user_profile'),)

def receives_offline_notifications(user_profile):
    # type: (UserProfile) -> bool
    return ((user_profile.enable_offline_email_notifications or
             user_profile.enable_offline_push_notifications) and
            not user_profile.is_bot)

def receives_online_notifications(user_profile):
    # type: (UserProfile) -> bool
    return (user_profile.enable_online_push_notifications and
            not user_profile.is_bot)

def receives_stream_notifications(user_profile):
    # type: (UserProfile) -> bool
    return (user_profile.enable_stream_push_notifications and
            not user_profile.is_bot)

def remote_user_to_email(remote_user):
    # type: (Text) -> Text
    if settings.SSO_APPEND_DOMAIN is not None:
        remote_user += "@" + settings.SSO_APPEND_DOMAIN
    return remote_user

# Make sure we flush the UserProfile object from our remote cache
# whenever we save it.
post_save.connect(flush_user_profile, sender=UserProfile)

class PreregistrationUser(models.Model):
    email = models.EmailField()  # type: Text
    referred_by = models.ForeignKey(UserProfile, null=True, on_delete=CASCADE)  # Optional[UserProfile]
    streams = models.ManyToManyField('Stream')  # type: Manager
    invited_at = models.DateTimeField(auto_now=True)  # type: datetime.datetime
    realm_creation = models.BooleanField(default=False)
    # Indicates whether the user needs a password.  Users who were
    # created via SSO style auth (e.g. GitHub/Google) generally do not.
    password_required = models.BooleanField(default=True)

    # status: whether an object has been confirmed.
    #   if confirmed, set to confirmation.settings.STATUS_ACTIVE
    status = models.IntegerField(default=0)  # type: int

    realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE)  # type: Optional[Realm]

    invited_as_admin = models.BooleanField(default=False)  # type: Optional[bool]

class MultiuseInvite(models.Model):
    referred_by = models.ForeignKey(UserProfile, on_delete=CASCADE)  # Optional[UserProfile]
    streams = models.ManyToManyField('Stream')  # type: Manager
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm

class EmailChangeStatus(models.Model):
    new_email = models.EmailField()  # type: Text
    old_email = models.EmailField()  # type: Text
    updated_at = models.DateTimeField(auto_now=True)  # type: datetime.datetime
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile

    # status: whether an object has been confirmed.
    #   if confirmed, set to confirmation.settings.STATUS_ACTIVE
    status = models.IntegerField(default=0)  # type: int

    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm

class AbstractPushDeviceToken(models.Model):
    APNS = 1
    GCM = 2

    KINDS = (
        (APNS, 'apns'),
        (GCM, 'gcm'),
    )

    kind = models.PositiveSmallIntegerField(choices=KINDS)  # type: int

    # The token is a unique device-specific token that is
    # sent to us from each device:
    #   - APNS token if kind == APNS
    #   - GCM registration id if kind == GCM
    last_updated = models.DateTimeField(auto_now=True)  # type: datetime.datetime

    # [optional] Contains the app id of the device if it is an iOS device
    ios_app_id = models.TextField(null=True)  # type: Optional[Text]

    class Meta:
        abstract = True

class PushDeviceToken(AbstractPushDeviceToken):
    # The user whose device this is
    user = models.ForeignKey(UserProfile, db_index=True, on_delete=CASCADE)  # type: UserProfile
    token = models.CharField(max_length=4096, unique=True)  # type: bytes

def generate_email_token_for_stream():
    # type: () -> str
    return generate_random_token(32)

class Stream(models.Model):
    MAX_NAME_LENGTH = 60
    name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True)  # type: Text
    realm = models.ForeignKey(Realm, db_index=True, on_delete=CASCADE)  # type: Realm
    invite_only = models.NullBooleanField(default=False)  # type: Optional[bool]

    # The unique thing about Zephyr public streams is that we never list their
    # users.  We may try to generalize this concept later, but for now
    # we just use a concrete field.  (Zephyr public streams aren't exactly like
    # invite-only streams--while both are private in terms of listing users,
    # for Zephyr we don't even list users to stream members, yet membership
    # is more public in the sense that you don't need a Zulip invite to join.
    # This field is populated directly from UserProfile.is_zephyr_mirror_realm,
    # and the reason for denormalizing the field is performance.
    is_in_zephyr_realm = models.BooleanField(default=False)  # type: bool

    # Used by the e-mail forwarder.  The e-mail RFC specifies a maximum
    # e-mail length of 254, and our max stream length is 60, so we
    # have plenty of room for the token.
    email_token = models.CharField(
        max_length=32, default=generate_email_token_for_stream)  # type: str
    description = models.CharField(max_length=1024, default=u'')  # type: Text

    date_created = models.DateTimeField(default=timezone_now)  # type: datetime.datetime
    deactivated = models.BooleanField(default=False)  # type: bool

    def __str__(self):
        # type: () -> Text
        return "<Stream: %s>" % (self.name,)

    def is_public(self):
        # type: () -> bool
        # All streams are private in Zephyr mirroring realms.
        return not self.invite_only and not self.is_in_zephyr_realm

    class Meta:
        unique_together = ("name", "realm")

    # This is stream information that is sent to clients
    def to_dict(self):
        # type: () -> Dict[str, Any]
        return dict(name=self.name,
                    stream_id=self.id,
                    description=self.description,
                    invite_only=self.invite_only)

post_save.connect(flush_stream, sender=Stream)
post_delete.connect(flush_stream, sender=Stream)
2016-04-01 08:42:38 +02:00
|
|
|
# The Recipient table is used to map Messages to the set of users who
|
|
|
|
# received the message. It is implemented as a set of triples (id,
|
|
|
|
# type_id, type). We have 3 types of recipients: Huddles (for group
|
|
|
|
# private messages), UserProfiles (for 1:1 private messages), and
|
2016-06-02 21:59:58 +02:00
|
|
|
# Streams. The recipient table maps a globally unique recipient id
|
2016-04-01 08:42:38 +02:00
|
|
|
# (used by the Message table) to the type-specific unique id (the
|
|
|
|
# stream id, user_profile id, or huddle id).
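#
# Illustrative (hypothetical) rows, to make the triple layout concrete:
#   Recipient(type=Recipient.STREAM,   type_id=3)   # messages sent to the Stream with id 3
#   Recipient(type=Recipient.PERSONAL, type_id=12)  # 1:1 private messages with UserProfile id 12
#   Recipient(type=Recipient.HUDDLE,   type_id=7)   # group private messages for the Huddle with id 7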
|
2017-10-27 08:42:27 +02:00
|
|
|
class Recipient(models.Model):
|
2017-07-09 01:16:47 +02:00
|
|
|
type_id = models.IntegerField(db_index=True) # type: int
|
|
|
|
type = models.PositiveSmallIntegerField(db_index=True) # type: int
|
2012-10-10 22:58:51 +02:00
|
|
|
# Valid types are {personal, stream, huddle}
|
2012-09-07 20:14:13 +02:00
|
|
|
PERSONAL = 1
|
2012-10-10 22:57:21 +02:00
|
|
|
STREAM = 2
|
2012-09-07 20:14:13 +02:00
|
|
|
HUDDLE = 3
|
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2012-11-07 22:33:38 +01:00
|
|
|
unique_together = ("type", "type_id")
|
|
|
|
|
2012-11-02 21:08:29 +01:00
|
|
|
# N.B. If we used Django's choices=... we would get this for free (kinda)
|
|
|
|
_type_names = {
|
|
|
|
PERSONAL: 'personal',
|
2017-01-24 06:21:14 +01:00
|
|
|
STREAM: 'stream',
|
|
|
|
HUDDLE: 'huddle'}
|
2012-11-02 21:08:29 +01:00
|
|
|
|
2012-09-07 20:14:13 +02:00
|
|
|
def type_name(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> str
|
2012-11-02 21:08:29 +01:00
|
|
|
# Raises KeyError if invalid
|
|
|
|
return self._type_names[self.type]
|
2012-08-28 21:27:42 +02:00
|
|
|
|
2017-10-27 09:06:40 +02:00
|
|
|
def __str__(self):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: () -> Text
|
2012-08-29 16:15:06 +02:00
|
|
|
display_recipient = get_display_recipient(self)
|
2017-10-27 09:06:40 +02:00
|
|
|
return "<Recipient: %s (%d, %s)>" % (display_recipient, self.type_id, self.type)
|
2012-08-29 16:15:06 +02:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class MutedTopic(models.Model):
|
2017-08-30 02:19:34 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)
|
|
|
|
stream = models.ForeignKey(Stream, on_delete=CASCADE)
|
|
|
|
recipient = models.ForeignKey(Recipient, on_delete=CASCADE)
|
|
|
|
topic_name = models.CharField(max_length=MAX_SUBJECT_LENGTH)
|
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2017-08-30 02:19:34 +02:00
|
|
|
unique_together = ('user_profile', 'stream', 'topic_name')
|
|
|
|
|
2017-10-27 09:06:40 +02:00
|
|
|
def __str__(self):
|
2017-08-30 02:19:34 +02:00
|
|
|
# type: () -> Text
|
2017-10-27 09:06:40 +02:00
|
|
|
return "<MutedTopic: (%s, %s, %s)>" % (self.user_profile.email, self.stream.name, self.topic_name)
|
2017-08-30 02:19:34 +02:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class Client(models.Model):
|
2017-07-09 01:16:47 +02:00
|
|
|
name = models.CharField(max_length=30, db_index=True, unique=True) # type: Text
|
2012-10-19 21:30:42 +02:00
|
|
|
|
2017-10-27 09:06:40 +02:00
|
|
|
def __str__(self):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: () -> Text
|
2017-10-27 09:06:40 +02:00
|
|
|
return "<Client: %s>" % (self.name,)
|
2016-04-21 00:26:45 +02:00
|
|
|
|
2017-07-09 01:16:47 +02:00
|
|
|
get_client_cache = {} # type: Dict[Text, Client]
|
2013-11-20 22:16:48 +01:00
|
|
|
def get_client(name):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> Client
|
2016-11-14 09:23:03 +01:00
|
|
|
# Accessing KEY_PREFIX through the module is necessary
|
|
|
|
# because we need the updated value of the variable.
|
|
|
|
cache_name = cache.KEY_PREFIX + name
|
|
|
|
if cache_name not in get_client_cache:
|
2016-03-31 03:30:33 +02:00
|
|
|
result = get_client_remote_cache(name)
|
2016-11-14 09:23:03 +01:00
|
|
|
get_client_cache[cache_name] = result
|
|
|
|
return get_client_cache[cache_name]
|
2013-11-20 22:16:48 +01:00
|
|
|
|
2013-03-26 17:47:52 +01:00
|
|
|
def get_client_cache_key(name):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> Text
|
2016-06-12 07:25:42 +02:00
|
|
|
return u'get_client:%s' % (make_safe_digest(name),)
|
2013-03-26 17:47:52 +01:00
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(get_client_cache_key, timeout=3600*24*7)
|
2016-03-31 03:30:33 +02:00
|
|
|
def get_client_remote_cache(name):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> Client
|
2013-11-01 18:59:05 +01:00
|
|
|
(client, _) = Client.objects.get_or_create(name=name)
|
2012-10-19 21:30:42 +02:00
|
|
|
return client
|
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(get_stream_cache_key, timeout=3600*24*7)
|
2017-10-22 01:48:16 +02:00
|
|
|
def get_realm_stream(stream_name, realm_id):
|
2017-09-17 22:26:43 +02:00
|
|
|
# type: (Text, int) -> Stream
|
2013-03-19 13:05:19 +01:00
|
|
|
return Stream.objects.select_related("realm").get(
|
2017-09-17 22:26:43 +02:00
|
|
|
name__iexact=stream_name.strip(), realm_id=realm_id)
|
2013-03-19 13:05:19 +01:00
|
|
|
|
2017-09-17 22:07:00 +02:00
|
|
|
def stream_name_in_use(stream_name, realm_id):
|
|
|
|
# type: (Text, int) -> bool
|
|
|
|
return Stream.objects.filter(
|
|
|
|
name__iexact=stream_name.strip(),
|
|
|
|
realm_id=realm_id
|
|
|
|
).exists()
|
|
|
|
|
2014-01-24 23:30:53 +01:00
|
|
|
def get_active_streams(realm):
|
2017-05-26 02:08:16 +02:00
|
|
|
# type: (Optional[Realm]) -> QuerySet
|
2014-01-24 23:30:53 +01:00
|
|
|
"""
|
|
|
|
Return all streams (including invite-only streams) that have not been deactivated.
|
|
|
|
"""
|
|
|
|
return Stream.objects.filter(realm=realm, deactivated=False)
|
|
|
|
|
2013-03-19 13:05:19 +01:00
|
|
|
def get_stream(stream_name, realm):
|
2017-03-23 07:22:28 +01:00
|
|
|
# type: (Text, Realm) -> Stream
|
2017-10-22 01:48:16 +02:00
|
|
|
'''
|
|
|
|
Callers that don't have a Realm object already available should use
|
|
|
|
get_realm_stream directly, to avoid unnecessarily fetching the
|
|
|
|
Realm object.
|
|
|
|
'''
|
|
|
|
return get_realm_stream(stream_name, realm.id)
|
2013-01-17 22:16:39 +01:00
|
|
|
|
2013-06-27 22:52:05 +02:00
|
|
|
def bulk_get_streams(realm, stream_names):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Realm, STREAM_NAMES) -> Dict[Text, Any]
|
2013-06-27 22:52:05 +02:00
|
|
|
|
|
|
|
def fetch_streams_by_name(stream_names):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (List[Text]) -> Sequence[Stream]
|
2016-06-04 09:02:05 +02:00
|
|
|
#
|
2013-06-27 22:52:05 +02:00
|
|
|
# This should be just
|
|
|
|
#
|
|
|
|
# Stream.objects.select_related("realm").filter(name__iexact__in=stream_names,
|
|
|
|
# realm_id=realm_id)
|
|
|
|
#
|
|
|
|
# But chaining __in and __iexact doesn't work with Django's
|
|
|
|
# ORM, so we have the following hack to construct the relevant where clause
|
|
|
|
if len(stream_names) == 0:
|
|
|
|
return []
|
|
|
|
upper_list = ", ".join(["UPPER(%s)"] * len(stream_names))
|
2013-07-29 23:03:31 +02:00
|
|
|
where_clause = "UPPER(zerver_stream.name::text) IN (%s)" % (upper_list,)
|
2016-09-20 03:19:50 +02:00
|
|
|
return get_active_streams(realm.id).select_related("realm").extra(
|
2013-06-27 22:52:05 +02:00
|
|
|
where=[where_clause],
|
|
|
|
params=stream_names)
|
|
|
|
|
2017-09-17 22:26:43 +02:00
|
|
|
return generic_bulk_cached_fetch(lambda stream_name: get_stream_cache_key(stream_name, realm.id),
|
2013-06-27 22:52:05 +02:00
|
|
|
fetch_streams_by_name,
|
|
|
|
[stream_name.lower() for stream_name in stream_names],
|
|
|
|
id_fetcher=lambda stream: stream.name.lower())
|
|
|
|
|
2013-03-26 17:10:44 +01:00
|
|
|
def get_recipient_cache_key(type, type_id):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (int, int) -> Text
|
2017-05-05 14:44:45 +02:00
|
|
|
return u"%s:get_recipient:%s:%s" % (cache.KEY_PREFIX, type, type_id,)
|
2013-03-26 17:10:44 +01:00
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(get_recipient_cache_key, timeout=3600*24*7)
|
2013-03-18 16:54:58 +01:00
|
|
|
def get_recipient(type, type_id):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (int, int) -> Recipient
|
2013-03-18 16:54:58 +01:00
|
|
|
return Recipient.objects.get(type_id=type_id, type=type)
|
|
|
|
|
2017-10-28 20:26:11 +02:00
|
|
|
def get_stream_recipient(stream_id):
|
|
|
|
# type: (int) -> Recipient
|
|
|
|
return get_recipient(Recipient.STREAM, stream_id)
|
|
|
|
|
2017-10-28 21:31:21 +02:00
|
|
|
def get_personal_recipient(user_profile_id):
|
|
|
|
# type: (int) -> Recipient
|
|
|
|
return get_recipient(Recipient.PERSONAL, user_profile_id)
|
|
|
|
|
2017-10-28 21:14:41 +02:00
|
|
|
def get_huddle_recipient(user_profile_ids):
|
|
|
|
# type: (Set[int]) -> Recipient
|
|
|
|
|
|
|
|
# The caller should ensure that user_profile_ids includes
|
|
|
|
# the sender. Note that get_huddle hits the cache, and then
|
|
|
|
# we hit another cache to get the recipient. We may want to
|
|
|
|
# unify our caching strategy here.
|
|
|
|
huddle = get_huddle(list(user_profile_ids))
|
|
|
|
return get_recipient(Recipient.HUDDLE, huddle.id)
|
|
|
|
|
2017-10-29 17:03:51 +01:00
|
|
|
def get_huddle_user_ids(recipient):
|
|
|
|
# type: (Recipient) -> List[int]
|
|
|
|
assert(recipient.type == Recipient.HUDDLE)
|
|
|
|
|
|
|
|
return Subscription.objects.filter(
|
|
|
|
recipient=recipient,
|
|
|
|
active=True,
|
|
|
|
).order_by('user_profile_id').values_list('user_profile_id', flat=True)
|
|
|
|
|
2013-06-25 19:26:58 +02:00
|
|
|
def bulk_get_recipients(type, type_ids):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (int, List[int]) -> Dict[int, Any]
|
2013-06-25 19:26:58 +02:00
|
|
|
def cache_key_function(type_id):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (int) -> Text
|
2013-06-25 19:26:58 +02:00
|
|
|
return get_recipient_cache_key(type, type_id)
|
2016-11-29 07:22:02 +01:00
|
|
|
|
2013-06-25 19:26:58 +02:00
|
|
|
def query_function(type_ids):
|
2016-06-12 14:43:15 +02:00
|
|
|
# type: (List[int]) -> Sequence[Recipient]
|
|
|
|
# TODO: Change return type to QuerySet[Recipient]
|
2013-06-25 19:26:58 +02:00
|
|
|
return Recipient.objects.filter(type=type, type_id__in=type_ids)
|
|
|
|
|
|
|
|
return generic_bulk_cached_fetch(cache_key_function, query_function, type_ids,
|
|
|
|
id_fetcher=lambda recipient: recipient.type_id)
|
|
|
|
|
2017-10-29 19:01:08 +01:00
|
|
|
def get_stream_recipients(stream_ids):
|
|
|
|
# type: (List[int]) -> List[Recipient]
|
|
|
|
|
|
|
|
'''
|
|
|
|
We could call bulk_get_recipients(...).values() here, but it actually
|
|
|
|
leads to an extra query in test mode.
|
|
|
|
'''
|
|
|
|
return Recipient.objects.filter(
|
|
|
|
type=Recipient.STREAM,
|
|
|
|
type_id__in=stream_ids,
|
|
|
|
)
|
2016-12-06 07:19:34 +01:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class AbstractMessage(models.Model):
|
2017-07-09 01:16:47 +02:00
|
|
|
sender = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
recipient = models.ForeignKey(Recipient, on_delete=CASCADE) # type: Recipient
|
|
|
|
subject = models.CharField(max_length=MAX_SUBJECT_LENGTH, db_index=True) # type: Text
|
|
|
|
content = models.TextField() # type: Text
|
|
|
|
rendered_content = models.TextField(null=True) # type: Optional[Text]
|
|
|
|
rendered_content_version = models.IntegerField(null=True) # type: Optional[int]
|
|
|
|
pub_date = models.DateTimeField('date published', db_index=True) # type: datetime.datetime
|
|
|
|
sending_client = models.ForeignKey(Client, on_delete=CASCADE) # type: Client
|
|
|
|
last_edit_time = models.DateTimeField(null=True) # type: Optional[datetime.datetime]
|
|
|
|
edit_history = models.TextField(null=True) # type: Optional[Text]
|
|
|
|
has_attachment = models.BooleanField(default=False, db_index=True) # type: bool
|
|
|
|
has_image = models.BooleanField(default=False, db_index=True) # type: bool
|
|
|
|
has_link = models.BooleanField(default=False, db_index=True) # type: bool
|
2014-02-21 17:44:48 +01:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2016-11-01 11:26:38 +01:00
|
|
|
abstract = True
|
|
|
|
|
2017-10-27 09:06:40 +02:00
|
|
|
def __str__(self):
|
2017-05-17 05:59:50 +02:00
|
|
|
# type: () -> Text
|
|
|
|
display_recipient = get_display_recipient(self.recipient)
|
2017-11-02 05:50:03 +01:00
|
|
|
return "<%s: %s / %s / %s>" % (self.__class__.__name__, display_recipient,
|
2017-10-27 09:06:40 +02:00
|
|
|
self.subject, self.sender)
|
2017-05-17 05:59:50 +02:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class ArchivedMessage(AbstractMessage):
|
2017-04-15 04:03:56 +02:00
|
|
|
archive_timestamp = models.DateTimeField(default=timezone_now, db_index=True) # type: datetime.datetime
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
|
|
|
|
class Message(AbstractMessage):
|
|
|
|
|
2016-07-14 17:48:11 +02:00
|
|
|
def topic_name(self):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: () -> Text
|
2016-07-14 17:48:11 +02:00
|
|
|
"""
|
|
|
|
Please start using this helper to facilitate an
|
|
|
|
eventual switch over to a separate topic table.
|
|
|
|
"""
|
|
|
|
return self.subject
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2017-10-28 21:53:47 +02:00
|
|
|
def is_stream_message(self):
|
|
|
|
# type: () -> bool
|
|
|
|
'''
|
|
|
|
Find out whether a message is a stream message by
|
|
|
|
looking up its recipient.type. TODO: Make this
|
|
|
|
an easier operation by denormalizing the message
|
|
|
|
type onto Message, either explicitly (message.type)
|
|
|
|
or implicitly (message.stream_id is not None).
|
|
|
|
'''
|
|
|
|
return self.recipient.type == Recipient.STREAM
|
|
|
|
|
2013-08-22 16:56:37 +02:00
|
|
|
def get_realm(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> Realm
|
2013-08-22 16:56:37 +02:00
|
|
|
return self.sender.realm
|
|
|
|
|
2013-09-20 21:25:51 +02:00
|
|
|
def save_rendered_content(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> None
|
2013-09-20 21:25:51 +02:00
|
|
|
self.save(update_fields=["rendered_content", "rendered_content_version"])
|
|
|
|
|
2013-09-21 16:46:28 +02:00
|
|
|
@staticmethod
|
2016-10-04 15:52:26 +02:00
|
|
|
def need_to_render_content(rendered_content, rendered_content_version, bugdown_version):
|
2017-02-11 05:03:41 +01:00
|
|
|
# type: (Optional[Text], Optional[int], int) -> bool
|
|
|
|
return (rendered_content is None or
|
|
|
|
rendered_content_version is None or
|
|
|
|
rendered_content_version < bugdown_version)
|
2012-08-30 19:56:15 +02:00
|
|
|
|
2012-09-27 19:58:42 +02:00
|
|
|
def to_log_dict(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> Dict[str, Any]
|
2012-10-24 20:16:26 +02:00
|
|
|
return dict(
|
|
|
|
id = self.id,
|
2016-05-19 07:35:02 +02:00
|
|
|
sender_id = self.sender.id,
|
2013-03-28 20:43:34 +01:00
|
|
|
sender_email = self.sender.email,
|
2017-03-14 23:31:05 +01:00
|
|
|
sender_realm_str = self.sender.realm.string_id,
|
2012-10-24 20:16:26 +02:00
|
|
|
sender_full_name = self.sender.full_name,
|
|
|
|
sender_short_name = self.sender.short_name,
|
|
|
|
sending_client = self.sending_client.name,
|
|
|
|
type = self.recipient.type_name(),
|
2012-12-03 19:49:12 +01:00
|
|
|
recipient = get_display_recipient(self.recipient),
|
2016-07-15 06:36:45 +02:00
|
|
|
subject = self.topic_name(),
|
2012-10-24 20:16:26 +02:00
|
|
|
content = self.content,
|
2012-12-11 23:08:17 +01:00
|
|
|
timestamp = datetime_to_timestamp(self.pub_date))
|
2012-09-27 19:58:42 +02:00
|
|
|
|
2013-12-31 22:42:38 +01:00
|
|
|
def sent_by_human(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> bool
|
2013-12-31 22:42:38 +01:00
|
|
|
sending_client = self.sending_client.name.lower()
|
|
|
|
|
|
|
|
return (sending_client in ('zulipandroid', 'zulipios', 'zulipdesktop',
|
2017-02-16 22:18:19 +01:00
|
|
|
'zulipmobile', 'zulipelectron', 'snipe',
|
2016-11-30 14:17:35 +01:00
|
|
|
'website', 'ios', 'android')) or (
|
2017-01-24 07:06:13 +01:00
|
|
|
'desktop app' in sending_client)
|
2013-12-31 22:42:38 +01:00
|
|
|
|
2014-02-21 21:18:38 +01:00
|
|
|
@staticmethod
|
|
|
|
def content_has_attachment(content):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> Optional[Match]
|
2016-06-12 15:44:39 +02:00
|
|
|
return re.search(r'[/\-]user[\-_]uploads[/\.-]', content)
|
2014-02-21 21:18:38 +01:00
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def content_has_image(content):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> bool
|
2017-11-09 11:45:56 +01:00
|
|
|
return bool(re.search(r'[/\-]user[\-_]uploads[/\.-]\S+\.(bmp|gif|jpg|jpeg|png|webp)',
|
|
|
|
content, re.IGNORECASE))
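# For example, both helpers above match an uploaded-file reference such as
# '/user_uploads/1/ab/photo.png' (a hypothetical path); content_has_image
# additionally requires an image-style file extension.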
|
2014-02-21 21:18:38 +01:00
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def content_has_link(content):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> bool
|
2016-03-10 17:17:40 +01:00
|
|
|
return ('http://' in content or
|
|
|
|
'https://' in content or
|
|
|
|
'/user_uploads' in content or
|
|
|
|
(settings.ENABLE_FILE_LINKS and 'file:///' in content))
|
2014-02-21 21:18:38 +01:00
|
|
|
|
2016-07-10 22:58:46 +02:00
|
|
|
@staticmethod
|
|
|
|
def is_status_message(content, rendered_content):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text, Text) -> bool
|
2016-07-10 22:58:46 +02:00
|
|
|
"""
|
|
|
|
Returns True if content and rendered_content look like a '/me' status message.
|
|
|
|
"""
|
|
|
|
if content.startswith('/me ') and '\n' not in content:
|
|
|
|
if rendered_content.startswith('<p>') and rendered_content.endswith('</p>'):
|
|
|
|
return True
|
|
|
|
return False
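# A hedged illustration of the check above (the exact rendered HTML comes
# from the markdown renderer and may differ in practice):
#   Message.is_status_message('/me waves', '<p>/me waves</p>')  -> True
#   Message.is_status_message('hello', '<p>hello</p>')          -> False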
|
|
|
|
|
2014-02-21 21:18:38 +01:00
|
|
|
def update_calculated_fields(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> None
|
2014-02-21 21:18:38 +01:00
|
|
|
# TODO: rendered_content could also be considered a calculated field
|
|
|
|
content = self.content
|
|
|
|
self.has_attachment = bool(Message.content_has_attachment(content))
|
|
|
|
self.has_image = bool(Message.content_has_image(content))
|
|
|
|
self.has_link = bool(Message.content_has_link(content))
|
|
|
|
|
|
|
|
@receiver(pre_save, sender=Message)
|
|
|
|
def pre_save_message(sender, **kwargs):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (Any, **Any) -> None
|
2014-02-21 21:18:38 +01:00
|
|
|
if kwargs['update_fields'] is None or "content" in kwargs['update_fields']:
|
|
|
|
message = kwargs['instance']
|
|
|
|
message.update_calculated_fields()
|
|
|
|
|
2014-07-15 21:03:51 +02:00
|
|
|
def get_context_for_message(message):
|
2017-03-06 08:45:59 +01:00
|
|
|
# type: (Message) -> QuerySet[Message]
|
2016-06-12 14:43:15 +02:00
|
|
|
# TODO: Change return type to QuerySet[Message]
|
2014-07-15 21:03:51 +02:00
|
|
|
return Message.objects.filter(
|
|
|
|
recipient_id=message.recipient_id,
|
|
|
|
subject=message.subject,
|
|
|
|
id__lt=message.id,
|
2015-02-21 02:46:19 +01:00
|
|
|
pub_date__gt=message.pub_date - timedelta(minutes=15),
|
|
|
|
).order_by('-id')[:10]
|
2014-07-15 21:03:51 +02:00
|
|
|
|
2016-07-08 02:25:55 +02:00
|
|
|
post_save.connect(flush_message, sender=Message)
|
2014-07-15 21:03:51 +02:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class Reaction(models.Model):
|
2017-07-09 01:16:47 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE) # type: Message
|
|
|
|
emoji_name = models.TextField() # type: Text
|
2017-05-01 07:29:56 +02:00
|
|
|
emoji_code = models.TextField() # type: Text
|
|
|
|
|
|
|
|
UNICODE_EMOJI = u'unicode_emoji'
|
|
|
|
REALM_EMOJI = u'realm_emoji'
|
|
|
|
ZULIP_EXTRA_EMOJI = u'zulip_extra_emoji'
|
|
|
|
REACTION_TYPES = ((UNICODE_EMOJI, _("Unicode emoji")),
|
|
|
|
(REALM_EMOJI, _("Realm emoji")),
|
|
|
|
(ZULIP_EXTRA_EMOJI, _("Zulip extra emoji")))
|
|
|
|
|
|
|
|
reaction_type = models.CharField(default=UNICODE_EMOJI, choices=REACTION_TYPES, max_length=30) # type: Text
|
2016-11-03 18:49:00 +01:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2016-11-03 18:49:00 +01:00
|
|
|
unique_together = ("user_profile", "message", "emoji_name")
|
|
|
|
|
2016-12-06 07:19:34 +01:00
|
|
|
@staticmethod
|
|
|
|
def get_raw_db_rows(needed_ids):
|
|
|
|
# type: (List[int]) -> List[Dict[str, Any]]
|
2017-05-01 07:29:56 +02:00
|
|
|
fields = ['message_id', 'emoji_name', 'emoji_code', 'reaction_type',
|
|
|
|
'user_profile__email', 'user_profile__id', 'user_profile__full_name']
|
2016-12-06 07:19:34 +01:00
|
|
|
return Reaction.objects.filter(message_id__in=needed_ids).values(*fields)
|
|
|
|
|
2017-06-08 02:04:09 +02:00
|
|
|
# Whenever a message is sent, for each user subscribed to the
|
2016-04-01 08:42:38 +02:00
|
|
|
# corresponding Recipient object, we add a row to the UserMessage
|
2017-06-08 02:04:09 +02:00
|
|
|
# table indicating that that user received that message. This table
|
2016-04-01 08:42:38 +02:00
|
|
|
# allows us to quickly query any user's last 1000 messages to generate
|
|
|
|
# the home view.
|
|
|
|
#
|
|
|
|
# Additionally, the flags field stores metadata like whether the user
|
2017-06-08 02:04:09 +02:00
|
|
|
# has read the message, starred or collapsed the message, was
|
|
|
|
# mentioned in the message, etc.
|
2016-04-01 08:42:38 +02:00
|
|
|
#
|
|
|
|
# UserMessage is the largest table in a Zulip installation, even
|
|
|
|
# though each row is only 4 integers.
|
2017-10-27 08:42:27 +02:00
|
|
|
class AbstractUserMessage(models.Model):
|
2017-08-27 20:27:01 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
# WARNING: We removed the previously-final flag,
|
2017-08-27 19:10:01 +02:00
|
|
|
# is_me_message, without clearing any values it might have had in
|
|
|
|
# the database. So when we next add a flag, you need to do a
|
|
|
|
# migration to set it to 0 first
|
2013-07-25 22:08:16 +02:00
|
|
|
ALL_FLAGS = ['read', 'starred', 'collapsed', 'mentioned', 'wildcard_mentioned',
|
2013-09-03 22:41:17 +02:00
|
|
|
'summarize_in_home', 'summarize_in_stream', 'force_expand', 'force_collapse',
|
2017-08-27 18:10:36 +02:00
|
|
|
'has_alert_word', "historical"]
|
2017-07-09 01:16:47 +02:00
|
|
|
flags = BitField(flags=ALL_FLAGS, default=0) # type: BitHandler
|
2012-09-07 17:04:41 +02:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2016-11-01 11:26:38 +01:00
|
|
|
abstract = True
|
2012-11-08 21:08:13 +01:00
|
|
|
unique_together = ("user_profile", "message")
|
|
|
|
|
2017-05-23 03:02:01 +02:00
|
|
|
@staticmethod
|
|
|
|
def where_unread():
|
|
|
|
# type: () -> str
|
|
|
|
# Use this for Django ORM queries where we are getting lots
|
2017-07-28 13:05:30 +02:00
|
|
|
# of rows. This custom SQL plays nice with our partial indexes.
|
2017-05-23 03:02:01 +02:00
|
|
|
# Grep the code for example usage.
|
|
|
|
return 'flags & 1 = 0'
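# Hypothetical usage sketch (not a call site in this file): the string is
# meant to be passed to QuerySet.extra(), e.g.
#   UserMessage.objects.filter(user_profile=user_profile).extra(
#       where=[UserMessage.where_unread()])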
|
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
def flags_list(self):
|
|
|
|
# type: () -> List[str]
|
2017-09-10 21:36:23 +02:00
|
|
|
flags = int(self.flags)
|
|
|
|
return self.flags_list_for_flags(flags)
|
|
|
|
|
|
|
|
@staticmethod
|
2017-11-07 18:40:39 +01:00
|
|
|
def flags_list_for_flags(val):
|
2017-09-10 21:36:23 +02:00
|
|
|
# type: (int) -> List[str]
|
2017-09-09 19:47:38 +02:00
|
|
|
'''
|
|
|
|
This function is highly optimized, because it actually slows down
|
|
|
|
sending messages in a naive implementation.
|
|
|
|
'''
|
2017-11-07 18:40:39 +01:00
|
|
|
flags = []
|
|
|
|
mask = 1
|
|
|
|
for flag in UserMessage.ALL_FLAGS:
|
|
|
|
if val & mask:
|
|
|
|
flags.append(flag)
|
|
|
|
mask <<= 1
|
|
|
|
return flags
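# Worked example: a flags value of 5 (binary 101) has bits 0 and 2 set,
# which correspond to ALL_FLAGS[0] and ALL_FLAGS[2], so
# flags_list_for_flags(5) returns ['read', 'collapsed'].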
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2017-10-27 09:06:40 +02:00
|
|
|
def __str__(self):
|
2017-05-17 05:59:50 +02:00
|
|
|
# type: () -> Text
|
|
|
|
display_recipient = get_display_recipient(self.message.recipient)
|
2017-10-27 09:06:40 +02:00
|
|
|
return "<%s: %s / %s (%s)>" % (self.__class__.__name__, display_recipient,
|
|
|
|
self.user_profile.email, self.flags_list())
|
2017-05-17 05:59:50 +02:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class ArchivedUserMessage(AbstractUserMessage):
|
2017-06-01 10:44:16 +02:00
|
|
|
message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE) # type: ArchivedMessage
|
2017-04-15 04:03:56 +02:00
|
|
|
archive_timestamp = models.DateTimeField(default=timezone_now, db_index=True) # type: datetime.datetime
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
|
|
|
|
class UserMessage(AbstractUserMessage):
|
2017-06-01 10:44:16 +02:00
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE) # type: Message
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2013-03-11 15:47:29 +01:00
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class AbstractAttachment(models.Model):
|
2016-11-01 11:26:38 +01:00
|
|
|
file_name = models.TextField(db_index=True) # type: Text
|
2016-03-24 20:24:01 +01:00
|
|
|
# path_id is a storage location agnostic representation of the path of the file.
|
|
|
|
# If the path of a file is http://localhost:9991/user_uploads/a/b/abc/temp_file.py
|
|
|
|
# then its path_id will be a/b/abc/temp_file.py.
|
2017-04-14 00:30:23 +02:00
|
|
|
path_id = models.TextField(db_index=True, unique=True) # type: Text
|
2017-06-01 10:44:16 +02:00
|
|
|
owner = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
realm = models.ForeignKey(Realm, blank=True, null=True, on_delete=CASCADE) # type: Optional[Realm]
|
2016-11-01 11:26:38 +01:00
|
|
|
is_realm_public = models.BooleanField(default=False) # type: bool
|
2017-04-15 04:03:56 +02:00
|
|
|
create_time = models.DateTimeField(default=timezone_now,
|
2016-11-01 11:26:38 +01:00
|
|
|
db_index=True) # type: datetime.datetime
|
2017-05-23 22:17:08 +02:00
|
|
|
size = models.IntegerField(null=True) # type: Optional[int]
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2016-11-01 11:26:38 +01:00
|
|
|
abstract = True
|
|
|
|
|
2017-10-27 09:06:40 +02:00
|
|
|
def __str__(self):
|
2017-05-17 05:59:50 +02:00
|
|
|
# type: () -> Text
|
2017-10-27 09:06:40 +02:00
|
|
|
return "<%s: %s>" % (self.__class__.__name__, self.file_name,)
|
2017-05-17 05:59:50 +02:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class ArchivedAttachment(AbstractAttachment):
|
2017-04-15 04:03:56 +02:00
|
|
|
archive_timestamp = models.DateTimeField(default=timezone_now, db_index=True) # type: datetime.datetime
|
2016-11-01 11:26:38 +01:00
|
|
|
messages = models.ManyToManyField(ArchivedMessage) # type: Manager
|
|
|
|
|
|
|
|
|
|
|
|
class Attachment(AbstractAttachment):
|
|
|
|
messages = models.ManyToManyField(Message) # type: Manager
|
2016-03-24 20:24:01 +01:00
|
|
|
|
|
|
|
def is_claimed(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> bool
|
2016-03-24 20:24:01 +01:00
|
|
|
return self.messages.count() > 0
|
|
|
|
|
2016-12-28 14:46:42 +01:00
|
|
|
def to_dict(self):
|
|
|
|
# type: () -> Dict[str, Any]
|
|
|
|
return {
|
|
|
|
'id': self.id,
|
|
|
|
'name': self.file_name,
|
|
|
|
'path_id': self.path_id,
|
2017-09-15 01:17:16 +02:00
|
|
|
'size': self.size,
|
2017-09-15 01:17:38 +02:00
|
|
|
# convert to JavaScript-style UNIX timestamp so we can take
|
|
|
|
# advantage of client timezones.
|
|
|
|
'create_time': time.mktime(self.create_time.timetuple()) * 1000,
|
2016-12-28 14:46:42 +01:00
|
|
|
'messages': [{
|
|
|
|
'id': m.id,
|
2017-02-24 02:30:47 +01:00
|
|
|
'name': time.mktime(m.pub_date.timetuple()) * 1000
|
2016-12-28 14:46:42 +01:00
|
|
|
} for m in self.messages.all()]
|
|
|
|
}
|
|
|
|
|
2016-06-17 19:48:17 +02:00
|
|
|
def validate_attachment_request(user_profile, path_id):
|
|
|
|
# type: (UserProfile, Text) -> Optional[bool]
|
|
|
|
try:
|
|
|
|
attachment = Attachment.objects.get(path_id=path_id)
|
|
|
|
messages = attachment.messages.all()
|
|
|
|
|
|
|
|
if user_profile == attachment.owner:
|
|
|
|
# If you own the file, you can access it.
|
|
|
|
return True
|
|
|
|
elif attachment.is_realm_public and attachment.realm == user_profile.realm:
|
|
|
|
# Any user in the realm can access realm-public files
|
|
|
|
return True
|
|
|
|
elif UserMessage.objects.filter(user_profile=user_profile, message__in=messages).exists():
|
|
|
|
# If it was sent in a private message or private stream
|
|
|
|
# message, then anyone who received that message can access it.
|
|
|
|
return True
|
|
|
|
else:
|
|
|
|
return False
|
|
|
|
except Attachment.DoesNotExist:
|
|
|
|
return None
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2016-03-24 20:24:01 +01:00
|
|
|
def get_old_unclaimed_attachments(weeks_ago):
|
2016-06-12 14:43:15 +02:00
|
|
|
# type: (int) -> Sequence[Attachment]
|
|
|
|
# TODO: Change return type to QuerySet[Attachment]
|
2017-04-15 04:03:56 +02:00
|
|
|
delta_weeks_ago = timezone_now() - datetime.timedelta(weeks=weeks_ago)
|
2016-03-24 20:24:01 +01:00
|
|
|
old_attachments = Attachment.objects.filter(messages=None, create_time__lt=delta_weeks_ago)
|
|
|
|
return old_attachments
|
|
|
|
|
2017-10-27 08:42:27 +02:00
|
|
|
class Subscription(models.Model):
|
2017-07-09 01:16:47 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
recipient = models.ForeignKey(Recipient, on_delete=CASCADE) # type: Recipient
|
|
|
|
active = models.BooleanField(default=True) # type: bool
|
|
|
|
in_home_view = models.NullBooleanField(default=True) # type: Optional[bool]
|
2012-08-29 17:50:36 +02:00
|
|
|
|
2016-07-11 15:54:15 +02:00
|
|
|
DEFAULT_STREAM_COLOR = u"#c2c2c2"
|
2017-07-09 01:16:47 +02:00
|
|
|
color = models.CharField(max_length=10, default=DEFAULT_STREAM_COLOR) # type: Text
|
|
|
|
pin_to_top = models.BooleanField(default=False) # type: bool
|
2014-02-05 23:00:46 +01:00
|
|
|
|
2017-07-09 01:16:47 +02:00
|
|
|
desktop_notifications = models.BooleanField(default=True) # type: bool
|
|
|
|
audible_notifications = models.BooleanField(default=True) # type: bool
|
2017-08-17 16:55:32 +02:00
|
|
|
push_notifications = models.BooleanField(default=False) # type: bool
|
2014-02-05 23:00:46 +01:00
|
|
|
|
|
|
|
# Combination desktop + audible notifications superseded by the
|
|
|
|
# above.
|
2017-07-09 01:16:47 +02:00
|
|
|
notifications = models.BooleanField(default=False) # type: bool
|
2013-03-29 20:57:02 +01:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2012-11-07 22:33:38 +01:00
|
|
|
unique_together = ("user_profile", "recipient")
|
|
|
|
|
2017-10-27 09:06:40 +02:00
|
|
|
def __str__(self):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: () -> Text
|
2017-11-02 05:50:03 +01:00
|
|
|
return "<Subscription: %s -> %s>" % (self.user_profile, self.recipient)
|
2012-08-28 22:56:21 +02:00
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(user_profile_by_id_cache_key, timeout=3600*24*7)
|
2013-03-26 18:51:55 +01:00
|
|
|
def get_user_profile_by_id(uid):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (int) -> UserProfile
|
2013-03-26 18:51:55 +01:00
|
|
|
return UserProfile.objects.select_related().get(id=uid)
|
|
|
|
|
2013-03-28 20:20:31 +01:00
|
|
|
@cache_with_key(user_profile_by_email_cache_key, timeout=3600*24*7)
|
|
|
|
def get_user_profile_by_email(email):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> UserProfile
|
2014-01-08 00:07:53 +01:00
|
|
|
return UserProfile.objects.select_related().get(email__iexact=email.strip())
|
2013-03-28 20:20:31 +01:00
|
|
|
|
2017-08-25 07:43:38 +02:00
|
|
|
@cache_with_key(user_profile_by_api_key_cache_key, timeout=3600*24*7)
|
|
|
|
def get_user_profile_by_api_key(api_key):
|
|
|
|
# type: (Text) -> UserProfile
|
|
|
|
return UserProfile.objects.select_related().get(api_key=api_key)
|
|
|
|
|
2017-05-22 19:45:54 +02:00
|
|
|
@cache_with_key(user_profile_cache_key, timeout=3600*24*7)
|
|
|
|
def get_user(email, realm):
|
|
|
|
# type: (Text, Realm) -> UserProfile
|
|
|
|
return UserProfile.objects.select_related().get(email__iexact=email.strip(), realm=realm)
|
|
|
|
|
2017-07-17 20:55:32 +02:00
|
|
|
def get_user_including_cross_realm(email, realm=None):
|
|
|
|
# type: (Text, Optional[Realm]) -> UserProfile
|
|
|
|
if email in get_cross_realm_emails():
|
|
|
|
return get_system_bot(email)
|
|
|
|
assert realm is not None
|
|
|
|
return get_user(email, realm)
|
|
|
|
|
2017-05-22 23:37:15 +02:00
|
|
|
@cache_with_key(bot_profile_cache_key, timeout=3600*24*7)
|
|
|
|
def get_system_bot(email):
|
|
|
|
# type: (Text) -> UserProfile
|
|
|
|
return UserProfile.objects.select_related().get(email__iexact=email.strip())
|
|
|
|
|
2017-10-21 18:20:49 +02:00
|
|
|
@cache_with_key(realm_user_dicts_cache_key, timeout=3600*24*7)
|
|
|
|
def get_realm_user_dicts(realm_id):
|
2017-09-16 20:56:56 +02:00
|
|
|
# type: (int) -> List[Dict[str, Any]]
|
|
|
|
return UserProfile.objects.filter(
|
|
|
|
realm_id=realm_id,
|
2017-10-21 18:20:49 +02:00
|
|
|
).values(*realm_user_dict_fields)
|
2013-08-28 20:25:31 +02:00
|
|
|
|
2017-09-16 21:44:03 +02:00
|
|
|
@cache_with_key(active_user_ids_cache_key, timeout=3600*24*7)
|
|
|
|
def active_user_ids(realm_id):
|
|
|
|
# type: (int) -> List[int]
|
|
|
|
query = UserProfile.objects.filter(
|
|
|
|
realm_id=realm_id,
|
|
|
|
is_active=True
|
|
|
|
).values_list('id', flat=True)
|
|
|
|
return list(query)
|
|
|
|
|
2017-02-06 20:45:26 +01:00
|
|
|
@cache_with_key(bot_dicts_in_realm_cache_key, timeout=3600*24*7)
|
|
|
|
def get_bot_dicts_in_realm(realm):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (Realm) -> List[Dict[str, Any]]
|
2017-02-06 20:45:26 +01:00
|
|
|
return UserProfile.objects.filter(realm=realm, is_bot=True).values(*bot_dict_fields)
|
2014-02-26 00:12:14 +01:00
|
|
|
|
2016-04-27 23:22:52 +02:00
|
|
|
def get_owned_bot_dicts(user_profile, include_all_realm_bots_if_admin=True):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (UserProfile, bool) -> List[Dict[str, Any]]
|
2016-04-27 23:22:52 +02:00
|
|
|
if user_profile.is_realm_admin and include_all_realm_bots_if_admin:
|
2017-02-06 20:45:26 +01:00
|
|
|
result = get_bot_dicts_in_realm(user_profile.realm)
|
2016-04-27 23:22:52 +02:00
|
|
|
else:
|
2017-02-06 20:45:26 +01:00
|
|
|
result = UserProfile.objects.filter(realm=user_profile.realm, is_bot=True,
|
|
|
|
bot_owner=user_profile).values(*bot_dict_fields)
|
2016-09-28 00:21:31 +02:00
|
|
|
# TODO: Remove this import cycle
|
2017-05-10 07:09:28 +02:00
|
|
|
from zerver.lib.avatar import avatar_url_from_dict
|
2016-09-28 00:21:31 +02:00
|
|
|
|
2016-04-27 23:22:52 +02:00
|
|
|
return [{'email': botdict['email'],
|
2016-10-26 03:35:32 +02:00
|
|
|
'user_id': botdict['id'],
|
2016-04-27 23:22:52 +02:00
|
|
|
'full_name': botdict['full_name'],
|
2017-06-12 19:50:03 +02:00
|
|
|
'bot_type': botdict['bot_type'],
|
2017-02-06 20:45:26 +01:00
|
|
|
'is_active': botdict['is_active'],
|
2016-04-27 23:22:52 +02:00
|
|
|
'api_key': botdict['api_key'],
|
|
|
|
'default_sending_stream': botdict['default_sending_stream__name'],
|
|
|
|
'default_events_register_stream': botdict['default_events_register_stream__name'],
|
|
|
|
'default_all_public_streams': botdict['default_all_public_streams'],
|
|
|
|
'owner': botdict['bot_owner__email'],
|
2017-05-10 07:09:28 +02:00
|
|
|
'avatar_url': avatar_url_from_dict(botdict),
|
2016-12-01 06:16:45 +01:00
|
|
|
}
|
2016-04-27 23:22:52 +02:00
|
|
|
for botdict in result]
|
2014-02-26 00:12:14 +01:00
|
|
|
|
2013-04-08 18:27:07 +02:00
|
|
|
def get_prereg_user_by_email(email):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> PreregistrationUser
|
2013-04-08 18:27:07 +02:00
|
|
|
# A user can be invited many times, so only return the result of the latest
|
|
|
|
# invite.
|
2014-01-08 00:07:53 +01:00
|
|
|
return PreregistrationUser.objects.filter(email__iexact=email.strip()).latest("invited_at")
|
2013-04-08 18:27:07 +02:00
|
|
|
|
2016-11-02 21:57:59 +01:00
|
|
|
def get_cross_realm_emails():
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: () -> Set[Text]
|
2016-08-23 06:46:10 +02:00
|
|
|
return set(settings.CROSS_REALM_BOT_EMAILS)
|
2016-06-11 20:22:13 +02:00
|
|
|
|
2016-04-01 08:42:38 +02:00
|
|
|
# The Huddle class represents a group of individuals who have had a
|
|
|
|
# Group Private Message conversation together. The actual membership
|
|
|
|
# of the Huddle is stored in the Subscription table just like with
|
|
|
|
# Streams, and a hash of that list is stored in the huddle_hash field
|
|
|
|
# below, to support efficiently mapping from a set of users to the
|
|
|
|
# corresponding Huddle object.
|
2012-09-04 23:20:21 +02:00
|
|
|
class Huddle(models.Model):
|
2012-09-07 20:14:13 +02:00
|
|
|
# TODO: We should consider whether using
|
|
|
|
# CommaSeparatedIntegerField would be better.
|
2017-07-09 01:16:47 +02:00
|
|
|
huddle_hash = models.CharField(max_length=40, db_index=True, unique=True) # type: Text
|
2012-09-04 23:20:21 +02:00
|
|
|
|
2012-10-20 18:02:58 +02:00
|
|
|
def get_huddle_hash(id_list):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (List[int]) -> Text
|
2012-09-05 17:38:09 +02:00
|
|
|
id_list = sorted(set(id_list))
|
2012-09-05 17:41:53 +02:00
|
|
|
hash_key = ",".join(str(x) for x in id_list)
|
2013-03-20 15:31:27 +01:00
|
|
|
return make_safe_digest(hash_key)
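# For example, get_huddle_hash([3, 1, 2, 2]) normalizes the ids to [1, 2, 3],
# joins them into the string "1,2,3", and hashes that, so any ordering (or
# duplication) of the same user ids yields the same huddle_hash and maps to
# the same Huddle row.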
|
2012-10-20 18:02:58 +02:00
|
|
|
|
2013-03-26 18:17:55 +01:00
|
|
|
def huddle_hash_cache_key(huddle_hash):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> Text
|
2016-06-12 07:25:42 +02:00
|
|
|
return u"huddle_by_hash:%s" % (huddle_hash,)
|
2013-03-26 18:17:55 +01:00
|
|
|
|
2012-10-20 18:02:58 +02:00
|
|
|
def get_huddle(id_list):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (List[int]) -> Huddle
|
2012-10-20 18:02:58 +02:00
|
|
|
huddle_hash = get_huddle_hash(id_list)
|
2013-03-26 18:17:55 +01:00
|
|
|
return get_huddle_backend(huddle_hash, id_list)
|
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(lambda huddle_hash, id_list: huddle_hash_cache_key(huddle_hash), timeout=3600*24*7)
|
2013-03-26 18:17:55 +01:00
|
|
|
def get_huddle_backend(huddle_hash, id_list):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text, List[int]) -> Huddle
|
2017-01-06 17:29:41 +01:00
|
|
|
with transaction.atomic():
|
|
|
|
(huddle, created) = Huddle.objects.get_or_create(huddle_hash=huddle_hash)
|
|
|
|
if created:
|
2013-03-26 18:51:55 +01:00
|
|
|
recipient = Recipient.objects.create(type_id=huddle.id,
|
|
|
|
type=Recipient.HUDDLE)
|
|
|
|
subs_to_create = [Subscription(recipient=recipient,
|
2017-06-10 14:24:04 +02:00
|
|
|
user_profile_id=user_profile_id)
|
2013-03-26 18:51:55 +01:00
|
|
|
for user_profile_id in id_list]
|
|
|
|
Subscription.objects.bulk_create(subs_to_create)
|
2017-01-06 17:29:41 +01:00
|
|
|
return huddle
|
2012-09-04 23:20:21 +02:00
|
|
|
|
2012-10-29 19:43:00 +01:00
|
|
|
def clear_database():
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> None
|
2013-07-08 17:53:50 +02:00
|
|
|
pylibmc.Client(['127.0.0.1']).flush_all()
|
2017-07-09 01:16:47 +02:00
|
|
|
model = None # type: Any
|
2013-04-01 16:57:50 +02:00
|
|
|
for model in [Message, Stream, UserProfile, Recipient,
|
2012-11-27 18:26:51 +01:00
|
|
|
Realm, Subscription, Huddle, UserMessage, Client,
|
|
|
|
DefaultStream]:
|
2012-10-29 19:43:00 +01:00
|
|
|
model.objects.all().delete()
|
|
|
|
Session.objects.all().delete()
|
2012-11-08 23:02:16 +01:00
|
|
|
|
|
|
|
class UserActivity(models.Model):
|
2017-07-09 01:16:47 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
client = models.ForeignKey(Client, on_delete=CASCADE) # type: Client
|
|
|
|
query = models.CharField(max_length=50, db_index=True) # type: Text
|
2012-11-08 23:02:16 +01:00
|
|
|
|
2017-07-09 01:16:47 +02:00
|
|
|
count = models.IntegerField() # type: int
|
|
|
|
last_visit = models.DateTimeField('last visit') # type: datetime.datetime
|
2012-11-08 23:02:16 +01:00
|
|
|
|
2017-11-05 11:53:59 +01:00
|
|
|
class Meta:
|
2012-11-08 23:02:16 +01:00
|
|
|
unique_together = ("user_profile", "client", "query")
|
2012-11-27 18:26:51 +01:00
|
|
|
|
2013-09-06 21:52:12 +02:00
|
|
|
class UserActivityInterval(models.Model):
|
2017-04-15 07:20:16 +02:00
|
|
|
MIN_INTERVAL_LENGTH = datetime.timedelta(minutes=15)
|
|
|
|
|
2017-07-09 01:16:47 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
start = models.DateTimeField('start time', db_index=True) # type: datetime.datetime
|
|
|
|
end = models.DateTimeField('end time', db_index=True) # type: datetime.datetime
|
2013-09-06 21:52:12 +02:00
|
|
|
|
2017-04-15 07:20:16 +02:00
|
|
|
|
2013-02-08 23:44:15 +01:00
|
|
|
class UserPresence(models.Model):
|
2017-07-09 01:16:47 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
client = models.ForeignKey(Client, on_delete=CASCADE) # type: Client
|
2013-02-08 23:44:15 +01:00
|
|
|
|
|
|
|
# Valid statuses
|
|
|
|
ACTIVE = 1
|
|
|
|
IDLE = 2
|
|
|
|
|
2017-07-09 01:16:47 +02:00
|
|
|
timestamp = models.DateTimeField('presence changed') # type: datetime.datetime
|
|
|
|
status = models.PositiveSmallIntegerField(default=ACTIVE) # type: int
|
2013-02-08 23:44:15 +01:00
|
|
|
|
2013-09-13 23:33:11 +02:00
|
|
|
@staticmethod
|
|
|
|
def status_to_string(status):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (int) -> str
|
2013-09-13 23:33:11 +02:00
|
|
|
if status == UserPresence.ACTIVE:
|
|
|
|
return 'active'
|
|
|
|
elif status == UserPresence.IDLE:
|
|
|
|
return 'idle'
|
2017-03-03 20:30:49 +01:00
|
|
|
else:
|
|
|
|
raise ValueError('Unknown status: %s' % (status,))
|
2013-09-13 23:33:11 +02:00
|
|
|
|
2017-02-11 08:38:16 +01:00
|
|
|
@staticmethod
|
|
|
|
def get_status_dict_by_user(user_profile):
|
2017-10-30 01:16:17 +01:00
|
|
|
# type: (UserProfile) -> Dict[str, Dict[str, Any]]
|
2017-02-11 08:38:16 +01:00
|
|
|
query = UserPresence.objects.filter(user_profile=user_profile).values(
|
|
|
|
'client__name',
|
|
|
|
'status',
|
|
|
|
'timestamp',
|
|
|
|
'user_profile__email',
|
|
|
|
'user_profile__id',
|
|
|
|
'user_profile__enable_offline_push_notifications',
|
|
|
|
)
|
2017-09-13 16:43:02 +02:00
|
|
|
presence_rows = list(query)
|
2017-02-11 08:38:16 +01:00
|
|
|
|
2017-09-08 19:05:13 +02:00
|
|
|
mobile_user_ids = set() # type: Set[int]
|
2017-02-11 08:38:16 +01:00
|
|
|
if PushDeviceToken.objects.filter(user=user_profile).exists():
|
2017-09-08 19:05:13 +02:00
|
|
|
mobile_user_ids.add(user_profile.id)
|
2017-02-11 08:38:16 +01:00
|
|
|
|
2017-09-13 16:43:02 +02:00
|
|
|
return UserPresence.get_status_dicts_for_rows(presence_rows, mobile_user_ids)
|
2017-02-11 08:38:16 +01:00
|
|
|
|
2013-09-14 23:59:03 +02:00
|
|
|
@staticmethod
|
|
|
|
def get_status_dict_by_realm(realm_id):
|
2017-10-30 01:16:17 +01:00
|
|
|
# type: (int) -> Dict[str, Dict[str, Any]]
|
2017-09-08 17:00:35 +02:00
|
|
|
user_profile_ids = UserProfile.objects.filter(
|
|
|
|
realm_id=realm_id,
|
|
|
|
is_active=True,
|
|
|
|
is_bot=False
|
|
|
|
).order_by('id').values_list('id', flat=True)
|
|
|
|
|
|
|
|
user_profile_ids = list(user_profile_ids)
|
|
|
|
|
|
|
|
if not user_profile_ids:
|
|
|
|
return {}
|
|
|
|
|
2017-09-13 17:24:11 +02:00
|
|
|
two_weeks_ago = timezone_now() - datetime.timedelta(weeks=2)
|
2013-09-14 23:59:03 +02:00
|
|
|
query = UserPresence.objects.filter(
|
2017-09-13 17:24:11 +02:00
|
|
|
timestamp__gte=two_weeks_ago
|
|
|
|
).values(
|
2017-01-24 07:06:13 +01:00
|
|
|
'client__name',
|
|
|
|
'status',
|
|
|
|
'timestamp',
|
|
|
|
'user_profile__email',
|
|
|
|
'user_profile__id',
|
|
|
|
'user_profile__enable_offline_push_notifications',
|
2013-09-14 23:59:03 +02:00
|
|
|
)
|
2017-09-13 17:24:11 +02:00
|
|
|
|
|
|
|
query = query_for_ids(
|
|
|
|
query=query,
|
|
|
|
user_ids=user_profile_ids,
|
|
|
|
field='user_profile_id'
|
|
|
|
)
|
2017-09-13 16:43:02 +02:00
|
|
|
presence_rows = list(query)
|
2013-09-14 23:59:03 +02:00
|
|
|
|
2017-09-13 17:24:11 +02:00
|
|
|
mobile_query = PushDeviceToken.objects.distinct(
|
|
|
|
'user_id'
|
|
|
|
).values_list(
|
|
|
|
'user_id',
|
|
|
|
flat=True
|
2017-09-08 19:05:13 +02:00
|
|
|
)
|
|
|
|
|
2017-09-13 17:24:11 +02:00
|
|
|
mobile_query = query_for_ids(
|
|
|
|
query=mobile_query,
|
|
|
|
user_ids=user_profile_ids,
|
|
|
|
field='user_id'
|
|
|
|
)
|
|
|
|
mobile_user_ids = set(mobile_query)
|
2014-02-26 22:06:57 +01:00
|
|
|
|
2017-09-13 16:43:02 +02:00
|
|
|
return UserPresence.get_status_dicts_for_rows(presence_rows, mobile_user_ids)
|
2017-02-11 07:49:27 +01:00
|
|
|
|
|
|
|
@staticmethod
|
2017-09-13 16:43:02 +02:00
|
|
|
def get_status_dicts_for_rows(presence_rows, mobile_user_ids):
|
2017-10-30 01:16:17 +01:00
|
|
|
# type: (List[Dict[str, Any]], Set[int]) -> Dict[str, Dict[str, Any]]
|
2017-09-08 15:22:14 +02:00
|
|
|
|
2017-10-30 01:16:17 +01:00
|
|
|
info_row_dct = defaultdict(list) # type: DefaultDict[str, List[Dict[str, Any]]]
|
2017-09-13 16:43:02 +02:00
|
|
|
for row in presence_rows:
|
2017-09-08 15:22:14 +02:00
|
|
|
email = row['user_profile__email']
|
|
|
|
client_name = row['client__name']
|
|
|
|
status = UserPresence.status_to_string(row['status'])
|
|
|
|
dt = row['timestamp']
|
|
|
|
timestamp = datetime_to_timestamp(dt)
|
|
|
|
push_enabled = row['user_profile__enable_offline_push_notifications']
|
|
|
|
has_push_devices = row['user_profile__id'] in mobile_user_ids
|
|
|
|
pushable = (push_enabled and has_push_devices)
|
|
|
|
|
|
|
|
info = dict(
|
|
|
|
client=client_name,
|
|
|
|
status=status,
|
|
|
|
dt=dt,
|
|
|
|
timestamp=timestamp,
|
|
|
|
pushable=pushable,
|
2017-01-24 06:34:26 +01:00
|
|
|
)
|
2017-09-08 15:22:14 +02:00
|
|
|
|
|
|
|
info_row_dct[email].append(info)
|
|
|
|
|
|
|
|
user_statuses = dict() # type: Dict[str, Dict[str, Any]]
|
|
|
|
|
|
|
|
for email, info_rows in info_row_dct.items():
|
|
|
|
# Note that datetime values have sub-second granularity, which is
|
|
|
|
# mostly important for avoiding test flakes, but it's also technically
|
|
|
|
# more precise for real users.
|
|
|
|
by_time = lambda row: row['dt']
|
|
|
|
most_recent_info = max(info_rows, key=by_time)
|
|
|
|
|
|
|
|
# We don't send datetime values to the client.
|
|
|
|
for r in info_rows:
|
|
|
|
del r['dt']
|
|
|
|
|
|
|
|
client_dict = {info['client']: info for info in info_rows}
|
|
|
|
user_statuses[email] = client_dict
|
|
|
|
|
|
|
|
# The word "aggegrated" here is possibly misleading.
|
|
|
|
# It's really just the most recent client's info.
|
|
|
|
user_statuses[email]['aggregated'] = dict(
|
|
|
|
client=most_recent_info['client'],
|
|
|
|
status=most_recent_info['status'],
|
|
|
|
timestamp=most_recent_info['timestamp'],
|
|
|
|
)
|
|
|
|
|
2013-09-14 23:59:03 +02:00
|
|
|
return user_statuses

    @staticmethod
    def to_presence_dict(client_name, status, dt, push_enabled=False,
                         has_push_devices=False):
        # type: (Text, int, datetime.datetime, bool, bool) -> Dict[str, Any]
        presence_val = UserPresence.status_to_string(status)
        timestamp = datetime_to_timestamp(dt)
        return dict(
            client=client_name,
            status=presence_val,
            timestamp=timestamp,
            pushable=(push_enabled and has_push_devices),
        )
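
    # Illustrative output of to_presence_dict (hypothetical values):
    #     {'client': 'website', 'status': 'active',
    #      'timestamp': 1510000000, 'pushable': False}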

    def to_dict(self):
        # type: () -> Dict[str, Any]
        return UserPresence.to_presence_dict(
            self.client.name,
            self.status,
            self.timestamp
        )

    @staticmethod
    def status_from_string(status):
        # type: (NonBinaryStr) -> Optional[int]
        if status == 'active':
            status_val = UserPresence.ACTIVE  # type: Optional[int]  # See https://github.com/python/mypy/issues/2611
        elif status == 'idle':
            status_val = UserPresence.IDLE
        else:
            status_val = None

        return status_val
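
    # For example, status_from_string('active') returns UserPresence.ACTIVE,
    # status_from_string('idle') returns UserPresence.IDLE, and any other
    # string maps to None.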

    class Meta:
        unique_together = ("user_profile", "client")

class DefaultStream(models.Model):
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    stream = models.ForeignKey(Stream, on_delete=CASCADE)  # type: Stream

    class Meta:
        unique_together = ("realm", "stream")

class DefaultStreamGroup(models.Model):
    MAX_NAME_LENGTH = 60
    name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True)  # type: Text
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    streams = models.ManyToManyField('Stream')  # type: Manager
    description = models.CharField(max_length=1024, default=u'')  # type: Text

    class Meta:
        unique_together = ("realm", "name")

    def to_dict(self):
        # type: () -> Dict[str, Any]
        return dict(name=self.name,
                    id=self.id,
                    description=self.description,
                    streams=[stream.to_dict() for stream in self.streams.all()])
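
    # Illustrative output of to_dict (hypothetical values):
    #     {'name': 'new members', 'id': 7,
    #      'description': 'Streams recommended for new members',
    #      'streams': [...]}  # one Stream.to_dict() entry per member stream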

def get_default_stream_groups(realm):
    # type: (Realm) -> List[DefaultStreamGroup]
    return DefaultStreamGroup.objects.filter(realm=realm)

class AbstractScheduledJob(models.Model):
    scheduled_timestamp = models.DateTimeField(db_index=True)  # type: datetime.datetime
    # JSON representation of arguments to consumer
    data = models.TextField()  # type: Text

    class Meta:
        abstract = True

class ScheduledEmail(AbstractScheduledJob):
    # Exactly one of user or address should be set. These are used to
    # filter the set of ScheduledEmails.
    user = models.ForeignKey(UserProfile, null=True, on_delete=CASCADE)  # type: Optional[UserProfile]
    # Just the address part of a full "name <address>" email address
    address = models.EmailField(null=True, db_index=True)  # type: Optional[Text]

    # Valid types are below
    WELCOME = 1
    DIGEST = 2
    INVITATION_REMINDER = 3
    type = models.PositiveSmallIntegerField()  # type: int

    def __str__(self):
        # type: () -> Text
        return "<ScheduledEmail: %s %s %s>" % (self.type, self.user or self.address,
                                               self.scheduled_timestamp)

EMAIL_TYPES = {
    'followup_day1': ScheduledEmail.WELCOME,
    'followup_day2': ScheduledEmail.WELCOME,
    'digest': ScheduledEmail.DIGEST,
    'invitation_reminder': ScheduledEmail.INVITATION_REMINDER,
}
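
# EMAIL_TYPES maps the name of an email template to the ScheduledEmail type
# used when scheduling it, e.g. EMAIL_TYPES['digest'] == ScheduledEmail.DIGEST.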

class RealmAuditLog(models.Model):
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    acting_user = models.ForeignKey(UserProfile, null=True, related_name='+', on_delete=CASCADE)  # type: Optional[UserProfile]
    modified_user = models.ForeignKey(UserProfile, null=True, related_name='+', on_delete=CASCADE)  # type: Optional[UserProfile]
    modified_stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE)  # type: Optional[Stream]
    event_last_message_id = models.IntegerField(null=True)  # type: Optional[int]
    event_type = models.CharField(max_length=40)  # type: Text
    event_time = models.DateTimeField(db_index=True)  # type: datetime.datetime
    # If True, event_time is an overestimate of the true time. Can be used
    # by migrations when introducing a new event_type.
    backfilled = models.BooleanField(default=False)  # type: bool
    extra_data = models.TextField(null=True)  # type: Optional[Text]

    def __str__(self):
        # type: () -> str
        if self.modified_user is not None:
            return "<RealmAuditLog: %s %s %s>" % (self.modified_user, self.event_type, self.event_time)
        if self.modified_stream is not None:
            return "<RealmAuditLog: %s %s %s>" % (self.modified_stream, self.event_type, self.event_time)
        return "<RealmAuditLog: %s %s %s>" % (self.realm, self.event_type, self.event_time)

class UserHotspot(models.Model):
    user = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    hotspot = models.CharField(max_length=30)  # type: Text
    timestamp = models.DateTimeField(default=timezone_now)  # type: datetime.datetime

    class Meta:
        unique_together = ("user", "hotspot")

class CustomProfileField(models.Model):
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    name = models.CharField(max_length=100)  # type: Text

    INTEGER = 1
    FLOAT = 2
    SHORT_TEXT = 3
    LONG_TEXT = 4

    FIELD_TYPE_DATA = [
        # Type, Name, Validator, Converter
        (INTEGER, u'Integer', check_int, int),
        (FLOAT, u'Float', check_float, float),
        (SHORT_TEXT, u'Short Text', check_short_string, str),
        (LONG_TEXT, u'Long Text', check_string, str),
    ]  # type: List[Tuple[int, Text, Callable[[str, Any], str], Callable[[Any], Any]]]

    FIELD_VALIDATORS = {item[0]: item[2] for item in FIELD_TYPE_DATA}  # type: Dict[int, Callable[[str, Any], str]]
    FIELD_CONVERTERS = {item[0]: item[3] for item in FIELD_TYPE_DATA}  # type: Dict[int, Callable[[Any], Any]]
    FIELD_TYPE_CHOICES = [(item[0], item[1]) for item in FIELD_TYPE_DATA]  # type: List[Tuple[int, Text]]

    field_type = models.PositiveSmallIntegerField(choices=FIELD_TYPE_CHOICES,
                                                  default=SHORT_TEXT)  # type: int

    class Meta:
        unique_together = ('realm', 'name')

    def as_dict(self):
        # type: () -> Dict[str, Union[int, Text]]
        return {
            'id': self.id,
            'name': self.name,
            'type': self.field_type,
        }
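
    # Illustrative use of the lookup tables above (hypothetical field name
    # and values): a validator takes (var_name, value) and returns an error
    # string when the value is invalid, e.g.
    #     error = FIELD_VALIDATORS[CustomProfileField.INTEGER]('favorite number', 'xyz')
    # while a converter coerces a raw value to the field's Python type, e.g.
    #     FIELD_CONVERTERS[CustomProfileField.FLOAT]('3.5') == 3.5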

def custom_profile_fields_for_realm(realm_id):
    # type: (int) -> List[CustomProfileField]
    return CustomProfileField.objects.filter(realm=realm_id).order_by('name')

class CustomProfileFieldValue(models.Model):
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    field = models.ForeignKey(CustomProfileField, on_delete=CASCADE)  # type: CustomProfileField
    value = models.TextField()  # type: Text

    class Meta:
        unique_together = ('user_profile', 'field')

# Interfaces for services
# They provide additional functionality, such as parsing a message to obtain
# the query URL and the data to be sent to that URL, and parsing the response.
GENERIC_INTERFACE = u'GenericService'
SLACK_INTERFACE = u'SlackOutgoingWebhookService'

# A Service corresponds to either an outgoing webhook bot or an embedded bot.
# The type of Service is determined by the bot_type field of the referenced
# UserProfile.
#
# If the Service is an outgoing webhook bot:
# - name is any human-readable identifier for the Service
# - base_url is the address of the third-party site
# - token is used for authentication with the third-party site
#
# If the Service is an embedded bot:
# - name is the canonical name for the type of bot (e.g. 'xkcd' for an instance
#   of the xkcd bot); multiple embedded bots can have the same name, but all
#   embedded bots with the same name will run the same code
# - base_url and token are currently unused
class Service(models.Model):
    name = models.CharField(max_length=UserProfile.MAX_NAME_LENGTH)  # type: Text
    # Bot user corresponding to the Service. The bot_type of this user
    # determines the type of service. If non-bot services are added later,
    # user_profile can also represent the owner of the Service.
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    base_url = models.TextField()  # type: Text
    token = models.TextField()  # type: Text
    # Interface / API version of the service.
    interface = models.PositiveSmallIntegerField(default=1)  # type: int

    # Valid interfaces are {generic, zulip_bot_service, slack}
    GENERIC = 1
    SLACK = 2

    ALLOWED_INTERFACE_TYPES = [
        GENERIC,
        SLACK,
    ]
    # N.B. If we used Django's choice=... we would get this for free (kinda)
    _interfaces = {
        GENERIC: GENERIC_INTERFACE,
        SLACK: SLACK_INTERFACE,
    }  # type: Dict[int, Text]

    def interface_name(self):
        # type: () -> Text
        # Raises KeyError if invalid
        return self._interfaces[self.interface]
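
    # For example, a Service row with interface=Service.SLACK has
    # interface_name() == SLACK_INTERFACE ('SlackOutgoingWebhookService');
    # any interface value missing from _interfaces raises KeyError.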

def get_realm_outgoing_webhook_services_name(realm):
    # type: (Realm) -> List[Any]
    return list(Service.objects.filter(
        user_profile__realm=realm, user_profile__is_bot=True,
        user_profile__bot_type=UserProfile.OUTGOING_WEBHOOK_BOT).values('name'))
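
# Because of .values('name'), get_realm_outgoing_webhook_services_name returns
# a list of single-key dicts rather than Service instances, e.g.
# [{'name': 'weather-bot'}] (hypothetical bot name).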

def get_bot_services(user_profile_id):
    # type: (str) -> List[Service]
    return list(Service.objects.filter(user_profile__id=user_profile_id))

def get_service_profile(user_profile_id, service_name):
    # type: (str, str) -> Service
    return Service.objects.get(user_profile__id=user_profile_id, name=service_name)

class BotUserStateData(models.Model):
    bot_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    key = models.TextField(db_index=True)  # type: Text
    value = models.TextField()  # type: Text

    class Meta:
        unique_together = ("bot_profile", "key")
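
# BotUserStateData gives a bot user a simple key/value store; the
# unique_together constraint above means each (bot_profile, key) pair maps to
# at most one value row.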