from __future__ import absolute_import
from typing import Any, DefaultDict, Dict, List, Set, Tuple, TypeVar, Text, \
    Union, Optional, Sequence, AbstractSet, Pattern, AnyStr, Callable, Iterable
from typing.re import Match
from zerver.lib.str_utils import NonBinaryStr

from django.db import models
from django.db.models.query import QuerySet
from django.db.models import Manager, CASCADE
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser, UserManager, \
    PermissionsMixin
import django.contrib.auth
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator, MinLengthValidator, \
    RegexValidator
from django.dispatch import receiver
from zerver.lib.cache import cache_with_key, flush_user_profile, flush_realm, \
    user_profile_by_id_cache_key, user_profile_by_email_cache_key, \
    user_profile_cache_key, generic_bulk_cached_fetch, cache_set, flush_stream, \
    display_recipient_cache_key, cache_delete, \
    get_stream_cache_key, active_user_dicts_in_realm_cache_key, \
    bot_dicts_in_realm_cache_key, active_user_dict_fields, \
    bot_dict_fields, flush_message, bot_profile_cache_key
from zerver.lib.utils import make_safe_digest, generate_random_token
from zerver.lib.str_utils import ModelReprMixin
from django.db import transaction
from django.utils.timezone import now as timezone_now
from django.contrib.sessions.models import Session
from zerver.lib.timestamp import datetime_to_timestamp
from django.db.models.signals import pre_save, post_save, post_delete
from django.utils.translation import ugettext_lazy as _
from zerver.lib import cache
from zerver.lib.validator import check_int, check_float, check_string, \
    check_short_string
from django.utils.encoding import force_text

from bitfield import BitField
from bitfield.types import BitHandler
from collections import defaultdict
from datetime import timedelta
import pylibmc
import re
import logging
import sre_constants
import time
import datetime
import sys

MAX_SUBJECT_LENGTH = 60
MAX_MESSAGE_LENGTH = 10000
MAX_LANGUAGE_ID_LENGTH = 50 # type: int

STREAM_NAMES = TypeVar('STREAM_NAMES', Sequence[Text], AbstractSet[Text])
# Doing 1000 remote cache requests to get_display_recipient is quite slow,
# so add a local cache as well as the remote cache.
per_request_display_recipient_cache = {} # type: Dict[int, List[Dict[str, Any]]]

def get_display_recipient_by_id(recipient_id, recipient_type, recipient_type_id):
    # type: (int, int, int) -> Union[Text, List[Dict[str, Any]]]
    if recipient_id not in per_request_display_recipient_cache:
        result = get_display_recipient_remote_cache(recipient_id, recipient_type, recipient_type_id)
        per_request_display_recipient_cache[recipient_id] = result
    return per_request_display_recipient_cache[recipient_id]

def get_display_recipient(recipient):
    # type: (Recipient) -> Union[Text, List[Dict[str, Any]]]
    return get_display_recipient_by_id(
        recipient.id,
        recipient.type,
        recipient.type_id
    )

def flush_per_request_caches():
    # type: () -> None
    global per_request_display_recipient_cache
    per_request_display_recipient_cache = {}
    global per_request_realm_filters_cache
    per_request_realm_filters_cache = {}

@cache_with_key(lambda *args: display_recipient_cache_key(args[0]),
                timeout=3600*24*7)
def get_display_recipient_remote_cache(recipient_id, recipient_type, recipient_type_id):
    # type: (int, int, int) -> Union[Text, List[Dict[str, Any]]]
    """
    returns: an appropriate object describing the recipient. For a
    stream this will be the stream name as a string. For a huddle or
    personal, it will be an array of dicts about each recipient.
    """
    if recipient_type == Recipient.STREAM:
        stream = Stream.objects.get(id=recipient_type_id)
        return stream.name

    # The main priority for ordering here is being deterministic.
    # Right now, we order by ID, which matches the ordering of user
    # names in the left sidebar.
    user_profile_list = (UserProfile.objects.filter(subscription__recipient_id=recipient_id)
                         .select_related()
                         .order_by('id'))
    return [{'email': user_profile.email,
             'full_name': user_profile.full_name,
             'short_name': user_profile.short_name,
             'id': user_profile.id,
             'is_mirror_dummy': user_profile.is_mirror_dummy} for user_profile in user_profile_list]
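
# Illustrative sketch (not part of the original module): the two shapes that
# get_display_recipient_remote_cache() can return.  The names and ids below
# are hypothetical placeholder data, not real records.
#
#   get_display_recipient_remote_cache(15, Recipient.STREAM, 3)
#   # -> u'announce'   (the stream's name)
#
#   get_display_recipient_remote_cache(16, Recipient.PERSONAL, 42)
#   # -> [{'email': 'alice@example.com', 'full_name': 'Alice', 'short_name': 'alice',
#   #      'id': 42, 'is_mirror_dummy': False}]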

def get_realm_emoji_cache_key(realm):
    # type: (Realm) -> Text
    return u'realm_emoji:%s' % (realm.id,)

class Realm(ModelReprMixin, models.Model):
    MAX_REALM_NAME_LENGTH = 40
    MAX_REALM_SUBDOMAIN_LENGTH = 40
    AUTHENTICATION_FLAGS = [u'Google', u'Email', u'GitHub', u'LDAP', u'Dev', u'RemoteUser']

    name = models.CharField(max_length=MAX_REALM_NAME_LENGTH, null=True) # type: Optional[Text]
    string_id = models.CharField(max_length=MAX_REALM_SUBDOMAIN_LENGTH, unique=True) # type: Text
    restricted_to_domain = models.BooleanField(default=False) # type: bool
    invite_required = models.BooleanField(default=True) # type: bool
    invite_by_admins_only = models.BooleanField(default=False) # type: bool
    inline_image_preview = models.BooleanField(default=True) # type: bool
    inline_url_embed_preview = models.BooleanField(default=True) # type: bool
    create_stream_by_admins_only = models.BooleanField(default=False) # type: bool
    add_emoji_by_admins_only = models.BooleanField(default=False) # type: bool
    mandatory_topics = models.BooleanField(default=False) # type: bool
    show_digest_email = models.BooleanField(default=True) # type: bool
    name_changes_disabled = models.BooleanField(default=False) # type: bool
    email_changes_disabled = models.BooleanField(default=False) # type: bool
    description = models.TextField(null=True) # type: Optional[Text]

    allow_message_editing = models.BooleanField(default=True) # type: bool
    DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS = 600 # if changed, also change in admin.js
    message_content_edit_limit_seconds = models.IntegerField(default=DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS) # type: int
    message_retention_days = models.IntegerField(null=True) # type: Optional[int]

    # Valid org_types are {CORPORATE, COMMUNITY}
    CORPORATE = 1
    COMMUNITY = 2
    org_type = models.PositiveSmallIntegerField(default=CORPORATE) # type: int

    date_created = models.DateTimeField(default=timezone_now) # type: datetime.datetime
    notifications_stream = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE) # type: Optional[Stream]
    deactivated = models.BooleanField(default=False) # type: bool
    default_language = models.CharField(default=u'en', max_length=MAX_LANGUAGE_ID_LENGTH) # type: Text
    authentication_methods = BitField(flags=AUTHENTICATION_FLAGS,
                                      default=2**31 - 1) # type: BitHandler
    waiting_period_threshold = models.PositiveIntegerField(default=0) # type: int

    # Define the types of the various automatically managed properties
    property_types = dict(
        add_emoji_by_admins_only=bool,
        create_stream_by_admins_only=bool,
        default_language=Text,
        description=Text,
        email_changes_disabled=bool,
        invite_required=bool,
        invite_by_admins_only=bool,
        inline_image_preview=bool,
        inline_url_embed_preview=bool,
        mandatory_topics=bool,
        message_retention_days=(int, type(None)),
        name=Text,
        name_changes_disabled=bool,
        restricted_to_domain=bool,
        waiting_period_threshold=int,
    ) # type: Dict[str, Union[type, Tuple[type, ...]]]

    ICON_FROM_GRAVATAR = u'G'
    ICON_UPLOADED = u'U'
    ICON_SOURCES = (
        (ICON_FROM_GRAVATAR, 'Hosted by Gravatar'),
        (ICON_UPLOADED, 'Uploaded by administrator'),
    )
    icon_source = models.CharField(default=ICON_FROM_GRAVATAR, choices=ICON_SOURCES,
                                   max_length=1) # type: Text
    icon_version = models.PositiveSmallIntegerField(default=1) # type: int

    DEFAULT_NOTIFICATION_STREAM_NAME = u'announce'

    def authentication_methods_dict(self):
        # type: () -> Dict[Text, bool]
        """Returns a mapping from authentication flags to their status,
        showing only those authentication flags that are supported on
        the current server (i.e. if EmailAuthBackend is not configured
        on the server, this will not return an entry for "Email")."""
        # This mapping needs to be imported from here due to the cyclic
        # dependency.
        from zproject.backends import AUTH_BACKEND_NAME_MAP

        ret = {} # type: Dict[Text, bool]
        supported_backends = {backend.__class__ for backend in django.contrib.auth.get_backends()}
        for k, v in self.authentication_methods.iteritems():
            backend = AUTH_BACKEND_NAME_MAP[k]
            if backend in supported_backends:
                ret[k] = v
        return ret
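    # Illustrative sketch (not part of the original module): with, say, only
    # EmailAuthBackend and GitHubAuthBackend enabled on the server,
    # realm.authentication_methods_dict() would return something like
    # {u'Email': True, u'GitHub': False}, the booleans reflecting the bits
    # stored in this realm's authentication_methods BitField.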

    def __unicode__(self):
        # type: () -> Text
        return u"<Realm: %s %s>" % (self.string_id, self.id)

    @cache_with_key(get_realm_emoji_cache_key, timeout=3600*24*7)
    def get_emoji(self):
        # type: () -> Dict[Text, Optional[Dict[str, Iterable[Text]]]]
        return get_realm_emoji_uncached(self)

    def get_admin_users(self):
        # type: () -> Sequence[UserProfile]
        # TODO: Change return type to QuerySet[UserProfile]
        return UserProfile.objects.filter(realm=self, is_realm_admin=True,
                                          is_active=True).select_related()

    def get_active_users(self):
        # type: () -> Sequence[UserProfile]
        # TODO: Change return type to QuerySet[UserProfile]
        return UserProfile.objects.filter(realm=self, is_active=True).select_related()

    def get_bot_domain(self):
        # type: () -> str
        # Remove the port. Mainly needed for development environment.
        external_host = settings.EXTERNAL_HOST.split(':')[0]
        if settings.REALMS_HAVE_SUBDOMAINS or \
           Realm.objects.filter(deactivated=False) \
                        .exclude(string_id__in=settings.SYSTEM_ONLY_REALMS).count() > 1:
            return "%s.%s" % (self.string_id, external_host)
        return external_host

    @property
    def subdomain(self):
        # type: () -> Optional[Text]
        if settings.REALMS_HAVE_SUBDOMAINS:
            return self.string_id
        return None

    @property
    def uri(self):
        # type: () -> str
        if settings.REALMS_HAVE_SUBDOMAINS and self.subdomain is not None:
            return '%s%s.%s' % (settings.EXTERNAL_URI_SCHEME,
                                self.subdomain, settings.EXTERNAL_HOST)
        return settings.SERVER_URI

    @property
    def host(self):
        # type: () -> str
        if settings.REALMS_HAVE_SUBDOMAINS and self.subdomain is not None:
            return "%s.%s" % (self.subdomain, settings.EXTERNAL_HOST)
        return settings.EXTERNAL_HOST

    @property
    def is_zephyr_mirror_realm(self):
        # type: () -> bool
        return self.string_id == "zephyr"

    @property
    def webathena_enabled(self):
        # type: () -> bool
        return self.is_zephyr_mirror_realm

    @property
    def presence_disabled(self):
        # type: () -> bool
        return self.is_zephyr_mirror_realm

    class Meta(object):
        permissions = (
            ('administer', "Administer a realm"),
            ('api_super_user', "Can send messages as other users for mirroring"),
        )

post_save.connect(flush_realm, sender=Realm)
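
# Illustrative sketch (not part of the original module): Realm.property_types
# drives generic validation of realm setting updates.  The helper below is a
# hypothetical example of how a caller might use it, not an API defined here.
#
#   def validate_realm_property(name, value):
#       expected = Realm.property_types[name]   # e.g. bool, Text, or (int, type(None))
#       if not isinstance(value, expected):
#           raise ValidationError('%s has the wrong type' % (name,))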

def get_realm(string_id):
    # type: (Text) -> Realm
    return Realm.objects.filter(string_id=string_id).first()

def completely_open(realm):
    # type: (Realm) -> bool
    # This realm is completely open to everyone on the internet to
    # join. E-mail addresses do not need to match a realmdomain and
    # an invite from an existing user is not required.
    if not realm:
        return False
    return not realm.invite_required and not realm.restricted_to_domain

def get_unique_non_system_realm():
    # type: () -> Optional[Realm]
    realms = Realm.objects.filter(deactivated=False)
    # On production installations, the (usually "zulip.com") system
    # realm is an empty realm just used for system bots, so don't
    # include it in this accounting.
    realms = realms.exclude(string_id__in=settings.SYSTEM_ONLY_REALMS)
    if len(realms) != 1:
        return None
    return realms[0]

def get_unique_open_realm():
    # type: () -> Optional[Realm]
    """We only return a realm if there is a unique non-system-only realm,
    it is completely open, and there are no subdomains."""
    if settings.REALMS_HAVE_SUBDOMAINS:
        return None
    realm = get_unique_non_system_realm()
    if realm is None:
        return None
    if realm.invite_required or realm.restricted_to_domain:
        return None
    return realm

def name_changes_disabled(realm):
    # type: (Optional[Realm]) -> bool
    if realm is None:
        return settings.NAME_CHANGES_DISABLED
    return settings.NAME_CHANGES_DISABLED or realm.name_changes_disabled

class RealmDomain(models.Model):
    realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm
    # should always be stored lowercase
    domain = models.CharField(max_length=80, db_index=True) # type: Text
    allow_subdomains = models.BooleanField(default=False)

    class Meta(object):
        unique_together = ("realm", "domain")

def can_add_realm_domain(domain):
    # type: (Text) -> bool
    if settings.REALMS_HAVE_SUBDOMAINS:
        return True
    if RealmDomain.objects.filter(domain=domain).exists():
        return False
    return True

# These functions should only be used on email addresses that have
# been validated via django.core.validators.validate_email
#
# Note that we need to use some care, since you can have multiple @-signs; e.g.
# "tabbott@test"@zulip.com
# is a valid email address
def email_to_username(email):
    # type: (Text) -> Text
    return "@".join(email.split("@")[:-1]).lower()

# Returns the raw domain portion of the desired email address
def email_to_domain(email):
    # type: (Text) -> Text
    return email.split("@")[-1].lower()
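
# Illustrative examples (not part of the original module); the addresses are
# hypothetical:
#
#   email_to_username('iago@zulip.com')            # -> 'iago'
#   email_to_username('"tabbott@test"@zulip.com')  # -> '"tabbott@test"'
#   email_to_domain('Iago@ZULIP.com')              # -> 'zulip.com'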

class GetRealmByDomainException(Exception):
    pass

def get_realm_by_email_domain(email):
    # type: (Text) -> Optional[Realm]
    if settings.REALMS_HAVE_SUBDOMAINS:
        raise GetRealmByDomainException(
            "Cannot get realm from email domain when settings.REALMS_HAVE_SUBDOMAINS = True")
    domain = email_to_domain(email)
    query = RealmDomain.objects.select_related('realm')
    # Search for the longest match; if one is found, return immediately.  Since,
    # in the case of settings.REALMS_HAVE_SUBDOMAINS=True, we have a unique
    # mapping between the realm and domain, we don't need to worry about
    # `allow_subdomains` being True or False here.
    realm_domain = query.filter(domain=domain).first()
    if realm_domain is not None:
        return realm_domain.realm
    else:
        # Since we have not found an exact match, we now try matching the
        # parent domain.  Filter out the realm domains with
        # `allow_subdomains=False` so that we don't end up wrongly matching
        # 'test.zulip.com' to (realm, 'zulip.com', False).
        query = query.filter(allow_subdomains=True)
        while len(domain) > 0:
            subdomain, sep, domain = domain.partition('.')
            realm_domain = query.filter(domain=domain).first()
            if realm_domain is not None:
                return realm_domain.realm
    return None
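
# Illustrative walk-through (not part of the original module); the realm and
# addresses are hypothetical.  Suppose a RealmDomain row exists for
# (realm=<corp>, domain='example.com', allow_subdomains=True):
#
#   get_realm_by_email_domain('alice@example.com')      # exact match -> <corp>
#   get_realm_by_email_domain('bob@sales.example.com')  # parent-domain match -> <corp>
#   get_realm_by_email_domain('carol@elsewhere.org')    # no match -> None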

# Is a user with the given email address allowed to be in the given realm?
# (This function does not check whether the user has been invited to the realm.
# So for invite-only realms, this is the test for whether a user can be invited,
# not whether the user can sign up currently.)
def email_allowed_for_realm(email, realm):
    # type: (Text, Realm) -> bool
    if not realm.restricted_to_domain:
        return True
    domain = email_to_domain(email)
    query = RealmDomain.objects.filter(realm=realm)
    if query.filter(domain=domain).exists():
        return True
    else:
        query = query.filter(allow_subdomains=True)
        while len(domain) > 0:
            subdomain, sep, domain = domain.partition('.')
            if query.filter(domain=domain).exists():
                return True
    return False

def get_realm_domains(realm):
    # type: (Realm) -> List[Dict[str, Text]]
    return list(realm.realmdomain_set.values('domain', 'allow_subdomains'))

class RealmEmoji(ModelReprMixin, models.Model):
    author = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
    realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm
    # Second part of the regex (negative lookbehind) disallows names ending
    # with one of the punctuation characters.
    name = models.TextField(validators=[MinLengthValidator(1),
                                        RegexValidator(regex=r'^[0-9a-z.\-_]+(?<![.\-_])$',
                                                       message=_("Invalid characters in emoji name"))]) # type: Text
    file_name = models.TextField(db_index=True, null=True) # type: Optional[Text]
    deactivated = models.BooleanField(default=False) # type: bool

    PATH_ID_TEMPLATE = "{realm_id}/emoji/{emoji_file_name}"

    class Meta(object):
        unique_together = ("realm", "name")

    def __unicode__(self):
        # type: () -> Text
        return u"<RealmEmoji(%s): %s %s>" % (self.realm.string_id, self.name, self.file_name)

def get_realm_emoji_uncached(realm):
    # type: (Realm) -> Dict[Text, Dict[str, Any]]
    d = {}
    from zerver.lib.emoji import get_emoji_url
    for row in RealmEmoji.objects.filter(realm=realm).select_related('author'):
        if row.author:
            author = {
                'id': row.author.id,
                'email': row.author.email,
                'full_name': row.author.full_name}
        else:
            author = None
        d[row.name] = dict(source_url=get_emoji_url(row.file_name, row.realm_id),
                           deactivated=row.deactivated,
                           author=author)
    return d
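
# Illustrative sketch (not part of the original module): the shape of the
# mapping returned by get_realm_emoji_uncached(); the emoji name, URL and
# author below are hypothetical.
#
#   {u'party_parrot': {'source_url': '/user_avatars/2/emoji/party_parrot.png',
#                      'deactivated': False,
#                      'author': {'id': 7, 'email': 'iago@example.com',
#                                 'full_name': 'Iago'}}}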

def flush_realm_emoji(sender, **kwargs):
    # type: (Any, **Any) -> None
    realm = kwargs['instance'].realm
    cache_set(get_realm_emoji_cache_key(realm),
              get_realm_emoji_uncached(realm),
              timeout=3600*24*7)

post_save.connect(flush_realm_emoji, sender=RealmEmoji)
post_delete.connect(flush_realm_emoji, sender=RealmEmoji)

def filter_pattern_validator(value):
    # type: (Text) -> None
    regex = re.compile(r'(?:[\w\-#]*)(\(\?P<\w+>.+\))')
    error_msg = 'Invalid filter pattern, you must use the following format OPTIONAL_PREFIX(?P<id>.+)'

    if not regex.match(str(value)):
        raise ValidationError(error_msg)

    try:
        re.compile(value)
    except sre_constants.error:
        # Regex is invalid
        raise ValidationError(error_msg)

def filter_format_validator(value):
    # type: (str) -> None
    regex = re.compile(r'^[\.\/:a-zA-Z0-9_?=-]+%\(([a-zA-Z0-9_-]+)\)s[a-zA-Z0-9_-]*$')

    if not regex.match(value):
        raise ValidationError('URL format string must be in the following format: `https://example.com/%(\w+)s`')
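
# Illustrative example (not part of the original module): a pattern /
# URL-format pair that passes both validators above; the issue-tracker URL is
# hypothetical.
#
#   filter_pattern_validator(u'#(?P<id>[0-9]+)')                             # OK
#   filter_format_validator('https://github.com/zulip/zulip/issues/%(id)s')  # OK
#   filter_pattern_validator(u'#123')  # raises ValidationError (no named group)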

class RealmFilter(models.Model):
    realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm
    pattern = models.TextField(validators=[filter_pattern_validator]) # type: Text
    url_format_string = models.TextField(validators=[URLValidator, filter_format_validator]) # type: Text

    class Meta(object):
        unique_together = ("realm", "pattern")

    def __unicode__(self):
        # type: () -> Text
        return u"<RealmFilter(%s): %s %s>" % (self.realm.string_id, self.pattern, self.url_format_string)

def get_realm_filters_cache_key(realm_id):
    # type: (int) -> Text
    return u'all_realm_filters:%s' % (realm_id,)

# We have a per-process cache to avoid doing 1000 remote cache queries during page load
per_request_realm_filters_cache = {} # type: Dict[int, List[Tuple[Text, Text, int]]]

def realm_in_local_realm_filters_cache(realm_id):
    # type: (int) -> bool
    return realm_id in per_request_realm_filters_cache

def realm_filters_for_realm(realm_id):
    # type: (int) -> List[Tuple[Text, Text, int]]
    if not realm_in_local_realm_filters_cache(realm_id):
        per_request_realm_filters_cache[realm_id] = realm_filters_for_realm_remote_cache(realm_id)
    return per_request_realm_filters_cache[realm_id]

@cache_with_key(get_realm_filters_cache_key, timeout=3600*24*7)
def realm_filters_for_realm_remote_cache(realm_id):
    # type: (int) -> List[Tuple[Text, Text, int]]
    filters = []
    for realm_filter in RealmFilter.objects.filter(realm_id=realm_id):
        filters.append((realm_filter.pattern, realm_filter.url_format_string, realm_filter.id))

    return filters

def all_realm_filters():
    # type: () -> Dict[int, List[Tuple[Text, Text, int]]]
    filters = defaultdict(list) # type: DefaultDict[int, List[Tuple[Text, Text, int]]]
    for realm_filter in RealmFilter.objects.all():
        filters[realm_filter.realm_id].append((realm_filter.pattern, realm_filter.url_format_string, realm_filter.id))

    return filters

def flush_realm_filter(sender, **kwargs):
    # type: (Any, **Any) -> None
    realm_id = kwargs['instance'].realm_id
    cache_delete(get_realm_filters_cache_key(realm_id))
    try:
        per_request_realm_filters_cache.pop(realm_id)
    except KeyError:
        pass

post_save.connect(flush_realm_filter, sender=RealmFilter)
post_delete.connect(flush_realm_filter, sender=RealmFilter)
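
# Illustrative sketch (not part of the original module): what a call to
# realm_filters_for_realm() might return for a realm with one linkifier
# configured; the pattern, URL and id are hypothetical.
#
#   realm_filters_for_realm(realm.id)
#   # -> [(u'#(?P<id>[0-9]+)', u'https://github.com/zulip/zulip/issues/%(id)s', 17)]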

class UserProfile(ModelReprMixin, AbstractBaseUser, PermissionsMixin):
    DEFAULT_BOT = 1
    """
    Incoming webhook bots are limited to only sending messages via webhooks.
    Thus, it is less of a security risk to expose their API keys to third-party services,
    since they can't be used to read messages.
    """
    INCOMING_WEBHOOK_BOT = 2
    # This value is also used in static/js/settings_bots.js.  If you update it
    # here, update it there as well.
    OUTGOING_WEBHOOK_BOT = 3
    """
    Embedded bots run within the Zulip server itself; events are added to the
    embedded_bots queue and then handled by a QueueProcessingWorker.
    """
    EMBEDDED_BOT = 4

    # For now, don't allow creating other bot types via the UI
    ALLOWED_BOT_TYPES = [
        DEFAULT_BOT,
        INCOMING_WEBHOOK_BOT,
        OUTGOING_WEBHOOK_BOT,
    ]

    SERVICE_BOT_TYPES = [
        OUTGOING_WEBHOOK_BOT,
        EMBEDDED_BOT
    ]

    # Fields from models.AbstractUser minus last_name and first_name,
    # which we don't use; email is modified to make it indexed and unique.
    email = models.EmailField(blank=False, db_index=True, unique=True) # type: Text
    is_staff = models.BooleanField(default=False) # type: bool
    is_active = models.BooleanField(default=True, db_index=True) # type: bool
    is_realm_admin = models.BooleanField(default=False, db_index=True) # type: bool
    is_bot = models.BooleanField(default=False, db_index=True) # type: bool
    bot_type = models.PositiveSmallIntegerField(null=True, db_index=True) # type: Optional[int]
    is_api_super_user = models.BooleanField(default=False, db_index=True) # type: bool
    date_joined = models.DateTimeField(default=timezone_now) # type: datetime.datetime
    is_mirror_dummy = models.BooleanField(default=False) # type: bool
    bot_owner = models.ForeignKey('self', null=True, on_delete=models.SET_NULL) # type: Optional[UserProfile]
    long_term_idle = models.BooleanField(default=False, db_index=True) # type: bool

    USERNAME_FIELD = 'email'
    MAX_NAME_LENGTH = 100
    MIN_NAME_LENGTH = 3
    API_KEY_LENGTH = 32
    NAME_INVALID_CHARS = ['*', '`', '>', '"', '@']

    # Our custom site-specific fields
    full_name = models.CharField(max_length=MAX_NAME_LENGTH) # type: Text
    short_name = models.CharField(max_length=MAX_NAME_LENGTH) # type: Text
    # pointer points to Message.id, NOT UserMessage.id.
    pointer = models.IntegerField() # type: int
    last_pointer_updater = models.CharField(max_length=64) # type: Text
    realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm
    api_key = models.CharField(max_length=API_KEY_LENGTH) # type: Text
    tos_version = models.CharField(null=True, max_length=10) # type: Optional[Text]
    last_active_message_id = models.IntegerField(null=True) # type: Optional[int]

    ### Notifications settings. ###

    # Stream notifications.
    enable_stream_desktop_notifications = models.BooleanField(default=False) # type: bool
    enable_stream_sounds = models.BooleanField(default=False) # type: bool

    # PM + @-mention notifications.
    enable_desktop_notifications = models.BooleanField(default=True) # type: bool
    pm_content_in_desktop_notifications = models.BooleanField(default=True) # type: bool
    enable_sounds = models.BooleanField(default=True) # type: bool
    enable_offline_email_notifications = models.BooleanField(default=True) # type: bool
    enable_offline_push_notifications = models.BooleanField(default=True) # type: bool
    enable_online_push_notifications = models.BooleanField(default=False) # type: bool

    enable_digest_emails = models.BooleanField(default=True) # type: bool

    # Old notification field superseded by existence of stream notification
    # settings.
    default_desktop_notifications = models.BooleanField(default=True) # type: bool

    ###

    last_reminder = models.DateTimeField(default=timezone_now, null=True) # type: Optional[datetime.datetime]
    rate_limits = models.CharField(default=u"", max_length=100) # type: Text # comma-separated list of range:max pairs

    # Default streams
    default_sending_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE) # type: Optional[Stream]
    default_events_register_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE) # type: Optional[Stream]
    default_all_public_streams = models.BooleanField(default=False) # type: bool

    # UI vars
    enter_sends = models.NullBooleanField(default=False) # type: Optional[bool]
    autoscroll_forever = models.BooleanField(default=False) # type: bool
    left_side_userlist = models.BooleanField(default=False) # type: bool
    emoji_alt_code = models.BooleanField(default=False) # type: bool

    # display settings
    twenty_four_hour_time = models.BooleanField(default=False) # type: bool
    default_language = models.CharField(default=u'en', max_length=MAX_LANGUAGE_ID_LENGTH) # type: Text

    # Hours to wait before sending another email to a user
    EMAIL_REMINDER_WAITPERIOD = 24
    # Minutes to wait before warning a bot owner that their bot sent a message
    # to a nonexistent stream
    BOT_OWNER_STREAM_ALERT_WAITPERIOD = 1

    AVATAR_FROM_GRAVATAR = u'G'
    AVATAR_FROM_USER = u'U'
    AVATAR_SOURCES = (
        (AVATAR_FROM_GRAVATAR, 'Hosted by Gravatar'),
        (AVATAR_FROM_USER, 'Uploaded by user'),
    )
    avatar_source = models.CharField(default=AVATAR_FROM_GRAVATAR, choices=AVATAR_SOURCES, max_length=1) # type: Text
    avatar_version = models.PositiveSmallIntegerField(default=1) # type: int

    TUTORIAL_WAITING = u'W'
    TUTORIAL_STARTED = u'S'
    TUTORIAL_FINISHED = u'F'
    TUTORIAL_STATES = ((TUTORIAL_WAITING, "Waiting"),
                       (TUTORIAL_STARTED, "Started"),
                       (TUTORIAL_FINISHED, "Finished"))

    tutorial_status = models.CharField(default=TUTORIAL_WAITING, choices=TUTORIAL_STATES, max_length=1) # type: Text
    # Contains serialized JSON of the form:
    #   [("step 1", true), ("step 2", false)]
    # where the second element of each tuple is whether the step has been
    # completed.
    onboarding_steps = models.TextField(default=u'[]') # type: Text

    alert_words = models.TextField(default=u'[]') # type: Text # json-serialized list of strings

    # Contains serialized JSON of the form:
    #   [["social", "mit"], ["devel", "ios"]]
    muted_topics = models.TextField(default=u'[]') # type: Text

    objects = UserManager() # type: UserManager

    DEFAULT_UPLOADS_QUOTA = 1024*1024*1024

    quota = models.IntegerField(default=DEFAULT_UPLOADS_QUOTA) # type: int
    # The maximum length of a timezone in pytz.all_timezones is 32.
    # Setting max_length=40 is a safe choice.
    # In Django, the convention is to use an empty string instead of NULL
    # for text-based fields.  For more information, see
    # https://docs.djangoproject.com/en/1.10/ref/models/fields/#django.db.models.Field.null.
    timezone = models.CharField(max_length=40, default=u'') # type: Text

    # Emojisets
    APPLE_EMOJISET = u'apple'
    EMOJIONE_EMOJISET = u'emojione'
    GOOGLE_EMOJISET = u'google'
    TWITTER_EMOJISET = u'twitter'
    EMOJISET_CHOICES = ((APPLE_EMOJISET, _("Apple style")),
                        (EMOJIONE_EMOJISET, _("Emoji One style")),
                        (GOOGLE_EMOJISET, _("Google style")),
                        (TWITTER_EMOJISET, _("Twitter style")))
    emojiset = models.CharField(default=GOOGLE_EMOJISET, choices=EMOJISET_CHOICES, max_length=20) # type: Text

    # Define the types of the various automatically managed properties
    property_types = dict(
        default_language=Text,
        emoji_alt_code=bool,
        emojiset=Text,
        left_side_userlist=bool,
        timezone=Text,
        twenty_four_hour_time=bool,
    )

    notification_setting_types = dict(
        enable_desktop_notifications=bool,
        enable_digest_emails=bool,
        enable_offline_email_notifications=bool,
        enable_offline_push_notifications=bool,
        enable_online_push_notifications=bool,
        enable_sounds=bool,
        enable_stream_desktop_notifications=bool,
        enable_stream_sounds=bool,
        pm_content_in_desktop_notifications=bool,
    )

    @property
    def profile_data(self):
        # type: () -> List[Dict[str, Union[int, float, Text]]]
        values = CustomProfileFieldValue.objects.filter(user_profile=self)
        user_data = {v.field_id: v.value for v in values}
        data = [] # type: List[Dict[str, Union[int, float, Text]]]
        for field in custom_profile_fields_for_realm(self.realm_id):
            value = user_data.get(field.id, None)
            field_type = field.field_type
            if value is not None:
                converter = field.FIELD_CONVERTERS[field_type]
                value = converter(value)

            field_data = {} # type: Dict[str, Union[int, float, Text]]
            for k, v in field.as_dict().items():
                field_data[k] = v
            field_data['value'] = value
            data.append(field_data)

        return data

    def can_admin_user(self, target_user):
        # type: (UserProfile) -> bool
        """Returns whether this user has permission to modify target_user"""
        if target_user.bot_owner == self:
            return True
        elif self.is_realm_admin and self.realm == target_user.realm:
            return True
        else:
            return False

    def __unicode__(self):
        # type: () -> Text
        return u"<UserProfile: %s %s>" % (self.email, self.realm)

    @property
    def is_incoming_webhook(self):
        # type: () -> bool
        return self.bot_type == UserProfile.INCOMING_WEBHOOK_BOT

    @property
    def is_outgoing_webhook_bot(self):
        # type: () -> bool
        return self.bot_type == UserProfile.OUTGOING_WEBHOOK_BOT

    @property
    def is_embedded_bot(self):
        # type: () -> bool
        return self.bot_type == UserProfile.EMBEDDED_BOT

    @property
    def is_service_bot(self):
        # type: () -> bool
        return self.is_bot and self.bot_type in UserProfile.SERVICE_BOT_TYPES

    @staticmethod
    def emojiset_choices():
        # type: () -> Dict[Text, Text]
        return {emojiset[0]: force_text(emojiset[1]) for emojiset in UserProfile.EMOJISET_CHOICES}

    @staticmethod
    def emails_from_ids(user_ids):
        # type: (Sequence[int]) -> Dict[int, Text]
        rows = UserProfile.objects.filter(id__in=user_ids).values('id', 'email')
        return {row['id']: row['email'] for row in rows}

    def can_create_streams(self):
        # type: () -> bool
        diff = (timezone_now() - self.date_joined).days
        if self.is_realm_admin:
            return True
        elif self.realm.create_stream_by_admins_only:
            return False
        if diff >= self.realm.waiting_period_threshold:
            return True
        return False
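    # Illustrative sketch (not part of the original module): how the
    # waiting-period check above plays out.  In a realm with
    # create_stream_by_admins_only=False and waiting_period_threshold=3,
    # a non-admin who joined 5 days ago can create streams (5 >= 3), while
    # one who joined yesterday cannot; realm admins always can.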

    def major_tos_version(self):
        # type: () -> int
        if self.tos_version is not None:
            return int(self.tos_version.split('.')[0])
        else:
            return -1

def receives_offline_notifications(user_profile):
    # type: (UserProfile) -> bool
    return ((user_profile.enable_offline_email_notifications or
             user_profile.enable_offline_push_notifications) and
            not user_profile.is_bot)

def receives_online_notifications(user_profile):
    # type: (UserProfile) -> bool
    return (user_profile.enable_online_push_notifications and
            not user_profile.is_bot)

def remote_user_to_email(remote_user):
    # type: (Text) -> Text
    if settings.SSO_APPEND_DOMAIN is not None:
        remote_user += "@" + settings.SSO_APPEND_DOMAIN
    return remote_user
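
# Illustrative example (not part of the original module); the domain is
# hypothetical.  With settings.SSO_APPEND_DOMAIN = 'example.com':
#
#   remote_user_to_email('alice')   # -> 'alice@example.com'
#
# and with SSO_APPEND_DOMAIN = None the username is returned unchanged.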

# Make sure we flush the UserProfile object from our remote cache
# whenever we save it.
post_save.connect(flush_user_profile, sender=UserProfile)

class PreregistrationUser(models.Model):
    email = models.EmailField() # type: Text
    referred_by = models.ForeignKey(UserProfile, null=True, on_delete=CASCADE) # type: Optional[UserProfile]
    streams = models.ManyToManyField('Stream') # type: Manager
    invited_at = models.DateTimeField(auto_now=True) # type: datetime.datetime
    realm_creation = models.BooleanField(default=False)

    # status: whether an object has been confirmed.
    #   if confirmed, set to confirmation.settings.STATUS_ACTIVE
    status = models.IntegerField(default=0) # type: int

    realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE) # type: Optional[Realm]

class EmailChangeStatus(models.Model):
    new_email = models.EmailField() # type: Text
    old_email = models.EmailField() # type: Text
    updated_at = models.DateTimeField(auto_now=True) # type: datetime.datetime
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile

    # status: whether an object has been confirmed.
    #   if confirmed, set to confirmation.settings.STATUS_ACTIVE
    status = models.IntegerField(default=0) # type: int

    realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm

class AbstractPushDeviceToken(models.Model):
    APNS = 1
    GCM = 2

    KINDS = (
        (APNS, 'apns'),
        (GCM, 'gcm'),
    )

    kind = models.PositiveSmallIntegerField(choices=KINDS) # type: int

    # The token is a unique device-specific token that is
    # sent to us from each device:
    #   - APNS token if kind == APNS
    #   - GCM registration id if kind == GCM
    token = models.CharField(max_length=4096, unique=True) # type: bytes
    last_updated = models.DateTimeField(auto_now=True) # type: datetime.datetime

    # [optional] Contains the app id of the device if it is an iOS device
    ios_app_id = models.TextField(null=True) # type: Optional[Text]

    class Meta(object):
        abstract = True

class PushDeviceToken(AbstractPushDeviceToken):
    # The user whose device this is
    user = models.ForeignKey(UserProfile, db_index=True, on_delete=CASCADE) # type: UserProfile

def generate_email_token_for_stream():
    # type: () -> Text
    return generate_random_token(32)

class Stream(ModelReprMixin, models.Model):
    MAX_NAME_LENGTH = 60
    name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True) # type: Text
    realm = models.ForeignKey(Realm, db_index=True, on_delete=CASCADE) # type: Realm
    invite_only = models.NullBooleanField(default=False) # type: Optional[bool]
    # Used by the e-mail forwarder.  The e-mail RFC specifies a maximum
    # e-mail length of 254, and our max stream name length is 60, so we
    # have plenty of room for the token.
    email_token = models.CharField(
        max_length=32, default=generate_email_token_for_stream) # type: Text
    description = models.CharField(max_length=1024, default=u'') # type: Text

    date_created = models.DateTimeField(default=timezone_now) # type: datetime.datetime
    deactivated = models.BooleanField(default=False) # type: bool

    def __unicode__(self):
        # type: () -> Text
        return u"<Stream: %s>" % (self.name,)

    def is_public(self):
        # type: () -> bool
        # All streams are private in Zephyr mirroring realms.
        return not self.invite_only and not self.realm.is_zephyr_mirror_realm

    class Meta(object):
        unique_together = ("name", "realm")

    def num_subscribers(self):
        # type: () -> int
        return Subscription.objects.filter(
            recipient__type=Recipient.STREAM,
            recipient__type_id=self.id,
            user_profile__is_active=True,
            active=True
        ).count()

    # This is stream information that is sent to clients
    def to_dict(self):
        # type: () -> Dict[str, Any]
        return dict(name=self.name,
                    stream_id=self.id,
                    description=self.description,
                    invite_only=self.invite_only)

post_save.connect(flush_stream, sender=Stream)
post_delete.connect(flush_stream, sender=Stream)
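
# Illustrative sketch (not part of the original module): the client-facing
# payload produced by Stream.to_dict(); the values are hypothetical.
#
#   {'name': u'design', 'stream_id': 12,
#    'description': u'UI and UX discussion', 'invite_only': False}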
2016-04-01 08:42:38 +02:00
|
|
|
# The Recipient table is used to map Messages to the set of users who
|
|
|
|
# received the message. It is implemented as a set of triples (id,
|
|
|
|
# type_id, type). We have 3 types of recipients: Huddles (for group
|
|
|
|
# private messages), UserProfiles (for 1:1 private messages), and
|
2016-06-02 21:59:58 +02:00
|
|
|
# Streams. The recipient table maps a globally unique recipient id
|
2016-04-01 08:42:38 +02:00
|
|
|
# (used by the Message table) to the type-specific unique id (the
|
|
|
|
# stream id, user_profile id, or huddle id).
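#
# Illustrative example (not part of the model code): a stream with id 7 is
# represented by a row like Recipient(type=Recipient.STREAM, type_id=7), and
# messages sent to that stream reference the Recipient's id rather than the
# Stream's id, e.g.:
#
#   recipient = Recipient.objects.get(type=Recipient.STREAM, type_id=stream.id)
#   Message.objects.filter(recipient=recipient)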
|
2016-06-12 09:36:05 +02:00
|
|
|
class Recipient(ModelReprMixin, models.Model):
|
2016-06-13 10:18:29 +02:00
|
|
|
type_id = models.IntegerField(db_index=True) # type: int
|
|
|
|
type = models.PositiveSmallIntegerField(db_index=True) # type: int
|
2012-10-10 22:58:51 +02:00
|
|
|
# Valid types are {personal, stream, huddle}
|
2012-09-07 20:14:13 +02:00
|
|
|
PERSONAL = 1
|
2012-10-10 22:57:21 +02:00
|
|
|
STREAM = 2
|
2012-09-07 20:14:13 +02:00
|
|
|
HUDDLE = 3
|
|
|
|
|
2015-10-14 22:43:04 +02:00
|
|
|
class Meta(object):
|
2012-11-07 22:33:38 +01:00
|
|
|
unique_together = ("type", "type_id")
|
|
|
|
|
2012-11-02 21:08:29 +01:00
|
|
|
# N.B. If we used Django's choice=... we would get this for free (kinda)
|
|
|
|
_type_names = {
|
|
|
|
PERSONAL: 'personal',
|
2017-01-24 06:21:14 +01:00
|
|
|
STREAM: 'stream',
|
|
|
|
HUDDLE: 'huddle'}
|
2012-11-02 21:08:29 +01:00
|
|
|
|
2012-09-07 20:14:13 +02:00
|
|
|
def type_name(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> str
|
2012-11-02 21:08:29 +01:00
|
|
|
# Raises KeyError if invalid
|
|
|
|
return self._type_names[self.type]
|
2012-08-28 21:27:42 +02:00
|
|
|
|
2016-06-12 09:36:05 +02:00
|
|
|
def __unicode__(self):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: () -> Text
|
2012-08-29 16:15:06 +02:00
|
|
|
display_recipient = get_display_recipient(self)
|
2016-06-12 09:36:05 +02:00
|
|
|
return u"<Recipient: %s (%d, %s)>" % (display_recipient, self.type_id, self.type)
|
2012-08-29 16:15:06 +02:00
|
|
|
|
2016-06-12 09:36:05 +02:00
|
|
|
class Client(ModelReprMixin, models.Model):
|
2016-11-23 05:01:12 +01:00
|
|
|
name = models.CharField(max_length=30, db_index=True, unique=True) # type: Text
|
2012-10-19 21:30:42 +02:00
|
|
|
|
2016-06-12 09:36:05 +02:00
|
|
|
def __unicode__(self):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: () -> Text
|
2016-06-12 09:36:05 +02:00
|
|
|
return u"<Client: %s>" % (self.name,)
|
2016-04-21 00:26:45 +02:00
|
|
|
|
2016-11-23 05:01:12 +01:00
|
|
|
get_client_cache = {} # type: Dict[Text, Client]
|
2013-11-20 22:16:48 +01:00
|
|
|
def get_client(name):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> Client
|
2016-11-14 09:23:03 +01:00
|
|
|
# Accessing KEY_PREFIX through the module is necessary
|
|
|
|
# because we need the updated value of the variable.
|
|
|
|
cache_name = cache.KEY_PREFIX + name
|
|
|
|
if cache_name not in get_client_cache:
|
2016-03-31 03:30:33 +02:00
|
|
|
result = get_client_remote_cache(name)
|
2016-11-14 09:23:03 +01:00
|
|
|
get_client_cache[cache_name] = result
|
|
|
|
return get_client_cache[cache_name]
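# Usage sketch (illustrative): get_client('website') returns the shared Client
# row for that name, creating it on first use via get_or_create; within a
# process, repeated lookups are served from the in-memory dict above rather
# than the remote cache.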
|
2013-11-20 22:16:48 +01:00
|
|
|
|
2013-03-26 17:47:52 +01:00
|
|
|
def get_client_cache_key(name):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> Text
|
2016-06-12 07:25:42 +02:00
|
|
|
return u'get_client:%s' % (make_safe_digest(name),)
|
2013-03-26 17:47:52 +01:00
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(get_client_cache_key, timeout=3600*24*7)
|
2016-03-31 03:30:33 +02:00
|
|
|
def get_client_remote_cache(name):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> Client
|
2013-11-01 18:59:05 +01:00
|
|
|
(client, _) = Client.objects.get_or_create(name=name)
|
2012-10-19 21:30:42 +02:00
|
|
|
return client
|
|
|
|
|
2013-03-19 13:05:19 +01:00
|
|
|
# get_stream_backend takes a stream name and a Realm object
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(get_stream_cache_key, timeout=3600*24*7)
|
2013-03-19 13:05:19 +01:00
|
|
|
def get_stream_backend(stream_name, realm):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text, Realm) -> Stream
|
2013-03-19 13:05:19 +01:00
|
|
|
return Stream.objects.select_related("realm").get(
|
2016-09-20 03:13:39 +02:00
|
|
|
name__iexact=stream_name.strip(), realm_id=realm.id)
|
2013-03-19 13:05:19 +01:00
|
|
|
|
2014-01-24 23:30:53 +01:00
|
|
|
def get_active_streams(realm):
|
2017-05-26 02:08:16 +02:00
|
|
|
# type: (Optional[Realm]) -> QuerySet
|
2014-01-24 23:30:53 +01:00
|
|
|
"""
|
|
|
|
Return all streams (including invite-only streams) that have not been deactivated.
|
|
|
|
"""
|
|
|
|
return Stream.objects.filter(realm=realm, deactivated=False)
|
|
|
|
|
2013-03-19 13:05:19 +01:00
|
|
|
def get_stream(stream_name, realm):
|
2017-03-23 07:22:28 +01:00
|
|
|
# type: (Text, Realm) -> Stream
|
|
|
|
return get_stream_backend(stream_name, realm)
|
2013-01-17 22:16:39 +01:00
|
|
|
|
2013-06-27 22:52:05 +02:00
|
|
|
def bulk_get_streams(realm, stream_names):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Realm, STREAM_NAMES) -> Dict[Text, Any]
|
2013-06-27 22:52:05 +02:00
|
|
|
|
|
|
|
def fetch_streams_by_name(stream_names):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (List[Text]) -> Sequence[Stream]
|
2016-06-04 09:02:05 +02:00
|
|
|
#
|
2013-06-27 22:52:05 +02:00
|
|
|
# This should be just
|
|
|
|
#
|
|
|
|
# Stream.objects.select_related("realm").filter(name__iexact__in=stream_names,
|
|
|
|
# realm_id=realm_id)
|
|
|
|
#
|
|
|
|
# But chaining __in and __iexact doesn't work with Django's
|
|
|
|
# ORM, so we have the following hack to construct the relevant where clause
|
|
|
|
if len(stream_names) == 0:
|
|
|
|
return []
|
|
|
|
upper_list = ", ".join(["UPPER(%s)"] * len(stream_names))
|
2013-07-29 23:03:31 +02:00
|
|
|
where_clause = "UPPER(zerver_stream.name::text) IN (%s)" % (upper_list,)
|
2016-09-20 03:19:50 +02:00
|
|
|
return get_active_streams(realm.id).select_related("realm").extra(
|
2013-06-27 22:52:05 +02:00
|
|
|
where=[where_clause],
|
|
|
|
params=stream_names)
|
|
|
|
|
|
|
|
return generic_bulk_cached_fetch(lambda stream_name: get_stream_cache_key(stream_name, realm),
|
|
|
|
fetch_streams_by_name,
|
|
|
|
[stream_name.lower() for stream_name in stream_names],
|
|
|
|
id_fetcher=lambda stream: stream.name.lower())
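# Usage sketch (illustrative names): the result is keyed by the lower-cased
# stream name, so a missing stream simply has no entry:
#
#   streams = bulk_get_streams(realm, ['Denmark', 'Verona'])
#   denmark = streams.get('denmark')  # Stream, or None if it does not exist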
|
|
|
|
|
2013-03-26 17:10:44 +01:00
|
|
|
def get_recipient_cache_key(type, type_id):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (int, int) -> Text
|
2017-05-05 14:44:45 +02:00
|
|
|
return u"%s:get_recipient:%s:%s" % (cache.KEY_PREFIX, type, type_id,)
|
2013-03-26 17:10:44 +01:00
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(get_recipient_cache_key, timeout=3600*24*7)
|
2013-03-18 16:54:58 +01:00
|
|
|
def get_recipient(type, type_id):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (int, int) -> Recipient
|
2013-03-18 16:54:58 +01:00
|
|
|
return Recipient.objects.get(type_id=type_id, type=type)
|
|
|
|
|
2013-06-25 19:26:58 +02:00
|
|
|
def bulk_get_recipients(type, type_ids):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (int, List[int]) -> Dict[int, Any]
|
2013-06-25 19:26:58 +02:00
|
|
|
def cache_key_function(type_id):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (int) -> Text
|
2013-06-25 19:26:58 +02:00
|
|
|
return get_recipient_cache_key(type, type_id)
|
2016-11-29 07:22:02 +01:00
|
|
|
|
2013-06-25 19:26:58 +02:00
|
|
|
def query_function(type_ids):
|
2016-06-12 14:43:15 +02:00
|
|
|
# type: (List[int]) -> Sequence[Recipient]
|
|
|
|
# TODO: Change return type to QuerySet[Recipient]
|
2013-06-25 19:26:58 +02:00
|
|
|
return Recipient.objects.filter(type=type, type_id__in=type_ids)
|
|
|
|
|
|
|
|
return generic_bulk_cached_fetch(cache_key_function, query_function, type_ids,
|
|
|
|
id_fetcher=lambda recipient: recipient.type_id)
|
|
|
|
|
2016-12-06 07:19:34 +01:00
|
|
|
|
|
|
|
def sew_messages_and_reactions(messages, reactions):
|
|
|
|
# type: (List[Dict[str, Any]], List[Dict[str, Any]]) -> List[Dict[str, Any]]
|
|
|
|
"""Given a iterable of messages and reactions stitch reactions
|
|
|
|
into messages.
|
|
|
|
"""
|
|
|
|
# Give every message an empty reactions list to start with
|
|
|
|
for message in messages:
|
|
|
|
message['reactions'] = []
|
|
|
|
|
|
|
|
# Convert the list of messages into a dictionary keyed by id to make reaction stitching easy
|
|
|
|
converted_messages = {message['id']: message for message in messages}
|
|
|
|
|
|
|
|
for reaction in reactions:
|
|
|
|
converted_messages[reaction['message_id']]['reactions'].append(
|
|
|
|
reaction)
|
|
|
|
|
|
|
|
return list(converted_messages.values())
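# A minimal sketch of the expected row shapes (hypothetical data; real rows
# carry the fields selected in the get_raw_db_rows helpers):
#
#   messages  = [{'id': 1, 'content': 'hi'}]
#   reactions = [{'message_id': 1, 'emoji_name': 'smile',
#                 'user_profile__email': 'iago@zulip.com'}]
#   sew_messages_and_reactions(messages, reactions)
#   # -> [{'id': 1, 'content': 'hi', 'reactions': [<the reaction row>]}]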
|
|
|
|
|
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
class AbstractMessage(ModelReprMixin, models.Model):
|
2017-06-01 10:44:16 +02:00
|
|
|
sender = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
recipient = models.ForeignKey(Recipient, on_delete=CASCADE) # type: Recipient
|
2016-11-23 05:01:12 +01:00
|
|
|
subject = models.CharField(max_length=MAX_SUBJECT_LENGTH, db_index=True) # type: Text
|
|
|
|
content = models.TextField() # type: Text
|
|
|
|
rendered_content = models.TextField(null=True) # type: Optional[Text]
|
2016-06-13 10:22:56 +02:00
|
|
|
rendered_content_version = models.IntegerField(null=True) # type: Optional[int]
|
|
|
|
pub_date = models.DateTimeField('date published', db_index=True) # type: datetime.datetime
|
2017-06-01 10:44:16 +02:00
|
|
|
sending_client = models.ForeignKey(Client, on_delete=CASCADE) # type: Client
|
2016-06-13 10:22:56 +02:00
|
|
|
last_edit_time = models.DateTimeField(null=True) # type: Optional[datetime.datetime]
|
2016-11-23 05:01:12 +01:00
|
|
|
edit_history = models.TextField(null=True) # type: Optional[Text]
|
2016-06-13 10:22:56 +02:00
|
|
|
has_attachment = models.BooleanField(default=False, db_index=True) # type: bool
|
|
|
|
has_image = models.BooleanField(default=False, db_index=True) # type: bool
|
|
|
|
has_link = models.BooleanField(default=False, db_index=True) # type: bool
|
2014-02-21 17:44:48 +01:00
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
class Meta(object):
|
|
|
|
abstract = True
|
|
|
|
|
2017-05-17 05:59:50 +02:00
|
|
|
def __unicode__(self):
|
|
|
|
# type: () -> Text
|
|
|
|
display_recipient = get_display_recipient(self.recipient)
|
|
|
|
return u"<%s: %s / %s / %r>" % (self.__class__.__name__, display_recipient,
|
|
|
|
self.subject, self.sender)
|
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class ArchivedMessage(AbstractMessage):
|
2017-04-15 04:03:56 +02:00
|
|
|
archive_timestamp = models.DateTimeField(default=timezone_now, db_index=True) # type: datetime.datetime
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
|
|
|
|
class Message(AbstractMessage):
|
|
|
|
|
2016-07-14 17:48:11 +02:00
|
|
|
def topic_name(self):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: () -> Text
|
2016-07-14 17:48:11 +02:00
|
|
|
"""
|
|
|
|
Please start using this helper to facilitate an
|
|
|
|
eventual switch over to a separate topic table.
|
|
|
|
"""
|
|
|
|
return self.subject
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2013-08-22 16:56:37 +02:00
|
|
|
def get_realm(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> Realm
|
2013-08-22 16:56:37 +02:00
|
|
|
return self.sender.realm
|
|
|
|
|
2013-09-20 21:25:51 +02:00
|
|
|
def save_rendered_content(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> None
|
2013-09-20 21:25:51 +02:00
|
|
|
self.save(update_fields=["rendered_content", "rendered_content_version"])
|
|
|
|
|
2013-09-21 16:46:28 +02:00
|
|
|
@staticmethod
|
2016-10-04 15:52:26 +02:00
|
|
|
def need_to_render_content(rendered_content, rendered_content_version, bugdown_version):
|
2017-02-11 05:03:41 +01:00
|
|
|
# type: (Optional[Text], Optional[int], int) -> bool
|
|
|
|
return (rendered_content is None or
|
|
|
|
rendered_content_version is None or
|
|
|
|
rendered_content_version < bugdown_version)
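# For instance, need_to_render_content(None, None, 2) and
# need_to_render_content(u'<p>hi</p>', 1, 2) both return True (the message
# must be re-rendered), while need_to_render_content(u'<p>hi</p>', 2, 2)
# returns False.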
|
2012-08-30 19:56:15 +02:00
|
|
|
|
2012-09-27 19:58:42 +02:00
|
|
|
def to_log_dict(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> Dict[str, Any]
|
2012-10-24 20:16:26 +02:00
|
|
|
return dict(
|
|
|
|
id = self.id,
|
2016-05-19 07:35:02 +02:00
|
|
|
sender_id = self.sender.id,
|
2013-03-28 20:43:34 +01:00
|
|
|
sender_email = self.sender.email,
|
2017-03-14 23:31:05 +01:00
|
|
|
sender_realm_str = self.sender.realm.string_id,
|
2012-10-24 20:16:26 +02:00
|
|
|
sender_full_name = self.sender.full_name,
|
|
|
|
sender_short_name = self.sender.short_name,
|
|
|
|
sending_client = self.sending_client.name,
|
|
|
|
type = self.recipient.type_name(),
|
2012-12-03 19:49:12 +01:00
|
|
|
recipient = get_display_recipient(self.recipient),
|
2016-07-15 06:36:45 +02:00
|
|
|
subject = self.topic_name(),
|
2012-10-24 20:16:26 +02:00
|
|
|
content = self.content,
|
2012-12-11 23:08:17 +01:00
|
|
|
timestamp = datetime_to_timestamp(self.pub_date))
|
2012-09-27 19:58:42 +02:00
|
|
|
|
2013-09-20 02:19:25 +02:00
|
|
|
@staticmethod
|
2013-09-21 18:43:43 +02:00
|
|
|
def get_raw_db_rows(needed_ids):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (List[int]) -> List[Dict[str, Any]]
|
2013-09-20 02:19:25 +02:00
|
|
|
# This is a special purpose function optimized for
|
2017-03-24 07:51:46 +01:00
|
|
|
# callers like get_messages_backend().
|
2013-09-21 18:43:43 +02:00
|
|
|
fields = [
|
|
|
|
'id',
|
|
|
|
'subject',
|
|
|
|
'pub_date',
|
|
|
|
'last_edit_time',
|
|
|
|
'edit_history',
|
|
|
|
'content',
|
|
|
|
'rendered_content',
|
|
|
|
'rendered_content_version',
|
|
|
|
'recipient_id',
|
|
|
|
'recipient__type',
|
|
|
|
'recipient__type_id',
|
|
|
|
'sender_id',
|
|
|
|
'sending_client__name',
|
|
|
|
'sender__email',
|
|
|
|
'sender__full_name',
|
|
|
|
'sender__short_name',
|
|
|
|
'sender__realm__id',
|
2017-03-14 23:31:05 +01:00
|
|
|
'sender__realm__string_id',
|
2014-07-18 00:18:06 +02:00
|
|
|
'sender__avatar_source',
|
2017-02-16 21:49:21 +01:00
|
|
|
'sender__avatar_version',
|
2014-07-18 00:18:06 +02:00
|
|
|
'sender__is_mirror_dummy',
|
2013-09-21 18:43:43 +02:00
|
|
|
]
|
2016-12-06 07:19:34 +01:00
|
|
|
messages = Message.objects.filter(id__in=needed_ids).values(*fields)
|
|
|
|
"""Adding one-many or Many-Many relationship in values results in N X
|
|
|
|
duplicated rows, one per related object.
|
|
|
|
|
|
|
|
Link: https://docs.djangoproject.com/en/1.8/ref/models/querysets/#values
|
|
|
|
"""
|
|
|
|
reactions = Reaction.get_raw_db_rows(needed_ids)
|
|
|
|
return sew_messages_and_reactions(messages, reactions)
|
2013-09-20 02:19:25 +02:00
|
|
|
|
2013-12-31 22:42:38 +01:00
|
|
|
def sent_by_human(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> bool
|
2013-12-31 22:42:38 +01:00
|
|
|
sending_client = self.sending_client.name.lower()
|
|
|
|
|
|
|
|
return (sending_client in ('zulipandroid', 'zulipios', 'zulipdesktop',
|
2017-02-16 22:18:19 +01:00
|
|
|
'zulipmobile', 'zulipelectron', 'snipe',
|
2016-11-30 14:17:35 +01:00
|
|
|
'website', 'ios', 'android')) or (
|
2017-01-24 07:06:13 +01:00
|
|
|
'desktop app' in sending_client)
|
2013-12-31 22:42:38 +01:00
|
|
|
|
2014-02-21 21:18:38 +01:00
|
|
|
@staticmethod
|
|
|
|
def content_has_attachment(content):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> Optional[Match]
|
2016-06-12 15:44:39 +02:00
|
|
|
return re.search(r'[/\-]user[\-_]uploads[/\.-]', content)
|
2014-02-21 21:18:38 +01:00
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def content_has_image(content):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> bool
|
2016-06-12 15:44:39 +02:00
|
|
|
return bool(re.search(r'[/\-]user[\-_]uploads[/\.-]\S+\.(bmp|gif|jpg|jpeg|png|webp)', content, re.IGNORECASE))
|
2014-02-21 21:18:38 +01:00
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def content_has_link(content):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> bool
|
2016-03-10 17:17:40 +01:00
|
|
|
return ('http://' in content or
|
|
|
|
'https://' in content or
|
|
|
|
'/user_uploads' in content or
|
|
|
|
(settings.ENABLE_FILE_LINKS and 'file:///' in content))
|
2014-02-21 21:18:38 +01:00
|
|
|
|
2016-07-10 22:58:46 +02:00
|
|
|
@staticmethod
|
|
|
|
def is_status_message(content, rendered_content):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text, Text) -> bool
|
2016-07-10 22:58:46 +02:00
|
|
|
"""
|
|
|
|
Returns True if the content and rendered_content correspond to a '/me' status message
|
|
|
|
"""
|
|
|
|
if content.startswith('/me ') and '\n' not in content:
|
|
|
|
if rendered_content.startswith('<p>') and rendered_content.endswith('</p>'):
|
|
|
|
return True
|
|
|
|
return False
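# For example, content u"/me waves" with rendered_content u"<p>/me waves</p>"
# (a plausible bugdown rendering) counts as a status message; a "/me" message
# containing a newline never does.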
|
|
|
|
|
2014-02-21 21:18:38 +01:00
|
|
|
def update_calculated_fields(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> None
|
2014-02-21 21:18:38 +01:00
|
|
|
# TODO: rendered_content could also be considered a calculated field
|
|
|
|
content = self.content
|
|
|
|
self.has_attachment = bool(Message.content_has_attachment(content))
|
|
|
|
self.has_image = bool(Message.content_has_image(content))
|
|
|
|
self.has_link = bool(Message.content_has_link(content))
|
|
|
|
|
|
|
|
@receiver(pre_save, sender=Message)
|
|
|
|
def pre_save_message(sender, **kwargs):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (Any, **Any) -> None
|
2014-02-21 21:18:38 +01:00
|
|
|
if kwargs['update_fields'] is None or "content" in kwargs['update_fields']:
|
|
|
|
message = kwargs['instance']
|
|
|
|
message.update_calculated_fields()
|
|
|
|
|
2014-07-15 21:03:51 +02:00
|
|
|
def get_context_for_message(message):
|
2017-03-06 08:45:59 +01:00
|
|
|
# type: (Message) -> QuerySet[Message]
|
2016-06-12 14:43:15 +02:00
|
|
|
# TODO: Change return type to QuerySet[Message]
|
2014-07-15 21:03:51 +02:00
|
|
|
return Message.objects.filter(
|
|
|
|
recipient_id=message.recipient_id,
|
|
|
|
subject=message.subject,
|
|
|
|
id__lt=message.id,
|
2015-02-21 02:46:19 +01:00
|
|
|
pub_date__gt=message.pub_date - timedelta(minutes=15),
|
|
|
|
).order_by('-id')[:10]
|
2014-07-15 21:03:51 +02:00
|
|
|
|
2016-07-08 02:25:55 +02:00
|
|
|
post_save.connect(flush_message, sender=Message)
|
2014-07-15 21:03:51 +02:00
|
|
|
|
2016-11-03 18:49:00 +01:00
|
|
|
class Reaction(ModelReprMixin, models.Model):
|
2017-06-01 10:44:16 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE) # type: Message
|
2016-11-27 03:46:31 +01:00
|
|
|
emoji_name = models.TextField() # type: Text
|
2016-11-03 18:49:00 +01:00
|
|
|
|
|
|
|
class Meta(object):
|
|
|
|
unique_together = ("user_profile", "message", "emoji_name")
|
|
|
|
|
2016-12-06 07:19:34 +01:00
|
|
|
@staticmethod
|
|
|
|
def get_raw_db_rows(needed_ids):
|
|
|
|
# type: (List[int]) -> List[Dict[str, Any]]
|
|
|
|
fields = ['message_id', 'emoji_name', 'user_profile__email',
|
|
|
|
'user_profile__id', 'user_profile__full_name']
|
|
|
|
return Reaction.objects.filter(message_id__in=needed_ids).values(*fields)
|
|
|
|
|
2017-06-08 02:04:09 +02:00
|
|
|
# Whenever a message is sent, for each user subscribed to the
|
2016-04-01 08:42:38 +02:00
|
|
|
# corresponding Recipient object, we add a row to the UserMessage
|
2017-06-08 02:04:09 +02:00
|
|
|
# table indicating that that user received that message. This table
|
2016-04-01 08:42:38 +02:00
|
|
|
# allows us to quickly query any user's last 1000 messages to generate
|
|
|
|
# the home view.
|
|
|
|
#
|
|
|
|
# Additionally, the flags field stores metadata like whether the user
|
2017-06-08 02:04:09 +02:00
|
|
|
# has read the message, starred or collapsed the message, was
|
|
|
|
# mentioned in the message, etc.
|
2016-04-01 08:42:38 +02:00
|
|
|
#
|
|
|
|
# UserMessage is the largest table in a Zulip installation, even
|
|
|
|
# though each row is only 4 integers.
|
2016-11-01 11:26:38 +01:00
|
|
|
class AbstractUserMessage(ModelReprMixin, models.Model):
|
2017-06-01 10:44:16 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
2013-07-25 22:08:16 +02:00
|
|
|
ALL_FLAGS = ['read', 'starred', 'collapsed', 'mentioned', 'wildcard_mentioned',
|
2013-09-03 22:41:17 +02:00
|
|
|
'summarize_in_home', 'summarize_in_stream', 'force_expand', 'force_collapse',
|
2014-01-27 16:56:01 +01:00
|
|
|
'has_alert_word', "historical", 'is_me_message']
|
2016-06-13 10:39:47 +02:00
|
|
|
flags = BitField(flags=ALL_FLAGS, default=0) # type: BitHandler
|
2012-09-07 17:04:41 +02:00
|
|
|
|
2015-10-14 22:43:04 +02:00
|
|
|
class Meta(object):
|
2016-11-01 11:26:38 +01:00
|
|
|
abstract = True
|
2012-11-08 21:08:13 +01:00
|
|
|
unique_together = ("user_profile", "message")
|
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
def flags_list(self):
|
|
|
|
# type: () -> List[str]
|
|
|
|
return [flag for flag in self.flags.keys() if getattr(self.flags, flag).is_set]
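# e.g. (hypothetical row): if only the 'read' and 'mentioned' bits are set,
# flags_list() returns ['read', 'mentioned'].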
|
|
|
|
|
2017-05-17 05:59:50 +02:00
|
|
|
def __unicode__(self):
|
|
|
|
# type: () -> Text
|
|
|
|
display_recipient = get_display_recipient(self.message.recipient)
|
|
|
|
return u"<%s: %s / %s (%s)>" % (self.__class__.__name__, display_recipient,
|
|
|
|
self.user_profile.email, self.flags_list())
|
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class ArchivedUserMessage(AbstractUserMessage):
|
2017-06-01 10:44:16 +02:00
|
|
|
message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE) # type: ArchivedMessage
|
2017-04-15 04:03:56 +02:00
|
|
|
archive_timestamp = models.DateTimeField(default=timezone_now, db_index=True) # type: datetime.datetime
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
|
|
|
|
class UserMessage(AbstractUserMessage):
|
2017-06-01 10:44:16 +02:00
|
|
|
message = models.ForeignKey(Message, on_delete=CASCADE) # type: Message
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2013-03-11 15:47:29 +01:00
|
|
|
|
2013-06-19 20:43:45 +02:00
|
|
|
def parse_usermessage_flags(val):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (int) -> List[str]
|
2013-06-19 20:43:45 +02:00
|
|
|
flags = []
|
|
|
|
mask = 1
|
|
|
|
for flag in UserMessage.ALL_FLAGS:
|
|
|
|
if val & mask:
|
|
|
|
flags.append(flag)
|
|
|
|
mask <<= 1
|
|
|
|
return flags
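# Worked example: flags are tested from the low bit upward, in the order of
# UserMessage.ALL_FLAGS, so
#
#   parse_usermessage_flags(5)   # 0b101 -> bits 0 and 2
#
# returns ['read', 'collapsed'].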
|
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class AbstractAttachment(ModelReprMixin, models.Model):
|
|
|
|
file_name = models.TextField(db_index=True) # type: Text
|
2016-03-24 20:24:01 +01:00
|
|
|
# path_id is a storage location agnostic representation of the path of the file.
|
|
|
|
# If the path of a file is http://localhost:9991/user_uploads/a/b/abc/temp_file.py
|
|
|
|
# then its path_id will be a/b/abc/temp_file.py.
|
2017-04-14 00:30:23 +02:00
|
|
|
path_id = models.TextField(db_index=True, unique=True) # type: Text
|
2017-06-01 10:44:16 +02:00
|
|
|
owner = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
realm = models.ForeignKey(Realm, blank=True, null=True, on_delete=CASCADE) # type: Optional[Realm]
|
2016-11-01 11:26:38 +01:00
|
|
|
is_realm_public = models.BooleanField(default=False) # type: bool
|
2017-04-15 04:03:56 +02:00
|
|
|
create_time = models.DateTimeField(default=timezone_now,
|
2016-11-01 11:26:38 +01:00
|
|
|
db_index=True) # type: datetime.datetime
|
2017-05-23 22:17:08 +02:00
|
|
|
size = models.IntegerField(null=True) # type: Optional[int]
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class Meta(object):
|
|
|
|
abstract = True
|
|
|
|
|
2017-05-17 05:59:50 +02:00
|
|
|
def __unicode__(self):
|
|
|
|
# type: () -> Text
|
|
|
|
return u"<%s: %s>" % (self.__class__.__name__, self.file_name,)
|
|
|
|
|
2016-11-01 11:26:38 +01:00
|
|
|
|
|
|
|
class ArchivedAttachment(AbstractAttachment):
|
2017-04-15 04:03:56 +02:00
|
|
|
archive_timestamp = models.DateTimeField(default=timezone_now, db_index=True) # type: datetime.datetime
|
2016-11-01 11:26:38 +01:00
|
|
|
messages = models.ManyToManyField(ArchivedMessage) # type: Manager
|
|
|
|
|
|
|
|
|
|
|
|
class Attachment(AbstractAttachment):
|
|
|
|
messages = models.ManyToManyField(Message) # type: Manager
|
2016-03-24 20:24:01 +01:00
|
|
|
|
|
|
|
def is_claimed(self):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> bool
|
2016-03-24 20:24:01 +01:00
|
|
|
return self.messages.count() > 0
|
|
|
|
|
2016-12-28 14:46:42 +01:00
|
|
|
def to_dict(self):
|
|
|
|
# type: () -> Dict[str, Any]
|
|
|
|
return {
|
|
|
|
'id': self.id,
|
|
|
|
'name': self.file_name,
|
|
|
|
'path_id': self.path_id,
|
|
|
|
'messages': [{
|
|
|
|
'id': m.id,
|
2017-02-24 02:30:47 +01:00
|
|
|
# convert to JavaScript-style UNIX timestamp so we can take
|
|
|
|
# advantage of client timezones.
|
|
|
|
'name': time.mktime(m.pub_date.timetuple()) * 1000
|
2016-12-28 14:46:42 +01:00
|
|
|
} for m in self.messages.all()]
|
|
|
|
}
|
|
|
|
|
2016-06-17 19:48:17 +02:00
|
|
|
def validate_attachment_request(user_profile, path_id):
|
|
|
|
# type: (UserProfile, Text) -> Optional[bool]
|
|
|
|
try:
|
|
|
|
attachment = Attachment.objects.get(path_id=path_id)
|
|
|
|
messages = attachment.messages.all()
|
|
|
|
|
|
|
|
if user_profile == attachment.owner:
|
|
|
|
# If you own the file, you can access it.
|
|
|
|
return True
|
|
|
|
elif attachment.is_realm_public and attachment.realm == user_profile.realm:
|
|
|
|
# Any user in the realm can access realm-public files
|
|
|
|
return True
|
|
|
|
elif UserMessage.objects.filter(user_profile=user_profile, message__in=messages).exists():
|
|
|
|
# If it was sent in a private message or private stream
|
|
|
|
# message, then anyone who received that message can access it.
|
|
|
|
return True
|
|
|
|
else:
|
|
|
|
return False
|
|
|
|
except Attachment.DoesNotExist:
|
|
|
|
return None
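# Callers are expected to distinguish the three outcomes (sketch):
#
#   result = validate_attachment_request(user_profile, path_id)
#   if result is None:
#       ...  # no such attachment
#   elif not result:
#       ...  # attachment exists, but this user may not access it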
|
2016-11-01 11:26:38 +01:00
|
|
|
|
2016-03-24 20:24:01 +01:00
|
|
|
def get_old_unclaimed_attachments(weeks_ago):
|
2016-06-12 14:43:15 +02:00
|
|
|
# type: (int) -> Sequence[Attachment]
|
|
|
|
# TODO: Change return type to QuerySet[Attachment]
|
2017-04-15 04:03:56 +02:00
|
|
|
delta_weeks_ago = timezone_now() - datetime.timedelta(weeks=weeks_ago)
|
2016-03-24 20:24:01 +01:00
|
|
|
old_attachments = Attachment.objects.filter(messages=None, create_time__lt=delta_weeks_ago)
|
|
|
|
return old_attachments
|
|
|
|
|
2016-06-12 09:36:05 +02:00
|
|
|
class Subscription(ModelReprMixin, models.Model):
|
2017-06-01 10:44:16 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
recipient = models.ForeignKey(Recipient, on_delete=CASCADE) # type: Recipient
|
2016-06-13 11:01:50 +02:00
|
|
|
active = models.BooleanField(default=True) # type: bool
|
|
|
|
in_home_view = models.NullBooleanField(default=True) # type: Optional[bool]
|
2012-08-29 17:50:36 +02:00
|
|
|
|
2016-07-11 15:54:15 +02:00
|
|
|
DEFAULT_STREAM_COLOR = u"#c2c2c2"
|
2016-11-23 05:01:12 +01:00
|
|
|
color = models.CharField(max_length=10, default=DEFAULT_STREAM_COLOR) # type: Text
|
2016-07-01 07:26:09 +02:00
|
|
|
pin_to_top = models.BooleanField(default=False) # type: bool
|
2014-02-05 23:00:46 +01:00
|
|
|
|
2016-06-13 11:01:50 +02:00
|
|
|
desktop_notifications = models.BooleanField(default=True) # type: bool
|
|
|
|
audible_notifications = models.BooleanField(default=True) # type: bool
|
2014-02-05 23:00:46 +01:00
|
|
|
|
|
|
|
# Combination desktop + audible notifications superseded by the
|
|
|
|
# above.
|
2016-06-13 11:01:50 +02:00
|
|
|
notifications = models.BooleanField(default=False) # type: bool
|
2013-03-29 20:57:02 +01:00
|
|
|
|
2015-10-14 22:43:04 +02:00
|
|
|
class Meta(object):
|
2012-11-07 22:33:38 +01:00
|
|
|
unique_together = ("user_profile", "recipient")
|
|
|
|
|
2016-06-12 09:36:05 +02:00
|
|
|
def __unicode__(self):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: () -> Text
|
2016-06-12 09:36:05 +02:00
|
|
|
return u"<Subscription: %r -> %s>" % (self.user_profile, self.recipient)
|
2012-08-28 22:56:21 +02:00
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(user_profile_by_id_cache_key, timeout=3600*24*7)
|
2013-03-26 18:51:55 +01:00
|
|
|
def get_user_profile_by_id(uid):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (int) -> UserProfile
|
2013-03-26 18:51:55 +01:00
|
|
|
return UserProfile.objects.select_related().get(id=uid)
|
|
|
|
|
2013-03-28 20:20:31 +01:00
|
|
|
@cache_with_key(user_profile_by_email_cache_key, timeout=3600*24*7)
|
|
|
|
def get_user_profile_by_email(email):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> UserProfile
|
2014-01-08 00:07:53 +01:00
|
|
|
return UserProfile.objects.select_related().get(email__iexact=email.strip())
|
2013-03-28 20:20:31 +01:00
|
|
|
|
2017-05-22 19:45:54 +02:00
|
|
|
@cache_with_key(user_profile_cache_key, timeout=3600*24*7)
|
|
|
|
def get_user(email, realm):
|
|
|
|
# type: (Text, Realm) -> UserProfile
|
|
|
|
return UserProfile.objects.select_related().get(email__iexact=email.strip(), realm=realm)
|
|
|
|
|
2017-05-22 23:37:15 +02:00
|
|
|
@cache_with_key(bot_profile_cache_key, timeout=3600*24*7)
|
|
|
|
def get_system_bot(email):
|
|
|
|
# type: (Text) -> UserProfile
|
|
|
|
return UserProfile.objects.select_related().get(email__iexact=email.strip())
|
|
|
|
|
2013-10-23 23:16:39 +02:00
|
|
|
@cache_with_key(active_user_dicts_in_realm_cache_key, timeout=3600*24*7)
|
|
|
|
def get_active_user_dicts_in_realm(realm):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (Realm) -> List[Dict[str, Any]]
|
2016-04-27 23:44:26 +02:00
|
|
|
return UserProfile.objects.filter(realm=realm, is_active=True) \
|
|
|
|
.values(*active_user_dict_fields)
|
2013-08-28 20:25:31 +02:00
|
|
|
|
2017-02-06 20:45:26 +01:00
|
|
|
@cache_with_key(bot_dicts_in_realm_cache_key, timeout=3600*24*7)
|
|
|
|
def get_bot_dicts_in_realm(realm):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (Realm) -> List[Dict[str, Any]]
|
2017-02-06 20:45:26 +01:00
|
|
|
return UserProfile.objects.filter(realm=realm, is_bot=True).values(*bot_dict_fields)
|
2014-02-26 00:12:14 +01:00
|
|
|
|
2016-04-27 23:22:52 +02:00
|
|
|
def get_owned_bot_dicts(user_profile, include_all_realm_bots_if_admin=True):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (UserProfile, bool) -> List[Dict[str, Any]]
|
2016-04-27 23:22:52 +02:00
|
|
|
if user_profile.is_realm_admin and include_all_realm_bots_if_admin:
|
2017-02-06 20:45:26 +01:00
|
|
|
result = get_bot_dicts_in_realm(user_profile.realm)
|
2016-04-27 23:22:52 +02:00
|
|
|
else:
|
2017-02-06 20:45:26 +01:00
|
|
|
result = UserProfile.objects.filter(realm=user_profile.realm, is_bot=True,
|
|
|
|
bot_owner=user_profile).values(*bot_dict_fields)
|
2016-09-28 00:21:31 +02:00
|
|
|
# TODO: Remove this import cycle
|
2017-05-10 07:09:28 +02:00
|
|
|
from zerver.lib.avatar import avatar_url_from_dict
|
2016-09-28 00:21:31 +02:00
|
|
|
|
2016-04-27 23:22:52 +02:00
|
|
|
return [{'email': botdict['email'],
|
2016-10-26 03:35:32 +02:00
|
|
|
'user_id': botdict['id'],
|
2016-04-27 23:22:52 +02:00
|
|
|
'full_name': botdict['full_name'],
|
2017-06-12 19:50:03 +02:00
|
|
|
'bot_type': botdict['bot_type'],
|
2017-02-06 20:45:26 +01:00
|
|
|
'is_active': botdict['is_active'],
|
2016-04-27 23:22:52 +02:00
|
|
|
'api_key': botdict['api_key'],
|
|
|
|
'default_sending_stream': botdict['default_sending_stream__name'],
|
|
|
|
'default_events_register_stream': botdict['default_events_register_stream__name'],
|
|
|
|
'default_all_public_streams': botdict['default_all_public_streams'],
|
|
|
|
'owner': botdict['bot_owner__email'],
|
2017-05-10 07:09:28 +02:00
|
|
|
'avatar_url': avatar_url_from_dict(botdict),
|
2016-12-01 06:16:45 +01:00
|
|
|
}
|
2016-04-27 23:22:52 +02:00
|
|
|
for botdict in result]
|
2014-02-26 00:12:14 +01:00
|
|
|
|
2013-04-08 18:27:07 +02:00
|
|
|
def get_prereg_user_by_email(email):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> PreregistrationUser
|
2013-04-08 18:27:07 +02:00
|
|
|
# A user can be invited many times, so only return the result of the latest
|
|
|
|
# invite.
|
2014-01-08 00:07:53 +01:00
|
|
|
return PreregistrationUser.objects.filter(email__iexact=email.strip()).latest("invited_at")
|
2013-04-08 18:27:07 +02:00
|
|
|
|
2016-11-02 21:57:59 +01:00
|
|
|
def get_cross_realm_emails():
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: () -> Set[Text]
|
2016-08-23 06:46:10 +02:00
|
|
|
return set(settings.CROSS_REALM_BOT_EMAILS)
|
2016-06-11 20:22:13 +02:00
|
|
|
|
2016-04-01 08:42:38 +02:00
|
|
|
# The Huddle class represents a group of individuals who have had a
|
|
|
|
# Group Private Message conversation together. The actual membership
|
|
|
|
# of the Huddle is stored in the Subscription table just like with
|
|
|
|
# Streams, and a hash of that list is stored in the huddle_hash field
|
|
|
|
# below, to support efficiently mapping from a set of users to the
|
|
|
|
# corresponding Huddle object.
|
2012-09-04 23:20:21 +02:00
|
|
|
class Huddle(models.Model):
|
2012-09-07 20:14:13 +02:00
|
|
|
# TODO: We should consider whether using
|
|
|
|
# CommaSeparatedIntegerField would be better.
|
2016-11-23 05:01:12 +01:00
|
|
|
huddle_hash = models.CharField(max_length=40, db_index=True, unique=True) # type: Text
|
2012-09-04 23:20:21 +02:00
|
|
|
|
2012-10-20 18:02:58 +02:00
|
|
|
def get_huddle_hash(id_list):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (List[int]) -> Text
|
2012-09-05 17:38:09 +02:00
|
|
|
id_list = sorted(set(id_list))
|
2012-09-05 17:41:53 +02:00
|
|
|
hash_key = ",".join(str(x) for x in id_list)
|
2013-03-20 15:31:27 +01:00
|
|
|
return make_safe_digest(hash_key)
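# Illustrative example: get_huddle_hash([10, 3, 10, 7]) de-duplicates and
# sorts the ids to [3, 7, 10] and hashes the string "3,7,10"; the exact
# digest is whatever make_safe_digest produces.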
|
2012-10-20 18:02:58 +02:00
|
|
|
|
2013-03-26 18:17:55 +01:00
|
|
|
def huddle_hash_cache_key(huddle_hash):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text) -> Text
|
2016-06-12 07:25:42 +02:00
|
|
|
return u"huddle_by_hash:%s" % (huddle_hash,)
|
2013-03-26 18:17:55 +01:00
|
|
|
|
2012-10-20 18:02:58 +02:00
|
|
|
def get_huddle(id_list):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (List[int]) -> Huddle
|
2012-10-20 18:02:58 +02:00
|
|
|
huddle_hash = get_huddle_hash(id_list)
|
2013-03-26 18:17:55 +01:00
|
|
|
return get_huddle_backend(huddle_hash, id_list)
|
|
|
|
|
2013-03-26 19:09:45 +01:00
|
|
|
@cache_with_key(lambda huddle_hash, id_list: huddle_hash_cache_key(huddle_hash), timeout=3600*24*7)
|
2013-03-26 18:17:55 +01:00
|
|
|
def get_huddle_backend(huddle_hash, id_list):
|
2016-11-23 05:01:12 +01:00
|
|
|
# type: (Text, List[int]) -> Huddle
|
2017-01-06 17:29:41 +01:00
|
|
|
with transaction.atomic():
|
|
|
|
(huddle, created) = Huddle.objects.get_or_create(huddle_hash=huddle_hash)
|
|
|
|
if created:
|
2013-03-26 18:51:55 +01:00
|
|
|
recipient = Recipient.objects.create(type_id=huddle.id,
|
|
|
|
type=Recipient.HUDDLE)
|
|
|
|
subs_to_create = [Subscription(recipient=recipient,
|
2017-06-10 14:24:04 +02:00
|
|
|
user_profile_id=user_profile_id)
|
2013-03-26 18:51:55 +01:00
|
|
|
for user_profile_id in id_list]
|
|
|
|
Subscription.objects.bulk_create(subs_to_create)
|
2017-01-06 17:29:41 +01:00
|
|
|
return huddle
|
2012-09-04 23:20:21 +02:00
|
|
|
|
2012-10-29 19:43:00 +01:00
|
|
|
def clear_database():
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: () -> None
|
2013-07-08 17:53:50 +02:00
|
|
|
pylibmc.Client(['127.0.0.1']).flush_all()
|
2016-01-25 21:12:34 +01:00
|
|
|
model = None # type: Any
|
2013-04-01 16:57:50 +02:00
|
|
|
for model in [Message, Stream, UserProfile, Recipient,
|
2012-11-27 18:26:51 +01:00
|
|
|
Realm, Subscription, Huddle, UserMessage, Client,
|
|
|
|
DefaultStream]:
|
2012-10-29 19:43:00 +01:00
|
|
|
model.objects.all().delete()
|
|
|
|
Session.objects.all().delete()
|
2012-11-08 23:02:16 +01:00
|
|
|
|
|
|
|
class UserActivity(models.Model):
|
2017-06-01 10:44:16 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
client = models.ForeignKey(Client, on_delete=CASCADE) # type: Client
|
2016-11-23 05:01:12 +01:00
|
|
|
query = models.CharField(max_length=50, db_index=True) # type: Text
|
2012-11-08 23:02:16 +01:00
|
|
|
|
2016-06-13 11:10:27 +02:00
|
|
|
count = models.IntegerField() # type: int
|
|
|
|
last_visit = models.DateTimeField('last visit') # type: datetime.datetime
|
2012-11-08 23:02:16 +01:00
|
|
|
|
2015-10-14 22:43:04 +02:00
|
|
|
class Meta(object):
|
2012-11-08 23:02:16 +01:00
|
|
|
unique_together = ("user_profile", "client", "query")
|
2012-11-27 18:26:51 +01:00
|
|
|
|
2013-09-06 21:52:12 +02:00
|
|
|
class UserActivityInterval(models.Model):
|
2017-04-15 07:20:16 +02:00
|
|
|
MIN_INTERVAL_LENGTH = datetime.timedelta(minutes=15)
|
|
|
|
|
2017-06-01 10:44:16 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
2016-06-13 11:10:27 +02:00
|
|
|
start = models.DateTimeField('start time', db_index=True) # type: datetime.datetime
|
|
|
|
end = models.DateTimeField('end time', db_index=True) # type: datetime.datetime
|
2013-09-06 21:52:12 +02:00
|
|
|
|
2017-04-15 07:20:16 +02:00
|
|
|
|
2013-02-08 23:44:15 +01:00
|
|
|
class UserPresence(models.Model):
|
2017-06-01 10:44:16 +02:00
|
|
|
user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile
|
|
|
|
client = models.ForeignKey(Client, on_delete=CASCADE) # type: Client
|
2013-02-08 23:44:15 +01:00
|
|
|
|
|
|
|
# Valid statuses
|
|
|
|
ACTIVE = 1
|
|
|
|
IDLE = 2
|
|
|
|
|
2016-06-13 11:10:27 +02:00
|
|
|
timestamp = models.DateTimeField('presence changed') # type: datetime.datetime
|
|
|
|
status = models.PositiveSmallIntegerField(default=ACTIVE) # type: int
|
2013-02-08 23:44:15 +01:00
|
|
|
|
2013-09-13 23:33:11 +02:00
|
|
|
@staticmethod
|
|
|
|
def status_to_string(status):
|
2016-05-07 18:02:57 +02:00
|
|
|
# type: (int) -> str
|
2013-09-13 23:33:11 +02:00
|
|
|
if status == UserPresence.ACTIVE:
|
|
|
|
return 'active'
|
|
|
|
elif status == UserPresence.IDLE:
|
|
|
|
return 'idle'
|
2017-03-03 20:30:49 +01:00
|
|
|
else:
|
|
|
|
raise ValueError('Unknown status: %s' % (status,))
|
2013-09-13 23:33:11 +02:00
|
|
|
|
2017-02-11 08:38:16 +01:00
|
|
|
@staticmethod
|
|
|
|
def get_status_dict_by_user(user_profile):
|
2017-03-20 05:02:30 +01:00
|
|
|
# type: (UserProfile) -> DefaultDict[Any, Dict[Any, Any]]
|
2017-02-11 08:38:16 +01:00
|
|
|
query = UserPresence.objects.filter(user_profile=user_profile).values(
|
|
|
|
'client__name',
|
|
|
|
'status',
|
|
|
|
'timestamp',
|
|
|
|
'user_profile__email',
|
|
|
|
'user_profile__id',
|
|
|
|
'user_profile__enable_offline_push_notifications',
|
|
|
|
'user_profile__is_mirror_dummy',
|
|
|
|
)
|
|
|
|
|
|
|
|
if PushDeviceToken.objects.filter(user=user_profile).exists():
|
2017-02-11 08:57:28 +01:00
|
|
|
mobile_user_ids = [user_profile.id] # type: List[int]
|
2017-02-11 08:38:16 +01:00
|
|
|
else:
|
|
|
|
mobile_user_ids = []
|
|
|
|
|
|
|
|
return UserPresence.get_status_dicts_for_query(query, mobile_user_ids)
|
|
|
|
|
2017-02-19 16:43:32 +01:00
|
|
|
@staticmethod
|
|
|
|
def exclude_old_users(query):
|
|
|
|
# type: (QuerySet) -> QuerySet
|
2017-04-15 04:03:56 +02:00
|
|
|
two_weeks_ago = timezone_now() - datetime.timedelta(weeks=2)
|
2017-02-19 16:43:32 +01:00
|
|
|
return query.filter(timestamp__gte=two_weeks_ago)
|
|
|
|
|
2013-09-14 23:59:03 +02:00
|
|
|
@staticmethod
|
|
|
|
def get_status_dict_by_realm(realm_id):
|
2017-03-20 05:02:30 +01:00
|
|
|
# type: (int) -> DefaultDict[Any, Dict[Any, Any]]
|
2013-09-14 23:59:03 +02:00
|
|
|
query = UserPresence.objects.filter(
|
2017-01-24 07:06:13 +01:00
|
|
|
user_profile__realm_id=realm_id,
|
|
|
|
user_profile__is_active=True,
|
|
|
|
user_profile__is_bot=False
|
2017-02-19 16:43:32 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
query = UserPresence.exclude_old_users(query)
|
|
|
|
|
|
|
|
query = query.values(
|
2017-01-24 07:06:13 +01:00
|
|
|
'client__name',
|
|
|
|
'status',
|
|
|
|
'timestamp',
|
|
|
|
'user_profile__email',
|
|
|
|
'user_profile__id',
|
|
|
|
'user_profile__enable_offline_push_notifications',
|
|
|
|
'user_profile__is_mirror_dummy',
|
2013-09-14 23:59:03 +02:00
|
|
|
)
|
|
|
|
|
2014-02-26 22:06:57 +01:00
|
|
|
mobile_user_ids = [row['user'] for row in PushDeviceToken.objects.filter(
|
2017-01-24 07:06:13 +01:00
|
|
|
user__realm_id=realm_id,
|
|
|
|
user__is_active=True,
|
|
|
|
user__is_bot=False,
|
2014-02-26 22:06:57 +01:00
|
|
|
).distinct("user").values("user")]
|
|
|
|
|
2017-02-11 07:49:27 +01:00
|
|
|
return UserPresence.get_status_dicts_for_query(query, mobile_user_ids)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def get_status_dicts_for_query(query, mobile_user_ids):
|
2017-03-20 05:02:30 +01:00
|
|
|
# type: (QuerySet, List[int]) -> DefaultDict[Any, Dict[Any, Any]]
|
|
|
|
user_statuses = defaultdict(dict) # type: DefaultDict[Any, Dict[Any, Any]]
|
2017-03-02 09:52:17 +01:00
|
|
|
# The ordering of the query matters: the newest row per user becomes the aggregated status.
|
|
|
|
for row in query.order_by("user_profile__id", "-timestamp"):
|
2014-02-14 22:45:32 +01:00
|
|
|
info = UserPresence.to_presence_dict(
|
2017-02-11 04:13:33 +01:00
|
|
|
row['client__name'],
|
|
|
|
row['status'],
|
|
|
|
row['timestamp'],
|
2017-01-24 07:06:13 +01:00
|
|
|
push_enabled=row['user_profile__enable_offline_push_notifications'],
|
|
|
|
has_push_devices=row['user_profile__id'] in mobile_user_ids,
|
|
|
|
is_mirror_dummy=row['user_profile__is_mirror_dummy'],
|
2017-01-24 06:34:26 +01:00
|
|
|
)
|
2017-03-02 09:52:17 +01:00
|
|
|
if not user_statuses.get(row['user_profile__email']):
|
|
|
|
# Use the latest status (the first row seen for this user) as the aggregated status.
|
|
|
|
user_statuses[row['user_profile__email']]['aggregated'] = {
|
|
|
|
'status': info['status'],
|
2017-03-18 10:41:57 +01:00
|
|
|
'timestamp': info['timestamp'],
|
|
|
|
'client': info['client']
|
2017-03-02 09:52:17 +01:00
|
|
|
}
|
2014-02-14 22:45:32 +01:00
|
|
|
user_statuses[row['user_profile__email']][row['client__name']] = info
|
2013-09-14 23:59:03 +02:00
|
|
|
return user_statuses
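# Resulting shape (sketch with hypothetical values):
#
#   {'hamlet@zulip.com': {
#        'website': {'client': 'website', 'status': 'active',
#                    'timestamp': 1500000000, 'pushable': False},
#        'aggregated': {'status': 'active', 'timestamp': 1500000000,
#                       'client': 'website'},
#    }}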
|
|
|
|
|
|
|
|
@staticmethod
|
2017-04-25 11:50:30 +02:00
|
|
|
def to_presence_dict(client_name, status, dt, push_enabled=False,
|
|
|
|
has_push_devices=False, is_mirror_dummy=None):
|
|
|
|
# type: (Text, int, datetime.datetime, bool, bool, Optional[bool]) -> Dict[str, Any]
|
2013-09-14 23:59:03 +02:00
|
|
|
presence_val = UserPresence.status_to_string(status)
|
2016-06-04 00:44:30 +02:00
|
|
|
|
|
|
|
timestamp = datetime_to_timestamp(dt)
|
2013-09-14 23:59:03 +02:00
|
|
|
return dict(
|
2017-01-24 07:06:13 +01:00
|
|
|
client=client_name,
|
|
|
|
status=presence_val,
|
|
|
|
timestamp=timestamp,
|
|
|
|
pushable=(push_enabled and has_push_devices),
|
2013-09-14 23:59:03 +02:00
|
|
|
)
|
2013-04-03 22:00:02 +02:00
|
|
|
|
2013-09-14 23:59:03 +02:00
|
|
|

    def to_dict(self):
        # type: () -> Dict[str, Any]
        return UserPresence.to_presence_dict(
            self.client.name,
            self.status,
            self.timestamp
        )

    @staticmethod
    def status_from_string(status):
        # type: (NonBinaryStr) -> Optional[int]
        if status == 'active':
            status_val = UserPresence.ACTIVE
        elif status == 'idle':
            status_val = UserPresence.IDLE
        else:
            status_val = None

        return status_val

    class Meta(object):
        unique_together = ("user_profile", "client")
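
# A minimal illustrative sketch, not part of the original module: mapping a
# client-supplied status string onto UserPresence and re-serializing it with
# to_dict(). The helper name example_update_presence and its calling convention
# are assumptions made for this example only.
def example_update_presence(presence, status_string):
    # type: (UserPresence, Text) -> Dict[str, Any]
    status_val = UserPresence.status_from_string(status_string)
    if status_val is None:
        # status_from_string() returns None for anything other than
        # 'active' or 'idle'.
        raise ValueError("unknown presence status: %s" % (status_string,))
    presence.status = status_val
    presence.save(update_fields=["status"])
    return presence.to_dict()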

class DefaultStream(models.Model):
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    stream = models.ForeignKey(Stream, on_delete=CASCADE)  # type: Stream

    class Meta(object):
        unique_together = ("realm", "stream")

class ScheduledJob(models.Model):
    scheduled_timestamp = models.DateTimeField(auto_now_add=False, null=False)  # type: datetime.datetime
    type = models.PositiveSmallIntegerField()  # type: int
    # Valid types are {email}
    # for EMAIL, filter_string is recipient_email
    EMAIL = 1

    # JSON representation of the job's data. Be careful, as we are not
    # relying on Django to do validation.
    data = models.TextField()  # type: Text
    # Kind of like a ForeignKey, but the table is determined by type.
    filter_id = models.IntegerField(null=True)  # type: Optional[int]
    filter_string = models.CharField(max_length=100)  # type: Text
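
# A minimal illustrative sketch, not part of the original module: queuing an
# EMAIL job. As the comments above note, filter_string carries the recipient
# address and data is an unvalidated JSON blob; the helper name and the payload
# shape are assumptions made for this example only.
def example_schedule_email(recipient_email, send_at, payload):
    # type: (Text, datetime.datetime, Dict[str, Any]) -> ScheduledJob
    import json  # local import keeps the sketch self-contained
    return ScheduledJob.objects.create(
        scheduled_timestamp=send_at,
        type=ScheduledJob.EMAIL,
        filter_string=recipient_email,
        data=json.dumps(payload))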

class RealmAuditLog(models.Model):
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    acting_user = models.ForeignKey(UserProfile, null=True, related_name='+', on_delete=CASCADE)  # type: Optional[UserProfile]
    modified_user = models.ForeignKey(UserProfile, null=True, related_name='+', on_delete=CASCADE)  # type: Optional[UserProfile]
    modified_stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE)  # type: Optional[Stream]
    event_type = models.CharField(max_length=40)  # type: Text
    event_time = models.DateTimeField(db_index=True)  # type: datetime.datetime
    # If True, event_time is an overestimate of the true time. Can be used
    # by migrations when introducing a new event_type.
    backfilled = models.BooleanField(default=False)  # type: bool
    extra_data = models.TextField(null=True)  # type: Optional[Text]
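
# A minimal illustrative sketch, not part of the original module: recording an
# audit event at the time it happens (so backfilled keeps its default of
# False). The event_type string 'user_deactivated' and the helper name are
# assumptions made for this example only.
def example_log_user_deactivated(realm, acting_user, modified_user):
    # type: (Realm, UserProfile, UserProfile) -> RealmAuditLog
    return RealmAuditLog.objects.create(
        realm=realm,
        acting_user=acting_user,
        modified_user=modified_user,
        event_type='user_deactivated',
        event_time=timezone_now())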

class UserHotspot(models.Model):
    user = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    hotspot = models.CharField(max_length=30)  # type: Text
    timestamp = models.DateTimeField(default=timezone_now)  # type: datetime.datetime

    class Meta(object):
        unique_together = ("user", "hotspot")
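
# A minimal illustrative sketch, not part of the original module: marking a
# hotspot as seen. get_or_create keeps the write idempotent, matching the
# ("user", "hotspot") uniqueness constraint above; the helper name is an
# assumption made for this example only.
def example_mark_hotspot_as_read(user, hotspot_name):
    # type: (UserProfile, Text) -> None
    UserHotspot.objects.get_or_create(user=user, hotspot=hotspot_name)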

class CustomProfileField(models.Model):
    realm = models.ForeignKey(Realm, on_delete=CASCADE)  # type: Realm
    name = models.CharField(max_length=100)  # type: Text

    INTEGER = 1
    FLOAT = 2
    SHORT_TEXT = 3
    LONG_TEXT = 4

    FIELD_TYPE_DATA = [
        # Type, Name, Validator, Converter
        (INTEGER, u'Integer', check_int, int),
        (FLOAT, u'Float', check_float, float),
        (SHORT_TEXT, u'Short Text', check_short_string, str),
        (LONG_TEXT, u'Long Text', check_string, str),
    ]  # type: List[Tuple[int, Text, Callable[[str, Any], str], Callable[[Any], Any]]]

    FIELD_VALIDATORS = {item[0]: item[2] for item in FIELD_TYPE_DATA}  # type: Dict[int, Callable[[str, Any], str]]
    FIELD_CONVERTERS = {item[0]: item[3] for item in FIELD_TYPE_DATA}  # type: Dict[int, Callable[[Any], Any]]
    FIELD_TYPE_CHOICES = [(item[0], item[1]) for item in FIELD_TYPE_DATA]  # type: List[Tuple[int, Text]]

    field_type = models.PositiveSmallIntegerField(choices=FIELD_TYPE_CHOICES,
                                                  default=SHORT_TEXT)  # type: int

    class Meta(object):
        unique_together = ('realm', 'name')

    def as_dict(self):
        # type: () -> Dict[str, Union[int, Text]]
        return {
            'id': self.id,
            'name': self.name,
            'type': self.field_type,
        }

def custom_profile_fields_for_realm(realm_id):
    # type: (int) -> List[CustomProfileField]
    return CustomProfileField.objects.filter(realm=realm_id).order_by('name')
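
# A minimal illustrative sketch, not part of the original module: building the
# list of field dictionaries a client might be sent, combining the helper above
# with CustomProfileField.as_dict(). The helper name is an assumption made for
# this example only.
def example_custom_profile_fields_payload(realm_id):
    # type: (int) -> List[Dict[str, Union[int, Text]]]
    return [field.as_dict() for field in custom_profile_fields_for_realm(realm_id)]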

class CustomProfileFieldValue(models.Model):
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    field = models.ForeignKey(CustomProfileField, on_delete=CASCADE)  # type: CustomProfileField
    value = models.TextField()  # type: Text

    class Meta(object):
        unique_together = ('user_profile', 'field')
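
# A minimal illustrative sketch, not part of the original module: validating a
# submitted value with the FIELD_VALIDATORS table, storing it for a user, and
# converting it back to its native type with FIELD_CONVERTERS on read. It
# assumes the validators behave like those in zerver.lib.validator (return an
# error string, or None on success); the helper names are assumptions made for
# this example only.
def example_set_custom_field_value(user_profile, field, raw_value):
    # type: (UserProfile, CustomProfileField, Any) -> CustomProfileFieldValue
    validator = CustomProfileField.FIELD_VALIDATORS[field.field_type]
    error = validator('value', raw_value)
    if error is not None:
        raise ValidationError(error)
    instance, _ = CustomProfileFieldValue.objects.update_or_create(
        user_profile=user_profile, field=field,
        defaults={'value': force_text(raw_value)})
    return instance

def example_get_custom_field_value(instance):
    # type: (CustomProfileFieldValue) -> Any
    # The converter turns the stored text back into the field's native type.
    converter = CustomProfileField.FIELD_CONVERTERS[instance.field.field_type]
    return converter(instance.value)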

# Interfaces for services.
# They provide additional functionality, such as parsing a message to obtain
# the query URL, determining the data to be sent to that URL, and parsing the
# response.
GENERIC_INTERFACE = u'GenericService'

# A Service corresponds to either an outgoing webhook bot or an embedded bot.
# The type of Service is determined by the bot_type field of the referenced
# UserProfile.
#
# If the Service is an outgoing webhook bot:
# - name is any human-readable identifier for the Service
# - base_url is the address of the third-party site
# - token is used for authentication with the third-party site
#
# If the Service is an embedded bot:
# - name is the canonical name for the type of bot (e.g. 'xkcd' for an instance
#   of the xkcd bot); multiple embedded bots can have the same name, but all
#   embedded bots with the same name will run the same code
# - base_url and token are currently unused
class Service(models.Model):
    name = models.CharField(max_length=UserProfile.MAX_NAME_LENGTH)  # type: Text
    # Bot user corresponding to the Service. The bot_type of this user
    # determines the type of service. If non-bot services are added later,
    # user_profile can also represent the owner of the Service.
    user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE)  # type: UserProfile
    base_url = models.TextField()  # type: Text
    token = models.TextField()  # type: Text
    # Interface / API version of the service.
    interface = models.PositiveSmallIntegerField(default=1)  # type: int

    # Valid interfaces are {generic}
    GENERIC = 1

    # N.B. If we used Django's choices=... we would get this for free (kinda)
    _interfaces = {
        GENERIC: GENERIC_INTERFACE,
    }  # type: Dict[int, Text]

    def interface_name(self):
        # type: () -> Text
        # Raises KeyError if invalid
        return self._interfaces[self.interface]
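
# A minimal illustrative sketch, not part of the original module: registering
# an outgoing webhook Service for an existing bot, following the field
# semantics described in the comment block above. It assumes bot_user already
# has bot_type == UserProfile.OUTGOING_WEBHOOK_BOT; the helper name is an
# assumption made for this example only.
def example_add_outgoing_webhook_service(bot_user, name, base_url, token):
    # type: (UserProfile, Text, Text, Text) -> Service
    return Service.objects.create(
        user_profile=bot_user,
        name=name,
        base_url=base_url,
        token=token,
        interface=Service.GENERIC)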

def get_realm_outgoing_webhook_services_name(realm):
    # type: (Realm) -> List[Any]
    return list(Service.objects.filter(user_profile__realm=realm, user_profile__is_bot=True,
                                       user_profile__bot_type=UserProfile.OUTGOING_WEBHOOK_BOT).values('name'))

def get_bot_services(user_profile_id):
    # type: (int) -> List[Service]
    return list(Service.objects.filter(user_profile__id=user_profile_id))

def get_service_profile(email, realm, service_name):
    # type: (str, Realm, str) -> Service
    return Service.objects.get(user_profile__email=email, user_profile__realm=realm, name=service_name)
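
# A minimal illustrative sketch, not part of the original module: the lookup
# helpers above in use. The service name 'my-webhook' and the helper name are
# assumptions made for this example only.
def example_service_lookups(realm, bot_profile):
    # type: (Realm, UserProfile) -> Tuple[List[Any], List[Service], Service]
    webhook_names = get_realm_outgoing_webhook_services_name(realm)
    bot_services = get_bot_services(bot_profile.id)
    service = get_service_profile(bot_profile.email, realm, 'my-webhook')
    return webhook_names, bot_services, service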