# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division

from typing import Any, Callable, List, Optional, Sequence, TypeVar, Iterable, Set, Tuple, Text

from six import binary_type

import base64
import errno
import hashlib
import heapq
import itertools
import os
import sys
from time import sleep

from django.conf import settings
from django.http import HttpRequest
from six.moves import range, map, zip_longest
from zerver.lib.str_utils import force_text

T = TypeVar('T')


def statsd_key(val, clean_periods=False):
    # type: (Any, bool) -> str
    if not isinstance(val, str):
        val = str(val)

    if ':' in val:
        val = val.split(':')[0]
    val = val.replace('-', "_")
    if clean_periods:
        val = val.replace('.', '_')

    return val
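
# Illustrative examples (the inputs are hypothetical):
#   statsd_key('foo-bar:baz')        # -> 'foo_bar'
#   statsd_key('zulip.com', True)    # -> 'zulip_com'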


class StatsDWrapper(object):
    """Transparently either submit metrics to statsd
    or do nothing without erroring out"""

    # Backported support for gauge deltas, since our statsd server
    # supports them but a pystatsd release with that support is not
    # out yet.
    def _our_gauge(self, stat, value, rate=1, delta=False):
        # type: (str, float, float, bool) -> None
        """Set a gauge value."""
        from django_statsd.clients import statsd
        if delta:
            value_str = '%+g|g' % (value,)
        else:
            value_str = '%g|g' % (value,)
        statsd._send(stat, value_str, rate)

    def __getattr__(self, name):
        # type: (str) -> Any
        # Hand off to statsd if we have it enabled;
        # otherwise do nothing.
        if name in ['timer', 'timing', 'incr', 'decr', 'gauge']:
            if settings.STATSD_HOST != '':
                from django_statsd.clients import statsd
                if name == 'gauge':
                    return self._our_gauge
                else:
                    return getattr(statsd, name)
            else:
                return lambda *args, **kwargs: None

        raise AttributeError

statsd = StatsDWrapper()
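
# Usage sketch (the metric names are hypothetical):
#   statsd.incr("page_views")          # counter
#   statsd.gauge("queue_depth", 42)    # gauge, routed through _our_gauge
# Both calls silently become no-ops when settings.STATSD_HOST is ''.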


# Runs the callback with slices of all_list of a given batch_size
def run_in_batches(all_list, batch_size, callback, sleep_time=0, logger=None):
    # type: (Sequence[T], int, Callable[[Sequence[T]], None], int, Optional[Callable[[str], None]]) -> None
    if len(all_list) == 0:
        return

    # Ceiling division, so we don't run an extra, empty batch when
    # len(all_list) is an exact multiple of batch_size.
    limit = (len(all_list) + batch_size - 1) // batch_size
    for i in range(limit):
        start = i * batch_size
        end = min((i + 1) * batch_size, len(all_list))
        batch = all_list[start:end]

        if logger:
            logger("Executing %s in batch %s of %s" % (end - start, i + 1, limit))

        callback(batch)

        if i != limit - 1:
            sleep(sleep_time)
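
# Example (process_batch is a hypothetical callback taking one list argument):
#   run_in_batches(list(range(10)), 3, process_batch)
# invokes process_batch four times, with [0, 1, 2], [3, 4, 5], [6, 7, 8], [9].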


def make_safe_digest(string, hash_func=hashlib.sha1):
    # type: (Text, Callable[[binary_type], Any]) -> Text
    """
    return a hex digest of `string`.
    """
    # hashlib.sha1, md5, etc. expect bytes, so non-ASCII strings must
    # be encoded.
    return force_text(hash_func(string.encode('utf-8')).hexdigest())
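
# Example (illustrative): make_safe_digest(u'hello') returns a 40-character
# SHA-1 hex digest as text; non-ASCII input such as u'café' works too, since
# the string is UTF-8 encoded before hashing.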


def log_statsd_event(name):
    # type: (str) -> None
    """
    Sends a single event to statsd with the desired name and the current timestamp.

    This can be used to provide vertical lines in generated graphs,
    for example when doing a prod deploy, bankruptcy request, or
    other one-off events.

    Note that to draw this event as a vertical line in graphite
    you can use the drawAsInfinite() command.
    """
    event_name = "events.%s" % (name,)
    statsd.incr(event_name)
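
# Example (the event name 'deploy' is hypothetical): log_statsd_event('deploy')
# bumps the 'events.deploy' counter once; in graphite, wrapping that series in
# drawAsInfinite() renders each occurrence as a vertical line.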


def generate_random_token(length):
    # type: (int) -> Text
    return base64.b16encode(os.urandom(length // 2)).decode('utf-8').lower()
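
# Example: generate_random_token(32) returns a 32-character lowercase hex
# string; odd lengths are rounded down to an even number of characters, since
# each random byte encodes to two hex digits.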


def mkdir_p(path):
    # type: (str) -> None
    # Python < 3.2 doesn't have an analog to `mkdir -p`.
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise
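
# Example (the path is hypothetical): mkdir_p('var/uploads/avatars') creates
# any missing intermediate directories and does nothing if the directory
# already exists.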


def query_chunker(queries, id_collector=None, chunk_size=1000, db_chunk_size=None):
    # type: (List[Any], Optional[Set[int]], int, Optional[int]) -> Iterable[Any]
    '''
    This merges one or more Django ascending-id queries into
    a generator that returns chunks of chunk_size row objects
    during each yield, preserving id order across all results.

    Queries should satisfy these conditions:
        - They should be Django filters.
        - They should return Django objects with "id" attributes.
        - They should be disjoint.

    The generator also populates id_collector, which we use
    internally to enforce unique ids, but which the caller
    can pass in to us if they want the side effect of collecting
    all ids.
    '''
    if db_chunk_size is None:
        db_chunk_size = chunk_size // len(queries)

    assert db_chunk_size >= 2
    assert chunk_size >= 2

    if id_collector is not None:
        assert len(id_collector) == 0
    else:
        id_collector = set()

    def chunkify(q, i):
        # type: (Any, int) -> Iterable[Tuple[int, int, Any]]
        q = q.order_by('id')
        min_id = -1
        while True:
            rows = list(q.filter(id__gt=min_id)[0:db_chunk_size])
            if len(rows) == 0:
                break
            for row in rows:
                yield (row.id, i, row)
            min_id = rows[-1].id

    iterators = [chunkify(q, i) for i, q in enumerate(queries)]
    merged_query = heapq.merge(*iterators)

    while True:
        tup_chunk = list(itertools.islice(merged_query, 0, chunk_size))
        if len(tup_chunk) == 0:
            break

        # Do duplicate-id management here.
        tup_ids = set([tup[0] for tup in tup_chunk])
        assert len(tup_ids) == len(tup_chunk)
        assert len(tup_ids.intersection(id_collector)) == 0
        id_collector.update(tup_ids)

        yield [row for row_id, i, row in tup_chunk]
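
# Usage sketch (the model and the process() helper are hypothetical; the two
# querysets are disjoint because they filter on different users):
#   queries = [
#       UserMessage.objects.filter(user_profile_id=1),
#       UserMessage.objects.filter(user_profile_id=2),
#   ]
#   for chunk in query_chunker(queries, chunk_size=500):
#       process(chunk)  # each chunk is a list of rows in ascending id order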


def _extract_subdomain(request):
    # type: (HttpRequest) -> Text
    domain = request.get_host().lower()
    index = domain.find("." + settings.EXTERNAL_HOST)
    if index == -1:
        return ""
    return domain[0:index]


def get_subdomain(request):
    # type: (HttpRequest) -> Text
    subdomain = _extract_subdomain(request)
    if subdomain in settings.ROOT_SUBDOMAIN_ALIASES:
        return ""
    return subdomain
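
# Example (assumes settings.EXTERNAL_HOST == 'example.com' and 'www' listed in
# ROOT_SUBDOMAIN_ALIASES): a request for 'acme.example.com' yields 'acme',
# while 'www.example.com' and 'example.com' both yield ''.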


def is_subdomain_root_or_alias(request):
    # type: (HttpRequest) -> bool
    subdomain = _extract_subdomain(request)
    return not subdomain or subdomain in settings.ROOT_SUBDOMAIN_ALIASES


def check_subdomain(realm_subdomain, user_subdomain):
    # type: (Optional[Text], Optional[Text]) -> bool
    if settings.REALMS_HAVE_SUBDOMAINS and realm_subdomain is not None:
        if (realm_subdomain == "" and user_subdomain is None):
            return True
        if realm_subdomain != user_subdomain:
            return False
    return True
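
# Example (subdomain names are hypothetical): with REALMS_HAVE_SUBDOMAINS
# enabled, check_subdomain('acme', 'acme') is True and
# check_subdomain('acme', 'foo') is False; the root realm (realm_subdomain ''
# paired with a user_subdomain of None) is also accepted.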


def split_by(array, group_size, filler):
    # type: (List[Any], int, Any) -> List[List[Any]]
    """
    Group elements into lists of size `group_size` and fill empty cells with
    `filler`. Recipe from https://docs.python.org/3/library/itertools.html
    """
    args = [iter(array)] * group_size
    return list(map(list, zip_longest(*args, fillvalue=filler)))
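
# Example: split_by([1, 2, 3, 4, 5], 2, None) returns
# [[1, 2], [3, 4], [5, None]].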