from __future__ import absolute_import

from django.conf import settings
from django.core.handlers.wsgi import WSGIRequest
from django.core.handlers.base import BaseHandler
from postmonkey import PostMonkey, MailChimpException
from zerver.models import get_user_profile_by_email, \
    get_user_profile_by_id, get_prereg_user_by_email, get_client
from zerver.lib.context_managers import lockfile
from zerver.lib.queue import SimpleQueueClient, queue_json_publish
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.lib.actions import handle_missedmessage_emails, do_send_confirmation_email, \
    do_update_user_activity, do_update_user_activity_interval, do_update_user_presence, \
    internal_send_message, send_local_email_template_with_delay, clear_followup_emails_queue, \
    check_send_message, extract_recipients, one_click_unsubscribe_link, \
    enqueue_welcome_emails, handle_push_notification
from zerver.lib.digest import handle_digest_email
from zerver.decorator import JsonableError
from zerver.lib.socket import req_redis_key
from confirmation.models import Confirmation
from django.db import reset_queries
from django.core.mail import EmailMessage

import os
import sys
import ujson
from collections import defaultdict
import time
import datetime
import logging
import simplejson
import redis
import StringIO

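# Worker registry: decorating a class with @assign_queue(queue_name) records
# it in worker_classes, so that a runner process can look workers up by queue
# name.  A sketch of how a runner might use the registry (the actual runner
# lives outside this file):
#
#     worker = get_worker('user_activity')
#     worker.start()  # blocks, consuming events until stop() is called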
def assign_queue(queue_name, enabled=True):
    def decorate(clazz):
        clazz.queue_name = queue_name
        if enabled:
            register_worker(queue_name, clazz)
        return clazz
    return decorate

worker_classes = {}
def register_worker(queue_name, clazz):
    worker_classes[queue_name] = clazz

def get_worker(queue_name):
    return worker_classes[queue_name]()

def get_active_worker_queues():
    return worker_classes.iterkeys()

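# Base class for queue workers.  Subclasses get a queue_name (set by
# @assign_queue) and implement consume(); start() registers consume_wrapper()
# as the JSON consumer callback and then blocks in the queue client.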
class QueueProcessingWorker(object):
    def __init__(self):
        self.q = SimpleQueueClient()

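    # Wraps consume() with error handling: if processing an event raises, the
    # event is appended (under a lockfile) to a per-queue .errors file in
    # QUEUE_ERROR_DIR, one "<asctime>\t<json>" line per failure, so it can be
    # inspected and re-enqueued later (e.g. with queue_json_publish) once the
    # underlying bug is fixed.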
    def consume_wrapper(self, data):
        try:
            self.consume(data)
        except Exception:
            self._log_problem()
            if not os.path.exists(settings.QUEUE_ERROR_DIR):
                os.mkdir(settings.QUEUE_ERROR_DIR)
            fname = '%s.errors' % (self.queue_name,)
            fn = os.path.join(settings.QUEUE_ERROR_DIR, fname)
            line = '%s\t%s\n' % (time.asctime(), ujson.dumps(data))
            lock_fn = fn + '.lock'
            with lockfile(lock_fn):
                with open(fn, 'a') as f:
                    f.write(line)
        reset_queries()

    def _log_problem(self):
        logging.exception("Problem handling data on queue %s" % (self.queue_name,))

    def start(self):
        self.q.register_json_consumer(self.queue_name, self.consume_wrapper)
        self.q.start_consuming()

    def stop(self):
        self.q.stop_consuming()

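# Signup events carry MailChimp-style fields: a top-level EMAIL plus a
# merge_vars dict (e.g. NAME).  When a MailChimp API key is configured on a
# deployed server, new signups are also mirrored to the Zulip friends list.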
@assign_queue('signups')
class SignupWorker(QueueProcessingWorker):
    def __init__(self):
        super(SignupWorker, self).__init__()
        if settings.MAILCHIMP_API_KEY != '':
            self.pm = PostMonkey(settings.MAILCHIMP_API_KEY, timeout=10)

    def consume(self, data):
        merge_vars = data['merge_vars']
        # This should clear out any invitation reminder emails
        clear_followup_emails_queue(data["EMAIL"])
        if settings.MAILCHIMP_API_KEY != '' and settings.DEPLOYED:
            try:
                self.pm.listSubscribe(
                        id=settings.ZULIP_FRIENDS_LIST_ID,
                        email_address=data['EMAIL'],
                        merge_vars=merge_vars,
                        double_optin=False,
                        send_welcome=False)
            except MailChimpException as e:
                # MailChimp error 214 means the address is already on the
                # list, which is harmless here; anything else is a real error.
                if e.code == 214:
                    logging.warning("Attempted to sign up already existing email to list: %s" % (data['EMAIL'],))
                else:
                    raise

        email = data.get("EMAIL")
        name = merge_vars.get("NAME")
        enqueue_welcome_emails(email, name)

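# Invitation events on the 'invites' queue trigger an immediate confirmation
# email to the invitee, plus a reminder email scheduled for two days later.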
@assign_queue('invites')
class ConfirmationEmailWorker(QueueProcessingWorker):
    def consume(self, data):
        invitee = get_prereg_user_by_email(data["email"])
        referrer = get_user_profile_by_email(data["referrer_email"])
        do_send_confirmation_email(invitee, referrer)

        # queue invitation reminder for two days from now.
        link = Confirmation.objects.get_link_for_object(invitee)
        send_local_email_template_with_delay([{'email': data["email"], 'name': ""}],
                                             "zerver/emails/invitation/invitation_reminder_email",
                                             {'activate_url': link,
                                              'referrer': referrer,
                                              'enterprise': settings.ENTERPRISE,
                                              'external_host': settings.EXTERNAL_HOST,
                                              'support_email': settings.ZULIP_ADMINISTRATOR},
                                             datetime.timedelta(days=2),
                                             tags=["invitation-reminders"],
                                             sender={'email': settings.ZULIP_ADMINISTRATOR, 'name': 'Zulip'})

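# The next three workers record usage metadata (API activity, activity
# intervals, and presence) by delegating to the corresponding do_update_*
# actions.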
@assign_queue('user_activity')
class UserActivityWorker(QueueProcessingWorker):
    def consume(self, event):
        user_profile = get_user_profile_by_id(event["user_profile_id"])
        client = get_client(event["client"])
        log_time = timestamp_to_datetime(event["time"])
        query = event["query"]
        do_update_user_activity(user_profile, client, query, log_time)

@assign_queue('user_activity_interval')
class UserActivityIntervalWorker(QueueProcessingWorker):
    def consume(self, event):
        user_profile = get_user_profile_by_id(event["user_profile_id"])
        log_time = timestamp_to_datetime(event["time"])
        do_update_user_activity_interval(user_profile, log_time)

@assign_queue('user_presence')
class UserPresenceWorker(QueueProcessingWorker):
    def consume(self, event):
        logging.info("Received event: %s" % (event,))
        user_profile = get_user_profile_by_id(event["user_profile_id"])
        client = get_client(event["client"])
        log_time = timestamp_to_datetime(event["time"])
        status = event["status"]
        do_update_user_presence(user_profile, client, log_time, status)

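# Rather than handling one event at a time, this worker periodically drains
# the queue and groups the pending events by user, so that a user who was
# sent several messages in quick succession gets a single missed-message
# email covering all of them.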
@assign_queue('missedmessage_emails')
class MissedMessageWorker(QueueProcessingWorker):
    def start(self):
        while True:
            missed_events = self.q.drain_queue("missedmessage_emails", json=True)
            by_recipient = defaultdict(list)

            for event in missed_events:
                logging.info("Received event: %s" % (event,))
                by_recipient[event['user_profile_id']].append(event)

            for user_profile_id, events in by_recipient.items():
                handle_missedmessage_emails(user_profile_id, events)

            reset_queries()
            # Aggregate all messages received every 2 minutes to let someone finish sending a batch
            # of messages
            time.sleep(2 * 60)

@assign_queue('missedmessage_mobile_notifications')
class PushNotificationsWorker(QueueProcessingWorker):
    def consume(self, data):
        handle_push_notification(data['user_profile_id'], data)

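# Build a Zulip API client authenticated as this deployment's role account,
# pointed at settings.FEEDBACK_TARGET.  The zulip module is imported from the
# api/ directory of the source tree rather than from an installed package.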
def make_feedback_client():
    sys.path.append(os.path.join(os.path.dirname(__file__), '../../api'))
    import zulip
    return zulip.Client(
        client="ZulipFeedback/0.1",
        email=settings.DEPLOYMENT_ROLE_NAME,
        api_key=settings.DEPLOYMENT_ROLE_KEY,
        verbose=True,
        site=settings.FEEDBACK_TARGET)

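# Feedback is either emailed directly to FEEDBACK_EMAIL, or, when that is
# unset, forwarded to another Zulip deployment via a custom
# deployments/feedback endpoint registered on the API client.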
@assign_queue('feedback_messages')
class FeedbackBot(QueueProcessingWorker):
    def start(self):
        if settings.FEEDBACK_EMAIL is None:
            self.staging_client = make_feedback_client()
            self.staging_client._register(
                'forward_feedback',
                method='POST',
                url='deployments/feedback',
                make_request=(lambda request: {'message': simplejson.dumps(request)}),
            )
        QueueProcessingWorker.start(self)

    def consume(self, event):
        if settings.FEEDBACK_EMAIL is not None:
            to_email = settings.FEEDBACK_EMAIL
            subject = "Zulip feedback from %s" % (event["sender_email"],)
            content = event["content"]
            from_email = '"%s" <%s>' % (event["sender_full_name"], event["sender_email"])
            headers = {'Reply-To': '"%s" <%s>' % (event["sender_full_name"], event["sender_email"])}
            msg = EmailMessage(subject, content, from_email, [to_email], headers=headers)
            msg.send()
            return

        self.staging_client.forward_feedback(event)

@assign_queue('error_reports')
class ErrorReporter(QueueProcessingWorker):
    def start(self):
        self.staging_client = make_feedback_client()
        self.staging_client._register(
            'forward_error',
            method='POST',
            url='deployments/report_error',
            make_request=(lambda type, report: {'type': type, 'report': simplejson.dumps(report)}),
        )
        QueueProcessingWorker.start(self)

    def consume(self, event):
        self.staging_client.forward_error(event['type'], event['report'])

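# Posts batches of slow query reports to the "logs" stream as the error bot,
# draining the queue once a minute.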
@assign_queue('slow_queries')
class SlowQueryWorker(QueueProcessingWorker):
    def start(self):
        while True:
            self.process_one_batch()
            # Aggregate all slow query messages in 1-minute chunks to avoid message spam
            time.sleep(1 * 60)

    def process_one_batch(self):
        slow_queries = self.q.drain_queue("slow_queries", json=True)

        if settings.ERROR_BOT is None:
            return

        if len(slow_queries) > 0:
            topic = "%s: slow queries" % (settings.STATSD_PREFIX,)

            content = ""
            for query in slow_queries:
                # Indent each query so it renders as a code block in the
                # resulting Zulip message.
                content += "    %s\n" % (query,)

            internal_send_message(settings.ERROR_BOT, "stream", "logs", topic, content)

        reset_queries()

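# Replays a message-send request that arrived over the socket transport
# through Django's normal middleware stack, by constructing a synthetic WSGI
# request; the response is stored in redis for the socket server to pick up
# and also published back on the sender's return queue.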
@assign_queue("message_sender")
class MessageSenderWorker(QueueProcessingWorker):
    def __init__(self):
        super(MessageSenderWorker, self).__init__()
        self.redis_client = redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=0)
        self.handler = BaseHandler()
        self.handler.load_middleware()

    def consume(self, event):
        server_meta = event['server_meta']

        environ = {'REQUEST_METHOD': 'SOCKET',
                   'SCRIPT_NAME': '',
                   'PATH_INFO': '/json/send_message',
                   'SERVER_NAME': 'localhost',
                   'SERVER_PORT': 9993,
                   'SERVER_PROTOCOL': 'ZULIP_SOCKET/1.0',
                   'wsgi.version': (1, 0),
                   'wsgi.input': StringIO.StringIO(),
                   'wsgi.errors': sys.stderr,
                   'wsgi.multithread': False,
                   'wsgi.multiprocess': True,
                   'wsgi.run_once': False,
                   'zulip.emulated_method': 'POST'}
        # We're mostly using a WSGIRequest for convenience
        environ.update(server_meta['request_environ'])
        request = WSGIRequest(environ)
        request._request = event['request']
        request.csrf_processing_done = True

        user_profile = get_user_profile_by_id(server_meta['user_id'])
        request._cached_user = user_profile

        resp = self.handler.get_response(request)
        server_meta['time_request_finished'] = time.time()
        server_meta['worker_log_data'] = request._log_data

        resp_content = resp.content
        result = {'response': ujson.loads(resp_content), 'req_id': event['req_id'],
                  'server_meta': server_meta}

        redis_key = req_redis_key(server_meta['client_id'], event['req_id'])
        self.redis_client.hmset(redis_key, {'status': 'complete',
                                            'response': resp_content})

        queue_json_publish(server_meta['return_queue'], result, lambda e: None)

@assign_queue('digest_emails')
class DigestWorker(QueueProcessingWorker):
    # Who gets a digest is entirely determined by the enqueue_digest_emails
    # management command, not here.
    def consume(self, event):
        logging.info("Received digest event: %s" % (event,))
        handle_digest_email(event["user_profile_id"], event["cutoff"])

@assign_queue('test')
class TestWorker(QueueProcessingWorker):
    # This worker allows you to test the queue worker infrastructure without
    # creating significant side effects.  It can be useful in development or
    # for troubleshooting prod/staging.  It pulls a message off the test queue
    # and appends it to a file in /tmp.
    def consume(self, event):
        fn = settings.ZULIP_WORKER_TEST_FILE
        message = ujson.dumps(event)
        logging.info("TestWorker should append this message to %s: %s" % (fn, message))
        with open(fn, 'a') as f:
            f.write(message + '\n')

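# A quick way to exercise the worker infrastructure end to end is to publish
# an event onto the test queue and run this worker against it (a sketch; how
# the worker process itself gets launched is outside this file):
#
#     queue_json_publish('test', {'type': 'test_event'}, lambda e: None)
#     get_worker('test').start()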