from __future__ import absolute_import

from django.utils.translation import ugettext as _
from django.http import HttpRequest, HttpResponse

from six import text_type

from zerver.models import get_client, UserProfile, Client
from zerver.decorator import asynchronous, \
    authenticated_json_post_view, internal_notify_view, RespondAsynchronously, \
    has_request_variables, REQ, _RespondAsynchronously
from zerver.lib.response import json_success, json_error
from zerver.lib.validator import check_bool, check_list, check_string
from zerver.tornado.event_queue import get_client_descriptor, \
    process_notification, fetch_events
from django.core.handlers.base import BaseHandler

from typing import Union, Optional, Iterable, Sequence, List

import time
import ujson

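# Internal endpoint: receives a notification, JSON-encoded in the `data`
# POST field, and hands it to process_notification for delivery to the
# relevant event queues.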
@internal_notify_view
def notify(request):
    # type: (HttpRequest) -> HttpResponse
    process_notification(ujson.loads(request.POST['data']))
    return json_success()

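# Explicitly delete a client's event queue; only the user who owns the
# queue is allowed to clean it up.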
@has_request_variables
def cleanup_event_queue(request, user_profile, queue_id=REQ()):
    # type: (HttpRequest, UserProfile, text_type) -> HttpResponse
    client = get_client_descriptor(str(queue_id))
    if client is None:
        return json_error(_("Bad event queue id: %s") % (queue_id,))
    if user_profile.id != client.user_profile_id:
        return json_error(_("You are not authorized to access this queue"))
    request._log_data['extra'] = "[%s]" % (queue_id,)
    client.cleanup()
    return json_success()

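# Long-polling endpoint for the events API: returns any events newer than
# last_event_id on the given queue (creating a new queue when queue_id is
# None), or holds the connection open until events arrive unless
# dont_block is set.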
@asynchronous
@has_request_variables
def get_events_backend(request, user_profile, handler,
                       user_client = REQ(converter=get_client, default=None),
                       last_event_id = REQ(converter=int, default=None),
                       queue_id = REQ(default=None),
                       apply_markdown = REQ(default=False, validator=check_bool),
                       all_public_streams = REQ(default=False, validator=check_bool),
                       event_types = REQ(default=None, validator=check_list(check_string)),
                       dont_block = REQ(default=False, validator=check_bool),
                       narrow = REQ(default=[], validator=check_list(None)),
                       lifespan_secs = REQ(default=0, converter=int)):
    # type: (HttpRequest, UserProfile, BaseHandler, Optional[Client], Optional[int], Optional[text_type], bool, bool, Optional[List[text_type]], bool, Iterable[Sequence[text_type]], int) -> Union[HttpResponse, _RespondAsynchronously]
    if user_client is None:
        user_client = request.client

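    # Everything the event_queue machinery needs to service this poll.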
    events_query = dict(
        user_profile_id = user_profile.id,
        user_profile_email = user_profile.email,
        queue_id = queue_id,
        last_event_id = last_event_id,
        event_types = event_types,
        client_type_name = user_client.name,
        all_public_streams = all_public_streams,
        lifespan_secs = lifespan_secs,
        narrow = narrow,
        dont_block = dont_block,
        handler_id = handler.handler_id)

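    # No existing queue was supplied, so include the parameters needed to
    # create a fresh one for this client.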
    if queue_id is None:
        events_query['new_queue_data'] = dict(
            user_profile_id = user_profile.id,
            realm_id = user_profile.realm.id,
            user_profile_email = user_profile.email,
            event_types = event_types,
            client_type_name = user_client.name,
            apply_markdown = apply_markdown,
            all_public_streams = all_public_streams,
            queue_timeout = lifespan_secs,
            last_connection_time = time.time(),
            narrow = narrow)

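    # fetch_events either returns events (or an error) immediately, or
    # reports that we must wait for new events to arrive.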
    result = fetch_events(events_query)
    if "extra_log_data" in result:
        request._log_data['extra'] = result["extra_log_data"]

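    # Nothing to return yet: remember the request on the Tornado handler and
    # return the RespondAsynchronously sentinel so that no response is sent
    # until events show up for this queue.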
    if result["type"] == "async":
        handler._request = request
        return RespondAsynchronously
    if result["type"] == "error":
        return json_error(result["message"])
    return json_success(result["response"])