import time
from typing import Iterable, Optional, Sequence, Union

import ujson
from django.core.handlers.base import BaseHandler
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _

from zerver.decorator import REQ, RespondAsynchronously, \
    _RespondAsynchronously, asynchronous, to_non_negative_int, \
    has_request_variables, internal_notify_view, process_client
from zerver.lib.response import json_error, json_success
from zerver.lib.validator import check_bool, check_int, check_list, check_string
from zerver.models import Client, UserProfile, get_client, get_user_profile_by_id
from zerver.tornado.event_queue import fetch_events, \
    get_client_descriptor, process_notification
from zerver.tornado.exceptions import BadEventQueueIdError

@internal_notify_view(True)
def notify(request: HttpRequest) -> HttpResponse:
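    # Internal endpoint (authenticated via internal_notify_view's shared
    # secret) through which the Django process hands an event notification
    # to this Tornado process for delivery to the relevant event queues.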
    process_notification(ujson.loads(request.POST['data']))
    return json_success()

@has_request_variables
def cleanup_event_queue(request: HttpRequest, user_profile: UserProfile,
                        queue_id: str=REQ()) -> HttpResponse:
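    # Look up the queue and verify it belongs to the requesting user
    # before garbage-collecting it.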
    client = get_client_descriptor(str(queue_id))
    if client is None:
        raise BadEventQueueIdError(queue_id)
    if user_profile.id != client.user_profile_id:
        return json_error(_("You are not authorized to access this queue"))
    request._log_data['extra'] = "[%s]" % (queue_id,)
    client.cleanup()
    return json_success()

@asynchronous
@internal_notify_view(True)
@has_request_variables
def get_events_internal(
        request: HttpRequest,
        handler: BaseHandler,
        user_profile_id: int = REQ(validator=check_int),
) -> Union[HttpResponse, _RespondAsynchronously]:
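    # Variant of get_events used for requests forwarded from the Django
    # process; the acting user is identified by user_profile_id rather than
    # by a session, and the client is recorded as "internal".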
    user_profile = get_user_profile_by_id(user_profile_id)
    request._email = user_profile.delivery_email
    process_client(request, user_profile, client_name="internal")
    return get_events_backend(request, user_profile, handler)

@asynchronous
def get_events(request: HttpRequest, user_profile: UserProfile,
               handler: BaseHandler) -> Union[HttpResponse, _RespondAsynchronously]:
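    # Standard long-polling endpoint clients use to wait for new events.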
    return get_events_backend(request, user_profile, handler)

@has_request_variables
def get_events_backend(request: HttpRequest, user_profile: UserProfile, handler: BaseHandler,
                       # user_client is intended only for internal Django=>Tornado requests
                       # and thus shouldn't be documented for external use.
                       user_client: Optional[Client]=REQ(converter=get_client, default=None,
                                                         intentionally_undocumented=True),
                       last_event_id: Optional[int]=REQ(converter=int, default=None),
                       queue_id: Optional[str]=REQ(default=None),
                       # apply_markdown, client_gravatar, all_public_streams, and various
                       # other parameters are only used when registering a new queue via this
                       # endpoint. This is a feature used primarily by get_events_internal
                       # and not expected to be used by third-party clients.
                       apply_markdown: bool=REQ(default=False, validator=check_bool,
                                                intentionally_undocumented=True),
                       client_gravatar: bool=REQ(default=False, validator=check_bool,
                                                 intentionally_undocumented=True),
                       all_public_streams: bool=REQ(default=False, validator=check_bool,
                                                    intentionally_undocumented=True),
                       event_types: Optional[str]=REQ(default=None, validator=check_list(check_string),
                                                      intentionally_undocumented=True),
                       dont_block: bool=REQ(default=False, validator=check_bool),
                       narrow: Iterable[Sequence[str]]=REQ(default=[], validator=check_list(None),
                                                           intentionally_undocumented=True),
                       lifespan_secs: int=REQ(default=0, converter=to_non_negative_int,
                                              intentionally_undocumented=True)
                       ) -> Union[HttpResponse, _RespondAsynchronously]:
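    # Fall back to the client recorded on the request when no explicit
    # user_client was passed (i.e. for ordinary, non-internal requests).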
    if user_client is None:
        valid_user_client = request.client
    else:
        valid_user_client = user_client

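    # Bundle the request parameters into the query passed to fetch_events.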
    events_query = dict(
        user_profile_id = user_profile.id,
        queue_id = queue_id,
        last_event_id = last_event_id,
        event_types = event_types,
        client_type_name = valid_user_client.name,
        all_public_streams = all_public_streams,
        lifespan_secs = lifespan_secs,
        narrow = narrow,
        dont_block = dont_block,
        handler_id = handler.handler_id)

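    # Without a queue_id, this request also allocates a new event queue
    # with the properties below.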
    if queue_id is None:
        events_query['new_queue_data'] = dict(
            user_profile_id = user_profile.id,
            realm_id = user_profile.realm_id,
            event_types = event_types,
            client_type_name = valid_user_client.name,
            apply_markdown = apply_markdown,
            client_gravatar = client_gravatar,
            all_public_streams = all_public_streams,
            queue_timeout = lifespan_secs,
            last_connection_time = time.time(),
            narrow = narrow)

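    # fetch_events either returns data we can send back immediately, tells
    # us to respond asynchronously once events arrive, or reports an error
    # for an invalid query.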
    result = fetch_events(events_query)
    if "extra_log_data" in result:
        request._log_data['extra'] = result["extra_log_data"]

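    # No events are available yet: stash the request on the handler and
    # finish the response later, once events arrive (long-polling).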
    if result["type"] == "async":
        handler._request = request
        return RespondAsynchronously
    if result["type"] == "error":
        raise result["exception"]
    return json_success(result["response"])