tests: Call real consume method of queue processors.

This switches to real consumer tests for a first batch of
queue_json_publish() calls that don't cause trouble when
used with call_consume_in_tests=True.
derAnfaenger 2017-10-19 15:02:03 +02:00 committed by Tim Abbott
parent 18e5bcbbb1
commit 1792dcbd09
9 changed files with 13 additions and 6 deletions
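For context on the mechanism: call_consume_in_tests is a keyword argument of
queue_json_publish() in zerver/lib/queue.py. The sketch below is a simplified
reconstruction of that dispatch, not code added by this commit, and the
get_worker()/consume_wrapper() helper names are assumptions about
zerver.worker.queue_processors rather than confirmed signatures.

from typing import Any, Callable

from django.conf import settings

def queue_json_publish(queue_name, event, processor, call_consume_in_tests=False):
    # type: (str, Any, Callable[[Any], None], bool) -> None
    if settings.USING_RABBITMQ:
        # Production path: publish the event to RabbitMQ as JSON.
        # (get_queue_client() lives elsewhere in zerver/lib/queue.py.)
        get_queue_client().json_publish(queue_name, event)
    elif call_consume_in_tests:
        # Test path exercised by the call sites in this commit: feed the event
        # straight to the real queue processor's consume logic.
        from zerver.worker.queue_processors import get_worker  # assumed helper
        get_worker(queue_name).consume_wrapper(event)  # assumed method name
    else:
        # Dev/test fallback without RabbitMQ: call the supplied processor,
        # which at most of the call sites in the diffs below is a no-op lambda.
        processor(event)

Passing call_consume_in_tests=True therefore makes the test suite, which does
not run RabbitMQ, exercise the worker's real consume path instead of the
no-op lambda.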

@@ -1158,7 +1158,7 @@ def do_send_messages(messages_maybe_none):
'message_content': message['message'].content,
'message_realm_id': message['realm'].id,
'urls': links_for_embed}
-queue_json_publish('embed_links', event_data, lambda x: None)
+queue_json_publish('embed_links', event_data, lambda x: None, call_consume_in_tests=True)
if (settings.ENABLE_FEEDBACK and settings.FEEDBACK_BOT and
message['message'].recipient.type == Recipient.PERSONAL):

@@ -59,7 +59,7 @@ def queue_digest_recipient(user_profile, cutoff):
# Convert cutoff to epoch seconds for transit.
event = {"user_profile_id": user_profile.id,
"cutoff": cutoff.strftime('%s')}
-queue_json_publish("digest_emails", event, lambda event: None)
+queue_json_publish("digest_emails", event, lambda event: None, call_consume_in_tests=True)
def enqueue_emails(cutoff):
# type: (datetime.datetime) -> None

@@ -353,7 +353,8 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile, missed_messages, m
'from_address': from_address,
'reply_to_email': formataddr((reply_to_name, reply_to_address)),
'context': context}
-queue_json_publish("missedmessage_email_senders", email_dict, send_email_from_dict)
+queue_json_publish("missedmessage_email_senders", email_dict, send_email_from_dict,
+                   call_consume_in_tests=True)
user_profile.last_reminder = timezone_now()
user_profile.save(update_fields=['last_reminder'])

@@ -107,7 +107,7 @@ class AdminZulipHandler(logging.Handler):
queue_json_publish('error_reports', dict(
type = "server",
report = report,
-), lambda x: None)
+), lambda x: None, call_consume_in_tests=True)
except Exception:
# If this breaks, complain loudly but don't pass the traceback up the stream
# However, we *don't* want to use logging.exception since that could trigger a loop.

@@ -57,4 +57,6 @@ You can use "-" to represent stdin.
# Verify that payload is valid json.
data = ujson.loads(payload)
+# This is designed to use the `error` method rather than
+# the call_consume_in_tests flow.
queue_json_publish(queue_name, data, error)

@@ -208,6 +208,8 @@ def write_log_line(log_data, path, method, remote_ip, email, client_name,
logger.info(logger_line)
if (is_slow_query(time_delta, path)):
+# Since the slow query worker patches code, we can't directly
+# use call_consume_in_tests here without further work.
queue_json_publish("slow_queries", "%s (%s)" % (logger_line, email), lambda e: None)
if settings.PROFILE_ALL_REQUESTS:

@@ -1130,7 +1130,7 @@ def update_message_backend(request, user_profile,
# `render_incoming_message` call earlier in this function.
'message_realm_id': user_profile.realm_id,
'urls': links_for_embed}
-queue_json_publish('embed_links', event_data, lambda x: None)
+queue_json_publish('embed_links', event_data, lambda x: None, call_consume_in_tests=True)
return json_success()

@@ -128,6 +128,6 @@ def report_error(request, user_profile, message=REQ(), stacktrace=REQ(),
log = log,
more_info = more_info,
)
-), lambda x: None)
+), lambda x: None, call_consume_in_tests=True)
return json_success()

@@ -393,6 +393,8 @@ class MessageSenderWorker(QueueProcessingWorker):
self.redis_client.hmset(redis_key, {'status': 'complete',
'response': resp_content})
+# Since this sends back to Tornado, we can't use
+# call_consume_in_tests here.
queue_json_publish(server_meta['return_queue'], result, lambda e: None)
@assign_queue('digest_emails')