Extract process_one_batch() in SlowQueryWorker.

By extracting the function, we eliminate a redundant sleep
call.
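
The redundancy: before this change, start() slept in two places, once inside the
settings.ERROR_BOT guard (which then continued the loop) and once at the end of
every iteration. After the extraction, the guard becomes an early return in
process_one_batch(), so only the end-of-loop sleep remains. Condensed from the
diff below:

    # Before: two sleep call sites inside start()
    if settings.ERROR_BOT is None:
        time.sleep(1 * 60)
        continue
    ...                    # drain the queue and report to ERROR_BOT
    time.sleep(1 * 60)

    # After: the guard returns early, leaving one sleep per iteration
    def start(self):
        while True:
            self.process_one_batch()  # returns at once when ERROR_BOT is unset
            # Aggregate all slow query messages in 1-minute chunks to avoid message spam
            time.sleep(1 * 60)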

(imported from commit 24d4485019f96ae10e9a00244643b77ff2a7db57)
Steve Howell 2013-11-12 19:55:06 -05:00
parent 5f655088ef
commit cf79e77347
1 changed file with 16 additions and 15 deletions

@@ -213,24 +213,25 @@ class FeedbackBot(QueueProcessingWorker):
 class SlowQueryWorker(QueueProcessingWorker):
     def start(self):
         while True:
-            if settings.ERROR_BOT is None:
-                time.sleep(1 * 60)
-                continue
-            slow_queries = self.q.drain_queue("slow_queries", json=True)
-            if len(slow_queries) > 0:
-                topic = "%s: slow queries" % (settings.STATSD_PREFIX,)
-                content = ""
-                for query in slow_queries:
-                    content += " %s\n" % (query,)
-                internal_send_message(settings.ERROR_BOT, "stream", "logs", topic, content)
+            self.process_one_batch()
             # Aggregate all slow query messages in 1-minute chunks to avoid message spam
             time.sleep(1 * 60)
 
+    def process_one_batch(self):
+        if settings.ERROR_BOT is None:
+            return
+        slow_queries = self.q.drain_queue("slow_queries", json=True)
+        if len(slow_queries) > 0:
+            topic = "%s: slow queries" % (settings.STATSD_PREFIX,)
+            content = ""
+            for query in slow_queries:
+                content += " %s\n" % (query,)
+            internal_send_message(settings.ERROR_BOT, "stream", "logs", topic, content)
+
 @assign_queue("message_sender")
 class MessageSenderWorker(QueueProcessingWorker):
     def __init__(self):