diff --git a/api/zulip/__init__.py b/api/zulip/__init__.py
index 7aba50700c..79a2ed979f 100644
--- a/api/zulip/__init__.py
+++ b/api/zulip/__init__.py
@@ -245,7 +245,7 @@ class Client(object):
     def do_api_query(self, orig_request, url, method="POST", longpolling = False):
         request = {}
-        for (key, val) in orig_request.iteritems():
+        for (key, val) in six.iteritems(orig_request):
             if not (isinstance(val, str) or isinstance(val, six.text_type)):
                 request[key] = simplejson.dumps(val)
             else:
diff --git a/bots/check-mirroring b/bots/check-mirroring
index 9794df9866..2724361bfc 100755
--- a/bots/check-mirroring
+++ b/bots/check-mirroring
@@ -279,7 +279,7 @@ logger.info("Finished receiving Zulip messages!")
 receive_zephyrs()
 logger.info("Finished receiving Zephyr messages!")
 
-all_keys = set(zhkeys.keys() + hzkeys.keys())
+all_keys = set(list(zhkeys.keys()) + list(hzkeys.keys()))
 def process_keys(content_list):
     # Start by filtering out any keys that might have come from
     # concurrent check-mirroring processes
diff --git a/bots/zephyr_mirror_backend.py b/bots/zephyr_mirror_backend.py
index ee57c1bffb..9fc6953a31 100755
--- a/bots/zephyr_mirror_backend.py
+++ b/bots/zephyr_mirror_backend.py
@@ -804,9 +804,9 @@ def add_zulip_subscriptions(verbose):
     unauthorized = res.get("unauthorized")
     if verbose:
         if already is not None and len(already) > 0:
-            logger.info("\nAlready subscribed to: %s" % (", ".join(already.values()[0]),))
+            logger.info("\nAlready subscribed to: %s" % (", ".join(list(already.values())[0]),))
         if new is not None and len(new) > 0:
-            logger.info("\nSuccessfully subscribed to: %s" % (", ".join(new.values()[0]),))
+            logger.info("\nSuccessfully subscribed to: %s" % (", ".join(list(new.values())[0]),))
         if unauthorized is not None and len(unauthorized) > 0:
             logger.info("\n" + "\n".join(textwrap.wrap("""\
 The following streams you have NOT been subscribed to,
diff --git a/tools/minify-js b/tools/minify-js
index 32aa05c012..39469cbfc3 100755
--- a/tools/minify-js
+++ b/tools/minify-js
@@ -9,6 +9,7 @@ import os
 import subprocess
 import optparse
 import sys
+import six
 
 parser = optparse.OptionParser()
 parser.add_option('--prev-deploy', nargs=1, metavar='DIR',
@@ -92,7 +93,7 @@ MIN_DIR = os.path.join(STATIC_PATH, 'min/')
 MAP_DIR = os.path.join(STATIC_PATH, 'source-map/')
 subprocess.check_call(['mkdir', '-p', MIN_DIR, MAP_DIR])
 
-for js_group, filespec in JS_SPECS.iteritems():
+for js_group, filespec in six.iteritems(JS_SPECS):
     # JS_SPECS look like 'js/foobar.js'.
     # changed_files look like 'static/js/foobar.js'.
     # So we prepend 'static/' to the JS_SPECS so these match up.
diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py
index 5f57e2ff7c..e1dd1424a9 100644
--- a/zerver/lib/actions.py
+++ b/zerver/lib/actions.py
@@ -1318,7 +1318,7 @@ def notify_subscriptions_removed(user_profile, streams, no_log=False):
     # we should do the same for removes.
     notifications_for = get_subscribers_to_streams(streams)
 
-    for event_recipient, notifications in notifications_for.iteritems():
+    for event_recipient, notifications in six.iteritems(notifications_for):
         # Don't send a peer subscription notice to yourself.
         if event_recipient == user_profile:
             continue
diff --git a/zerver/lib/alert_words.py b/zerver/lib/alert_words.py
index 628fbb8575..4495e1851d 100644
--- a/zerver/lib/alert_words.py
+++ b/zerver/lib/alert_words.py
@@ -4,13 +4,14 @@ from django.db.models import Q
 import zerver.models
 from zerver.lib.cache import cache_with_key, realm_alert_words_cache_key
 import ujson
+import six
 
 @cache_with_key(realm_alert_words_cache_key, timeout=3600*24)
 def alert_words_in_realm(realm):
     users_query = zerver.models.UserProfile.objects.filter(realm=realm, is_active=True)
     alert_word_data = users_query.filter(~Q(alert_words=ujson.dumps([]))).values('id', 'alert_words')
     all_user_words = dict((elt['id'], ujson.loads(elt['alert_words'])) for elt in alert_word_data)
-    user_ids_with_words = dict((user_id, w) for (user_id, w) in all_user_words.iteritems() if len(w))
+    user_ids_with_words = dict((user_id, w) for (user_id, w) in six.iteritems(all_user_words) if len(w))
     return user_ids_with_words
 
 def user_alert_words(user_profile):
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py
index 1eb5aa1a14..5d836c5ade 100644
--- a/zerver/lib/bugdown/__init__.py
+++ b/zerver/lib/bugdown/__init__.py
@@ -835,7 +835,7 @@ class AlertWordsNotificationProcessor(markdown.preprocessors.Preprocessor):
         allowed_before_punctuation = "|".join([r'\s', '^', r'[\(\".,\';\[\*`>]'])
         allowed_after_punctuation = "|".join([r'\s', '$', r'[\)\"\?:.,\';\]!\*`]'])
 
-        for user_id, words in realm_words.iteritems():
+        for user_id, words in six.iteritems(realm_words):
             for word in words:
                 escaped = re.escape(word.lower())
                 match_re = re.compile(r'(?:%s)%s(?:%s)' %
@@ -1021,7 +1021,7 @@ def maybe_update_realm_filters(domain):
     if domain is None:
         all_filters = all_realm_filters()
         all_filters['default'] = []
-        for domain, filters in all_filters.iteritems():
+        for domain, filters in six.iteritems(all_filters):
             make_realm_filters(domain, filters)
         # Hack to ensure that getConfig("realm") is right for mirrored Zephyrs
         make_realm_filters("mit.edu/zephyr_mirror", [])
diff --git a/zerver/lib/event_queue.py b/zerver/lib/event_queue.py
index e31dcbe307..6587c2e7ef 100644
--- a/zerver/lib/event_queue.py
+++ b/zerver/lib/event_queue.py
@@ -28,6 +28,7 @@ from zerver.lib.narrow import build_narrow_filter
 from zerver.lib.queue import queue_json_publish
 from zerver.lib.timestamp import timestamp_to_datetime
 import copy
+import six
 
 # The idle timeout used to be a week, but we found that in that
 # situation, queues from dead browser sessions would grow quite large
@@ -353,7 +354,7 @@ def gc_event_queues():
     to_remove = set()
     affected_users = set()
     affected_realms = set()
-    for (id, client) in clients.iteritems():
+    for (id, client) in six.iteritems(clients):
         if client.idle(start):
             to_remove.add(id)
             affected_users.add(client.user_profile_id)
@@ -372,7 +373,7 @@ def dump_event_queues():
     start = time.time()
 
     with open(settings.JSON_PERSISTENT_QUEUE_FILENAME, "w") as stored_queues:
-        ujson.dump([(qid, client.to_dict()) for (qid, client) in clients.iteritems()],
+        ujson.dump([(qid, client.to_dict()) for (qid, client) in six.iteritems(clients)],
                    stored_queues)
 
     logging.info('Tornado dumped %d event queues in %.3fs'
@@ -396,7 +397,7 @@ def load_event_queues():
     except (IOError, EOFError):
         pass
 
-    for client in clients.itervalues():
+    for client in six.itervalues(clients):
         # Put code for migrations due to event queue data format changes here
         add_to_client_dicts(client)
@@ -406,7 +407,7 @@ def send_restart_events():
     event = dict(type='restart',
                  server_generation=settings.SERVER_GENERATION)
-    for client in clients.itervalues():
+    for client in six.itervalues(clients):
         if client.accepts_event(event):
             client.add_event(event.copy())
@@ -578,7 +579,7 @@ def receiver_is_idle(user_profile_id, realm_presences):
     latest_active_timestamp = None
     idle = False
 
-    for client, status in user_presence.iteritems():
+    for client, status in six.iteritems(user_presence):
         if (latest_active_timestamp is None or status['timestamp'] > latest_active_timestamp) and \
                 status['status'] == 'active':
             latest_active_timestamp = status['timestamp']
@@ -642,7 +643,7 @@
         extra_user_data[user_profile_id] = notified
 
-    for client_data in send_to_clients.itervalues():
+    for client_data in six.itervalues(send_to_clients):
         client = client_data['client']
         flags = client_data['flags']
         is_sender = client_data.get('is_sender', False)
diff --git a/zerver/lib/statistics.py b/zerver/lib/statistics.py
index 99c32a06a2..c28de7f4c5 100644
--- a/zerver/lib/statistics.py
+++ b/zerver/lib/statistics.py
@@ -9,6 +9,7 @@ from django.utils.timezone import utc
 from datetime import timedelta
 from itertools import chain
 from six.moves import range
+import six
 
 def median(data):
     data = sorted(data)
@@ -116,7 +117,7 @@ def activity_averages_between(begin, end, by_day=True):
     if by_day:
         return dict((day, calculate_stats(values, all_users=users_to_measure))
                     for day, values in seconds_active.iteritems())
-                    for day, values in seconds_active.iteritems())
+                    for day, values in six.iteritems(seconds_active))
     else:
         return calculate_stats(list(chain.from_iterable(seconds_active.values())),
                                all_users=users_to_measure)
diff --git a/zerver/management/commands/realm_emoji.py b/zerver/management/commands/realm_emoji.py
index 92a702405c..4c9ca8aa00 100644
--- a/zerver/management/commands/realm_emoji.py
+++ b/zerver/management/commands/realm_emoji.py
@@ -5,6 +5,7 @@ from django.core.management.base import BaseCommand
 from zerver.models import Realm, get_realm
 from zerver.lib.actions import do_add_realm_emoji, do_remove_realm_emoji
 import sys
+import six
 
 class Command(BaseCommand):
     help = """Manage emoji for the specified realm
@@ -33,7 +34,7 @@ Example: python2.7 manage.py realm_emoji --realm=zulip.com --op=show
 
     def handle(self, *args, **options):
         realm = get_realm(options["domain"])
         if options["op"] == "show":
-            for name, url in realm.get_emoji().iteritems():
+            for name, url in six.iteritems(realm.get_emoji()):
                 print(name, url)
             sys.exit(0)
diff --git a/zerver/test_bugdown.py b/zerver/test_bugdown.py
index f0580eff8d..f1b93d7fde 100644
--- a/zerver/test_bugdown.py
+++ b/zerver/test_bugdown.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 from __future__ import print_function
+from __future__ import absolute_import
 
 from django.conf import settings
 from django.test import TestCase
@@ -18,6 +19,7 @@ from zerver.models import (
 
 import os
 import ujson
+import six
 
 class FencedBlockPreprocessorTest(TestCase):
     def test_simple_quoting(self):
@@ -147,7 +149,7 @@ class BugdownTest(TestCase):
         format_tests, linkify_tests = self.load_bugdown_tests()
 
         self.maxDiff = None
-        for name, test in format_tests.iteritems():
+        for name, test in six.iteritems(format_tests):
             converted = bugdown_convert(test['input'])
 
             print("Running Bugdown test %s" % (name,))
diff --git a/zerver/test_signup.py b/zerver/test_signup.py
index 89db9fe590..c033c51b8b 100644
--- a/zerver/test_signup.py
+++ b/zerver/test_signup.py
@@ -27,6 +27,7 @@
 import ujson
 
 from six.moves import urllib
 from six.moves import range
+import six
 
 class PublicURLTest(TestCase):
@@ -80,9 +81,9 @@ class PublicURLTest(TestCase):
             "/api/v1/fetch_api_key",
             ],
         }
-        for status_code, url_set in get_urls.iteritems():
+        for status_code, url_set in six.iteritems(get_urls):
             self.fetch("get", url_set, status_code)
-        for status_code, url_set in post_urls.iteritems():
+        for status_code, url_set in six.iteritems(post_urls):
             self.fetch("post", url_set, status_code)
 
     def test_get_gcid_when_not_configured(self):
diff --git a/zerver/test_subs.py b/zerver/test_subs.py
index 4f790eb38f..fafd6874dc 100644
--- a/zerver/test_subs.py
+++ b/zerver/test_subs.py
@@ -1082,7 +1082,7 @@ class SubscriptionAPITest(AuthedTestCase):
                                   {"subscriptions": ujson.dumps(subscriptions)})
         self.assert_json_success(result)
         json = ujson.loads(result.content)
-        for key, val in json_dict.iteritems():
+        for key, val in six.iteritems(json_dict):
             self.assertItemsEqual(val, json[key]) # we don't care about the order of the items
         new_streams = self.get_streams(email)
         self.assertItemsEqual(new_streams, new_subs)
diff --git a/zerver/views/messages.py b/zerver/views/messages.py
index 8e907bbd26..8472327693 100644
--- a/zerver/views/messages.py
+++ b/zerver/views/messages.py
@@ -419,9 +419,9 @@ def exclude_muting_conditions(user_profile, narrow):
         muted_streams = bulk_get_streams(user_profile.realm,
                                          [muted[0] for muted in muted_topics])
         muted_recipients = bulk_get_recipients(Recipient.STREAM,
-                                               [stream.id for stream in muted_streams.itervalues()])
+                                               [stream.id for stream in six.itervalues(muted_streams)])
         recipient_map = dict((s.name.lower(), muted_recipients[s.id].id)
-                             for s in muted_streams.itervalues())
+                             for s in six.itervalues(muted_streams))
 
         muted_topics = [m for m in muted_topics if m[0].lower() in recipient_map]
diff --git a/zerver/views/streams.py b/zerver/views/streams.py
index c383898a6c..2ed7379a7f 100644
--- a/zerver/views/streams.py
+++ b/zerver/views/streams.py
@@ -26,6 +26,7 @@ import ujson
 from six.moves import urllib
 from zerver.lib.rest import rest_dispatch as _rest_dispatch
+import six
 
 rest_dispatch = csrf_exempt((lambda request, *args, **kwargs: _rest_dispatch(request, globals(), *args, **kwargs)))
 
 def list_to_streams(streams_raw, user_profile, autocreate=False, invite_only=False):
@@ -296,7 +297,7 @@ def add_subscriptions_backend(request, user_profile,
     # or if a new stream was created with the "announce" option.
     notifications = []
     if principals and result["subscribed"]:
-        for email, subscriptions in result["subscribed"].iteritems():
+        for email, subscriptions in six.iteritems(result["subscribed"]):
             if email == user_profile.email:
                 # Don't send a Zulip if you invited yourself.
                 continue
diff --git a/zilencer/error_notify.py b/zilencer/error_notify.py
index cc3dc301e7..8145590d2e 100644
--- a/zilencer/error_notify.py
+++ b/zilencer/error_notify.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from collections import defaultdict
 import logging
 
@@ -5,6 +6,7 @@ from django.conf import settings
 from django.core.mail import mail_admins
 
 from zerver.lib.actions import internal_send_message
+import six
 
 def format_subject(subject):
     """
@@ -41,7 +43,7 @@
     more_info = report['more_info']
     if more_info is not None:
         body += "\nAdditional information:"
-        for (key, value) in more_info.iteritems():
+        for (key, value) in six.iteritems(more_info):
            body += "\n %s: %s" % (key, value)
 
     body += "\n\nLog:\n%s" % (report['log'],)
diff --git a/zproject/settings.py b/zproject/settings.py
index dccaf105c8..a04e6aa7ae 100644
--- a/zproject/settings.py
+++ b/zproject/settings.py
@@ -15,6 +15,7 @@ import sys
 import six.moves.configparser
 
 from zerver.lib.db import TimeTrackingConnection
+import six
 
 ########################################################################
 # INITIAL SETTINGS
@@ -161,7 +162,7 @@ DEFAULT_SETTINGS = {'TWITTER_CONSUMER_KEY': '',
                     'DBX_APNS_CERT_FILE': None,
                     }
 
-for setting_name, setting_val in DEFAULT_SETTINGS.iteritems():
+for setting_name, setting_val in six.iteritems(DEFAULT_SETTINGS):
     if not setting_name in vars():
         vars()[setting_name] = setting_val
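
Note, not part of the patch above: a minimal sketch of the pattern this series converges on, assuming only that the six package is installed. six.iteritems(d) delegates to d.iteritems() on Python 2 and wraps d.items() on Python 3, so one spelling of the loop runs under both interpreters, and wrapping dict.keys() in list() keeps concatenation working once keys() returns a view. The default_settings dict below is a hypothetical stand-in for the dicts touched by the diff.

from __future__ import print_function
import six

# Hypothetical sample data, standing in for DEFAULT_SETTINGS and friends.
default_settings = {'TWITTER_CONSUMER_KEY': '', 'DBX_APNS_CERT_FILE': None}

# Python 2: default_settings.iteritems(); Python 3: iter(default_settings.items()).
for name, value in six.iteritems(default_settings):
    print(name, value)

# On Python 3, dict.keys() returns a view with no "+" support, so the
# check-mirroring style fix wraps each side in list() before concatenating.
all_keys = set(list(default_settings.keys()) + list(default_settings.keys()))
print(sorted(all_keys))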