mirror of https://github.com/zulip/zulip.git
Partially apply Python 3 libmodernize.fixes.fix_dict_six.
Refer to #256
parent 176c507b0a
commit 9e3c3e14f5

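libmodernize's fix_dict_six fixer rewrites Python 2 dictionary iteration methods (iteritems(), itervalues(), iterkeys()) into the equivalent six helpers and adds the import six line each file needs (in this commit a from __future__ import absolute_import line is also introduced where it was missing), so the same code runs under both Python 2 and Python 3. A change like this is typically generated by running python-modernize with the -f libmodernize.fixes.fix_dict_six and -w options against the target paths; the exact invocation is not recorded in this commit. Below is a minimal before/after sketch of the rewrite, using a throwaway dictionary rather than code from this commit:

# Hypothetical example illustrating the fix_dict_six rewrite; names are not from this commit.
import six

counts = {"zulip": 2, "zephyr": 3}

# Python 2 only:
#     for name, count in counts.iteritems():
#         print name, count

# Portable form produced by the fixer, valid on Python 2 and Python 3:
for name, count in six.iteritems(counts):
    print("%s: %d" % (name, count))
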
@@ -245,7 +245,7 @@ class Client(object):
     def do_api_query(self, orig_request, url, method="POST", longpolling = False):
         request = {}

-        for (key, val) in orig_request.iteritems():
+        for (key, val) in six.iteritems(orig_request):
             if not (isinstance(val, str) or isinstance(val, six.text_type)):
                 request[key] = simplejson.dumps(val)
             else:

@@ -279,7 +279,7 @@ logger.info("Finished receiving Zulip messages!")
 receive_zephyrs()
 logger.info("Finished receiving Zephyr messages!")

-all_keys = set(zhkeys.keys() + hzkeys.keys())
+all_keys = set(list(zhkeys.keys()) + list(hzkeys.keys()))
 def process_keys(content_list):
     # Start by filtering out any keys that might have come from
     # concurrent check-mirroring processes

@@ -804,9 +804,9 @@ def add_zulip_subscriptions(verbose):
     unauthorized = res.get("unauthorized")
     if verbose:
         if already is not None and len(already) > 0:
-            logger.info("\nAlready subscribed to: %s" % (", ".join(already.values()[0]),))
+            logger.info("\nAlready subscribed to: %s" % (", ".join(list(already.values())[0]),))
         if new is not None and len(new) > 0:
-            logger.info("\nSuccessfully subscribed to: %s" % (", ".join(new.values()[0]),))
+            logger.info("\nSuccessfully subscribed to: %s" % (", ".join(list(new.values())[0]),))
         if unauthorized is not None and len(unauthorized) > 0:
             logger.info("\n" + "\n".join(textwrap.wrap("""\
 The following streams you have NOT been subscribed to,

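The two hunks above also show the list()-wrapping side of this migration: on Python 3, dict.keys() and dict.values() return view objects instead of lists, so code that concatenates them with + or indexes into them must wrap the result in list(). A small sketch of the difference, using toy data rather than the commit's real zhkeys/hzkeys and subscription structures:

# Hypothetical illustration of why the keys()/values() calls above gain a list() wrapper.
zhkeys = {"a": 1}
hzkeys = {"b": 2}

# Python 2: keys() returns a list, so "+" concatenates.
# Python 3: keys() returns a view, so "+" raises TypeError without list().
all_keys = set(list(zhkeys.keys()) + list(hzkeys.keys()))

already = {"stream": ["denmark", "verona"]}
# Indexing also needs a real list on Python 3; already.values()[0] would fail there.
first_subscription_list = list(already.values())[0]

print(sorted(all_keys))         # ['a', 'b']
print(first_subscription_list)  # ['denmark', 'verona']
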
@@ -9,6 +9,7 @@ import os
 import subprocess
 import optparse
 import sys
+import six

 parser = optparse.OptionParser()
 parser.add_option('--prev-deploy', nargs=1, metavar='DIR',
@@ -92,7 +93,7 @@ MIN_DIR = os.path.join(STATIC_PATH, 'min/')
 MAP_DIR = os.path.join(STATIC_PATH, 'source-map/')
 subprocess.check_call(['mkdir', '-p', MIN_DIR, MAP_DIR])

-for js_group, filespec in JS_SPECS.iteritems():
+for js_group, filespec in six.iteritems(JS_SPECS):
     # JS_SPECS look like 'js/foobar.js'.
     # changed_files look like 'static/js/foobar.js'.
     # So we prepend 'static/' to the JS_SPECS so these match up.

@@ -1318,7 +1318,7 @@ def notify_subscriptions_removed(user_profile, streams, no_log=False):
     # we should do the same for removes.
     notifications_for = get_subscribers_to_streams(streams)

-    for event_recipient, notifications in notifications_for.iteritems():
+    for event_recipient, notifications in six.iteritems(notifications_for):
         # Don't send a peer subscription notice to yourself.
         if event_recipient == user_profile:
             continue

@@ -4,13 +4,14 @@ from django.db.models import Q
 import zerver.models
 from zerver.lib.cache import cache_with_key, realm_alert_words_cache_key
 import ujson
+import six

 @cache_with_key(realm_alert_words_cache_key, timeout=3600*24)
 def alert_words_in_realm(realm):
     users_query = zerver.models.UserProfile.objects.filter(realm=realm, is_active=True)
     alert_word_data = users_query.filter(~Q(alert_words=ujson.dumps([]))).values('id', 'alert_words')
     all_user_words = dict((elt['id'], ujson.loads(elt['alert_words'])) for elt in alert_word_data)
-    user_ids_with_words = dict((user_id, w) for (user_id, w) in all_user_words.iteritems() if len(w))
+    user_ids_with_words = dict((user_id, w) for (user_id, w) in six.iteritems(all_user_words) if len(w))
     return user_ids_with_words

 def user_alert_words(user_profile):

@@ -835,7 +835,7 @@ class AlertWordsNotificationProcessor(markdown.preprocessors.Preprocessor):
         allowed_before_punctuation = "|".join([r'\s', '^', r'[\(\".,\';\[\*`>]'])
         allowed_after_punctuation = "|".join([r'\s', '$', r'[\)\"\?:.,\';\]!\*`]'])

-        for user_id, words in realm_words.iteritems():
+        for user_id, words in six.iteritems(realm_words):
             for word in words:
                 escaped = re.escape(word.lower())
                 match_re = re.compile(r'(?:%s)%s(?:%s)' %
@@ -1021,7 +1021,7 @@ def maybe_update_realm_filters(domain):
     if domain is None:
         all_filters = all_realm_filters()
         all_filters['default'] = []
-        for domain, filters in all_filters.iteritems():
+        for domain, filters in six.iteritems(all_filters):
             make_realm_filters(domain, filters)
         # Hack to ensure that getConfig("realm") is right for mirrored Zephyrs
         make_realm_filters("mit.edu/zephyr_mirror", [])

@@ -28,6 +28,7 @@ from zerver.lib.narrow import build_narrow_filter
 from zerver.lib.queue import queue_json_publish
 from zerver.lib.timestamp import timestamp_to_datetime
 import copy
+import six

 # The idle timeout used to be a week, but we found that in that
 # situation, queues from dead browser sessions would grow quite large
@@ -353,7 +354,7 @@ def gc_event_queues():
     to_remove = set()
     affected_users = set()
     affected_realms = set()
-    for (id, client) in clients.iteritems():
+    for (id, client) in six.iteritems(clients):
         if client.idle(start):
             to_remove.add(id)
             affected_users.add(client.user_profile_id)
@@ -372,7 +373,7 @@ def dump_event_queues():
     start = time.time()

     with open(settings.JSON_PERSISTENT_QUEUE_FILENAME, "w") as stored_queues:
-        ujson.dump([(qid, client.to_dict()) for (qid, client) in clients.iteritems()],
+        ujson.dump([(qid, client.to_dict()) for (qid, client) in six.iteritems(clients)],
                    stored_queues)

     logging.info('Tornado dumped %d event queues in %.3fs'
@@ -396,7 +397,7 @@ def load_event_queues():
     except (IOError, EOFError):
         pass

-    for client in clients.itervalues():
+    for client in six.itervalues(clients):
         # Put code for migrations due to event queue data format changes here

         add_to_client_dicts(client)
@@ -406,7 +407,7 @@ def load_event_queues():

 def send_restart_events():
     event = dict(type='restart', server_generation=settings.SERVER_GENERATION)
-    for client in clients.itervalues():
+    for client in six.itervalues(clients):
         if client.accepts_event(event):
             client.add_event(event.copy())

@@ -578,7 +579,7 @@ def receiver_is_idle(user_profile_id, realm_presences):
     latest_active_timestamp = None
     idle = False

-    for client, status in user_presence.iteritems():
+    for client, status in six.iteritems(user_presence):
         if (latest_active_timestamp is None or status['timestamp'] > latest_active_timestamp) and \
                 status['status'] == 'active':
             latest_active_timestamp = status['timestamp']
@@ -642,7 +643,7 @@ def process_message_event(event_template, users):

         extra_user_data[user_profile_id] = notified

-    for client_data in send_to_clients.itervalues():
+    for client_data in six.itervalues(send_to_clients):
         client = client_data['client']
         flags = client_data['flags']
         is_sender = client_data.get('is_sender', False)

@@ -9,6 +9,7 @@ from django.utils.timezone import utc
 from datetime import timedelta
 from itertools import chain
 from six.moves import range
+import six

 def median(data):
     data = sorted(data)
@@ -116,7 +117,7 @@ def activity_averages_between(begin, end, by_day=True):

     if by_day:
         return dict((day, calculate_stats(values, all_users=users_to_measure))
-                    for day, values in seconds_active.iteritems())
+                    for day, values in six.iteritems(seconds_active))
     else:
         return calculate_stats(list(chain.from_iterable(seconds_active.values())),
                                all_users=users_to_measure)

@@ -5,6 +5,7 @@ from django.core.management.base import BaseCommand
 from zerver.models import Realm, get_realm
 from zerver.lib.actions import do_add_realm_emoji, do_remove_realm_emoji
 import sys
+import six

 class Command(BaseCommand):
     help = """Manage emoji for the specified realm
@@ -33,7 +34,7 @@ Example: python2.7 manage.py realm_emoji --realm=zulip.com --op=show
     def handle(self, *args, **options):
         realm = get_realm(options["domain"])
         if options["op"] == "show":
-            for name, url in realm.get_emoji().iteritems():
+            for name, url in six.iteritems(realm.get_emoji()):
                 print(name, url)
             sys.exit(0)

@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 from __future__ import print_function
+from __future__ import absolute_import
 from django.conf import settings
 from django.test import TestCase

@@ -18,6 +19,7 @@ from zerver.models import (

 import os
 import ujson
+import six

 class FencedBlockPreprocessorTest(TestCase):
     def test_simple_quoting(self):
@@ -147,7 +149,7 @@ class BugdownTest(TestCase):
         format_tests, linkify_tests = self.load_bugdown_tests()

         self.maxDiff = None
-        for name, test in format_tests.iteritems():
+        for name, test in six.iteritems(format_tests):
             converted = bugdown_convert(test['input'])

             print("Running Bugdown test %s" % (name,))

@@ -27,6 +27,7 @@ import ujson

 from six.moves import urllib
 from six.moves import range
+import six


 class PublicURLTest(TestCase):
@@ -80,9 +81,9 @@ class PublicURLTest(TestCase):
                           "/api/v1/fetch_api_key",
                           ],
                     }
-        for status_code, url_set in get_urls.iteritems():
+        for status_code, url_set in six.iteritems(get_urls):
             self.fetch("get", url_set, status_code)
-        for status_code, url_set in post_urls.iteritems():
+        for status_code, url_set in six.iteritems(post_urls):
             self.fetch("post", url_set, status_code)

     def test_get_gcid_when_not_configured(self):

@@ -1082,7 +1082,7 @@ class SubscriptionAPITest(AuthedTestCase):
                                 {"subscriptions": ujson.dumps(subscriptions)})
         self.assert_json_success(result)
         json = ujson.loads(result.content)
-        for key, val in json_dict.iteritems():
+        for key, val in six.iteritems(json_dict):
             self.assertItemsEqual(val, json[key]) # we don't care about the order of the items
         new_streams = self.get_streams(email)
         self.assertItemsEqual(new_streams, new_subs)

@@ -419,9 +419,9 @@ def exclude_muting_conditions(user_profile, narrow):
         muted_streams = bulk_get_streams(user_profile.realm,
                                          [muted[0] for muted in muted_topics])
         muted_recipients = bulk_get_recipients(Recipient.STREAM,
-                                               [stream.id for stream in muted_streams.itervalues()])
+                                               [stream.id for stream in six.itervalues(muted_streams)])
         recipient_map = dict((s.name.lower(), muted_recipients[s.id].id)
-                             for s in muted_streams.itervalues())
+                             for s in six.itervalues(muted_streams))

         muted_topics = [m for m in muted_topics if m[0].lower() in recipient_map]

@@ -26,6 +26,7 @@ import ujson
 from six.moves import urllib

 from zerver.lib.rest import rest_dispatch as _rest_dispatch
+import six
 rest_dispatch = csrf_exempt((lambda request, *args, **kwargs: _rest_dispatch(request, globals(), *args, **kwargs)))

 def list_to_streams(streams_raw, user_profile, autocreate=False, invite_only=False):
@@ -296,7 +297,7 @@ def add_subscriptions_backend(request, user_profile,
     # or if a new stream was created with the "announce" option.
     notifications = []
     if principals and result["subscribed"]:
-        for email, subscriptions in result["subscribed"].iteritems():
+        for email, subscriptions in six.iteritems(result["subscribed"]):
             if email == user_profile.email:
                 # Don't send a Zulip if you invited yourself.
                 continue

@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from collections import defaultdict
 import logging

@@ -5,6 +6,7 @@ from django.conf import settings
 from django.core.mail import mail_admins

 from zerver.lib.actions import internal_send_message
+import six

 def format_subject(subject):
     """
@@ -41,7 +43,7 @@ def email_browser_error(report):
     more_info = report['more_info']
     if more_info is not None:
         body += "\nAdditional information:"
-        for (key, value) in more_info.iteritems():
+        for (key, value) in six.iteritems(more_info):
             body += "\n %s: %s" % (key, value)

     body += "\n\nLog:\n%s" % (report['log'],)

@@ -15,6 +15,7 @@ import sys
 import six.moves.configparser

 from zerver.lib.db import TimeTrackingConnection
+import six

 ########################################################################
 # INITIAL SETTINGS
@@ -161,7 +162,7 @@ DEFAULT_SETTINGS = {'TWITTER_CONSUMER_KEY': '',
                     'DBX_APNS_CERT_FILE': None,
                     }

-for setting_name, setting_val in DEFAULT_SETTINGS.iteritems():
+for setting_name, setting_val in six.iteritems(DEFAULT_SETTINGS):
     if not setting_name in vars():
         vars()[setting_name] = setting_val