python3: Fix usage of .keys()/.values() to handle iterators.

This wraps with list() the places where we use the result of
.keys(), .items(), or .values() in a way that would not work
with the iterator-style view objects returned in Python 3.
This commit is contained in:
Tim Abbott 2016-01-24 16:27:18 -08:00
parent f5de149976
commit 5bacda3662
16 changed files with 28 additions and 30 deletions

View File

@@ -2347,7 +2347,7 @@ class P4Sync(Command, P4UserMap):
self.labels[newestChange] = [output, revisions]
if self.verbose:
print("Label changes: %s" % self.labels.keys())
print("Label changes: %s" % (list(self.labels.keys()),))
# Import p4 labels as git tags. A direct mapping does not
# exist, so assume that if all the files are at the same revision
@@ -2780,7 +2780,7 @@ class P4Sync(Command, P4UserMap):
if short in branches:
self.p4BranchesInGit = [ short ]
else:
self.p4BranchesInGit = branches.keys()
self.p4BranchesInGit = list(branches.keys())
if len(self.p4BranchesInGit) > 1:
if not self.silent:
@@ -3215,7 +3215,7 @@ commands = {
def main():
if len(sys.argv[1:]) == 0:
printUsage(commands.keys())
printUsage(list(commands.keys()))
sys.exit(2)
cmdName = sys.argv[1]
@@ -3225,7 +3225,7 @@ def main():
except KeyError:
print("unknown command %s" % cmdName)
print("")
printUsage(commands.keys())
printUsage(list(commands.keys()))
sys.exit(2)
options = cmd.options

View File

@@ -64,16 +64,14 @@ def generate_support_stats():
analyze_messages(msgs, word_count, email_count)
if True:
words = word_count.keys()
words = [w for w in words if word_count[w] >= 10]
words = [w for w in words if len(w) >= 5]
words = [w for w in word_count.keys() if word_count[w] >= 10 and len(w) >= 5]
words = sorted(words, key=lambda w: word_count[w], reverse=True)
for word in words:
print(word, word_count[word])
if False:
emails = email_count.keys()
emails = sorted(emails, key=lambda w: email_count[w], reverse=True)
emails = sorted(list(email_count.keys()),
key=lambda w: email_count[w], reverse=True)
for email in emails:
print(email, email_count[email])

View File

@@ -1339,7 +1339,7 @@ def bulk_remove_subscriptions(users, streams):
subs_by_user = dict((user_profile.id, []) for user_profile in users)
for sub in Subscription.objects.select_related("user_profile").filter(user_profile__in=users,
recipient__in=recipients_map.values(),
recipient__in=list(recipients_map.values()),
active=True):
subs_by_user[sub.user_profile_id].append(sub)

View File

@@ -452,7 +452,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
# Find the image size that is smaller than
# TWITTER_MAX_IMAGE_HEIGHT px tall or the smallest
size_name_tuples = media_item['sizes'].items()
size_name_tuples = list(media_item['sizes'].items())
size_name_tuples.sort(reverse=True,
key=lambda x: x[1]['h'])
for size_name, size in size_name_tuples:

View File

@@ -53,10 +53,10 @@ def gather_hot_conversations(user_profile, stream_messages):
user_message.message.sender.full_name)
conversation_length[key] += 1
diversity_list = conversation_diversity.items()
diversity_list = list(conversation_diversity.items())
diversity_list.sort(key=lambda entry: len(entry[1]), reverse=True)
length_list = conversation_length.items()
length_list = list(conversation_length.items())
length_list.sort(key=lambda entry: entry[1], reverse=True)
# Get up to the 4 best conversations from the diversity list

View File

@@ -344,7 +344,7 @@ def handle_missedmessage_emails(user_profile_id, missed_email_events):
unique_messages = {m.id: m for m in msg_list}
do_send_missedmessage_events_reply_in_zulip(
user_profile,
unique_messages.values(),
list(unique_messages.values()),
mesage_count_by_recipient_subject[recipient_subject],
)
else:
@@ -356,7 +356,7 @@ def handle_missedmessage_emails(user_profile_id, missed_email_events):
unique_messages = {m.id: m for m in all_messages}
do_send_missedmessage_events(
user_profile,
unique_messages.values(),
list(unique_messages.values()),
len(messages),
)

View File

@@ -53,7 +53,7 @@ def rest_dispatch(request, globals_list, **kwargs):
if method_to_use == "SOCKET" and "zulip.emulated_method" in request.META:
method_to_use = request.META["zulip.emulated_method"]
if method_to_use in supported_methods.keys():
if method_to_use in supported_methods:
target_function = globals_list[supported_methods[method_to_use]]
# Set request._query for update_activity_user(), which is called
@@ -90,6 +90,6 @@ def rest_dispatch(request, globals_list, **kwargs):
return target_function(request, **kwargs)
return json_method_not_allowed(supported_methods.keys())
return json_method_not_allowed(list(supported_methods.keys()))

View File

@@ -47,7 +47,7 @@ class AddHeaderMixin(object):
if processed:
ret_dict[old_path] = val
return ret_dict.itervalues()
return list(ret_dict.values())
class ZulipStorage(PipelineMixin, AddHeaderMixin, CachedFilesMixin,

View File

@@ -1117,7 +1117,7 @@ class UserPresenceTests(AuthedTestCase):
self.assertEqual(json['presences'][email][client]['status'], 'idle')
self.assertIn('timestamp', json['presences'][email][client])
self.assertIsInstance(json['presences'][email][client]['timestamp'], int)
self.assertEqual(json['presences'].keys(), ['hamlet@zulip.com'])
self.assertEqual(list(json['presences'].keys()), ['hamlet@zulip.com'])
return json['presences'][email][client]['timestamp']
result = self.client.post("/json/update_active_status", {'status': 'idle'})
@@ -1134,7 +1134,7 @@ class UserPresenceTests(AuthedTestCase):
json = ujson.loads(result.content)
self.assertEqual(json['presences'][email][client]['status'], 'idle')
self.assertEqual(json['presences']['hamlet@zulip.com'][client]['status'], 'idle')
self.assertEqual(json['presences'].keys(), ['hamlet@zulip.com', 'othello@zulip.com'])
self.assertEqual(list(json['presences'].keys()), ['hamlet@zulip.com', 'othello@zulip.com'])
newer_timestamp = json['presences'][email][client]['timestamp']
self.assertGreaterEqual(newer_timestamp, timestamp)
@@ -1265,7 +1265,7 @@ class AlertWordTests(AuthedTestCase):
realm_words = alert_words_in_realm(user2.realm)
self.assertEqual(len(realm_words), 2)
self.assertEqual(realm_words.keys(), [user1.id, user2.id])
self.assertEqual(list(realm_words.keys()), [user1.id, user2.id])
self.assertEqual(realm_words[user1.id],
self.interesting_alert_word_list)
self.assertEqual(realm_words[user2.id], ['another'])

View File

@@ -1034,7 +1034,7 @@ def json_upload_file(request, user_profile):
if len(request.FILES) != 1:
return json_error("You may only upload one file at a time")
user_file = request.FILES.values()[0]
user_file = list(request.FILES.values())[0]
if ((settings.MAX_FILE_UPLOAD_SIZE * 1024 * 1024) < user_file._get_size()):
return json_error("File Upload is larger than allowed limit")

View File

@@ -186,8 +186,8 @@ class NarrowBuilder(object):
matching_streams = get_active_streams(self.user_profile.realm).filter(
name__iregex=r'^(un)*%s(\.d)*$' % (self._pg_re_escape(base_stream_name),))
matching_stream_ids = [matching_stream.id for matching_stream in matching_streams]
recipients = bulk_get_recipients(Recipient.STREAM, matching_stream_ids).values()
cond = column("recipient_id").in_([recipient.id for recipient in recipients])
recipients_map = bulk_get_recipients(Recipient.STREAM, matching_stream_ids)
cond = column("recipient_id").in_([recipient.id for recipient in recipients_map.values()])
return query.where(maybe_negate(cond))
recipient = get_recipient(Recipient.STREAM, type_id=stream.id)

View File

@@ -215,7 +215,7 @@ def filter_stream_authorization(user_profile, streams):
streams_subscribed = set()
recipients_map = bulk_get_recipients(Recipient.STREAM, [stream.id for stream in streams])
subs = Subscription.objects.filter(user_profile=user_profile,
recipient__in=recipients_map.values(),
recipient__in=list(recipients_map.values()),
active=True)
for sub in subs:

View File

@@ -167,7 +167,7 @@ def json_set_avatar(request, user_profile):
if len(request.FILES) != 1:
return json_error("You must upload exactly one avatar.")
user_file = request.FILES.values()[0]
user_file = list(request.FILES.values())[0]
upload_avatar_image(user_file, user_profile, user_profile.email)
do_change_avatar_source(user_profile, UserProfile.AVATAR_FROM_USER)
user_avatar_url = avatar_url(user_profile)

View File

@@ -129,7 +129,7 @@ def patch_bot_backend(request, user_profile, email,
if len(request.FILES) == 0:
pass
elif len(request.FILES) == 1:
user_file = request.FILES.values()[0]
user_file = list(request.FILES.values())[0]
upload_avatar_image(user_file, user_profile, bot.email)
avatar_source = UserProfile.AVATAR_FROM_USER
do_change_avatar_source(bot, avatar_source)
@@ -184,7 +184,7 @@ def add_bot_backend(request, user_profile, full_name=REQ, short_name=REQ,
elif len(request.FILES) != 1:
return json_error("You may only upload one file at a time")
else:
user_file = request.FILES.values()[0]
user_file = list(request.FILES.values())[0]
upload_avatar_image(user_file, user_profile, email)
avatar_source = UserProfile.AVATAR_FROM_USER

View File

@@ -50,7 +50,7 @@ def get_worker(queue_name):
return worker_classes[queue_name]()
def get_active_worker_queues():
return worker_classes.iterkeys()
return list(worker_classes.keys())
class QueueProcessingWorker(object):
def __init__(self):

View File

@@ -394,7 +394,7 @@ def restore_saved_messages():
clients[client.name] = client
print(datetime.datetime.now(), "Creating streams...")
bulk_create_streams(realms, stream_dict.values())
bulk_create_streams(realms, list(stream_dict.values()))
streams = {}
for stream in Stream.objects.all():