Apply Python 3 futurize transform libfuturize.fixes.fix_print_with_import.

Tim Abbott 2015-11-01 08:11:06 -08:00
parent f97649b35c
commit f3783fb4a1
69 changed files with 451 additions and 382 deletions
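
For context: the fixer named in the commit title, libfuturize.fixes.fix_print_with_import, rewrites Python 2 print statements as Python 3 style print() calls and adds "from __future__ import print_function" to every module it touches. It is normally run via the futurize tool, with an invocation along the lines of "futurize -f libfuturize.fixes.fix_print_with_import -w <files>" (the exact command is not recorded in this commit, so treat it as an assumption). The snippet below is an illustrative sketch, not part of the diff, summarizing the rewrite patterns that recur throughout the changed files:

# Illustrative sketch of the fixer's effect (not part of this diff).
# Python 2 input:
#     print "Hello", name          # bare print statement
#     print >>sys.stderr, "oops"   # print redirected to a stream
#     print value,                 # trailing comma suppresses the newline
# Rewritten output, valid on both Python 2 and Python 3:
from __future__ import print_function
import sys

name = "world"
value = 42

print("Hello", name)            # statement becomes a function call
print("oops", file=sys.stderr)  # ">>stream" becomes file=stream
print(value, end=' ')           # trailing comma becomes end=' '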

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
import datetime
import pytz
@ -19,6 +20,6 @@ class Command(BaseCommand):
date = datetime.datetime.now() - datetime.timedelta(days=1)
else:
date = datetime.datetime.strptime(options["date"], "%Y-%m-%d")
print "Activity data for", date
print activity_averages_during_day(date)
print "Please note that the total registered user count is a total for today"
print("Activity data for", date)
print(activity_averages_during_day(date))
print("Please note that the total registered user count is a total for today")

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
from django.core.management.base import BaseCommand
@ -63,7 +64,7 @@ def compute_stats(log_level):
logging.info("Top %6s | %s%%" % (size, round(top_percents[size], 1)))
grand_total = sum(total_counts.values())
print grand_total
print(grand_total)
logging.info("%15s | %s" % ("Client", "Percentage"))
for client in total_counts.keys():
logging.info("%15s | %s%%" % (client, round(100. * total_counts[client] / grand_total, 1)))

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from zerver.lib.statistics import seconds_usage_between
@ -16,7 +17,7 @@ def analyze_activity(options):
if options["realm"]:
user_profile_query = user_profile_query.filter(realm__domain=options["realm"])
print "Per-user online duration:\n"
print("Per-user online duration:\n")
total_duration = datetime.timedelta(0)
for user_profile in user_profile_query:
duration = seconds_usage_between(user_profile, day_start, day_end)
@ -25,11 +26,11 @@ def analyze_activity(options):
continue
total_duration += duration
print "%-*s%s" % (37, user_profile.email, duration, )
print("%-*s%s" % (37, user_profile.email, duration, ))
print "\nTotal Duration: %s" % (total_duration,)
print "\nTotal Duration in minutes: %s" % (total_duration.total_seconds() / 60.,)
print "Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)
print("\nTotal Duration: %s" % (total_duration,))
print("\nTotal Duration in minutes: %s" % (total_duration.total_seconds() / 60.,))
print("Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,))
class Command(BaseCommand):
help = """Report analytics of user activity on a per-user and realm basis.

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from django.db.models import Count
@ -48,8 +49,8 @@ python2.7 manage.py client_activity jesstess@zulip.com"""
counts.sort()
for count in counts:
print "%25s %15d" % (count[1], count[0])
print "Total:", total
print("%25s %15d" % (count[1], count[0]))
print("Total:", total)
def handle(self, *args, **options):
@ -70,5 +71,5 @@ python2.7 manage.py client_activity jesstess@zulip.com"""
self.compute_activity(UserActivity.objects.filter(
user_profile__realm=realm))
except Realm.DoesNotExist:
print "Unknown user or domain %s" % (arg,)
print("Unknown user or domain %s" % (arg,))
exit(1)

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
import datetime
import pytz
@ -65,45 +66,45 @@ class Command(BaseCommand):
fraction = 0.0
else:
fraction = numerator / float(denominator)
print "%.2f%% of" % (fraction * 100,), text
print("%.2f%% of" % (fraction * 100,), text)
def handle(self, *args, **options):
if options['realms']:
try:
realms = [get_realm(domain) for domain in options['realms']]
except Realm.DoesNotExist as e:
print e
print(e)
exit(1)
else:
realms = Realm.objects.all()
for realm in realms:
print realm.domain
print(realm.domain)
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
active_users = self.active_users(realm)
num_active = len(active_users)
print "%d active users (%d total)" % (num_active, len(user_profiles))
print("%d active users (%d total)" % (num_active, len(user_profiles)))
streams = Stream.objects.filter(realm=realm).extra(
tables=['zerver_subscription', 'zerver_recipient'],
where=['zerver_subscription.recipient_id = zerver_recipient.id',
'zerver_recipient.type = 2',
'zerver_recipient.type_id = zerver_stream.id',
'zerver_subscription.active = true']).annotate(count=Count("name"))
print "%d streams" % (streams.count(),)
print("%d streams" % (streams.count(),))
for days_ago in (1, 7, 30):
print "In last %d days, users sent:" % (days_ago,)
print("In last %d days, users sent:" % (days_ago,))
sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles]
for quantity in sorted(sender_quantities, reverse=True):
print quantity,
print ""
print(quantity, end=' ')
print("")
print "%d stream messages" % (self.stream_messages(realm, days_ago),)
print "%d one-on-one private messages" % (self.private_messages(realm, days_ago),)
print "%d messages sent via the API" % (self.api_messages(realm, days_ago),)
print "%d group private messages" % (self.group_private_messages(realm, days_ago),)
print("%d stream messages" % (self.stream_messages(realm, days_ago),))
print("%d one-on-one private messages" % (self.private_messages(realm, days_ago),))
print("%d messages sent via the API" % (self.api_messages(realm, days_ago),))
print("%d group private messages" % (self.group_private_messages(realm, days_ago),))
num_notifications_enabled = len(filter(lambda x: x.enable_desktop_notifications == True,
active_users))
@ -124,8 +125,8 @@ class Command(BaseCommand):
starrers = UserMessage.objects.filter(user_profile__in=user_profiles,
flags=UserMessage.flags.starred).values(
"user_profile").annotate(count=Count("user_profile"))
print "%d users have starred %d messages" % (
len(starrers), sum([elt["count"] for elt in starrers]))
print("%d users have starred %d messages" % (
len(starrers), sum([elt["count"] for elt in starrers])))
active_user_subs = Subscription.objects.filter(
user_profile__in=user_profiles, active=True)
@ -133,20 +134,20 @@ class Command(BaseCommand):
# Streams not in home view
non_home_view = active_user_subs.filter(in_home_view=False).values(
"user_profile").annotate(count=Count("user_profile"))
print "%d users have %d streams not in home view" % (
len(non_home_view), sum([elt["count"] for elt in non_home_view]))
print("%d users have %d streams not in home view" % (
len(non_home_view), sum([elt["count"] for elt in non_home_view])))
# Code block markup
markup_messages = human_messages.filter(
sender__realm=realm, content__contains="~~~").values(
"sender").annotate(count=Count("sender"))
print "%d users have used code block markup on %s messages" % (
len(markup_messages), sum([elt["count"] for elt in markup_messages]))
print("%d users have used code block markup on %s messages" % (
len(markup_messages), sum([elt["count"] for elt in markup_messages])))
# Notifications for stream messages
notifications = active_user_subs.filter(notifications=True).values(
"user_profile").annotate(count=Count("user_profile"))
print "%d users receive desktop notifications for %d streams" % (
len(notifications), sum([elt["count"] for elt in notifications]))
print("%d users receive desktop notifications for %d streams" % (
len(notifications), sum([elt["count"] for elt in notifications])))
print ""
print("")

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from django.db.models import Q
@ -16,25 +17,25 @@ class Command(BaseCommand):
try:
realms = [get_realm(domain) for domain in options['realms']]
except Realm.DoesNotExist as e:
print e
print(e)
exit(1)
else:
realms = Realm.objects.all()
for realm in realms:
print realm.domain
print "------------"
print "%25s %15s %10s" % ("stream", "subscribers", "messages")
print(realm.domain)
print("------------")
print("%25s %15s %10s" % ("stream", "subscribers", "messages"))
streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-"))
invite_only_count = 0
for stream in streams:
if stream.invite_only:
invite_only_count += 1
continue
print "%25s" % (stream.name,),
print("%25s" % (stream.name,), end=' ')
recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
print "%10d" % (len(Subscription.objects.filter(recipient=recipient, active=True)),),
print("%10d" % (len(Subscription.objects.filter(recipient=recipient, active=True)),), end=' ')
num_messages = len(Message.objects.filter(recipient=recipient))
print "%12d" % (num_messages,)
print "%d invite-only streams" % (invite_only_count,)
print ""
print("%12d" % (num_messages,))
print("%d invite-only streams" % (invite_only_count,))
print("")

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
import datetime
import pytz
@ -23,19 +24,19 @@ class Command(BaseCommand):
try:
realms = [get_realm(domain) for domain in options['realms']]
except Realm.DoesNotExist as e:
print e
print(e)
exit(1)
else:
realms = Realm.objects.all()
for realm in realms:
print realm.domain
print(realm.domain)
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
print "%d users" % (len(user_profiles),)
print "%d streams" % (len(Stream.objects.filter(realm=realm)),)
print("%d users" % (len(user_profiles),))
print("%d streams" % (len(Stream.objects.filter(realm=realm)),))
for user_profile in user_profiles:
print "%35s" % (user_profile.email,),
print("%35s" % (user_profile.email,), end=' ')
for week in range(10):
print "%5d" % (self.messages_sent_by(user_profile, week)),
print ""
print("%5d" % (self.messages_sent_by(user_profile, week)), end=' ')
print("")

View File

@ -7,6 +7,7 @@
# 2007 Trolltech ASA
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
#
from __future__ import print_function
import sys
if sys.hexversion < 0x02040000:
# The limiter is the subprocess module
@ -693,7 +694,7 @@ def createOrUpdateBranchesFromOrigin(localRefPrefix = "refs/remotes/p4/", silent
update = False
if not gitBranchExists(remoteHead):
if verbose:
print "creating %s" % remoteHead
print("creating %s" % remoteHead)
update = True
else:
settings = extractSettingsGitLog(extractLogMessageFromGitCommit(remoteHead))
@ -895,9 +896,9 @@ class P4Debug(Command):
def run(self, args):
j = 0
for output in p4CmdList(args):
print 'Element: %d' % j
print('Element: %d' % j)
j += 1
print output
print(output)
return True
class P4RollBack(Command):
@ -938,14 +939,14 @@ class P4RollBack(Command):
if len(p4Cmd("changes -m 1 " + ' '.join (['%s...@%s' % (p, maxChange)
for p in depotPaths]))) == 0:
print "Branch %s did not exist at change %s, deleting." % (ref, maxChange)
print("Branch %s did not exist at change %s, deleting." % (ref, maxChange))
system("git update-ref -d %s `git rev-parse %s`" % (ref, ref))
continue
while change and int(change) > maxChange:
changed = True
if self.verbose:
print "%s is at %s ; rewinding towards %s" % (ref, change, maxChange)
print("%s is at %s ; rewinding towards %s" % (ref, change, maxChange))
system("git update-ref %s \"%s^\"" % (ref, ref))
log = extractLogMessageFromGitCommit(ref)
settings = extractSettingsGitLog(log)
@ -955,7 +956,7 @@ class P4RollBack(Command):
change = settings['change']
if changed:
print "%s rewound to %s" % (ref, change)
print("%s rewound to %s" % (ref, change))
return True
@ -1069,10 +1070,10 @@ class P4Submit(Command, P4UserMap):
except:
# cleanup our temporary file
os.unlink(outFileName)
print "Failed to strip RCS keywords in %s" % file
print("Failed to strip RCS keywords in %s" % file)
raise
print "Patched up RCS keywords in %s" % file
print("Patched up RCS keywords in %s" % file)
def p4UserForCommit(self, id):
# Return the tuple (perforce user,git email) for a given git commit id
@ -1092,7 +1093,7 @@ class P4Submit(Command, P4UserMap):
if not user:
msg = "Cannot find p4 user for email %s in commit %s." % (email, id)
if gitConfigBool("git-p4.allowMissingP4Users"):
print "%s" % msg
print("%s" % msg)
else:
die("Error: %s\nSet git-p4.allowMissingP4Users to true to allow this." % msg)
@ -1219,8 +1220,8 @@ class P4Submit(Command, P4UserMap):
def applyCommit(self, id):
"""Apply one commit, return True if it succeeded."""
print "Applying", read_pipe(["git", "show", "-s",
"--format=format:%h %s", id])
print("Applying", read_pipe(["git", "show", "-s",
"--format=format:%h %s", id]))
(p4User, gitEmail) = self.p4UserForCommit(id)
@ -1298,7 +1299,7 @@ class P4Submit(Command, P4UserMap):
if os.system(tryPatchCmd) != 0:
fixed_rcs_keywords = False
patch_succeeded = False
print "Unfortunately applying the change failed!"
print("Unfortunately applying the change failed!")
# Patch failed, maybe it's just RCS keyword woes. Look through
# the patch to see if that's possible.
@ -1316,13 +1317,13 @@ class P4Submit(Command, P4UserMap):
for line in read_pipe_lines(["git", "diff", "%s^..%s" % (id, id), file]):
if regexp.search(line):
if verbose:
print "got keyword match on %s in %s in %s" % (pattern, line, file)
print("got keyword match on %s in %s in %s" % (pattern, line, file))
kwfiles[file] = pattern
break
for file in kwfiles:
if verbose:
print "zapping %s with %s" % (line, pattern)
print("zapping %s with %s" % (line, pattern))
# File is being deleted, so not open in p4. Must
# disable the read-only bit on windows.
if self.isWindows and file not in editedFiles:
@ -1331,7 +1332,7 @@ class P4Submit(Command, P4UserMap):
fixed_rcs_keywords = True
if fixed_rcs_keywords:
print "Retrying the patch with RCS keywords cleaned up"
print("Retrying the patch with RCS keywords cleaned up")
if os.system(tryPatchCmd) == 0:
patch_succeeded = True
@ -1411,34 +1412,34 @@ class P4Submit(Command, P4UserMap):
# Leave the p4 tree prepared, and the submit template around
# and let the user decide what to do next
#
print
print "P4 workspace prepared for submission."
print "To submit or revert, go to client workspace"
print " " + self.clientPath
print
print "To submit, use \"p4 submit\" to write a new description,"
print "or \"p4 submit -i %s\" to use the one prepared by" \
" \"git p4\"." % fileName
print "You can delete the file \"%s\" when finished." % fileName
print()
print("P4 workspace prepared for submission.")
print("To submit or revert, go to client workspace")
print(" " + self.clientPath)
print()
print("To submit, use \"p4 submit\" to write a new description,")
print("or \"p4 submit -i %s\" to use the one prepared by" \
" \"git p4\"." % fileName)
print("You can delete the file \"%s\" when finished." % fileName)
if self.preserveUser and p4User and not self.p4UserIsMe(p4User):
print "To preserve change ownership by user %s, you must\n" \
print("To preserve change ownership by user %s, you must\n" \
"do \"p4 change -f <change>\" after submitting and\n" \
"edit the User field."
"edit the User field.")
if pureRenameCopy:
print "After submitting, renamed files must be re-synced."
print "Invoke \"p4 sync -f\" on each of these files:"
print("After submitting, renamed files must be re-synced.")
print("Invoke \"p4 sync -f\" on each of these files:")
for f in pureRenameCopy:
print " " + f
print(" " + f)
print
print "To revert the changes, use \"p4 revert ...\", and delete"
print "the submit template file \"%s\"" % fileName
print()
print("To revert the changes, use \"p4 revert ...\", and delete")
print("the submit template file \"%s\"" % fileName)
if filesToAdd:
print "Since the commit adds new files, they must be deleted:"
print("Since the commit adds new files, they must be deleted:")
for f in filesToAdd:
print " " + f
print
print(" " + f)
print()
return True
#
@ -1471,7 +1472,7 @@ class P4Submit(Command, P4UserMap):
else:
# skip this patch
ret = False
print "Submission cancelled, undoing p4 changes."
print("Submission cancelled, undoing p4 changes.")
for f in editedFiles:
p4_revert(f)
for f in filesToAdd:
@ -1495,7 +1496,7 @@ class P4Submit(Command, P4UserMap):
if not m.match(name):
if verbose:
print "tag %s does not match regexp %s" % (name, validLabelRegexp)
print("tag %s does not match regexp %s" % (name, validLabelRegexp))
continue
# Get the p4 commit this corresponds to
@ -1505,7 +1506,7 @@ class P4Submit(Command, P4UserMap):
if 'change' not in values:
# a tag pointing to something not sent to p4; ignore
if verbose:
print "git tag %s does not give a p4 commit" % name
print("git tag %s does not give a p4 commit" % name)
continue
else:
changelist = values['change']
@ -1540,10 +1541,10 @@ class P4Submit(Command, P4UserMap):
labelTemplate += "\t%s\n" % depot_side
if self.dry_run:
print "Would create p4 label %s for tag" % name
print("Would create p4 label %s for tag" % name)
elif self.prepare_p4_only:
print "Not creating p4 label %s for tag due to option" \
" --prepare-p4-only" % name
print("Not creating p4 label %s for tag due to option" \
" --prepare-p4-only" % name)
else:
p4_write_pipe(["label", "-i"], labelTemplate)
@ -1552,7 +1553,7 @@ class P4Submit(Command, P4UserMap):
["%s@%s" % (depot_side, changelist) for depot_side in clientSpec.mappings])
if verbose:
print "created p4 label for tag %s" % name
print("created p4 label for tag %s" % name)
def run(self, args):
if len(args) == 0:
@ -1590,10 +1591,10 @@ class P4Submit(Command, P4UserMap):
self.conflict_behavior = val
if self.verbose:
print "Origin branch is " + self.origin
print("Origin branch is " + self.origin)
if len(self.depotPath) == 0:
print "Internal error: cannot locate perforce depot path from existing branches"
print("Internal error: cannot locate perforce depot path from existing branches")
sys.exit(128)
self.useClientSpec = False
@ -1611,7 +1612,7 @@ class P4Submit(Command, P4UserMap):
if self.clientPath == "":
die("Error: Cannot locate perforce checkout of %s in client view" % self.depotPath)
print "Perforce checkout for depot path %s located at %s" % (self.depotPath, self.clientPath)
print("Perforce checkout for depot path %s located at %s" % (self.depotPath, self.clientPath))
self.oldWorkingDirectory = os.getcwd()
# ensure the clientPath exists
@ -1622,9 +1623,9 @@ class P4Submit(Command, P4UserMap):
chdir(self.clientPath, is_client_path=True)
if self.dry_run:
print "Would synchronize p4 checkout in %s" % self.clientPath
print("Would synchronize p4 checkout in %s" % self.clientPath)
else:
print "Synchronizing p4 checkout..."
print("Synchronizing p4 checkout...")
if new_client_dir:
# old one was destroyed, and maybe nobody told p4
p4_sync("...", "-f")
@ -1681,13 +1682,13 @@ class P4Submit(Command, P4UserMap):
# continue to try the rest of the patches, or quit.
#
if self.dry_run:
print "Would apply"
print("Would apply")
applied = []
last = len(commits) - 1
for i, commit in enumerate(commits):
if self.dry_run:
print " ", read_pipe(["git", "show", "-s",
"--format=format:%h %s", commit])
print(" ", read_pipe(["git", "show", "-s",
"--format=format:%h %s", commit]))
ok = True
else:
ok = self.applyCommit(commit)
@ -1695,15 +1696,15 @@ class P4Submit(Command, P4UserMap):
applied.append(commit)
else:
if self.prepare_p4_only and i < last:
print "Processing only the first commit due to option" \
" --prepare-p4-only"
print("Processing only the first commit due to option" \
" --prepare-p4-only")
break
if i < last:
quit = False
while True:
# prompt for what to do, or use the option/variable
if self.conflict_behavior == "ask":
print "What do you want to do?"
print("What do you want to do?")
response = raw_input("[s]kip this commit but apply"
" the rest, or [q]uit? ")
if not response:
@ -1717,10 +1718,10 @@ class P4Submit(Command, P4UserMap):
self.conflict_behavior)
if response[0] == "s":
print "Skipping this commit, but applying the rest"
print("Skipping this commit, but applying the rest")
break
if response[0] == "q":
print "Quitting"
print("Quitting")
quit = True
break
if quit:
@ -1733,7 +1734,7 @@ class P4Submit(Command, P4UserMap):
elif self.prepare_p4_only:
pass
elif len(commits) == len(applied):
print "All commits applied!"
print("All commits applied!")
sync = P4Sync()
if self.branch:
@ -1745,17 +1746,17 @@ class P4Submit(Command, P4UserMap):
else:
if len(applied) == 0:
print "No commits applied."
print("No commits applied.")
else:
print "Applied only the commits marked with '*':"
print("Applied only the commits marked with '*':")
for c in commits:
if c in applied:
star = "*"
else:
star = " "
print star, read_pipe(["git", "show", "-s",
"--format=format:%h %s", c])
print "You will have to do 'git p4 sync' and rebase."
print(star, read_pipe(["git", "show", "-s",
"--format=format:%h %s", c]))
print("You will have to do 'git p4 sync' and rebase.")
if gitConfigBool("git-p4.exportLabels"):
self.exportLabels = True
@ -1931,7 +1932,7 @@ class P4Sync(Command, P4UserMap):
self.gitStream.write("progress checkpoint\n\n")
out = self.gitOutput.readline()
if self.verbose:
print "checkpoint finished: " + out
print("checkpoint finished: " + out)
def extractFilesFromCommit(self, commit):
self.cloneExclude = [re.sub(r"\.\.\.$", "", path)
@ -2084,7 +2085,7 @@ class P4Sync(Command, P4UserMap):
# Ideally, someday, this script can learn how to generate
# appledouble files directly and import those to git, but
# non-mac machines can never find a use for apple filetype.
print "\nIgnoring apple filetype file %s" % file['depotFile']
print("\nIgnoring apple filetype file %s" % file['depotFile'])
return
# Note that we do not try to de-mangle keywords on utf16 files,
@ -2208,7 +2209,7 @@ class P4Sync(Command, P4UserMap):
# Stream a p4 tag
def streamTag(self, gitStream, labelName, labelDetails, commit, epoch):
if verbose:
print "writing tag %s for commit %s" % (labelName, commit)
print("writing tag %s for commit %s" % (labelName, commit))
gitStream.write("tag %s\n" % labelName)
gitStream.write("from %s\n" % commit)
@ -2227,7 +2228,7 @@ class P4Sync(Command, P4UserMap):
gitStream.write("tagger %s\n" % tagger)
print "labelDetails=", labelDetails
print("labelDetails=", labelDetails)
if 'Description' in labelDetails:
description = labelDetails['Description']
else:
@ -2242,7 +2243,7 @@ class P4Sync(Command, P4UserMap):
author = details["user"]
if self.verbose:
print "commit into %s" % branch
print("commit into %s" % branch)
# start with reading files; if that fails, we should not
# create a commit.
@ -2276,7 +2277,7 @@ class P4Sync(Command, P4UserMap):
if len(parent) > 0:
if self.verbose:
print "parent %s" % parent
print("parent %s" % parent)
self.gitStream.write("from %s\n" % parent)
self.streamP4Files(new_files)
@ -2289,7 +2290,7 @@ class P4Sync(Command, P4UserMap):
labelDetails = label[0]
labelRevisions = label[1]
if self.verbose:
print "Change %s is labelled %s" % (change, labelDetails)
print("Change %s is labelled %s" % (change, labelDetails))
files = p4CmdList(["files"] + ["%s...@%s" % (p, change)
for p in self.branchPrefixes])
@ -2321,14 +2322,14 @@ class P4Sync(Command, P4UserMap):
l = p4CmdList(["labels"] + ["%s..." % p for p in self.depotPaths])
if len(l) > 0 and not self.silent:
print "Finding files belonging to labels in %s" % repr(self.depotPaths)
print("Finding files belonging to labels in %s" % repr(self.depotPaths))
for output in l:
label = output["label"]
revisions = {}
newestChange = 0
if self.verbose:
print "Querying files for label %s" % label
print("Querying files for label %s" % label)
for file in p4CmdList(["files"] +
["%s...@%s" % (p, label)
for p in self.depotPaths]):
@ -2340,7 +2341,7 @@ class P4Sync(Command, P4UserMap):
self.labels[newestChange] = [output, revisions]
if self.verbose:
print "Label changes: %s" % self.labels.keys()
print("Label changes: %s" % self.labels.keys())
# Import p4 labels as git tags. A direct mapping does not
# exist, so assume that if all the files are at the same revision
@ -2348,7 +2349,7 @@ class P4Sync(Command, P4UserMap):
# just ignore.
def importP4Labels(self, stream, p4Labels):
if verbose:
print "import p4 labels: " + ' '.join(p4Labels)
print("import p4 labels: " + ' '.join(p4Labels))
ignoredP4Labels = gitConfigList("git-p4.ignoredP4Labels")
validLabelRegexp = gitConfig("git-p4.labelImportRegexp")
@ -2361,7 +2362,7 @@ class P4Sync(Command, P4UserMap):
if not m.match(name):
if verbose:
print "label %s does not match regexp %s" % (name, validLabelRegexp)
print("label %s does not match regexp %s" % (name, validLabelRegexp))
continue
if name in ignoredP4Labels:
@ -2379,7 +2380,7 @@ class P4Sync(Command, P4UserMap):
gitCommit = read_pipe(["git", "rev-list", "--max-count=1",
"--reverse", ":/\[git-p4:.*change = %d\]" % changelist])
if len(gitCommit) == 0:
print "could not find git commit for changelist %d" % changelist
print("could not find git commit for changelist %d" % changelist)
else:
gitCommit = gitCommit.strip()
commitFound = True
@ -2387,16 +2388,16 @@ class P4Sync(Command, P4UserMap):
try:
tmwhen = time.strptime(labelDetails['Update'], "%Y/%m/%d %H:%M:%S")
except ValueError:
print "Could not convert label time %s" % labelDetails['Update']
print("Could not convert label time %s" % labelDetails['Update'])
tmwhen = 1
when = int(time.mktime(tmwhen))
self.streamTag(stream, name, labelDetails, gitCommit, when)
if verbose:
print "p4 label %s mapped to git commit %s" % (name, gitCommit)
print("p4 label %s mapped to git commit %s" % (name, gitCommit))
else:
if verbose:
print "Label %s has no changelists - possibly deleted?" % name
print("Label %s has no changelists - possibly deleted?" % name)
if not commitFound:
# We can't import this label; don't try again as it will get very
@ -2441,8 +2442,8 @@ class P4Sync(Command, P4UserMap):
if destination in self.knownBranches:
if not self.silent:
print "p4 branch %s defines a mapping from %s to %s" % (info["branch"], source, destination)
print "but there exists another mapping from %s to %s already!" % (self.knownBranches[destination], destination)
print("p4 branch %s defines a mapping from %s to %s" % (info["branch"], source, destination))
print("but there exists another mapping from %s to %s already!" % (self.knownBranches[destination], destination))
continue
self.knownBranches[destination] = source
@ -2506,28 +2507,28 @@ class P4Sync(Command, P4UserMap):
def gitCommitByP4Change(self, ref, change):
if self.verbose:
print "looking in ref " + ref + " for change %s using bisect..." % change
print("looking in ref " + ref + " for change %s using bisect..." % change)
earliestCommit = ""
latestCommit = parseRevision(ref)
while True:
if self.verbose:
print "trying: earliest %s latest %s" % (earliestCommit, latestCommit)
print("trying: earliest %s latest %s" % (earliestCommit, latestCommit))
next = read_pipe("git rev-list --bisect %s %s" % (latestCommit, earliestCommit)).strip()
if len(next) == 0:
if self.verbose:
print "argh"
print("argh")
return ""
log = extractLogMessageFromGitCommit(next)
settings = extractSettingsGitLog(log)
currentChange = int(settings['change'])
if self.verbose:
print "current change %s" % currentChange
print("current change %s" % currentChange)
if currentChange == change:
if self.verbose:
print "found %s" % next
print("found %s" % next)
return next
if currentChange < change:
@ -2573,7 +2574,7 @@ class P4Sync(Command, P4UserMap):
if len(read_pipe(["git", "diff-tree", blob, target])) == 0:
parentFound = True
if self.verbose:
print "Found parent of %s in commit %s" % (branch, blob)
print("Found parent of %s in commit %s" % (branch, blob))
break
if parentFound:
return blob
@ -2604,7 +2605,7 @@ class P4Sync(Command, P4UserMap):
filesForCommit = branches[branch]
if self.verbose:
print "branch is %s" % branch
print("branch is %s" % branch)
self.updatedBranches.add(branch)
@ -2625,13 +2626,13 @@ class P4Sync(Command, P4UserMap):
print("\n Resuming with change %s" % change);
if self.verbose:
print "parent determined through known branches: %s" % parent
print("parent determined through known branches: %s" % parent)
branch = self.gitRefForBranch(branch)
parent = self.gitRefForBranch(parent)
if self.verbose:
print "looking for initial parent for %s; current parent is %s" % (branch, parent)
print("looking for initial parent for %s; current parent is %s" % (branch, parent))
if len(parent) == 0 and branch in self.initialParents:
parent = self.initialParents[branch]
@ -2641,7 +2642,7 @@ class P4Sync(Command, P4UserMap):
if len(parent) > 0:
tempBranch = "%s/%d" % (self.tempBranchLocation, change)
if self.verbose:
print "Creating temporary branch: " + tempBranch
print("Creating temporary branch: " + tempBranch)
self.commit(description, filesForCommit, tempBranch)
self.tempBranches.append(tempBranch)
self.checkpoint()
@ -2650,7 +2651,7 @@ class P4Sync(Command, P4UserMap):
self.commit(description, filesForCommit, branch, blob)
else:
if self.verbose:
print "Parent of %s not found. Committing into head of %s" % (branch, parent)
print("Parent of %s not found. Committing into head of %s" % (branch, parent))
self.commit(description, filesForCommit, branch, parent)
else:
files = self.extractFilesFromCommit(description)
@ -2659,11 +2660,11 @@ class P4Sync(Command, P4UserMap):
# only needed once, to connect to the previous commit
self.initialParent = ""
except IOError:
print self.gitError.read()
print(self.gitError.read())
sys.exit(1)
def importHeadRevision(self, revision):
print "Doing initial import of %s from revision %s into %s" % (' '.join(self.depotPaths), revision, self.branch)
print("Doing initial import of %s from revision %s into %s" % (' '.join(self.depotPaths), revision, self.branch))
details = {}
details["user"] = "git perforce import user"
@ -2715,8 +2716,8 @@ class P4Sync(Command, P4UserMap):
try:
self.commit(details, self.extractFilesFromCommit(details), self.branch)
except IOError:
print "IO error with git fast-import. Is your git version recent enough?"
print self.gitError.read()
print("IO error with git fast-import. Is your git version recent enough?")
print(self.gitError.read())
def run(self, args):
@ -2738,7 +2739,7 @@ class P4Sync(Command, P4UserMap):
self.hasOrigin = originP4BranchesExist()
if self.hasOrigin:
if not self.silent:
print 'Syncing with origin first, using "git fetch origin"'
print('Syncing with origin first, using "git fetch origin"')
system("git fetch origin")
branch_arg_given = bool(self.branch)
@ -2777,14 +2778,14 @@ class P4Sync(Command, P4UserMap):
if len(self.p4BranchesInGit) > 1:
if not self.silent:
print "Importing from/into multiple branches"
print("Importing from/into multiple branches")
self.detectBranches = True
for branch in branches.keys():
self.initialParents[self.refPrefix + branch] = \
branches[branch]
if self.verbose:
print "branches: %s" % self.p4BranchesInGit
print("branches: %s" % self.p4BranchesInGit)
p4Change = 0
for branch in self.p4BranchesInGit:
@ -2819,7 +2820,7 @@ class P4Sync(Command, P4UserMap):
self.depotPaths = sorted(self.previousDepotPaths)
self.changeRange = "@%s,#head" % p4Change
if not self.silent and not self.detectBranches:
print "Performing incremental import into %s git branch" % self.branch
print("Performing incremental import into %s git branch" % self.branch)
# accept multiple ref name abbreviations:
# refs/foo/bar/branch -> use it exactly
@ -2836,7 +2837,7 @@ class P4Sync(Command, P4UserMap):
if len(args) == 0 and self.depotPaths:
if not self.silent:
print "Depot paths: %s" % ' '.join(self.depotPaths)
print("Depot paths: %s" % ' '.join(self.depotPaths))
else:
if self.depotPaths and self.depotPaths != args:
print ("previous import used depot path %s and now %s was specified. "
@ -2906,8 +2907,8 @@ class P4Sync(Command, P4UserMap):
else:
self.getBranchMapping()
if self.verbose:
print "p4-git branches: %s" % self.p4BranchesInGit
print "initial parents: %s" % self.initialParents
print("p4-git branches: %s" % self.p4BranchesInGit)
print("initial parents: %s" % self.initialParents)
for b in self.p4BranchesInGit:
if b != "master":
@ -2959,8 +2960,8 @@ class P4Sync(Command, P4UserMap):
self.branch)
if self.verbose:
print "Getting p4 changes for %s...%s" % (', '.join(self.depotPaths),
self.changeRange)
print("Getting p4 changes for %s...%s" % (', '.join(self.depotPaths),
self.changeRange))
changes = p4ChangesForPaths(self.depotPaths, self.changeRange)
if len(self.maxChanges) > 0:
@ -2968,10 +2969,10 @@ class P4Sync(Command, P4UserMap):
if len(changes) == 0:
if not self.silent:
print "No changes to import!"
print("No changes to import!")
else:
if not self.silent and not self.detectBranches:
print "Import destination: %s" % self.branch
print("Import destination: %s" % self.branch)
self.updatedBranches = set()
@ -2986,7 +2987,7 @@ class P4Sync(Command, P4UserMap):
self.importChanges(changes)
if not self.silent:
print ""
print("")
if len(self.updatedBranches) > 0:
sys.stdout.write("Updated branches: ")
for b in self.updatedBranches:
@ -3054,7 +3055,7 @@ class P4Rebase(Command):
# the branchpoint may be p4/foo~3, so strip off the parent
upstream = re.sub("~[0-9]+$", "", upstream)
print "Rebasing the current branch onto %s" % upstream
print("Rebasing the current branch onto %s" % upstream)
oldHead = read_pipe("git rev-parse HEAD").strip()
system("git rebase %s" % upstream)
system("git diff-tree --stat --summary -M %s HEAD" % oldHead)
@ -3117,7 +3118,7 @@ class P4Clone(P4Sync):
if not self.cloneDestination:
self.cloneDestination = self.defaultDestination(args)
print "Importing from %s into %s" % (', '.join(depotPaths), self.cloneDestination)
print("Importing from %s into %s" % (', '.join(depotPaths), self.cloneDestination))
if not os.path.exists(self.cloneDestination):
os.makedirs(self.cloneDestination)
@ -3139,8 +3140,8 @@ class P4Clone(P4Sync):
if not self.cloneBare:
system([ "git", "checkout", "-f" ])
else:
print 'Not checking out any branch, use ' \
'"git checkout -q -b master <branch>"'
print('Not checking out any branch, use ' \
'"git checkout -q -b master <branch>"')
# auto-set this variable if invoked with --use-client-spec
if self.useClientSpec_from_options:
@ -3173,7 +3174,7 @@ class P4Branches(Command):
log = extractLogMessageFromGitCommit("refs/remotes/%s" % branch)
settings = extractSettingsGitLog(log)
print "%s <= %s (%s)" % (branch, ",".join(settings["depot-paths"]), settings["change"])
print("%s <= %s (%s)" % (branch, ",".join(settings["depot-paths"]), settings["change"]))
return True
class HelpFormatter(optparse.IndentedHelpFormatter):
@ -3187,12 +3188,12 @@ class HelpFormatter(optparse.IndentedHelpFormatter):
return ""
def printUsage(commands):
print "usage: %s <command> [options]" % sys.argv[0]
print ""
print "valid commands: %s" % ", ".join(commands)
print ""
print "Try %s <command> --help for command specific help." % sys.argv[0]
print ""
print("usage: %s <command> [options]" % sys.argv[0])
print("")
print("valid commands: %s" % ", ".join(commands))
print("")
print("Try %s <command> --help for command specific help." % sys.argv[0])
print("")
commands = {
"debug" : P4Debug,
@ -3216,8 +3217,8 @@ def main():
klass = commands[cmdName]
cmd = klass()
except KeyError:
print "unknown command %s" % cmdName
print ""
print("unknown command %s" % cmdName)
print("")
printUsage(commands.keys())
sys.exit(2)

View File

@ -33,6 +33,7 @@ For example:
1234 //depot/security/src/
'''
from __future__ import print_function
import os
import sys
@ -59,12 +60,12 @@ try:
changelist = int(sys.argv[1])
changeroot = sys.argv[2]
except IndexError:
print >> sys.stderr, "Wrong number of arguments.\n\n",
print >> sys.stderr, __doc__
print("Wrong number of arguments.\n\n", end=' ', file=sys.stderr)
print(__doc__, file=sys.stderr)
sys.exit(-1)
except ValueError:
print >> sys.stderr, "First argument must be an integer.\n\n",
print >> sys.stderr, __doc__
print("First argument must be an integer.\n\n", end=' ', file=sys.stderr)
print(__doc__, file=sys.stderr)
sys.exit(-1)
metadata = git_p4.p4_describe(changelist)

View File

@ -1,6 +1,7 @@
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import sys
@ -60,13 +61,13 @@ except ImportError:
try:
import simplejson
except ImportError:
print >>sys.stderr, "simplejson is not installed"
print("simplejson is not installed", file=sys.stderr)
sys.exit(1)
try:
import requests
assert(LooseVersion(requests.__version__) >= LooseVersion('0.12.1'))
except (ImportError, AssertionError):
print >>sys.stderr, "requests >=0.12.1 is not installed"
print("requests >=0.12.1 is not installed", file=sys.stderr)
sys.exit(1)

View File

@ -20,6 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function
import simplejson
import requests
import time
@ -87,7 +88,7 @@ class RandomExponentialBackoff(CountingBackoff):
try:
logger.warning(message)
except NameError:
print message
print(message)
time.sleep(delay)
def _default_client():
@ -273,9 +274,9 @@ class Client(object):
def end_error_retry(succeeded):
if query_state["had_error_retry"] and self.verbose:
if succeeded:
print "Success!"
print("Success!")
else:
print "Failed!"
print("Failed!")
while True:
try:
@ -364,7 +365,7 @@ class Client(object):
if 'error' in res.get('result'):
if self.verbose:
print "Server returned error:\n%s" % res['msg']
print("Server returned error:\n%s" % res['msg'])
time.sleep(1)
else:
return (res['queue_id'], res['last_event_id'])
@ -378,13 +379,13 @@ class Client(object):
if 'error' in res.get('result'):
if res["result"] == "http-error":
if self.verbose:
print "HTTP error fetching events -- probably a server restart"
print("HTTP error fetching events -- probably a server restart")
elif res["result"] == "connection-error":
if self.verbose:
print "Connection error fetching events -- probably server is temporarily down?"
print("Connection error fetching events -- probably server is temporarily down?")
else:
if self.verbose:
print "Server returned error:\n%s" % res["msg"]
print("Server returned error:\n%s" % res["msg"])
if res["msg"].startswith("Bad event queue id:"):
# Our event queue went away, probably because
# we were asleep or the server restarted

View File

@ -6,6 +6,7 @@
# Setup: First, you need to install python-irc version 8.5.3
# (https://bitbucket.org/jaraco/irc)
from __future__ import print_function
import irc.bot
import irc.strings
from irc.client import ip_numstr_to_quad, ip_quad_to_numstr
@ -53,12 +54,12 @@ class IRCBot(irc.bot.SingleServerIRCBot):
return
# Forward the PM to Zulip
print zulip_client.send_message({
print(zulip_client.send_message({
"sender": sender,
"type": "private",
"to": "username@example.com",
"content": content,
})
}))
def on_pubmsg(self, c, e):
content = e.arguments[0]
@ -68,14 +69,14 @@ class IRCBot(irc.bot.SingleServerIRCBot):
return
# Forward the stream message to Zulip
print zulip_client.send_message({
print(zulip_client.send_message({
"forged": "yes",
"sender": sender,
"type": "stream",
"to": stream,
"subject": "IRC",
"content": content,
})
}))
def on_dccmsg(self, c, e):
c.privmsg("You said: " + e.arguments[0])

View File

@ -21,6 +21,7 @@
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import print_function
import sys
import subprocess
import os
@ -39,7 +40,7 @@ args.extend(sys.argv[1:])
backoff = RandomExponentialBackoff(timeout_success_equivalent=300)
while backoff.keep_going():
print "Starting Jabber mirroring bot"
print("Starting Jabber mirroring bot")
try:
ret = subprocess.call(args)
except:
@ -51,9 +52,9 @@ while backoff.keep_going():
backoff.fail()
print ""
print ""
print "ERROR: The Jabber mirroring bot is unable to continue mirroring Jabber."
print "Please contact zulip-devel@googlegroups.com if you need assistance."
print ""
print("")
print("")
print("ERROR: The Jabber mirroring bot is unable to continue mirroring Jabber.")
print("Please contact zulip-devel@googlegroups.com if you need assistance.")
print("")
sys.exit(1)

View File

@ -1,3 +1,4 @@
from __future__ import print_function
# This is hacky code to analyze data on our support stream. The main
# reusable bits are get_recent_messages and get_words.
@ -31,7 +32,7 @@ def analyze_messages(msgs, word_count, email_count):
if False:
if ' ack' in msg['content']:
name = msg['sender_full_name'].split()[0]
print 'ACK', name
print('ACK', name)
m = re.search('ticket (Z....).*email: (\S+).*~~~(.*)', msg['content'], re.M | re.S)
if m:
ticket, email, req = m.groups()
@ -40,9 +41,9 @@ def analyze_messages(msgs, word_count, email_count):
word_count[word] += 1
email_count[email] += 1
if False:
print
print()
for k, v in msg.items():
print '%-20s: %s' % (k, v)
print('%-20s: %s' % (k, v))
def generate_support_stats():
client = zulip.Client()
@ -68,12 +69,12 @@ def generate_support_stats():
words = filter(lambda w: len(w) >= 5, words)
words = sorted(words, key=lambda w: word_count[w], reverse=True)
for word in words:
print word, word_count[word]
print(word, word_count[word])
if False:
emails = email_count.keys()
emails = sorted(emails, key=lambda w: email_count[w], reverse=True)
for email in emails:
print email, email_count[email]
print(email, email_count[email])
generate_support_stats()

View File

@ -22,6 +22,7 @@
# SOFTWARE.
from __future__ import absolute_import
from __future__ import print_function
import sys
import subprocess
import os
@ -53,30 +54,30 @@ if options.forward_class_messages and not options.noshard:
if options.on_startup_command is not None:
subprocess.call([options.on_startup_command])
from zerver.lib.parallel import run_parallel
print "Starting parallel zephyr class mirroring bot"
print("Starting parallel zephyr class mirroring bot")
jobs = list("0123456789abcdef")
def run_job(shard):
subprocess.call(args + ["--shard=%s" % (shard,)])
return 0
for (status, job) in run_parallel(run_job, jobs, threads=16):
print "A mirroring shard died!"
print("A mirroring shard died!")
pass
sys.exit(0)
backoff = RandomExponentialBackoff(timeout_success_equivalent=300)
while backoff.keep_going():
print "Starting zephyr mirroring bot"
print("Starting zephyr mirroring bot")
try:
subprocess.call(args)
except:
traceback.print_exc()
backoff.fail()
print ""
print ""
print "ERROR: The Zephyr mirroring bot is unable to continue mirroring Zephyrs."
print "This is often caused by failing to maintain unexpired Kerberos tickets"
print "or AFS tokens. See https://zulip.com/zephyr for documentation on how to"
print "maintain unexpired Kerberos tickets and AFS tokens."
print ""
print("")
print("")
print("ERROR: The Zephyr mirroring bot is unable to continue mirroring Zephyrs.")
print("This is often caused by failing to maintain unexpired Kerberos tickets")
print("or AFS tokens. See https://zulip.com/zephyr for documentation on how to")
print("maintain unexpired Kerberos tickets and AFS tokens.")
print("")
sys.exit(1)

View File

@ -1,3 +1,4 @@
from __future__ import print_function
import os
import sys
import logging
@ -53,9 +54,9 @@ VENV_PATH="/srv/zulip-venv"
ZULIP_PATH="/srv/zulip"
if not os.path.exists(os.path.join(os.path.dirname(__file__), ".git")):
print "Error: No Zulip git repository present at /srv/zulip!"
print "To setup the Zulip development environment, you should clone the code"
print "from GitHub, rather than using a Zulip production release tarball."
print("Error: No Zulip git repository present at /srv/zulip!")
print("To setup the Zulip development environment, you should clone the code")
print("from GitHub, rather than using a Zulip production release tarball.")
sys.exit(1)
# TODO: Parse arguments properly

View File

@ -1,5 +1,6 @@
#!/usr/bin/env python2.7
from __future__ import print_function
import subprocess
import sys
import logging
@ -12,7 +13,7 @@ logger = logging.getLogger(__name__)
def run(args, dry_run=False):
if dry_run:
print "Would have run: " + " ".join(args)
print("Would have run: " + " ".join(args))
return ""
p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE,

View File

@ -1,6 +1,7 @@
#!/usr/bin/env python2.7
# This tools generates local_settings_generated.py using the template
from __future__ import print_function
import sys, os, os.path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
@ -23,7 +24,7 @@ CAMO_KEY=%s
""" % (camo_key,)
with open(CAMO_CONFIG_FILENAME, 'w') as camo_file:
camo_file.write(camo_config)
print "Generated Camo config file %s" % (CAMO_CONFIG_FILENAME,)
print("Generated Camo config file %s" % (CAMO_CONFIG_FILENAME,))
def generate_django_secretkey():
# Secret key generation taken from Django's startproject.py
@ -55,7 +56,7 @@ def generate_secrets(development=False):
out.write("".join(lines))
out.close()
print "Generated %s with auto-generated secrets!" % (OUTPUT_SETTINGS_FILENAME,)
print("Generated %s with auto-generated secrets!" % (OUTPUT_SETTINGS_FILENAME,))
if __name__ == '__main__':

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python2.7
from __future__ import print_function
import datetime
import monthdelta
@ -26,7 +27,7 @@ class Company(object):
delta = flow.cashflow(start_date, end_date, (end_date - start_date).days)
cash += delta
if self.verbose:
print flow.name, round(delta, 2)
print(flow.name, round(delta, 2))
return round(cash, 2)
def cash_at_date(self, start, end):
@ -39,10 +40,10 @@ class Company(object):
cur_date = parse_date(start)
end_date = parse_date(end)
while cur_date <= end_date:
print cur_date, self.cash_at_date_internal(start_date, cur_date)
print(cur_date, self.cash_at_date_internal(start_date, cur_date))
cur_date += monthdelta.MonthDelta(1)
if self.verbose:
print
print()
# CashFlow objects fundamentally just provide a function that says how
# much cash has been spent by that source at each time
@ -208,5 +209,5 @@ if __name__ == "__main__":
assert(c.cash_at_date("2012-01-01", "2012-02-15") == 499207.33)
c.add_flow(SemiMonthlyWages("Payroll", -4000, "2012-01-01"))
print c
print(c)
c.cash_monthly_summary("2012-01-01", "2012-07-01")

View File

@ -2,6 +2,7 @@
#
# Generates % delta activity metrics from graphite/statsd data
#
from __future__ import print_function
import os, sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
@ -43,7 +44,7 @@ def get_data(url, username, pw):
res = requests.get(url, auth=HTTPDigestAuth(username, pw), verify=False)
if res.status_code != 200:
print "Failed to fetch data url: %s" % (res.error,)
print("Failed to fetch data url: %s" % (res.error,))
return []
return extract_json_response(res)
@ -98,8 +99,8 @@ def parse_data(data, today):
percent = percent_diff(best_last_time, best)
if best is not None:
print "Last %s, %s %s ago:\t%.01f\t\t%s" \
% (day.strftime("%A"), i, "days", best, percent)
print("Last %s, %s %s ago:\t%.01f\t\t%s" \
% (day.strftime("%A"), i, "days", best, percent))
best_last_time = best
for metric in data:
@ -108,12 +109,12 @@ def parse_data(data, today):
metric['datapoints'].sort(key=lambda p: p[1])
best_today = best_during_day(metric['datapoints'], today)
print "Date\t\t\t\tUsers\t\tChange from then to today"
print "Today, 0 days ago:\t\t%.01f" % (best_today,)
print("Date\t\t\t\tUsers\t\tChange from then to today")
print("Today, 0 days ago:\t\t%.01f" % (best_today,))
print_results(xrange(1, 1000), [0, 1, 2, 3, 4, 7])
print "\n\nWeekly Wednesday results"
print "Date\t\t\t\tUsers\t\tDelta from previous week"
print("\n\nWeekly Wednesday results")
print("Date\t\t\t\tUsers\t\tDelta from previous week")
print_results(reversed(xrange(1, 1000)), [2], True)
@ -151,7 +152,7 @@ if __name__ == '__main__':
startfrom = noon_of(day=datetime.now())
if options.start_from != 'today':
startfrom = noon_of(day=datetime.fromtimestamp(int(options.start_from)))
print "Using baseline of today as %s" % (startfrom,)
print("Using baseline of today as %s" % (startfrom,))
realm_key = statsd_key(options.realm, True)
buckets = [options.bucket]

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python2.7
from __future__ import print_function
import os
import shutil
import subprocess
@ -66,13 +67,13 @@ for name, code_point in emoji_map.items():
try:
bw_font(name, code_point)
except Exception as e:
print e
print 'Missing {}, {}'.format(name, code_point)
print(e)
print('Missing {}, {}'.format(name, code_point))
failed = True
continue
os.symlink('unicode/{}.png'.format(code_point), 'out/{}.png'.format(name))
if failed:
print "Errors dumping emoji!"
print("Errors dumping emoji!")
sys.exit(1)

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python2.7
from __future__ import print_function
import sys
import pstats
@ -12,10 +13,10 @@ can find more advanced tools for showing profiler results.
try:
fn = sys.argv[1]
except:
print '''
print('''
Please supply a filename. (If you use the profiled decorator,
the file will have a suffix of ".profile".)
'''
''')
sys.exit(1)
p = pstats.Stats(fn)

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python2.7
from __future__ import print_function
import re
from collections import defaultdict
import os
@ -15,19 +16,19 @@ for line in file(user_agents_path).readlines():
line = line.strip()
match = re.match('^(?P<count>[0-9]+) "(?P<user_agent>.*)"$', line)
if match is None:
print line
print(line)
continue
groupdict = match.groupdict()
count = groupdict["count"]
user_agent = groupdict["user_agent"]
ret = parse_user_agent(user_agent)
if ret is None:
print "parse error", line
print("parse error", line)
parse_errors += 1
continue
user_agents_parsed[ret["name"]] += int(count)
for key in user_agents_parsed:
print " ", key, user_agents_parsed[key]
print(" ", key, user_agents_parsed[key])
print "%s parse errors!" % (parse_errors,)
print("%s parse errors!" % (parse_errors,))

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.conf import settings
from django.core import validators

View File

@ -1,14 +1,15 @@
from __future__ import print_function
import re
import time
def timed_ddl(db, stmt):
print
print time.asctime()
print stmt
print()
print(time.asctime())
print(stmt)
t = time.time()
db.execute(stmt)
delay = time.time() - t
print 'Took %.2fs' % (delay,)
print('Took %.2fs' % (delay,))
def validate(sql_thingy):
# Do basic validation that table/col name is safe.
@ -24,16 +25,16 @@ def do_batch_update(db, table, cols, vals, batch_size=10000, sleep=0.1):
SET (%s) = (%s)
WHERE id >= %%s AND id < %%s
''' % (table, ', '.join(cols), ', '.join(['%s'] * len(cols)))
print stmt
print(stmt)
(min_id, max_id) = db.execute("SELECT MIN(id), MAX(id) FROM %s" % (table,))[0]
if min_id is None:
return
print "%s rows need updating" % (max_id - min_id,)
print("%s rows need updating" % (max_id - min_id,))
while min_id <= max_id:
lower = min_id
upper = min_id + batch_size
print '%s about to update range [%s,%s)' % (time.asctime(), lower, upper)
print('%s about to update range [%s,%s)' % (time.asctime(), lower, upper))
db.start_transaction()
params = list(vals) + [lower, upper]
db.execute(stmt, params=params)
@ -73,7 +74,7 @@ def create_index_if_nonexistant(db, table, col, index):
test = """SELECT relname FROM pg_class
WHERE relname = %s"""
if len(db.execute(test, params=[index])) != 0:
print "Not creating index '%s' because it already exists" % (index,)
print("Not creating index '%s' because it already exists" % (index,))
else:
stmt = "CREATE INDEX %s ON %s (%s)" % (index, table, col)
timed_ddl(db, stmt)
@ -88,13 +89,13 @@ def act_on_message_ranges(db, orm, tasks, batch_size=5000, sleep=0.5):
try:
min_id = all_objects.all().order_by('id')[0].id
except IndexError:
print 'There is no work to do'
print('There is no work to do')
return
max_id = all_objects.all().order_by('-id')[0].id
print "max_id = %d" % (max_id,)
print("max_id = %d" % (max_id,))
overhead = int((max_id + 1 - min_id)/ batch_size * sleep / 60)
print "Expect this to take at least %d minutes, just due to sleeps alone." % (overhead,)
print("Expect this to take at least %d minutes, just due to sleeps alone." % (overhead,))
while min_id <= max_id:
lower = min_id
@ -102,7 +103,7 @@ def act_on_message_ranges(db, orm, tasks, batch_size=5000, sleep=0.5):
if upper > max_id:
upper = max_id
print '%s about to update range %s to %s' % (time.asctime(), lower, upper)
print('%s about to update range %s to %s' % (time.asctime(), lower, upper))
db.start_transaction()
for filterer, action in tasks:

View File

@ -1,3 +1,4 @@
from __future__ import print_function
from confirmation.models import Confirmation
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
@ -381,7 +382,7 @@ def clear_followup_emails_queue(email, mail_client=None):
for email in mail_client.messages.list_scheduled(to=email):
result = mail_client.messages.cancel_scheduled(id=email["_id"])
if result.get("status") == "error":
print result.get("name"), result.get("error")
print(result.get("name"), result.get("error"))
return
def log_digest_event(msg):

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
import os
import pty
@ -62,10 +63,10 @@ if __name__ == "__main__":
for (status, job) in run_parallel(wait_and_print, jobs):
output.append(job)
if output == expected_output:
print "Successfully passed test!"
print("Successfully passed test!")
else:
print "Failed test!"
print jobs
print expected_output
print output
print("Failed test!")
print(jobs)
print(expected_output)
print(output)

View File

@ -1,3 +1,4 @@
from __future__ import print_function
from django.test.runner import DiscoverRunner
from zerver.lib.cache import bounce_key_prefix_for_testing
@ -46,7 +47,7 @@ def enforce_timely_test_completion(test_method, test_name, delay):
max_delay = max_delay * 3
if delay > max_delay:
print 'Test is TOO slow: %s (%.3f s)' % (test_name, delay)
print('Test is TOO slow: %s (%.3f s)' % (test_name, delay))
def fast_tests_only():
return os.environ.get('FAST_TESTS_ONLY', False)
@ -61,10 +62,10 @@ def run_test(test):
bounce_key_prefix_for_testing(test_name)
print 'Running', test_name
print('Running', test_name)
if not hasattr(test, "_pre_setup"):
print "somehow the test doesn't have _pre_setup; it may be an import fail."
print "Here's a debugger. Good luck!"
print("somehow the test doesn't have _pre_setup; it may be an import fail.")
print("Here's a debugger. Good luck!")
import pdb; pdb.set_trace()
test._pre_setup()
@ -99,5 +100,5 @@ class Runner(DiscoverRunner):
get_sqlalchemy_connection()
self.run_suite(suite)
self.teardown_test_environment()
print 'DONE!'
print
print('DONE!')
print()

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
@ -51,6 +52,6 @@ class Command(BaseCommand):
for user_profile in user_profiles:
stream, _ = create_stream_if_needed(user_profile.realm, stream_name)
did_subscribe = do_add_subscription(user_profile, stream)
print "%s %s to %s" % (
print("%s %s to %s" % (
"Subscribed" if did_subscribe else "Already subscribed",
user_profile.email, stream_name)
user_profile.email, stream_name))

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
@ -17,7 +18,7 @@ class Command(BaseCommand):
try:
user_profile = get_user_profile_by_email(email)
except UserProfile.DoesNotExist:
print "e-mail %s doesn't exist in the system, skipping" % (email,)
print("e-mail %s doesn't exist in the system, skipping" % (email,))
continue
do_update_message_flags(user_profile, "add", "read", None, True)
@ -29,6 +30,6 @@ class Command(BaseCommand):
new_pointer = messages[0].id
user_profile.pointer = new_pointer
user_profile.save(update_fields=["pointer"])
print "%s: %d => %d" % (email, old_pointer, new_pointer)
print("%s: %d => %d" % (email, old_pointer, new_pointer))
else:
print "%s has no messages, can't bankrupt!" % (email,)
print("%s has no messages, can't bankrupt!" % (email,))

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
@ -21,7 +22,7 @@ class Command(BaseCommand):
try:
user_profile = get_user_profile_by_email(email)
old_name = user_profile.full_name
print "%s: %s -> %s" % (email, old_name, new_name)
print("%s: %s -> %s" % (email, old_name, new_name))
do_change_full_name(user_profile, new_name)
except UserProfile.DoesNotExist:
print "* E-mail %s doesn't exist in the system, skipping." % (email,)
print("* E-mail %s doesn't exist in the system, skipping." % (email,))

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
@ -20,7 +21,7 @@ class Command(BaseCommand):
try:
user_profile = get_user_profile_by_email(old_email)
except UserProfile.DoesNotExist:
print "Old e-mail doesn't exist in the system."
print("Old e-mail doesn't exist in the system.")
exit(1)
do_change_user_email(user_profile, new_email)


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from zerver.models import get_user_profile_by_id
from zerver.lib.rate_limiter import client, max_api_calls, max_api_window
@ -44,7 +45,7 @@ than max_api_calls! (trying to trim) %s %s" % (key, count))
def handle(self, *args, **options):
if not settings.RATE_LIMITING:
print "This machine is not using redis or rate limiting, aborting"
print("This machine is not using redis or rate limiting, aborting")
exit(1)
# Find all keys, and make sure they're all within size constraints


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
@ -18,5 +19,5 @@ class Command(BaseCommand):
except AttributeError:
pass
print "Error: You must set %s in /etc/zulip/settings.py." % (setting_name,)
print("Error: You must set %s in /etc/zulip/settings.py." % (setting_name,))
sys.exit(1)


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
from django.conf import settings
@ -55,16 +56,16 @@ Usage: python2.7 manage.py create_realm --domain=foo.com --name='Foo, Inc.'"""
def handle(self, *args, **options):
if options["domain"] is None or options["name"] is None:
print >>sys.stderr, "\033[1;31mPlease provide both a domain and name.\033[0m\n"
print("\033[1;31mPlease provide both a domain and name.\033[0m\n", file=sys.stderr)
self.print_help("python2.7 manage.py", "create_realm")
exit(1)
if options["open_realm"] and options["deployment_id"] is not None:
print >>sys.stderr, "\033[1;31mExternal deployments cannot be open realms.\033[0m\n"
print("\033[1;31mExternal deployments cannot be open realms.\033[0m\n", file=sys.stderr)
self.print_help("python2.7 manage.py", "create_realm")
exit(1)
if options["deployment_id"] is not None and settings.VOYAGER:
print >>sys.stderr, "\033[1;31mExternal deployments are not supported on voyager deployments.\033[0m\n"
print("\033[1;31mExternal deployments are not supported on voyager deployments.\033[0m\n", file=sys.stderr)
exit(1)
domain = options["domain"]
@ -75,12 +76,12 @@ Usage: python2.7 manage.py create_realm --domain=foo.com --name='Foo, Inc.'"""
realm, created = do_create_realm(
domain, name, restricted_to_domain=not options["open_realm"])
if created:
print domain, "created."
print(domain, "created.")
if options["deployment_id"] is not None:
deployment = Deployment.objects.get(id=options["deployment_id"])
deployment.realms.add(realm)
deployment.save()
print "Added to deployment", str(deployment.id)
print("Added to deployment", str(deployment.id))
elif settings.ZULIP_COM:
deployment = Deployment.objects.get(base_site_url="https://zulip.com/")
deployment.realms.add(realm)
@ -89,6 +90,6 @@ Usage: python2.7 manage.py create_realm --domain=foo.com --name='Foo, Inc.'"""
set_default_streams(realm, ["social", "engineering"])
print "\033[1;36mDefault streams set to social,engineering,zulip!\033[0m"
print("\033[1;36mDefault streams set to social,engineering,zulip!\033[0m")
else:
print domain, "already exists."
print(domain, "already exists.")


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
@ -27,7 +28,7 @@ the command."""
try:
realm = get_realm(domain)
except Realm.DoesNotExist:
print "Unknown domain %s" % (domain,)
print("Unknown domain %s" % (domain,))
exit(1)
do_create_stream(realm, stream_name.decode(encoding))


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
import sys
import argparse
@ -68,7 +69,7 @@ parameters, or specify no parameters for interactive user creation.""")
validators.validate_email(email)
break
except ValidationError:
print >> sys.stderr, "Invalid email address."
print("Invalid email address.", file=sys.stderr)
full_name = raw_input("Full name: ")
try:


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
@ -13,6 +14,6 @@ class Command(BaseCommand):
help='domain of realm to deactivate')
def handle(self, *args, **options):
print "Deactivating", options["domain"]
print("Deactivating", options["domain"])
do_deactivate_realm(get_realm(options["domain"]))
print "Done!"
print("Done!")


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
@ -22,23 +23,23 @@ class Command(BaseCommand):
def handle(self, *args, **options):
user_profile = get_user_profile_by_email(options['email'])
print "Deactivating %s (%s) - %s" % (user_profile.full_name,
print("Deactivating %s (%s) - %s" % (user_profile.full_name,
user_profile.email,
user_profile.realm.domain)
print "%s has the following active sessions:" % (user_profile.email,)
user_profile.realm.domain))
print("%s has the following active sessions:" % (user_profile.email,))
for session in user_sessions(user_profile):
print session.expire_date, session.get_decoded()
print ""
print "%s has %s active bots that will also be deactivated." % (
print(session.expire_date, session.get_decoded())
print("")
print("%s has %s active bots that will also be deactivated." % (
user_profile.email,
UserProfile.objects.filter(
is_bot=True, is_active=True, bot_owner=user_profile
).count()
)
))
if not options["for_real"]:
print "This was a dry run. Pass -f to actually deactivate."
print("This was a dry run. Pass -f to actually deactivate.")
exit(1)
do_deactivate_user(user_profile)
print "Sessions deleted, user deactivated."
print("Sessions deleted, user deactivated.")


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
from django.core.management.base import BaseCommand
@ -31,4 +32,4 @@ class Command(BaseCommand):
messages = Message.objects.filter(pub_date__gt=cutoff, recipient__in=recipients)
for message in messages:
print message.to_dict(False)
print(message.to_dict(False))


@ -34,6 +34,7 @@ This script can be used via two mechanisms:
from __future__ import absolute_import
from __future__ import print_function
import email
import os
@ -144,13 +145,13 @@ class Command(BaseCommand):
try:
mark_missed_message_address_as_used(rcpt_to)
except ZulipEmailForwardError:
print "5.1.1 Bad destination mailbox address: Bad or expired missed message address."
print("5.1.1 Bad destination mailbox address: Bad or expired missed message address.")
exit(posix.EX_NOUSER)
else:
try:
extract_and_validate(rcpt_to)
except ZulipEmailForwardError:
print "5.1.1 Bad destination mailbox address: Please use the address specified in your Streams page."
print("5.1.1 Bad destination mailbox address: Please use the address specified in your Streams page.")
exit(posix.EX_NOUSER)
# Read in the message, at most 25MiB. This is the limit enforced by
@ -159,7 +160,7 @@ class Command(BaseCommand):
if len(sys.stdin.read(1)) != 0:
# We're not at EOF, reject large mail.
print "5.3.4 Message too big for system: Max size is 25MiB"
print("5.3.4 Message too big for system: Max size is 25MiB")
exit(posix.EX_DATAERR)
queue_json_publish(
@ -175,7 +176,7 @@ class Command(BaseCommand):
if (not settings.EMAIL_GATEWAY_BOT or not settings.EMAIL_GATEWAY_LOGIN or
not settings.EMAIL_GATEWAY_PASSWORD or not settings.EMAIL_GATEWAY_IMAP_SERVER or
not settings.EMAIL_GATEWAY_IMAP_PORT or not settings.EMAIL_GATEWAY_IMAP_FOLDER):
print "Please configure the Email Mirror Gateway in your local_settings.py, or specify $ORIGINAL_RECIPIENT if piping a single mail."
print("Please configure the Email Mirror Gateway in your local_settings.py, or specify $ORIGINAL_RECIPIENT if piping a single mail.")
exit(1)
reactor.callLater(0, main)
reactor.run()


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from zerver.lib.queue import queue_json_publish
@ -47,7 +48,7 @@ You can use "-" to represent stdin.
except IndexError:
payload = line
print 'Queueing to queue %s: %s' % (queue_name, payload)
print('Queueing to queue %s: %s' % (queue_name, payload))
# Verify that payload is valid json.
data = ujson.loads(payload)


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
@ -60,7 +61,7 @@ class Command(BaseCommand):
def handle(self, *args, **options):
if len(options['log_files']) == 0:
print >>sys.stderr, 'WARNING: No log files specified; doing nothing.'
print('WARNING: No log files specified; doing nothing.', file=sys.stderr)
for infile in options['log_files']:
try:
@ -68,5 +69,5 @@ class Command(BaseCommand):
except KeyboardInterrupt:
raise
except:
print >>sys.stderr, 'WARNING: Could not expunge from', infile
print('WARNING: Could not expunge from', infile, file=sys.stderr)
traceback.print_exc()


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from confirmation.models import Confirmation
@ -26,7 +27,7 @@ class Command(BaseCommand):
for email in options['emails']:
try:
get_user_profile_by_email(email)
print email + ": There is already a user registered with that address."
print(email + ": There is already a user registered with that address.")
duplicates = True
continue
except UserProfile.DoesNotExist:
@ -40,8 +41,8 @@ class Command(BaseCommand):
if domain:
realm = get_realm(domain)
if not realm:
print "The realm %s doesn't exist yet, please create it first." % (domain,)
print "Don't forget default streams!"
print("The realm %s doesn't exist yet, please create it first." % (domain,))
print("Don't forget default streams!")
exit(1)
for email in options['emails']:
@ -49,14 +50,14 @@ class Command(BaseCommand):
if realm.restricted_to_domain and \
domain.lower() != email.split("@", 1)[-1].lower() and \
not options["force"]:
print "You've asked to add an external user (%s) to a closed realm (%s)." % (
email, domain)
print "Are you sure? To do this, pass --force."
print("You've asked to add an external user (%s) to a closed realm (%s)." % (
email, domain))
print("Are you sure? To do this, pass --force.")
exit(1)
else:
prereg_user = PreregistrationUser(email=email, realm=realm)
else:
prereg_user = PreregistrationUser(email=email)
prereg_user.save()
print email + ": " + Confirmation.objects.get_link_for_object(prereg_user)
print(email + ": " + Confirmation.objects.get_link_for_object(prereg_user))


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
@ -41,9 +42,9 @@ Usage: python2.7 manage.py import_dump [--destroy-rebuild-database] [--chunk-siz
def new_instance_check(self, model):
count = model.objects.count()
if count:
print "Zulip instance is not empty, found %d rows in %s table. " \
% (count, model._meta.db_table)
print "You may use --destroy-rebuild-database to destroy and rebuild the database prior to import."
print("Zulip instance is not empty, found %d rows in %s table. " \
% (count, model._meta.db_table))
print("You may use --destroy-rebuild-database to destroy and rebuild the database prior to import.")
exit(1)
@ -110,7 +111,7 @@ Usage: python2.7 manage.py import_dump [--destroy-rebuild-database] [--chunk-siz
encoding = sys.getfilesystemencoding()
if len(args) == 0:
print "Please provide at least one database dump file name."
print("Please provide at least one database dump file name.")
exit(1)
if not options["destroy_rebuild_database"]:
@ -133,10 +134,10 @@ Usage: python2.7 manage.py import_dump [--destroy-rebuild-database] [--chunk-siz
try:
fp = open(file_name, 'r')
except IOError:
print "File not found: '%s'" % (file_name,)
print("File not found: '%s'" % (file_name,))
exit(1)
print "Processing file: %s ..." % (file_name,)
print("Processing file: %s ..." % (file_name,))
# parse the database dump and load in memory
# TODO: change this to a streaming parser to support loads > RAM size
@ -146,19 +147,19 @@ Usage: python2.7 manage.py import_dump [--destroy-rebuild-database] [--chunk-siz
self.increment_row_counter(row_counter, database_dump, model)
self.import_table(database_dump, realm_notification_map, model)
print ""
print("")
# set notifications_stream_id on realm objects to correct value now
# that foreign keys are in streams table
if len(realm_notification_map):
print "Setting realm notification stream..."
print("Setting realm notification stream...")
for id, notifications_stream_id in realm_notification_map.items():
Realm.objects \
.filter(id=id) \
.update(notifications_stream = notifications_stream_id)
print ""
print "Testing data import: "
print("")
print("Testing data import: ")
# test that everything from all json dumps made it into the database
for model in models_to_import:


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ValidationError
@ -45,16 +46,16 @@ ONLY perform this on customer request from an authorized person.
else:
if options['ack']:
do_change_is_admin(profile, True, permission=options['permission'])
print "Done!"
print("Done!")
else:
print "Would have granted %s %s rights for %s" % (email, options['permission'], profile.realm.domain)
print("Would have granted %s %s rights for %s" % (email, options['permission'], profile.realm.domain))
else:
if profile.has_perm(options['permission'], profile.realm):
if options['ack']:
do_change_is_admin(profile, False, permission=options['permission'])
print "Done!"
print("Done!")
else:
print "Would have removed %s's %s rights on %s" % (email, options['permission'],
profile.realm.domain)
print("Would have removed %s's %s rights on %s" % (email, options['permission'],
profile.realm.domain))
else:
raise CommandError("User did not have permission for this realm!")


@ -5,6 +5,7 @@ Shows backlog count of ScheduledJobs of type Email
"""
from __future__ import absolute_import
from __future__ import print_function
from django.conf import settings
from django.core.management.base import BaseCommand
@ -24,7 +25,7 @@ Usage: python2.7 manage.py print_email_delivery_backlog
"""
def handle(self, *args, **options):
print len(ScheduledJob.objects.filter(type=ScheduledJob.EMAIL,
scheduled_timestamp__lte=datetime.utcnow()-timedelta(minutes=1)))
print(len(ScheduledJob.objects.filter(type=ScheduledJob.EMAIL,
scheduled_timestamp__lte=datetime.utcnow()-timedelta(minutes=1))))
return


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from django.core.management import CommandError
@ -15,4 +16,4 @@ class Command(BaseCommand):
queue = SimpleQueueClient()
queue.ensure_queue(queue_name, lambda: None)
queue.channel.queue_purge(queue_name)
print "Done"
print("Done")


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
import sys
@ -16,10 +17,10 @@ def query_ldap(**options):
if isinstance(backend, LDAPBackend):
ldap_attrs = _LDAPUser(backend, backend.django_to_ldap_username(email)).attrs
if ldap_attrs is None:
print "No such user found"
print("No such user found")
else:
for django_field, ldap_field in settings.AUTH_LDAP_USER_ATTR_MAP.items():
print "%s: %s" % (django_field, ldap_attrs[ldap_field])
print("%s: %s" % (django_field, ldap_attrs[ldap_field]))
class Command(BaseCommand):
def add_arguments(self, parser):


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from zerver.models import UserProfile, get_user_profile_by_email
from zerver.lib.rate_limiter import block_user, unblock_user
@ -36,7 +37,7 @@ class Command(BaseCommand):
def handle(self, *args, **options):
if (not options['api_key'] and not options['email']) or \
(options['api_key'] and options['email']):
print "Please enter either an email or API key to manage"
print("Please enter either an email or API key to manage")
exit(1)
if options['email']:
@ -45,7 +46,7 @@ class Command(BaseCommand):
try:
user_profile = UserProfile.objects.get(api_key=options['api_key'])
except:
print "Unable to get user profile for api key %s" % (options['api_key'], )
print("Unable to get user profile for api key %s" % (options['api_key'], ))
exit(1)
users = [user_profile]
@ -55,7 +56,7 @@ class Command(BaseCommand):
operation = options['operation']
for user in users:
print "Applying operation to User ID: %s: %s" % (user.id, operation)
print("Applying operation to User ID: %s: %s" % (user.id, operation))
if operation == 'block':
block_user(user, options['seconds'], options['domain'])


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from zerver.models import Realm, RealmAlias, get_realm
@ -25,15 +26,15 @@ class Command(BaseCommand):
def handle(self, *args, **options):
realm = get_realm(options["domain"])
if options["op"] == "show":
print "Aliases for %s:" % (realm.domain,)
print("Aliases for %s:" % (realm.domain,))
for alias in realm_aliases(realm):
print alias
print(alias)
sys.exit(0)
alias = options['alias']
if options["op"] == "add":
if get_realm(alias) is not None:
print "A Realm already exists for this domain, cannot add it as an alias for another realm!"
print("A Realm already exists for this domain, cannot add it as an alias for another realm!")
sys.exit(1)
RealmAlias.objects.create(realm=realm, domain=alias)
sys.exit(0)


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from zerver.models import Realm, get_realm
@ -33,7 +34,7 @@ Example: python2.7 manage.py realm_emoji --realm=zulip.com --op=show
realm = get_realm(options["domain"])
if options["op"] == "show":
for name, url in realm.get_emoji().iteritems():
print name, url
print(name, url)
sys.exit(0)
name = options['name']


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
from django.core.management.base import BaseCommand
@ -39,7 +40,7 @@ Example: python2.7 manage.py realm_filters --realm=zulip.com --op=show
def handle(self, *args, **options):
realm = get_realm(options["domain"])
if options["op"] == "show":
print "%s: %s" % (realm.domain, all_realm_filters().get(realm.domain, ""))
print("%s: %s" % (realm.domain, all_realm_filters().get(realm.domain, "")))
sys.exit(0)
pattern = options['pattern']


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
@ -51,6 +52,6 @@ class Command(BaseCommand):
for user_profile in user_profiles:
did_remove = do_remove_subscription(user_profile, stream)
print "%s %s from %s" % (
print("%s %s from %s" % (
"Removed" if did_remove else "Couldn't remove",
user_profile.email, stream_name)
user_profile.email, stream_name))


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
@ -27,7 +28,7 @@ class Command(BaseCommand):
try:
realm = get_realm(domain)
except Realm.DoesNotExist:
print "Unknown domain %s" % (domain,)
print("Unknown domain %s" % (domain,))
exit(1)
do_rename_stream(realm, old_name.decode(encoding),


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.conf import settings
settings.RUNNING_INSIDE_TORNADO = True
@ -73,11 +74,11 @@ class Command(BaseCommand):
from django.utils import translation
translation.activate(settings.LANGUAGE_CODE)
print "Validating Django models.py..."
print("Validating Django models.py...")
self.validate(display_num_errors=True)
print "\nDjango version %s" % (django.get_version())
print "Tornado server is running at http://%s:%s/" % (addr, port)
print "Quit the server with %s." % (quit_command,)
print("\nDjango version %s" % (django.get_version()))
print("Tornado server is running at http://%s:%s/" % (addr, port))
print("Quit the server with %s." % (quit_command,))
if settings.USING_RABBITMQ:
queue_client = get_queue_client()


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
@ -36,8 +37,8 @@ python2.7 manage.py set_default_streams --domain=foo.com --streams=
def handle(self, **options):
if options["domain"] is None or options["streams"] is None:
print >>sys.stderr, "Please provide both a domain name and a default \
set of streams (which can be empty, with `--streams=`)."
print("Please provide both a domain name and a default \
set of streams (which can be empty, with `--streams=`).", file=sys.stderr)
exit(1)
stream_names = [stream.strip() for stream in options["streams"].split(",")]


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
import logging
@ -41,7 +42,7 @@ class Command(BaseCommand):
def handle(self, *args, **options):
if not options["flag"] or not options["op"] or not options["email"]:
print "Please specify an operation, a flag and an email"
print("Please specify an operation, a flag and an email")
exit(1)
op = options['op']


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from zerver.models import get_realm, Realm
@ -17,16 +18,16 @@ class Command(BaseCommand):
try:
realm = get_realm(realm)
except Realm.DoesNotExist:
print 'There is no realm called %s.' % (realm,)
print('There is no realm called %s.' % (realm,))
sys.exit(1)
users = realm.get_admin_users()
if users:
print 'Admins:\n'
print('Admins:\n')
for user in users:
print ' %s (%s)' % (user.email, user.full_name)
print(' %s (%s)' % (user.email, user.full_name))
else:
print 'There are no admins for this realm!'
print('There are no admins for this realm!')
print '\nYou can use the "knight" management command to knight admins.'
print('\nYou can use the "knight" management command to knight admins.')

View File

@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
@ -35,12 +36,12 @@ class Command(BaseCommand):
for email in emails:
user_profiles.append(get_user_profile_by_email(email))
print "Turned off digest emails for:"
print("Turned off digest emails for:")
for user_profile in user_profiles:
already_disabled_prefix = ""
if user_profile.enable_digest_emails:
do_change_enable_digest_emails(user_profile, False)
else:
already_disabled_prefix = "(already off) "
print "%s%s <%s>" % (already_disabled_prefix, user_profile.full_name,
user_profile.email)
print("%s%s <%s>" % (already_disabled_prefix, user_profile.full_name,
user_profile.email))


@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
from __future__ import print_function
from django.conf import settings
from django.test import TestCase
@ -149,7 +150,7 @@ class BugdownTest(TestCase):
for name, test in format_tests.iteritems():
converted = bugdown_convert(test['input'])
print "Running Bugdown test %s" % (name,)
print("Running Bugdown test %s" % (name,))
self.assertEqual(converted, test['expected_output'])
def replaced(payload, url, phrase=''):
@ -164,7 +165,7 @@ class BugdownTest(TestCase):
return payload % ("<a href=\"%s\"%s title=\"%s\">%s</a>" % (href, target, href, url),)
print "Running Bugdown Linkify tests"
print("Running Bugdown Linkify tests")
self.maxDiff = None
for inline_url, reference, url in linkify_tests:
try:


@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from django.test import TestCase
@ -40,7 +41,7 @@ import time
import ujson
def bail(msg):
print '\nERROR: %s\n' % (msg,)
print('\nERROR: %s\n' % (msg,))
sys.exit(1)
try:


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
import sys
@ -31,17 +32,17 @@ class Command(BaseCommand):
def handle(self, *args, **options):
if None in (options["api"], options["web"], options["domain"]):
print >>sys.stderr, "\033[1;31mYou must provide a domain, an API URL, and a web URL.\033[0m\n"
print("\033[1;31mYou must provide a domain, an API URL, and a web URL.\033[0m\n", file=sys.stderr)
self.print_help("python2.7 manage.py", "create_realm")
exit(1)
if not options["no_realm"]:
CreateRealm().handle(*args, **options)
print # Newline
print() # Newline
realm = get_realm(options["domain"])
if realm is None:
print >>sys.stderr, "\033[1;31mRealm does not exist!\033[0m\n"
print("\033[1;31mRealm does not exist!\033[0m\n", file=sys.stderr)
exit(2)
dep = Deployment()
@ -55,6 +56,6 @@ class Command(BaseCommand):
dep.base_api_url = options["api"]
dep.base_site_url = options["web"]
dep.save()
print "Deployment %s created." % (dep.id,)
print "DEPLOYMENT_ROLE_NAME = %s" % (dep.name,)
print "DEPLOYMENT_ROLE_KEY = %s" % (dep.api_key,)
print("Deployment %s created." % (dep.id,))
print("DEPLOYMENT_ROLE_NAME = %s" % (dep.name,))
print("DEPLOYMENT_ROLE_KEY = %s" % (dep.api_key,))


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from django.utils.timezone import now
@ -261,7 +262,7 @@ def restore_saved_messages():
client_set = set(["populate_db", "website", "zephyr_mirror"])
huddle_user_set = set()
# First, determine all the objects our messages will need.
print datetime.datetime.now(), "Creating realms/streams/etc..."
print(datetime.datetime.now(), "Creating realms/streams/etc...")
def process_line(line):
old_message_json = line.strip()
@ -376,21 +377,21 @@ def restore_saved_messages():
huddle_recipients = {}
# Then, create the objects our messages need.
print datetime.datetime.now(), "Creating realms..."
print(datetime.datetime.now(), "Creating realms...")
bulk_create_realms(realm_set)
realms = {}
for realm in Realm.objects.all():
realms[realm.domain] = realm
print datetime.datetime.now(), "Creating clients..."
print(datetime.datetime.now(), "Creating clients...")
bulk_create_clients(client_set)
clients = {}
for client in Client.objects.all():
clients[client.name] = client
print datetime.datetime.now(), "Creating streams..."
print(datetime.datetime.now(), "Creating streams...")
bulk_create_streams(realms, stream_dict.values())
streams = {}
@ -400,7 +401,7 @@ def restore_saved_messages():
stream_recipients[(streams[recipient.type_id].realm_id,
streams[recipient.type_id].name.lower())] = recipient
print datetime.datetime.now(), "Creating users..."
print(datetime.datetime.now(), "Creating users...")
bulk_create_users(realms, user_set)
users = {}
@ -411,7 +412,7 @@ def restore_saved_messages():
for recipient in Recipient.objects.filter(type=Recipient.PERSONAL):
user_recipients[users_by_id[recipient.type_id].email] = recipient
print datetime.datetime.now(), "Creating huddles..."
print(datetime.datetime.now(), "Creating huddles...")
bulk_create_huddles(users, huddle_user_set)
huddles_by_id = {}
@ -423,14 +424,14 @@ def restore_saved_messages():
# TODO: Add a special entry type in the log that is a subscription
# change and import those as we go to make subscription changes
# take effect!
print datetime.datetime.now(), "Importing subscriptions..."
print(datetime.datetime.now(), "Importing subscriptions...")
subscribers = {}
for s in Subscription.objects.select_related().all():
if s.active:
subscribers.setdefault(s.recipient.id, set()).add(s.user_profile.id)
# Then create all the messages, without talking to the DB!
print datetime.datetime.now(), "Importing messages, part 1..."
print(datetime.datetime.now(), "Importing messages, part 1...")
first_message_id = None
if Message.objects.exists():
first_message_id = Message.objects.all().order_by("-id")[0].id + 1
@ -482,12 +483,12 @@ def restore_saved_messages():
raise ValueError('Bad message type')
messages_to_create.append(message)
print datetime.datetime.now(), "Importing messages, part 2..."
print(datetime.datetime.now(), "Importing messages, part 2...")
Message.objects.bulk_create(messages_to_create)
messages_to_create = []
# Finally, create all the UserMessage objects
print datetime.datetime.now(), "Importing usermessages, part 1..."
print(datetime.datetime.now(), "Importing usermessages, part 1...")
personal_recipients = {}
for r in Recipient.objects.filter(type = Recipient.PERSONAL):
personal_recipients[r.id] = True
@ -500,7 +501,7 @@ def restore_saved_messages():
messages_by_id[message.id] = message
if len(messages_by_id) == 0:
print datetime.datetime.now(), "No old messages to replay"
print(datetime.datetime.now(), "No old messages to replay")
return
if first_message_id is None:
@ -526,8 +527,8 @@ def restore_saved_messages():
try:
subscribers[stream_recipients[stream_key].id].remove(user_id)
except KeyError:
print "Error unsubscribing %s from %s: not subscribed" % (
old_message["user"], old_message["name"])
print("Error unsubscribing %s from %s: not subscribed" % (
old_message["user"], old_message["name"]))
pending_subs[(stream_recipients[stream_key].id,
users[old_message["user"]].id)] = False
continue
@ -627,11 +628,11 @@ def restore_saved_messages():
UserMessage.objects.bulk_create(user_messages_to_create)
user_messages_to_create = []
print datetime.datetime.now(), "Importing usermessages, part 2..."
print(datetime.datetime.now(), "Importing usermessages, part 2...")
tot_user_messages += len(user_messages_to_create)
UserMessage.objects.bulk_create(user_messages_to_create)
print datetime.datetime.now(), "Finalizing subscriptions..."
print(datetime.datetime.now(), "Finalizing subscriptions...")
current_subs = {}
current_subs_obj = {}
for s in Subscription.objects.select_related().all():
@ -666,14 +667,14 @@ def restore_saved_messages():
# TODO: do restore of subscription colors -- we're currently not
# logging changes so there's little point in having the code :(
print datetime.datetime.now(), "Finished importing %s messages (%s usermessages)" % \
(len(all_messages), tot_user_messages)
print(datetime.datetime.now(), "Finished importing %s messages (%s usermessages)" % \
(len(all_messages), tot_user_messages))
site = Site.objects.get_current()
site.domain = 'zulip.com'
site.save()
print datetime.datetime.now(), "Filling in user pointers..."
print(datetime.datetime.now(), "Filling in user pointers...")
# Set restored pointers to the very latest messages
for user_profile in UserProfile.objects.all():
@ -685,7 +686,7 @@ def restore_saved_messages():
user_profile.pointer = -1
user_profile.save(update_fields=["pointer"])
print datetime.datetime.now(), "Done replaying old messages"
print(datetime.datetime.now(), "Done replaying old messages")
# Create some test messages, including:
# - multiple streams


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from zerver.lib.initial_password import initial_password
@ -14,9 +15,9 @@ class Command(BaseCommand):
help="email of user to show password and API key for")
def handle(self, *args, **options):
print self.fmt % ('email', 'password', 'API key')
print(self.fmt % ('email', 'password', 'API key'))
for email in options['emails']:
if '@' not in email:
print 'ERROR: %s does not look like an email address' % (email,)
print('ERROR: %s does not look like an email address' % (email,))
continue
print self.fmt % (email, initial_password(email), get_user_profile_by_email(email).api_key)
print(self.fmt % (email, initial_password(email), get_user_profile_by_email(email).api_key))


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
@ -20,6 +21,6 @@ Usage: python2.7 manage.py render_old_messages"""
for message in messages:
message.maybe_render_content(None, save=True)
total_rendered += len(messages)
print datetime.datetime.now(), total_rendered
print(datetime.datetime.now(), total_rendered)
# Put in some sleep so this can run safely on low resource machines
time.sleep(0.25)


@ -1,4 +1,5 @@
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from zerver.models import get_user_profile_by_email, UserProfile
@ -23,4 +24,4 @@ class Command(BaseCommand):
user_profile.api_key = api_key
user_profile.save(update_fields=["api_key"])
except UserProfile.DoesNotExist:
print "User %s does not exist; not syncing API key" % (email,)
print("User %s does not exist; not syncing API key" % (email,))


@ -1,4 +1,5 @@
#!/usr/bin/env python2.7
from __future__ import print_function
import os
import sys
import datetime
@ -27,4 +28,4 @@ def make_deploy_path():
if __name__ == '__main__':
cmd = sys.argv[1]
if cmd == 'make_deploy_path':
print make_deploy_path()
print(make_deploy_path())