mirror of https://github.com/zulip/zulip.git
python: Convert more percent formatting to Python 3.6 f-strings.
Generated by pyupgrade --py36-plus. Now including %d, %i, %u, and multi-line strings.

Signed-off-by: Anders Kaseorg <anders@zulip.com>

parent 1ed2d9b4a0
commit 74c17bf94a
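The hunks below are mechanical rewrites produced by pyupgrade. As a quick orientation, here is a small sketch (not part of the commit) of the most common patterns in this diff, using made-up example values; the variable names are illustrative only, and each old/new pair renders identically at runtime.

# Illustrative sketch of the pyupgrade rewrites in this commit (assumed example values).
num_active = 42          # hypothetical count
dump_file_id = 7         # hypothetical shard id
name, count = "general", 137

# %d with a one-element tuple becomes a plain f-string interpolation.
assert "%d active users" % (num_active,) == f"{num_active} active users"

# Zero-padded width specifiers carry over: %06d -> {...:06}.
assert "messages-%06d.json" % (dump_file_id,) == f"messages-{dump_file_id:06}.json"

# Width/alignment specifiers: %25s right-aligns a string, so it becomes {...:>25};
# %15d on an integer is already right-aligned, so {...:15} suffices.
assert "%25s %15d" % (name, count) == f"{name:>25} {count:15}"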
@@ -54,7 +54,7 @@ Usage examples:
 counts.sort()

 for count in counts:
-print("%25s %15d" % (count[1], count[0]))
+print(f"{count[1]:>25} {count[0]:15}")
 print("Total:", total)

 def handle(self, *args: Any, **options: Optional[str]) -> None:
@@ -93,26 +93,26 @@ class Command(BaseCommand):
 active_users = self.active_users(realm)
 num_active = len(active_users)

-print("%d active users (%d total)" % (num_active, len(user_profiles)))
+print(f"{num_active} active users ({len(user_profiles)} total)")
 streams = Stream.objects.filter(realm=realm).extra(
 tables=['zerver_subscription', 'zerver_recipient'],
 where=['zerver_subscription.recipient_id = zerver_recipient.id',
 'zerver_recipient.type = 2',
 'zerver_recipient.type_id = zerver_stream.id',
 'zerver_subscription.active = true']).annotate(count=Count("name"))
-print("%d streams" % (streams.count(),))
+print(f"{streams.count()} streams")

 for days_ago in (1, 7, 30):
-print("In last %d days, users sent:" % (days_ago,))
+print(f"In last {days_ago} days, users sent:")
 sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles]
 for quantity in sorted(sender_quantities, reverse=True):
 print(quantity, end=' ')
 print("")

-print("%d stream messages" % (self.stream_messages(realm, days_ago),))
-print("%d one-on-one private messages" % (self.private_messages(realm, days_ago),))
-print("%d messages sent via the API" % (self.api_messages(realm, days_ago),))
-print("%d group private messages" % (self.group_private_messages(realm, days_ago),))
+print(f"{self.stream_messages(realm, days_ago)} stream messages")
+print(f"{self.private_messages(realm, days_ago)} one-on-one private messages")
+print(f"{self.api_messages(realm, days_ago)} messages sent via the API")
+print(f"{self.group_private_messages(realm, days_ago)} group private messages")

 num_notifications_enabled = len([x for x in active_users if x.enable_desktop_notifications])
 self.report_percentage(num_notifications_enabled, num_active,
@@ -132,7 +132,7 @@ class Command(BaseCommand):
 starrers = UserMessage.objects.filter(user_profile__in=user_profiles,
 flags=UserMessage.flags.starred).values(
 "user_profile").annotate(count=Count("user_profile"))
-print("%d users have starred %d messages" % (
+print("{} users have starred {} messages".format(
 len(starrers), sum([elt["count"] for elt in starrers])))

 active_user_subs = Subscription.objects.filter(
@@ -141,20 +141,20 @@ class Command(BaseCommand):
 # Streams not in home view
 non_home_view = active_user_subs.filter(is_muted=True).values(
 "user_profile").annotate(count=Count("user_profile"))
-print("%d users have %d streams not in home view" % (
+print("{} users have {} streams not in home view".format(
 len(non_home_view), sum([elt["count"] for elt in non_home_view])))

 # Code block markup
 markup_messages = human_messages.filter(
 sender__realm=realm, content__contains="~~~").values(
 "sender").annotate(count=Count("sender"))
-print("%d users have used code block markup on %s messages" % (
+print("{} users have used code block markup on {} messages".format(
 len(markup_messages), sum([elt["count"] for elt in markup_messages])))

 # Notifications for stream messages
 notifications = active_user_subs.filter(desktop_notifications=True).values(
 "user_profile").annotate(count=Count("user_profile"))
-print("%d users receive desktop notifications for %d streams" % (
+print("{} users receive desktop notifications for {} streams".format(
 len(notifications), sum([elt["count"] for elt in notifications])))

 print("")
@@ -36,8 +36,8 @@ class Command(BaseCommand):
 public_count += 1
 print("------------")
 print(realm.string_id, end=' ')
-print("%10s %d public streams and" % ("(", public_count), end=' ')
-print("%d private streams )" % (private_count,))
+print("{:>10} {} public streams and".format("(", public_count), end=' ')
+print(f"{private_count} private streams )")
 print("------------")
 print("{:>25} {:>15} {:>10} {:>12}".format("stream", "subscribers", "messages", "type"))

@@ -48,9 +48,9 @@ class Command(BaseCommand):
 stream_type = 'public'
 print(f"{stream.name:>25}", end=' ')
 recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
-print("%10d" % (len(Subscription.objects.filter(recipient=recipient,
-active=True)),), end=' ')
+print("{:10}".format(len(Subscription.objects.filter(recipient=recipient,
+active=True))), end=' ')
 num_messages = len(Message.objects.filter(recipient=recipient))
-print("%12d" % (num_messages,), end=' ')
+print(f"{num_messages:12}", end=' ')
 print(f"{stream_type:>15}")
 print("")
@@ -32,11 +32,11 @@ class Command(BaseCommand):
 for realm in realms:
 print(realm.string_id)
 user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
-print("%d users" % (len(user_profiles),))
-print("%d streams" % (len(Stream.objects.filter(realm=realm)),))
+print(f"{len(user_profiles)} users")
+print(f"{len(Stream.objects.filter(realm=realm))} streams")

 for user_profile in user_profiles:
 print(f"{user_profile.email:>35}", end=' ')
 for week in range(10):
-print("%5d" % (self.messages_sent_by(user_profile, week),), end=' ')
+print(f"{self.messages_sent_by(user_profile, week):5}", end=' ')
 print("")
@@ -108,7 +108,7 @@ class Confirmation(models.Model):
 type: int = models.PositiveSmallIntegerField()

 def __str__(self) -> str:
-return '<Confirmation: %s>' % (self.content_object,)
+return f'<Confirmation: {self.content_object}>'

 class Meta:
 unique_together = ("type", "confirmation_key")
@@ -149,7 +149,7 @@ def normalize_fixture_data(decorated_function: CallableT,
 ('src', 24), ('invst', 26), ('acct', 16), ('rcpt', 31)]
 # We'll replace cus_D7OT2jf5YAtZQ2 with something like cus_NORMALIZED0001
 pattern_translations = {
-"%s_[A-Za-z0-9]{%d}" % (prefix, length): "%s_NORMALIZED%%0%dd" % (prefix, length - 10)
+f"{prefix}_[A-Za-z0-9]{{{length}}}": f"{prefix}_NORMALIZED%0{length - 10}d"
 for prefix, length in id_lengths
 }
 # We'll replace "invoice_prefix": "A35BC4Q" with something like "invoice_prefix": "NORMA01"
@@ -168,7 +168,7 @@ def normalize_fixture_data(decorated_function: CallableT,
 # Don't use (..) notation, since the matched timestamp can easily appear in other fields
 pattern_translations[
 f'"{timestamp_field}": 1[5-9][0-9]{{8}}(?![0-9-])'
-] = '"%s": 1%02d%%07d' % (timestamp_field, i+1)
+] = f'"{timestamp_field}": 1{i+1:02}%07d'

 normalized_values: Dict[str, Dict[str, str]] = {
 pattern: {} for pattern in pattern_translations.keys()
@@ -70,22 +70,22 @@ replay_diff = secondary_recv_offset - secondary_replay_offset

 # xlog segments are normally 16MB each. These thresholds are pretty arbitrary.
 if recv_diff > 5 * 16 * 1024**2:
-report('CRITICAL', 'secondary is %d bytes behind on receiving xlog' % (recv_diff,))
+report('CRITICAL', f'secondary is {recv_diff} bytes behind on receiving xlog')

 if replay_diff > 5 * 16 * 1024**2:
-report('CRITICAL', 'secondary is %d bytes behind on applying received xlog' % (replay_diff,))
+report('CRITICAL', f'secondary is {replay_diff} bytes behind on applying received xlog')

 if recv_diff < 0:
-report('CRITICAL', 'secondary is %d bytes ahead on receiving xlog' % (recv_diff,))
+report('CRITICAL', f'secondary is {recv_diff} bytes ahead on receiving xlog')

 if replay_diff < 0:
-report('CRITICAL', 'secondary is %d bytes ahead on applying received xlog' % (replay_diff,))
+report('CRITICAL', f'secondary is {replay_diff} bytes ahead on applying received xlog')

 if recv_diff > 16 * 1024**2:
-report('WARNING', 'secondary is %d bytes behind on receiving xlog' % (recv_diff,))
+report('WARNING', f'secondary is {recv_diff} bytes behind on receiving xlog')

 if replay_diff > 16 * 1024**2:
-report('WARNING', 'secondary is %d bytes behind on applying received xlog' % (replay_diff,))
+report('WARNING', f'secondary is {replay_diff} bytes behind on applying received xlog')

 report('OK', ('secondary is %d bytes behind on receiving and %d bytes behind on applying xlog'
 % (recv_diff, replay_diff)))
@@ -52,7 +52,7 @@ import netifaces

 def address_of(device_id: int) -> Optional[str]:
 try:
-return netifaces.ifaddresses("ens%i" % (device_id,))[netifaces.AF_INET][0]['addr']
+return netifaces.ifaddresses(f"ens{device_id}")[netifaces.AF_INET][0]['addr']
 except KeyError:
 return None

@@ -83,7 +83,7 @@ ifaces = [iface for iface in netifaces.interfaces() if ":" not in iface and ifac

 # Number of IDs should equal number of interfaces
 if len(ids) != len(ifaces):
-log.error("Metadata indicated %i interfaces but we have %i!" % (len(ids), len(ifaces)))
+log.error(f"Metadata indicated {len(ids)} interfaces but we have {len(ifaces)}!")
 sys.exit(1)

 for device in macs.values():
@@ -105,8 +105,8 @@ for device in macs.values():

 if address is None:
 # If the device was not autoconfigured, do so now.
-log.info("Device ens%i not configured, starting dhcpd" % (device_number,))
-subprocess.check_call(['/sbin/dhcpcd', 'ens%i' % (device_number,)])
+log.info(f"Device ens{device_number} not configured, starting dhcpd")
+subprocess.check_call(['/sbin/dhcpcd', f'ens{device_number}'])

 dev_num = str(device_number)
 address = address_of(device_number)
@@ -120,7 +120,7 @@ for device in macs.values():
 ['/sbin/ip', 'rule', 'add', 'fwmark', dev_num, 'table', dev_num])
 subprocess.check_call(
 ['/sbin/ip', 'route', 'add', '0.0.0.0/0', 'table', dev_num, 'dev',
-'ens%i' % (device_number,), 'via', gateway])
+f'ens{device_number}', 'via', gateway])
 subprocess.check_call(
 ['/sbin/iptables', '-t', 'mangle', '-A', 'OUTPUT', '-m', 'conntrack', '--ctorigdst',
 address, '-j', 'MARK', '--set-mark', dev_num])
@@ -129,7 +129,7 @@ for device in macs.values():

 for (count, ip) in enumerate(to_configure):
 # Configure the IP via a virtual interface
-device = "ens%i:%i" % (device_number, count)
+device = f"ens{device_number}:{count}"
 log.info(f"Configuring {device} with IP {ip}")
 subprocess.check_call(['/sbin/ifconfig', device, ip])
 subprocess.check_call(
@@ -12,7 +12,7 @@ elems = [tree.getroot().findall(

 for i in range(1, 100):
 # Prepare a modified SVG
-s = '%2d' % (i,)
+s = f'{i:2}'
 for e in elems:
 e.text = s
 with open('tmp.svg', 'wb') as out:
@@ -20,5 +20,5 @@ for i in range(1, 100):

 # Convert to PNG
 subprocess.check_call(['inkscape', '--without-gui', '--export-area-page',
-'--export-png=../../../static/images/favicon/favicon-%d.png' % (i,),
+f'--export-png=../../../static/images/favicon/favicon-{i}.png',
 'tmp.svg'])
@@ -157,12 +157,7 @@ def tokenize(text: str) -> List[Token]:
 continue
 except TokenizationException as e:
 raise FormattedException(
-'''%s at Line %d Col %d:"%s"''' % (
-e.message,
-state.line,
-state.col,
-e.line_content,
-),
+f'''{e.message} at Line {state.line} Col {state.col}:"{e.line_content}"''',
 )

 line_span = len(s.split('\n'))
@@ -223,13 +218,13 @@ def validate(fn: Optional[str] = None, text: Optional[str] = None, check_indent:
 self.matcher = func

 def no_start_tag(token: Token) -> None:
-raise TemplateParserException('''
+raise TemplateParserException(f'''
 No start tag
-fn: %s
+fn: {fn}
 end tag:
-%s
-line %d, col %d
-''' % (fn, token.tag, token.line, token.col))
+{token.tag}
+line {token.line}, col {token.col}
+''')

 state = State(no_start_tag)

@@ -261,16 +256,16 @@ def validate(fn: Optional[str] = None, text: Optional[str] = None, check_indent:
 if end_col != start_col:
 problem = 'Bad indentation.'
 if problem:
-raise TemplateParserException('''
-fn: %s
-%s
+raise TemplateParserException(f'''
+fn: {fn}
+{problem}
 start:
-%s
-line %d, col %d
+{start_token.s}
+line {start_line}, col {start_col}
 end tag:
-%s
-line %d, col %d
-''' % (fn, problem, start_token.s, start_line, start_col, end_tag, end_line, end_col))
+{end_tag}
+line {end_line}, col {end_col}
+''')
 state.matcher = old_matcher
 state.depth -= 1
 state.matcher = f
@@ -30,7 +30,7 @@ def renumber_migration(conflicts: List[str], order: List[int], last_correct_migr

 # Rename the migration indexing at the end
 new_name = conflicts[i-1].replace(conflicts[i-1][0:4],
-'%04d' % (int(last_correct_migration[0:4]) + 1,))
+f'{int(last_correct_migration[0:4]) + 1:04}')
 os.rename('zerver/migrations/' + conflicts[i-1], 'zerver/migrations/' + new_name)

 last_correct_migration = new_name.replace('.py', '')
@@ -37,14 +37,14 @@ def ensure_on_clean_master() -> None:
 run(['git', 'rebase', 'upstream/master'])

 def create_pull_branch(pull_id: int) -> None:
-run(['git', 'fetch', 'upstream', 'pull/%d/head' % (pull_id,)])
+run(['git', 'fetch', 'upstream', f'pull/{pull_id}/head'])
 run(['git', 'checkout', '-B', f'review-{pull_id}', 'FETCH_HEAD'])
 run(['git', 'rebase', 'upstream/master'])
 run(['git', 'log', 'upstream/master..', '--oneline'])
 run(['git', 'diff', 'upstream/master..', '--name-status'])

 print()
-print('PR: %d' % (pull_id,))
+print(f'PR: {pull_id}')
 print(subprocess.check_output(['git', 'log', 'HEAD~..',
 '--pretty=format:Author: %an']))

@@ -136,9 +136,9 @@ with open(pid_file_path, 'w+') as f:
 def server_processes() -> List[List[str]]:
 main_cmds = [
 ['./manage.py', 'runserver'] +
-manage_args + runserver_args + ['127.0.0.1:%d' % (django_port,)],
+manage_args + runserver_args + [f'127.0.0.1:{django_port}'],
 ['env', 'PYTHONUNBUFFERED=1', './manage.py', 'runtornado'] +
-manage_args + ['127.0.0.1:%d' % (tornado_port,)],
+manage_args + [f'127.0.0.1:{tornado_port}'],
 ]

 if options.streamlined:
@@ -84,5 +84,5 @@ for msg in result['messages']:
 filename = f"zulip-{options.stream}.json"
 with open(filename, 'wb') as f:
 f.write(json.dumps(messages, indent=0, sort_keys=False).encode('utf-8'))
-print("%d messages exported to %s" % (len(messages), filename))
+print(f"{len(messages)} messages exported to {filename}")
 sys.exit(0)
@ -235,7 +235,7 @@ def convert_gitter_workspace_messages(gitter_data: GitterDataT, output_dir: str,
|
|||
|
||||
message_json['zerver_message'] = zerver_message
|
||||
message_json['zerver_usermessage'] = zerver_usermessage
|
||||
message_filename = os.path.join(output_dir, "messages-%06d.json" % (dump_file_id,))
|
||||
message_filename = os.path.join(output_dir, f"messages-{dump_file_id:06}.json")
|
||||
logging.info("Writing Messages to %s\n", message_filename)
|
||||
write_data_to_file(os.path.join(message_filename), message_json)
|
||||
|
||||
|
|
|
@ -745,7 +745,7 @@ def process_raw_message_batch(realm_id: int,
|
|||
)
|
||||
|
||||
dump_file_id = NEXT_ID('dump_file_id')
|
||||
message_file = "/messages-%06d.json" % (dump_file_id,)
|
||||
message_file = f"/messages-{dump_file_id:06}.json"
|
||||
create_converted_data_files(message_json, output_dir, message_file)
|
||||
|
||||
def do_convert_data(input_tar_file: str,
|
||||
|
|
|
@ -390,7 +390,7 @@ def process_raw_message_batch(realm_id: int,
|
|||
)
|
||||
|
||||
dump_file_id = NEXT_ID('dump_file_id' + str(realm_id))
|
||||
message_file = "/messages-%06d.json" % (dump_file_id,)
|
||||
message_file = f"/messages-{dump_file_id:06}.json"
|
||||
create_converted_data_files(message_json, output_dir, message_file)
|
||||
|
||||
def process_posts(num_teams: int,
|
||||
|
|
|
@ -618,7 +618,7 @@ def convert_slack_workspace_messages(slack_data_dir: str, users: List[ZerverFiel
|
|||
zerver_message=zerver_message,
|
||||
zerver_usermessage=zerver_usermessage)
|
||||
|
||||
message_file = "/messages-%06d.json" % (dump_file_id,)
|
||||
message_file = f"/messages-{dump_file_id:06}.json"
|
||||
logging.info("Writing Messages to %s\n", output_dir + message_file)
|
||||
create_converted_data_files(message_json, output_dir, message_file)
|
||||
|
||||
|
|
|
@ -347,9 +347,7 @@ def notify_new_user(user_profile: UserProfile) -> None:
|
|||
sender,
|
||||
signup_notifications_stream,
|
||||
"signups",
|
||||
"@_**%s|%s** just signed up for Zulip. (total: %i)" % (
|
||||
user_profile.full_name, user_profile.id, user_count,
|
||||
),
|
||||
f"@_**{user_profile.full_name}|{user_profile.id}** just signed up for Zulip. (total: {user_count})",
|
||||
)
|
||||
|
||||
# We also send a notification to the Zulip administrative realm
|
||||
|
@ -362,11 +360,7 @@ def notify_new_user(user_profile: UserProfile) -> None:
|
|||
sender,
|
||||
signups_stream,
|
||||
user_profile.realm.display_subdomain,
|
||||
"%s <`%s`> just signed up for Zulip! (total: **%i**)" % (
|
||||
user_profile.full_name,
|
||||
user_profile.email,
|
||||
user_count,
|
||||
),
|
||||
f"{user_profile.full_name} <`{user_profile.email}`> just signed up for Zulip! (total: **{user_count}**)",
|
||||
)
|
||||
|
||||
except Stream.DoesNotExist:
|
||||
|
|
|
@ -38,7 +38,7 @@ def avatar_url_from_dict(userdict: Dict[str, Any], medium: bool=False) -> str:
|
|||
userdict['realm_id'],
|
||||
email=userdict['email'],
|
||||
medium=medium)
|
||||
url += '&version=%d' % (userdict['avatar_version'],)
|
||||
url += '&version={:d}'.format(userdict['avatar_version'])
|
||||
return url
|
||||
|
||||
def get_avatar_field(user_id: int,
|
||||
|
@ -87,12 +87,12 @@ def get_avatar_field(user_id: int,
|
|||
email=email,
|
||||
medium=medium,
|
||||
)
|
||||
url += '&version=%d' % (avatar_version,)
|
||||
url += f'&version={avatar_version:d}'
|
||||
return url
|
||||
|
||||
def get_gravatar_url(email: str, avatar_version: int, medium: bool=False) -> str:
|
||||
url = _get_unversioned_gravatar_url(email, medium)
|
||||
url += '&version=%d' % (avatar_version,)
|
||||
url += f'&version={avatar_version:d}'
|
||||
return url
|
||||
|
||||
def _get_unversioned_gravatar_url(email: str, medium: bool) -> str:
|
||||
|
|
|
@ -109,13 +109,13 @@ EMOJI_REGEX = r'(?P<syntax>:[\w\-\+]+:)'
|
|||
|
||||
def verbose_compile(pattern: str) -> Any:
|
||||
return re.compile(
|
||||
"^(.*?)%s(.*?)$" % (pattern,),
|
||||
f"^(.*?){pattern}(.*?)$",
|
||||
re.DOTALL | re.UNICODE | re.VERBOSE,
|
||||
)
|
||||
|
||||
def normal_compile(pattern: str) -> Any:
|
||||
return re.compile(
|
||||
r"^(.*?)%s(.*)$" % (pattern,),
|
||||
fr"^(.*?){pattern}(.*)$",
|
||||
re.DOTALL | re.UNICODE,
|
||||
)
|
||||
|
||||
|
@ -178,7 +178,7 @@ def get_web_link_regex() -> str:
|
|||
nested_paren_chunk = nested_paren_chunk % (inner_paren_contents,)
|
||||
|
||||
file_links = r"| (?:file://(/[^/ ]*)+/?)" if settings.ENABLE_FILE_LINKS else r""
|
||||
REGEX = r"""
|
||||
REGEX = fr"""
|
||||
(?<![^\s'"\(,:<]) # Start after whitespace or specified chars
|
||||
# (Double-negative lookbehind to allow start-of-string)
|
||||
(?P<url> # Main group
|
||||
|
@ -186,21 +186,21 @@ def get_web_link_regex() -> str:
|
|||
https?://[\w.:@-]+? # If it has a protocol, anything goes.
|
||||
|(?: # Or, if not, be more strict to avoid false-positives
|
||||
(?:[\w-]+\.)+ # One or more domain components, separated by dots
|
||||
(?:%s) # TLDs (filled in via format from tlds-alpha-by-domain.txt)
|
||||
(?:{tlds}) # TLDs (filled in via format from tlds-alpha-by-domain.txt)
|
||||
)
|
||||
)
|
||||
(?:/ # A path, beginning with /
|
||||
%s # zero-to-6 sets of paired parens
|
||||
{nested_paren_chunk} # zero-to-6 sets of paired parens
|
||||
)?) # Path is optional
|
||||
| (?:[\w.-]+\@[\w.-]+\.[\w]+) # Email is separate, since it can't have a path
|
||||
%s # File path start with file:///, enable by setting ENABLE_FILE_LINKS=True
|
||||
| (?:bitcoin:[13][a-km-zA-HJ-NP-Z1-9]{25,34}) # Bitcoin address pattern, see https://mokagio.github.io/tech-journal/2014/11/21/regex-bitcoin.html
|
||||
{file_links} # File path start with file:///, enable by setting ENABLE_FILE_LINKS=True
|
||||
| (?:bitcoin:[13][a-km-zA-HJ-NP-Z1-9]{{25,34}}) # Bitcoin address pattern, see https://mokagio.github.io/tech-journal/2014/11/21/regex-bitcoin.html
|
||||
)
|
||||
(?= # URL must be followed by (not included in group)
|
||||
[!:;\?\),\.\'\"\>]* # Optional punctuation characters
|
||||
(?:\Z|\s) # followed by whitespace or end of string
|
||||
)
|
||||
""" % (tlds, nested_paren_chunk, file_links)
|
||||
"""
|
||||
LINK_REGEX = verbose_compile(REGEX)
|
||||
return LINK_REGEX
|
||||
|
||||
|
@ -527,7 +527,7 @@ class BacktickPattern(markdown.inlinepatterns.Pattern):
|
|||
|
||||
def __init__(self, pattern: str) -> None:
|
||||
markdown.inlinepatterns.Pattern.__init__(self, pattern)
|
||||
self.ESCAPED_BSLASH = '%s%s%s' % (markdown.util.STX, ord('\\'), markdown.util.ETX)
|
||||
self.ESCAPED_BSLASH = '{}{}{}'.format(markdown.util.STX, ord('\\'), markdown.util.ETX)
|
||||
self.tag = 'code'
|
||||
|
||||
def handleMatch(self, m: Match[str]) -> Union[str, Element]:
|
||||
|
@ -787,7 +787,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
|
|||
yt_id = self.youtube_id(url)
|
||||
|
||||
if yt_id is not None:
|
||||
return "https://i.ytimg.com/vi/%s/default.jpg" % (yt_id,)
|
||||
return f"https://i.ytimg.com/vi/{yt_id}/default.jpg"
|
||||
return None
|
||||
|
||||
def vimeo_id(self, url: str) -> Optional[str]:
|
||||
|
@ -953,7 +953,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
|
|||
tweet.append(p)
|
||||
|
||||
span = SubElement(tweet, 'span')
|
||||
span.text = "- %s (@%s)" % (user['name'], user['screen_name'])
|
||||
span.text = "- {} (@{})".format(user['name'], user['screen_name'])
|
||||
|
||||
# Add image previews
|
||||
for media_item in media:
|
||||
|
@ -970,7 +970,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
|
|||
if size['h'] < self.TWITTER_MAX_IMAGE_HEIGHT:
|
||||
break
|
||||
|
||||
media_url = '%s:%s' % (media_item['media_url_https'], size_name)
|
||||
media_url = '{}:{}'.format(media_item['media_url_https'], size_name)
|
||||
img_div = SubElement(tweet, 'div')
|
||||
img_div.set('class', 'twitter-image')
|
||||
img_a = SubElement(img_div, 'a')
|
||||
|
@ -1296,7 +1296,7 @@ def make_emoji(codepoint: str, display_string: str) -> Element:
|
|||
# Replace underscore in emoji's title with space
|
||||
title = display_string[1:-1].replace("_", " ")
|
||||
span = Element('span')
|
||||
span.set('class', 'emoji emoji-%s' % (codepoint,))
|
||||
span.set('class', f'emoji emoji-{codepoint}')
|
||||
span.set('title', title)
|
||||
span.set('role', 'img')
|
||||
span.set('aria-label', title)
|
||||
|
@ -1588,7 +1588,7 @@ def prepare_realm_pattern(source: str) -> str:
|
|||
whitespace, or opening delimiters, won't match if there are word
|
||||
characters directly after, and saves what was matched as
|
||||
OUTER_CAPTURE_GROUP."""
|
||||
return r"""(?<![^\s'"\(,:<])(?P<%s>%s)(?!\w)""" % (OUTER_CAPTURE_GROUP, source)
|
||||
return fr"""(?<![^\s'"\(,:<])(?P<{OUTER_CAPTURE_GROUP}>{source})(?!\w)"""
|
||||
|
||||
# Given a regular expression pattern, linkifies groups that match it
|
||||
# using the provided format string to construct the URL.
|
||||
|
@ -1643,7 +1643,7 @@ class UserMentionPattern(markdown.inlinepatterns.Pattern):
|
|||
|
||||
el = Element("span")
|
||||
el.set('data-user-id', user_id)
|
||||
text = "%s" % (name,)
|
||||
text = f"{name}"
|
||||
if silent:
|
||||
el.set('class', 'user-mention silent')
|
||||
else:
|
||||
|
@ -1673,7 +1673,7 @@ class UserGroupMentionPattern(markdown.inlinepatterns.Pattern):
|
|||
el = Element("span")
|
||||
el.set('class', 'user-group-mention')
|
||||
el.set('data-user-group-id', user_group_id)
|
||||
text = "@%s" % (name,)
|
||||
text = f"@{name}"
|
||||
el.text = markdown.util.AtomicString(text)
|
||||
return el
|
||||
return None
|
||||
|
@ -1829,7 +1829,7 @@ class Bugdown(markdown.Markdown):
|
|||
# define default configs
|
||||
self.config = {
|
||||
"realm_filters": [kwargs['realm_filters'],
|
||||
"Realm-specific filters for realm_filters_key %s" % (kwargs['realm'],)],
|
||||
"Realm-specific filters for realm_filters_key {}".format(kwargs['realm'])],
|
||||
"realm": [kwargs['realm'], "Realm id"],
|
||||
"code_block_processor_disabled": [kwargs['code_block_processor_disabled'],
|
||||
"Disabled for email gateway"],
|
||||
|
@ -1953,7 +1953,7 @@ class Bugdown(markdown.Markdown):
|
|||
def register_realm_filters(self, inlinePatterns: markdown.util.Registry) -> markdown.util.Registry:
|
||||
for (pattern, format_string, id) in self.getConfig("realm_filters"):
|
||||
inlinePatterns.register(RealmFilterPattern(pattern, format_string, self),
|
||||
'realm_filters/%s' % (pattern,), 45)
|
||||
f'realm_filters/{pattern}', 45)
|
||||
return inlinePatterns
|
||||
|
||||
def build_treeprocessors(self) -> markdown.util.Registry:
|
||||
|
|
|
@ -421,7 +421,7 @@ def preview_url_cache_key(url: str) -> str:
|
|||
return f"preview_url:{make_safe_digest(url)}"
|
||||
|
||||
def display_recipient_cache_key(recipient_id: int) -> str:
|
||||
return "display_recipient_dict:%d" % (recipient_id,)
|
||||
return f"display_recipient_dict:{recipient_id}"
|
||||
|
||||
def display_recipient_bulk_get_users_by_id_cache_key(user_id: int) -> str:
|
||||
# Cache key function for a function for bulk fetching users, used internally
|
||||
|
@ -612,7 +612,7 @@ def flush_used_upload_space_cache(sender: Any, **kwargs: Any) -> None:
|
|||
cache_delete(get_realm_used_upload_space_cache_key(attachment.owner.realm))
|
||||
|
||||
def to_dict_cache_key_id(message_id: int) -> str:
|
||||
return 'message_dict:%d' % (message_id,)
|
||||
return f'message_dict:{message_id}'
|
||||
|
||||
def to_dict_cache_key(message: 'Message', realm_id: Optional[int]=None) -> str:
|
||||
return to_dict_cache_key_id(message.id)
|
||||
|
|
|
@ -16,10 +16,10 @@ def generate_dev_ldap_dir(mode: str, num_users: int=8) -> Dict[str, Dict[str, An
|
|||
mode = mode.lower()
|
||||
ldap_data = []
|
||||
for i in range(1, num_users+1):
|
||||
name = 'LDAP User %d' % (i,)
|
||||
email = 'ldapuser%d@zulip.com' % (i,)
|
||||
phone_number = '999999999%d' % (i,)
|
||||
birthdate = '19%02d-%02d-%02d' % (i, i, i)
|
||||
name = f'LDAP User {i}'
|
||||
email = f'ldapuser{i}@zulip.com'
|
||||
phone_number = f'999999999{i}'
|
||||
birthdate = f'19{i:02}-{i:02}-{i:02}'
|
||||
ldap_data.append((name, email, phone_number, birthdate))
|
||||
|
||||
profile_images = [open(path, "rb").read() for path in
|
||||
|
|
|
@ -1130,7 +1130,7 @@ def write_message_partial_for_query(realm: Realm, message_query: Any, dump_file_
|
|||
break
|
||||
|
||||
# Figure out the name of our shard file.
|
||||
message_filename = os.path.join(output_dir, "messages-%06d.json" % (dump_file_id,))
|
||||
message_filename = os.path.join(output_dir, f"messages-{dump_file_id:06}.json")
|
||||
message_filename += '.partial'
|
||||
logging.info("Fetched Messages for %s", message_filename)
|
||||
|
||||
|
@ -1479,7 +1479,7 @@ def do_write_stats_file_for_realm_export(output_dir: Path) -> None:
|
|||
with open(fn) as filename:
|
||||
data = ujson.load(filename)
|
||||
for k in sorted(data):
|
||||
f.write('%5d %s\n' % (len(data[k]), k))
|
||||
f.write(f'{len(data[k]):5} {k}\n')
|
||||
f.write('\n')
|
||||
|
||||
avatar_file = os.path.join(output_dir, 'avatars/records.json')
|
||||
|
@ -1489,7 +1489,7 @@ def do_write_stats_file_for_realm_export(output_dir: Path) -> None:
|
|||
f.write(fn+'\n')
|
||||
with open(fn) as filename:
|
||||
data = ujson.load(filename)
|
||||
f.write('%5d records\n' % (len(data),))
|
||||
f.write(f'{len(data):5} records\n')
|
||||
f.write('\n')
|
||||
|
||||
def do_export_realm(realm: Realm, output_dir: Path, threads: int,
|
||||
|
@ -1712,7 +1712,7 @@ def export_messages_single_user(user_profile: UserProfile, output_dir: Path,
|
|||
item['display_recipient'] = get_display_recipient(user_message.message.recipient)
|
||||
message_chunk.append(item)
|
||||
|
||||
message_filename = os.path.join(output_dir, "messages-%06d.json" % (dump_file_id,))
|
||||
message_filename = os.path.join(output_dir, f"messages-{dump_file_id:06}.json")
|
||||
logging.info("Fetched Messages for %s", message_filename)
|
||||
|
||||
output = {'zerver_message': message_chunk}
|
||||
|
|
|
@ -1169,7 +1169,7 @@ def get_incoming_message_ids(import_dir: Path,
|
|||
|
||||
dump_file_id = 1
|
||||
while True:
|
||||
message_filename = os.path.join(import_dir, "messages-%06d.json" % (dump_file_id,))
|
||||
message_filename = os.path.join(import_dir, f"messages-{dump_file_id:06}.json")
|
||||
if not os.path.exists(message_filename):
|
||||
break
|
||||
|
||||
|
@ -1212,7 +1212,7 @@ def import_message_data(realm: Realm,
|
|||
import_dir: Path) -> None:
|
||||
dump_file_id = 1
|
||||
while True:
|
||||
message_filename = os.path.join(import_dir, "messages-%06d.json" % (dump_file_id,))
|
||||
message_filename = os.path.join(import_dir, f"messages-{dump_file_id:06}.json")
|
||||
if not os.path.exists(message_filename):
|
||||
break
|
||||
|
||||
|
|
|
@ -310,7 +310,7 @@ def do_rest_call(base_url: str,
|
|||
{'message_url': get_message_url(event),
|
||||
'status_code': response.status_code,
|
||||
'response': response.content})
|
||||
failure_message = "Third party responded with %d" % (response.status_code,)
|
||||
failure_message = f"Third party responded with {response.status_code}"
|
||||
fail_with_message(event, failure_message)
|
||||
notify_bot_owner(event, response.status_code, response.content)
|
||||
|
||||
|
|
|
@ -198,7 +198,7 @@ def check_list(sub_validator: Optional[Validator], length: Optional[int]=None) -
|
|||
|
||||
if sub_validator:
|
||||
for i, item in enumerate(val):
|
||||
vname = '%s[%d]' % (var_name, i)
|
||||
vname = f'{var_name}[{i}]'
|
||||
error = sub_validator(vname, item)
|
||||
if error:
|
||||
return error
|
||||
|
|
|
@ -25,7 +25,7 @@ class Command(ZulipBaseCommand):
|
|||
owner_detail = ""
|
||||
if user.id in owner_user_ids:
|
||||
owner_detail = " [owner]"
|
||||
print(' %s (%s)%s' % (user.delivery_email, user.full_name, owner_detail))
|
||||
print(f' {user.delivery_email} ({user.full_name}){owner_detail}')
|
||||
|
||||
else:
|
||||
raise CommandError('There are no admins for this realm!')
|
||||
|
|
|
@ -813,7 +813,7 @@ class Recipient(models.Model):
|
|||
|
||||
def __str__(self) -> str:
|
||||
display_recipient = get_display_recipient(self)
|
||||
return "<Recipient: %s (%d, %s)>" % (display_recipient, self.type_id, self.type)
|
||||
return f"<Recipient: {display_recipient} ({self.type_id}, {self.type})>"
|
||||
|
||||
class UserProfile(AbstractBaseUser, PermissionsMixin):
|
||||
USERNAME_FIELD = 'email'
|
||||
|
@ -2846,7 +2846,7 @@ class CustomProfileField(models.Model):
|
|||
return False
|
||||
|
||||
def __str__(self) -> str:
|
||||
return "<CustomProfileField: %s %s %s %d>" % (self.realm, self.name, self.field_type, self.order)
|
||||
return f"<CustomProfileField: {self.realm} {self.name} {self.field_type} {self.order}>"
|
||||
|
||||
def custom_profile_fields_for_realm(realm_id: int) -> List[CustomProfileField]:
|
||||
return CustomProfileField.objects.filter(realm=realm_id).order_by('order')
|
||||
|
|
|
@ -26,8 +26,9 @@ register = Library()
|
|||
def and_n_others(values: List[str], limit: int) -> str:
|
||||
# A helper for the commonly appended "and N other(s)" string, with
|
||||
# the appropriate pluralization.
|
||||
return " and %d other%s" % (len(values) - limit,
|
||||
"" if len(values) == limit + 1 else "s")
|
||||
return " and {} other{}".format(
|
||||
len(values) - limit, "" if len(values) == limit + 1 else "s",
|
||||
)
|
||||
|
||||
@register.filter(name='display_list', is_safe=True)
|
||||
def display_list(values: List[str], display_limit: int) -> str:
|
||||
|
|
|
@ -139,8 +139,8 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
|
|||
|
||||
num_bots = 3
|
||||
for i in range(num_bots):
|
||||
full_name = 'Bot %d' % (i,)
|
||||
short_name = 'bot-%d' % (i,)
|
||||
full_name = f'Bot {i}'
|
||||
short_name = f'bot-{i}'
|
||||
bot_info = dict(
|
||||
full_name=full_name,
|
||||
short_name=short_name,
|
||||
|
|
|
@ -371,7 +371,7 @@ class BugdownTest(ZulipTestCase):
|
|||
# We do not want any ignored tests to be committed and merged.
|
||||
format_tests, linkify_tests = self.load_bugdown_tests()
|
||||
for name, test in format_tests.items():
|
||||
message = 'Test "%s" shouldn\'t be ignored.' % (name,)
|
||||
message = f'Test "{name}" shouldn\'t be ignored.'
|
||||
is_ignored = test.get('ignore', False)
|
||||
self.assertFalse(is_ignored, message)
|
||||
|
||||
|
@ -409,7 +409,7 @@ class BugdownTest(ZulipTestCase):
|
|||
href = 'mailto:' + url
|
||||
else:
|
||||
href = 'http://' + url
|
||||
return payload % ("<a href=\"%s\">%s</a>" % (href, url),)
|
||||
return payload % (f"<a href=\"{href}\">{url}</a>",)
|
||||
|
||||
print("Running Bugdown Linkify tests")
|
||||
with mock.patch('zerver.lib.url_preview.preview.link_embed_data_from_cache', return_value=None):
|
||||
|
@ -722,7 +722,7 @@ class BugdownTest(ZulipTestCase):
|
|||
|
||||
def test_inline_interesting_links(self) -> None:
|
||||
def make_link(url: str) -> str:
|
||||
return '<a href="%s">%s</a>' % (url, url)
|
||||
return f'<a href="{url}">{url}</a>'
|
||||
|
||||
normal_tweet_html = ('<a href="https://twitter.com/Twitter"'
|
||||
'>@Twitter</a> '
|
||||
|
@ -759,41 +759,41 @@ class BugdownTest(ZulipTestCase):
|
|||
|
||||
msg = 'http://www.twitter.com'
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s</p>' % (make_link('http://www.twitter.com'),))
|
||||
self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com')))
|
||||
|
||||
msg = 'http://www.twitter.com/wdaher/'
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s</p>' % (make_link('http://www.twitter.com/wdaher/'),))
|
||||
self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/')))
|
||||
|
||||
msg = 'http://www.twitter.com/wdaher/status/3'
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s</p>' % (make_link('http://www.twitter.com/wdaher/status/3'),))
|
||||
self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/status/3')))
|
||||
|
||||
# id too long
|
||||
msg = 'http://www.twitter.com/wdaher/status/2879779692873154569'
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s</p>' % (make_link('http://www.twitter.com/wdaher/status/2879779692873154569'),))
|
||||
self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/status/2879779692873154569')))
|
||||
|
||||
# id too large (i.e. tweet doesn't exist)
|
||||
msg = 'http://www.twitter.com/wdaher/status/999999999999999999'
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s</p>' % (make_link('http://www.twitter.com/wdaher/status/999999999999999999'),))
|
||||
self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/status/999999999999999999')))
|
||||
|
||||
msg = 'http://www.twitter.com/wdaher/status/287977969287315456'
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s</p>\n%s' % (
|
||||
self.assertEqual(converted, '<p>{}</p>\n{}'.format(
|
||||
make_link('http://www.twitter.com/wdaher/status/287977969287315456'),
|
||||
make_inline_twitter_preview('http://www.twitter.com/wdaher/status/287977969287315456', normal_tweet_html)))
|
||||
|
||||
msg = 'https://www.twitter.com/wdaher/status/287977969287315456'
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s</p>\n%s' % (
|
||||
self.assertEqual(converted, '<p>{}</p>\n{}'.format(
|
||||
make_link('https://www.twitter.com/wdaher/status/287977969287315456'),
|
||||
make_inline_twitter_preview('https://www.twitter.com/wdaher/status/287977969287315456', normal_tweet_html)))
|
||||
|
||||
msg = 'http://twitter.com/wdaher/status/287977969287315456'
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s</p>\n%s' % (
|
||||
self.assertEqual(converted, '<p>{}</p>\n{}'.format(
|
||||
make_link('http://twitter.com/wdaher/status/287977969287315456'),
|
||||
make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315456', normal_tweet_html)))
|
||||
|
||||
|
@ -803,7 +803,7 @@ class BugdownTest(ZulipTestCase):
|
|||
'http://twitter.com/wdaher/status/287977969287315457 '
|
||||
'http://twitter.com/wdaher/status/287977969287315457')
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s %s %s %s</p>\n%s%s' % (
|
||||
self.assertEqual(converted, '<p>{} {} {} {}</p>\n{}{}'.format(
|
||||
make_link('http://twitter.com/wdaher/status/287977969287315456'),
|
||||
make_link('http://twitter.com/wdaher/status/287977969287315457'),
|
||||
make_link('http://twitter.com/wdaher/status/287977969287315457'),
|
||||
|
@ -817,7 +817,7 @@ class BugdownTest(ZulipTestCase):
|
|||
'https://twitter.com/wdaher/status/287977969287315456 '
|
||||
'http://twitter.com/wdaher/status/287977969287315460')
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s %s %s %s</p>\n%s%s%s' % (
|
||||
self.assertEqual(converted, '<p>{} {} {} {}</p>\n{}{}{}'.format(
|
||||
make_link('http://twitter.com/wdaher/status/287977969287315456'),
|
||||
make_link('http://twitter.com/wdaher/status/287977969287315457'),
|
||||
make_link('https://twitter.com/wdaher/status/287977969287315456'),
|
||||
|
@ -830,7 +830,7 @@ class BugdownTest(ZulipTestCase):
|
|||
msg = 'http://twitter.com/wdaher/status/287977969287315458'
|
||||
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s</p>\n%s' % (
|
||||
self.assertEqual(converted, '<p>{}</p>\n{}'.format(
|
||||
make_link('http://twitter.com/wdaher/status/287977969287315458'),
|
||||
make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315458', mention_in_link_tweet_html)))
|
||||
|
||||
|
@ -838,7 +838,7 @@ class BugdownTest(ZulipTestCase):
|
|||
msg = 'http://twitter.com/wdaher/status/287977969287315459'
|
||||
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s</p>\n%s' % (
|
||||
self.assertEqual(converted, '<p>{}</p>\n{}'.format(
|
||||
make_link('http://twitter.com/wdaher/status/287977969287315459'),
|
||||
make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315459',
|
||||
media_tweet_html,
|
||||
|
@ -850,7 +850,7 @@ class BugdownTest(ZulipTestCase):
|
|||
|
||||
msg = 'http://twitter.com/wdaher/status/287977969287315460'
|
||||
converted = bugdown_convert(msg)
|
||||
self.assertEqual(converted, '<p>%s</p>\n%s' % (
|
||||
self.assertEqual(converted, '<p>{}</p>\n{}'.format(
|
||||
make_link('http://twitter.com/wdaher/status/287977969287315460'),
|
||||
make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315460', emoji_in_tweet_html)))
|
||||
|
||||
|
@ -870,7 +870,7 @@ class BugdownTest(ZulipTestCase):
|
|||
|
||||
def test_realm_emoji(self) -> None:
|
||||
def emoji_img(name: str, file_name: str, realm_id: int) -> str:
|
||||
return '<img alt="%s" class="emoji" src="%s" title="%s">' % (
|
||||
return '<img alt="{}" class="emoji" src="{}" title="{}">'.format(
|
||||
name, get_emoji_url(file_name, realm_id), name[1:-1].replace("_", " "))
|
||||
|
||||
realm = get_realm('zulip')
|
||||
|
@ -881,7 +881,7 @@ class BugdownTest(ZulipTestCase):
|
|||
realm_emoji = RealmEmoji.objects.filter(realm=realm,
|
||||
name='green_tick',
|
||||
deactivated=False).get()
|
||||
self.assertEqual(converted, '<p>%s</p>' % (emoji_img(':green_tick:', realm_emoji.file_name, realm.id),))
|
||||
self.assertEqual(converted, '<p>{}</p>'.format(emoji_img(':green_tick:', realm_emoji.file_name, realm.id)))
|
||||
|
||||
# Deactivate realm emoji.
|
||||
do_remove_realm_emoji(realm, 'green_tick')
|
||||
|
@ -2041,7 +2041,7 @@ class BugdownApiTests(ZulipTestCase):
|
|||
user_id = self.example_user('hamlet').id
|
||||
stream_id = get_stream('Denmark', get_realm('zulip')).id
|
||||
self.assertEqual(result.json()['rendered'],
|
||||
'<p>This mentions <a class="stream" data-stream-id="%s" href="/#narrow/stream/%s-Denmark">#Denmark</a> and <span class="user-mention" data-user-id="%s">@King Hamlet</span>.</p>' % (stream_id, stream_id, user_id))
|
||||
f'<p>This mentions <a class="stream" data-stream-id="{stream_id}" href="/#narrow/stream/{stream_id}-Denmark">#Denmark</a> and <span class="user-mention" data-user-id="{user_id}">@King Hamlet</span>.</p>')
|
||||
|
||||
class BugdownErrorTests(ZulipTestCase):
|
||||
def test_bugdown_error_handling(self) -> None:
|
||||
|
|
|
@ -314,7 +314,7 @@ class DeleteCustomProfileFieldTest(CustomProfileFieldTestCase):
|
|||
'data': ujson.dumps([invalid_field_id]),
|
||||
})
|
||||
self.assert_json_error(result,
|
||||
'Field id %d not found.' % (invalid_field_id,))
|
||||
f'Field id {invalid_field_id} not found.')
|
||||
|
||||
field = CustomProfileField.objects.get(name="Mentor", realm=realm)
|
||||
data: List[Dict[str, Union[int, str, List[int]]]] = [
|
||||
|
|
|
@ -531,15 +531,15 @@ class HomeTest(ZulipTestCase):
|
|||
for i in range(3):
|
||||
bots[i] = self.create_bot(
|
||||
owner=hamlet,
|
||||
bot_email='bot-%d@zulip.com' % (i,),
|
||||
bot_name='Bot %d' % (i,),
|
||||
bot_email=f'bot-{i}@zulip.com',
|
||||
bot_name=f'Bot {i}',
|
||||
)
|
||||
|
||||
for i in range(3):
|
||||
defunct_user = self.create_non_active_user(
|
||||
realm=realm,
|
||||
email='defunct-%d@zulip.com' % (i,),
|
||||
name='Defunct User %d' % (i,),
|
||||
email=f'defunct-{i}@zulip.com',
|
||||
name=f'Defunct User {i}',
|
||||
)
|
||||
|
||||
result = self._get_home_page()
|
||||
|
|
|
@ -65,8 +65,7 @@ class TranslationTestCase(ZulipTestCase):
|
|||
def fetch(self, method: str, url: str, expected_status: int, **kwargs: Any) -> HttpResponse:
|
||||
response = getattr(self.client, method)(url, **kwargs)
|
||||
self.assertEqual(response.status_code, expected_status,
|
||||
msg="Expected %d, received %d for %s to %s" % (
|
||||
expected_status, response.status_code, method, url))
|
||||
msg=f"Expected {expected_status}, received {response.status_code} for {method} to {url}")
|
||||
return response
|
||||
|
||||
def test_accept_language_header(self) -> None:
|
||||
|
|
|
@ -142,7 +142,7 @@ class TopicHistoryTest(ZulipTestCase):
|
|||
# that the new topic is not accessible
|
||||
self.login_user(user_profile)
|
||||
self.subscribe(user_profile, stream_name)
|
||||
endpoint = '/json/users/me/%d/topics' % (stream.id,)
|
||||
endpoint = f'/json/users/me/{stream.id}/topics'
|
||||
result = self.client_get(endpoint, dict(), subdomain="zephyr")
|
||||
self.assert_json_success(result)
|
||||
history = result.json()['topics']
|
||||
|
@ -196,7 +196,7 @@ class TopicHistoryTest(ZulipTestCase):
|
|||
topic1_msg_id = create_test_message('topic1')
|
||||
topic0_msg_id = create_test_message('topic0')
|
||||
|
||||
endpoint = '/json/users/me/%d/topics' % (stream.id,)
|
||||
endpoint = f'/json/users/me/{stream.id}/topics'
|
||||
result = self.client_get(endpoint, dict())
|
||||
self.assert_json_success(result)
|
||||
history = result.json()['topics']
|
||||
|
@ -968,7 +968,7 @@ class StreamMessagesTest(ZulipTestCase):
|
|||
# Make every other user be idle.
|
||||
long_term_idle = i % 2 > 0
|
||||
|
||||
email = 'foo%d@example.com' % (i,)
|
||||
email = f'foo{i}@example.com'
|
||||
user = UserProfile.objects.create(
|
||||
realm=realm,
|
||||
email=email,
|
||||
|
@ -1380,7 +1380,7 @@ class MessageDictTest(ZulipTestCase):
|
|||
message = Message(
|
||||
sender=sender,
|
||||
recipient=recipient,
|
||||
content='whatever %d' % (i,),
|
||||
content=f'whatever {i}',
|
||||
rendered_content='DOES NOT MATTER',
|
||||
rendered_content_version=bugdown.version,
|
||||
date_sent=timezone_now(),
|
||||
|
@ -1573,7 +1573,7 @@ class SewMessageAndReactionTest(ZulipTestCase):
|
|||
message = Message(
|
||||
sender=sender,
|
||||
recipient=recipient,
|
||||
content='whatever %d' % (i,),
|
||||
content=f'whatever {i}',
|
||||
date_sent=timezone_now(),
|
||||
sending_client=sending_client,
|
||||
last_edit_time=timezone_now(),
|
||||
|
@ -3511,7 +3511,7 @@ class EditMessageTest(ZulipTestCase):
|
|||
messages = get_topic_messages(user_profile, new_stream, "test")
|
||||
self.assertEqual(len(messages), 3)
|
||||
self.assertEqual(messages[0].id, msg_id_later)
|
||||
self.assertEqual(messages[2].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%d**" % (user_profile.id,))
|
||||
self.assertEqual(messages[2].content, f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**")
|
||||
|
||||
def test_move_message_to_stream_no_allowed(self) -> None:
|
||||
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
||||
|
@ -3609,7 +3609,7 @@ class EditMessageTest(ZulipTestCase):
|
|||
|
||||
messages = get_topic_messages(user_profile, new_stream, "test")
|
||||
self.assertEqual(len(messages), 4)
|
||||
self.assertEqual(messages[3].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%d**" % (user_profile.id,))
|
||||
self.assertEqual(messages[3].content, f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**")
|
||||
|
||||
def test_notify_old_thread_move_message_to_stream(self) -> None:
|
||||
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
|
||||
|
|
|
@ -2528,13 +2528,9 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
self.assertNotIn(f'AND message_id = {LARGER_THAN_MAX_MESSAGE_ID}', sql)
|
||||
self.assertIn('ORDER BY message_id ASC', sql)
|
||||
|
||||
cond = 'WHERE user_profile_id = %d AND message_id >= %d' % (
|
||||
user_profile.id, first_unread_message_id,
|
||||
)
|
||||
cond = f'WHERE user_profile_id = {user_profile.id} AND message_id >= {first_unread_message_id}'
|
||||
self.assertIn(cond, sql)
|
||||
cond = 'WHERE user_profile_id = %d AND message_id <= %d' % (
|
||||
user_profile.id, first_unread_message_id - 1,
|
||||
)
|
||||
cond = f'WHERE user_profile_id = {user_profile.id} AND message_id <= {first_unread_message_id - 1}'
|
||||
self.assertIn(cond, sql)
|
||||
self.assertIn('UNION', sql)
|
||||
|
||||
|
@ -2575,13 +2571,9 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
sql = queries[0]['sql']
|
||||
self.assertNotIn(f'AND message_id = {LARGER_THAN_MAX_MESSAGE_ID}', sql)
|
||||
self.assertIn('ORDER BY message_id ASC', sql)
|
||||
cond = 'WHERE user_profile_id = %d AND message_id <= %d' % (
|
||||
user_profile.id, first_unread_message_id - 1,
|
||||
)
|
||||
cond = f'WHERE user_profile_id = {user_profile.id} AND message_id <= {first_unread_message_id - 1}'
|
||||
self.assertIn(cond, sql)
|
||||
cond = 'WHERE user_profile_id = %d AND message_id >= %d' % (
|
||||
user_profile.id, first_visible_message_id,
|
||||
)
|
||||
cond = f'WHERE user_profile_id = {user_profile.id} AND message_id >= {first_visible_message_id}'
|
||||
self.assertIn(cond, sql)
|
||||
|
||||
def test_use_first_unread_anchor_with_no_unread_messages(self) -> None:
|
||||
|
@ -2664,7 +2656,7 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
# the `message_id = LARGER_THAN_MAX_MESSAGE_ID` hack.
|
||||
queries = [q for q in all_queries if '/* get_messages */' in q['sql']]
|
||||
self.assertEqual(len(queries), 1)
|
||||
self.assertIn('AND zerver_message.id = %d' % (LARGER_THAN_MAX_MESSAGE_ID,),
|
||||
self.assertIn(f'AND zerver_message.id = {LARGER_THAN_MAX_MESSAGE_ID}',
|
||||
queries[0]['sql'])
|
||||
|
||||
def test_exclude_muting_conditions(self) -> None:
|
||||
|
|
|
@ -191,4 +191,4 @@ class TestNotifyNewUser(ZulipTestCase):
|
|||
self.assertEqual(message.recipient.type, Recipient.STREAM)
|
||||
actual_stream = Stream.objects.get(id=message.recipient.type_id)
|
||||
self.assertEqual(actual_stream.name, Realm.INITIAL_PRIVATE_STREAM_NAME)
|
||||
self.assertIn('@_**Cordelia Lear|%d** just signed up for Zulip.' % (new_user.id,), message.content)
|
||||
self.assertIn(f'@_**Cordelia Lear|{new_user.id}** just signed up for Zulip.', message.content)
|
||||
|
|
|
@ -827,7 +827,7 @@ class InviteUserTest(InviteUserBase):
|
|||
daily_counts = [(1, max_daily_count)]
|
||||
|
||||
invite_emails = [
|
||||
'foo-%02d@zulip.com' % (i,)
|
||||
f'foo-{i:02}@zulip.com'
|
||||
for i in range(num_invitees)
|
||||
]
|
||||
invitees = ','.join(invite_emails)
|
||||
|
@ -3842,7 +3842,7 @@ class UserSignUpTest(InviteUserBase):
|
|||
"""Verify that /devtools/register_user creates a new user, logs them
|
||||
in, and redirects to the logged-in app."""
|
||||
count = UserProfile.objects.count()
|
||||
email = "user-%d@zulip.com" % (count,)
|
||||
email = f"user-{count}@zulip.com"
|
||||
|
||||
result = self.client_post('/devtools/register_user/')
|
||||
user_profile = UserProfile.objects.all().order_by("id").last()
|
||||
|
@ -3855,7 +3855,7 @@ class UserSignUpTest(InviteUserBase):
|
|||
@override_settings(TERMS_OF_SERVICE=False)
|
||||
def test_dev_user_registration_create_realm(self) -> None:
|
||||
count = UserProfile.objects.count()
|
||||
string_id = "realm-%d" % (count,)
|
||||
string_id = f"realm-{count}"
|
||||
|
||||
result = self.client_post('/devtools/register_realm/')
|
||||
self.assertEqual(result.status_code, 302)
|
||||
|
|
|
@ -332,8 +332,7 @@ class RecipientTest(ZulipTestCase):
|
|||
type_id=stream.id,
|
||||
type=Recipient.STREAM,
|
||||
)
|
||||
self.assertEqual(str(recipient), '<Recipient: Verona (%d, %d)>' % (
|
||||
stream.id, Recipient.STREAM))
|
||||
self.assertEqual(str(recipient), f'<Recipient: Verona ({stream.id}, {Recipient.STREAM})>')
|
||||
|
||||
class StreamAdminTest(ZulipTestCase):
|
||||
def test_make_stream_public(self) -> None:
|
||||
|
@ -347,7 +346,7 @@ class StreamAdminTest(ZulipTestCase):
|
|||
'is_private': ujson.dumps(False),
|
||||
}
|
||||
stream_id = get_stream('private_stream', user_profile.realm).id
|
||||
result = self.client_patch("/json/streams/%d" % (stream_id,), params)
|
||||
result = self.client_patch(f"/json/streams/{stream_id}", params)
|
||||
self.assert_json_error(result, 'Invalid stream id')
|
||||
|
||||
stream = self.subscribe(user_profile, 'private_stream')
|
||||
|
@ -358,7 +357,7 @@ class StreamAdminTest(ZulipTestCase):
|
|||
'stream_name': ujson.dumps('private_stream'),
|
||||
'is_private': ujson.dumps(False),
|
||||
}
|
||||
result = self.client_patch("/json/streams/%d" % (stream_id,), params)
|
||||
result = self.client_patch(f"/json/streams/{stream_id}", params)
|
||||
self.assert_json_success(result)
|
||||
|
||||
realm = user_profile.realm
|
||||
|
@ -378,7 +377,7 @@ class StreamAdminTest(ZulipTestCase):
|
|||
'is_private': ujson.dumps(True),
|
||||
}
|
||||
stream_id = get_stream('public_stream', realm).id
|
||||
result = self.client_patch("/json/streams/%d" % (stream_id,), params)
|
||||
result = self.client_patch(f"/json/streams/{stream_id}", params)
|
||||
self.assert_json_success(result)
|
||||
stream = get_stream('public_stream', realm)
|
||||
self.assertTrue(stream.invite_only)
|
||||
|
@ -397,7 +396,7 @@ class StreamAdminTest(ZulipTestCase):
|
|||
'is_private': ujson.dumps(False),
|
||||
}
|
||||
stream_id = get_stream('target_stream', realm).id
|
||||
result = self.client_patch("/json/streams/%d" % (stream_id,), params,
|
||||
result = self.client_patch(f"/json/streams/{stream_id}", params,
|
||||
subdomain="zephyr")
|
||||
self.assert_json_success(result)
|
||||
stream = get_stream('target_stream', realm)
|
||||
|
@ -417,7 +416,7 @@ class StreamAdminTest(ZulipTestCase):
|
|||
'history_public_to_subscribers': ujson.dumps(True),
|
||||
}
|
||||
stream_id = get_stream('public_history_stream', realm).id
|
||||
result = self.client_patch("/json/streams/%d" % (stream_id,), params)
|
||||
result = self.client_patch(f"/json/streams/{stream_id}", params)
|
||||
self.assert_json_success(result)
|
||||
stream = get_stream('public_history_stream', realm)
|
||||
self.assertTrue(stream.invite_only)
|
||||
|
@ -436,7 +435,7 @@ class StreamAdminTest(ZulipTestCase):
|
|||
'history_public_to_subscribers': ujson.dumps(False),
|
||||
}
|
||||
stream_id = get_stream('public_stream', realm).id
|
||||
result = self.client_patch("/json/streams/%d" % (stream_id,), params)
|
||||
result = self.client_patch(f"/json/streams/{stream_id}", params)
|
||||
self.assert_json_success(result)
|
||||
stream = get_stream('public_stream', realm)
|
||||
self.assertFalse(stream.invite_only)
|
||||
|
@ -449,7 +448,7 @@ class StreamAdminTest(ZulipTestCase):
|
|||
self.subscribe(user_profile, stream.name)
|
||||
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
|
||||
|
||||
result = self.client_delete('/json/streams/%d' % (stream.id,))
|
||||
result = self.client_delete(f'/json/streams/{stream.id}')
|
||||
self.assert_json_success(result)
|
||||
subscription_exists = get_active_subscriptions_for_stream_id(stream.id).filter(
|
||||
user_profile=user_profile,
|
||||
|
@ -514,7 +513,7 @@ class StreamAdminTest(ZulipTestCase):
|
|||
self.subscribe(user_profile, 'new_stream')
|
||||
|
||||
stream_id = get_stream('new_stream', user_profile.realm).id
|
||||
result = self.client_delete('/json/streams/%d' % (stream_id,))
|
||||
result = self.client_delete(f'/json/streams/{stream_id}')
|
||||
self.assert_json_error(result, 'Must be an organization administrator')
|
||||
|
||||
def test_private_stream_live_updates(self) -> None:
|
||||
|
@ -530,7 +529,7 @@ class StreamAdminTest(ZulipTestCase):
|
|||
events: List[Mapping[str, Any]] = []
|
||||
with tornado_redirected_to_list(events):
|
||||
stream_id = get_stream('private_stream', user_profile.realm).id
|
||||
result = self.client_patch('/json/streams/%d' % (stream_id,),
|
||||
result = self.client_patch(f'/json/streams/{stream_id}',
|
||||
{'description': ujson.dumps('Test description')})
|
||||
self.assert_json_success(result)
|
||||
# Should be just a description change event
|
||||
|
@ -547,7 +546,7 @@ class StreamAdminTest(ZulipTestCase):
|
|||
events = []
|
||||
with tornado_redirected_to_list(events):
|
||||
stream_id = get_stream('private_stream', user_profile.realm).id
|
||||
result = self.client_patch('/json/streams/%d' % (stream_id,),
|
||||
result = self.client_patch(f'/json/streams/{stream_id}',
|
||||
{'new_name': ujson.dumps('whatever')})
|
||||
self.assert_json_success(result)
|
||||
# Should be a name event, an email address event and a notification event
|
||||
|
@@ -573,25 +572,25 @@ class StreamAdminTest(ZulipTestCase):
stream = self.subscribe(user_profile, 'stream_name1')
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)

result = self.client_patch('/json/streams/%d' % (stream.id,),
result = self.client_patch(f'/json/streams/{stream.id}',
{'new_name': ujson.dumps('stream_name1')})
self.assert_json_error(result, "Stream already has that name!")
result = self.client_patch('/json/streams/%d' % (stream.id,),
result = self.client_patch(f'/json/streams/{stream.id}',
{'new_name': ujson.dumps('Denmark')})
self.assert_json_error(result, "Stream name 'Denmark' is already taken.")
result = self.client_patch('/json/streams/%d' % (stream.id,),
result = self.client_patch(f'/json/streams/{stream.id}',
{'new_name': ujson.dumps('denmark ')})
self.assert_json_error(result, "Stream name 'denmark' is already taken.")

# Do a rename that is case-only--this should succeed.
result = self.client_patch('/json/streams/%d' % (stream.id,),
result = self.client_patch(f'/json/streams/{stream.id}',
{'new_name': ujson.dumps('sTREAm_name1')})
self.assert_json_success(result)

events: List[Mapping[str, Any]] = []
with tornado_redirected_to_list(events):
stream_id = get_stream('stream_name1', user_profile.realm).id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'new_name': ujson.dumps('stream_name2')})
self.assert_json_success(result)
event = events[1]['event']
@@ -622,7 +621,7 @@ class StreamAdminTest(ZulipTestCase):
# *NOTE: Here Encoding is needed when Unicode string is passed as an argument*
with tornado_redirected_to_list(events):
stream_id = stream_name2_exists.id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'new_name': ujson.dumps('नया नाम'.encode())})
self.assert_json_success(result)
# While querying, system can handle unicode strings.
@@ -634,7 +633,7 @@ class StreamAdminTest(ZulipTestCase):
# by client_patch call, encoding of URL is not needed.
with tornado_redirected_to_list(events):
stream_id = stream_name_uni_exists.id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'new_name': ujson.dumps('नाम में क्या रक्खा हे'.encode())})
self.assert_json_success(result)
# While querying, system can handle unicode strings.
@@ -646,7 +645,7 @@ class StreamAdminTest(ZulipTestCase):
# Test case to change name from one language to other.
with tornado_redirected_to_list(events):
stream_id = stream_name_new_uni_exists.id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'new_name': ujson.dumps('français'.encode())})
self.assert_json_success(result)
stream_name_fr_exists = get_stream('français', realm)
@@ -655,7 +654,7 @@ class StreamAdminTest(ZulipTestCase):
# Test case to change name to mixed language name.
with tornado_redirected_to_list(events):
stream_id = stream_name_fr_exists.id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'new_name': ujson.dumps('français name'.encode())})
self.assert_json_success(result)
stream_name_mixed_exists = get_stream('français name', realm)
@@ -667,7 +666,7 @@ class StreamAdminTest(ZulipTestCase):
del events[:]
with tornado_redirected_to_list(events):
stream_id = get_stream('stream_private_name1', realm).id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'new_name': ujson.dumps('stream_private_name2')})
self.assert_json_success(result)
notified_user_ids = set(events[1]['users'])
@@ -686,7 +685,7 @@ class StreamAdminTest(ZulipTestCase):
self.make_stream('stream_name1')

stream_id = get_stream('stream_name1', user_profile.realm).id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'new_name': ujson.dumps('stream_name2')})
self.assert_json_error(result, 'Must be an organization administrator')

@@ -697,7 +696,7 @@ class StreamAdminTest(ZulipTestCase):

stream = self.subscribe(user_profile, 'stream_name1')
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
result = self.client_patch('/json/streams/%d' % (stream.id,),
result = self.client_patch(f'/json/streams/{stream.id}',
{'new_name': ujson.dumps('stream_name2')})
self.assert_json_success(result)

@@ -723,16 +722,16 @@ class StreamAdminTest(ZulipTestCase):
self.assert_json_success(result)

stream_id = get_stream('private_stream', iago.realm).id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'new_name': ujson.dumps('new_private_stream')})
self.assert_json_success(result)

result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'new_description': ujson.dumps('new description')})
self.assert_json_success(result)

# But cannot change stream type.
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'stream_name': ujson.dumps('private_stream'),
'is_private': ujson.dumps(True)})
self.assert_json_error(result, "Invalid stream id")
@@ -746,7 +745,7 @@ class StreamAdminTest(ZulipTestCase):
events: List[Mapping[str, Any]] = []
with tornado_redirected_to_list(events):
stream_id = get_stream('stream_name1', realm).id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'description': ujson.dumps('Test description')})
self.assert_json_success(result)

@@ -773,12 +772,12 @@ class StreamAdminTest(ZulipTestCase):

self.assertEqual('Test description', stream.description)

result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'description': ujson.dumps('a' * 1025)})
self.assert_json_error(result, "description is too long (limit: %s characters)"
% (Stream.MAX_DESCRIPTION_LENGTH,))

result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'description': ujson.dumps('a\nmulti\nline\ndescription')})
self.assert_json_success(result)
stream = get_stream('stream_name1', realm)
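
Only the URL construction changes in the hunk above; the %s inside the "description is too long" assertion is left unchanged by this commit. For what it is worth, %s on an integer and a bare f-string interpolation render identically, so a later manual conversion would be behaviour-preserving. A minimal sketch, using 1024 as a stand-in for Stream.MAX_DESCRIPTION_LENGTH:

    MAX_DESCRIPTION_LENGTH = 1024  # placeholder for Stream.MAX_DESCRIPTION_LENGTH
    old = "description is too long (limit: %s characters)" % (MAX_DESCRIPTION_LENGTH,)
    new = f"description is too long (limit: {MAX_DESCRIPTION_LENGTH} characters)"
    assert old == new
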
@@ -786,7 +785,7 @@ class StreamAdminTest(ZulipTestCase):

# Verify that we don't render inline URL previews in this code path.
with self.settings(INLINE_URL_EMBED_PREVIEW=True):
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'description': ujson.dumps('See https://zulip.com/team')})
self.assert_json_success(result)
stream = get_stream('stream_name1', realm)
@@ -803,7 +802,7 @@ class StreamAdminTest(ZulipTestCase):
do_change_user_role(user_profile, UserProfile.ROLE_MEMBER)

stream_id = get_stream('stream_name1', user_profile.realm).id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'description': ujson.dumps('Test description')})
self.assert_json_error(result, 'Must be an organization administrator')

@@ -815,7 +814,7 @@ class StreamAdminTest(ZulipTestCase):
do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)

stream_id = get_stream('stream_name1', user_profile.realm).id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'is_announcement_only': ujson.dumps(True)})
self.assert_json_success(result)
stream = get_stream('stream_name1', user_profile.realm)
@@ -835,7 +834,7 @@ class StreamAdminTest(ZulipTestCase):
user_profile.save()
self.assertEqual(user_profile.is_new_member, is_new)
stream_id = get_stream('stream_name1', user_profile.realm).id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'stream_post_policy': ujson.dumps(policy)})
self.assert_json_error(result, 'Must be an organization administrator')

@@ -849,7 +848,7 @@ class StreamAdminTest(ZulipTestCase):

for policy in policies:
stream_id = get_stream('stream_name1', user_profile.realm).id
result = self.client_patch('/json/streams/%d' % (stream_id,),
result = self.client_patch(f'/json/streams/{stream_id}',
{'stream_post_policy': ujson.dumps(policy)})
self.assert_json_success(result)
stream = get_stream('stream_name1', user_profile.realm)
@@ -1673,7 +1672,7 @@ class SubscriptionPropertiesTest(ZulipTestCase):
"stream_id": not_subbed[0]["stream_id"],
"value": "#ffffff"}])})
self.assert_json_error(
result, "Not subscribed to stream id %d" % (not_subbed[0]["stream_id"],))
result, "Not subscribed to stream id {}".format(not_subbed[0]["stream_id"]))

def test_set_color_missing_color(self) -> None:
"""
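
This hunk lands on str.format rather than an f-string. The interpolated expression, not_subbed[0]["stream_id"], itself needs double quotes, and before Python 3.12 an f-string cannot reuse its own quote character inside a replacement field, so the automated rewrite falls back to .format rather than rewriting the inner quotes (the same applies to the subs[0]["stream_id"] hunks below). The rendered message is unchanged either way; a minimal sketch with made-up data:

    not_subbed = [{"stream_id": 99}]  # made-up data for illustration
    old = "Not subscribed to stream id %d" % (not_subbed[0]["stream_id"],)
    new = "Not subscribed to stream id {}".format(not_subbed[0]["stream_id"])
    assert old == new == "Not subscribed to stream id 99"
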
@@ -1938,7 +1937,7 @@ class SubscriptionRestApiTest(ZulipTestCase):

self.login_user(user)
subs = gather_subscriptions(user)[0]
result = self.api_patch(user, "/api/v1/users/me/subscriptions/%d" % (subs[0]["stream_id"],),
result = self.api_patch(user, "/api/v1/users/me/subscriptions/{}".format(subs[0]["stream_id"]),
{'property': 'color', 'value': '#c2c2c2'})
self.assert_json_success(result)

@@ -1952,7 +1951,7 @@ class SubscriptionRestApiTest(ZulipTestCase):
self.login_user(user)
subs = gather_subscriptions(user)[0]

result = self.api_patch(user, "/api/v1/users/me/subscriptions/%d" % (subs[0]["stream_id"],),
result = self.api_patch(user, "/api/v1/users/me/subscriptions/{}".format(subs[0]["stream_id"]),
{'property': 'invalid', 'value': 'somevalue'})
self.assert_json_error(result,
"Unknown subscription property: invalid")
@@ -2252,7 +2251,7 @@ class SubscriptionAPITest(ZulipTestCase):
msg = self.get_second_to_last_message()
self.assertEqual(msg.recipient.type, Recipient.STREAM)
self.assertEqual(msg.sender_id, self.notification_bot().id)
expected_msg = "@_**%s|%d** created a new stream #**%s**." % (invitee_full_name, invitee.id, invite_streams[0])
expected_msg = f"@_**{invitee_full_name}|{invitee.id}** created a new stream #**{invite_streams[0]}**."
self.assertEqual(msg.content, expected_msg)

def test_successful_cross_realm_notification(self) -> None:
@@ -2290,8 +2289,7 @@ class SubscriptionAPITest(ZulipTestCase):
self.assertEqual(msg.recipient.type, Recipient.STREAM)
self.assertEqual(msg.sender_id, self.notification_bot().id)
stream_id = Stream.objects.latest('id').id
expected_rendered_msg = '<p><span class="user-mention silent" data-user-id="%d">%s</span> created a new stream <a class="stream" data-stream-id="%d" href="/#narrow/stream/%s-%s">#%s</a>.</p>' % (
user.id, user.full_name, stream_id, stream_id, invite_streams[0], invite_streams[0])
expected_rendered_msg = f'<p><span class="user-mention silent" data-user-id="{user.id}">{user.full_name}</span> created a new stream <a class="stream" data-stream-id="{stream_id}" href="/#narrow/stream/{stream_id}-{invite_streams[0]}">#{invite_streams[0]}</a>.</p>'
self.assertEqual(msg.rendered_content, expected_rendered_msg)

def test_successful_subscriptions_notifies_with_escaping(self) -> None:
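
This is the hunk where the conversion pays off most. The old expected_rendered_msg threads six positional values through a % tuple, with stream_id and invite_streams[0] each supplied twice, so a mis-ordered tuple silently corrupts the expected HTML; the f-string names every value exactly where it is used. A shortened sketch of the same shape (values invented for illustration):

    stream_id, stream_name = 42, "brand new stream"  # invented values
    old = '<a data-stream-id="%d" href="/#narrow/stream/%d-%s">#%s</a>' % (
        stream_id, stream_id, stream_name, stream_name)
    new = f'<a data-stream-id="{stream_id}" href="/#narrow/stream/{stream_id}-{stream_name}">#{stream_name}</a>'
    assert old == new
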
@@ -2319,7 +2317,7 @@ class SubscriptionAPITest(ZulipTestCase):

msg = self.get_second_to_last_message()
self.assertEqual(msg.sender_id, self.notification_bot().id)
expected_msg = "@_**%s|%d** created a new stream #**%s**." % (invitee_full_name, invitee.id, invite_streams[0])
expected_msg = f"@_**{invitee_full_name}|{invitee.id}** created a new stream #**{invite_streams[0]}**."
self.assertEqual(msg.content, expected_msg)

def test_non_ascii_stream_subscription(self) -> None:
@@ -3212,7 +3210,7 @@ class SubscriptionAPITest(ZulipTestCase):

def delete_stream(stream_name: str) -> None:
stream_id = get_stream(stream_name, realm).id
result = self.client_delete('/json/streams/%d' % (stream_id,))
result = self.client_delete(f'/json/streams/{stream_id}')
self.assert_json_success(result)

# Deleted/deactivated stream should not be returned in the helper results
@@ -3564,7 +3562,7 @@ class InviteOnlyStreamTest(ZulipTestCase):

# Make sure both users are subscribed to this stream
stream_id = get_stream(stream_name, hamlet.realm).id
result = self.api_get(hamlet, "/api/v1/streams/%d/members" % (stream_id,))
result = self.api_get(hamlet, f"/api/v1/streams/{stream_id}/members")
self.assert_json_success(result)
json = result.json()

@@ -3607,7 +3605,7 @@ class GetSubscribersTest(ZulipTestCase):
def make_subscriber_request(self, stream_id: int, user: Optional[UserProfile]=None) -> HttpResponse:
if user is None:
user = self.user_profile
return self.api_get(user, "/api/v1/streams/%d/members" % (stream_id,))
return self.api_get(user, f"/api/v1/streams/{stream_id}/members")

def make_successful_subscriber_request(self, stream_name: str) -> None:
stream_id = get_stream(stream_name, self.user_profile.realm).id
@@ -3907,12 +3905,12 @@ class GetSubscribersTest(ZulipTestCase):
stream_id = get_stream(stream_name, self.user_profile.realm).id
# Verify another user can't get the data.
self.login('cordelia')
result = self.client_get("/json/streams/%d/members" % (stream_id,))
result = self.client_get(f"/json/streams/{stream_id}/members")
self.assert_json_error(result, 'Invalid stream id')

# But an organization administrator can
self.login('iago')
result = self.client_get("/json/streams/%d/members" % (stream_id,))
result = self.client_get(f"/json/streams/{stream_id}/members")
self.assert_json_success(result)

def test_json_get_subscribers_stream_not_exist(self) -> None:
@@ -3920,7 +3918,7 @@ class GetSubscribersTest(ZulipTestCase):
json_get_subscribers also returns the list of subscribers for a stream.
"""
stream_id = 99999999
result = self.client_get("/json/streams/%d/members" % (stream_id,))
result = self.client_get(f"/json/streams/{stream_id}/members")
self.assert_json_error(result, 'Invalid stream id')

def test_json_get_subscribers(self) -> None:
@@ -3932,7 +3930,7 @@ class GetSubscribersTest(ZulipTestCase):
stream_id = get_stream(stream_name, self.user_profile.realm).id
expected_subscribers = gather_subscriptions(
self.user_profile, include_subscribers=True)[0][0]['subscribers']
result = self.client_get("/json/streams/%d/members" % (stream_id,))
result = self.client_get(f"/json/streams/{stream_id}/members")
self.assert_json_success(result)
result_dict = result.json()
self.assertIn('subscribers', result_dict)
@@ -23,8 +23,7 @@ class PublicURLTest(ZulipTestCase):
# e.g. self.client_post(url) if method is "post"
response = getattr(self, method)(url)
self.assertEqual(response.status_code, expected_status,
msg="Expected %d, received %d for %s to %s" % (
expected_status, response.status_code, method, url))
msg=f"Expected {expected_status}, received {response.status_code} for {method} to {url}")

@slow("Tests dozens of endpoints, including all of our /help/ documents")
def test_public_urls(self) -> None:
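
Here two physical lines collapse into one (the hunk shrinks from 8 lines to 7). One caveat worth keeping in mind when extending this kind of conversion by hand: an assertEqual msg= argument is built unconditionally anyway, so an f-string costs nothing, but logging calls conventionally keep lazy %-style arguments and should not be rewritten the same way. A small illustrative sketch (values invented):

    import logging
    logger = logging.getLogger(__name__)

    expected, received = 200, 404  # invented status codes
    msg = f"Expected {expected}, received {received}"              # fine: always rendered
    logger.debug("Expected %d, received %d", expected, received)   # left as-is: rendered only if emitted
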
@@ -40,7 +39,7 @@ class PublicURLTest(ZulipTestCase):
"/en/accounts/login/", "/ru/accounts/login/",
"/help/"],
302: ["/", "/en/", "/ru/"],
401: ["/json/streams/%d/members" % (denmark_stream_id,),
401: [f"/json/streams/{denmark_stream_id}/members",
"/api/v1/users/me/subscriptions",
"/api/v1/messages",
"/json/messages",
@@ -92,16 +91,14 @@ class PublicURLTest(ZulipTestCase):
with self.settings(GOOGLE_CLIENT_ID=None):
resp = self.client_get("/api/v1/fetch_google_client_id")
self.assertEqual(400, resp.status_code,
msg="Expected 400, received %d for GET /api/v1/fetch_google_client_id" % (
resp.status_code,))
msg=f"Expected 400, received {resp.status_code} for GET /api/v1/fetch_google_client_id")
self.assertEqual('error', resp.json()['result'])

def test_get_gcid_when_configured(self) -> None:
with self.settings(GOOGLE_CLIENT_ID="ABCD"):
resp = self.client_get("/api/v1/fetch_google_client_id")
self.assertEqual(200, resp.status_code,
msg="Expected 200, received %d for GET /api/v1/fetch_google_client_id" % (
resp.status_code,))
msg=f"Expected 200, received {resp.status_code} for GET /api/v1/fetch_google_client_id")
data = ujson.loads(resp.content)
self.assertEqual('success', data['result'])
self.assertEqual('ABCD', data['google_client_id'])
@@ -880,7 +880,7 @@ class UserProfileTest(ZulipTestCase):
self.assertEqual(check_valid_user_ids(realm.id, invalid_uid),
"User IDs is not a list")
self.assertEqual(check_valid_user_ids(realm.id, [invalid_uid]),
"Invalid user ID: %d" % (invalid_uid,))
f"Invalid user ID: {invalid_uid}")

invalid_uid = "abc"
self.assertEqual(check_valid_user_ids(realm.id, [invalid_uid]),
@@ -891,19 +891,19 @@ class UserProfileTest(ZulipTestCase):

# User is in different realm
self.assertEqual(check_valid_user_ids(get_realm("zephyr").id, [hamlet.id]),
"Invalid user ID: %d" % (hamlet.id,))
f"Invalid user ID: {hamlet.id}")

# User is not active
hamlet.is_active = False
hamlet.save()
self.assertEqual(check_valid_user_ids(realm.id, [hamlet.id]),
"User with ID %d is deactivated" % (hamlet.id,))
f"User with ID {hamlet.id} is deactivated")
self.assertEqual(check_valid_user_ids(realm.id, [hamlet.id], allow_deactivated=True),
None)

# User is a bot
self.assertEqual(check_valid_user_ids(realm.id, [bot.id]),
"User with ID %d is a bot" % (bot.id,))
f"User with ID {bot.id} is a bot")

# Successfully get non-bot, active user belong to your realm
self.assertEqual(check_valid_user_ids(realm.id, [othello.id]), None)
@@ -22,9 +22,9 @@ def get_tornado_uri(realm: Realm) -> str:
return settings.TORNADO_SERVER

port = get_tornado_port(realm)
return "http://127.0.0.1:%d" % (port,)
return f"http://127.0.0.1:{port}"

def notify_tornado_queue_name(port: int) -> str:
if settings.TORNADO_PROCESSES == 1:
return "notify_tornado"
return "notify_tornado_port_%d" % (port,)
return f"notify_tornado_port_{port}"
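
Both converted returns in this hunk interpolate an int port. One behavioural nuance of %d-to-f-string rewrites in general: %d insists on a number, while an f-string will format any object, so the port: int annotation is what keeps the two spellings strictly equivalent here. A minimal sketch with an invented port:

    port = 9800  # invented port number
    assert "notify_tornado_port_%d" % (port,) == f"notify_tornado_port_{port}"
    # "notify_tornado_port_%d" % ("9800",) raises TypeError; the f-string form would not.
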
@@ -27,7 +27,7 @@ def register_development_user(request: HttpRequest) -> HttpResponse:
if get_subdomain(request) == '':
request.META['HTTP_HOST'] = settings.REALM_HOSTS['zulip']
count = UserProfile.objects.count()
name = 'user-%d' % (count,)
name = f'user-{count}'
email = f'{name}@zulip.com'
prereg = create_preregistration_user(email, request, realm_creation=False,
password_required=False)
@@ -42,9 +42,9 @@ def register_development_user(request: HttpRequest) -> HttpResponse:
@csrf_exempt
def register_development_realm(request: HttpRequest) -> HttpResponse:
count = UserProfile.objects.count()
name = 'user-%d' % (count,)
name = f'user-{count}'
email = f'{name}@zulip.com'
realm_name = 'realm-%d' % (count,)
realm_name = f'realm-{count}'
prereg = create_preregistration_user(email, request, realm_creation=True,
password_required=False)
activation_url = create_confirmation_link(prereg,
@@ -10,12 +10,12 @@ class Command(ZulipBaseCommand):
help = """Add a new realm and initial user for manual testing of the onboarding process."""

def handle(self, **options: Any) -> None:
string_id = 'realm%02d' % (
Realm.objects.filter(string_id__startswith='realm').count(),)
string_id = 'realm{:02}'.format(
Realm.objects.filter(string_id__startswith='realm').count())
realm = do_create_realm(string_id, string_id)

name = '%02d-user' % (
UserProfile.objects.filter(email__contains='user@').count(),)
name = '{:02}-user'.format(
UserProfile.objects.filter(email__contains='user@').count())
user = do_create_user(f'{name}@{string_id}.zulip.com',
'password', realm, name, name, role=UserProfile.ROLE_REALM_ADMINISTRATOR)
bulk_add_subscriptions([realm.signup_notifications_stream], [user])
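
The zero-padded cases convert the width spec rather than dropping it: %02d becomes the {:02} format spec (and %03d becomes {i:03} in the 'Extra{i:03} User' hunk further down), while the trailing-comma one-element tuple disappears. Presumably because the argument here is a multi-line queryset expression rather than a simple name, the result stays a .format call instead of being folded into an f-string. A minimal sketch with an invented count:

    count = 7  # stands in for the queryset .count()
    assert 'realm%02d' % (count,) == 'realm{:02}'.format(count) == f'realm{count:02}' == 'realm07'
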
@@ -29,5 +29,5 @@ and will otherwise fall back to the zulip realm."""
valid_realm = realm
domain = realm.string_id + '.zulip.com'

name = '%02d-user' % (UserProfile.objects.filter(email__contains='user@').count(),)
name = '{:02}-user'.format(UserProfile.objects.filter(email__contains='user@').count())
do_create_user(f'{name}@{domain}', 'password', valid_realm, name, name)
@@ -272,8 +272,8 @@ class Command(BaseCommand):
num_boring_names = 300

for i in range(min(num_names, num_boring_names)):
full_name = 'Extra%03d User' % (i,)
names.append((full_name, 'extrauser%d@zulip.com' % (i,)))
full_name = f'Extra{i:03} User'
names.append((full_name, f'extrauser{i}@zulip.com'))

if num_names > num_boring_names:
fnames = ['Amber', 'Arpita', 'Bob', 'Cindy', 'Daniela', 'Dan', 'Dinesh',
@@ -319,7 +319,7 @@ class Command(BaseCommand):
("Zulip Default Bot", "default-bot@zulip.com"),
]
for i in range(options["extra_bots"]):
zulip_realm_bots.append(('Extra Bot %d' % (i,), 'extrabot%d@zulip.com' % (i,)))
zulip_realm_bots.append((f'Extra Bot {i}', f'extrabot{i}@zulip.com'))

create_users(zulip_realm, zulip_realm_bots, bot_type=UserProfile.DEFAULT_BOT)

@@ -223,7 +223,7 @@ SOCIAL_AUTH_SAML_ORG_INFO = {
"en-US": {
"displayname": "Example, Inc. Zulip",
"name": "zulip",
"url": "%s%s" % ('https://', EXTERNAL_HOST),
"url": "{}{}".format('https://', EXTERNAL_HOST),
},
}
SOCIAL_AUTH_SAML_ENABLED_IDPS = {
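
In this SAML hunk and the one below, the scheme is itself a string literal, so the automated rewrite keeps it as a .format argument rather than merging it into the surrounding string. All of these spellings produce the same URL, so a later manual pass could simplify further if desired (the EXTERNAL_HOST value below is a placeholder):

    EXTERNAL_HOST = "zulip.example.com"  # placeholder; the real value comes from settings
    assert ("%s%s" % ('https://', EXTERNAL_HOST)
            == "{}{}".format('https://', EXTERNAL_HOST)
            == f"https://{EXTERNAL_HOST}"
            == "https://" + EXTERNAL_HOST)
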
@@ -206,7 +206,7 @@ SOCIAL_AUTH_SAML_ORG_INFO = {
"en-US": {
"name": "example",
"displayname": "Example Inc.",
"url": "%s%s" % ('http://', EXTERNAL_HOST),
"url": "{}{}".format('http://', EXTERNAL_HOST),
},
}