mirror of https://github.com/zulip/zulip.git

commit 67e7a3631d (parent 6480deaf27)

python: Convert percent formatting to Python 3.6 f-strings.

Generated by pyupgrade --py36-plus.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
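Before the diff, a quick illustration may help. Since the commit was generated mechanically by pyupgrade (run with --py36-plus over the tree), every hunk follows a handful of rewrite patterns. The sketch below is illustrative only; the names (frequency, name, res, field) are hypothetical stand-ins for the ones in the real hunks:

    # Illustrative sketch (hypothetical names) of the rewrites that
    # pyupgrade --py36-plus applies throughout this commit.
    frequency = "hour"

    # Percent formatting with a one-element tuple becomes an f-string:
    assert "Unknown frequency: %s" % (frequency,) == f"Unknown frequency: {frequency}"

    # Width, alignment, and precision specifiers move into the format spec:
    name = "general"
    fraction = 0.4567
    assert "%25s" % (name,) == f"{name:>25}"
    assert "%.2f%% of" % (fraction * 100,) == f"{fraction * 100:.2f}% of"

    # Where the substituted expression would need awkward nested quoting
    # inside an f-string, pyupgrade falls back to str.format(), as in the
    # res["msg"] hunks below:
    res = {"msg": "connection refused"}
    assert "error was: %s" % (res["msg"],) == "error was: {}".format(res["msg"])

    # Literal braces in the source string are doubled inside an f-string,
    # as in the regex hunk below that keeps the {8} quantifier:
    field = "created"
    assert ('"%s": 1[5-9][0-9]{8}(?![0-9-])' % (field,)
            == f'"{field}": 1[5-9][0-9]{{8}}(?![0-9-])')

Percent expressions that pyupgrade cannot prove safe to convert (such as %-*s with a computed width, or multi-line concatenated format strings) are left untouched, which is why a few % usages survive as context lines in the hunks below.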
@@ -40,7 +40,7 @@ class CountStat:
         self.data_collector = data_collector
         # might have to do something different for bitfields
         if frequency not in self.FREQUENCIES:
-            raise AssertionError("Unknown frequency: %s" % (frequency,))
+            raise AssertionError(f"Unknown frequency: {frequency}")
         self.frequency = frequency
         if interval is not None:
             self.interval = interval
@@ -50,7 +50,7 @@ class CountStat:
             self.interval = timedelta(days=1)

     def __str__(self) -> str:
-        return "<CountStat: %s>" % (self.property,)
+        return f"<CountStat: {self.property}>"

 class LoggingCountStat(CountStat):
     def __init__(self, property: str, output_table: Type[BaseCount], frequency: str) -> None:
@@ -86,11 +86,11 @@ def process_count_stat(stat: CountStat, fill_to_time: datetime,
     elif stat.frequency == CountStat.DAY:
         time_increment = timedelta(days=1)
     else:
-        raise AssertionError("Unknown frequency: %s" % (stat.frequency,))
+        raise AssertionError(f"Unknown frequency: {stat.frequency}")

     verify_UTC(fill_to_time)
     if floor_to_hour(fill_to_time) != fill_to_time:
-        raise ValueError("fill_to_time must be on an hour boundary: %s" % (fill_to_time,))
+        raise ValueError(f"fill_to_time must be on an hour boundary: {fill_to_time}")

     fill_state = FillState.objects.filter(property=stat.property).first()
     if fill_state is None:
@@ -108,7 +108,7 @@ def process_count_stat(stat: CountStat, fill_to_time: datetime,
     elif fill_state.state == FillState.DONE:
         currently_filled = fill_state.end_time
     else:
-        raise AssertionError("Unknown value for FillState.state: %s." % (fill_state.state,))
+        raise AssertionError(f"Unknown value for FillState.state: {fill_state.state}.")

     if isinstance(stat, DependentCountStat):
         for dependency in stat.dependencies:
@@ -43,7 +43,7 @@ def generate_time_series_data(days: int=100, business_hours_base: float=10,
             [24*non_business_hours_base] * 2
         holidays = [random() < holiday_rate for i in range(days)]
     else:
-        raise AssertionError("Unknown frequency: %s" % (frequency,))
+        raise AssertionError(f"Unknown frequency: {frequency}")
     if length < 2:
         raise AssertionError("Must be generating at least 2 data points. "
                              "Currently generating %s" % (length,))
@@ -19,7 +19,7 @@ def time_range(start: datetime, end: datetime, frequency: str,
         end = floor_to_day(end)
         step = timedelta(days=1)
     else:
-        raise AssertionError("Unknown frequency: %s" % (frequency,))
+        raise AssertionError(f"Unknown frequency: {frequency}")

     times = []
     if min_length is not None:
@@ -26,9 +26,9 @@ def analyze_activity(options: Dict[str, Any]) -> None:
         total_duration += duration
         print("%-*s%s" % (37, user_profile.email, duration,))

-    print("\nTotal Duration: %s" % (total_duration,))
-    print("\nTotal Duration in minutes: %s" % (total_duration.total_seconds() / 60.,))
-    print("Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,))
+    print(f"\nTotal Duration: {total_duration}")
+    print(f"\nTotal Duration in minutes: {total_duration.total_seconds() / 60.}")
+    print(f"Total Duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}")

 class Command(BaseCommand):
     help = """Report analytics of user activity on a per-user and realm basis.
@@ -33,8 +33,7 @@ class Command(BaseCommand):
         state_file_tmp = state_file_path + "-tmp"

         with open(state_file_tmp, "w") as f:
-            f.write("%s|%s|%s|%s\n" % (
-                int(time.time()), status, states[status], message))
+            f.write(f"{int(time.time())}|{status}|{states[status]}|{message}\n")
         os.rename(state_file_tmp, state_file_path)

     def get_fill_state(self) -> Dict[str, Any]:
@@ -50,7 +49,7 @@ class Command(BaseCommand):
             try:
                 verify_UTC(last_fill)
             except TimezoneNotUTCException:
-                return {'status': 2, 'message': 'FillState not in UTC for %s' % (property,)}
+                return {'status': 2, 'message': f'FillState not in UTC for {property}'}

             if stat.frequency == CountStat.DAY:
                 floor_function = floor_to_day
@@ -20,7 +20,7 @@ class Command(BaseCommand):
     def handle(self, *args: Any, **options: Any) -> None:
         property = options['property']
         if property not in COUNT_STATS:
-            raise CommandError("Invalid property: %s" % (property,))
+            raise CommandError(f"Invalid property: {property}")
         if not options['force']:
             raise CommandError("No action taken. Use --force.")

@@ -66,7 +66,7 @@ class Command(BaseCommand):
             fraction = 0.0
         else:
             fraction = numerator / float(denominator)
-        print("%.2f%% of" % (fraction * 100,), text)
+        print(f"{fraction * 100:.2f}% of", text)

     def handle(self, *args: Any, **options: Any) -> None:
         if options['realms']:
@@ -40,18 +40,18 @@ class Command(BaseCommand):
         print("%10s %d public streams and" % ("(", public_count), end=' ')
         print("%d private streams )" % (private_count,))
         print("------------")
-        print("%25s %15s %10s %12s" % ("stream", "subscribers", "messages", "type"))
+        print("{:>25} {:>15} {:>10} {:>12}".format("stream", "subscribers", "messages", "type"))

         for stream in streams:
             if stream.invite_only:
                 stream_type = 'private'
             else:
                 stream_type = 'public'
-            print("%25s" % (stream.name,), end=' ')
+            print(f"{stream.name:>25}", end=' ')
             recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
             print("%10d" % (len(Subscription.objects.filter(recipient=recipient,
                                                             active=True)),), end=' ')
             num_messages = len(Message.objects.filter(recipient=recipient))
             print("%12d" % (num_messages,), end=' ')
-            print("%15s" % (stream_type,))
+            print(f"{stream_type:>15}")
         print("")
@@ -71,7 +71,7 @@ class Command(BaseCommand):
         else:
             stats = list(COUNT_STATS.values())

-        logger.info("Starting updating analytics counts through %s" % (fill_to_time,))
+        logger.info(f"Starting updating analytics counts through {fill_to_time}")
         if options['verbose']:
             start = time.time()
             last = start
@@ -79,13 +79,13 @@ class Command(BaseCommand):
         for stat in stats:
             process_count_stat(stat, fill_to_time)
             if options['verbose']:
-                print("Updated %s in %.3fs" % (stat.property, time.time() - last))
+                print(f"Updated {stat.property} in {time.time() - last:.3f}s")
                 last = time.time()

         if options['verbose']:
             print("Finished updating analytics counts through %s in %.3fs" %
                   (fill_to_time, time.time() - start))
-        logger.info("Finished updating analytics counts through %s" % (fill_to_time,))
+        logger.info(f"Finished updating analytics counts through {fill_to_time}")

         if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS:
             send_analytics_to_remote_server()
@@ -36,7 +36,7 @@ class Command(BaseCommand):
         print("%d streams" % (len(Stream.objects.filter(realm=realm)),))

         for user_profile in user_profiles:
-            print("%35s" % (user_profile.email,), end=' ')
+            print(f"{user_profile.email:>35}", end=' ')
             for week in range(10):
                 print("%5d" % (self.messages_sent_by(user_profile, week),), end=' ')
             print("")
@@ -17,7 +17,7 @@ class FillState(models.Model):
     state: int = models.PositiveSmallIntegerField()

     def __str__(self) -> str:
-        return "<FillState: %s %s %s>" % (self.property, self.end_time, self.state)
+        return f"<FillState: {self.property} {self.end_time} {self.state}>"

 # The earliest/starting end_time in FillState
 # We assume there is at least one realm
@@ -61,7 +61,7 @@ class InstallationCount(BaseCount):
         ]

     def __str__(self) -> str:
-        return "<InstallationCount: %s %s %s>" % (self.property, self.subgroup, self.value)
+        return f"<InstallationCount: {self.property} {self.subgroup} {self.value}>"

 class RealmCount(BaseCount):
     realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
@@ -81,7 +81,7 @@ class RealmCount(BaseCount):
         index_together = ["property", "end_time"]

     def __str__(self) -> str:
-        return "<RealmCount: %s %s %s %s>" % (self.realm, self.property, self.subgroup, self.value)
+        return f"<RealmCount: {self.realm} {self.property} {self.subgroup} {self.value}>"

 class UserCount(BaseCount):
     user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
@@ -104,7 +104,7 @@ class UserCount(BaseCount):
         index_together = ["property", "realm", "end_time"]

     def __str__(self) -> str:
-        return "<UserCount: %s %s %s %s>" % (self.user, self.property, self.subgroup, self.value)
+        return f"<UserCount: {self.user} {self.property} {self.subgroup} {self.value}>"

 class StreamCount(BaseCount):
     stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
@@ -127,5 +127,4 @@ class StreamCount(BaseCount):
         index_together = ["property", "realm", "end_time"]

     def __str__(self) -> str:
-        return "<StreamCount: %s %s %s %s %s>" % (
-            self.stream, self.property, self.subgroup, self.value, self.id)
+        return f"<StreamCount: {self.stream} {self.property} {self.subgroup} {self.value} {self.id}>"
@@ -49,7 +49,7 @@ class AnalyticsTestCase(TestCase):
     def create_user(self, **kwargs: Any) -> UserProfile:
         self.name_counter += 1
         defaults = {
-            'email': 'user%s@domain.tld' % (self.name_counter,),
+            'email': f'user{self.name_counter}@domain.tld',
             'date_joined': self.TIME_LAST_HOUR,
             'full_name': 'full_name',
             'short_name': 'short_name',
@@ -71,7 +71,7 @@ class AnalyticsTestCase(TestCase):

     def create_stream_with_recipient(self, **kwargs: Any) -> Tuple[Stream, Recipient]:
         self.name_counter += 1
-        defaults = {'name': 'stream name %s' % (self.name_counter,),
+        defaults = {'name': f'stream name {self.name_counter}',
                     'realm': self.default_realm,
                     'date_created': self.TIME_LAST_HOUR}
         for key, value in defaults.items():
@@ -84,7 +84,7 @@ class AnalyticsTestCase(TestCase):

     def create_huddle_with_recipient(self, **kwargs: Any) -> Tuple[Huddle, Recipient]:
         self.name_counter += 1
-        defaults = {'huddle_hash': 'hash%s' % (self.name_counter,)}
+        defaults = {'huddle_hash': f'hash{self.name_counter}'}
         for key, value in defaults.items():
             kwargs[key] = kwargs.get(key, value)
         huddle = Huddle.objects.create(**kwargs)
@@ -339,10 +339,10 @@ class TestCountStats(AnalyticsTestCase):
             date_created=self.TIME_ZERO-2*self.DAY)
         for minutes_ago in [0, 1, 61, 60*24+1]:
             creation_time = self.TIME_ZERO - minutes_ago*self.MINUTE
-            user = self.create_user(email='user-%s@second.analytics' % (minutes_ago,),
+            user = self.create_user(email=f'user-{minutes_ago}@second.analytics',
                                     realm=self.second_realm, date_joined=creation_time)
             recipient = self.create_stream_with_recipient(
-                name='stream %s' % (minutes_ago,), realm=self.second_realm,
+                name=f'stream {minutes_ago}', realm=self.second_realm,
                 date_created=creation_time)[1]
             self.create_message(user, recipient, date_sent=creation_time)
         self.hourly_user = get_user('user-1@second.analytics', self.second_realm)
@@ -408,7 +408,7 @@ class TestSupportEndpoint(ZulipTestCase):

         def check_zulip_realm_query_result(result: HttpResponse) -> None:
             zulip_realm = get_realm("zulip")
-            self.assert_in_success_response(['<input type="hidden" name="realm_id" value="%s"' % (zulip_realm.id,),
+            self.assert_in_success_response([f'<input type="hidden" name="realm_id" value="{zulip_realm.id}"',
                                              'Zulip Dev</h3>',
                                              '<option value="1" selected>Self Hosted</option>',
                                              '<option value="2" >Limited</option>',
@@ -420,7 +420,7 @@ class TestSupportEndpoint(ZulipTestCase):

         def check_lear_realm_query_result(result: HttpResponse) -> None:
             lear_realm = get_realm("lear")
-            self.assert_in_success_response(['<input type="hidden" name="realm_id" value="%s"' % (lear_realm.id,),
+            self.assert_in_success_response([f'<input type="hidden" name="realm_id" value="{lear_realm.id}"',
                                              'Lear & Co.</h3>',
                                              '<option value="1" selected>Self Hosted</option>',
                                              '<option value="2" >Limited</option>',
@@ -532,7 +532,7 @@ class TestSupportEndpoint(ZulipTestCase):
         cordelia = self.example_user('cordelia')
         self.login_user(cordelia)

-        result = self.client_post("/activity/support", {"realm_id": "%s" % (cordelia.realm_id,), "plan_type": "2"})
+        result = self.client_post("/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"})
         self.assertEqual(result.status_code, 302)
         self.assertEqual(result["Location"], "/login/")

@@ -540,7 +540,7 @@ class TestSupportEndpoint(ZulipTestCase):
         self.login_user(iago)

         with mock.patch("analytics.views.do_change_plan_type") as m:
-            result = self.client_post("/activity/support", {"realm_id": "%s" % (iago.realm_id,), "plan_type": "2"})
+            result = self.client_post("/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "2"})
             m.assert_called_once_with(get_realm("zulip"), 2)
             self.assert_in_success_response(["Plan type of Zulip Dev changed from self hosted to limited"], result)

@@ -549,14 +549,14 @@ class TestSupportEndpoint(ZulipTestCase):
         lear_realm = get_realm('lear')
         self.login_user(cordelia)

-        result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"})
+        result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"})
         self.assertEqual(result.status_code, 302)
         self.assertEqual(result["Location"], "/login/")

         self.login('iago')

         with mock.patch("analytics.views.attach_discount_to_realm") as m:
-            result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"})
+            result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"})
             m.assert_called_once_with(get_realm("lear"), 25)
             self.assert_in_success_response(["Discount of Lear & Co. changed to 25 from None"], result)

@@ -565,19 +565,19 @@ class TestSupportEndpoint(ZulipTestCase):
         lear_realm = get_realm('lear')
         self.login_user(cordelia)

-        result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "deactivated"})
+        result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"})
         self.assertEqual(result.status_code, 302)
         self.assertEqual(result["Location"], "/login/")

         self.login('iago')

         with mock.patch("analytics.views.do_deactivate_realm") as m:
-            result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "deactivated"})
+            result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"})
             m.assert_called_once_with(lear_realm, self.example_user("iago"))
             self.assert_in_success_response(["Lear & Co. deactivated"], result)

         with mock.patch("analytics.views.do_send_realm_reactivation_email") as m:
-            result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "active"})
+            result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "active"})
             m.assert_called_once_with(lear_realm)
             self.assert_in_success_response(["Realm reactivation email sent to admins of Lear"], result)

@@ -586,19 +586,19 @@ class TestSupportEndpoint(ZulipTestCase):
         lear_realm = get_realm('lear')
         self.login_user(cordelia)

-        result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"})
+        result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"})
         self.assertEqual(result.status_code, 302)
         self.assertEqual(result["Location"], "/login/")

         self.login('iago')

         with mock.patch("analytics.views.do_scrub_realm") as m:
-            result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "scrub_realm": "scrub_realm"})
+            result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "scrub_realm": "scrub_realm"})
             m.assert_called_once_with(lear_realm)
             self.assert_in_success_response(["Lear & Co. scrubbed"], result)

         with mock.patch("analytics.views.do_scrub_realm") as m:
-            result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,)})
+            result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}"})
             m.assert_not_called()

 class TestGetChartDataHelpers(ZulipTestCase):
@@ -96,9 +96,9 @@ def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse:
     try:
         realm = get_realm(realm_str)
     except Realm.DoesNotExist:
-        return HttpResponseNotFound("Realm %s does not exist" % (realm_str,))
+        return HttpResponseNotFound(f"Realm {realm_str} does not exist")

-    return render_stats(request, '/realm/%s' % (realm_str,), realm.name or realm.string_id,
+    return render_stats(request, f'/realm/{realm_str}', realm.name or realm.string_id,
                         analytics_ready=is_analytics_ready(realm))

 @require_server_admin
@@ -106,8 +106,8 @@ def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse:
 def stats_for_remote_realm(request: HttpRequest, remote_server_id: str,
                            remote_realm_id: str) -> HttpResponse:
     server = RemoteZulipServer.objects.get(id=remote_server_id)
-    return render_stats(request, '/remote/%s/realm/%s' % (server.id, remote_realm_id),
-                        "Realm %s on server %s" % (remote_realm_id, server.hostname))
+    return render_stats(request, f'/remote/{server.id}/realm/{remote_realm_id}',
+                        f"Realm {remote_realm_id} on server {server.hostname}")

 @require_server_admin_api
 @has_request_variables
@@ -136,8 +136,8 @@ def stats_for_installation(request: HttpRequest) -> HttpResponse:
 @require_server_admin
 def stats_for_remote_installation(request: HttpRequest, remote_server_id: str) -> HttpResponse:
     server = RemoteZulipServer.objects.get(id=remote_server_id)
-    return render_stats(request, '/remote/%s/installation' % (server.id,),
-                        'remote Installation %s' % (server.hostname,), True, True)
+    return render_stats(request, f'/remote/{server.id}/installation',
+                        f'remote Installation {server.hostname}', True, True)

 @require_server_admin_api
 @has_request_variables
@@ -332,7 +332,7 @@ def table_filtered_to_id(table: Type[BaseCount], key_id: int) -> QuerySet:
     elif table == RemoteRealmCount:
         return RemoteRealmCount.objects.filter(realm_id=key_id)
     else:
-        raise AssertionError("Unknown table: %s" % (table,))
+        raise AssertionError(f"Unknown table: {table}")

 def client_label_map(name: str) -> str:
     if name == "website":
@@ -463,7 +463,7 @@ def get_realm_day_counts() -> Dict[str, Dict[str, str]]:
         else:
             good_bad = 'neutral'

-        return '<td class="number %s">%s</td>' % (good_bad, cnt)
+        return f'<td class="number {good_bad}">{cnt}</td>'

     cnts = (format_count(raw_cnts[0], 'neutral')
             + ''.join(map(format_count, raw_cnts[1:])))
@@ -629,7 +629,7 @@ def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
             total_hours += hours
             row['hours'] = str(int(hours))
             try:
-                row['hours_per_user'] = '%.1f' % (hours / row['dau_count'],)
+                row['hours_per_user'] = '{:.1f}'.format(hours / row['dau_count'])
             except Exception:
                 pass

@@ -709,7 +709,7 @@ def user_activity_intervals() -> Tuple[mark_safe, Dict[str, float]]:

     for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
         realm_duration = timedelta(0)
-        output += '<hr>%s\n' % (string_id,)
+        output += f'<hr>{string_id}\n'
         for email, intervals in itertools.groupby(realm_intervals, by_email):
             duration = timedelta(0)
             for interval in intervals:
@@ -723,9 +723,9 @@ def user_activity_intervals() -> Tuple[mark_safe, Dict[str, float]]:

         realm_minutes[string_id] = realm_duration.total_seconds() / 60

-    output += "\nTotal Duration: %s\n" % (total_duration,)
-    output += "\nTotal Duration in minutes: %s\n" % (total_duration.total_seconds() / 60.,)
-    output += "Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)
+    output += f"\nTotal Duration: {total_duration}\n"
+    output += f"\nTotal Duration in minutes: {total_duration.total_seconds() / 60.}\n"
+    output += f"Total Duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
     content = mark_safe('<pre>' + output + '</pre>')
     return content, realm_minutes

@@ -841,7 +841,7 @@ def ad_hoc_queries() -> List[Dict[str, str]]:
     ###

     for mobile_type in ['Android', 'ZulipiOS']:
-        title = '%s usage' % (mobile_type,)
+        title = f'{mobile_type} usage'

         query = SQL('''
             select
@@ -1284,13 +1284,13 @@ def format_date_for_activity_reports(date: Optional[datetime]) -> str:
 def user_activity_link(email: str) -> mark_safe:
     url_name = 'analytics.views.get_user_activity'
     url = reverse(url_name, kwargs=dict(email=email))
-    email_link = '<a href="%s">%s</a>' % (url, email)
+    email_link = f'<a href="{url}">{email}</a>'
     return mark_safe(email_link)

 def realm_activity_link(realm_str: str) -> mark_safe:
     url_name = 'analytics.views.get_realm_activity'
     url = reverse(url_name, kwargs=dict(realm_str=realm_str))
-    realm_link = '<a href="%s">%s</a>' % (url, realm_str)
+    realm_link = f'<a href="{url}">{realm_str}</a>'
     return mark_safe(realm_link)

 def realm_stats_link(realm_str: str) -> mark_safe:
@@ -1449,7 +1449,7 @@ def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse:
     try:
         admins = Realm.objects.get(string_id=realm_str).get_human_admin_users()
     except Realm.DoesNotExist:
-        return HttpResponseNotFound("Realm %s does not exist" % (realm_str,))
+        return HttpResponseNotFound(f"Realm {realm_str} does not exist")

     admin_emails = {admin.delivery_email for admin in admins}

@@ -185,7 +185,7 @@ def do_create_stripe_customer(user: UserProfile, stripe_token: Optional[str]=Non
     # bad thing that will happen is that we will create an extra stripe
     # customer that we can delete or ignore.
     stripe_customer = stripe.Customer.create(
-        description="%s (%s)" % (realm.string_id, realm.name),
+        description=f"{realm.string_id} ({realm.name})",
         email=user.delivery_email,
         metadata={'realm_id': realm.id, 'realm_str': realm.string_id},
         source=stripe_token)
@@ -14,7 +14,7 @@ class Customer(models.Model):
     default_discount: Optional[Decimal] = models.DecimalField(decimal_places=4, max_digits=7, null=True)

     def __str__(self) -> str:
-        return "<Customer %s %s>" % (self.realm, self.stripe_customer_id)
+        return f"<Customer {self.realm} {self.stripe_customer_id}>"

 def get_customer_by_realm(realm: Realm) -> Optional[Customer]:
     return Customer.objects.filter(realm=realm).first()
@@ -144,7 +144,7 @@ def normalize_fixture_data(decorated_function: CallableT,
     for i, timestamp_field in enumerate(tested_timestamp_fields):
         # Don't use (..) notation, since the matched timestamp can easily appear in other fields
         pattern_translations[
-            '"%s": 1[5-9][0-9]{8}(?![0-9-])' % (timestamp_field,)
+            f'"{timestamp_field}": 1[5-9][0-9]{{8}}(?![0-9-])'
         ] = '"%s": 1%02d%%07d' % (timestamp_field, i+1)

     normalized_values: Dict[str, Dict[str, str]] = {
@@ -463,8 +463,8 @@ class StripeTest(StripeTestCase):
         self.assert_not_in_success_response(['Pay annually'], response)
         for substring in [
                 'Zulip Standard', str(self.seat_count),
-                'You are using', '%s of %s licenses' % (self.seat_count, self.seat_count),
-                'Your plan will renew on', 'January 2, 2013', '$%s.00' % (80 * self.seat_count,),
+                'You are using', f'{self.seat_count} of {self.seat_count} licenses',
+                'Your plan will renew on', 'January 2, 2013', f'${80 * self.seat_count}.00',
                 'Visa ending in 4242',
                 'Update card']:
             self.assert_in_response(substring, response)
@@ -547,7 +547,7 @@ class StripeTest(StripeTestCase):
         self.assert_not_in_success_response(['Pay annually', 'Update card'], response)
         for substring in [
                 'Zulip Standard', str(123),
-                'You are using', '%s of %s licenses' % (self.seat_count, 123),
+                'You are using', f'{self.seat_count} of {123} licenses',
                 'Your plan will renew on', 'January 2, 2013', '$9,840.00', # 9840 = 80 * 123
                 'Billed by invoice']:
             self.assert_in_response(substring, response)
@@ -618,8 +618,8 @@ class StripeTest(StripeTestCase):
         self.assert_not_in_success_response(['Pay annually'], response)
         for substring in [
                 'Zulip Standard', 'Free Trial', str(self.seat_count),
-                'You are using', '%s of %s licenses' % (self.seat_count, self.seat_count),
-                'Your plan will be upgraded to', 'March 2, 2012', '$%s.00' % (80 * self.seat_count,),
+                'You are using', f'{self.seat_count} of {self.seat_count} licenses',
+                'Your plan will be upgraded to', 'March 2, 2012', f'${80 * self.seat_count}.00',
                 'Visa ending in 4242',
                 'Update card']:
             self.assert_in_response(substring, response)
@@ -772,7 +772,7 @@ class StripeTest(StripeTestCase):
         self.assert_not_in_success_response(['Pay annually'], response)
         for substring in [
                 'Zulip Standard', 'Free Trial', str(self.seat_count),
-                'You are using', '%s of %s licenses' % (self.seat_count, 123),
+                'You are using', f'{self.seat_count} of {123} licenses',
                 'Your plan will be upgraded to', 'March 2, 2012',
                 f'{80 * 123:,.2f}', 'Billed by invoice'
         ]:
@@ -114,7 +114,7 @@ def upgrade(request: HttpRequest, user: UserProfile,
         )
         return json_error(e.message, data={'error_description': e.description})
     except Exception as e:
-        billing_logger.exception("Uncaught exception in billing: %s" % (e,))
+        billing_logger.exception(f"Uncaught exception in billing: {e}")
         error_message = BillingError.CONTACT_SUPPORT
         error_description = "uncaught exception during upgrade"
         return json_error(error_message, data={'error_description': error_description})
@@ -121,7 +121,7 @@ def run_tests(files: Iterable[str], external_host: str) -> None:
     for test_file in test_files:
         test_name = os.path.basename(test_file)
         cmd = ["node_modules/.bin/casperjs"] + remote_debug + verbose + xunit_export + ["test", test_file]
-        print("\n\n===================== %s\nRunning %s\n\n" % (test_name, " ".join(map(shlex.quote, cmd))), flush=True)
+        print("\n\n===================== {}\nRunning {}\n\n".format(test_name, " ".join(map(shlex.quote, cmd))), flush=True)
         ret = subprocess.call(cmd)
         if ret != 0:
             return ret
@@ -36,7 +36,7 @@ def nagios_from_file(results_file: str, max_time_diff: int=60 * 2) -> 'Tuple[int
     state = pieces[2]
     data = pieces[3]

-    return (ret, "%s: %s" % (state, data))
+    return (ret, f"{state}: {data}")

 if __name__ == "__main__":
     RESULTS_FILE = sys.argv[1]
@@ -12,7 +12,7 @@ import sys
 wildcard = os.path.join("/var/log/zulip/queue_error", '*.errors')
 clean = True
 for fn in glob.glob(wildcard):
-    print('WARNING: Queue errors logged in %s' % (fn,))
+    print(f'WARNING: Queue errors logged in {fn}')
     clean = False

 if not clean:
@@ -17,7 +17,7 @@ if len(sys.argv) < 2:
     print("Please pass the name of the consumer file to check")
     exit(1)

-RESULTS_FILE = "/var/lib/nagios_state/check-rabbitmq-consumers-%s" % (sys.argv[1],)
+RESULTS_FILE = f"/var/lib/nagios_state/check-rabbitmq-consumers-{sys.argv[1]}"

 ret, result = nagios_from_file(RESULTS_FILE)

@@ -88,24 +88,24 @@ states = {
 def report(state: str, timestamp: Any = None, msg: Optional[str] = None) -> None:
     now = int(time.time())
     if msg is None:
-        msg = "send time was %s" % (timestamp,)
+        msg = f"send time was {timestamp}"
     state_file_path = "/var/lib/nagios_state/check_send_receive_state"
     with open(state_file_path + ".tmp", 'w') as f:
-        f.write("%s|%s|%s|%s\n" % (now, states[state], state, msg))
+        f.write(f"{now}|{states[state]}|{state}|{msg}\n")
     os.rename(state_file_path + ".tmp", state_file_path)
-    print("%s: %s" % (state, msg))
+    print(f"{state}: {msg}")
     exit(states[state])

 def send_zulip(sender: zulip.Client, message: Dict[str, Any]) -> None:
     result = sender.send_message(message)
     if result["result"] != "success" and options.nagios:
-        report("CRITICAL", msg="Error sending Zulip, args were: %s, %s" % (message, result))
+        report("CRITICAL", msg=f"Error sending Zulip, args were: {message}, {result}")

 def get_zulips() -> List[Dict[str, Any]]:
     global queue_id, last_event_id
     res = zulip_recipient.get_events(queue_id=queue_id, last_event_id=last_event_id)
     if 'error' in res.get('result', {}):
-        report("CRITICAL", msg="Error receiving Zulips, error was: %s" % (res["msg"],))
+        report("CRITICAL", msg="Error receiving Zulips, error was: {}".format(res["msg"]))
     for event in res['events']:
         last_event_id = max(last_event_id, int(event['id']))
     # If we get a heartbeat event, that means we've been hanging for
@@ -141,10 +141,10 @@ zulip_recipient = zulip.Client(
 try:
     res = zulip_recipient.register(event_types=["message"])
     if 'error' in res.get('result', {}):
-        report("CRITICAL", msg="Error subscribing to Zulips: %s" % (res['msg'],))
+        report("CRITICAL", msg="Error subscribing to Zulips: {}".format(res['msg']))
     queue_id, last_event_id = (res['queue_id'], res['last_event_id'])
 except Exception:
-    report("CRITICAL", msg="Error subscribing to Zulips:\n%s" % (traceback.format_exc(),))
+    report("CRITICAL", msg=f"Error subscribing to Zulips:\n{traceback.format_exc()}")
 msg_to_send = str(random.getrandbits(64))
 time_start = time.time()

@@ -172,6 +172,6 @@ if options.nagios:
         report('WARNING', timestamp=seconds_diff)

 if options.munin:
-    print("sendreceive.value %s" % (seconds_diff,))
+    print(f"sendreceive.value {seconds_diff}")
 elif options.nagios:
     report('OK', timestamp=seconds_diff)
@@ -22,17 +22,17 @@ states = {
 }

 def report(state: str, msg: str) -> "NoReturn":
-    print("%s: %s" % (state, msg))
+    print(f"{state}: {msg}")
     exit(states[state])

 def get_loc_over_ssh(host: str, func: str) -> str:
     try:
         return subprocess.check_output(['ssh', host,
-                                        'psql -v ON_ERROR_STOP=1 zulip -t -c "SELECT %s()"' % (func,)],
+                                        f'psql -v ON_ERROR_STOP=1 zulip -t -c "SELECT {func}()"'],
                                        stderr=subprocess.STDOUT,
                                        universal_newlines=True)
     except subprocess.CalledProcessError as e:
-        report('CRITICAL', 'ssh failed: %s: %s' % (str(e), e.output))
+        report('CRITICAL', f'ssh failed: {str(e)}: {e.output}')

 def loc_to_abs_offset(loc_str: str) -> int:
     m = re.match(r'^\s*([0-9a-fA-F]+)/([0-9a-fA-F]+)\s*$', loc_str)
@@ -22,7 +22,7 @@ states = {
 }

 def report(state: str, num: str) -> None:
-    print("%s: %s rows in fts_update_log table" % (state, num))
+    print(f"{state}: {num} rows in fts_update_log table")
     exit(states[state])

 conn = psycopg2.connect(database="zulip")
@@ -13,7 +13,7 @@ states = {
 }

 def report(state: str, msg: str) -> None:
-    print("%s: %s" % (state, msg))
+    print(f"{state}: {msg}")
     exit(states[state])

 if subprocess.check_output(['psql', '-v', 'ON_ERROR_STOP=1',
@@ -28,6 +28,6 @@ except OSError:
     report('UNKNOWN', 'could not determine completion time of last Postgres backup')

 if datetime.now(tz=timezone.utc) - last_backup > timedelta(hours=25):
-    report('CRITICAL', 'last Postgres backup completed more than 25 hours ago: %s' % (last_backup,))
+    report('CRITICAL', f'last Postgres backup completed more than 25 hours ago: {last_backup}')

-report('OK', 'last Postgres backup completed less than 25 hours ago: %s' % (last_backup,))
+report('OK', f'last Postgres backup completed less than 25 hours ago: {last_backup}')
@@ -42,7 +42,7 @@ if is_rhel_based:
 else:
     pg_data_paths = glob.glob('/var/lib/postgresql/*/main')
 if len(pg_data_paths) != 1:
-    print("Postgres installation is not unique: %s" % (pg_data_paths,))
+    print(f"Postgres installation is not unique: {pg_data_paths}")
     sys.exit(1)
 pg_data_path = pg_data_paths[0]
 run(['env-wal-e', 'backup-push', pg_data_path])
@@ -142,7 +142,7 @@ while True:
         # Catch up on any historical columns
         while True:
             rows_updated = update_fts_columns(cursor)
-            notice = "Processed %s rows catching up" % (rows_updated,)
+            notice = f"Processed {rows_updated} rows catching up"
             if rows_updated > 0:
                 logger.info(notice)
             else:
@@ -22,7 +22,7 @@ states: Dict[str, int] = {
 }

 def report(state: str, output: str) -> None:
-    print("%s\n%s" % (state, output))
+    print(f"{state}\n{output}")
     exit(states[state])

 output = ""
@@ -41,7 +41,7 @@ for results_file_name in os.listdir(RESULTS_DIR):
         down_count += 1
         this_state = "DOWN"
     last_check_ts = time.strftime("%Y-%m-%d %H:%M %Z", time.gmtime(last_check))
-    output += "%s: %s (%s)\n" % (results_file, this_state, last_check_ts)
+    output += f"{results_file}: {this_state} ({last_check_ts})\n"

 if down_count == 0:
     state = "OK"
@@ -39,11 +39,11 @@ def report(state: str, short_msg: str, too_old: Optional[Set[Any]] = None) -> No
     too_old_data = ""
     if too_old:
         too_old_data = "\nLast call to get_message for recently out of date mirrors:\n" + "\n".join(
-            ["%16s: %s" % (user.user_profile.email,
+            ["{:>16}: {}".format(user.user_profile.email,
                                  user.last_visit.strftime("%Y-%m-%d %H:%M %Z")
                                  ) for user in too_old]
         )
-    print("%s: %s%s" % (state, short_msg, too_old_data))
+    print(f"{state}: {short_msg}{too_old_data}")

     exit(states[state])

@@ -23,7 +23,7 @@ states: Dict[str, int] = {
 }

 def report(state: str, data: str, last_check: float) -> None:
-    print("%s: Last test run completed at %s\n%s" % (
+    print("{}: Last test run completed at {}\n{}".format(
         state, time.strftime("%Y-%m-%d %H:%M %Z", time.gmtime(last_check)),
         data))
     exit(states[state])
@@ -131,7 +131,7 @@ for device in macs.values():
     for (count, ip) in enumerate(to_configure):
         # Configure the IP via a virtual interface
         device = "ens%i:%i" % (device_number, count)
-        log.info("Configuring %s with IP %s" % (device, ip))
+        log.info(f"Configuring {device} with IP {ip}")
         subprocess.check_call(['/sbin/ifconfig', device, ip])
         subprocess.check_call(
             ['/sbin/iptables', '-t', 'mangle', '-A', 'OUTPUT', '-m', 'conntrack', '--ctorigdst',
@@ -77,7 +77,7 @@ def analyze_queue_stats(queue_name: str, stats: Dict[str, Any],
         # 50).
         return dict(status=CRITICAL,
                     name=queue_name,
-                    message='queue appears to be stuck, last update %s, queue size %s' % (
+                    message='queue appears to be stuck, last update {}, queue size {}'.format(
                         stats['update_time'], queue_count_rabbitmqctl))

     current_size = stats['current_queue_size']
@@ -108,8 +108,7 @@ def analyze_queue_stats(queue_name: str, stats: Dict[str, Any],

             return dict(status=status,
                         name=queue_name,
-                        message='clearing the backlog will take too long: %ss, size: %s' % (
-                            expected_time_to_clear_backlog, current_size))
+                        message=f'clearing the backlog will take too long: {expected_time_to_clear_backlog}s, size: {current_size}')
     else:
         # We slept recently, so treat this as a burst.
         if expected_time_to_clear_backlog > MAX_SECONDS_TO_CLEAR_FOR_BURSTS[queue_name]:
@@ -120,8 +119,7 @@ def analyze_queue_stats(queue_name: str, stats: Dict[str, Any],

             return dict(status=status,
                         name=queue_name,
-                        message='clearing the burst will take too long: %ss, size: %s' % (
-                            expected_time_to_clear_backlog, current_size))
+                        message=f'clearing the burst will take too long: {expected_time_to_clear_backlog}s, size: {current_size}')

     return dict(status=OK,
                 name=queue_name,
@@ -139,10 +137,10 @@ def check_other_queues(queue_counts_dict: Dict[str, int]) -> List[Dict[str, Any]

         if count > CRITICAL_COUNT_THRESHOLD_DEFAULT:
             results.append(dict(status=CRITICAL, name=queue,
-                                message='count critical: %s' % (count,)))
+                                message=f'count critical: {count}'))
         elif count > WARN_COUNT_THRESHOLD_DEFAULT:
             results.append(dict(status=WARNING, name=queue,
-                                message='count warning: %s' % (count,)))
+                                message=f'count warning: {count}'))
         else:
             results.append(dict(status=OK, name=queue, message=''))

@@ -210,6 +208,6 @@ def check_rabbitmq_queues() -> None:
             queue_error_template.format(result['name'], states[result['status']], result['message'])
             for result in results if result['status'] > 0
         ])
-        print("%s|%s|%s|%s" % (now, status, states[status], error_message))
+        print(f"{now}|{status}|{states[status]}|{error_message}")
     else:
-        print("%s|%s|%s|queues normal" % (now, status, states[status]))
+        print(f"{now}|{status}|{states[status]}|queues normal")
@@ -15,9 +15,9 @@ setup_path()
 from scripts.lib.zulip_tools import get_config_file

 def write_realm_nginx_config_line(f: Any, host: str, port: str) -> None:
-    f.write("""if ($host = '%s') {
-    set $tornado_server http://tornado%s;
-}\n""" % (host, port))
+    f.write("""if ($host = '{}') {{
+    set $tornado_server http://tornado{};
+}}\n""".format(host, port))

 # Basic system to do Tornado sharding. Writes two output .tmp files that need
 # to be renamed to the following files to finalize the changes:
@@ -48,7 +48,7 @@ with open('/etc/zulip/nginx_sharding.conf.tmp', 'w') as nginx_sharding_conf_f, \
             host = shard
         else:
             host = f"{shard}.{external_host}"
-        assert host not in shard_map, "host %s duplicated" % (host,)
+        assert host not in shard_map, f"host {host} duplicated"
         shard_map[host] = int(port)
         write_realm_nginx_config_line(nginx_sharding_conf_f, host, port)
     nginx_sharding_conf_f.write('\n')
@@ -14,7 +14,7 @@ from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, FAIL, ENDC, make_deploy_pat
 import version

 if len(sys.argv) != 2:
-    print(FAIL + "Usage: %s <tarball>" % (sys.argv[0],) + ENDC)
+    print(FAIL + f"Usage: {sys.argv[0]} <tarball>" + ENDC)
     sys.exit(1)

 tarball_path = sys.argv[1]
@@ -28,12 +28,12 @@ logging.basicConfig(format="%(asctime)s upgrade-zulip: %(message)s",
                     level=logging.INFO)

 if len(sys.argv) != 2:
-    print(FAIL + "Usage: %s <tarball>" % (sys.argv[0],) + ENDC)
+    print(FAIL + f"Usage: {sys.argv[0]} <tarball>" + ENDC)
     sys.exit(1)

 tarball_path = sys.argv[1]

-error_rerun_script = "%s/current/scripts/upgrade-zulip %s" % (DEPLOYMENTS_DIR, tarball_path)
+error_rerun_script = f"{DEPLOYMENTS_DIR}/current/scripts/upgrade-zulip {tarball_path}"
 get_deployment_lock(error_rerun_script)

 try:
@@ -42,7 +42,7 @@ if args.remote_url:
 os.makedirs(DEPLOYMENTS_DIR, exist_ok=True)
 os.makedirs('/home/zulip/logs', exist_ok=True)

-error_rerun_script = "%s/current/scripts/upgrade-zulip-from-git %s" % (DEPLOYMENTS_DIR, refname)
+error_rerun_script = f"{DEPLOYMENTS_DIR}/current/scripts/upgrade-zulip-from-git {refname}"
 get_deployment_lock(error_rerun_script)

 try:
@@ -81,7 +81,5 @@ for queue_name in consumers.keys():
     else:
         status = 0
     with open(state_file_tmp, "w") as f:
-        f.write("%s|%s|%s|queue %s has %s consumers, needs %s\n" % (
-            now, status, states[status], queue_name,
-            consumers[queue_name], target_count))
+        f.write(f"{now}|{status}|{states[status]}|queue {queue_name} has {consumers[queue_name]} consumers, needs {target_count}\n")
     os.rename(state_file_tmp, state_file_path)
@@ -32,4 +32,4 @@ def nagios_from_file(results_file: str) -> Tuple[int, str]:
     state = pieces[2]
     data = pieces[3]

-    return (ret, "%s: %s" % (state, data))
+    return (ret, f"{state}: {data}")
@@ -66,7 +66,7 @@ if tornado_processes > 1:
         # supervisord group where if any individual process is slow to
         # stop, the whole bundle stays stopped for an extended time.
         logging.info("Restarting Tornado process on port %s", p)
-        subprocess.check_call(["supervisorctl", "restart", "zulip-tornado:port-%s" % (p,)])
+        subprocess.check_call(["supervisorctl", "restart", f"zulip-tornado:port-{p}"])
 else:
     logging.info("Restarting Tornado process")
     subprocess.check_call(["supervisorctl", "restart", "zulip-tornado", "zulip-tornado:*"])
@@ -82,7 +82,7 @@ def generate_secrets(development: bool = False) -> None:
         return name not in current_conf

     def add_secret(name: str, value: str) -> None:
-        lines.append("%s = %s\n" % (name, value))
+        lines.append(f"{name} = {value}\n")
         current_conf[name] = value

     for name in AUTOGENERATED_SETTINGS:
@@ -178,7 +178,7 @@ def generate_secrets(development: bool = False) -> None:
         # the end of the file due to human editing.
         f.write("\n" + "".join(lines))

-    print("Generated new secrets in %s." % (OUTPUT_SETTINGS_FILENAME,))
+    print(f"Generated new secrets in {OUTPUT_SETTINGS_FILENAME}.")

 if __name__ == '__main__':

@@ -78,7 +78,7 @@ def strip_unnecesary_tags(text: str) -> str:
         text = text[start:end]
         return text
     else:
-        raise ValueError("Template does not have %s or %s" % (start_block, end_block))
+        raise ValueError(f"Template does not have {start_block} or {end_block}")

 def get_all_templates_from_directory(directory: str) -> Set[str]:
     result = set()
@@ -31,7 +31,7 @@ Exec { path => "/usr/sbin:/usr/bin:/sbin:/bin" }
 """

 for pclass in re.split(r'\s*,\s*', config.get('machine', 'puppet_classes')):
-    puppet_config += "include %s\n" % (pclass,)
+    puppet_config += f"include {pclass}\n"

 # We use the puppet configuration from the same Zulip checkout as this script
 scripts_path = os.path.join(BASE_DIR, "scripts")
@@ -7,7 +7,7 @@ import subprocess
 # Open the SVG and find the number text elements using XPath
 tree = ET.parse('orig.svg')
 elems = [tree.getroot().findall(
-    ".//*[@id='%s']/{http://www.w3.org/2000/svg}tspan" % (name,))[0]
+    f".//*[@id='{name}']/{{http://www.w3.org/2000/svg}}tspan")[0]
     for name in ('number_back', 'number_front')]

 for i in range(1, 100):
@@ -64,7 +64,7 @@ def check_issue_labels() -> None:
         if args.force:
             response = requests.get(next_page_url)
         else:
-            response = requests.get(next_page_url, headers={'Authorization': 'token %s' % (token,)})
+            response = requests.get(next_page_url, headers={'Authorization': f'token {token}'})
         if response.status_code == 401:
             sys.exit("Error. Please check the token.")
         if response.status_code == 403:
@@ -65,11 +65,11 @@ def create_single_page(pattern: str, out_dir: str, href: str, calls: List[Call])
             }
             </style>
             ''')
-        f.write('<h3>%s</h3>\n' % (html.escape(pattern),))
+        f.write(f'<h3>{html.escape(pattern)}</h3>\n')
         calls.sort(key=lambda call: call['status_code'])
         for call in calls:
             f.write('<hr>')
-            f.write('\n%s' % (fix_test_name(call['test_name']),))
+            f.write('\n{}'.format(fix_test_name(call['test_name'])))
             f.write('<div class="test">')
             try:
                 f.write(call['url'])
@@ -77,7 +77,7 @@ def create_single_page(pattern: str, out_dir: str, href: str, calls: List[Call])
                 f.write(call['url'].encode('utf8'))
             f.write('<br>\n')
             f.write(call['method'] + '<br>\n')
-            f.write('status code: %s<br>\n' % (call['status_code'],))
+            f.write('status code: {}<br>\n'.format(call['status_code']))
             f.write('<br>')
             f.write('</div>')

@@ -136,13 +136,13 @@ def create_user_docs() -> None:
         f.write('<ul>\n')
         for pattern in sorted(groups[name]):
             href = pattern.replace('/', '-') + '.html'
-            link = '<a href="%s">%s</a>' % (href, html.escape(pattern))
+            link = f'<a href="{href}">{html.escape(pattern)}</a>'
            f.write('<li>' + link + '</li>\n')
             create_single_page(pattern, out_dir, href, pattern_dict[pattern])
         f.write('</ul>')
         f.write('\n')

-    print('open %s' % (main_page,))
+    print(f'open {main_page}')


 if __name__ == '__main__':
@@ -190,8 +190,8 @@ def build_id_dict(templates: List[str]) -> (Dict[str, List[str]]):
             list_tags = tokenize(text)
         except FormattedException as e:
             raise Exception('''
-                fn: %s
-                %s''' % (fn, e))
+                fn: {}
+                {}'''.format(fn, e))

         for tag in list_tags:
             info = get_tag_info(tag)
@@ -276,7 +276,7 @@ def main(options: argparse.Namespace) -> int:

     destroyed = destroy_leaked_test_databases()
     if destroyed:
-        print("Dropped %s stale test databases!" % (destroyed,))
+        print(f"Dropped {destroyed} stale test databases!")

     clean_unused_caches()

@@ -304,7 +304,7 @@ def main(options: argparse.Namespace) -> int:
         pass

     version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
-    print('writing to %s\n' % (version_file,))
+    print(f'writing to {version_file}\n')
     open(version_file, 'w').write(PROVISION_VERSION + '\n')

     print()
@@ -11,7 +11,7 @@ def check_venv(filename: str) -> None:
         ujson
         zulip
     except ImportError:
-        print("You need to run %s inside a Zulip dev environment." % (filename,))
+        print(f"You need to run {filename} inside a Zulip dev environment.")
         user_id = os.getuid()
         user_name = pwd.getpwuid(user_id).pw_name
         if user_name != 'vagrant' and user_name != 'zulipdev':
@@ -213,8 +213,8 @@ def validate(fn: Optional[str] = None, text: Optional[str] = None, check_indent:
         tokens = tokenize(text)
     except FormattedException as e:
         raise TemplateParserException('''
-            fn: %s
-            %s''' % (fn, e))
+            fn: {}
+            {}'''.format(fn, e))

 class State:
     def __init__(self, func: Callable[[Token], None]) -> None:
@@ -37,7 +37,7 @@ def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[
     if server.poll() is not None:
         message = 'Server died unexpectedly!'
         if log_file:
-            message += '\nSee %s\n' % (log_file,)
+            message += f'\nSee {log_file}\n'
         raise RuntimeError(message)

 def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
@@ -5,7 +5,7 @@ import sys

 def clean_html(filenames: List[str]) -> None:
     for fn in filenames:
-        print('Prettifying: %s' % (fn,))
+        print(f'Prettifying: {fn}')
         with open(fn) as f:
             html = f.read()
         phtml = pretty_print_html(html)
@@ -31,14 +31,14 @@ def check_git_pristine() -> None:
 def ensure_on_clean_master() -> None:
     branch = get_git_branch()
     if branch != 'master':
-        exit('You are still on a feature branch: %s' % (branch,))
+        exit(f'You are still on a feature branch: {branch}')
     check_git_pristine()
     run(['git', 'fetch', 'upstream', 'master'])
     run(['git', 'rebase', 'upstream/master'])

 def create_pull_branch(pull_id: int) -> None:
     run(['git', 'fetch', 'upstream', 'pull/%d/head' % (pull_id,)])
-    run(['git', 'checkout', '-B', 'review-%s' % (pull_id,), 'FETCH_HEAD'])
+    run(['git', 'checkout', '-B', f'review-{pull_id}', 'FETCH_HEAD'])
     run(['git', 'rebase', 'upstream/master'])
     run(['git', 'log', 'upstream/master..', '--oneline'])
     run(['git', 'diff', 'upstream/master..', '--name-status'])
@@ -98,7 +98,7 @@ if options.test:
 else:
     settings_module = "zproject.settings"

-manage_args = ['--settings=%s' % (settings_module,)]
+manage_args = [f'--settings={settings_module}']
 os.environ['DJANGO_SETTINGS_MODULE'] = settings_module

 sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
@@ -158,7 +158,7 @@ def server_processes() -> List[List[str]]:
          './puppet/zulip/files/postgresql/process_fts_updates', '--quiet'],
         ['./manage.py', 'deliver_scheduled_messages'],
         ['/srv/zulip-thumbor-venv/bin/thumbor', '-c', './zthumbor/thumbor.conf',
-         '-p', '%s' % (thumbor_port,)],
+         '-p', f'{thumbor_port}'],
     ]

 # NORMAL (but slower) operation:
|
|||
# anyway, because EMOJI_CACHE_PATH is created by puppet before
|
||||
# build_emoji would be run.
|
||||
run_as_root(["mkdir", "-p", EMOJI_CACHE_PATH])
|
||||
run_as_root(["chown", "%s:%s" % (os.getuid(), os.getgid()), EMOJI_CACHE_PATH])
|
||||
run_as_root(["chown", f"{os.getuid()}:{os.getgid()}", EMOJI_CACHE_PATH])
|
||||
|
||||
sha1_hexdigest = generate_sha1sum_emoji(ZULIP_PATH)
|
||||
source_emoji_dump = os.path.join(EMOJI_CACHE_PATH, sha1_hexdigest, 'emoji')
|
||||
|
@ -107,7 +107,7 @@ def main() -> None:
|
|||
os.remove(os.path.join(TARGET_EMOJI_STYLES, filename))
|
||||
|
||||
def percent(f: float) -> str:
|
||||
return '%0.3f%%' % (f * 100,)
|
||||
return f'{f * 100:0.3f}%'
|
||||
|
||||
def get_square_size(emoji_data: List[Dict[str, Any]]) -> int:
|
||||
"""
|
||||
|
@ -207,7 +207,7 @@ def generate_sprite_css_files(cache_path: str,
|
|||
'pos_y': percent(emoji["sheet_y"] / (n - 1)),
|
||||
}
|
||||
|
||||
SPRITE_CSS_PATH = os.path.join(cache_path, '%s-sprite.css' % (emojiset,))
|
||||
SPRITE_CSS_PATH = os.path.join(cache_path, f'{emojiset}-sprite.css')
|
||||
with open(SPRITE_CSS_PATH, 'w') as f:
|
||||
f.write(SPRITE_CSS_FILE_TEMPLATE % {'emojiset': emojiset,
|
||||
'alt_name': alt_name,
|
||||
|
|
|
@ -47,12 +47,12 @@ def load_data(data_file: str) -> List[List[str]]:
|
|||
|
||||
def check_uniqueness(emoji_name: str) -> None:
|
||||
if emoji_name in emoji_names:
|
||||
raise Exception("Duplicate emoji name: %s" % (emoji_name,))
|
||||
raise Exception(f"Duplicate emoji name: {emoji_name}")
|
||||
emoji_names.add(emoji_name)
|
||||
|
||||
def check_valid_emoji_name(emoji_name: str) -> None:
|
||||
if re.fullmatch("[+-]?[a-z0-9_-]+", emoji_name) is None:
|
||||
raise Exception("Invalid emoji name: %s" % (emoji_name,))
|
||||
raise Exception(f"Invalid emoji name: {emoji_name}")
|
||||
|
||||
def check_emoji_names(canonical_name: str, aliases: List[str]) -> None:
|
||||
if canonical_name == 'X':
|
||||
|
|
|
@ -314,7 +314,7 @@ def main() -> None:
|
|||
classname = suite.rsplit('.', 1)[0]
|
||||
rewrite_arguments(classname)
|
||||
elif suite[0].isupper():
|
||||
rewrite_arguments('class %s(' % (suite,))
|
||||
rewrite_arguments(f'class {suite}(')
|
||||
|
||||
for i, suite in enumerate(args):
|
||||
if suite.startswith('test'):
|
||||
|
@ -403,7 +403,7 @@ def main() -> None:
|
|||
# We only check the templates if all the tests ran and passed
|
||||
if not failures and full_suite and templates_not_rendered:
|
||||
missed_count = len(templates_not_rendered)
|
||||
print("\nError: %s templates have no tests!" % (missed_count,))
|
||||
print(f"\nError: {missed_count} templates have no tests!")
|
||||
for template in templates_not_rendered:
|
||||
print(f' {template}')
|
||||
print("See zerver/tests/test_templates.py for the exclude list.")
|
||||
|
@ -424,8 +424,8 @@ def main() -> None:
|
|||
for path in enforce_fully_covered:
|
||||
missing_lines = cov.analysis2(path)[3]
|
||||
if len(missing_lines) > 0:
|
||||
print("ERROR: %s no longer has complete backend test coverage" % (path,))
|
||||
print(" Lines missing coverage: %s" % (missing_lines,))
|
||||
print(f"ERROR: {path} no longer has complete backend test coverage")
|
||||
print(f" Lines missing coverage: {missing_lines}")
|
||||
print()
|
||||
failures = True
|
||||
if failures:
|
||||
|
@ -439,7 +439,7 @@ def main() -> None:
|
|||
try:
|
||||
missing_lines = cov.analysis2(path)[3]
|
||||
if len(missing_lines) == 0 and path != "zerver/lib/migrate.py":
|
||||
print("ERROR: %s has complete backend test coverage but is still in not_yet_fully_covered." % (path,))
|
||||
print(f"ERROR: {path} has complete backend test coverage but is still in not_yet_fully_covered.")
|
||||
ok = False
|
||||
except coverage.misc.NoSource:
|
||||
continue
|
||||
|
@ -471,7 +471,7 @@ def main() -> None:
|
|||
|
||||
removed = remove_test_run_directories()
|
||||
if removed:
|
||||
print("Removed %s stale test run directories!" % (removed,))
|
||||
print(f"Removed {removed} stale test run directories!")
|
||||
|
||||
# We'll have printed whether tests passed or failed above
|
||||
sys.exit(bool(failures))
|
||||
|
|
|
@ -242,7 +242,7 @@ def run_tests_via_node_js() -> int:
|
|||
try:
|
||||
ret = subprocess.check_call(command)
|
||||
except OSError:
|
||||
print('Bad command: %s' % (command,))
|
||||
print(f'Bad command: {command}')
|
||||
raise
|
||||
except subprocess.CalledProcessError:
|
||||
print('\n** Tests failed, PLEASE FIX! **\n')
|
||||
|
@ -257,8 +257,8 @@ def check_line_coverage(fn: str, line_coverage: Dict[Any, Any], line_mapping: Di
|
|||
missing_lines.append(str(actual_line["start"]["line"]))
|
||||
if missing_lines:
|
||||
if log:
|
||||
print_error("%s no longer has complete node test coverage" % (fn,))
|
||||
print(" Lines missing coverage: %s" % (", ".join(sorted(missing_lines, key=int)),))
|
||||
print_error(f"{fn} no longer has complete node test coverage")
|
||||
print(" Lines missing coverage: {}".format(", ".join(sorted(missing_lines, key=int))))
|
||||
print()
|
||||
return False
|
||||
return True
|
||||
|
@ -288,7 +288,7 @@ def enforce_proper_coverage(coverage_json: Any) -> bool:
|
|||
path = ROOT_DIR + "/" + relative_path
|
||||
if not (path in coverage_json):
|
||||
coverage_lost = True
|
||||
print_error("%s has no node test coverage" % (relative_path,))
|
||||
print_error(f"{relative_path} has no node test coverage")
|
||||
continue
|
||||
line_coverage = coverage_json[path]['s']
|
||||
line_mapping = coverage_json[path]['statementMap']
|
||||
|
|
|
@ -58,7 +58,7 @@ def run_tests(files: Iterable[str], external_host: str) -> None:
for test_file in test_files:
test_name = os.path.basename(test_file)
cmd = ["node"] + [test_file]
print("\n\n===================== %s\nRunning %s\n\n" % (test_name, " ".join(map(shlex.quote, cmd))), flush=True)
print("\n\n===================== {}\nRunning {}\n\n".format(test_name, " ".join(map(shlex.quote, cmd))), flush=True)
ret = subprocess.call(cmd)
if ret != 0:
return ret
@ -108,14 +108,14 @@ class TestHtmlBranches(unittest.TestCase):

self.assertEqual(set(template_id_dict.keys()), {'below_navbar', 'hello_{{ message }}', 'intro'})
self.assertEqual(template_id_dict['hello_{{ message }}'], [
'Line 12:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH,),
'Line 12:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH,)])
f'Line 12:{ZULIP_PATH}/tools/tests/test_template_data/test_template1.html',
f'Line 12:{ZULIP_PATH}/tools/tests/test_template_data/test_template2.html'])
self.assertEqual(template_id_dict['intro'], [
'Line 10:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH,),
'Line 11:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH,),
'Line 11:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH,)])
f'Line 10:{ZULIP_PATH}/tools/tests/test_template_data/test_template1.html',
f'Line 11:{ZULIP_PATH}/tools/tests/test_template_data/test_template1.html',
f'Line 11:{ZULIP_PATH}/tools/tests/test_template_data/test_template2.html'])
self.assertEqual(template_id_dict['below_navbar'], [
'Line 10:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH,)])
f'Line 10:{ZULIP_PATH}/tools/tests/test_template_data/test_template2.html'])

def test_split_for_id_and_class(self) -> None:
id1 = "{{ red|blue }}"
@ -65,11 +65,11 @@ for zuliprc_path in zuliprc_paths_list:
with open(zuliprc_path, 'w+') as w:
zuliprc.write(w)
result = 'SUCCESS'
reason = 'API field updated for user %s' % (email,)
reason = f'API field updated for user {email}'
except OSError:
result = 'FAILURE'
reason = 'Writing to file unsuccessful'
else:
result = 'SUCCESS'
reason = 'API key for user %s is already consistent' % (email,)
reason = f'API key for user {email} is already consistent'
print(f'{zuliprc_path}: {result}: {reason}')
@ -97,9 +97,9 @@ def build_for_most_tests() -> None:
with open('tools/webpack.assets.json') as json_data:
for entry in json.load(json_data).keys():
entries[entry] = [{
"name": "%s.js" % (entry,),
"publicPath": "http://localhost:3000/webpack-stub/%s-stubentry.js" % (entry,),
"path": "/stubfolder/%s-stubfile.js" % (entry,)
"name": f"{entry}.js",
"publicPath": f"http://localhost:3000/webpack-stub/{entry}-stubentry.js",
"path": f"/stubfolder/{entry}-stubfile.js"
}]
stat_data = {
"status": "done",
@ -81,7 +81,7 @@ for msg in result['messages']:
msg.pop(k, None)
messages.append(msg)

filename = "zulip-%s.json" % (options.stream,)
filename = f"zulip-{options.stream}.json"
with open(filename, 'wb') as f:
f.write(json.dumps(messages, indent=0, sort_keys=False).encode('utf-8'))
print("%d messages exported to %s" % (len(messages), filename,))
@ -141,7 +141,7 @@ def zulip_default_context(request: HttpRequest) -> Dict[str, Any]:
'landing_page_navbar_message': settings.LANDING_PAGE_NAVBAR_MESSAGE,
}

context['OPEN_GRAPH_URL'] = '%s%s' % (realm_uri, request.path)
context['OPEN_GRAPH_URL'] = f'{realm_uri}{request.path}'
if realm is not None and realm.icon_source == realm.ICON_UPLOADED:
context['OPEN_GRAPH_IMAGE'] = urljoin(realm_uri, realm_icon)

@ -175,7 +175,7 @@ def login_context(request: HttpRequest) -> Dict[str, Any]:
no_auth_enabled = True
for auth_backend_name in AUTH_BACKEND_NAME_MAP:
name_lower = auth_backend_name.lower()
key = "%s_auth_enabled" % (name_lower,)
key = f"{name_lower}_auth_enabled"
is_enabled = auth_enabled_helper([auth_backend_name], realm)
context[key] = is_enabled
if is_enabled:

@ -191,7 +191,7 @@ def login_context(request: HttpRequest) -> Dict[str, Any]:
external_authentication_methods = get_external_method_dicts(realm)
)
for auth_dict in context['page_params']['external_authentication_methods']:
auth_dict['button_id_suffix'] = "auth_button_%s" % (auth_dict['name'],)
auth_dict['button_id_suffix'] = "auth_button_{}".format(auth_dict['name'])

return context
@ -97,7 +97,7 @@ def build_userprofile(timestamp: Any, domain_name: str,

def get_user_email(user_data: ZerverFieldsT, domain_name: str) -> str:
# TODO Get user email from github
email = ("%s@users.noreply.github.com" % (user_data['username'],))
email = ("{}@users.noreply.github.com".format(user_data['username']))
return email

def build_stream_map(timestamp: Any,

@ -240,14 +240,14 @@ def get_usermentions(message: Dict[str, Any], user_map: Dict[str, int],
if 'mentions' in message:
for mention in message['mentions']:
if mention.get('userId') in user_map:
gitter_mention = '@%s' % (mention['screenName'],)
gitter_mention = '@{}'.format(mention['screenName'])
if mention['screenName'] not in user_short_name_to_full_name:
logging.info("Mentioned user %s never sent any messages, so has no full name data",
mention['screenName'])
full_name = mention['screenName']
else:
full_name = user_short_name_to_full_name[mention['screenName']]
zulip_mention = ('@**%s**' % (full_name,))
zulip_mention = (f'@**{full_name}**')
message['text'] = message['text'].replace(gitter_mention, zulip_mention)

mentioned_user_ids.append(user_map[mention['userId']])
@ -50,7 +50,7 @@ def build_zerver_realm(realm_id: int, realm_subdomain: str, time: float,
other_product: str) -> List[ZerverFieldsT]:
realm = Realm(id=realm_id, date_created=time,
name=realm_subdomain, string_id=realm_subdomain,
description="Organization imported from %s!" % (other_product,))
description=f"Organization imported from {other_product}!")
auth_methods = [[flag[0], flag[1]] for flag in realm.authentication_methods]
realm_dict = model_to_dict(realm, exclude='authentication_methods')
realm_dict['authentication_methods'] = auth_methods

@ -514,8 +514,8 @@ def process_avatars(avatar_list: List[ZerverFieldsT], avatar_dir: str, realm_id:
avatar_url = avatar['path']
avatar_original = dict(avatar)

image_path = '%s.png' % (avatar_hash,)
original_image_path = '%s.original' % (avatar_hash,)
image_path = f'{avatar_hash}.png'
original_image_path = f'{avatar_hash}.original'

avatar_upload_list.append([avatar_url, image_path, original_image_path])
# We don't add the size field here in avatar's records.json,
@ -227,7 +227,7 @@ def build_customprofile_field(customprofile_field: List[ZerverFieldsT], fields:
if field in slack_custom_fields:
field_name = field
else:
field_name = "slack custom field %s" % (str(custom_profile_field_id + 1),)
field_name = f"slack custom field {str(custom_profile_field_id + 1)}"
customprofilefield = CustomProfileField(
id=custom_profile_field_id,
name=field_name,

@ -294,10 +294,10 @@ def get_user_email(user: ZerverFieldsT, domain_name: str) -> str:
slack_bot_name = user['profile']['first_name']
else:
raise AssertionError("Could not identify bot type")
return slack_bot_name.replace("Bot", "").replace(" ", "") + "-bot@%s" % (domain_name,)
return slack_bot_name.replace("Bot", "").replace(" ", "") + f"-bot@{domain_name}"
if get_user_full_name(user).lower() == "slackbot":
return "imported-slackbot-bot@%s" % (domain_name,)
raise AssertionError("Could not find email address for Slack user %s" % (user,))
return f"imported-slackbot-bot@{domain_name}"
raise AssertionError(f"Could not find email address for Slack user {user}")

def build_avatar_url(slack_user_id: str, team_id: str, avatar_hash: str) -> str:
avatar_url = f"https://ca.slack-edge.com/{team_id}-{slack_user_id}-{avatar_hash}"

@ -742,7 +742,7 @@ def channel_message_to_zerver_message(realm_id: int,
# For example "sh_room_created" has the message 'started a call'
# which should be displayed as '/me started a call'
if subtype in ["bot_add", "sh_room_created", "me_message"]:
content = '/me %s' % (content,)
content = f'/me {content}'
if subtype == 'file_comment':
# The file_comment message type only indicates the
# responsible user in a subfield.

@ -864,7 +864,7 @@ def process_message_files(message: ZerverFieldsT,
file_name = fileinfo['title']
else:
file_name = fileinfo['name']
markdown_links.append('[%s](%s)' % (file_name, fileinfo['url_private']))
markdown_links.append('[{}]({})'.format(file_name, fileinfo['url_private']))

content = '\n'.join(markdown_links)

@ -887,8 +887,8 @@ def get_attachment_path_and_content(fileinfo: ZerverFieldsT, realm_id: int) -> T
random_name(18),
sanitize_name(fileinfo['name'])
])
attachment_path = '/user_uploads/%s' % (s3_path,)
content = '[%s](%s)' % (fileinfo['title'], attachment_path)
attachment_path = f'/user_uploads/{s3_path}'
content = '[{}]({})'.format(fileinfo['title'], attachment_path)

return s3_path, content

@ -1119,7 +1119,7 @@ def get_slack_api_data(slack_api_url: str, get_param: str, **kwargs: Any) -> Any
if data.status_code == requests.codes.ok:
result = data.json()
if not result['ok']:
raise Exception('Error accessing Slack API: %s' % (result['error'],))
raise Exception('Error accessing Slack API: {}'.format(result['error']))
return result[get_param]

raise Exception('HTTP error accessing the Slack API.')
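The hunks above also show where the tool stops short of f-strings: placeholders fed by string-keyed subscripts like `result['error']` become `str.format()` calls instead. A plausible reason (an assumption, not stated in the commit) is that before Python 3.12 an f-string could not reuse its own quote character inside an embedded expression, so rewriting a single-quoted literal would have required changing its quotes. A hedged sketch with a made-up payload:

```python
result = {"ok": False, "error": "invalid_auth"}  # hypothetical Slack response

# What the commit produces: .format() keeps the original quotes intact.
via_format = 'Error accessing Slack API: {}'.format(result['error'])

# The f-string works too, but only by switching the outer quotes.
via_fstring = f"Error accessing Slack API: {result['error']}"

assert via_format == via_fstring
```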
@ -85,7 +85,7 @@ def convert_to_zulip_markdown(text: str, users: List[ZerverFieldsT],
# Map Slack channel mention: '<#C5Z73A7RA|general>' to '#**general**'
for cname, ids in added_channels.items():
cid = ids[0]
text = text.replace('<#%s|%s>' % (cid, cname), '#**' + cname + '**')
text = text.replace(f'<#{cid}|{cname}>', '#**' + cname + '**')

tokens = text.split(' ')
for iterator in range(len(tokens)):
@ -57,7 +57,7 @@ def email_is_not_mit_mailing_list(email: str) -> None:
username = email.rsplit("@", 1)[0]
# Check whether the user exists and can get mail.
try:
DNS.dnslookup("%s.pobox.ns.athena.mit.edu" % (username,), DNS.Type.TXT)
DNS.dnslookup(f"{username}.pobox.ns.athena.mit.edu", DNS.Type.TXT)
except DNS.Base.ServerError as e:
if e.rcode == DNS.Status.NXDOMAIN:
raise ValidationError(mark_safe(MIT_VALIDATION_ERROR))
@ -394,10 +394,7 @@ def process_new_human_user(user_profile: UserProfile,
user_profile.realm,
get_system_bot(settings.NOTIFICATION_BOT),
prereg_user.referred_by,
"%s <`%s`> accepted your invitation to join Zulip!" % (
user_profile.full_name,
user_profile.email,
)
f"{user_profile.full_name} <`{user_profile.email}`> accepted your invitation to join Zulip!"
)
# Mark any other PreregistrationUsers that are STATUS_ACTIVE as
# inactive so we can keep track of the PreregistrationUser we

@ -598,8 +595,7 @@ def do_set_realm_property(realm: Realm, name: str, value: Any) -> None:
"""
property_type = Realm.property_types[name]
assert isinstance(value, property_type), (
'Cannot update %s: %s is not an instance of %s' % (
name, value, property_type,))
f'Cannot update {name}: {value} is not an instance of {property_type}')

old_value = getattr(realm, name)
setattr(realm, name, value)

@ -2359,8 +2355,7 @@ def _internal_prep_message(realm: Realm,
return check_message(sender, get_client("Internal"), addressee,
content, realm=realm)
except JsonableError as e:
logging.exception("Error queueing internal message by %s: %s" % (
sender.delivery_email, e))
logging.exception(f"Error queueing internal message by {sender.delivery_email}: {e}")

return None

@ -3587,7 +3582,7 @@ def do_change_stream_description(stream: Stream, new_description: str) -> None:
def do_create_realm(string_id: str, name: str,
emails_restricted_to_domains: Optional[bool]=None) -> Realm:
if Realm.objects.filter(string_id=string_id).exists():
raise AssertionError("Realm %s already exists!" % (string_id,))
raise AssertionError(f"Realm {string_id} already exists!")
if not server_initialized():
logging.info("Server not yet initialized. Creating the internal realm first.")
create_internal_realm()

@ -3653,8 +3648,7 @@ def do_change_notification_settings(user_profile: UserProfile, name: str,

notification_setting_type = UserProfile.notification_setting_types[name]
assert isinstance(value, notification_setting_type), (
'Cannot update %s: %s is not an instance of %s' % (
name, value, notification_setting_type,))
f'Cannot update {name}: {value} is not an instance of {notification_setting_type}')

setattr(user_profile, name, value)

@ -4179,7 +4173,7 @@ def do_update_message_flags(user_profile: UserProfile,
if flag == "read" and operation == "add":
do_clear_mobile_push_notifications_for_ids(user_profile, messages)

statsd.incr("flags.%s.%s" % (flag, operation), count)
statsd.incr(f"flags.{flag}.{operation}", count)
return count

class MessageUpdateUserInfoResult(TypedDict):

@ -4199,9 +4193,9 @@ def notify_topic_moved_streams(user_profile: UserProfile,
if new_topic is None:
new_topic = old_topic

user_mention = "@_**%s|%s**" % (user_profile.full_name, user_profile.id)
old_topic_link = "#**%s>%s**" % (old_stream.name, old_topic)
new_topic_link = "#**%s>%s**" % (new_stream.name, new_topic)
user_mention = f"@_**{user_profile.full_name}|{user_profile.id}**"
old_topic_link = f"#**{old_stream.name}>{old_topic}**"
new_topic_link = f"#**{new_stream.name}>{new_topic}**"
if send_notification_to_new_thread:
internal_send_stream_message(
new_stream.realm, sender, new_stream, new_topic,

@ -4918,7 +4912,7 @@ def do_send_confirmation_email(invitee: PreregistrationUser,
activation_url = create_confirmation_link(invitee, referrer.realm.host, Confirmation.INVITATION)
context = {'referrer_full_name': referrer.full_name, 'referrer_email': referrer.delivery_email,
'activate_url': activation_url, 'referrer_realm_name': referrer.realm.name}
from_name = "%s (via Zulip)" % (referrer.full_name,)
from_name = f"{referrer.full_name} (via Zulip)"
send_email('zerver/emails/invitation', to_emails=[invitee.email], from_name=from_name,
from_address=FromAddress.tokenized_no_reply_address(),
language=referrer.realm.default_language, context=context)
@ -92,9 +92,9 @@ def get_gravatar_url(email: str, avatar_version: int, medium: bool=False) -> str

def _get_unversioned_gravatar_url(email: str, medium: bool) -> str:
if settings.ENABLE_GRAVATAR:
gravitar_query_suffix = "&s=%s" % (MEDIUM_AVATAR_SIZE,) if medium else ""
gravitar_query_suffix = f"&s={MEDIUM_AVATAR_SIZE}" if medium else ""
hash_key = gravatar_hash(email)
return "https://secure.gravatar.com/avatar/%s?d=identicon%s" % (hash_key, gravitar_query_suffix)
return f"https://secure.gravatar.com/avatar/{hash_key}?d=identicon{gravitar_query_suffix}"
return settings.DEFAULT_AVATAR_URI+'?x=x'

def _get_unversioned_avatar_url(user_profile_id: int,
@ -34,7 +34,7 @@ def user_avatar_path(user_profile: UserProfile) -> str:

def user_avatar_path_from_ids(user_profile_id: int, realm_id: int) -> str:
user_id_hash = user_avatar_hash(str(user_profile_id))
return '%s/%s' % (str(realm_id), user_id_hash)
return f'{str(realm_id)}/{user_id_hash}'

def user_avatar_content_hash(ldap_avatar: bytes) -> str:
return hashlib.sha256(ldap_avatar).hexdigest()
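One detail worth noting in the hunk above: the converted line keeps a now-redundant `str()` call. F-strings already convert interpolated values via `format()`, which for an int gives the same text as `str()`, so the call is harmless but unnecessary; presumably the tool preserves it verbatim rather than risk changing semantics. A quick sketch with sample values:

```python
realm_id = 42          # sample values, not from the codebase
user_id_hash = "ab12"

assert f"{str(realm_id)}/{user_id_hash}" == f"{realm_id}/{user_id_hash}" == "42/ab12"
```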
@ -27,7 +27,7 @@ def get_bot_handler(service_name: str) -> Any:
configured_service = embedded_bot_service.name
if not configured_service:
return None
bot_module_name = 'zulip_bots.bots.%s.%s' % (configured_service, configured_service)
bot_module_name = f'zulip_bots.bots.{configured_service}.{configured_service}'
bot_module: Any = importlib.import_module(bot_module_name)
return bot_module.handler_class()
@ -34,9 +34,9 @@ gear_instructions = """

def gear_handle_match(key: str) -> str:
if relative_help_links:
item = '[%s](%s)' % (gear_info[key][0], gear_info[key][1])
item = f'[{gear_info[key][0]}]({gear_info[key][1]})'
else:
item = '**%s**' % (gear_info[key][0],)
item = f'**{gear_info[key][0]}**'
return gear_instructions % {'item': item}

@ -54,7 +54,7 @@ stream_instructions_no_link = """

def stream_handle_match(key: str) -> str:
if relative_help_links:
return "1. Go to [%s](%s)." % (stream_info[key][0], stream_info[key][1])
return f"1. Go to [{stream_info[key][0]}]({stream_info[key][1]})."
if key == 'all':
return stream_instructions_no_link + "\n\n1. Click **All streams** in the upper left."
return stream_instructions_no_link
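`gear_instructions % {'item': item}` is another shape that cannot become an f-string: the template is defined once at module scope and formatted later, while an f-string evaluates immediately where it is written. A small sketch of why the %-template has to stay:

```python
template = "1. Select %(item)s."   # reusable: defined before any value exists
item = "**Manage streams**"        # hypothetical menu item

assert template % {"item": item} == f"1. Select {item}."
```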
@ -103,9 +103,9 @@ class Setting(Preprocessor):
setting_name = link_mapping[setting_identifier][1]
setting_link = link_mapping[setting_identifier][2]
if relative_settings_links:
return "1. Go to [%s](%s)." % (setting_name, setting_link)
return f"1. Go to [{setting_name}]({setting_link})."
return settings_markdown % {'setting_type_name': setting_type_name,
'setting_reference': "**%s**" % (setting_name,)}
'setting_reference': f"**{setting_name}**"}

def makeExtension(*args: Any, **kwargs: Any) -> SettingHelpExtension:
return SettingHelpExtension(*args, **kwargs)
@ -176,7 +176,7 @@ def cache_with_key(
metric_key = statsd_key(key)

status = "hit" if val is not None else "miss"
statsd.incr("cache%s.%s.%s" % (extra, metric_key, status))
statsd.incr(f"cache{extra}.{metric_key}.{status}")

# Values are singleton tuples so that we can distinguish
# a result of None from a missing key.

@ -409,7 +409,7 @@ def generic_bulk_cached_fetch(
if cache_keys[object_id] in cached_objects}

def preview_url_cache_key(url: str) -> str:
return "preview_url:%s" % (make_safe_digest(url),)
return f"preview_url:{make_safe_digest(url)}"

def display_recipient_cache_key(recipient_id: int) -> str:
return "display_recipient_dict:%d" % (recipient_id,)

@ -423,22 +423,22 @@ def user_profile_by_email_cache_key(email: str) -> str:
# See the comment in zerver/lib/avatar_hash.py:gravatar_hash for why we
# are proactively encoding email addresses even though they will
# with high likelihood be ASCII-only for the foreseeable future.
return 'user_profile_by_email:%s' % (make_safe_digest(email.strip()),)
return f'user_profile_by_email:{make_safe_digest(email.strip())}'

def user_profile_cache_key_id(email: str, realm_id: int) -> str:
return "user_profile:%s:%s" % (make_safe_digest(email.strip()), realm_id,)
return f"user_profile:{make_safe_digest(email.strip())}:{realm_id}"

def user_profile_cache_key(email: str, realm: 'Realm') -> str:
return user_profile_cache_key_id(email, realm.id)

def bot_profile_cache_key(email: str) -> str:
return "bot_profile:%s" % (make_safe_digest(email.strip()),)
return f"bot_profile:{make_safe_digest(email.strip())}"

def user_profile_by_id_cache_key(user_profile_id: int) -> str:
return "user_profile_by_id:%s" % (user_profile_id,)
return f"user_profile_by_id:{user_profile_id}"

def user_profile_by_api_key_cache_key(api_key: str) -> str:
return "user_profile_by_api_key:%s" % (api_key,)
return f"user_profile_by_api_key:{api_key}"

realm_user_dict_fields: List[str] = [
'id', 'full_name', 'short_name', 'email',

@ -449,16 +449,16 @@ realm_user_dict_fields: List[str] = [
]

def realm_user_dicts_cache_key(realm_id: int) -> str:
return "realm_user_dicts:%s" % (realm_id,)
return f"realm_user_dicts:{realm_id}"

def get_realm_used_upload_space_cache_key(realm: 'Realm') -> str:
return 'realm_used_upload_space:%s' % (realm.id,)
return f'realm_used_upload_space:{realm.id}'

def active_user_ids_cache_key(realm_id: int) -> str:
return "active_user_ids:%s" % (realm_id,)
return f"active_user_ids:{realm_id}"

def active_non_guest_user_ids_cache_key(realm_id: int) -> str:
return "active_non_guest_user_ids:%s" % (realm_id,)
return f"active_non_guest_user_ids:{realm_id}"

bot_dict_fields: List[str] = [
'api_key',

@ -478,11 +478,10 @@ bot_dict_fields: List[str] = [
]

def bot_dicts_in_realm_cache_key(realm: 'Realm') -> str:
return "bot_dicts_in_realm:%s" % (realm.id,)
return f"bot_dicts_in_realm:{realm.id}"

def get_stream_cache_key(stream_name: str, realm_id: int) -> str:
return "stream_by_realm_and_name:%s:%s" % (
realm_id, make_safe_digest(stream_name.strip().lower()))
return f"stream_by_realm_and_name:{realm_id}:{make_safe_digest(stream_name.strip().lower())}"

def delete_user_profile_caches(user_profiles: Iterable['UserProfile']) -> None:
# Imported here to avoid cyclic dependency.

@ -571,16 +570,16 @@ def flush_realm(sender: Any, **kwargs: Any) -> None:
cache_delete(realm_text_description_cache_key(realm))

def realm_alert_words_cache_key(realm: 'Realm') -> str:
return "realm_alert_words:%s" % (realm.string_id,)
return f"realm_alert_words:{realm.string_id}"

def realm_alert_words_automaton_cache_key(realm: 'Realm') -> str:
return "realm_alert_words_automaton:%s" % (realm.string_id,)
return f"realm_alert_words_automaton:{realm.string_id}"

def realm_rendered_description_cache_key(realm: 'Realm') -> str:
return "realm_rendered_description:%s" % (realm.string_id,)
return f"realm_rendered_description:{realm.string_id}"

def realm_text_description_cache_key(realm: 'Realm') -> str:
return "realm_text_description:%s" % (realm.string_id,)
return f"realm_text_description:{realm.string_id}"

# Called by models.py to flush the stream cache whenever we save a stream
# object.

@ -610,7 +609,7 @@ def to_dict_cache_key(message: 'Message', realm_id: Optional[int]=None) -> str:
return to_dict_cache_key_id(message.id)

def open_graph_description_cache_key(content: Any, request: HttpRequest) -> str:
return 'open_graph_description_path:%s' % (make_safe_digest(request.META['PATH_INFO']),)
return 'open_graph_description_path:{}'.format(make_safe_digest(request.META['PATH_INFO']))

def flush_message(sender: Any, **kwargs: Any) -> None:
message = kwargs['instance']
@ -8,7 +8,7 @@ def generate_camo_url(url: str) -> str:
encoded_camo_key = settings.CAMO_KEY.encode("utf-8")
digest = hmac.new(encoded_camo_key, encoded_url, hashlib.sha1).hexdigest()
hex_encoded_url = binascii.b2a_hex(encoded_url)
return "%s/%s" % (digest, hex_encoded_url.decode("utf-8"))
return "{}/{}".format(digest, hex_encoded_url.decode("utf-8"))

# Encodes the provided URL using the same algorithm used by the camo
# caching https image proxy

@ -16,7 +16,7 @@ def get_camo_url(url: str) -> str:
# Only encode the url if Camo is enabled
if settings.CAMO_URI == '':
return url
return "%s%s" % (settings.CAMO_URI, generate_camo_url(url))
return f"{settings.CAMO_URI}{generate_camo_url(url)}"

def is_camo_url_valid(digest: str, url: str) -> bool:
camo_url = generate_camo_url(url)
@ -33,7 +33,7 @@ def copy_user_settings(source_profile: UserProfile, target_profile: UserProfile)

def get_display_email_address(user_profile: UserProfile, realm: Realm) -> str:
if not user_profile.email_address_is_realm_public():
return "user%s@%s" % (user_profile.id, get_fake_email_domain())
return f"user{user_profile.id}@{get_fake_email_domain()}"
return user_profile.delivery_email

def get_role_for_new_user(invited_as: int, realm_creation: bool=False) -> int:
@ -23,7 +23,7 @@ def wrapper_execute(self: CursorObj,
stop = time.time()
duration = stop - start
self.connection.queries.append({
'time': "%.3f" % (duration,),
'time': f"{duration:.3f}",
})

class TimeTrackingCursor(cursor):
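Hunks like this one show that printf-style conversion specifiers carry over directly into f-string format specs: `"%.3f" % (duration,)` becomes `f"{duration:.3f}"`. A short sketch of the correspondence, including the `%x` case converted later in this diff:

```python
duration = 0.123456

assert "%.3f" % (duration,) == f"{duration:.3f}" == "0.123"
assert "%x" % (255,) == f"{255:x}" == "ff"              # hex, as in xor_hex_strings
assert "%-6s|" % ("ab",) == f"{'ab':<6}|" == "ab    |"  # left-justified width
```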
@ -146,14 +146,14 @@ def gather_new_streams(user_profile: UserProfile,
else:
new_streams = []

base_url = "%s/#narrow/stream/" % (user_profile.realm.uri,)
base_url = f"{user_profile.realm.uri}/#narrow/stream/"

streams_html = []
streams_plain = []

for stream in new_streams:
narrow_url = base_url + encode_stream(stream.id, stream.name)
stream_link = "<a href='%s'>%s</a>" % (narrow_url, stream.name)
stream_link = f"<a href='{narrow_url}'>{stream.name}</a>"
streams_html.append(stream_link)
streams_plain.append(stream.name)
@ -69,7 +69,7 @@ def report_to_zulip(error_message: str) -> None:
error_bot,
error_stream,
"email mirror error",
"""~~~\n%s\n~~~""" % (error_message,)
f"""~~~\n{error_message}\n~~~"""
)

def log_and_report(email_message: EmailMessage, error_message: str, to: Optional[str]) -> None:

@ -155,7 +155,7 @@ def construct_zulip_body(message: EmailMessage, realm: Realm, show_sender: bool=

if show_sender:
sender = handle_header_content(message.get("From", ""))
body = "From: %s\n%s" % (sender, body)
body = f"From: {sender}\n{body}"

return body

@ -279,7 +279,7 @@ def extract_and_upload_attachments(message: EmailMessage, realm: Realm) -> str:
attachment,
user_profile,
target_realm=realm)
formatted_link = "[%s](%s)" % (filename, s3_url)
formatted_link = f"[{filename}]({s3_url})"
attachment_links.append(formatted_link)
else:
logger.warning("Payload is not bytes (invalid attachment %s in message from %s).",
@ -60,7 +60,7 @@ def encode_email_address_helper(name: str, email_token: str, show_sender: bool=F

# If encoded_name ends up empty, we just skip this part of the address:
if encoded_name:
encoded_token = "%s.%s" % (encoded_name, email_token)
encoded_token = f"{encoded_name}.{email_token}"
else:
encoded_token = email_token
@ -93,16 +93,9 @@ def fix_emojis(content: str, base_url: str, emojiset: str) -> str:
emoji_code = match.group('emoji_code')
emoji_name = emoji_span_elem.get('title')
alt_code = emoji_span_elem.text
image_url = base_url + '/static/generated/emoji/images-%(emojiset)s-64/%(emoji_code)s.png' % {
'emojiset': emojiset,
'emoji_code': emoji_code
}
image_url = base_url + f'/static/generated/emoji/images-{emojiset}-64/{emoji_code}.png'
img_elem = lxml.html.fromstring(
'<img alt="%(alt_code)s" src="%(image_url)s" title="%(title)s">' % {
'alt_code': alt_code,
'image_url': image_url,
'title': emoji_name,
})
f'<img alt="{alt_code}" src="{image_url}" title="{emoji_name}">')
img_elem.set('style', 'height: 20px;')
img_elem.tail = emoji_span_elem.tail
return img_elem
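This hunk is the counterpart to the `% dict(report)` cases left alone elsewhere in the commit: here the `%(name)s` mapping is a dict literal at the call site, so every key is visibly bound and the whole expression can collapse into one f-string (a hedged reading of the hunk above). A minimal sketch with hypothetical values:

```python
emojiset = "google"   # hypothetical values
emoji_code = "1f604"

old = '/images-%(emojiset)s-64/%(emoji_code)s.png' % {
    'emojiset': emojiset,
    'emoji_code': emoji_code,
}
new = f'/images-{emojiset}-64/{emoji_code}.png'

assert old == new
```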
@ -179,8 +172,8 @@ def build_message_list(user_profile: UserProfile, messages: List[Message]) -> Li
def message_header(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
if message.recipient.type == Recipient.PERSONAL:
narrow_link = get_narrow_url(user_profile, message)
header = "You and %s" % (message.sender.full_name,)
header_html = "<a style='color: #ffffff;' href='%s'>%s</a>" % (narrow_link, header)
header = f"You and {message.sender.full_name}"
header_html = f"<a style='color: #ffffff;' href='{narrow_link}'>{header}</a>"
elif message.recipient.type == Recipient.HUDDLE:
display_recipient = get_display_recipient(message.recipient)
assert not isinstance(display_recipient, str)

@ -188,15 +181,14 @@ def build_message_list(user_profile: UserProfile, messages: List[Message]) -> Li
display_recipient=display_recipient)
other_recipients = [r['full_name'] for r in display_recipient
if r['id'] != user_profile.id]
header = "You and %s" % (", ".join(other_recipients),)
header_html = "<a style='color: #ffffff;' href='%s'>%s</a>" % (narrow_link, header)
header = "You and {}".format(", ".join(other_recipients))
header_html = f"<a style='color: #ffffff;' href='{narrow_link}'>{header}</a>"
else:
stream = Stream.objects.only('id', 'name').get(id=message.recipient.type_id)
narrow_link = get_narrow_url(user_profile, message, stream=stream)
header = "%s > %s" % (stream.name, message.topic_name())
header = f"{stream.name} > {message.topic_name()}"
stream_link = stream_narrow_url(user_profile.realm, stream)
header_html = "<a href='%s'>%s</a> > <a href='%s'>%s</a>" % (
stream_link, stream.name, narrow_link, message.topic_name())
header_html = f"<a href='{stream_link}'>{stream.name}</a> > <a href='{narrow_link}'>{message.topic_name()}</a>"
return {"plain": header,
"html": header_html,
"stream_message": message.recipient.type_name() == "stream"}

@ -371,11 +363,10 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile,
huddle_display_name = " and ".join(other_recipients)
context.update({'huddle_display_name': huddle_display_name})
elif len(other_recipients) == 3:
huddle_display_name = "%s, %s, and %s" % (
other_recipients[0], other_recipients[1], other_recipients[2])
huddle_display_name = f"{other_recipients[0]}, {other_recipients[1]}, and {other_recipients[2]}"
context.update({'huddle_display_name': huddle_display_name})
else:
huddle_display_name = "%s, and %s others" % (
huddle_display_name = "{}, and {} others".format(
', '.join(other_recipients[:2]), len(other_recipients) - 2)
context.update({'huddle_display_name': huddle_display_name})
elif (missed_messages[0]['message'].recipient.type == Recipient.PERSONAL):

@ -388,7 +379,7 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile,
m['trigger'] == 'wildcard_mentioned'})
message = missed_messages[0]['message']
stream = Stream.objects.only('id', 'name').get(id=message.recipient.type_id)
stream_header = "%s > %s" % (stream.name, message.topic_name())
stream_header = f"{stream.name} > {message.topic_name()}"
context.update({
'stream_header': stream_header,
})
@ -112,7 +112,7 @@ def validate_email_is_valid(
return None

def email_reserved_for_system_bots_error(email: str) -> str:
return '%s is reserved for system bots' % (email,)
return f'{email} is reserved for system bots'

def get_existing_user_errors(
target_realm: Realm,
@ -34,7 +34,7 @@ def user_info_str(report: Dict[str, Any]) -> str:
def deployment_repr(report: Dict[str, Any]) -> str:
deployment = 'Deployed code:\n'
for field, val in report['deployment_data'].items():
deployment += '- %s: %s\n' % (field, val)
deployment += f'- {field}: {val}\n'
return deployment

def notify_browser_error(report: Dict[str, Any]) -> None:

@ -44,7 +44,7 @@ def notify_browser_error(report: Dict[str, Any]) -> None:
email_browser_error(report)

def email_browser_error(report: Dict[str, Any]) -> None:
email_subject = "Browser error for %s" % (user_info_str(report),)
email_subject = f"Browser error for {user_info_str(report)}"

body = ("User: %(user_full_name)s <%(user_email)s> on %(deployment)s\n\n"
"Message:\n%(message)s\n\nStacktrace:\n%(stacktrace)s\n\n"

@ -59,18 +59,18 @@ def email_browser_error(report: Dict[str, Any]) -> None:
if more_info is not None:
body += "\nAdditional information:"
for (key, value) in more_info.items():
body += "\n %s: %s" % (key, value)
body += f"\n {key}: {value}"

body += "\n\nLog:\n%s" % (report['log'],)
body += "\n\nLog:\n{}".format(report['log'])

mail_admins(email_subject, body)

def zulip_browser_error(report: Dict[str, Any]) -> None:
email_subject = "JS error: %s" % (report['user_email'],)
email_subject = "JS error: {}".format(report['user_email'])

user_info = user_info_str(report)

body = "User: %s\n" % (user_info,)
body = f"User: {user_info}\n"
body += ("Message: %(message)s\n"
% dict(report))

@ -108,7 +108,7 @@ def zulip_server_error(report: Dict[str, Any]) -> None:
val = report.get(field.lower())
if field == "QUERY_STRING":
val = clean_data_from_query_parameters(str(val))
request_repr += "- %s: \"%s\"\n" % (field, val)
request_repr += f"- {field}: \"{val}\"\n"
request_repr += "~~~~"
else:
request_repr = "Request info: none"

@ -144,7 +144,7 @@ def email_server_error(report: Dict[str, Any]) -> None:
val = report.get(field.lower())
if field == "QUERY_STRING":
val = clean_data_from_query_parameters(str(val))
request_repr += "- %s: \"%s\"\n" % (field, val)
request_repr += f"- {field}: \"{val}\"\n"
else:
request_repr = "Request info: none\n"
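Notice that the dict-style placeholders above (`"Message: %(message)s\n" % dict(report)`) survive the upgrade untouched: the mapping is built at runtime, so the tool has no way to inline its keys. Only `%` applied to simple, visible arguments was rewritten. A hedged sketch of the distinction, using a made-up report:

```python
report = {"message": "boom"}  # hypothetical error report

kept = "Message: %(message)s\n" % dict(report)  # still %-based after the upgrade
manual = f"Message: {report['message']}\n"      # what a hand conversion would write

assert kept == manual
```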
@ -818,7 +818,7 @@ def apply_event(state: Dict[str, Any],
elif event['type'] == 'has_zoom_token':
state['has_zoom_token'] = event['value']
else:
raise AssertionError("Unexpected event type %s" % (event['type'],))
raise AssertionError("Unexpected event type {}".format(event['type']))

def do_events_register(user_profile: UserProfile, user_client: Client,
apply_markdown: bool = True,
@ -420,13 +420,13 @@ class Config:
using id_source.''')
if self.id_source[0] != self.virtual_parent.table:
raise AssertionError('''
Configuration error. To populate %s, you
want data from %s, but that differs from
the table name of your virtual parent (%s),
Configuration error. To populate {}, you
want data from {}, but that differs from
the table name of your virtual parent ({}),
which suggests you may not have set up
the ordering correctly. You may simply
need to assign a virtual_parent, or there
may be deeper issues going on.''' % (
may be deeper issues going on.'''.format(
self.table,
self.id_source[0],
self.virtual_parent.table))
@ -466,7 +466,7 @@ def export_from_config(response: TableData, config: Config, seed_object: Optiona
if config.custom_tables:
for t in config.custom_tables:
if t not in response:
raise AssertionError('Custom fetch failed to populate %s' % (t,))
raise AssertionError(f'Custom fetch failed to populate {t}')

elif config.concat_and_destroy:
# When we concat_and_destroy, we are working with

@ -1177,16 +1177,16 @@ def _check_key_metadata(email_gateway_bot: Optional[UserProfile],
# Helper function for export_files_from_s3
if 'realm_id' in key.metadata and key.metadata['realm_id'] != str(realm.id):
if email_gateway_bot is None or key.metadata['user_profile_id'] != str(email_gateway_bot.id):
raise AssertionError("Key metadata problem: %s %s / %s" % (key.name, key.metadata, realm.id))
raise AssertionError(f"Key metadata problem: {key.name} {key.metadata} / {realm.id}")
# Email gateway bot sends messages, potentially including attachments, cross-realm.
print("File uploaded by email gateway bot: %s / %s" % (key.key, key.metadata))
print(f"File uploaded by email gateway bot: {key.key} / {key.metadata}")
elif processing_avatars:
if 'user_profile_id' not in key.metadata:
raise AssertionError("Missing user_profile_id in key metadata: %s" % (key.metadata,))
raise AssertionError(f"Missing user_profile_id in key metadata: {key.metadata}")
if int(key.metadata['user_profile_id']) not in user_ids:
raise AssertionError("Wrong user_profile_id in key metadata: %s" % (key.metadata,))
raise AssertionError(f"Wrong user_profile_id in key metadata: {key.metadata}")
elif 'realm_id' not in key.metadata:
raise AssertionError("Missing realm_id in key metadata: %s" % (key.metadata,))
raise AssertionError(f"Missing realm_id in key metadata: {key.metadata}")

def _get_exported_s3_record(
bucket_name: str,

@ -1233,11 +1233,11 @@ def _save_s3_object_to_file(key: ServiceResource, output_dir: str, processing_av
else:
fields = key.key.split('/')
if len(fields) != 3:
raise AssertionError("Suspicious key with invalid format %s" % (key.key,))
raise AssertionError(f"Suspicious key with invalid format {key.key}")
filename = os.path.join(output_dir, key.key)

if "../" in filename:
raise AssertionError("Suspicious file with invalid format %s" % (filename,))
raise AssertionError(f"Suspicious file with invalid format {filename}")

dirname = os.path.dirname(filename)
if not os.path.exists(dirname):

@ -1264,11 +1264,11 @@ def export_files_from_s3(realm: Realm, bucket_name: str, output_dir: Path,
user_ids.add(user_profile.id)

if processing_realm_icon_and_logo:
object_prefix = "%s/realm/" % (realm.id,)
object_prefix = f"{realm.id}/realm/"
elif processing_emoji:
object_prefix = "%s/emoji/images/" % (realm.id,)
object_prefix = f"{realm.id}/emoji/images/"
else:
object_prefix = "%s/" % (realm.id,)
object_prefix = f"{realm.id}/"

if settings.EMAIL_GATEWAY_BOT is not None:
email_gateway_bot: Optional[UserProfile] = get_system_bot(settings.EMAIL_GATEWAY_BOT)

@ -1576,7 +1576,7 @@ def launch_user_message_subprocesses(threads: int, output_dir: Path,
while pids:
pid, status = os.wait()
shard = pids.pop(pid)
print('Shard %s finished, status %s' % (shard, status))
print(f'Shard {shard} finished, status {status}')

def do_export_user(user_profile: UserProfile, output_dir: Path) -> None:
response: TableData = {}

@ -1741,8 +1741,8 @@ def export_realm_wrapper(realm: Realm, output_dir: str,
tarball_path = do_export_realm(realm=realm, output_dir=output_dir,
threads=threads, public_only=public_only,
consent_message_id=consent_message_id)
print("Finished exporting to %s" % (output_dir,))
print("Tarball written to %s" % (tarball_path,))
print(f"Finished exporting to {output_dir}")
print(f"Tarball written to {tarball_path}")

if not upload:
return None

@ -1753,11 +1753,11 @@ def export_realm_wrapper(realm: Realm, output_dir: str,
print("Uploading export tarball...")
public_url = zerver.lib.upload.upload_backend.upload_export_tarball(realm, tarball_path)
print()
print("Uploaded to %s" % (public_url,))
print(f"Uploaded to {public_url}")

if delete_after_upload:
os.remove(tarball_path)
print("Successfully deleted the tarball at %s" % (tarball_path,))
print(f"Successfully deleted the tarball at {tarball_path}")
return public_url

def get_realm_exports_serialized(user: UserProfile) -> List[Dict[str, Any]]:
@ -4,7 +4,7 @@ from lxml.html.diff import htmldiff
from typing import Optional

def highlight_with_class(text: str, klass: str) -> str:
return '<span class="%s">%s</span>' % (klass, text)
return f'<span class="{klass}">{text}</span>'

def highlight_html_differences(s1: str, s2: str, msg_id: Optional[int]=None) -> str:
retval = htmldiff(s1, s2)
@ -101,10 +101,10 @@ path_maps: Dict[str, Dict[str, str]] = {
def update_id_map(table: TableName, old_id: int, new_id: int) -> None:
if table not in ID_MAP:
raise Exception('''
Table %s is not initialized in ID_MAP, which could
Table {} is not initialized in ID_MAP, which could
mean that we have not thought through circular
dependencies.
''' % (table,))
'''.format(table))
ID_MAP[table][old_id] = new_id

def fix_datetime_fields(data: TableData, table: TableName) -> None:
@ -70,7 +70,7 @@ class _RateLimitFilter:
try:
# Track duplicate errors
duplicate = False
rate = getattr(settings, '%s_LIMIT' % (self.__class__.__name__.upper(),),
rate = getattr(settings, f'{self.__class__.__name__.upper()}_LIMIT',
600) # seconds

if rate > 0:
@ -28,7 +28,7 @@ def check_config() -> None:
except AttributeError:
pass

raise CommandError("Error: You must set %s in /etc/zulip/settings.py." % (setting_name,))
raise CommandError(f"Error: You must set {setting_name} in /etc/zulip/settings.py.")

def sleep_forever() -> None:
while True: # nocoverage

@ -127,7 +127,7 @@ You can use the command list_realms to find ID of the realms in this server."""
return UserProfile.objects.select_related().get(
delivery_email__iexact=email.strip(), realm=realm)
except UserProfile.DoesNotExist:
raise CommandError("The realm '%s' does not contain a user with email '%s'" % (realm, email))
raise CommandError(f"The realm '{realm}' does not contain a user with email '{email}'")

# Realm is None in the remaining code path. Here, we
# optimistically try to see if there is exactly one user with

@ -139,7 +139,7 @@ You can use the command list_realms to find ID of the realms in this server."""
"(in different realms); please pass `--realm` "
"to specify which one to modify.")
except UserProfile.DoesNotExist:
raise CommandError("This Zulip server does not contain a user with email '%s'" % (email,))
raise CommandError(f"This Zulip server does not contain a user with email '{email}'")

def get_client(self) -> Client:
"""Returns a Zulip Client object to be used for things done in management commands"""
@ -519,7 +519,7 @@ class MessageDict:
elif recip['email'] > display_recipient[0]['email']:
display_recipient = [display_recipient[0], recip]
else:
raise AssertionError("Invalid recipient type %s" % (recipient_type,))
raise AssertionError(f"Invalid recipient type {recipient_type}")

obj['display_recipient'] = display_recipient
obj['type'] = display_type

@ -1019,7 +1019,7 @@ def apply_unread_message_event(user_profile: UserProfile,
else:
message_type = 'huddle'
else:
raise AssertionError("Invalid message type %s" % (message['type'],))
raise AssertionError("Invalid message type {}".format(message['type']))

sender_id = message['sender_id']
@ -28,11 +28,11 @@ def do_batch_update(cursor: CursorObj,
if min_id is None:
return

print("\n Range of rows to update: [%s, %s]" % (min_id, max_id))
print(f"\n Range of rows to update: [{min_id}, {max_id}]")
while min_id <= max_id:
lower = min_id
upper = min_id + batch_size
print(' Updating range [%s,%s)' % (lower, upper))
print(f' Updating range [{lower},{upper})')
cursor.execute(stmt, [lower, upper])

min_id = upper
@ -15,7 +15,7 @@ def xor_hex_strings(bytes_a: str, bytes_b: str) -> str:
"""Given two hex strings of equal length, return a hex string with
the bitwise xor of the two hex strings."""
assert len(bytes_a) == len(bytes_b)
return ''.join(["%x" % (int(x, 16) ^ int(y, 16),)
return ''.join([f"{int(x, 16) ^ int(y, 16):x}"
for x, y in zip(bytes_a, bytes_b)])

def ascii_to_hex(input_string: str) -> str:
@ -223,11 +223,11 @@ def notify_bot_owner(event: Dict[str, Any],
bot_id = event['user_profile_id']
bot_owner = get_user_profile_by_id(bot_id).bot_owner

notification_message = "[A message](%s) triggered an outgoing webhook." % (message_url,)
notification_message = f"[A message]({message_url}) triggered an outgoing webhook."
if failure_message:
notification_message += "\n" + failure_message
if status_code:
notification_message += "\nThe webhook got a response with status code *%s*." % (status_code,)
notification_message += f"\nThe webhook got a response with status code *{status_code}*."
if response_content:
notification_message += "\nThe response contains the following payload:\n" \
"```\n%s\n```" % (str(response_content),)

@ -326,6 +326,6 @@ def do_rest_call(base_url: str,
response_message = ("An exception of type *%s* occurred for message `%s`! "
"See the Zulip server logs for more information." % (
type(e).__name__, event["command"],))
logging.exception("Outhook trigger failed:\n %s" % (e,))
logging.exception(f"Outhook trigger failed:\n {e}")
fail_with_message(event, response_message)
notify_bot_owner(event, exception=e)
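One behavioral nuance in this hunk: `logging.exception(f"...{e}")` formats eagerly, whereas the lazy `logging.info("...%s...", arg)` style (kept in the Gitter hunk earlier) defers formatting until the record is actually emitted. For an exception path that always logs, the difference is negligible, which presumably made the rewrite acceptable here. A sketch of the two styles:

```python
import logging

logging.basicConfig(level=logging.INFO)
name = "example-user"  # hypothetical value

logging.info("Mentioned user %s never sent any messages", name)  # lazy: formatted on emit
logging.info(f"Mentioned user {name} never sent any messages")   # eager: formatted before the call
```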
@ -468,14 +468,14 @@ def get_gcm_alert(message: Message) -> str:
"""
sender_str = message.sender.full_name
if message.recipient.type == Recipient.HUDDLE and message.trigger == 'private_message':
return "New private group message from %s" % (sender_str,)
return f"New private group message from {sender_str}"
elif message.recipient.type == Recipient.PERSONAL and message.trigger == 'private_message':
return "New private message from %s" % (sender_str,)
return f"New private message from {sender_str}"
elif message.is_stream_message() and (message.trigger == 'mentioned' or
message.trigger == 'wildcard_mentioned'):
return "New mention from %s" % (sender_str,)
return f"New mention from {sender_str}"
else: # message.is_stream_message() and message.trigger == 'stream_push_notify'
return "New stream message from %s in %s" % (sender_str, get_display_recipient(message.recipient),)
return f"New stream message from {sender_str} in {get_display_recipient(message.recipient)}"

def get_mobile_push_content(rendered_content: str) -> str:
def get_text(elem: lxml.html.HtmlElement) -> str:

@ -586,7 +586,7 @@ def get_apns_alert_title(message: Message) -> str:
assert isinstance(recipients, list)
return ', '.join(sorted(r['full_name'] for r in recipients))
elif message.is_stream_message():
return "#%s > %s" % (get_display_recipient(message.recipient), message.topic_name(),)
return f"#{get_display_recipient(message.recipient)} > {message.topic_name()}"
# For personal PMs, we just show the sender name.
return message.sender.full_name
@ -38,7 +38,7 @@ class SimpleQueueClient:
start = time.time()
self.connection = pika.BlockingConnection(self._get_parameters())
self.channel = self.connection.channel()
self.log.info('SimpleQueueClient connected (connecting took %.3fs)' % (time.time() - start,))
self.log.info(f'SimpleQueueClient connected (connecting took {time.time() - start:.3f}s)')

def _reconnect(self) -> None:
self.connection = None

@ -76,10 +76,10 @@ class SimpleQueueClient:
credentials=credentials)

def _generate_ctag(self, queue_name: str) -> str:
return "%s_%s" % (queue_name, str(random.getrandbits(16)))
return f"{queue_name}_{str(random.getrandbits(16))}"

def _reconnect_consumer_callback(self, queue: str, consumer: Consumer) -> None:
self.log.info("Queue reconnecting saved consumer %s to queue %s" % (consumer, queue))
self.log.info(f"Queue reconnecting saved consumer {consumer} to queue {queue}")
self.ensure_queue(queue, lambda: self.channel.basic_consume(queue,
consumer,
consumer_tag=self._generate_ctag(queue)))

@ -115,7 +115,7 @@ class SimpleQueueClient:
properties=pika.BasicProperties(delivery_mode=2),
body=body)

statsd.incr("rabbitmq.publish.%s" % (queue_name,))
statsd.incr(f"rabbitmq.publish.{queue_name}")

self.ensure_queue(queue_name, do_publish)
|
|||
ratelimited, time_till_free = cls.need_to_limit(entity_key, time_window, max_count)
|
||||
|
||||
if ratelimited:
|
||||
statsd.incr("ratelimiter.limited.%s" % (entity_key,))
|
||||
statsd.incr(f"ratelimiter.limited.{entity_key}")
|
||||
break
|
||||
|
||||
return ratelimited, time_till_free
|
||||
|
@ -450,7 +450,7 @@ class RedisRateLimiterBackend(RateLimiterBackend):
|
|||
ratelimited, time = cls.is_ratelimited(entity_key, rules)
|
||||
|
||||
if ratelimited:
|
||||
statsd.incr("ratelimiter.limited.%s" % (entity_key,))
|
||||
statsd.incr(f"ratelimiter.limited.{entity_key}")
|
||||
|
||||
else:
|
||||
try:
|
||||
|
|
|
@ -12,6 +12,6 @@ def get_realm_icon_url(realm: Realm) -> str:
|
|||
return upload_backend.get_realm_icon_url(realm.id, realm.icon_version)
|
||||
elif settings.ENABLE_GRAVATAR:
|
||||
hash_key = gravatar_hash(realm.string_id)
|
||||
return "https://secure.gravatar.com/avatar/%s?d=identicon" % (hash_key,)
|
||||
return f"https://secure.gravatar.com/avatar/{hash_key}?d=identicon"
|
||||
else:
|
||||
return settings.DEFAULT_AVATAR_URI+'?version=0'
|
||||
|
|
|
@ -63,4 +63,4 @@ def validate_key_fits_format(key: str, key_format: str) -> None:
regex = key_format.format(token=r"[a-zA-Z0-9]+")

if not re.fullmatch(regex, key):
raise ZulipRedisKeyOfWrongFormatError("%s does not match format %s" % (key, key_format))
raise ZulipRedisKeyOfWrongFormatError(f"{key} does not match format {key_format}")
Some files were not shown because too many files have changed in this diff.