diff --git a/analytics/lib/counts.py b/analytics/lib/counts.py
index 87a51e5fa6..ce8ab4ce14 100644
--- a/analytics/lib/counts.py
+++ b/analytics/lib/counts.py
@@ -40,7 +40,7 @@ class CountStat:
         self.data_collector = data_collector
         # might have to do something different for bitfields
         if frequency not in self.FREQUENCIES:
-            raise AssertionError("Unknown frequency: %s" % (frequency,))
+            raise AssertionError(f"Unknown frequency: {frequency}")
         self.frequency = frequency
         if interval is not None:
             self.interval = interval
@@ -50,7 +50,7 @@ class CountStat:
             self.interval = timedelta(days=1)

     def __str__(self) -> str:
-        return "<CountStat: %s>" % (self.property,)
+        return f"<CountStat: {self.property}>"

 class LoggingCountStat(CountStat):
     def __init__(self, property: str, output_table: Type[BaseCount], frequency: str) -> None:
@@ -86,11 +86,11 @@ def process_count_stat(stat: CountStat, fill_to_time: datetime,
     elif stat.frequency == CountStat.DAY:
         time_increment = timedelta(days=1)
     else:
-        raise AssertionError("Unknown frequency: %s" % (stat.frequency,))
+        raise AssertionError(f"Unknown frequency: {stat.frequency}")

     verify_UTC(fill_to_time)
     if floor_to_hour(fill_to_time) != fill_to_time:
-        raise ValueError("fill_to_time must be on an hour boundary: %s" % (fill_to_time,))
+        raise ValueError(f"fill_to_time must be on an hour boundary: {fill_to_time}")

     fill_state = FillState.objects.filter(property=stat.property).first()
     if fill_state is None:
@@ -108,7 +108,7 @@ def process_count_stat(stat: CountStat, fill_to_time: datetime,
     elif fill_state.state == FillState.DONE:
         currently_filled = fill_state.end_time
     else:
-        raise AssertionError("Unknown value for FillState.state: %s." % (fill_state.state,))
+        raise AssertionError(f"Unknown value for FillState.state: {fill_state.state}.")

     if isinstance(stat, DependentCountStat):
         for dependency in stat.dependencies:
diff --git a/analytics/lib/fixtures.py b/analytics/lib/fixtures.py
index bd24940416..a439b56d95 100644
--- a/analytics/lib/fixtures.py
+++ b/analytics/lib/fixtures.py
@@ -43,7 +43,7 @@ def generate_time_series_data(days: int=100, business_hours_base: float=10,
             [24*non_business_hours_base] * 2
         holidays = [random() < holiday_rate for i in range(days)]
     else:
-        raise AssertionError("Unknown frequency: %s" % (frequency,))
+        raise AssertionError(f"Unknown frequency: {frequency}")
     if length < 2:
         raise AssertionError("Must be generating at least 2 data points. 
" "Currently generating %s" % (length,)) diff --git a/analytics/lib/time_utils.py b/analytics/lib/time_utils.py index e372125430..48d8f54223 100644 --- a/analytics/lib/time_utils.py +++ b/analytics/lib/time_utils.py @@ -19,7 +19,7 @@ def time_range(start: datetime, end: datetime, frequency: str, end = floor_to_day(end) step = timedelta(days=1) else: - raise AssertionError("Unknown frequency: %s" % (frequency,)) + raise AssertionError(f"Unknown frequency: {frequency}") times = [] if min_length is not None: diff --git a/analytics/management/commands/analyze_user_activity.py b/analytics/management/commands/analyze_user_activity.py index 0e41d6deaa..19322082e2 100644 --- a/analytics/management/commands/analyze_user_activity.py +++ b/analytics/management/commands/analyze_user_activity.py @@ -26,9 +26,9 @@ def analyze_activity(options: Dict[str, Any]) -> None: total_duration += duration print("%-*s%s" % (37, user_profile.email, duration,)) - print("\nTotal Duration: %s" % (total_duration,)) - print("\nTotal Duration in minutes: %s" % (total_duration.total_seconds() / 60.,)) - print("Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)) + print(f"\nTotal Duration: {total_duration}") + print(f"\nTotal Duration in minutes: {total_duration.total_seconds() / 60.}") + print(f"Total Duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}") class Command(BaseCommand): help = """Report analytics of user activity on a per-user and realm basis. diff --git a/analytics/management/commands/check_analytics_state.py b/analytics/management/commands/check_analytics_state.py index 8e7d903163..464659b68f 100644 --- a/analytics/management/commands/check_analytics_state.py +++ b/analytics/management/commands/check_analytics_state.py @@ -33,8 +33,7 @@ class Command(BaseCommand): state_file_tmp = state_file_path + "-tmp" with open(state_file_tmp, "w") as f: - f.write("%s|%s|%s|%s\n" % ( - int(time.time()), status, states[status], message)) + f.write(f"{int(time.time())}|{status}|{states[status]}|{message}\n") os.rename(state_file_tmp, state_file_path) def get_fill_state(self) -> Dict[str, Any]: @@ -50,7 +49,7 @@ class Command(BaseCommand): try: verify_UTC(last_fill) except TimezoneNotUTCException: - return {'status': 2, 'message': 'FillState not in UTC for %s' % (property,)} + return {'status': 2, 'message': f'FillState not in UTC for {property}'} if stat.frequency == CountStat.DAY: floor_function = floor_to_day diff --git a/analytics/management/commands/clear_single_stat.py b/analytics/management/commands/clear_single_stat.py index cc004bc444..ff5964ddd3 100644 --- a/analytics/management/commands/clear_single_stat.py +++ b/analytics/management/commands/clear_single_stat.py @@ -20,7 +20,7 @@ class Command(BaseCommand): def handle(self, *args: Any, **options: Any) -> None: property = options['property'] if property not in COUNT_STATS: - raise CommandError("Invalid property: %s" % (property,)) + raise CommandError(f"Invalid property: {property}") if not options['force']: raise CommandError("No action taken. 
Use --force.") diff --git a/analytics/management/commands/realm_stats.py b/analytics/management/commands/realm_stats.py index fb8cf2a0cf..8730a3a217 100644 --- a/analytics/management/commands/realm_stats.py +++ b/analytics/management/commands/realm_stats.py @@ -66,7 +66,7 @@ class Command(BaseCommand): fraction = 0.0 else: fraction = numerator / float(denominator) - print("%.2f%% of" % (fraction * 100,), text) + print(f"{fraction * 100:.2f}% of", text) def handle(self, *args: Any, **options: Any) -> None: if options['realms']: diff --git a/analytics/management/commands/stream_stats.py b/analytics/management/commands/stream_stats.py index d96efbbcb7..b6bde4bdc2 100644 --- a/analytics/management/commands/stream_stats.py +++ b/analytics/management/commands/stream_stats.py @@ -40,18 +40,18 @@ class Command(BaseCommand): print("%10s %d public streams and" % ("(", public_count), end=' ') print("%d private streams )" % (private_count,)) print("------------") - print("%25s %15s %10s %12s" % ("stream", "subscribers", "messages", "type")) + print("{:>25} {:>15} {:>10} {:>12}".format("stream", "subscribers", "messages", "type")) for stream in streams: if stream.invite_only: stream_type = 'private' else: stream_type = 'public' - print("%25s" % (stream.name,), end=' ') + print(f"{stream.name:>25}", end=' ') recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id) print("%10d" % (len(Subscription.objects.filter(recipient=recipient, active=True)),), end=' ') num_messages = len(Message.objects.filter(recipient=recipient)) print("%12d" % (num_messages,), end=' ') - print("%15s" % (stream_type,)) + print(f"{stream_type:>15}") print("") diff --git a/analytics/management/commands/update_analytics_counts.py b/analytics/management/commands/update_analytics_counts.py index 886b744853..9e89bb2246 100644 --- a/analytics/management/commands/update_analytics_counts.py +++ b/analytics/management/commands/update_analytics_counts.py @@ -71,7 +71,7 @@ class Command(BaseCommand): else: stats = list(COUNT_STATS.values()) - logger.info("Starting updating analytics counts through %s" % (fill_to_time,)) + logger.info(f"Starting updating analytics counts through {fill_to_time}") if options['verbose']: start = time.time() last = start @@ -79,13 +79,13 @@ class Command(BaseCommand): for stat in stats: process_count_stat(stat, fill_to_time) if options['verbose']: - print("Updated %s in %.3fs" % (stat.property, time.time() - last)) + print(f"Updated {stat.property} in {time.time() - last:.3f}s") last = time.time() if options['verbose']: print("Finished updating analytics counts through %s in %.3fs" % (fill_to_time, time.time() - start)) - logger.info("Finished updating analytics counts through %s" % (fill_to_time,)) + logger.info(f"Finished updating analytics counts through {fill_to_time}") if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS: send_analytics_to_remote_server() diff --git a/analytics/management/commands/user_stats.py b/analytics/management/commands/user_stats.py index 2b023db0e4..ae83b14cc8 100644 --- a/analytics/management/commands/user_stats.py +++ b/analytics/management/commands/user_stats.py @@ -36,7 +36,7 @@ class Command(BaseCommand): print("%d streams" % (len(Stream.objects.filter(realm=realm)),)) for user_profile in user_profiles: - print("%35s" % (user_profile.email,), end=' ') + print(f"{user_profile.email:>35}", end=' ') for week in range(10): print("%5d" % (self.messages_sent_by(user_profile, week),), end=' ') print("") diff --git a/analytics/models.py 
b/analytics/models.py
index 4d6ebd11de..491e144ac0 100644
--- a/analytics/models.py
+++ b/analytics/models.py
@@ -17,7 +17,7 @@ class FillState(models.Model):
     state: int = models.PositiveSmallIntegerField()

     def __str__(self) -> str:
-        return "<FillState: %s %s %s>" % (self.property, self.end_time, self.state)
+        return f"<FillState: {self.property} {self.end_time} {self.state}>"

 # The earliest/starting end_time in FillState
 # We assume there is at least one realm
@@ -61,7 +61,7 @@ class InstallationCount(BaseCount):
         ]

     def __str__(self) -> str:
-        return "<InstallationCount: %s %s %s>" % (self.property, self.subgroup, self.value)
+        return f"<InstallationCount: {self.property} {self.subgroup} {self.value}>"

 class RealmCount(BaseCount):
     realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
@@ -81,7 +81,7 @@ class RealmCount(BaseCount):
         index_together = ["property", "end_time"]

     def __str__(self) -> str:
-        return "<RealmCount: %s %s %s %s>" % (self.realm, self.property, self.subgroup, self.value)
+        return f"<RealmCount: {self.realm} {self.property} {self.subgroup} {self.value}>"

 class UserCount(BaseCount):
     user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
@@ -104,7 +104,7 @@ class UserCount(BaseCount):
         index_together = ["property", "realm", "end_time"]

     def __str__(self) -> str:
-        return "<UserCount: %s %s %s %s>" % (self.user, self.property, self.subgroup, self.value)
+        return f"<UserCount: {self.user} {self.property} {self.subgroup} {self.value}>"

 class StreamCount(BaseCount):
     stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
@@ -127,5 +127,4 @@ class StreamCount(BaseCount):
         index_together = ["property", "realm", "end_time"]

     def __str__(self) -> str:
-        return "<StreamCount: %s %s %s %s %s>" % (
-            self.stream, self.property, self.subgroup, self.value, self.id)
+        return f"<StreamCount: {self.stream} {self.property} {self.subgroup} {self.value} {self.id}>"
diff --git a/analytics/tests/test_counts.py b/analytics/tests/test_counts.py
index dcc0aef4a1..7992da7af6 100644
--- a/analytics/tests/test_counts.py
+++ b/analytics/tests/test_counts.py
@@ -49,7 +49,7 @@ class AnalyticsTestCase(TestCase):
     def create_user(self, **kwargs: Any) -> UserProfile:
         self.name_counter += 1
         defaults = {
-            'email': 'user%s@domain.tld' % (self.name_counter,),
+            'email': f'user{self.name_counter}@domain.tld',
             'date_joined': self.TIME_LAST_HOUR,
             'full_name': 'full_name',
             'short_name': 'short_name',
@@ -71,7 +71,7 @@ def create_stream_with_recipient(self, **kwargs: Any) -> Tuple[Stream, Recipient]:
         self.name_counter += 1
-        defaults = {'name': 'stream name %s' % (self.name_counter,),
+        defaults = {'name': f'stream name {self.name_counter}',
                     'realm': self.default_realm,
                     'date_created': self.TIME_LAST_HOUR}
         for key, value in defaults.items():
@@ -84,7 +84,7 @@ def create_huddle_with_recipient(self, **kwargs: Any) -> Tuple[Huddle, Recipient]:
         self.name_counter += 1
-        defaults = {'huddle_hash': 'hash%s' % (self.name_counter,)}
+        defaults = {'huddle_hash': f'hash{self.name_counter}'}
         for key, value in defaults.items():
             kwargs[key] = kwargs.get(key, value)
         huddle = Huddle.objects.create(**kwargs)
@@ -339,10 +339,10 @@ class TestCountStats(AnalyticsTestCase):
                                               date_created=self.TIME_ZERO-2*self.DAY)
         for minutes_ago in [0, 1, 61, 60*24+1]:
             creation_time = self.TIME_ZERO - minutes_ago*self.MINUTE
-            user = self.create_user(email='user-%s@second.analytics' % (minutes_ago,),
+            user = self.create_user(email=f'user-{minutes_ago}@second.analytics',
                                     realm=self.second_realm, date_joined=creation_time)
             recipient = self.create_stream_with_recipient(
-                name='stream %s' % (minutes_ago,), realm=self.second_realm,
+                name=f'stream {minutes_ago}', realm=self.second_realm,
                 date_created=creation_time)[1]
             self.create_message(user, recipient, date_sent=creation_time)
         self.hourly_user = get_user('user-1@second.analytics', self.second_realm)
diff --git a/analytics/tests/test_views.py
index 
aec6e365d7..6e4de62be8 100644 --- a/analytics/tests/test_views.py +++ b/analytics/tests/test_views.py @@ -408,7 +408,7 @@ class TestSupportEndpoint(ZulipTestCase): def check_zulip_realm_query_result(result: HttpResponse) -> None: zulip_realm = get_realm("zulip") - self.assert_in_success_response(['', '', '', @@ -420,7 +420,7 @@ class TestSupportEndpoint(ZulipTestCase): def check_lear_realm_query_result(result: HttpResponse) -> None: lear_realm = get_realm("lear") - self.assert_in_success_response(['', '', '', @@ -532,7 +532,7 @@ class TestSupportEndpoint(ZulipTestCase): cordelia = self.example_user('cordelia') self.login_user(cordelia) - result = self.client_post("/activity/support", {"realm_id": "%s" % (cordelia.realm_id,), "plan_type": "2"}) + result = self.client_post("/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}) self.assertEqual(result.status_code, 302) self.assertEqual(result["Location"], "/login/") @@ -540,7 +540,7 @@ class TestSupportEndpoint(ZulipTestCase): self.login_user(iago) with mock.patch("analytics.views.do_change_plan_type") as m: - result = self.client_post("/activity/support", {"realm_id": "%s" % (iago.realm_id,), "plan_type": "2"}) + result = self.client_post("/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "2"}) m.assert_called_once_with(get_realm("zulip"), 2) self.assert_in_success_response(["Plan type of Zulip Dev changed from self hosted to limited"], result) @@ -549,14 +549,14 @@ class TestSupportEndpoint(ZulipTestCase): lear_realm = get_realm('lear') self.login_user(cordelia) - result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"}) + result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}) self.assertEqual(result.status_code, 302) self.assertEqual(result["Location"], "/login/") self.login('iago') with mock.patch("analytics.views.attach_discount_to_realm") as m: - result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"}) + result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}) m.assert_called_once_with(get_realm("lear"), 25) self.assert_in_success_response(["Discount of Lear & Co. changed to 25 from None"], result) @@ -565,19 +565,19 @@ class TestSupportEndpoint(ZulipTestCase): lear_realm = get_realm('lear') self.login_user(cordelia) - result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "deactivated"}) + result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"}) self.assertEqual(result.status_code, 302) self.assertEqual(result["Location"], "/login/") self.login('iago') with mock.patch("analytics.views.do_deactivate_realm") as m: - result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "deactivated"}) + result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"}) m.assert_called_once_with(lear_realm, self.example_user("iago")) self.assert_in_success_response(["Lear & Co. 
deactivated"], result) with mock.patch("analytics.views.do_send_realm_reactivation_email") as m: - result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "active"}) + result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "active"}) m.assert_called_once_with(lear_realm) self.assert_in_success_response(["Realm reactivation email sent to admins of Lear"], result) @@ -586,19 +586,19 @@ class TestSupportEndpoint(ZulipTestCase): lear_realm = get_realm('lear') self.login_user(cordelia) - result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"}) + result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}) self.assertEqual(result.status_code, 302) self.assertEqual(result["Location"], "/login/") self.login('iago') with mock.patch("analytics.views.do_scrub_realm") as m: - result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "scrub_realm": "scrub_realm"}) + result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "scrub_realm": "scrub_realm"}) m.assert_called_once_with(lear_realm) self.assert_in_success_response(["Lear & Co. scrubbed"], result) with mock.patch("analytics.views.do_scrub_realm") as m: - result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,)}) + result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}"}) m.assert_not_called() class TestGetChartDataHelpers(ZulipTestCase): diff --git a/analytics/views.py b/analytics/views.py index 4987344709..d818a1d229 100644 --- a/analytics/views.py +++ b/analytics/views.py @@ -96,9 +96,9 @@ def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse: try: realm = get_realm(realm_str) except Realm.DoesNotExist: - return HttpResponseNotFound("Realm %s does not exist" % (realm_str,)) + return HttpResponseNotFound(f"Realm {realm_str} does not exist") - return render_stats(request, '/realm/%s' % (realm_str,), realm.name or realm.string_id, + return render_stats(request, f'/realm/{realm_str}', realm.name or realm.string_id, analytics_ready=is_analytics_ready(realm)) @require_server_admin @@ -106,8 +106,8 @@ def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse: def stats_for_remote_realm(request: HttpRequest, remote_server_id: str, remote_realm_id: str) -> HttpResponse: server = RemoteZulipServer.objects.get(id=remote_server_id) - return render_stats(request, '/remote/%s/realm/%s' % (server.id, remote_realm_id), - "Realm %s on server %s" % (remote_realm_id, server.hostname)) + return render_stats(request, f'/remote/{server.id}/realm/{remote_realm_id}', + f"Realm {remote_realm_id} on server {server.hostname}") @require_server_admin_api @has_request_variables @@ -136,8 +136,8 @@ def stats_for_installation(request: HttpRequest) -> HttpResponse: @require_server_admin def stats_for_remote_installation(request: HttpRequest, remote_server_id: str) -> HttpResponse: server = RemoteZulipServer.objects.get(id=remote_server_id) - return render_stats(request, '/remote/%s/installation' % (server.id,), - 'remote Installation %s' % (server.hostname,), True, True) + return render_stats(request, f'/remote/{server.id}/installation', + f'remote Installation {server.hostname}', True, True) @require_server_admin_api @has_request_variables @@ -332,7 +332,7 @@ def table_filtered_to_id(table: Type[BaseCount], key_id: int) -> QuerySet: elif table == RemoteRealmCount: return 
RemoteRealmCount.objects.filter(realm_id=key_id)
     else:
-        raise AssertionError("Unknown table: %s" % (table,))
+        raise AssertionError(f"Unknown table: {table}")

 def client_label_map(name: str) -> str:
     if name == "website":
@@ -463,7 +463,7 @@ def get_realm_day_counts() -> Dict[str, Dict[str, str]]:
             else:
                 good_bad = 'neutral'

-            return '<td class="number %s">%s</td>' % (good_bad, cnt)
+            return f'<td class="number {good_bad}">{cnt}</td>'

         cnts = (format_count(raw_cnts[0], 'neutral') +
                 ''.join(map(format_count, raw_cnts[1:])))
@@ -629,7 +629,7 @@ def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
         total_hours += hours
         row['hours'] = str(int(hours))
         try:
-            row['hours_per_user'] = '%.1f' % (hours / row['dau_count'],)
+            row['hours_per_user'] = '{:.1f}'.format(hours / row['dau_count'])
         except Exception:
             pass
@@ -709,7 +709,7 @@ def user_activity_intervals() -> Tuple[mark_safe, Dict[str, float]]:
     for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
         realm_duration = timedelta(0)
-        output += '<hr>%s\n' % (string_id,)
+        output += f'<hr>{string_id}\n'
         for email, intervals in itertools.groupby(realm_intervals, by_email):
             duration = timedelta(0)
             for interval in intervals:
@@ -723,9 +723,9 @@ def user_activity_intervals() -> Tuple[mark_safe, Dict[str, float]]:
         realm_minutes[string_id] = realm_duration.total_seconds() / 60

-    output += "\nTotal Duration: %s\n" % (total_duration,)
-    output += "\nTotal Duration in minutes: %s\n" % (total_duration.total_seconds() / 60.,)
-    output += "Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)
+    output += f"\nTotal Duration: {total_duration}\n"
+    output += f"\nTotal Duration in minutes: {total_duration.total_seconds() / 60.}\n"
+    output += f"Total Duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
     content = mark_safe('<pre>' + output + '</pre>
') return content, realm_minutes @@ -841,7 +841,7 @@ def ad_hoc_queries() -> List[Dict[str, str]]: ### for mobile_type in ['Android', 'ZulipiOS']: - title = '%s usage' % (mobile_type,) + title = f'{mobile_type} usage' query = SQL(''' select @@ -1284,13 +1284,13 @@ def format_date_for_activity_reports(date: Optional[datetime]) -> str: def user_activity_link(email: str) -> mark_safe: url_name = 'analytics.views.get_user_activity' url = reverse(url_name, kwargs=dict(email=email)) - email_link = '%s' % (url, email) + email_link = f'{email}' return mark_safe(email_link) def realm_activity_link(realm_str: str) -> mark_safe: url_name = 'analytics.views.get_realm_activity' url = reverse(url_name, kwargs=dict(realm_str=realm_str)) - realm_link = '%s' % (url, realm_str) + realm_link = f'{realm_str}' return mark_safe(realm_link) def realm_stats_link(realm_str: str) -> mark_safe: @@ -1449,7 +1449,7 @@ def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse: try: admins = Realm.objects.get(string_id=realm_str).get_human_admin_users() except Realm.DoesNotExist: - return HttpResponseNotFound("Realm %s does not exist" % (realm_str,)) + return HttpResponseNotFound(f"Realm {realm_str} does not exist") admin_emails = {admin.delivery_email for admin in admins} diff --git a/corporate/lib/stripe.py b/corporate/lib/stripe.py index 362e13b6be..54ee6d3610 100644 --- a/corporate/lib/stripe.py +++ b/corporate/lib/stripe.py @@ -185,7 +185,7 @@ def do_create_stripe_customer(user: UserProfile, stripe_token: Optional[str]=Non # bad thing that will happen is that we will create an extra stripe # customer that we can delete or ignore. stripe_customer = stripe.Customer.create( - description="%s (%s)" % (realm.string_id, realm.name), + description=f"{realm.string_id} ({realm.name})", email=user.delivery_email, metadata={'realm_id': realm.id, 'realm_str': realm.string_id}, source=stripe_token) diff --git a/corporate/models.py b/corporate/models.py index b512f7c4ff..bb6ea75fe7 100644 --- a/corporate/models.py +++ b/corporate/models.py @@ -14,7 +14,7 @@ class Customer(models.Model): default_discount: Optional[Decimal] = models.DecimalField(decimal_places=4, max_digits=7, null=True) def __str__(self) -> str: - return "" % (self.realm, self.stripe_customer_id) + return f"" def get_customer_by_realm(realm: Realm) -> Optional[Customer]: return Customer.objects.filter(realm=realm).first() diff --git a/corporate/tests/test_stripe.py b/corporate/tests/test_stripe.py index d16a8b5c9a..7643ee67f2 100644 --- a/corporate/tests/test_stripe.py +++ b/corporate/tests/test_stripe.py @@ -144,7 +144,7 @@ def normalize_fixture_data(decorated_function: CallableT, for i, timestamp_field in enumerate(tested_timestamp_fields): # Don't use (..) 
notation, since the matched timestamp can easily appear in other fields pattern_translations[ - '"%s": 1[5-9][0-9]{8}(?![0-9-])' % (timestamp_field,) + f'"{timestamp_field}": 1[5-9][0-9]{{8}}(?![0-9-])' ] = '"%s": 1%02d%%07d' % (timestamp_field, i+1) normalized_values: Dict[str, Dict[str, str]] = { @@ -463,8 +463,8 @@ class StripeTest(StripeTestCase): self.assert_not_in_success_response(['Pay annually'], response) for substring in [ 'Zulip Standard', str(self.seat_count), - 'You are using', '%s of %s licenses' % (self.seat_count, self.seat_count), - 'Your plan will renew on', 'January 2, 2013', '$%s.00' % (80 * self.seat_count,), + 'You are using', f'{self.seat_count} of {self.seat_count} licenses', + 'Your plan will renew on', 'January 2, 2013', f'${80 * self.seat_count}.00', 'Visa ending in 4242', 'Update card']: self.assert_in_response(substring, response) @@ -547,7 +547,7 @@ class StripeTest(StripeTestCase): self.assert_not_in_success_response(['Pay annually', 'Update card'], response) for substring in [ 'Zulip Standard', str(123), - 'You are using', '%s of %s licenses' % (self.seat_count, 123), + 'You are using', f'{self.seat_count} of {123} licenses', 'Your plan will renew on', 'January 2, 2013', '$9,840.00', # 9840 = 80 * 123 'Billed by invoice']: self.assert_in_response(substring, response) @@ -618,8 +618,8 @@ class StripeTest(StripeTestCase): self.assert_not_in_success_response(['Pay annually'], response) for substring in [ 'Zulip Standard', 'Free Trial', str(self.seat_count), - 'You are using', '%s of %s licenses' % (self.seat_count, self.seat_count), - 'Your plan will be upgraded to', 'March 2, 2012', '$%s.00' % (80 * self.seat_count,), + 'You are using', f'{self.seat_count} of {self.seat_count} licenses', + 'Your plan will be upgraded to', 'March 2, 2012', f'${80 * self.seat_count}.00', 'Visa ending in 4242', 'Update card']: self.assert_in_response(substring, response) @@ -772,7 +772,7 @@ class StripeTest(StripeTestCase): self.assert_not_in_success_response(['Pay annually'], response) for substring in [ 'Zulip Standard', 'Free Trial', str(self.seat_count), - 'You are using', '%s of %s licenses' % (self.seat_count, 123), + 'You are using', f'{self.seat_count} of {123} licenses', 'Your plan will be upgraded to', 'March 2, 2012', f'{80 * 123:,.2f}', 'Billed by invoice' ]: diff --git a/corporate/views.py b/corporate/views.py index 3767a3b139..0f8c8f3ffc 100644 --- a/corporate/views.py +++ b/corporate/views.py @@ -114,7 +114,7 @@ def upgrade(request: HttpRequest, user: UserProfile, ) return json_error(e.message, data={'error_description': e.description}) except Exception as e: - billing_logger.exception("Uncaught exception in billing: %s" % (e,)) + billing_logger.exception(f"Uncaught exception in billing: {e}") error_message = BillingError.CONTACT_SUPPORT error_description = "uncaught exception during upgrade" return json_error(error_message, data={'error_description': error_description}) diff --git a/frontend_tests/run-casper b/frontend_tests/run-casper index 72d972dd0f..e59ad4f3b6 100755 --- a/frontend_tests/run-casper +++ b/frontend_tests/run-casper @@ -121,7 +121,7 @@ def run_tests(files: Iterable[str], external_host: str) -> None: for test_file in test_files: test_name = os.path.basename(test_file) cmd = ["node_modules/.bin/casperjs"] + remote_debug + verbose + xunit_export + ["test", test_file] - print("\n\n===================== %s\nRunning %s\n\n" % (test_name, " ".join(map(shlex.quote, cmd))), flush=True) + print("\n\n===================== {}\nRunning 
{}\n\n".format(test_name, " ".join(map(shlex.quote, cmd))), flush=True) ret = subprocess.call(cmd) if ret != 0: return ret diff --git a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_cron_file b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_cron_file index 416be339a2..64f53c11e7 100755 --- a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_cron_file +++ b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_cron_file @@ -36,7 +36,7 @@ def nagios_from_file(results_file: str, max_time_diff: int=60 * 2) -> 'Tuple[int state = pieces[2] data = pieces[3] - return (ret, "%s: %s" % (state, data)) + return (ret, f"{state}: {data}") if __name__ == "__main__": RESULTS_FILE = sys.argv[1] diff --git a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_queue_worker_errors b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_queue_worker_errors index 2a6138bf38..2ed4152bbc 100755 --- a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_queue_worker_errors +++ b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_queue_worker_errors @@ -12,7 +12,7 @@ import sys wildcard = os.path.join("/var/log/zulip/queue_error", '*.errors') clean = True for fn in glob.glob(wildcard): - print('WARNING: Queue errors logged in %s' % (fn,)) + print(f'WARNING: Queue errors logged in {fn}') clean = False if not clean: diff --git a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_consumers b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_consumers index 332e3cef17..c1be7abb78 100755 --- a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_consumers +++ b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_rabbitmq_consumers @@ -17,7 +17,7 @@ if len(sys.argv) < 2: print("Please pass the name of the consumer file to check") exit(1) -RESULTS_FILE = "/var/lib/nagios_state/check-rabbitmq-consumers-%s" % (sys.argv[1],) +RESULTS_FILE = f"/var/lib/nagios_state/check-rabbitmq-consumers-{sys.argv[1]}" ret, result = nagios_from_file(RESULTS_FILE) diff --git a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_send_receive_time b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_send_receive_time index 6c762d96ff..ba220bded7 100755 --- a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_send_receive_time +++ b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_send_receive_time @@ -88,24 +88,24 @@ states = { def report(state: str, timestamp: Any = None, msg: Optional[str] = None) -> None: now = int(time.time()) if msg is None: - msg = "send time was %s" % (timestamp,) + msg = f"send time was {timestamp}" state_file_path = "/var/lib/nagios_state/check_send_receive_state" with open(state_file_path + ".tmp", 'w') as f: - f.write("%s|%s|%s|%s\n" % (now, states[state], state, msg)) + f.write(f"{now}|{states[state]}|{state}|{msg}\n") os.rename(state_file_path + ".tmp", state_file_path) - print("%s: %s" % (state, msg)) + print(f"{state}: {msg}") exit(states[state]) def send_zulip(sender: zulip.Client, message: Dict[str, Any]) -> None: result = sender.send_message(message) if result["result"] != "success" and options.nagios: - report("CRITICAL", msg="Error sending Zulip, args were: %s, %s" % (message, result)) + report("CRITICAL", msg=f"Error sending Zulip, args were: {message}, {result}") def get_zulips() -> List[Dict[str, Any]]: global queue_id, last_event_id res = zulip_recipient.get_events(queue_id=queue_id, last_event_id=last_event_id) if 'error' in res.get('result', {}): - 
report("CRITICAL", msg="Error receiving Zulips, error was: %s" % (res["msg"],)) + report("CRITICAL", msg="Error receiving Zulips, error was: {}".format(res["msg"])) for event in res['events']: last_event_id = max(last_event_id, int(event['id'])) # If we get a heartbeat event, that means we've been hanging for @@ -141,10 +141,10 @@ zulip_recipient = zulip.Client( try: res = zulip_recipient.register(event_types=["message"]) if 'error' in res.get('result', {}): - report("CRITICAL", msg="Error subscribing to Zulips: %s" % (res['msg'],)) + report("CRITICAL", msg="Error subscribing to Zulips: {}".format(res['msg'])) queue_id, last_event_id = (res['queue_id'], res['last_event_id']) except Exception: - report("CRITICAL", msg="Error subscribing to Zulips:\n%s" % (traceback.format_exc(),)) + report("CRITICAL", msg=f"Error subscribing to Zulips:\n{traceback.format_exc()}") msg_to_send = str(random.getrandbits(64)) time_start = time.time() @@ -172,6 +172,6 @@ if options.nagios: report('WARNING', timestamp=seconds_diff) if options.munin: - print("sendreceive.value %s" % (seconds_diff,)) + print(f"sendreceive.value {seconds_diff}") elif options.nagios: report('OK', timestamp=seconds_diff) diff --git a/puppet/zulip/files/nagios_plugins/zulip_nagios_server/check_postgres_replication_lag b/puppet/zulip/files/nagios_plugins/zulip_nagios_server/check_postgres_replication_lag index 5e145f3fca..59b0a3f47a 100755 --- a/puppet/zulip/files/nagios_plugins/zulip_nagios_server/check_postgres_replication_lag +++ b/puppet/zulip/files/nagios_plugins/zulip_nagios_server/check_postgres_replication_lag @@ -22,17 +22,17 @@ states = { } def report(state: str, msg: str) -> "NoReturn": - print("%s: %s" % (state, msg)) + print(f"{state}: {msg}") exit(states[state]) def get_loc_over_ssh(host: str, func: str) -> str: try: return subprocess.check_output(['ssh', host, - 'psql -v ON_ERROR_STOP=1 zulip -t -c "SELECT %s()"' % (func,)], + f'psql -v ON_ERROR_STOP=1 zulip -t -c "SELECT {func}()"'], stderr=subprocess.STDOUT, universal_newlines=True) except subprocess.CalledProcessError as e: - report('CRITICAL', 'ssh failed: %s: %s' % (str(e), e.output)) + report('CRITICAL', f'ssh failed: {str(e)}: {e.output}') def loc_to_abs_offset(loc_str: str) -> int: m = re.match(r'^\s*([0-9a-fA-F]+)/([0-9a-fA-F]+)\s*$', loc_str) diff --git a/puppet/zulip/files/nagios_plugins/zulip_postgres_appdb/check_fts_update_log b/puppet/zulip/files/nagios_plugins/zulip_postgres_appdb/check_fts_update_log index 0571287bdc..1f11d9f60d 100755 --- a/puppet/zulip/files/nagios_plugins/zulip_postgres_appdb/check_fts_update_log +++ b/puppet/zulip/files/nagios_plugins/zulip_postgres_appdb/check_fts_update_log @@ -22,7 +22,7 @@ states = { } def report(state: str, num: str) -> None: - print("%s: %s rows in fts_update_log table" % (state, num)) + print(f"{state}: {num} rows in fts_update_log table") exit(states[state]) conn = psycopg2.connect(database="zulip") diff --git a/puppet/zulip/files/nagios_plugins/zulip_postgres_common/check_postgres_backup b/puppet/zulip/files/nagios_plugins/zulip_postgres_common/check_postgres_backup index 7b7ef54d0c..33305aae70 100755 --- a/puppet/zulip/files/nagios_plugins/zulip_postgres_common/check_postgres_backup +++ b/puppet/zulip/files/nagios_plugins/zulip_postgres_common/check_postgres_backup @@ -13,7 +13,7 @@ states = { } def report(state: str, msg: str) -> None: - print("%s: %s" % (state, msg)) + print(f"{state}: {msg}") exit(states[state]) if subprocess.check_output(['psql', '-v', 'ON_ERROR_STOP=1', @@ -28,6 +28,6 @@ except OSError: 
report('UNKNOWN', 'could not determine completion time of last Postgres backup') if datetime.now(tz=timezone.utc) - last_backup > timedelta(hours=25): - report('CRITICAL', 'last Postgres backup completed more than 25 hours ago: %s' % (last_backup,)) + report('CRITICAL', f'last Postgres backup completed more than 25 hours ago: {last_backup}') -report('OK', 'last Postgres backup completed less than 25 hours ago: %s' % (last_backup,)) +report('OK', f'last Postgres backup completed less than 25 hours ago: {last_backup}') diff --git a/puppet/zulip/files/postgresql/pg_backup_and_purge b/puppet/zulip/files/postgresql/pg_backup_and_purge index 1fed9ab152..13844950f3 100755 --- a/puppet/zulip/files/postgresql/pg_backup_and_purge +++ b/puppet/zulip/files/postgresql/pg_backup_and_purge @@ -42,7 +42,7 @@ if is_rhel_based: else: pg_data_paths = glob.glob('/var/lib/postgresql/*/main') if len(pg_data_paths) != 1: - print("Postgres installation is not unique: %s" % (pg_data_paths,)) + print(f"Postgres installation is not unique: {pg_data_paths}") sys.exit(1) pg_data_path = pg_data_paths[0] run(['env-wal-e', 'backup-push', pg_data_path]) diff --git a/puppet/zulip/files/postgresql/process_fts_updates b/puppet/zulip/files/postgresql/process_fts_updates index f0ec736d10..8aedfe20ff 100755 --- a/puppet/zulip/files/postgresql/process_fts_updates +++ b/puppet/zulip/files/postgresql/process_fts_updates @@ -142,7 +142,7 @@ while True: # Catch up on any historical columns while True: rows_updated = update_fts_columns(cursor) - notice = "Processed %s rows catching up" % (rows_updated,) + notice = f"Processed {rows_updated} rows catching up" if rows_updated > 0: logger.info(notice) else: diff --git a/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_personal_zephyr_mirrors b/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_personal_zephyr_mirrors index 3187b2a5b6..af8fd0ab86 100755 --- a/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_personal_zephyr_mirrors +++ b/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_personal_zephyr_mirrors @@ -22,7 +22,7 @@ states: Dict[str, int] = { } def report(state: str, output: str) -> None: - print("%s\n%s" % (state, output)) + print(f"{state}\n{output}") exit(states[state]) output = "" @@ -41,7 +41,7 @@ for results_file_name in os.listdir(RESULTS_DIR): down_count += 1 this_state = "DOWN" last_check_ts = time.strftime("%Y-%m-%d %H:%M %Z", time.gmtime(last_check)) - output += "%s: %s (%s)\n" % (results_file, this_state, last_check_ts) + output += f"{results_file}: {this_state} ({last_check_ts})\n" if down_count == 0: state = "OK" diff --git a/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_user_zephyr_mirror_liveness b/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_user_zephyr_mirror_liveness index d518b6a2ca..c6b525f62a 100755 --- a/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_user_zephyr_mirror_liveness +++ b/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_user_zephyr_mirror_liveness @@ -39,11 +39,11 @@ def report(state: str, short_msg: str, too_old: Optional[Set[Any]] = None) -> No too_old_data = "" if too_old: too_old_data = "\nLast call to get_message for recently out of date mirrors:\n" + "\n".join( - ["%16s: %s" % (user.user_profile.email, - user.last_visit.strftime("%Y-%m-%d %H:%M %Z") - ) for user in too_old] + ["{:>16}: {}".format(user.user_profile.email, + user.last_visit.strftime("%Y-%m-%d %H:%M %Z") + ) for user in too_old] ) - print("%s: %s%s" 
% (state, short_msg, too_old_data)) + print(f"{state}: {short_msg}{too_old_data}") exit(states[state]) diff --git a/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_zephyr_mirror b/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_zephyr_mirror index fff7cef75a..3e9c86184c 100755 --- a/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_zephyr_mirror +++ b/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_zephyr_mirror @@ -23,7 +23,7 @@ states: Dict[str, int] = { } def report(state: str, data: str, last_check: float) -> None: - print("%s: Last test run completed at %s\n%s" % ( + print("{}: Last test run completed at {}\n{}".format( state, time.strftime("%Y-%m-%d %H:%M %Z", time.gmtime(last_check)), data)) exit(states[state]) diff --git a/puppet/zulip_ops/files/zulip-ec2-configure-interfaces b/puppet/zulip_ops/files/zulip-ec2-configure-interfaces index 9f813dcdc5..d5ecbcf852 100755 --- a/puppet/zulip_ops/files/zulip-ec2-configure-interfaces +++ b/puppet/zulip_ops/files/zulip-ec2-configure-interfaces @@ -131,7 +131,7 @@ for device in macs.values(): for (count, ip) in enumerate(to_configure): # Configure the IP via a virtual interface device = "ens%i:%i" % (device_number, count) - log.info("Configuring %s with IP %s" % (device, ip)) + log.info(f"Configuring {device} with IP {ip}") subprocess.check_call(['/sbin/ifconfig', device, ip]) subprocess.check_call( ['/sbin/iptables', '-t', 'mangle', '-A', 'OUTPUT', '-m', 'conntrack', '--ctorigdst', diff --git a/scripts/lib/check_rabbitmq_queue.py b/scripts/lib/check_rabbitmq_queue.py index 34068ab3cc..2bae26d2b5 100644 --- a/scripts/lib/check_rabbitmq_queue.py +++ b/scripts/lib/check_rabbitmq_queue.py @@ -77,7 +77,7 @@ def analyze_queue_stats(queue_name: str, stats: Dict[str, Any], # 50). return dict(status=CRITICAL, name=queue_name, - message='queue appears to be stuck, last update %s, queue size %s' % ( + message='queue appears to be stuck, last update {}, queue size {}'.format( stats['update_time'], queue_count_rabbitmqctl)) current_size = stats['current_queue_size'] @@ -108,8 +108,7 @@ def analyze_queue_stats(queue_name: str, stats: Dict[str, Any], return dict(status=status, name=queue_name, - message='clearing the backlog will take too long: %ss, size: %s' % ( - expected_time_to_clear_backlog, current_size)) + message=f'clearing the backlog will take too long: {expected_time_to_clear_backlog}s, size: {current_size}') else: # We slept recently, so treat this as a burst. 
if expected_time_to_clear_backlog > MAX_SECONDS_TO_CLEAR_FOR_BURSTS[queue_name]: @@ -120,8 +119,7 @@ def analyze_queue_stats(queue_name: str, stats: Dict[str, Any], return dict(status=status, name=queue_name, - message='clearing the burst will take too long: %ss, size: %s' % ( - expected_time_to_clear_backlog, current_size)) + message=f'clearing the burst will take too long: {expected_time_to_clear_backlog}s, size: {current_size}') return dict(status=OK, name=queue_name, @@ -139,10 +137,10 @@ def check_other_queues(queue_counts_dict: Dict[str, int]) -> List[Dict[str, Any] if count > CRITICAL_COUNT_THRESHOLD_DEFAULT: results.append(dict(status=CRITICAL, name=queue, - message='count critical: %s' % (count,))) + message=f'count critical: {count}')) elif count > WARN_COUNT_THRESHOLD_DEFAULT: results.append(dict(status=WARNING, name=queue, - message='count warning: %s' % (count,))) + message=f'count warning: {count}')) else: results.append(dict(status=OK, name=queue, message='')) @@ -210,6 +208,6 @@ def check_rabbitmq_queues() -> None: queue_error_template.format(result['name'], states[result['status']], result['message']) for result in results if result['status'] > 0 ]) - print("%s|%s|%s|%s" % (now, status, states[status], error_message)) + print(f"{now}|{status}|{states[status]}|{error_message}") else: - print("%s|%s|%s|queues normal" % (now, status, states[status])) + print(f"{now}|{status}|{states[status]}|queues normal") diff --git a/scripts/lib/sharding.py b/scripts/lib/sharding.py index 694153f638..df68fbfe09 100755 --- a/scripts/lib/sharding.py +++ b/scripts/lib/sharding.py @@ -15,9 +15,9 @@ setup_path() from scripts.lib.zulip_tools import get_config_file def write_realm_nginx_config_line(f: Any, host: str, port: str) -> None: - f.write("""if ($host = '%s') { - set $tornado_server http://tornado%s; -}\n""" % (host, port)) + f.write("""if ($host = '{}') {{ + set $tornado_server http://tornado{}; +}}\n""".format(host, port)) # Basic system to do Tornado sharding. 
Writes two output .tmp files that need # to be renamed to the following files to finalize the changes: @@ -48,7 +48,7 @@ with open('/etc/zulip/nginx_sharding.conf.tmp', 'w') as nginx_sharding_conf_f, \ host = shard else: host = f"{shard}.{external_host}" - assert host not in shard_map, "host %s duplicated" % (host,) + assert host not in shard_map, f"host {host} duplicated" shard_map[host] = int(port) write_realm_nginx_config_line(nginx_sharding_conf_f, host, port) nginx_sharding_conf_f.write('\n') diff --git a/scripts/lib/unpack-zulip b/scripts/lib/unpack-zulip index debde5af7b..4d73e1b257 100755 --- a/scripts/lib/unpack-zulip +++ b/scripts/lib/unpack-zulip @@ -14,7 +14,7 @@ from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, FAIL, ENDC, make_deploy_pat import version if len(sys.argv) != 2: - print(FAIL + "Usage: %s " % (sys.argv[0],) + ENDC) + print(FAIL + f"Usage: {sys.argv[0]} " + ENDC) sys.exit(1) tarball_path = sys.argv[1] diff --git a/scripts/lib/upgrade-zulip b/scripts/lib/upgrade-zulip index 6463c2ddf4..1454b4a0b0 100755 --- a/scripts/lib/upgrade-zulip +++ b/scripts/lib/upgrade-zulip @@ -28,12 +28,12 @@ logging.basicConfig(format="%(asctime)s upgrade-zulip: %(message)s", level=logging.INFO) if len(sys.argv) != 2: - print(FAIL + "Usage: %s " % (sys.argv[0],) + ENDC) + print(FAIL + f"Usage: {sys.argv[0]} " + ENDC) sys.exit(1) tarball_path = sys.argv[1] -error_rerun_script = "%s/current/scripts/upgrade-zulip %s" % (DEPLOYMENTS_DIR, tarball_path) +error_rerun_script = f"{DEPLOYMENTS_DIR}/current/scripts/upgrade-zulip {tarball_path}" get_deployment_lock(error_rerun_script) try: diff --git a/scripts/lib/upgrade-zulip-from-git b/scripts/lib/upgrade-zulip-from-git index 2168fb44ec..406354b14b 100755 --- a/scripts/lib/upgrade-zulip-from-git +++ b/scripts/lib/upgrade-zulip-from-git @@ -42,7 +42,7 @@ if args.remote_url: os.makedirs(DEPLOYMENTS_DIR, exist_ok=True) os.makedirs('/home/zulip/logs', exist_ok=True) -error_rerun_script = "%s/current/scripts/upgrade-zulip-from-git %s" % (DEPLOYMENTS_DIR, refname) +error_rerun_script = f"{DEPLOYMENTS_DIR}/current/scripts/upgrade-zulip-from-git {refname}" get_deployment_lock(error_rerun_script) try: diff --git a/scripts/nagios/check-rabbitmq-consumers b/scripts/nagios/check-rabbitmq-consumers index 79688e41cd..59977446e7 100755 --- a/scripts/nagios/check-rabbitmq-consumers +++ b/scripts/nagios/check-rabbitmq-consumers @@ -81,7 +81,5 @@ for queue_name in consumers.keys(): else: status = 0 with open(state_file_tmp, "w") as f: - f.write("%s|%s|%s|queue %s has %s consumers, needs %s\n" % ( - now, status, states[status], queue_name, - consumers[queue_name], target_count)) + f.write(f"{now}|{status}|{states[status]}|queue {queue_name} has {consumers[queue_name]} consumers, needs {target_count}\n") os.rename(state_file_tmp, state_file_path) diff --git a/scripts/nagios/cron_file_helper.py b/scripts/nagios/cron_file_helper.py index e71e8218f6..f8a7954e3c 100644 --- a/scripts/nagios/cron_file_helper.py +++ b/scripts/nagios/cron_file_helper.py @@ -32,4 +32,4 @@ def nagios_from_file(results_file: str) -> Tuple[int, str]: state = pieces[2] data = pieces[3] - return (ret, "%s: %s" % (state, data)) + return (ret, f"{state}: {data}") diff --git a/scripts/restart-server b/scripts/restart-server index be2306d643..6a0bb789e5 100755 --- a/scripts/restart-server +++ b/scripts/restart-server @@ -66,7 +66,7 @@ if tornado_processes > 1: # supervisord group where if any individual process is slow to # stop, the whole bundle stays stopped for an extended time. 
logging.info("Restarting Tornado process on port %s", p) - subprocess.check_call(["supervisorctl", "restart", "zulip-tornado:port-%s" % (p,)]) + subprocess.check_call(["supervisorctl", "restart", f"zulip-tornado:port-{p}"]) else: logging.info("Restarting Tornado process") subprocess.check_call(["supervisorctl", "restart", "zulip-tornado", "zulip-tornado:*"]) diff --git a/scripts/setup/generate_secrets.py b/scripts/setup/generate_secrets.py index 1e58718dff..8a8fa111c5 100755 --- a/scripts/setup/generate_secrets.py +++ b/scripts/setup/generate_secrets.py @@ -82,7 +82,7 @@ def generate_secrets(development: bool = False) -> None: return name not in current_conf def add_secret(name: str, value: str) -> None: - lines.append("%s = %s\n" % (name, value)) + lines.append(f"{name} = {value}\n") current_conf[name] = value for name in AUTOGENERATED_SETTINGS: @@ -178,7 +178,7 @@ def generate_secrets(development: bool = False) -> None: # the end of the file due to human editing. f.write("\n" + "".join(lines)) - print("Generated new secrets in %s." % (OUTPUT_SETTINGS_FILENAME,)) + print(f"Generated new secrets in {OUTPUT_SETTINGS_FILENAME}.") if __name__ == '__main__': diff --git a/scripts/setup/inline_email_css.py b/scripts/setup/inline_email_css.py index ece74f39a7..58bf6bdc3e 100755 --- a/scripts/setup/inline_email_css.py +++ b/scripts/setup/inline_email_css.py @@ -78,7 +78,7 @@ def strip_unnecesary_tags(text: str) -> str: text = text[start:end] return text else: - raise ValueError("Template does not have %s or %s" % (start_block, end_block)) + raise ValueError(f"Template does not have {start_block} or {end_block}") def get_all_templates_from_directory(directory: str) -> Set[str]: result = set() diff --git a/scripts/zulip-puppet-apply b/scripts/zulip-puppet-apply index 99be427734..672591c44a 100755 --- a/scripts/zulip-puppet-apply +++ b/scripts/zulip-puppet-apply @@ -31,7 +31,7 @@ Exec { path => "/usr/sbin:/usr/bin:/sbin:/bin" } """ for pclass in re.split(r'\s*,\s*', config.get('machine', 'puppet_classes')): - puppet_config += "include %s\n" % (pclass,) + puppet_config += f"include {pclass}\n" # We use the puppet configuration from the same Zulip checkout as this script scripts_path = os.path.join(BASE_DIR, "scripts") diff --git a/static/assets/favicon/generate b/static/assets/favicon/generate index dca92f569b..9ba478fd64 100755 --- a/static/assets/favicon/generate +++ b/static/assets/favicon/generate @@ -7,7 +7,7 @@ import subprocess # Open the SVG and find the number text elements using XPath tree = ET.parse('orig.svg') elems = [tree.getroot().findall( - ".//*[@id='%s']/{http://www.w3.org/2000/svg}tspan" % (name,))[0] + f".//*[@id='{name}']/{{http://www.w3.org/2000/svg}}tspan")[0] for name in ('number_back', 'number_front')] for i in range(1, 100): diff --git a/tools/check-issue-labels b/tools/check-issue-labels index 703c28e537..fea01f1fc7 100755 --- a/tools/check-issue-labels +++ b/tools/check-issue-labels @@ -64,7 +64,7 @@ def check_issue_labels() -> None: if args.force: response = requests.get(next_page_url) else: - response = requests.get(next_page_url, headers={'Authorization': 'token %s' % (token,)}) + response = requests.get(next_page_url, headers={'Authorization': f'token {token}'}) if response.status_code == 401: sys.exit("Error. 
Please check the token.") if response.status_code == 403: diff --git a/tools/create-test-api-docs b/tools/create-test-api-docs index 69b6c473a3..2c313659d2 100755 --- a/tools/create-test-api-docs +++ b/tools/create-test-api-docs @@ -65,11 +65,11 @@ def create_single_page(pattern: str, out_dir: str, href: str, calls: List[Call]) } ''') - f.write('

%s

\n' % (html.escape(pattern),)) + f.write(f'

{html.escape(pattern)}

\n') calls.sort(key=lambda call: call['status_code']) for call in calls: f.write('
') - f.write('\n%s' % (fix_test_name(call['test_name']),)) + f.write('\n{}'.format(fix_test_name(call['test_name']))) f.write('
') try: f.write(call['url']) @@ -77,7 +77,7 @@ def create_single_page(pattern: str, out_dir: str, href: str, calls: List[Call]) f.write(call['url'].encode('utf8')) f.write('
\n') f.write(call['method'] + '
\n') - f.write('status code: %s
\n' % (call['status_code'],)) + f.write('status code: {}
\n'.format(call['status_code'])) f.write('
') f.write('
') @@ -136,13 +136,13 @@ def create_user_docs() -> None: f.write('
    \n') for pattern in sorted(groups[name]): href = pattern.replace('/', '-') + '.html' - link = '%s' % (href, html.escape(pattern)) + link = f'{html.escape(pattern)}' f.write('
  • ' + link + '
  • \n') create_single_page(pattern, out_dir, href, pattern_dict[pattern]) f.write('
') f.write('\n') - print('open %s' % (main_page,)) + print(f'open {main_page}') if __name__ == '__main__': diff --git a/tools/lib/html_branches.py b/tools/lib/html_branches.py index 7dfd379dd5..fbfce1a2ec 100644 --- a/tools/lib/html_branches.py +++ b/tools/lib/html_branches.py @@ -190,8 +190,8 @@ def build_id_dict(templates: List[str]) -> (Dict[str, List[str]]): list_tags = tokenize(text) except FormattedException as e: raise Exception(''' - fn: %s - %s''' % (fn, e)) + fn: {} + {}'''.format(fn, e)) for tag in list_tags: info = get_tag_info(tag) diff --git a/tools/lib/provision_inner.py b/tools/lib/provision_inner.py index 9598603f25..cd71b86c19 100755 --- a/tools/lib/provision_inner.py +++ b/tools/lib/provision_inner.py @@ -276,7 +276,7 @@ def main(options: argparse.Namespace) -> int: destroyed = destroy_leaked_test_databases() if destroyed: - print("Dropped %s stale test databases!" % (destroyed,)) + print(f"Dropped {destroyed} stale test databases!") clean_unused_caches() @@ -304,7 +304,7 @@ def main(options: argparse.Namespace) -> int: pass version_file = os.path.join(UUID_VAR_PATH, 'provision_version') - print('writing to %s\n' % (version_file,)) + print(f'writing to {version_file}\n') open(version_file, 'w').write(PROVISION_VERSION + '\n') print() diff --git a/tools/lib/sanity_check.py b/tools/lib/sanity_check.py index c5bfc9edf7..dac8209fdb 100644 --- a/tools/lib/sanity_check.py +++ b/tools/lib/sanity_check.py @@ -11,7 +11,7 @@ def check_venv(filename: str) -> None: ujson zulip except ImportError: - print("You need to run %s inside a Zulip dev environment." % (filename,)) + print(f"You need to run {filename} inside a Zulip dev environment.") user_id = os.getuid() user_name = pwd.getpwuid(user_id).pw_name if user_name != 'vagrant' and user_name != 'zulipdev': diff --git a/tools/lib/template_parser.py b/tools/lib/template_parser.py index fe07cdbec2..311297a7ea 100644 --- a/tools/lib/template_parser.py +++ b/tools/lib/template_parser.py @@ -213,8 +213,8 @@ def validate(fn: Optional[str] = None, text: Optional[str] = None, check_indent: tokens = tokenize(text) except FormattedException as e: raise TemplateParserException(''' - fn: %s - %s''' % (fn, e)) + fn: {} + {}'''.format(fn, e)) class State: def __init__(self, func: Callable[[Token], None]) -> None: diff --git a/tools/lib/test_server.py b/tools/lib/test_server.py index dfdec9bf37..be325233a8 100644 --- a/tools/lib/test_server.py +++ b/tools/lib/test_server.py @@ -37,7 +37,7 @@ def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[ if server.poll() is not None: message = 'Server died unexpectedly!' 
if log_file: - message += '\nSee %s\n' % (log_file,) + message += f'\nSee {log_file}\n' raise RuntimeError(message) def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool: diff --git a/tools/pretty-print-html b/tools/pretty-print-html index c949c210e2..d3fb88a4b3 100755 --- a/tools/pretty-print-html +++ b/tools/pretty-print-html @@ -5,7 +5,7 @@ import sys def clean_html(filenames: List[str]) -> None: for fn in filenames: - print('Prettifying: %s' % (fn,)) + print(f'Prettifying: {fn}') with open(fn) as f: html = f.read() phtml = pretty_print_html(html) diff --git a/tools/review b/tools/review index 2c98a1aa26..24541b4f0b 100755 --- a/tools/review +++ b/tools/review @@ -31,14 +31,14 @@ def check_git_pristine() -> None: def ensure_on_clean_master() -> None: branch = get_git_branch() if branch != 'master': - exit('You are still on a feature branch: %s' % (branch,)) + exit(f'You are still on a feature branch: {branch}') check_git_pristine() run(['git', 'fetch', 'upstream', 'master']) run(['git', 'rebase', 'upstream/master']) def create_pull_branch(pull_id: int) -> None: run(['git', 'fetch', 'upstream', 'pull/%d/head' % (pull_id,)]) - run(['git', 'checkout', '-B', 'review-%s' % (pull_id,), 'FETCH_HEAD']) + run(['git', 'checkout', '-B', f'review-{pull_id}', 'FETCH_HEAD']) run(['git', 'rebase', 'upstream/master']) run(['git', 'log', 'upstream/master..', '--oneline']) run(['git', 'diff', 'upstream/master..', '--name-status']) diff --git a/tools/run-dev.py b/tools/run-dev.py index 627e0cbf96..1ee92c8475 100755 --- a/tools/run-dev.py +++ b/tools/run-dev.py @@ -98,7 +98,7 @@ if options.test: else: settings_module = "zproject.settings" -manage_args = ['--settings=%s' % (settings_module,)] +manage_args = [f'--settings={settings_module}'] os.environ['DJANGO_SETTINGS_MODULE'] = settings_module sys.path.append(os.path.join(os.path.dirname(__file__), '..')) @@ -158,7 +158,7 @@ def server_processes() -> List[List[str]]: './puppet/zulip/files/postgresql/process_fts_updates', '--quiet'], ['./manage.py', 'deliver_scheduled_messages'], ['/srv/zulip-thumbor-venv/bin/thumbor', '-c', './zthumbor/thumbor.conf', - '-p', '%s' % (thumbor_port,)], + '-p', f'{thumbor_port}'], ] # NORMAL (but slower) operation: diff --git a/tools/setup/emoji/build_emoji b/tools/setup/emoji/build_emoji index 4e78c55b69..23923323bf 100755 --- a/tools/setup/emoji/build_emoji +++ b/tools/setup/emoji/build_emoji @@ -79,7 +79,7 @@ def main() -> None: # anyway, because EMOJI_CACHE_PATH is created by puppet before # build_emoji would be run. 
run_as_root(["mkdir", "-p", EMOJI_CACHE_PATH]) - run_as_root(["chown", "%s:%s" % (os.getuid(), os.getgid()), EMOJI_CACHE_PATH]) + run_as_root(["chown", f"{os.getuid()}:{os.getgid()}", EMOJI_CACHE_PATH]) sha1_hexdigest = generate_sha1sum_emoji(ZULIP_PATH) source_emoji_dump = os.path.join(EMOJI_CACHE_PATH, sha1_hexdigest, 'emoji') @@ -107,7 +107,7 @@ def main() -> None: os.remove(os.path.join(TARGET_EMOJI_STYLES, filename)) def percent(f: float) -> str: - return '%0.3f%%' % (f * 100,) + return f'{f * 100:0.3f}%' def get_square_size(emoji_data: List[Dict[str, Any]]) -> int: """ @@ -207,7 +207,7 @@ def generate_sprite_css_files(cache_path: str, 'pos_y': percent(emoji["sheet_y"] / (n - 1)), } - SPRITE_CSS_PATH = os.path.join(cache_path, '%s-sprite.css' % (emojiset,)) + SPRITE_CSS_PATH = os.path.join(cache_path, f'{emojiset}-sprite.css') with open(SPRITE_CSS_PATH, 'w') as f: f.write(SPRITE_CSS_FILE_TEMPLATE % {'emojiset': emojiset, 'alt_name': alt_name, diff --git a/tools/setup/emoji/import_emoji_names_from_csv b/tools/setup/emoji/import_emoji_names_from_csv index e6d0ed78f6..5cf7d16859 100755 --- a/tools/setup/emoji/import_emoji_names_from_csv +++ b/tools/setup/emoji/import_emoji_names_from_csv @@ -47,12 +47,12 @@ def load_data(data_file: str) -> List[List[str]]: def check_uniqueness(emoji_name: str) -> None: if emoji_name in emoji_names: - raise Exception("Duplicate emoji name: %s" % (emoji_name,)) + raise Exception(f"Duplicate emoji name: {emoji_name}") emoji_names.add(emoji_name) def check_valid_emoji_name(emoji_name: str) -> None: if re.fullmatch("[+-]?[a-z0-9_-]+", emoji_name) is None: - raise Exception("Invalid emoji name: %s" % (emoji_name,)) + raise Exception(f"Invalid emoji name: {emoji_name}") def check_emoji_names(canonical_name: str, aliases: List[str]) -> None: if canonical_name == 'X': diff --git a/tools/test-backend b/tools/test-backend index 9e8bce0574..6f7830849d 100755 --- a/tools/test-backend +++ b/tools/test-backend @@ -314,7 +314,7 @@ def main() -> None: classname = suite.rsplit('.', 1)[0] rewrite_arguments(classname) elif suite[0].isupper(): - rewrite_arguments('class %s(' % (suite,)) + rewrite_arguments(f'class {suite}(') for i, suite in enumerate(args): if suite.startswith('test'): @@ -403,7 +403,7 @@ def main() -> None: # We only check the templates if all the tests ran and passed if not failures and full_suite and templates_not_rendered: missed_count = len(templates_not_rendered) - print("\nError: %s templates have no tests!" % (missed_count,)) + print(f"\nError: {missed_count} templates have no tests!") for template in templates_not_rendered: print(f' {template}') print("See zerver/tests/test_templates.py for the exclude list.") @@ -424,8 +424,8 @@ def main() -> None: for path in enforce_fully_covered: missing_lines = cov.analysis2(path)[3] if len(missing_lines) > 0: - print("ERROR: %s no longer has complete backend test coverage" % (path,)) - print(" Lines missing coverage: %s" % (missing_lines,)) + print(f"ERROR: {path} no longer has complete backend test coverage") + print(f" Lines missing coverage: {missing_lines}") print() failures = True if failures: @@ -439,7 +439,7 @@ def main() -> None: try: missing_lines = cov.analysis2(path)[3] if len(missing_lines) == 0 and path != "zerver/lib/migrate.py": - print("ERROR: %s has complete backend test coverage but is still in not_yet_fully_covered." 
% (path,)) + print(f"ERROR: {path} has complete backend test coverage but is still in not_yet_fully_covered.") ok = False except coverage.misc.NoSource: continue @@ -471,7 +471,7 @@ def main() -> None: removed = remove_test_run_directories() if removed: - print("Removed %s stale test run directories!" % (removed,)) + print(f"Removed {removed} stale test run directories!") # We'll have printed whether tests passed or failed above sys.exit(bool(failures)) diff --git a/tools/test-js-with-node b/tools/test-js-with-node index 2320665135..7d3f0aa3a4 100755 --- a/tools/test-js-with-node +++ b/tools/test-js-with-node @@ -242,7 +242,7 @@ def run_tests_via_node_js() -> int: try: ret = subprocess.check_call(command) except OSError: - print('Bad command: %s' % (command,)) + print(f'Bad command: {command}') raise except subprocess.CalledProcessError: print('\n** Tests failed, PLEASE FIX! **\n') @@ -257,8 +257,8 @@ def check_line_coverage(fn: str, line_coverage: Dict[Any, Any], line_mapping: Di missing_lines.append(str(actual_line["start"]["line"])) if missing_lines: if log: - print_error("%s no longer has complete node test coverage" % (fn,)) - print(" Lines missing coverage: %s" % (", ".join(sorted(missing_lines, key=int)),)) + print_error(f"{fn} no longer has complete node test coverage") + print(" Lines missing coverage: {}".format(", ".join(sorted(missing_lines, key=int)))) print() return False return True @@ -288,7 +288,7 @@ def enforce_proper_coverage(coverage_json: Any) -> bool: path = ROOT_DIR + "/" + relative_path if not (path in coverage_json): coverage_lost = True - print_error("%s has no node test coverage" % (relative_path,)) + print_error(f"{relative_path} has no node test coverage") continue line_coverage = coverage_json[path]['s'] line_mapping = coverage_json[path]['statementMap'] diff --git a/tools/test-js-with-puppeteer b/tools/test-js-with-puppeteer index 9f76e29a21..fc2219b67c 100755 --- a/tools/test-js-with-puppeteer +++ b/tools/test-js-with-puppeteer @@ -58,7 +58,7 @@ def run_tests(files: Iterable[str], external_host: str) -> None: for test_file in test_files: test_name = os.path.basename(test_file) cmd = ["node"] + [test_file] - print("\n\n===================== %s\nRunning %s\n\n" % (test_name, " ".join(map(shlex.quote, cmd))), flush=True) + print("\n\n===================== {}\nRunning {}\n\n".format(test_name, " ".join(map(shlex.quote, cmd))), flush=True) ret = subprocess.call(cmd) if ret != 0: return ret diff --git a/tools/tests/test_html_branches.py b/tools/tests/test_html_branches.py index 45dbafad32..7db3609124 100644 --- a/tools/tests/test_html_branches.py +++ b/tools/tests/test_html_branches.py @@ -108,14 +108,14 @@ class TestHtmlBranches(unittest.TestCase): self.assertEqual(set(template_id_dict.keys()), {'below_navbar', 'hello_{{ message }}', 'intro'}) self.assertEqual(template_id_dict['hello_{{ message }}'], [ - 'Line 12:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH,), - 'Line 12:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH,)]) + f'Line 12:{ZULIP_PATH}/tools/tests/test_template_data/test_template1.html', + f'Line 12:{ZULIP_PATH}/tools/tests/test_template_data/test_template2.html']) self.assertEqual(template_id_dict['intro'], [ - 'Line 10:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH,), - 'Line 11:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH,), - 'Line 11:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH,)]) + f'Line 
10:{ZULIP_PATH}/tools/tests/test_template_data/test_template1.html', + f'Line 11:{ZULIP_PATH}/tools/tests/test_template_data/test_template1.html', + f'Line 11:{ZULIP_PATH}/tools/tests/test_template_data/test_template2.html']) self.assertEqual(template_id_dict['below_navbar'], [ - 'Line 10:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH,)]) + f'Line 10:{ZULIP_PATH}/tools/tests/test_template_data/test_template2.html']) def test_split_for_id_and_class(self) -> None: id1 = "{{ red|blue }}" diff --git a/tools/update-zuliprc-api-field b/tools/update-zuliprc-api-field index dfde686911..bc8c28c88e 100755 --- a/tools/update-zuliprc-api-field +++ b/tools/update-zuliprc-api-field @@ -65,11 +65,11 @@ for zuliprc_path in zuliprc_paths_list: with open(zuliprc_path, 'w+') as w: zuliprc.write(w) result = 'SUCCESS' - reason = 'API field updated for user %s' % (email,) + reason = f'API field updated for user {email}' except OSError: result = 'FAILURE' reason = 'Writing to file unsuccessful' else: result = 'SUCCESS' - reason = 'API key for user %s is already consistent' % (email,) + reason = f'API key for user {email} is already consistent' print(f'{zuliprc_path}: {result}: {reason}') diff --git a/tools/webpack b/tools/webpack index 4e3ce74980..3672cd82ff 100755 --- a/tools/webpack +++ b/tools/webpack @@ -97,9 +97,9 @@ def build_for_most_tests() -> None: with open('tools/webpack.assets.json') as json_data: for entry in json.load(json_data).keys(): entries[entry] = [{ - "name": "%s.js" % (entry,), - "publicPath": "http://localhost:3000/webpack-stub/%s-stubentry.js" % (entry,), - "path": "/stubfolder/%s-stubfile.js" % (entry,) + "name": f"{entry}.js", + "publicPath": f"http://localhost:3000/webpack-stub/{entry}-stubentry.js", + "path": f"/stubfolder/{entry}-stubfile.js" }] stat_data = { "status": "done", diff --git a/tools/zulip-export/zulip-export b/tools/zulip-export/zulip-export index d83b6b592f..3ac431fd34 100755 --- a/tools/zulip-export/zulip-export +++ b/tools/zulip-export/zulip-export @@ -81,7 +81,7 @@ for msg in result['messages']: msg.pop(k, None) messages.append(msg) -filename = "zulip-%s.json" % (options.stream,) +filename = f"zulip-{options.stream}.json" with open(filename, 'wb') as f: f.write(json.dumps(messages, indent=0, sort_keys=False).encode('utf-8')) print("%d messages exported to %s" % (len(messages), filename,)) diff --git a/zerver/context_processors.py b/zerver/context_processors.py index d6ae401bb8..c4798cd3a8 100644 --- a/zerver/context_processors.py +++ b/zerver/context_processors.py @@ -141,7 +141,7 @@ def zulip_default_context(request: HttpRequest) -> Dict[str, Any]: 'landing_page_navbar_message': settings.LANDING_PAGE_NAVBAR_MESSAGE, } - context['OPEN_GRAPH_URL'] = '%s%s' % (realm_uri, request.path) + context['OPEN_GRAPH_URL'] = f'{realm_uri}{request.path}' if realm is not None and realm.icon_source == realm.ICON_UPLOADED: context['OPEN_GRAPH_IMAGE'] = urljoin(realm_uri, realm_icon) @@ -175,7 +175,7 @@ def login_context(request: HttpRequest) -> Dict[str, Any]: no_auth_enabled = True for auth_backend_name in AUTH_BACKEND_NAME_MAP: name_lower = auth_backend_name.lower() - key = "%s_auth_enabled" % (name_lower,) + key = f"{name_lower}_auth_enabled" is_enabled = auth_enabled_helper([auth_backend_name], realm) context[key] = is_enabled if is_enabled: @@ -191,7 +191,7 @@ def login_context(request: HttpRequest) -> Dict[str, Any]: external_authentication_methods = get_external_method_dicts(realm) ) for auth_dict in 
context['page_params']['external_authentication_methods']: - auth_dict['button_id_suffix'] = "auth_button_%s" % (auth_dict['name'],) + auth_dict['button_id_suffix'] = "auth_button_{}".format(auth_dict['name']) return context diff --git a/zerver/data_import/gitter.py b/zerver/data_import/gitter.py index 01e32c8336..1dd10fa8c2 100644 --- a/zerver/data_import/gitter.py +++ b/zerver/data_import/gitter.py @@ -97,7 +97,7 @@ def build_userprofile(timestamp: Any, domain_name: str, def get_user_email(user_data: ZerverFieldsT, domain_name: str) -> str: # TODO Get user email from github - email = ("%s@users.noreply.github.com" % (user_data['username'],)) + email = ("{}@users.noreply.github.com".format(user_data['username'])) return email def build_stream_map(timestamp: Any, @@ -240,14 +240,14 @@ def get_usermentions(message: Dict[str, Any], user_map: Dict[str, int], if 'mentions' in message: for mention in message['mentions']: if mention.get('userId') in user_map: - gitter_mention = '@%s' % (mention['screenName'],) + gitter_mention = '@{}'.format(mention['screenName']) if mention['screenName'] not in user_short_name_to_full_name: logging.info("Mentioned user %s never sent any messages, so has no full name data", mention['screenName']) full_name = mention['screenName'] else: full_name = user_short_name_to_full_name[mention['screenName']] - zulip_mention = ('@**%s**' % (full_name,)) + zulip_mention = (f'@**{full_name}**') message['text'] = message['text'].replace(gitter_mention, zulip_mention) mentioned_user_ids.append(user_map[mention['userId']]) diff --git a/zerver/data_import/import_util.py b/zerver/data_import/import_util.py index a446fa14ea..5ff147ad1b 100644 --- a/zerver/data_import/import_util.py +++ b/zerver/data_import/import_util.py @@ -50,7 +50,7 @@ def build_zerver_realm(realm_id: int, realm_subdomain: str, time: float, other_product: str) -> List[ZerverFieldsT]: realm = Realm(id=realm_id, date_created=time, name=realm_subdomain, string_id=realm_subdomain, - description="Organization imported from %s!" 
% (other_product,)) + description=f"Organization imported from {other_product}!") auth_methods = [[flag[0], flag[1]] for flag in realm.authentication_methods] realm_dict = model_to_dict(realm, exclude='authentication_methods') realm_dict['authentication_methods'] = auth_methods @@ -514,8 +514,8 @@ def process_avatars(avatar_list: List[ZerverFieldsT], avatar_dir: str, realm_id: avatar_url = avatar['path'] avatar_original = dict(avatar) - image_path = '%s.png' % (avatar_hash,) - original_image_path = '%s.original' % (avatar_hash,) + image_path = f'{avatar_hash}.png' + original_image_path = f'{avatar_hash}.original' avatar_upload_list.append([avatar_url, image_path, original_image_path]) # We don't add the size field here in avatar's records.json, diff --git a/zerver/data_import/slack.py b/zerver/data_import/slack.py index f612fa4d0e..47b0948758 100755 --- a/zerver/data_import/slack.py +++ b/zerver/data_import/slack.py @@ -227,7 +227,7 @@ def build_customprofile_field(customprofile_field: List[ZerverFieldsT], fields: if field in slack_custom_fields: field_name = field else: - field_name = "slack custom field %s" % (str(custom_profile_field_id + 1),) + field_name = f"slack custom field {str(custom_profile_field_id + 1)}" customprofilefield = CustomProfileField( id=custom_profile_field_id, name=field_name, @@ -294,10 +294,10 @@ def get_user_email(user: ZerverFieldsT, domain_name: str) -> str: slack_bot_name = user['profile']['first_name'] else: raise AssertionError("Could not identify bot type") - return slack_bot_name.replace("Bot", "").replace(" ", "") + "-bot@%s" % (domain_name,) + return slack_bot_name.replace("Bot", "").replace(" ", "") + f"-bot@{domain_name}" if get_user_full_name(user).lower() == "slackbot": - return "imported-slackbot-bot@%s" % (domain_name,) - raise AssertionError("Could not find email address for Slack user %s" % (user,)) + return f"imported-slackbot-bot@{domain_name}" + raise AssertionError(f"Could not find email address for Slack user {user}") def build_avatar_url(slack_user_id: str, team_id: str, avatar_hash: str) -> str: avatar_url = f"https://ca.slack-edge.com/{team_id}-{slack_user_id}-{avatar_hash}" @@ -742,7 +742,7 @@ def channel_message_to_zerver_message(realm_id: int, # For example "sh_room_created" has the message 'started a call' # which should be displayed as '/me started a call' if subtype in ["bot_add", "sh_room_created", "me_message"]: - content = '/me %s' % (content,) + content = f'/me {content}' if subtype == 'file_comment': # The file_comment message type only indicates the # responsible user in a subfield. 
@@ -864,7 +864,7 @@ def process_message_files(message: ZerverFieldsT, file_name = fileinfo['title'] else: file_name = fileinfo['name'] - markdown_links.append('[%s](%s)' % (file_name, fileinfo['url_private'])) + markdown_links.append('[{}]({})'.format(file_name, fileinfo['url_private'])) content = '\n'.join(markdown_links) @@ -887,8 +887,8 @@ def get_attachment_path_and_content(fileinfo: ZerverFieldsT, realm_id: int) -> T random_name(18), sanitize_name(fileinfo['name']) ]) - attachment_path = '/user_uploads/%s' % (s3_path,) - content = '[%s](%s)' % (fileinfo['title'], attachment_path) + attachment_path = f'/user_uploads/{s3_path}' + content = '[{}]({})'.format(fileinfo['title'], attachment_path) return s3_path, content @@ -1119,7 +1119,7 @@ def get_slack_api_data(slack_api_url: str, get_param: str, **kwargs: Any) -> Any if data.status_code == requests.codes.ok: result = data.json() if not result['ok']: - raise Exception('Error accessing Slack API: %s' % (result['error'],)) + raise Exception('Error accessing Slack API: {}'.format(result['error'])) return result[get_param] raise Exception('HTTP error accessing the Slack API.') diff --git a/zerver/data_import/slack_message_conversion.py b/zerver/data_import/slack_message_conversion.py index e0aefcca5b..0905541568 100644 --- a/zerver/data_import/slack_message_conversion.py +++ b/zerver/data_import/slack_message_conversion.py @@ -85,7 +85,7 @@ def convert_to_zulip_markdown(text: str, users: List[ZerverFieldsT], # Map Slack channel mention: '<#C5Z73A7RA|general>' to '#**general**' for cname, ids in added_channels.items(): cid = ids[0] - text = text.replace('<#%s|%s>' % (cid, cname), '#**' + cname + '**') + text = text.replace(f'<#{cid}|{cname}>', '#**' + cname + '**') tokens = text.split(' ') for iterator in range(len(tokens)): diff --git a/zerver/forms.py b/zerver/forms.py index 617dcea6ca..b70bf414c3 100644 --- a/zerver/forms.py +++ b/zerver/forms.py @@ -57,7 +57,7 @@ def email_is_not_mit_mailing_list(email: str) -> None: username = email.rsplit("@", 1)[0] # Check whether the user exists and can get mail. try: - DNS.dnslookup("%s.pobox.ns.athena.mit.edu" % (username,), DNS.Type.TXT) + DNS.dnslookup(f"{username}.pobox.ns.athena.mit.edu", DNS.Type.TXT) except DNS.Base.ServerError as e: if e.rcode == DNS.Status.NXDOMAIN: raise ValidationError(mark_safe(MIT_VALIDATION_ERROR)) diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py index 9fb9308231..a32462f9c7 100644 --- a/zerver/lib/actions.py +++ b/zerver/lib/actions.py @@ -394,10 +394,7 @@ def process_new_human_user(user_profile: UserProfile, user_profile.realm, get_system_bot(settings.NOTIFICATION_BOT), prereg_user.referred_by, - "%s <`%s`> accepted your invitation to join Zulip!" % ( - user_profile.full_name, - user_profile.email, - ) + f"{user_profile.full_name} <`{user_profile.email}`> accepted your invitation to join Zulip!" 
) # Mark any other PreregistrationUsers that are STATUS_ACTIVE as # inactive so we can keep track of the PreregistrationUser we @@ -598,8 +595,7 @@ def do_set_realm_property(realm: Realm, name: str, value: Any) -> None: """ property_type = Realm.property_types[name] assert isinstance(value, property_type), ( - 'Cannot update %s: %s is not an instance of %s' % ( - name, value, property_type,)) + f'Cannot update {name}: {value} is not an instance of {property_type}') old_value = getattr(realm, name) setattr(realm, name, value) @@ -2359,8 +2355,7 @@ def _internal_prep_message(realm: Realm, return check_message(sender, get_client("Internal"), addressee, content, realm=realm) except JsonableError as e: - logging.exception("Error queueing internal message by %s: %s" % ( - sender.delivery_email, e)) + logging.exception(f"Error queueing internal message by {sender.delivery_email}: {e}") return None @@ -3587,7 +3582,7 @@ def do_change_stream_description(stream: Stream, new_description: str) -> None: def do_create_realm(string_id: str, name: str, emails_restricted_to_domains: Optional[bool]=None) -> Realm: if Realm.objects.filter(string_id=string_id).exists(): - raise AssertionError("Realm %s already exists!" % (string_id,)) + raise AssertionError(f"Realm {string_id} already exists!") if not server_initialized(): logging.info("Server not yet initialized. Creating the internal realm first.") create_internal_realm() @@ -3653,8 +3648,7 @@ def do_change_notification_settings(user_profile: UserProfile, name: str, notification_setting_type = UserProfile.notification_setting_types[name] assert isinstance(value, notification_setting_type), ( - 'Cannot update %s: %s is not an instance of %s' % ( - name, value, notification_setting_type,)) + f'Cannot update {name}: {value} is not an instance of {notification_setting_type}') setattr(user_profile, name, value) @@ -4179,7 +4173,7 @@ def do_update_message_flags(user_profile: UserProfile, if flag == "read" and operation == "add": do_clear_mobile_push_notifications_for_ids(user_profile, messages) - statsd.incr("flags.%s.%s" % (flag, operation), count) + statsd.incr(f"flags.{flag}.{operation}", count) return count class MessageUpdateUserInfoResult(TypedDict): @@ -4199,9 +4193,9 @@ def notify_topic_moved_streams(user_profile: UserProfile, if new_topic is None: new_topic = old_topic - user_mention = "@_**%s|%s**" % (user_profile.full_name, user_profile.id) - old_topic_link = "#**%s>%s**" % (old_stream.name, old_topic) - new_topic_link = "#**%s>%s**" % (new_stream.name, new_topic) + user_mention = f"@_**{user_profile.full_name}|{user_profile.id}**" + old_topic_link = f"#**{old_stream.name}>{old_topic}**" + new_topic_link = f"#**{new_stream.name}>{new_topic}**" if send_notification_to_new_thread: internal_send_stream_message( new_stream.realm, sender, new_stream, new_topic, @@ -4918,7 +4912,7 @@ def do_send_confirmation_email(invitee: PreregistrationUser, activation_url = create_confirmation_link(invitee, referrer.realm.host, Confirmation.INVITATION) context = {'referrer_full_name': referrer.full_name, 'referrer_email': referrer.delivery_email, 'activate_url': activation_url, 'referrer_realm_name': referrer.realm.name} - from_name = "%s (via Zulip)" % (referrer.full_name,) + from_name = f"{referrer.full_name} (via Zulip)" send_email('zerver/emails/invitation', to_emails=[invitee.email], from_name=from_name, from_address=FromAddress.tokenized_no_reply_address(), language=referrer.realm.default_language, context=context) diff --git a/zerver/lib/avatar.py 
b/zerver/lib/avatar.py index 9078fbeca8..469b8e27f8 100644 --- a/zerver/lib/avatar.py +++ b/zerver/lib/avatar.py @@ -92,9 +92,9 @@ def get_gravatar_url(email: str, avatar_version: int, medium: bool=False) -> str def _get_unversioned_gravatar_url(email: str, medium: bool) -> str: if settings.ENABLE_GRAVATAR: - gravitar_query_suffix = "&s=%s" % (MEDIUM_AVATAR_SIZE,) if medium else "" + gravitar_query_suffix = f"&s={MEDIUM_AVATAR_SIZE}" if medium else "" hash_key = gravatar_hash(email) - return "https://secure.gravatar.com/avatar/%s?d=identicon%s" % (hash_key, gravitar_query_suffix) + return f"https://secure.gravatar.com/avatar/{hash_key}?d=identicon{gravitar_query_suffix}" return settings.DEFAULT_AVATAR_URI+'?x=x' def _get_unversioned_avatar_url(user_profile_id: int, diff --git a/zerver/lib/avatar_hash.py b/zerver/lib/avatar_hash.py index ac645c9614..bbd6c10c17 100644 --- a/zerver/lib/avatar_hash.py +++ b/zerver/lib/avatar_hash.py @@ -34,7 +34,7 @@ def user_avatar_path(user_profile: UserProfile) -> str: def user_avatar_path_from_ids(user_profile_id: int, realm_id: int) -> str: user_id_hash = user_avatar_hash(str(user_profile_id)) - return '%s/%s' % (str(realm_id), user_id_hash) + return f'{str(realm_id)}/{user_id_hash}' def user_avatar_content_hash(ldap_avatar: bytes) -> str: return hashlib.sha256(ldap_avatar).hexdigest() diff --git a/zerver/lib/bot_lib.py b/zerver/lib/bot_lib.py index 37532e3890..24532c3f67 100644 --- a/zerver/lib/bot_lib.py +++ b/zerver/lib/bot_lib.py @@ -27,7 +27,7 @@ def get_bot_handler(service_name: str) -> Any: configured_service = embedded_bot_service.name if not configured_service: return None - bot_module_name = 'zulip_bots.bots.%s.%s' % (configured_service, configured_service) + bot_module_name = f'zulip_bots.bots.{configured_service}.{configured_service}' bot_module: Any = importlib.import_module(bot_module_name) return bot_module.handler_class() diff --git a/zerver/lib/bugdown/help_relative_links.py b/zerver/lib/bugdown/help_relative_links.py index 1aa1251e0c..910d908471 100644 --- a/zerver/lib/bugdown/help_relative_links.py +++ b/zerver/lib/bugdown/help_relative_links.py @@ -34,9 +34,9 @@ gear_instructions = """ def gear_handle_match(key: str) -> str: if relative_help_links: - item = '[%s](%s)' % (gear_info[key][0], gear_info[key][1]) + item = f'[{gear_info[key][0]}]({gear_info[key][1]})' else: - item = '**%s**' % (gear_info[key][0],) + item = f'**{gear_info[key][0]}**' return gear_instructions % {'item': item} @@ -54,7 +54,7 @@ stream_instructions_no_link = """ def stream_handle_match(key: str) -> str: if relative_help_links: - return "1. Go to [%s](%s)." % (stream_info[key][0], stream_info[key][1]) + return f"1. Go to [{stream_info[key][0]}]({stream_info[key][1]})." if key == 'all': return stream_instructions_no_link + "\n\n1. Click **All streams** in the upper left." return stream_instructions_no_link diff --git a/zerver/lib/bugdown/help_settings_links.py b/zerver/lib/bugdown/help_settings_links.py index b313909a60..4c7b9e34e3 100644 --- a/zerver/lib/bugdown/help_settings_links.py +++ b/zerver/lib/bugdown/help_settings_links.py @@ -103,9 +103,9 @@ class Setting(Preprocessor): setting_name = link_mapping[setting_identifier][1] setting_link = link_mapping[setting_identifier][2] if relative_settings_links: - return "1. Go to [%s](%s)." % (setting_name, setting_link) + return f"1. Go to [{setting_name}]({setting_link})." 
return settings_markdown % {'setting_type_name': setting_type_name, - 'setting_reference': "**%s**" % (setting_name,)} + 'setting_reference': f"**{setting_name}**"} def makeExtension(*args: Any, **kwargs: Any) -> SettingHelpExtension: return SettingHelpExtension(*args, **kwargs) diff --git a/zerver/lib/cache.py b/zerver/lib/cache.py index 866ab5b617..1a1a22ca09 100644 --- a/zerver/lib/cache.py +++ b/zerver/lib/cache.py @@ -176,7 +176,7 @@ def cache_with_key( metric_key = statsd_key(key) status = "hit" if val is not None else "miss" - statsd.incr("cache%s.%s.%s" % (extra, metric_key, status)) + statsd.incr(f"cache{extra}.{metric_key}.{status}") # Values are singleton tuples so that we can distinguish # a result of None from a missing key. @@ -409,7 +409,7 @@ def generic_bulk_cached_fetch( if cache_keys[object_id] in cached_objects} def preview_url_cache_key(url: str) -> str: - return "preview_url:%s" % (make_safe_digest(url),) + return f"preview_url:{make_safe_digest(url)}" def display_recipient_cache_key(recipient_id: int) -> str: return "display_recipient_dict:%d" % (recipient_id,) @@ -423,22 +423,22 @@ def user_profile_by_email_cache_key(email: str) -> str: # See the comment in zerver/lib/avatar_hash.py:gravatar_hash for why we # are proactively encoding email addresses even though they will # with high likelihood be ASCII-only for the foreseeable future. - return 'user_profile_by_email:%s' % (make_safe_digest(email.strip()),) + return f'user_profile_by_email:{make_safe_digest(email.strip())}' def user_profile_cache_key_id(email: str, realm_id: int) -> str: - return "user_profile:%s:%s" % (make_safe_digest(email.strip()), realm_id,) + return f"user_profile:{make_safe_digest(email.strip())}:{realm_id}" def user_profile_cache_key(email: str, realm: 'Realm') -> str: return user_profile_cache_key_id(email, realm.id) def bot_profile_cache_key(email: str) -> str: - return "bot_profile:%s" % (make_safe_digest(email.strip()),) + return f"bot_profile:{make_safe_digest(email.strip())}" def user_profile_by_id_cache_key(user_profile_id: int) -> str: - return "user_profile_by_id:%s" % (user_profile_id,) + return f"user_profile_by_id:{user_profile_id}" def user_profile_by_api_key_cache_key(api_key: str) -> str: - return "user_profile_by_api_key:%s" % (api_key,) + return f"user_profile_by_api_key:{api_key}" realm_user_dict_fields: List[str] = [ 'id', 'full_name', 'short_name', 'email', @@ -449,16 +449,16 @@ realm_user_dict_fields: List[str] = [ ] def realm_user_dicts_cache_key(realm_id: int) -> str: - return "realm_user_dicts:%s" % (realm_id,) + return f"realm_user_dicts:{realm_id}" def get_realm_used_upload_space_cache_key(realm: 'Realm') -> str: - return 'realm_used_upload_space:%s' % (realm.id,) + return f'realm_used_upload_space:{realm.id}' def active_user_ids_cache_key(realm_id: int) -> str: - return "active_user_ids:%s" % (realm_id,) + return f"active_user_ids:{realm_id}" def active_non_guest_user_ids_cache_key(realm_id: int) -> str: - return "active_non_guest_user_ids:%s" % (realm_id,) + return f"active_non_guest_user_ids:{realm_id}" bot_dict_fields: List[str] = [ 'api_key', @@ -478,11 +478,10 @@ bot_dict_fields: List[str] = [ ] def bot_dicts_in_realm_cache_key(realm: 'Realm') -> str: - return "bot_dicts_in_realm:%s" % (realm.id,) + return f"bot_dicts_in_realm:{realm.id}" def get_stream_cache_key(stream_name: str, realm_id: int) -> str: - return "stream_by_realm_and_name:%s:%s" % ( - realm_id, make_safe_digest(stream_name.strip().lower())) + return 
f"stream_by_realm_and_name:{realm_id}:{make_safe_digest(stream_name.strip().lower())}" def delete_user_profile_caches(user_profiles: Iterable['UserProfile']) -> None: # Imported here to avoid cyclic dependency. @@ -571,16 +570,16 @@ def flush_realm(sender: Any, **kwargs: Any) -> None: cache_delete(realm_text_description_cache_key(realm)) def realm_alert_words_cache_key(realm: 'Realm') -> str: - return "realm_alert_words:%s" % (realm.string_id,) + return f"realm_alert_words:{realm.string_id}" def realm_alert_words_automaton_cache_key(realm: 'Realm') -> str: - return "realm_alert_words_automaton:%s" % (realm.string_id,) + return f"realm_alert_words_automaton:{realm.string_id}" def realm_rendered_description_cache_key(realm: 'Realm') -> str: - return "realm_rendered_description:%s" % (realm.string_id,) + return f"realm_rendered_description:{realm.string_id}" def realm_text_description_cache_key(realm: 'Realm') -> str: - return "realm_text_description:%s" % (realm.string_id,) + return f"realm_text_description:{realm.string_id}" # Called by models.py to flush the stream cache whenever we save a stream # object. @@ -610,7 +609,7 @@ def to_dict_cache_key(message: 'Message', realm_id: Optional[int]=None) -> str: return to_dict_cache_key_id(message.id) def open_graph_description_cache_key(content: Any, request: HttpRequest) -> str: - return 'open_graph_description_path:%s' % (make_safe_digest(request.META['PATH_INFO']),) + return 'open_graph_description_path:{}'.format(make_safe_digest(request.META['PATH_INFO'])) def flush_message(sender: Any, **kwargs: Any) -> None: message = kwargs['instance'] diff --git a/zerver/lib/camo.py b/zerver/lib/camo.py index 0079893ba0..d1c7a66f1c 100644 --- a/zerver/lib/camo.py +++ b/zerver/lib/camo.py @@ -8,7 +8,7 @@ def generate_camo_url(url: str) -> str: encoded_camo_key = settings.CAMO_KEY.encode("utf-8") digest = hmac.new(encoded_camo_key, encoded_url, hashlib.sha1).hexdigest() hex_encoded_url = binascii.b2a_hex(encoded_url) - return "%s/%s" % (digest, hex_encoded_url.decode("utf-8")) + return "{}/{}".format(digest, hex_encoded_url.decode("utf-8")) # Encodes the provided URL using the same algorithm used by the camo # caching https image proxy @@ -16,7 +16,7 @@ def get_camo_url(url: str) -> str: # Only encode the url if Camo is enabled if settings.CAMO_URI == '': return url - return "%s%s" % (settings.CAMO_URI, generate_camo_url(url)) + return f"{settings.CAMO_URI}{generate_camo_url(url)}" def is_camo_url_valid(digest: str, url: str) -> bool: camo_url = generate_camo_url(url) diff --git a/zerver/lib/create_user.py b/zerver/lib/create_user.py index c5e1ad5755..7924db0bbb 100644 --- a/zerver/lib/create_user.py +++ b/zerver/lib/create_user.py @@ -33,7 +33,7 @@ def copy_user_settings(source_profile: UserProfile, target_profile: UserProfile) def get_display_email_address(user_profile: UserProfile, realm: Realm) -> str: if not user_profile.email_address_is_realm_public(): - return "user%s@%s" % (user_profile.id, get_fake_email_domain()) + return f"user{user_profile.id}@{get_fake_email_domain()}" return user_profile.delivery_email def get_role_for_new_user(invited_as: int, realm_creation: bool=False) -> int: diff --git a/zerver/lib/db.py b/zerver/lib/db.py index 038064b86a..2971ac2edc 100644 --- a/zerver/lib/db.py +++ b/zerver/lib/db.py @@ -23,7 +23,7 @@ def wrapper_execute(self: CursorObj, stop = time.time() duration = stop - start self.connection.queries.append({ - 'time': "%.3f" % (duration,), + 'time': f"{duration:.3f}", }) class TimeTrackingCursor(cursor): diff 
--git a/zerver/lib/digest.py b/zerver/lib/digest.py index d4192644f8..f5e15a8c6f 100644 --- a/zerver/lib/digest.py +++ b/zerver/lib/digest.py @@ -146,14 +146,14 @@ def gather_new_streams(user_profile: UserProfile, else: new_streams = [] - base_url = "%s/#narrow/stream/" % (user_profile.realm.uri,) + base_url = f"{user_profile.realm.uri}/#narrow/stream/" streams_html = [] streams_plain = [] for stream in new_streams: narrow_url = base_url + encode_stream(stream.id, stream.name) - stream_link = "<a href='%s'>%s</a>" % (narrow_url, stream.name) + stream_link = f"<a href='{narrow_url}'>{stream.name}</a>" streams_html.append(stream_link) streams_plain.append(stream.name) diff --git a/zerver/lib/email_mirror.py b/zerver/lib/email_mirror.py index d91ed3b310..426529965e 100644 --- a/zerver/lib/email_mirror.py +++ b/zerver/lib/email_mirror.py @@ -69,7 +69,7 @@ def report_to_zulip(error_message: str) -> None: error_bot, error_stream, "email mirror error", - """~~~\n%s\n~~~""" % (error_message,) + f"""~~~\n{error_message}\n~~~""" ) def log_and_report(email_message: EmailMessage, error_message: str, to: Optional[str]) -> None: @@ -155,7 +155,7 @@ def construct_zulip_body(message: EmailMessage, realm: Realm, show_sender: bool= if show_sender: sender = handle_header_content(message.get("From", "")) - body = "From: %s\n%s" % (sender, body) + body = f"From: {sender}\n{body}" return body @@ -279,7 +279,7 @@ def extract_and_upload_attachments(message: EmailMessage, realm: Realm) -> str: attachment, user_profile, target_realm=realm) - formatted_link = "[%s](%s)" % (filename, s3_url) + formatted_link = f"[{filename}]({s3_url})" attachment_links.append(formatted_link) else: logger.warning("Payload is not bytes (invalid attachment %s in message from %s).", diff --git a/zerver/lib/email_mirror_helpers.py b/zerver/lib/email_mirror_helpers.py index dabbcb5503..f79ae02ac8 100644 --- a/zerver/lib/email_mirror_helpers.py +++ b/zerver/lib/email_mirror_helpers.py @@ -60,7 +60,7 @@ def encode_email_address_helper(name: str, email_token: str, show_sender: bool=F # If encoded_name ends up empty, we just skip this part of the address: if encoded_name: - encoded_token = "%s.%s" % (encoded_name, email_token) + encoded_token = f"{encoded_name}.{email_token}" else: encoded_token = email_token diff --git a/zerver/lib/email_notifications.py b/zerver/lib/email_notifications.py index a95ce07453..70bf7f8c50 100644 --- a/zerver/lib/email_notifications.py +++ b/zerver/lib/email_notifications.py @@ -93,16 +93,9 @@ def fix_emojis(content: str, base_url: str, emojiset: str) -> str: emoji_code = match.group('emoji_code') emoji_name = emoji_span_elem.get('title') alt_code = emoji_span_elem.text - image_url = base_url + '/static/generated/emoji/images-%(emojiset)s-64/%(emoji_code)s.png' % { - 'emojiset': emojiset, - 'emoji_code': emoji_code - } + image_url = base_url + f'/static/generated/emoji/images-{emojiset}-64/{emoji_code}.png' img_elem = lxml.html.fromstring( - '<img alt="%(alt_code)s" src="%(image_url)s" title="%(title)s">' % { - 'alt_code': alt_code, - 'image_url': image_url, - 'title': emoji_name, - }) + f'<img alt="{alt_code}" src="{image_url}" title="{emoji_name}">') img_elem.set('style', 'height: 20px;') img_elem.tail = emoji_span_elem.tail return img_elem @@ -179,8 +172,8 @@ def build_message_list(user_profile: UserProfile, messages: List[Message]) -> Li def message_header(user_profile: UserProfile, message: Message) -> Dict[str, Any]: if message.recipient.type == Recipient.PERSONAL: narrow_link = get_narrow_url(user_profile, message) - header = "You and %s" % (message.sender.full_name,) - header_html = "<a href='%s'>%s</a>" % (narrow_link, header) + header = f"You and 
{message.sender.full_name}" + header_html = f"<a href='{narrow_link}'>{header}</a>" elif message.recipient.type == Recipient.HUDDLE: display_recipient = get_display_recipient(message.recipient) assert not isinstance(display_recipient, str) @@ -188,15 +181,14 @@ def build_message_list(user_profile: UserProfile, messages: List[Message]) -> Li display_recipient=display_recipient) other_recipients = [r['full_name'] for r in display_recipient if r['id'] != user_profile.id] - header = "You and %s" % (", ".join(other_recipients),) - header_html = "<a href='%s'>%s</a>" % (narrow_link, header) + header = "You and {}".format(", ".join(other_recipients)) + header_html = f"<a href='{narrow_link}'>{header}</a>" else: stream = Stream.objects.only('id', 'name').get(id=message.recipient.type_id) narrow_link = get_narrow_url(user_profile, message, stream=stream) - header = "%s > %s" % (stream.name, message.topic_name()) + header = f"{stream.name} > {message.topic_name()}" stream_link = stream_narrow_url(user_profile.realm, stream) - header_html = "<a href='%s'>%s</a> > <a href='%s'>%s</a>" % ( - stream_link, stream.name, narrow_link, message.topic_name()) + header_html = f"<a href='{stream_link}'>{stream.name}</a> > <a href='{narrow_link}'>{message.topic_name()}</a>" return {"plain": header, "html": header_html, "stream_message": message.recipient.type_name() == "stream"} @@ -371,11 +363,10 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile, huddle_display_name = " and ".join(other_recipients) context.update({'huddle_display_name': huddle_display_name}) elif len(other_recipients) == 3: - huddle_display_name = "%s, %s, and %s" % ( - other_recipients[0], other_recipients[1], other_recipients[2]) + huddle_display_name = f"{other_recipients[0]}, {other_recipients[1]}, and {other_recipients[2]}" context.update({'huddle_display_name': huddle_display_name}) else: - huddle_display_name = "%s, and %s others" % ( + huddle_display_name = "{}, and {} others".format( ', '.join(other_recipients[:2]), len(other_recipients) - 2) context.update({'huddle_display_name': huddle_display_name}) elif (missed_messages[0]['message'].recipient.type == Recipient.PERSONAL): @@ -388,7 +379,7 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile, m['trigger'] == 'wildcard_mentioned'}) message = missed_messages[0]['message'] stream = Stream.objects.only('id', 'name').get(id=message.recipient.type_id) - stream_header = "%s > %s" % (stream.name, message.topic_name()) + stream_header = f"{stream.name} > {message.topic_name()}" context.update({ 'stream_header': stream_header, }) diff --git a/zerver/lib/email_validation.py b/zerver/lib/email_validation.py index a50faf2d89..d67398fd98 100644 --- a/zerver/lib/email_validation.py +++ b/zerver/lib/email_validation.py @@ -112,7 +112,7 @@ def validate_email_is_valid( return None def email_reserved_for_system_bots_error(email: str) -> str: - return '%s is reserved for system bots' % (email,) + return f'{email} is reserved for system bots' def get_existing_user_errors( target_realm: Realm, diff --git a/zerver/lib/error_notify.py b/zerver/lib/error_notify.py index 1e29cda2cd..b416046f40 100644 --- a/zerver/lib/error_notify.py +++ b/zerver/lib/error_notify.py @@ -34,7 +34,7 @@ def user_info_str(report: Dict[str, Any]) -> str: def deployment_repr(report: Dict[str, Any]) -> str: deployment = 'Deployed code:\n' for field, val in report['deployment_data'].items(): - deployment += '- %s: %s\n' % (field, val) + deployment += f'- {field}: {val}\n' return deployment def notify_browser_error(report: Dict[str, Any]) -> None: 
email_browser_error(report) def email_browser_error(report: Dict[str, Any]) -> None: - email_subject = "Browser error for %s" % (user_info_str(report),) + email_subject = f"Browser error for {user_info_str(report)}" body = ("User: %(user_full_name)s <%(user_email)s> on %(deployment)s\n\n" "Message:\n%(message)s\n\nStacktrace:\n%(stacktrace)s\n\n" @@ -59,18 +59,18 @@ def email_browser_error(report: Dict[str, Any]) -> None: if more_info is not None: body += "\nAdditional information:" for (key, value) in more_info.items(): - body += "\n %s: %s" % (key, value) + body += f"\n {key}: {value}" - body += "\n\nLog:\n%s" % (report['log'],) + body += "\n\nLog:\n{}".format(report['log']) mail_admins(email_subject, body) def zulip_browser_error(report: Dict[str, Any]) -> None: - email_subject = "JS error: %s" % (report['user_email'],) + email_subject = "JS error: {}".format(report['user_email']) user_info = user_info_str(report) - body = "User: %s\n" % (user_info,) + body = f"User: {user_info}\n" body += ("Message: %(message)s\n" % dict(report)) @@ -108,7 +108,7 @@ def zulip_server_error(report: Dict[str, Any]) -> None: val = report.get(field.lower()) if field == "QUERY_STRING": val = clean_data_from_query_parameters(str(val)) - request_repr += "- %s: \"%s\"\n" % (field, val) + request_repr += f"- {field}: \"{val}\"\n" request_repr += "~~~~" else: request_repr = "Request info: none" @@ -144,7 +144,7 @@ def email_server_error(report: Dict[str, Any]) -> None: val = report.get(field.lower()) if field == "QUERY_STRING": val = clean_data_from_query_parameters(str(val)) - request_repr += "- %s: \"%s\"\n" % (field, val) + request_repr += f"- {field}: \"{val}\"\n" else: request_repr = "Request info: none\n" diff --git a/zerver/lib/events.py b/zerver/lib/events.py index 225e819863..e71a0595de 100644 --- a/zerver/lib/events.py +++ b/zerver/lib/events.py @@ -818,7 +818,7 @@ def apply_event(state: Dict[str, Any], elif event['type'] == 'has_zoom_token': state['has_zoom_token'] = event['value'] else: - raise AssertionError("Unexpected event type %s" % (event['type'],)) + raise AssertionError("Unexpected event type {}".format(event['type'])) def do_events_register(user_profile: UserProfile, user_client: Client, apply_markdown: bool = True, diff --git a/zerver/lib/export.py b/zerver/lib/export.py index b35b61e49f..163976bbe4 100644 --- a/zerver/lib/export.py +++ b/zerver/lib/export.py @@ -420,13 +420,13 @@ class Config: using id_source.''') if self.id_source[0] != self.virtual_parent.table: raise AssertionError(''' - Configuration error. To populate %s, you - want data from %s, but that differs from - the table name of your virtual parent (%s), + Configuration error. To populate {}, you + want data from {}, but that differs from + the table name of your virtual parent ({}), which suggests you many not have set up the ordering correctly. 
You may simply need to assign a virtual_parent, or there - may be deeper issues going on.''' % ( + may be deeper issues going on.'''.format( self.table, self.id_source[0], self.virtual_parent.table)) @@ -466,7 +466,7 @@ def export_from_config(response: TableData, config: Config, seed_object: Optiona if config.custom_tables: for t in config.custom_tables: if t not in response: - raise AssertionError('Custom fetch failed to populate %s' % (t,)) + raise AssertionError(f'Custom fetch failed to populate {t}') elif config.concat_and_destroy: # When we concat_and_destroy, we are working with @@ -1177,16 +1177,16 @@ def _check_key_metadata(email_gateway_bot: Optional[UserProfile], # Helper function for export_files_from_s3 if 'realm_id' in key.metadata and key.metadata['realm_id'] != str(realm.id): if email_gateway_bot is None or key.metadata['user_profile_id'] != str(email_gateway_bot.id): - raise AssertionError("Key metadata problem: %s %s / %s" % (key.name, key.metadata, realm.id)) + raise AssertionError(f"Key metadata problem: {key.name} {key.metadata} / {realm.id}") # Email gateway bot sends messages, potentially including attachments, cross-realm. - print("File uploaded by email gateway bot: %s / %s" % (key.key, key.metadata)) + print(f"File uploaded by email gateway bot: {key.key} / {key.metadata}") elif processing_avatars: if 'user_profile_id' not in key.metadata: - raise AssertionError("Missing user_profile_id in key metadata: %s" % (key.metadata,)) + raise AssertionError(f"Missing user_profile_id in key metadata: {key.metadata}") if int(key.metadata['user_profile_id']) not in user_ids: - raise AssertionError("Wrong user_profile_id in key metadata: %s" % (key.metadata,)) + raise AssertionError(f"Wrong user_profile_id in key metadata: {key.metadata}") elif 'realm_id' not in key.metadata: - raise AssertionError("Missing realm_id in key metadata: %s" % (key.metadata,)) + raise AssertionError(f"Missing realm_id in key metadata: {key.metadata}") def _get_exported_s3_record( bucket_name: str, @@ -1233,11 +1233,11 @@ def _save_s3_object_to_file(key: ServiceResource, output_dir: str, processing_av else: fields = key.key.split('/') if len(fields) != 3: - raise AssertionError("Suspicious key with invalid format %s" % (key.key,)) + raise AssertionError(f"Suspicious key with invalid format {key.key}") filename = os.path.join(output_dir, key.key) if "../" in filename: - raise AssertionError("Suspicious file with invalid format %s" % (filename,)) + raise AssertionError(f"Suspicious file with invalid format {filename}") dirname = os.path.dirname(filename) if not os.path.exists(dirname): @@ -1264,11 +1264,11 @@ def export_files_from_s3(realm: Realm, bucket_name: str, output_dir: Path, user_ids.add(user_profile.id) if processing_realm_icon_and_logo: - object_prefix = "%s/realm/" % (realm.id,) + object_prefix = f"{realm.id}/realm/" elif processing_emoji: - object_prefix = "%s/emoji/images/" % (realm.id,) + object_prefix = f"{realm.id}/emoji/images/" else: - object_prefix = "%s/" % (realm.id,) + object_prefix = f"{realm.id}/" if settings.EMAIL_GATEWAY_BOT is not None: email_gateway_bot: Optional[UserProfile] = get_system_bot(settings.EMAIL_GATEWAY_BOT) @@ -1576,7 +1576,7 @@ def launch_user_message_subprocesses(threads: int, output_dir: Path, while pids: pid, status = os.wait() shard = pids.pop(pid) - print('Shard %s finished, status %s' % (shard, status)) + print(f'Shard {shard} finished, status {status}') def do_export_user(user_profile: UserProfile, output_dir: Path) -> None: response: TableData = {} @@ 
-1741,8 +1741,8 @@ def export_realm_wrapper(realm: Realm, output_dir: str, tarball_path = do_export_realm(realm=realm, output_dir=output_dir, threads=threads, public_only=public_only, consent_message_id=consent_message_id) - print("Finished exporting to %s" % (output_dir,)) - print("Tarball written to %s" % (tarball_path,)) + print(f"Finished exporting to {output_dir}") + print(f"Tarball written to {tarball_path}") if not upload: return None @@ -1753,11 +1753,11 @@ def export_realm_wrapper(realm: Realm, output_dir: str, print("Uploading export tarball...") public_url = zerver.lib.upload.upload_backend.upload_export_tarball(realm, tarball_path) print() - print("Uploaded to %s" % (public_url,)) + print(f"Uploaded to {public_url}") if delete_after_upload: os.remove(tarball_path) - print("Successfully deleted the tarball at %s" % (tarball_path,)) + print(f"Successfully deleted the tarball at {tarball_path}") return public_url def get_realm_exports_serialized(user: UserProfile) -> List[Dict[str, Any]]: diff --git a/zerver/lib/html_diff.py b/zerver/lib/html_diff.py index 9f36c50891..16686a69e4 100644 --- a/zerver/lib/html_diff.py +++ b/zerver/lib/html_diff.py @@ -4,7 +4,7 @@ from lxml.html.diff import htmldiff from typing import Optional def highlight_with_class(text: str, klass: str) -> str: - return '<span class="%s">%s</span>' % (klass, text) + return f'<span class="{klass}">{text}</span>' def highlight_html_differences(s1: str, s2: str, msg_id: Optional[int]=None) -> str: retval = htmldiff(s1, s2) diff --git a/zerver/lib/import_realm.py b/zerver/lib/import_realm.py index 8da3306877..6348aea44c 100644 --- a/zerver/lib/import_realm.py +++ b/zerver/lib/import_realm.py @@ -101,10 +101,10 @@ path_maps: Dict[str, Dict[str, str]] = { def update_id_map(table: TableName, old_id: int, new_id: int) -> None: if table not in ID_MAP: raise Exception(''' - Table %s is not initialized in ID_MAP, which could + Table {} is not initialized in ID_MAP, which could mean that we have not thought through circular dependencies. - ''' % (table,)) + '''.format(table)) ID_MAP[table][old_id] = new_id def fix_datetime_fields(data: TableData, table: TableName) -> None: diff --git a/zerver/lib/logging_util.py b/zerver/lib/logging_util.py index 40d621857c..23ddb43e21 100644 --- a/zerver/lib/logging_util.py +++ b/zerver/lib/logging_util.py @@ -70,7 +70,7 @@ class _RateLimitFilter: try: # Track duplicate errors duplicate = False - rate = getattr(settings, '%s_LIMIT' % (self.__class__.__name__.upper(),), + rate = getattr(settings, f'{self.__class__.__name__.upper()}_LIMIT', 600) # seconds if rate > 0: diff --git a/zerver/lib/management.py b/zerver/lib/management.py index 7a7001b094..f7c90fa9f5 100644 --- a/zerver/lib/management.py +++ b/zerver/lib/management.py @@ -28,7 +28,7 @@ def check_config() -> None: except AttributeError: pass - raise CommandError("Error: You must set %s in /etc/zulip/settings.py." % (setting_name,)) + raise CommandError(f"Error: You must set {setting_name} in /etc/zulip/settings.py.") def sleep_forever() -> None: while True: # nocoverage @@ -127,7 +127,7 @@ You can use the command list_realms to find ID of the realms in this server.""" return UserProfile.objects.select_related().get( delivery_email__iexact=email.strip(), realm=realm) except UserProfile.DoesNotExist: - raise CommandError("The realm '%s' does not contain a user with email '%s'" % (realm, email)) + raise CommandError(f"The realm '{realm}' does not contain a user with email '{email}'") # Realm is None in the remaining code path. 
Here, we # optimistically try to see if there is exactly one user with @@ -139,7 +139,7 @@ You can use the command list_realms to find ID of the realms in this server.""" "(in different realms); please pass `--realm` " "to specify which one to modify.") except UserProfile.DoesNotExist: - raise CommandError("This Zulip server does not contain a user with email '%s'" % (email,)) + raise CommandError(f"This Zulip server does not contain a user with email '{email}'") def get_client(self) -> Client: """Returns a Zulip Client object to be used for things done in management commands""" diff --git a/zerver/lib/message.py b/zerver/lib/message.py index 3c2eff8552..b944684710 100644 --- a/zerver/lib/message.py +++ b/zerver/lib/message.py @@ -519,7 +519,7 @@ class MessageDict: elif recip['email'] > display_recipient[0]['email']: display_recipient = [display_recipient[0], recip] else: - raise AssertionError("Invalid recipient type %s" % (recipient_type,)) + raise AssertionError(f"Invalid recipient type {recipient_type}") obj['display_recipient'] = display_recipient obj['type'] = display_type @@ -1019,7 +1019,7 @@ def apply_unread_message_event(user_profile: UserProfile, else: message_type = 'huddle' else: - raise AssertionError("Invalid message type %s" % (message['type'],)) + raise AssertionError("Invalid message type {}".format(message['type'])) sender_id = message['sender_id'] diff --git a/zerver/lib/migrate.py b/zerver/lib/migrate.py index be0830a888..3fa2bd3167 100644 --- a/zerver/lib/migrate.py +++ b/zerver/lib/migrate.py @@ -28,11 +28,11 @@ def do_batch_update(cursor: CursorObj, if min_id is None: return - print("\n Range of rows to update: [%s, %s]" % (min_id, max_id)) + print(f"\n Range of rows to update: [{min_id}, {max_id}]") while min_id <= max_id: lower = min_id upper = min_id + batch_size - print(' Updating range [%s,%s)' % (lower, upper)) + print(f' Updating range [{lower},{upper})') cursor.execute(stmt, [lower, upper]) min_id = upper diff --git a/zerver/lib/mobile_auth_otp.py b/zerver/lib/mobile_auth_otp.py index f04a54ad57..83df1fef71 100644 --- a/zerver/lib/mobile_auth_otp.py +++ b/zerver/lib/mobile_auth_otp.py @@ -15,7 +15,7 @@ def xor_hex_strings(bytes_a: str, bytes_b: str) -> str: """Given two hex strings of equal length, return a hex string with the bitwise xor of the two hex strings.""" assert len(bytes_a) == len(bytes_b) - return ''.join(["%x" % (int(x, 16) ^ int(y, 16),) + return ''.join([f"{int(x, 16) ^ int(y, 16):x}" for x, y in zip(bytes_a, bytes_b)]) def ascii_to_hex(input_string: str) -> str: diff --git a/zerver/lib/outgoing_webhook.py b/zerver/lib/outgoing_webhook.py index 099c3457d9..cc5c56b173 100644 --- a/zerver/lib/outgoing_webhook.py +++ b/zerver/lib/outgoing_webhook.py @@ -223,11 +223,11 @@ def notify_bot_owner(event: Dict[str, Any], bot_id = event['user_profile_id'] bot_owner = get_user_profile_by_id(bot_id).bot_owner - notification_message = "[A message](%s) triggered an outgoing webhook." % (message_url,) + notification_message = f"[A message]({message_url}) triggered an outgoing webhook." if failure_message: notification_message += "\n" + failure_message if status_code: - notification_message += "\nThe webhook got a response with status code *%s*." % (status_code,) + notification_message += f"\nThe webhook got a response with status code *{status_code}*." 
if response_content: notification_message += "\nThe response contains the following payload:\n" \ "```\n%s\n```" % (str(response_content),) @@ -326,6 +326,6 @@ def do_rest_call(base_url: str, response_message = ("An exception of type *%s* occurred for message `%s`! " "See the Zulip server logs for more information." % ( type(e).__name__, event["command"],)) - logging.exception("Outhook trigger failed:\n %s" % (e,)) + logging.exception(f"Outhook trigger failed:\n {e}") fail_with_message(event, response_message) notify_bot_owner(event, exception=e) diff --git a/zerver/lib/push_notifications.py b/zerver/lib/push_notifications.py index a6aa895d3d..5abbdd370a 100644 --- a/zerver/lib/push_notifications.py +++ b/zerver/lib/push_notifications.py @@ -468,14 +468,14 @@ def get_gcm_alert(message: Message) -> str: """ sender_str = message.sender.full_name if message.recipient.type == Recipient.HUDDLE and message.trigger == 'private_message': - return "New private group message from %s" % (sender_str,) + return f"New private group message from {sender_str}" elif message.recipient.type == Recipient.PERSONAL and message.trigger == 'private_message': - return "New private message from %s" % (sender_str,) + return f"New private message from {sender_str}" elif message.is_stream_message() and (message.trigger == 'mentioned' or message.trigger == 'wildcard_mentioned'): - return "New mention from %s" % (sender_str,) + return f"New mention from {sender_str}" else: # message.is_stream_message() and message.trigger == 'stream_push_notify' - return "New stream message from %s in %s" % (sender_str, get_display_recipient(message.recipient),) + return f"New stream message from {sender_str} in {get_display_recipient(message.recipient)}" def get_mobile_push_content(rendered_content: str) -> str: def get_text(elem: lxml.html.HtmlElement) -> str: @@ -586,7 +586,7 @@ def get_apns_alert_title(message: Message) -> str: assert isinstance(recipients, list) return ', '.join(sorted(r['full_name'] for r in recipients)) elif message.is_stream_message(): - return "#%s > %s" % (get_display_recipient(message.recipient), message.topic_name(),) + return f"#{get_display_recipient(message.recipient)} > {message.topic_name()}" # For personal PMs, we just show the sender name. 
return message.sender.full_name diff --git a/zerver/lib/queue.py b/zerver/lib/queue.py index e827320ac0..176a06ac02 100644 --- a/zerver/lib/queue.py +++ b/zerver/lib/queue.py @@ -38,7 +38,7 @@ class SimpleQueueClient: start = time.time() self.connection = pika.BlockingConnection(self._get_parameters()) self.channel = self.connection.channel() - self.log.info('SimpleQueueClient connected (connecting took %.3fs)' % (time.time() - start,)) + self.log.info(f'SimpleQueueClient connected (connecting took {time.time() - start:.3f}s)') def _reconnect(self) -> None: self.connection = None @@ -76,10 +76,10 @@ class SimpleQueueClient: credentials=credentials) def _generate_ctag(self, queue_name: str) -> str: - return "%s_%s" % (queue_name, str(random.getrandbits(16))) + return f"{queue_name}_{str(random.getrandbits(16))}" def _reconnect_consumer_callback(self, queue: str, consumer: Consumer) -> None: - self.log.info("Queue reconnecting saved consumer %s to queue %s" % (consumer, queue)) + self.log.info(f"Queue reconnecting saved consumer {consumer} to queue {queue}") self.ensure_queue(queue, lambda: self.channel.basic_consume(queue, consumer, consumer_tag=self._generate_ctag(queue))) @@ -115,7 +115,7 @@ class SimpleQueueClient: properties=pika.BasicProperties(delivery_mode=2), body=body) - statsd.incr("rabbitmq.publish.%s" % (queue_name,)) + statsd.incr(f"rabbitmq.publish.{queue_name}") self.ensure_queue(queue_name, do_publish) diff --git a/zerver/lib/rate_limiter.py b/zerver/lib/rate_limiter.py index e980e46958..447f67868b 100644 --- a/zerver/lib/rate_limiter.py +++ b/zerver/lib/rate_limiter.py @@ -286,7 +286,7 @@ class TornadoInMemoryRateLimiterBackend(RateLimiterBackend): ratelimited, time_till_free = cls.need_to_limit(entity_key, time_window, max_count) if ratelimited: - statsd.incr("ratelimiter.limited.%s" % (entity_key,)) + statsd.incr(f"ratelimiter.limited.{entity_key}") break return ratelimited, time_till_free @@ -450,7 +450,7 @@ class RedisRateLimiterBackend(RateLimiterBackend): ratelimited, time = cls.is_ratelimited(entity_key, rules) if ratelimited: - statsd.incr("ratelimiter.limited.%s" % (entity_key,)) + statsd.incr(f"ratelimiter.limited.{entity_key}") else: try: diff --git a/zerver/lib/realm_icon.py b/zerver/lib/realm_icon.py index b925247e4d..81a9eeb1d1 100644 --- a/zerver/lib/realm_icon.py +++ b/zerver/lib/realm_icon.py @@ -12,6 +12,6 @@ def get_realm_icon_url(realm: Realm) -> str: return upload_backend.get_realm_icon_url(realm.id, realm.icon_version) elif settings.ENABLE_GRAVATAR: hash_key = gravatar_hash(realm.string_id) - return "https://secure.gravatar.com/avatar/%s?d=identicon" % (hash_key,) + return f"https://secure.gravatar.com/avatar/{hash_key}?d=identicon" else: return settings.DEFAULT_AVATAR_URI+'?version=0' diff --git a/zerver/lib/redis_utils.py b/zerver/lib/redis_utils.py index e5d6dc718c..27adf96e1d 100644 --- a/zerver/lib/redis_utils.py +++ b/zerver/lib/redis_utils.py @@ -63,4 +63,4 @@ def validate_key_fits_format(key: str, key_format: str) -> None: regex = key_format.format(token=r"[a-zA-Z0-9]+") if not re.fullmatch(regex, key): - raise ZulipRedisKeyOfWrongFormatError("%s does not match format %s" % (key, key_format)) + raise ZulipRedisKeyOfWrongFormatError(f"{key} does not match format {key_format}") diff --git a/zerver/lib/remote_server.py b/zerver/lib/remote_server.py index 8ab76f29c9..892c249b79 100644 --- a/zerver/lib/remote_server.py +++ b/zerver/lib/remote_server.py @@ -45,7 +45,7 @@ def send_to_push_bouncer(method: str, api_auth = 
requests.auth.HTTPBasicAuth(settings.ZULIP_ORG_ID, settings.ZULIP_ORG_KEY) - headers = {"User-agent": "ZulipServer/%s" % (ZULIP_VERSION,)} + headers = {"User-agent": f"ZulipServer/{ZULIP_VERSION}"} if extra_headers is not None: headers.update(extra_headers) @@ -88,7 +88,7 @@ def send_to_push_bouncer(method: str, # this version of Zulip, so we throw an exception that will # email the server admins. raise PushNotificationBouncerException( - "Push notification bouncer returned unexpected status code %s" % (res.status_code,)) + f"Push notification bouncer returned unexpected status code {res.status_code}") # If we don't throw an exception, it's a successful bounce! return ujson.loads(res.content) diff --git a/zerver/lib/response.py b/zerver/lib/response.py index c499bb81d7..52e1576401 100644 --- a/zerver/lib/response.py +++ b/zerver/lib/response.py @@ -11,9 +11,9 @@ class HttpResponseUnauthorized(HttpResponse): def __init__(self, realm: str, www_authenticate: Optional[str]=None) -> None: HttpResponse.__init__(self) if www_authenticate is None: - self["WWW-Authenticate"] = 'Basic realm="%s"' % (realm,) + self["WWW-Authenticate"] = f'Basic realm="{realm}"' elif www_authenticate == "session": - self["WWW-Authenticate"] = 'Session realm="%s"' % (realm,) + self["WWW-Authenticate"] = f'Session realm="{realm}"' else: raise AssertionError("Invalid www_authenticate value!") diff --git a/zerver/lib/rest.py b/zerver/lib/rest.py index 37b36264ab..3954b4aa05 100644 --- a/zerver/lib/rest.py +++ b/zerver/lib/rest.py @@ -144,7 +144,7 @@ def rest_dispatch(request: HttpRequest, **kwargs: Any) -> HttpResponse: # browser, send the user to the login page if 'text/html' in request.META.get('HTTP_ACCEPT', ''): # TODO: It seems like the `?next=` part is unlikely to be helpful - return HttpResponseRedirect('%s?next=%s' % (settings.HOME_NOT_LOGGED_IN, request.path)) + return HttpResponseRedirect(f'{settings.HOME_NOT_LOGGED_IN}?next={request.path}') # Ask for basic auth (email:apiKey) elif request.path.startswith("/api"): return json_unauthorized() diff --git a/zerver/lib/send_email.py b/zerver/lib/send_email.py index e6cf45fb39..3642935efc 100644 --- a/zerver/lib/send_email.py +++ b/zerver/lib/send_email.py @@ -265,12 +265,12 @@ def send_custom_email(users: List[UserProfile], options: Dict[str, Any]) -> None parsed_email_template = Parser(policy=default).parsestr(text) email_template_hash = hashlib.sha256(text.encode('utf-8')).hexdigest()[0:32] - email_filename = "custom/custom_email_%s.source.html" % (email_template_hash,) - email_id = "zerver/emails/custom/custom_email_%s" % (email_template_hash,) + email_filename = f"custom/custom_email_{email_template_hash}.source.html" + email_id = f"zerver/emails/custom/custom_email_{email_template_hash}" markdown_email_base_template_path = "templates/zerver/emails/custom_email_base.pre.html" - html_source_template_path = "templates/%s.source.html" % (email_id,) - plain_text_template_path = "templates/%s.txt" % (email_id,) - subject_path = "templates/%s.subject.txt" % (email_id,) + html_source_template_path = f"templates/{email_id}.source.html" + plain_text_template_path = f"templates/{email_id}.txt" + subject_path = f"templates/{email_id}.subject.txt" os.makedirs(os.path.dirname(html_source_template_path), exist_ok=True) # First, we render the markdown input file just like our diff --git a/zerver/lib/soft_deactivation.py b/zerver/lib/soft_deactivation.py index b6611917e9..ef94fffb75 100644 --- a/zerver/lib/soft_deactivation.py +++ b/zerver/lib/soft_deactivation.py @@ -74,7 +74,7 
@@ def filter_by_subscription_history(user_profile: UserProfile, if stream_messages[-1]['id'] <= log_entry.event_last_message_id: stream_messages = [] else: - raise AssertionError('%s is not a Subscription Event.' % (log_entry.event_type,)) + raise AssertionError(f'{log_entry.event_type} is not a Subscription Event.') if len(stream_messages) > 0: # We do this check for last event since if the last subscription diff --git a/zerver/lib/subdomains.py b/zerver/lib/subdomains.py index 476f0c8523..77800dc1aa 100644 --- a/zerver/lib/subdomains.py +++ b/zerver/lib/subdomains.py @@ -23,7 +23,7 @@ def get_subdomain(request: HttpRequest) -> str: return get_subdomain_from_hostname(host) def get_subdomain_from_hostname(host: str) -> str: - m = re.search(r'\.%s(:\d+)?$' % (settings.EXTERNAL_HOST,), + m = re.search(fr'\.{settings.EXTERNAL_HOST}(:\d+)?$', host) if m: subdomain = host[:m.start()] @@ -32,7 +32,7 @@ def get_subdomain_from_hostname(host: str) -> str: return subdomain for subdomain, realm_host in settings.REALM_HOSTS.items(): - if re.search(r'^%s(:\d+)?$' % (realm_host,), + if re.search(fr'^{realm_host}(:\d+)?$', host): return subdomain diff --git a/zerver/lib/test_classes.py b/zerver/lib/test_classes.py index f8e4d28bda..74d59b62a4 100644 --- a/zerver/lib/test_classes.py +++ b/zerver/lib/test_classes.py @@ -487,7 +487,7 @@ class ZulipTestCase(TestCase): """ identifier: Can be an email or a remote server uuid. """ - credentials = "%s:%s" % (identifier, api_key) + credentials = f"{identifier}:{api_key}" return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8') def uuid_get(self, identifier: str, *args: Any, **kwargs: Any) -> HttpResponse: @@ -629,7 +629,7 @@ class ZulipTestCase(TestCase): print('ITEMS:\n') for item in items: print(item) - print("\nexpected length: %s\nactual length: %s" % (count, actual_count)) + print(f"\nexpected length: {count}\nactual length: {actual_count}") raise AssertionError('List is unexpected size!') def assert_json_error_contains(self, result: HttpResponse, msg_substring: str, @@ -663,14 +663,14 @@ class ZulipTestCase(TestCase): def webhook_fixture_data(self, type: str, action: str, file_type: str='json') -> str: fn = os.path.join( os.path.dirname(__file__), - "../webhooks/%s/fixtures/%s.%s" % (type, action, file_type) + f"../webhooks/{type}/fixtures/{action}.{file_type}" ) return open(fn).read() def fixture_file_name(self, file_name: str, type: str='') -> str: return os.path.join( os.path.dirname(__file__), - "../tests/fixtures/%s/%s" % (type, file_name) + f"../tests/fixtures/{type}/{file_name}" ) def fixture_data(self, file_name: str, type: str='') -> str: @@ -695,10 +695,10 @@ class ZulipTestCase(TestCase): ) except IntegrityError: # nocoverage -- this is for bugs in the tests raise Exception(''' - %s already exists + {} already exists Please call make_stream with a stream name - that is not already in use.''' % (stream_name,)) + that is not already in use.'''.format(stream_name)) recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM) stream.recipient = recipient diff --git a/zerver/lib/test_fixtures.py b/zerver/lib/test_fixtures.py index 192ed5aad2..3327ff2955 100644 --- a/zerver/lib/test_fixtures.py +++ b/zerver/lib/test_fixtures.py @@ -357,7 +357,7 @@ def destroy_leaked_test_databases(expiry_time: int = 60 * 60) -> int: if not databases_to_drop: return 0 - commands = "\n".join("DROP DATABASE IF EXISTS %s;" % (db,) for db in databases_to_drop) + commands = "\n".join(f"DROP DATABASE IF EXISTS {db};" for db in 
databases_to_drop) p = subprocess.Popen(["psql", "-q", "-v", "ON_ERROR_STOP=1", "-h", "localhost", "postgres", "zulip_test"], stdin=subprocess.PIPE) diff --git a/zerver/lib/test_helpers.py b/zerver/lib/test_helpers.py index 0b041f7b47..75602a3ae9 100644 --- a/zerver/lib/test_helpers.py +++ b/zerver/lib/test_helpers.py @@ -161,7 +161,7 @@ def queries_captured(include_savepoints: Optional[bool]=False) -> Generator[ if include_savepoints or not isinstance(sql, str) or 'SAVEPOINT' not in sql: queries.append({ 'sql': self.mogrify(sql, params).decode('utf-8'), - 'time': "%.3f" % (duration,), + 'time': f"{duration:.3f}", }) old_execute = TimeTrackingCursor.execute @@ -437,13 +437,13 @@ def write_instrumentation_reports(full_suite: bool, include_webhooks: bool) -> N print(call) if full_suite: - print('INFO: URL coverage report is in %s' % (fn,)) + print(f'INFO: URL coverage report is in {fn}') print('INFO: Try running: ./tools/create-test-api-docs') if full_suite and len(untested_patterns): # nocoverage -- test suite error handling print("\nERROR: Some URLs are untested! Here's the list of untested URLs:") for untested_pattern in sorted(untested_patterns): - print(" %s" % (untested_pattern,)) + print(f" {untested_pattern}") sys.exit(1) def load_subdomain_token(response: HttpResponse) -> ExternalAuthDataDict: diff --git a/zerver/lib/test_runner.py b/zerver/lib/test_runner.py index 2c10c05113..3b5b5a3c69 100644 --- a/zerver/lib/test_runner.py +++ b/zerver/lib/test_runner.py @@ -74,7 +74,7 @@ def full_test_name(test: TestCase) -> str: test_module = test.__module__ test_class = test.__class__.__name__ test_method = test._testMethodName - return '%s.%s.%s' % (test_module, test_class, test_method) + return f'{test_module}.{test_class}.{test_method}' def get_test_method(test: TestCase) -> Callable[[], None]: return getattr(test, test._testMethodName) @@ -91,14 +91,14 @@ def report_slow_tests() -> None: for delay, test_name, slowness_reason in timings[:15]: if not slowness_reason: slowness_reason = 'UNKNOWN WHY SLOW, please investigate' - print(' %0.3f %s\n %s\n' % (delay, test_name, slowness_reason)) + print(f' {delay:0.3f} {test_name}\n {slowness_reason}\n') print('...') for delay, test_name, slowness_reason in timings[100:]: if slowness_reason: - print(' %.3f %s is not that slow' % (delay, test_name)) + print(f' {delay:.3f} {test_name} is not that slow') print(' consider removing @slow decorator') - print(' This may no longer be true: %s' % (slowness_reason,)) + print(f' This may no longer be true: {slowness_reason}') def enforce_timely_test_completion(test_method: Callable[..., ReturnT], test_name: str, delay: float, result: unittest.TestResult) -> None: @@ -110,7 +110,7 @@ def enforce_timely_test_completion(test_method: Callable[..., ReturnT], test_nam assert isinstance(result, TextTestResult) or isinstance(result, RemoteTestResult) if delay > max_delay: - msg = '** Test is TOO slow: %s (%.3f s)\n' % (test_name, delay) + msg = f'** Test is TOO slow: {test_name} ({delay:.3f} s)\n' result.addInfo(test_method, msg) def fast_tests_only() -> bool: diff --git a/zerver/lib/thumbnail.py b/zerver/lib/thumbnail.py index 9ee9517430..2cc19a7da8 100644 --- a/zerver/lib/thumbnail.py +++ b/zerver/lib/thumbnail.py @@ -46,7 +46,7 @@ def generate_thumbnail_url(path: str, source_type = get_source_type(path) safe_url = base64.urlsafe_b64encode(path.encode()).decode('utf-8') - image_url = '%s/source_type/%s' % (safe_url, source_type) + image_url = f'{safe_url}/source_type/{source_type}' width, height = map(int, 
size.split('x')) crypto = CryptoURL(key=settings.THUMBOR_KEY) diff --git a/zerver/lib/timestamp.py b/zerver/lib/timestamp.py index 7f4f47d64f..d344bef6f5 100644 --- a/zerver/lib/timestamp.py +++ b/zerver/lib/timestamp.py @@ -6,7 +6,7 @@ class TimezoneNotUTCException(Exception): def verify_UTC(dt: datetime.datetime) -> None: if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) != datetime.timezone.utc.utcoffset(dt): - raise TimezoneNotUTCException("Datetime %s does not have a UTC timezone." % (dt,)) + raise TimezoneNotUTCException(f"Datetime {dt} does not have a UTC timezone.") def convert_to_UTC(dt: datetime.datetime) -> datetime.datetime: if dt.tzinfo is None: diff --git a/zerver/lib/type_debug.py b/zerver/lib/type_debug.py index 4aa6f571cd..35a07fbc45 100644 --- a/zerver/lib/type_debug.py +++ b/zerver/lib/type_debug.py @@ -15,14 +15,14 @@ def get_mapping_type_str(x: Mapping[Any, Any]) -> str: value_type = get_type_str(x[key]) if container_type == 'dict': if len(x) == 1: - return '{%s: %s}' % (key_type, value_type) + return f'{{{key_type}: {value_type}}}' else: - return '{%s: %s, ...}' % (key_type, value_type) + return f'{{{key_type}: {value_type}, ...}}' else: if len(x) == 1: - return '%s([(%s, %s)])' % (container_type, key_type, value_type) + return f'{container_type}([({key_type}, {value_type})])' else: - return '%s([(%s, %s), ...])' % (container_type, key_type, value_type) + return f'{container_type}([({key_type}, {value_type}), ...])' def get_sequence_type_str(x: Sequence[Any]) -> str: container_type = type(x).__name__ @@ -39,9 +39,9 @@ def get_sequence_type_str(x: Sequence[Any]) -> str: return '[' + elem_type + ', ...]' else: if len(x) == 1: - return '%s([%s])' % (container_type, elem_type) + return f'{container_type}([{elem_type}])' else: - return '%s([%s, ...])' % (container_type, elem_type) + return f'{container_type}([{elem_type}, ...])' expansion_blacklist = [str, bytes] @@ -72,9 +72,9 @@ def print_types_to(file_obj: IO[str]) -> Callable[[FuncT], FuncT]: arg_types = [get_type_str(arg) for arg in args] kwarg_types = [key + "=" + get_type_str(value) for key, value in kwargs.items()] ret_val = func(*args, **kwargs) - output = "%s(%s) -> %s" % (func.__name__, - ", ".join(arg_types + kwarg_types), - get_type_str(ret_val)) + output = "{}({}) -> {}".format(func.__name__, + ", ".join(arg_types + kwarg_types), + get_type_str(ret_val)) print(output, file=file_obj) return ret_val return wrapper # type: ignore[return-value] # https://github.com/python/mypy/issues/1927 diff --git a/zerver/lib/upload.py b/zerver/lib/upload.py index a22751b73e..030db29994 100644 --- a/zerver/lib/upload.py +++ b/zerver/lib/upload.py @@ -378,7 +378,7 @@ class S3UploadBackend(ZulipUploadBackend): random_name(18), sanitize_name(uploaded_file_name) ]) - url = "/user_uploads/%s" % (s3_file_name,) + url = f"/user_uploads/{s3_file_name}" upload_image_to_s3( bucket_name, @@ -469,12 +469,12 @@ class S3UploadBackend(ZulipUploadBackend): bucket = settings.S3_AVATAR_BUCKET medium_suffix = "-medium.png" if medium else "" # ?x=x allows templates to append additional parameters with &s - return "https://%s.s3.amazonaws.com/%s%s?x=x" % (bucket, hash_key, medium_suffix) + return f"https://{bucket}.s3.amazonaws.com/{hash_key}{medium_suffix}?x=x" def get_export_tarball_url(self, realm: Realm, export_path: str) -> str: bucket = settings.S3_AVATAR_BUCKET # export_path has a leading / - return "https://%s.s3.amazonaws.com%s" % (bucket, export_path) + return f"https://{bucket}.s3.amazonaws.com{export_path}" def 
realm_avatar_and_logo_path(self, realm: Realm) -> str: return os.path.join(str(realm.id), 'realm') @@ -507,8 +507,7 @@ class S3UploadBackend(ZulipUploadBackend): def get_realm_icon_url(self, realm_id: int, version: int) -> str: bucket = settings.S3_AVATAR_BUCKET # ?x=x allows templates to append additional parameters with &s - return "https://%s.s3.amazonaws.com/%s/realm/icon.png?version=%s" % ( - bucket, realm_id, version) + return f"https://{bucket}.s3.amazonaws.com/{realm_id}/realm/icon.png?version={version}" def upload_realm_logo_image(self, logo_file: File, user_profile: UserProfile, night: bool) -> None: @@ -547,8 +546,7 @@ class S3UploadBackend(ZulipUploadBackend): file_name = 'logo.png' else: file_name = 'night_logo.png' - return "https://%s.s3.amazonaws.com/%s/realm/%s?version=%s" % ( - bucket, realm_id, file_name, version) + return f"https://{bucket}.s3.amazonaws.com/{realm_id}/realm/{file_name}?version={version}" def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None: file_path = user_avatar_path(user_profile) @@ -618,7 +616,7 @@ class S3UploadBackend(ZulipUploadBackend): bucket = settings.S3_AVATAR_BUCKET emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(realm_id=realm_id, emoji_file_name=emoji_file_name) - return "https://%s.s3.amazonaws.com/%s" % (bucket, emoji_path) + return f"https://{bucket}.s3.amazonaws.com/{emoji_path}" def upload_export_tarball(self, realm: Optional[Realm], tarball_path: str) -> str: def percent_callback(bytes_transferred: Any) -> None: @@ -747,7 +745,7 @@ class LocalUploadBackend(ZulipUploadBackend): def get_avatar_url(self, hash_key: str, medium: bool=False) -> str: # ?x=x allows templates to append additional parameters with &s medium_suffix = "-medium" if medium else "" - return "/user_avatars/%s%s.png?x=x" % (hash_key, medium_suffix) + return f"/user_avatars/{hash_key}{medium_suffix}.png?x=x" def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None: source_file_path = user_avatar_path(source_profile) @@ -772,7 +770,7 @@ class LocalUploadBackend(ZulipUploadBackend): def get_realm_icon_url(self, realm_id: int, version: int) -> str: # ?x=x allows templates to append additional parameters with &s - return "/user_avatars/%s/realm/icon.png?version=%s" % (realm_id, version) + return f"/user_avatars/{realm_id}/realm/icon.png?version={version}" def upload_realm_logo_image(self, logo_file: File, user_profile: UserProfile, night: bool) -> None: @@ -798,7 +796,7 @@ class LocalUploadBackend(ZulipUploadBackend): file_name = 'night_logo.png' else: file_name = 'logo.png' - return "/user_avatars/%s/realm/%s?version=%s" % (realm_id, file_name, version) + return f"/user_avatars/{realm_id}/realm/{file_name}?version={version}" def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None: file_path = user_avatar_path(user_profile) diff --git a/zerver/lib/url_encoding.py b/zerver/lib/url_encoding.py index a223f8dc62..a5d59d031e 100644 --- a/zerver/lib/url_encoding.py +++ b/zerver/lib/url_encoding.py @@ -17,25 +17,23 @@ def encode_stream(stream_id: int, stream_name: str) -> str: return str(stream_id) + '-' + hash_util_encode(stream_name) def personal_narrow_url(realm: Realm, sender: UserProfile) -> str: - base_url = "%s/#narrow/pm-with/" % (realm.uri,) + base_url = f"{realm.uri}/#narrow/pm-with/" email_user = sender.email.split('@')[0].lower() pm_slug = str(sender.id) + '-' + hash_util_encode(email_user) return base_url + pm_slug def huddle_narrow_url(realm: Realm, other_user_ids: List[int]) -> str: pm_slug = 
','.join(str(user_id) for user_id in sorted(other_user_ids)) + '-group' - base_url = "%s/#narrow/pm-with/" % (realm.uri,) + base_url = f"{realm.uri}/#narrow/pm-with/" return base_url + pm_slug def stream_narrow_url(realm: Realm, stream: Stream) -> str: - base_url = "%s/#narrow/stream/" % (realm.uri,) + base_url = f"{realm.uri}/#narrow/stream/" return base_url + encode_stream(stream.id, stream.name) def topic_narrow_url(realm: Realm, stream: Stream, topic: str) -> str: - base_url = "%s/#narrow/stream/" % (realm.uri,) - return "%s%s/topic/%s" % (base_url, - encode_stream(stream.id, stream.name), - hash_util_encode(topic)) + base_url = f"{realm.uri}/#narrow/stream/" + return f"{base_url}{encode_stream(stream.id, stream.name)}/topic/{hash_util_encode(topic)}" def near_message_url(realm: Realm, message: Dict[str, Any]) -> str: diff --git a/zerver/lib/utils.py b/zerver/lib/utils.py index 97c1e5ac03..4b1d35ef8e 100644 --- a/zerver/lib/utils.py +++ b/zerver/lib/utils.py @@ -36,9 +36,9 @@ class StatsDWrapper: """Set a gauge value.""" from django_statsd.clients import statsd if delta: - value_str = '%+g|g' % (value,) + value_str = f'{value:+g}|g' else: - value_str = '%g|g' % (value,) + value_str = f'{value:g}|g' statsd._send(stat, value_str, rate) def __getattr__(self, name: str) -> Any: @@ -76,7 +76,7 @@ def run_in_batches(all_list: Sequence[T], batch = all_list[start:end] if logger: - logger("Executing %s in batch %s of %s" % (end-start, i+1, limit)) + logger(f"Executing {end-start} in batch {i+1} of {limit}") callback(batch) @@ -104,7 +104,7 @@ def log_statsd_event(name: str) -> None: Note that to draw this event as a vertical line in graphite you can use the drawAsInfinite() command """ - event_name = "events.%s" % (name,) + event_name = f"events.{name}" statsd.incr(event_name) def generate_random_token(length: int) -> str: diff --git a/zerver/lib/validator.py b/zerver/lib/validator.py index 15dc93b075..00920b0706 100644 --- a/zerver/lib/validator.py +++ b/zerver/lib/validator.py @@ -221,7 +221,7 @@ def check_dict(required_keys: Iterable[Tuple[str, Validator]]=[], if k not in val: return (_('%(key_name)s key is missing from %(var_name)s') % {'key_name': k, 'var_name': var_name}) - vname = '%s["%s"]' % (var_name, k) + vname = f'{var_name}["{k}"]' error = sub_validator(vname, val[k]) if error: return error @@ -230,7 +230,7 @@ def check_dict(required_keys: Iterable[Tuple[str, Validator]]=[], for k, sub_validator in optional_keys: if k in val: - vname = '%s["%s"]' % (var_name, k) + vname = f'{var_name}["{k}"]' error = sub_validator(vname, val[k]) if error: return error @@ -239,7 +239,7 @@ def check_dict(required_keys: Iterable[Tuple[str, Validator]]=[], if value_validator: for key in val: - vname = '%s contains a value that' % (var_name,) + vname = f'{var_name} contains a value that' error = value_validator(vname, val[key]) if error: return error @@ -271,7 +271,7 @@ def check_variable_type(allowed_type_funcs: Iterable[Validator]) -> Validator: """ if settings.LOG_API_EVENT_TYPES: - type_structure = 'any("%s")' % ([x.type_structure for x in allowed_type_funcs],) # type: ignore[attr-defined] # monkey-patching + type_structure = f'any("{[x.type_structure for x in allowed_type_funcs]}")' # type: ignore[attr-defined] # monkey-patching else: type_structure = None # type: ignore[assignment] # monkey-patching @@ -284,7 +284,7 @@ def check_variable_type(allowed_type_funcs: Iterable[Validator]) -> Validator: return enumerated_type_check def equals(expected_val: object) -> Validator: - 
@set_type_structure('equals("%s")' % (str(expected_val),)) + @set_type_structure(f'equals("{str(expected_val)}")') def f(var_name: str, val: object) -> Optional[str]: if val != expected_val: return (_('%(variable)s != %(expected_value)s (%(value)s is wrong)') % @@ -415,7 +415,7 @@ def to_non_negative_int(s: str, max_int_size: int=2**32-1) -> int: if x < 0: raise ValueError("argument is negative") if x > max_int_size: - raise ValueError('%s is too large (max %s)' % (x, max_int_size)) + raise ValueError(f'{x} is too large (max {max_int_size})') return x def to_positive_or_allowed_int(allowed_integer: int) -> Callable[[str], int]: diff --git a/zerver/lib/zephyr.py b/zerver/lib/zephyr.py index 2d11536d9a..c39b2f52a7 100644 --- a/zerver/lib/zephyr.py +++ b/zerver/lib/zephyr.py @@ -8,7 +8,7 @@ def compute_mit_user_fullname(email: str) -> str: match_user = re.match(r'^([a-zA-Z0-9_.-]+)(\|.+)?@mit\.edu$', email.lower()) if match_user and match_user.group(2) is None: answer = DNS.dnslookup( - "%s.passwd.ns.athena.mit.edu" % (match_user.group(1),), + f"{match_user.group(1)}.passwd.ns.athena.mit.edu", DNS.Type.TXT) hesiod_name = answer[0][0].split(':')[4].split(',')[0].strip() if hesiod_name != "": @@ -18,6 +18,6 @@ def compute_mit_user_fullname(email: str) -> str: except DNS.Base.ServerError: pass except Exception: - print("Error getting fullname for %s:" % (email,)) + print(f"Error getting fullname for {email}:") traceback.print_exc() return email.lower() diff --git a/zerver/management/commands/add_users_to_mailing_list.py b/zerver/management/commands/add_users_to_mailing_list.py index 77826ca27a..8ea4b84ca6 100644 --- a/zerver/management/commands/add_users_to_mailing_list.py +++ b/zerver/management/commands/add_users_to_mailing_list.py @@ -66,6 +66,6 @@ class Command(BaseCommand): } r = requests.post(endpoint, auth=('apikey', api_key), json=data, timeout=10) if r.status_code == 400 and ujson.loads(r.text)['title'] == 'Member Exists': - print("%s is already a part of the list." 
% (data['email_address'],)) + print("{} is already a part of the list.".format(data['email_address'])) elif r.status_code >= 400: print(r.text) diff --git a/zerver/management/commands/add_users_to_streams.py b/zerver/management/commands/add_users_to_streams.py index 7ed396aa86..04ecf73303 100644 --- a/zerver/management/commands/add_users_to_streams.py +++ b/zerver/management/commands/add_users_to_streams.py @@ -32,6 +32,6 @@ class Command(ZulipBaseCommand): stream = ensure_stream(realm, stream_name) _ignore, already_subscribed = bulk_add_subscriptions([stream], [user_profile]) was_there_already = user_profile.id in {tup[0].id for tup in already_subscribed} - print("%s %s to %s" % ( + print("{} {} to {}".format( "Already subscribed" if was_there_already else "Subscribed", user_profile.delivery_email, stream_name)) diff --git a/zerver/management/commands/backup.py b/zerver/management/commands/backup.py index f65776279d..a826cf62da 100644 --- a/zerver/management/commands/backup.py +++ b/zerver/management/commands/backup.py @@ -31,7 +31,7 @@ class Command(ZulipBaseCommand): def handle(self, *args: Any, **options: Any) -> None: timestamp = timezone_now().strftime(TIMESTAMP_FORMAT) with tempfile.TemporaryDirectory( - prefix="zulip-backup-%s-" % (timestamp,) + prefix=f"zulip-backup-{timestamp}-" ) as tmp: os.mkdir(os.path.join(tmp, "zulip-backup")) members = [] @@ -109,7 +109,7 @@ class Command(ZulipBaseCommand): try: if options["output"] is None: tarball_path = tempfile.NamedTemporaryFile( - prefix="zulip-backup-%s-" % (timestamp,), + prefix=f"zulip-backup-{timestamp}-", suffix=".tar.gz", delete=False, ).name @@ -122,7 +122,7 @@ class Command(ZulipBaseCommand): + ["--"] + members ) - print("Backup tarball written to %s" % (tarball_path,)) + print(f"Backup tarball written to {tarball_path}") except BaseException: if options["output"] is None: os.unlink(tarball_path) diff --git a/zerver/management/commands/bulk_change_user_name.py b/zerver/management/commands/bulk_change_user_name.py index 970aceef9e..fe7abe71da 100644 --- a/zerver/management/commands/bulk_change_user_name.py +++ b/zerver/management/commands/bulk_change_user_name.py @@ -25,7 +25,7 @@ class Command(ZulipBaseCommand): try: user_profile = self.get_user(email, realm) old_name = user_profile.full_name - print("%s: %s -> %s" % (email, old_name, new_name)) + print(f"{email}: {old_name} -> {new_name}") do_change_full_name(user_profile, new_name, None) except CommandError: - print("e-mail %s doesn't exist in the realm %s, skipping" % (email, realm)) + print(f"e-mail {email} doesn't exist in the realm {realm}, skipping") diff --git a/zerver/management/commands/compilemessages.py b/zerver/management/commands/compilemessages.py index e64dfb2e08..fff408548f 100644 --- a/zerver/management/commands/compilemessages.py +++ b/zerver/management/commands/compilemessages.py @@ -70,7 +70,7 @@ class Command(compilemessages.Command): print(f"Problem in parsing {po_filename}") raise else: - raise Exception("Unknown language %s" % (locale,)) + raise Exception(f"Unknown language {locale}") def get_locales(self) -> List[str]: output = check_output(['git', 'ls-files', 'locale']) diff --git a/zerver/management/commands/convert_gitter_data.py b/zerver/management/commands/convert_gitter_data.py index a4fb54d8d2..62e88568c5 100644 --- a/zerver/management/commands/convert_gitter_data.py +++ b/zerver/management/commands/convert_gitter_data.py @@ -42,7 +42,7 @@ class Command(BaseCommand): for path in options['gitter_data']: if not os.path.exists(path): - raise 
CommandError("Gitter data file not found: '%s'" % (path,)) + raise CommandError(f"Gitter data file not found: '{path}'") # TODO add json check print("Converting Data ...") do_convert_data(path, output_dir, num_threads) diff --git a/zerver/management/commands/convert_hipchat_data.py b/zerver/management/commands/convert_hipchat_data.py index 8182666337..9061c44766 100644 --- a/zerver/management/commands/convert_hipchat_data.py +++ b/zerver/management/commands/convert_hipchat_data.py @@ -72,7 +72,7 @@ class Command(BaseCommand): for path in options['hipchat_tar']: if not os.path.exists(path): - raise CommandError("Tar file not found: '%s'" % (path,)) + raise CommandError(f"Tar file not found: '{path}'") print("Converting Data ...") do_convert_data( diff --git a/zerver/management/commands/convert_mattermost_data.py b/zerver/management/commands/convert_mattermost_data.py index 43ea478113..d0e1df3bdd 100644 --- a/zerver/management/commands/convert_mattermost_data.py +++ b/zerver/management/commands/convert_mattermost_data.py @@ -55,7 +55,7 @@ class Command(BaseCommand): data_dir = options['mattermost_data_dir'] if not os.path.exists(data_dir): - raise CommandError("Directory not found: '%s'" % (data_dir,)) + raise CommandError(f"Directory not found: '{data_dir}'") data_dir = os.path.realpath(data_dir) print("Converting Data ...") diff --git a/zerver/management/commands/convert_slack_data.py b/zerver/management/commands/convert_slack_data.py index 1cc9e21d83..7a75f0bbed 100644 --- a/zerver/management/commands/convert_slack_data.py +++ b/zerver/management/commands/convert_slack_data.py @@ -49,7 +49,7 @@ class Command(BaseCommand): for path in options['slack_data_zip']: if not os.path.exists(path): - raise CommandError("Slack data directory not found: '%s'" % (path,)) + raise CommandError(f"Slack data directory not found: '{path}'") print("Converting Data ...") do_convert_data(path, output_dir, token, threads=num_threads) diff --git a/zerver/management/commands/deactivate_user.py b/zerver/management/commands/deactivate_user.py index d981a696d7..0914d45ef0 100644 --- a/zerver/management/commands/deactivate_user.py +++ b/zerver/management/commands/deactivate_user.py @@ -24,14 +24,12 @@ class Command(ZulipBaseCommand): realm = self.get_realm(options) user_profile = self.get_user(options['email'], realm) - print("Deactivating %s (%s) - %s" % (user_profile.full_name, - user_profile.delivery_email, - user_profile.realm.string_id)) - print("%s has the following active sessions:" % (user_profile.delivery_email,)) + print(f"Deactivating {user_profile.full_name} ({user_profile.delivery_email}) - {user_profile.realm.string_id}") + print(f"{user_profile.delivery_email} has the following active sessions:") for session in user_sessions(user_profile): print(session.expire_date, session.get_decoded()) print("") - print("%s has %s active bots that will also be deactivated." 
% ( + print("{} has {} active bots that will also be deactivated.".format( user_profile.delivery_email, UserProfile.objects.filter( is_bot=True, is_active=True, bot_owner=user_profile diff --git a/zerver/management/commands/delete_old_unclaimed_attachments.py b/zerver/management/commands/delete_old_unclaimed_attachments.py index 8820e9cb6a..512f602114 100644 --- a/zerver/management/commands/delete_old_unclaimed_attachments.py +++ b/zerver/management/commands/delete_old_unclaimed_attachments.py @@ -27,12 +27,12 @@ class Command(BaseCommand): def handle(self, *args: Any, **options: Any) -> None: delta_weeks = options['delta_weeks'] - print("Deleting unclaimed attached files older than %s weeks" % (delta_weeks,)) + print(f"Deleting unclaimed attached files older than {delta_weeks} weeks") # print the list of files that are going to be removed old_attachments = get_old_unclaimed_attachments(delta_weeks) for old_attachment in old_attachments: - print("* %s created at %s" % (old_attachment.file_name, old_attachment.create_time)) + print(f"* {old_attachment.file_name} created at {old_attachment.create_time}") print("") if not options["for_real"]: diff --git a/zerver/management/commands/enqueue_file.py b/zerver/management/commands/enqueue_file.py index 24bd1913b5..68ab4fda85 100644 --- a/zerver/management/commands/enqueue_file.py +++ b/zerver/management/commands/enqueue_file.py @@ -48,7 +48,7 @@ You can use "-" to represent stdin. except IndexError: payload = line - print('Queueing to queue %s: %s' % (queue_name, payload)) + print(f'Queueing to queue {queue_name}: {payload}') # Verify that payload is valid json. data = ujson.loads(payload) diff --git a/zerver/management/commands/export.py b/zerver/management/commands/export.py index 7a0a4c0c95..28752bf8f8 100644 --- a/zerver/management/commands/export.py +++ b/zerver/management/commands/export.py @@ -131,9 +131,9 @@ class Command(ZulipBaseCommand): try: os.close(os.open(tarball_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o666)) except FileExistsError: - raise CommandError("Refusing to overwrite existing tarball: %s. Aborting..." % (tarball_path,)) + raise CommandError(f"Refusing to overwrite existing tarball: {tarball_path}. 
Aborting...") - print("\033[94mExporting realm\033[0m: %s" % (realm.string_id,)) + print(f"\033[94mExporting realm\033[0m: {realm.string_id}") num_threads = int(options['threads']) if num_threads < 1: diff --git a/zerver/management/commands/export_single_user.py b/zerver/management/commands/export_single_user.py index 6f19a22a41..fbef9983f7 100644 --- a/zerver/management/commands/export_single_user.py +++ b/zerver/management/commands/export_single_user.py @@ -38,9 +38,9 @@ class Command(ZulipBaseCommand): if os.path.exists(output_dir): shutil.rmtree(output_dir) os.makedirs(output_dir) - print("Exporting user %s" % (user_profile.delivery_email,)) + print(f"Exporting user {user_profile.delivery_email}") do_export_user(user_profile, output_dir) - print("Finished exporting to %s; tarring" % (output_dir,)) + print(f"Finished exporting to {output_dir}; tarring") tarball_path = output_dir.rstrip('/') + '.tar.gz' subprocess.check_call(["tar", "--strip-components=1", "-czf", tarball_path, output_dir]) - print("Tarball written to %s" % (tarball_path,)) + print(f"Tarball written to {tarball_path}") diff --git a/zerver/management/commands/fix_unreads.py b/zerver/management/commands/fix_unreads.py index 0ad250ab64..22a841237a 100644 --- a/zerver/management/commands/fix_unreads.py +++ b/zerver/management/commands/fix_unreads.py @@ -41,7 +41,7 @@ class Command(ZulipBaseCommand): try: user_profile = self.get_user(email, realm) except CommandError: - print("e-mail %s doesn't exist in the realm %s, skipping" % (email, realm)) + print(f"e-mail {email} doesn't exist in the realm {realm}, skipping") return fix(user_profile) diff --git a/zerver/management/commands/generate_multiuse_invite_link.py b/zerver/management/commands/generate_multiuse_invite_link.py index 3107aae443..6ee0839471 100644 --- a/zerver/management/commands/generate_multiuse_invite_link.py +++ b/zerver/management/commands/generate_multiuse_invite_link.py @@ -40,4 +40,4 @@ class Command(ZulipBaseCommand): referred_by = self.get_user(options['referred_by'], realm) invite_as = PreregistrationUser.INVITE_AS['MEMBER'] invite_link = do_create_multiuse_invite_link(referred_by, invite_as, streams) - print("You can use %s to invite as many number of people to the organization." 
% (invite_link,)) + print(f"You can use {invite_link} to invite as many number of people to the organization.") diff --git a/zerver/management/commands/generate_realm_creation_link.py b/zerver/management/commands/generate_realm_creation_link.py index 5bbc2ff0f2..d9fa493356 100644 --- a/zerver/management/commands/generate_realm_creation_link.py +++ b/zerver/management/commands/generate_realm_creation_link.py @@ -29,5 +29,5 @@ class Command(ZulipBaseCommand): "secure single-use link to register your ")) self.stdout.write(self.style.SUCCESS("new Zulip organization:\033[0m")) self.stdout.write("") - self.stdout.write(self.style.SUCCESS(" \033[1;92m%s\033[0m" % (url,))) + self.stdout.write(self.style.SUCCESS(f" \033[1;92m{url}\033[0m")) self.stdout.write("") diff --git a/zerver/management/commands/import.py b/zerver/management/commands/import.py index 5e77852e9d..1f3406ecd8 100644 --- a/zerver/management/commands/import.py +++ b/zerver/management/commands/import.py @@ -68,14 +68,14 @@ import a database dump from one or more JSON files.""" for path in options['export_paths']: path = os.path.realpath(os.path.expanduser(path)) if not os.path.exists(path): - raise CommandError("Directory not found: '%s'" % (path,)) + raise CommandError(f"Directory not found: '{path}'") if not os.path.isdir(path): raise CommandError("Export file should be folder; if it's a " "tarball, please unpack it first.") paths.append(path) for path in paths: - print("Processing dump: %s ..." % (path,)) + print(f"Processing dump: {path} ...") realm = do_import_realm(path, subdomain, num_processes) print("Checking the system bots.") do_import_system_bots(realm) diff --git a/zerver/management/commands/knight.py b/zerver/management/commands/knight.py index 418b41bca9..601eb1908c 100644 --- a/zerver/management/commands/knight.py +++ b/zerver/management/commands/knight.py @@ -53,7 +53,7 @@ ONLY perform this on customer request from an authorized person. do_change_user_role(user, UserProfile.ROLE_REALM_ADMINISTRATOR) print("Done!") else: - print("Would have granted %s %s rights for %s" % ( + print("Would have granted {} {} rights for {}".format( email, options['permission'], user.realm.string_id)) else: if (user.is_realm_admin and options['permission'] == "administer" or @@ -65,7 +65,7 @@ ONLY perform this on customer request from an authorized person. 
do_change_user_role(user, UserProfile.ROLE_MEMBER) print("Done!") else: - print("Would have removed %s's %s rights on %s" % (email, options['permission'], - user.realm.string_id)) + print("Would have removed {}'s {} rights on {}".format(email, options['permission'], + user.realm.string_id)) else: raise CommandError("User did not have permission for this realm!") diff --git a/zerver/management/commands/makemessages.py b/zerver/management/commands/makemessages.py index 3fa4040ba5..e3eec2a16e 100644 --- a/zerver/management/commands/makemessages.py +++ b/zerver/management/commands/makemessages.py @@ -40,10 +40,8 @@ from django.core.management.commands import makemessages from django.template.base import BLOCK_TAG_END, BLOCK_TAG_START from django.utils.translation import template -strip_whitespace_right = re.compile("(%s-?\\s*(trans|pluralize).*?-%s)\\s+" % ( - BLOCK_TAG_START, BLOCK_TAG_END), re.U) -strip_whitespace_left = re.compile("\\s+(%s-\\s*(endtrans|pluralize).*?-?%s)" % ( - BLOCK_TAG_START, BLOCK_TAG_END), re.U) +strip_whitespace_right = re.compile(f"({BLOCK_TAG_START}-?\\s*(trans|pluralize).*?-{BLOCK_TAG_END})\\s+", re.U) +strip_whitespace_left = re.compile(f"\\s+({BLOCK_TAG_START}-\\s*(endtrans|pluralize).*?-?{BLOCK_TAG_END})", re.U) regexes = [r'{{#tr .*?}}([\s\S]*?){{/tr}}', # '.' doesn't match '\n' by default r'{{\s*t "(.*?)"\W*}}', @@ -193,7 +191,7 @@ class Command(makemessages.Command): exclude = self.frontend_exclude process_all = self.frontend_all - paths = glob.glob('%s/*' % (self.default_locale_path,),) + paths = glob.glob(f'{self.default_locale_path}/*',) all_locales = [os.path.basename(path) for path in paths if os.path.isdir(path)] # Account for excluded locales diff --git a/zerver/management/commands/merge_streams.py b/zerver/management/commands/merge_streams.py index a5d9316c14..517446da72 100644 --- a/zerver/management/commands/merge_streams.py +++ b/zerver/management/commands/merge_streams.py @@ -46,7 +46,7 @@ class Command(ZulipBaseCommand): message_ids_to_clear = list(Message.objects.filter( recipient=recipient_to_destroy).values_list("id", flat=True)) count = Message.objects.filter(recipient=recipient_to_destroy).update(recipient=recipient_to_keep) - print("Moved %s messages" % (count,)) + print(f"Moved {count} messages") bulk_delete_cache_keys(message_ids_to_clear) # Move the Subscription objects. 
This algorithm doesn't @@ -62,11 +62,11 @@ class Command(ZulipBaseCommand): ] if len(subs_to_deactivate) > 0: - print("Deactivating %s subscriptions" % (len(subs_to_deactivate),)) + print(f"Deactivating {len(subs_to_deactivate)} subscriptions") bulk_remove_subscriptions([sub.user_profile for sub in subs_to_deactivate], [stream_to_destroy], self.get_client()) do_deactivate_stream(stream_to_destroy) if len(users_to_activate) > 0: - print("Adding %s subscriptions" % (len(users_to_activate),)) + print(f"Adding {len(users_to_activate)} subscriptions") bulk_add_subscriptions([stream_to_keep], users_to_activate) diff --git a/zerver/management/commands/purge_queue.py b/zerver/management/commands/purge_queue.py index f8c44ef7c9..3cbe54e876 100644 --- a/zerver/management/commands/purge_queue.py +++ b/zerver/management/commands/purge_queue.py @@ -33,9 +33,9 @@ class Command(BaseCommand): queue_name = options['queue_name'] if not (queue_name in get_active_worker_queues() or queue_name.startswith("notify_tornado")): - raise CommandError("Unknown queue %s" % (queue_name,)) + raise CommandError(f"Unknown queue {queue_name}") - print("Purging queue %s" % (queue_name,)) + print(f"Purging queue {queue_name}") purge_queue(queue_name) print("Done") diff --git a/zerver/management/commands/rate_limit.py b/zerver/management/commands/rate_limit.py index 162c737e9e..4491351605 100644 --- a/zerver/management/commands/rate_limit.py +++ b/zerver/management/commands/rate_limit.py @@ -46,7 +46,7 @@ class Command(ZulipBaseCommand): try: user_profile = get_user_profile_by_api_key(options['api_key']) except UserProfile.DoesNotExist: - raise CommandError("Unable to get user profile for api key %s" % (options['api_key'],)) + raise CommandError("Unable to get user profile for api key {}".format(options['api_key'])) users = [user_profile] if options['bots']: @@ -55,7 +55,7 @@ class Command(ZulipBaseCommand): operation = options['operation'] for user in users: - print("Applying operation to User ID: %s: %s" % (user.id, operation)) + print(f"Applying operation to User ID: {user.id}: {operation}") if operation == 'block': RateLimitedUser(user, domain=options['domain']).block_access(options['seconds']) diff --git a/zerver/management/commands/realm_domain.py b/zerver/management/commands/realm_domain.py index 77a5090f74..35b4c41e17 100644 --- a/zerver/management/commands/realm_domain.py +++ b/zerver/management/commands/realm_domain.py @@ -32,7 +32,7 @@ class Command(ZulipBaseCommand): realm = self.get_realm(options) assert realm is not None # Should be ensured by parser if options["op"] == "show": - print("Domains for %s:" % (realm.string_id,)) + print(f"Domains for {realm.string_id}:") for realm_domain in get_realm_domains(realm): if realm_domain["allow_subdomains"]: print(realm_domain["domain"] + " (subdomains allowed)") diff --git a/zerver/management/commands/realm_filters.py b/zerver/management/commands/realm_filters.py index c34fe55658..e4d4304685 100644 --- a/zerver/management/commands/realm_filters.py +++ b/zerver/management/commands/realm_filters.py @@ -38,7 +38,7 @@ Example: ./manage.py realm_filters --realm=zulip --op=show realm = self.get_realm(options) assert realm is not None # Should be ensured by parser if options["op"] == "show": - print("%s: %s" % (realm.string_id, all_realm_filters().get(realm.id, []))) + print(f"{realm.string_id}: {all_realm_filters().get(realm.id, [])}") sys.exit(0) pattern = options['pattern'] diff --git a/zerver/management/commands/register_server.py 
b/zerver/management/commands/register_server.py index 9fd68e4e43..31c39427a6 100644 --- a/zerver/management/commands/register_server.py +++ b/zerver/management/commands/register_server.py @@ -56,7 +56,7 @@ class Command(ZulipBaseCommand): print("The following data will be submitted to the push notification service:") for key in sorted(request.keys()): - print(" %s: %s" % (key, request[key])) + print(f" {key}: {request[key]}") print("") if not options['agree_to_terms_of_service'] and not options["rotate_key"]: @@ -88,7 +88,7 @@ class Command(ZulipBaseCommand): print("- Return to the documentation to learn how to test push notifications") else: if options["rotate_key"]: - print("Success! Updating %s with the new key..." % (SECRETS_FILENAME,)) + print(f"Success! Updating {SECRETS_FILENAME} with the new key...") subprocess.check_call(["crudini", '--set', SECRETS_FILENAME, "secrets", "zulip_org_key", request["new_org_key"]]) print("Mobile Push Notification Service registration successfully updated!") diff --git a/zerver/management/commands/remove_users_from_stream.py b/zerver/management/commands/remove_users_from_stream.py index 743eb97e34..a646b250c3 100644 --- a/zerver/management/commands/remove_users_from_stream.py +++ b/zerver/management/commands/remove_users_from_stream.py @@ -33,6 +33,6 @@ class Command(ZulipBaseCommand): for user_profile in user_profiles: if user_profile in not_subscribed_users: - print("%s was not subscribed" % (user_profile.delivery_email,)) + print(f"{user_profile.delivery_email} was not subscribed") else: - print("Removed %s from %s" % (user_profile.delivery_email, stream_name)) + print(f"Removed {user_profile.delivery_email} from {stream_name}") diff --git a/zerver/management/commands/runtornado.py b/zerver/management/commands/runtornado.py index beb09355fc..02e27760c0 100644 --- a/zerver/management/commands/runtornado.py +++ b/zerver/management/commands/runtornado.py @@ -65,7 +65,7 @@ class Command(BaseCommand): addr = '127.0.0.1' if not port.isdigit(): - raise CommandError("%r is not a valid port number." % (port,)) + raise CommandError(f"{port!r} is not a valid port number.") xheaders = options.get('xheaders', True) no_keep_alive = options.get('no_keep_alive', False) @@ -82,7 +82,7 @@ class Command(BaseCommand): # We pass display_num_errors=False, since Django will # likely display similar output anyway. self.check(display_num_errors=False) - print("Tornado server is running at http://%s:%s/" % (addr, port)) + print(f"Tornado server is running at http://{addr}:{port}/") if settings.USING_RABBITMQ: queue_client = get_queue_client() diff --git a/zerver/management/commands/send_realm_reactivation_email.py b/zerver/management/commands/send_realm_reactivation_email.py index 5ff1f60015..25c2e67cb2 100644 --- a/zerver/management/commands/send_realm_reactivation_email.py +++ b/zerver/management/commands/send_realm_reactivation_email.py @@ -15,7 +15,7 @@ class Command(ZulipBaseCommand): realm = self.get_realm(options) assert realm is not None if not realm.deactivated: - raise CommandError("The realm %s is already active." 
% (realm.name,)) + raise CommandError(f"The realm {realm.name} is already active.") print('Sending email to admins') do_send_realm_reactivation_email(realm) print('Done!') diff --git a/zerver/management/commands/send_test_email.py b/zerver/management/commands/send_test_email.py index ce512afcdb..58016d5f2a 100644 --- a/zerver/management/commands/send_test_email.py +++ b/zerver/management/commands/send_test_email.py @@ -32,13 +32,13 @@ class Command(sendtestemail.Command): "the Zulip server with /home/zulip/deployments/current/scripts/restart-server " "after changing the settings in /etc/zulip before your changes will take effect.") sender = FromAddress.SUPPORT - print(" * %s" % (sender,)) + print(f" * {sender}") send_mail("Zulip email test", message, sender, kwargs['email']) noreply_sender = FromAddress.tokenized_no_reply_address() - print(" * %s" % (noreply_sender,)) + print(f" * {noreply_sender}") send_mail("Zulip noreply email test", message, noreply_sender, kwargs['email']) print() - print("Successfully sent 2 emails to %s!" % (", ".join(kwargs['email']),)) + print("Successfully sent 2 emails to {}!".format(", ".join(kwargs['email']))) if kwargs['managers']: mail_managers("Zulip manager email test", "This email was sent to the site managers.") diff --git a/zerver/management/commands/send_webhook_fixture_message.py b/zerver/management/commands/send_webhook_fixture_message.py index b12b8761be..9da940be72 100644 --- a/zerver/management/commands/send_webhook_fixture_message.py +++ b/zerver/management/commands/send_webhook_fixture_message.py @@ -85,7 +85,7 @@ approach shown above. result = client.post(options['url'], json, content_type="application/json", HTTP_HOST=realm.host) if result.status_code != 200: - raise CommandError('Error status %s: %s' % (result.status_code, result.content)) + raise CommandError(f'Error status {result.status_code}: {result.content}') def _does_fixture_path_exist(self, fixture_path: str) -> bool: return os.path.exists(fixture_path) diff --git a/zerver/management/commands/turn_off_digests.py b/zerver/management/commands/turn_off_digests.py index a7d7716c38..ee68e18f92 100644 --- a/zerver/management/commands/turn_off_digests.py +++ b/zerver/management/commands/turn_off_digests.py @@ -26,5 +26,4 @@ class Command(ZulipBaseCommand): do_change_notification_settings(user_profile, 'enable_digest_emails', False) else: already_disabled_prefix = "(already off) " - print("%s%s <%s>" % (already_disabled_prefix, user_profile.full_name, - user_profile.delivery_email)) + print(f"{already_disabled_prefix}{user_profile.full_name} <{user_profile.delivery_email}>") diff --git a/zerver/middleware.py b/zerver/middleware.py index dc9b914205..d19f35e3df 100644 --- a/zerver/middleware.py +++ b/zerver/middleware.py @@ -76,8 +76,8 @@ def timedelta_ms(timedelta: float) -> float: def format_timedelta(timedelta: float) -> str: if (timedelta >= 1): - return "%.1fs" % (timedelta,) - return "%.0fms" % (timedelta_ms(timedelta),) + return f"{timedelta:.1f}s" + return f"{timedelta_ms(timedelta):.0f}ms" def is_slow_query(time_delta: float, path: str) -> bool: if time_delta < 1.2: @@ -114,7 +114,7 @@ def write_log_line(log_data: MutableMapping[str, Any], path: str, method: str, r if path == '/': statsd_path = 'webreq' else: - statsd_path = "webreq.%s" % (path[1:].replace('/', '.'),) + statsd_path = "webreq.{}".format(path[1:].replace('/', '.')) # Remove non-ascii chars from path (there should be none, if there are it's # because someone manually entered a nonexistent path), as UTF-8 chars make # 
statsd sad when it sends the key name over the socket @@ -135,7 +135,7 @@ def write_log_line(log_data: MutableMapping[str, Any], path: str, method: str, r orig_time_delta = time_delta time_delta = ((log_data['time_stopped'] - log_data['time_started']) + (time.time() - log_data['time_restarted'])) - optional_orig_delta = " (lp: %s)" % (format_timedelta(orig_time_delta),) + optional_orig_delta = f" (lp: {format_timedelta(orig_time_delta)})" remote_cache_output = "" if 'remote_cache_time_start' in log_data: remote_cache_time_delta = get_remote_cache_time() - log_data['remote_cache_time_start'] @@ -148,16 +148,15 @@ def write_log_line(log_data: MutableMapping[str, Any], path: str, method: str, r log_data['remote_cache_requests_restarted']) if (remote_cache_time_delta > 0.005): - remote_cache_output = " (mem: %s/%s)" % (format_timedelta(remote_cache_time_delta), - remote_cache_count_delta) + remote_cache_output = f" (mem: {format_timedelta(remote_cache_time_delta)}/{remote_cache_count_delta})" if not suppress_statsd: - statsd.timing("%s.remote_cache.time" % (statsd_path,), timedelta_ms(remote_cache_time_delta)) - statsd.incr("%s.remote_cache.querycount" % (statsd_path,), remote_cache_count_delta) + statsd.timing(f"{statsd_path}.remote_cache.time", timedelta_ms(remote_cache_time_delta)) + statsd.incr(f"{statsd_path}.remote_cache.querycount", remote_cache_count_delta) startup_output = "" if 'startup_time_delta' in log_data and log_data["startup_time_delta"] > 0.005: - startup_output = " (+start: %s)" % (format_timedelta(log_data["startup_time_delta"]),) + startup_output = " (+start: {})".format(format_timedelta(log_data["startup_time_delta"])) bugdown_output = "" if 'bugdown_time_start' in log_data: @@ -171,32 +170,30 @@ def write_log_line(log_data: MutableMapping[str, Any], path: str, method: str, r log_data['bugdown_requests_restarted']) if (bugdown_time_delta > 0.005): - bugdown_output = " (md: %s/%s)" % (format_timedelta(bugdown_time_delta), - bugdown_count_delta) + bugdown_output = f" (md: {format_timedelta(bugdown_time_delta)}/{bugdown_count_delta})" if not suppress_statsd: - statsd.timing("%s.markdown.time" % (statsd_path,), timedelta_ms(bugdown_time_delta)) - statsd.incr("%s.markdown.count" % (statsd_path,), bugdown_count_delta) + statsd.timing(f"{statsd_path}.markdown.time", timedelta_ms(bugdown_time_delta)) + statsd.incr(f"{statsd_path}.markdown.count", bugdown_count_delta) # Get the amount of time spent doing database queries db_time_output = "" queries = connection.connection.queries if connection.connection is not None else [] if len(queries) > 0: query_time = sum(float(query.get('time', 0)) for query in queries) - db_time_output = " (db: %s/%sq)" % (format_timedelta(query_time), - len(queries)) + db_time_output = f" (db: {format_timedelta(query_time)}/{len(queries)}q)" if not suppress_statsd: # Log ms, db ms, and num queries to statsd - statsd.timing("%s.dbtime" % (statsd_path,), timedelta_ms(query_time)) - statsd.incr("%s.dbq" % (statsd_path,), len(queries)) - statsd.timing("%s.total" % (statsd_path,), timedelta_ms(time_delta)) + statsd.timing(f"{statsd_path}.dbtime", timedelta_ms(query_time)) + statsd.incr(f"{statsd_path}.dbq", len(queries)) + statsd.timing(f"{statsd_path}.total", timedelta_ms(time_delta)) if 'extra' in log_data: - extra_request_data = " %s" % (log_data['extra'],) + extra_request_data = " {}".format(log_data['extra']) else: extra_request_data = "" - logger_client = "(%s via %s)" % (requestor_for_logs, client_name) + logger_client = f"({requestor_for_logs} via 
{client_name})" logger_timing = ('%5s%s%s%s%s%s %s' % (format_timedelta(time_delta), optional_orig_delta, remote_cache_output, bugdown_output, @@ -214,7 +211,7 @@ def write_log_line(log_data: MutableMapping[str, Any], path: str, method: str, r if settings.PROFILE_ALL_REQUESTS: log_data["prof"].disable() - profile_path = "/tmp/profile.data.%s.%s" % (path.split("/")[-1], int(time_delta * 1000),) + profile_path = "/tmp/profile.data.{}.{}".format(path.split("/")[-1], int(time_delta * 1000)) log_data["prof"].dump_stats(profile_path) # Log some additional data whenever we return certain 40x errors @@ -284,7 +281,7 @@ class LogRequests(MiddlewareMixin): if hasattr(request, 'user') and hasattr(request.user, 'format_requestor_for_logs'): requestor_for_logs = request.user.format_requestor_for_logs() else: - requestor_for_logs = "unauth@%s" % (get_subdomain(request) or 'root',) + requestor_for_logs = "unauth@{}".format(get_subdomain(request) or 'root') try: client = request.client.name except Exception: diff --git a/zerver/migrations/0037_disallow_null_string_id.py b/zerver/migrations/0037_disallow_null_string_id.py index a4a8a86c70..823aefe925 100644 --- a/zerver/migrations/0037_disallow_null_string_id.py +++ b/zerver/migrations/0037_disallow_null_string_id.py @@ -22,7 +22,7 @@ def set_string_id_using_domain(apps: StateApps, schema_editor: DatabaseSchemaEdi continue except IntegrityError: pass - raise RuntimeError("Unable to find a good string_id for realm %s" % (realm,)) + raise RuntimeError(f"Unable to find a good string_id for realm {realm}") class Migration(migrations.Migration): diff --git a/zerver/migrations/0177_user_message_add_and_index_is_private_flag.py b/zerver/migrations/0177_user_message_add_and_index_is_private_flag.py index 637a23bf9d..efc0d92ea1 100644 --- a/zerver/migrations/0177_user_message_add_and_index_is_private_flag.py +++ b/zerver/migrations/0177_user_message_add_and_index_is_private_flag.py @@ -42,7 +42,7 @@ def reset_is_private_flag( i += 1 if (i % 50 == 0 or i == total): percent = round((i / total) * 100, 2) - print("Processed %s/%s %s%%" % (i, total, percent)) + print(f"Processed {i}/{total} {percent}%") sys.stdout.flush() class Migration(migrations.Migration): diff --git a/zerver/migrations/0182_set_initial_value_is_private_flag.py b/zerver/migrations/0182_set_initial_value_is_private_flag.py index c326109d58..4b40daa396 100644 --- a/zerver/migrations/0182_set_initial_value_is_private_flag.py +++ b/zerver/migrations/0182_set_initial_value_is_private_flag.py @@ -36,7 +36,7 @@ def set_initial_value_of_is_private_flag( percent = round((processed / total) * 100, 2) else: percent = 100.00 - print("Processed %s/%s %s%%" % (processed, total, percent)) + print(f"Processed {processed}/{total} {percent}%") sys.stdout.flush() class Migration(migrations.Migration): diff --git a/zerver/migrations/0257_fix_has_link_attribute.py b/zerver/migrations/0257_fix_has_link_attribute.py index 4370749541..8f031795f4 100644 --- a/zerver/migrations/0257_fix_has_link_attribute.py +++ b/zerver/migrations/0257_fix_has_link_attribute.py @@ -22,7 +22,7 @@ def process_batch(apps: StateApps, id_start: int, id_end: int, last_id: int) -> continue if message.id % 1000 == 0: - print("Processed %s / %s" % (message.id, last_id)) + print(f"Processed {message.id} / {last_id}") # Because we maintain the Attachment table, this should be as # simple as just just checking if there's any Attachment diff --git a/zerver/models.py b/zerver/models.py index b86b0cf1bc..5d65b10baa 100644 --- a/zerver/models.py +++ 
b/zerver/models.py @@ -102,10 +102,10 @@ def get_display_recipient(recipient: 'Recipient') -> DisplayRecipientT: ) def get_realm_emoji_cache_key(realm: 'Realm') -> str: - return 'realm_emoji:%s' % (realm.id,) + return f'realm_emoji:{realm.id}' def get_active_realm_emoji_cache_key(realm: 'Realm') -> str: - return 'active_realm_emoji:%s' % (realm.id,) + return f'active_realm_emoji:{realm.id}' # This simple call-once caching saves ~500us in auth_enabled_helper, # which is a significant optimization for common_context. Note that @@ -420,7 +420,7 @@ class Realm(models.Model): return ret def __str__(self) -> str: - return "<Realm: %s %s>" % (self.string_id, self.id) + return f"<Realm: {self.string_id} {self.id}>" @cache_with_key(get_realm_emoji_cache_key, timeout=3600*24*7) def get_emoji(self) -> Dict[str, Dict[str, Iterable[str]]]: @@ -524,7 +524,7 @@ class Realm(models.Model): def host_for_subdomain(subdomain: str) -> str: if subdomain == Realm.SUBDOMAIN_FOR_ROOT_DOMAIN: return settings.EXTERNAL_HOST - default_host = "%s.%s" % (subdomain, settings.EXTERNAL_HOST) + default_host = f"{subdomain}.{settings.EXTERNAL_HOST}" return settings.REALM_HOSTS.get(subdomain, default_host) @property @@ -614,11 +614,7 @@ class RealmEmoji(models.Model): PATH_ID_TEMPLATE = "{realm_id}/emoji/images/{emoji_file_name}" def __str__(self) -> str: - return "<RealmEmoji(%s): %s %s %s %s>" % (self.realm.string_id, - self.id, - self.name, - self.deactivated, - self.file_name) + return f"<RealmEmoji({self.realm.string_id}): {self.id} {self.name} {self.deactivated} {self.file_name}>" def get_realm_emoji_dicts(realm: Realm, only_active_emojis: bool=False) -> Dict[str, Dict[str, Any]]: @@ -694,10 +690,10 @@ class RealmFilter(models.Model): unique_together = ("realm", "pattern") def __str__(self) -> str: - return "<RealmFilter(%s): %s %s>" % (self.realm.string_id, self.pattern, self.url_format_string) + return f"<RealmFilter({self.realm.string_id}): {self.pattern} {self.url_format_string}>" def get_realm_filters_cache_key(realm_id: int) -> str: - return '%s:all_realm_filters:%s' % (cache.KEY_PREFIX, realm_id,) + return f'{cache.KEY_PREFIX}:all_realm_filters:{realm_id}' # We have a per-process cache to avoid doing 1000 remote cache queries during page load per_request_realm_filters_cache: Dict[int, List[Tuple[str, str, int]]] = {} @@ -1104,7 +1100,7 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): return False def __str__(self) -> str: - return "<UserProfile: %s %s>" % (self.email, self.realm) + return f"<UserProfile: {self.email} {self.realm}>" @property def is_new_member(self) -> bool: @@ -1432,7 +1428,7 @@ class Stream(models.Model): first_message_id: Optional[int] = models.IntegerField(null=True, db_index=True) def __str__(self) -> str: - return "<Stream: %s>" % (self.name,) + return f"<Stream: {self.name}>" def is_public(self) -> bool: # All streams are private in Zephyr mirroring realms.
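Aside, not part of the diff: the models.py hunks above all follow the same mechanical rewrite, so a minimal sketch of the pattern may be useful. The `Thing` class and `cache_key` helper below are hypothetical stand-ins rather than Zulip code; the asserts show that interpolation-only `%` formatting maps directly onto f-strings, that format specs move after a colon, and that literal braces must be doubled inside an f-string.

class Thing:
    def __init__(self, string_id: str, id: int) -> None:
        self.string_id = string_id
        self.id = id

    def __str__(self) -> str:
        # Before: return "<Thing: %s %s>" % (self.string_id, self.id)
        return f"<Thing: {self.string_id} {self.id}>"

def cache_key(thing_id: int) -> str:
    # Before: return 'thing:%s' % (thing_id,)
    return f'thing:{thing_id}'

assert str(Thing("zulip", 1)) == "<Thing: zulip 1>"
assert cache_key(7) == "thing:7"
assert f"{0.1234:.3f}" == "%.3f" % (0.1234,)  # format specs become {value:.3f}
assert f"{{realm_id}}" == "{realm_id}"        # literal braces are doubled in f-strings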
@@ -1502,16 +1498,13 @@ class MutedTopic(models.Model): unique_together = ('user_profile', 'stream', 'topic_name') def __str__(self) -> str: - return ("<MutedTopic: (%s, %s, %s, %s)>" % (self.user_profile.email, - self.stream.name, - self.topic_name, - self.date_muted)) + return (f"<MutedTopic: ({self.user_profile.email}, {self.stream.name}, {self.topic_name}, {self.date_muted})>") class Client(models.Model): name: str = models.CharField(max_length=30, db_index=True, unique=True) def __str__(self) -> str: - return "<Client: %s>" % (self.name,) + return f"<Client: {self.name}>" get_client_cache: Dict[str, Client] = {} def get_client(name: str) -> Client: @@ -1524,7 +1517,7 @@ def get_client(name: str) -> Client: return get_client_cache[cache_name] def get_client_cache_key(name: str) -> str: - return 'get_client:%s' % (make_safe_digest(name),) + return f'get_client:{make_safe_digest(name)}' @cache_with_key(get_client_cache_key, timeout=3600*24*7) def get_client_remote_cache(name: str) -> Client: @@ -1660,8 +1653,7 @@ class AbstractMessage(models.Model): def __str__(self) -> str: display_recipient = get_display_recipient(self.recipient) - return "<%s: %s / %s / %s>" % (self.__class__.__name__, display_recipient, - self.subject, self.sender) + return f"<{self.__class__.__name__}: {display_recipient} / {self.subject} / {self.sender}>" class ArchiveTransaction(models.Model): timestamp: datetime.datetime = models.DateTimeField(default=timezone_now, db_index=True) @@ -1859,7 +1851,7 @@ class Reaction(AbstractReaction): return Reaction.objects.filter(message_id__in=needed_ids).values(*fields) def __str__(self) -> str: - return "%s / %s / %s" % (self.user_profile.email, self.message.id, self.emoji_name) + return f"{self.user_profile.email} / {self.message.id} / {self.emoji_name}" class ArchivedReaction(AbstractReaction): message: ArchivedMessage = models.ForeignKey(ArchivedMessage, on_delete=CASCADE) @@ -1973,8 +1965,7 @@ class AbstractUserMessage(models.Model): def __str__(self) -> str: display_recipient = get_display_recipient(self.message.recipient) - return "<%s: %s / %s (%s)>" % (self.__class__.__name__, display_recipient, - self.user_profile.email, self.flags_list()) + return f"<{self.__class__.__name__}: {display_recipient} / {self.user_profile.email} ({self.flags_list()})>" class UserMessage(AbstractUserMessage): @@ -2020,7 +2011,7 @@ class AbstractAttachment(models.Model): abstract = True def __str__(self) -> str: - return "<%s: %s>" % (self.__class__.__name__, self.file_name,) + return f"<{self.__class__.__name__}: {self.file_name}>" class ArchivedAttachment(AbstractAttachment): @@ -2128,7 +2119,7 @@ class Subscription(models.Model): unique_together = ("user_profile", "recipient") def __str__(self) -> str: - return "<Subscription(%s) -> %s>" % (self.user_profile, self.recipient) + return f"<Subscription({self.user_profile}) -> {self.recipient}>" # Subscription fields included whenever a Subscription object is provided to # Zulip clients via the API. A few details worth noting: @@ -2329,7 +2320,7 @@ def get_huddle_hash(id_list: List[int]) -> str: return make_safe_digest(hash_key) def huddle_hash_cache_key(huddle_hash: str) -> str: - return "huddle_by_hash:%s" % (huddle_hash,) + return f"huddle_by_hash:{huddle_hash}" def get_huddle(id_list: List[int]) -> Huddle: huddle_hash = get_huddle_hash(id_list) @@ -2413,7 +2404,7 @@ class UserPresence(models.Model): elif status == UserPresence.IDLE: return 'idle' else: # nocoverage # TODO: Add a presence test to cover this.
- raise ValueError('Unknown status: %s' % (status,)) + raise ValueError(f'Unknown status: {status}') @staticmethod def to_presence_dict(client_name: str, status: int, dt: datetime.datetime, push_enabled: bool=False, @@ -2510,9 +2501,7 @@ class ScheduledEmail(AbstractScheduledJob): type: int = models.PositiveSmallIntegerField() def __str__(self) -> str: - return "" % (self.type, - self.address or list(self.users.all()), - self.scheduled_timestamp) + return f"" class MissedMessageEmailAddress(models.Model): EXPIRY_SECONDS = 60 * 60 * 24 * 5 @@ -2570,9 +2559,7 @@ class ScheduledMessage(models.Model): def __str__(self) -> str: display_recipient = get_display_recipient(self.recipient) - return "" % (display_recipient, - self.subject, self.sender, - self.scheduled_timestamp) + return f"" EMAIL_TYPES = { 'followup_day1': ScheduledEmail.WELCOME, @@ -2680,13 +2667,10 @@ class RealmAuditLog(AbstractRealmAuditLog): def __str__(self) -> str: if self.modified_user is not None: - return "" % ( - self.modified_user, self.event_type, self.event_time, self.id) + return f"" if self.modified_stream is not None: - return "" % ( - self.modified_stream, self.event_type, self.event_time, self.id) - return "" % ( - self.realm, self.event_type, self.event_time, self.id) + return f"" + return f"" class UserHotspot(models.Model): user: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) @@ -2827,7 +2811,7 @@ class CustomProfileFieldValue(models.Model): unique_together = ('user_profile', 'field') def __str__(self) -> str: - return "" % (self.user_profile, self.field, self.value) + return f"" # Interfaces for services # They provide additional functionality like parsing message to obtain query url, data to be sent to url, diff --git a/zerver/openapi/markdown_extension.py b/zerver/openapi/markdown_extension.py index 19f4ecb3cd..4c692ee344 100644 --- a/zerver/openapi/markdown_extension.py +++ b/zerver/openapi/markdown_extension.py @@ -269,7 +269,7 @@ def generate_curl_example(endpoint: str, method: str, raise AssertionError("Unhandled securityScheme. Please update the code to handle this scheme.") if authentication_required: - lines.append(" -u %s:%s" % (auth_email, auth_api_key)) + lines.append(f" -u {auth_email}:{auth_api_key}") for param in operation_params: if param["in"] == "path": diff --git a/zerver/openapi/python_examples.py b/zerver/openapi/python_examples.py index b32bfa58c2..34bb899194 100644 --- a/zerver/openapi/python_examples.py +++ b/zerver/openapi/python_examples.py @@ -888,7 +888,7 @@ def upload_file(client: Client) -> None: "type": "stream", "to": "Denmark", "topic": "Castle", - "content": "Check out [this picture](%s) of my castle!" % (result['uri'],) + "content": "Check out [this picture]({}) of my castle!".format(result['uri']) }) # {code_example|end} diff --git a/zerver/openapi/test_curl_examples.py b/zerver/openapi/test_curl_examples.py index 2fea31bbfb..9d1ec803ce 100644 --- a/zerver/openapi/test_curl_examples.py +++ b/zerver/openapi/test_curl_examples.py @@ -39,7 +39,7 @@ def test_generated_curl_examples_for_success(client: Client) -> None: curl_command_text = curl_command_text.replace( "BOT_EMAIL_ADDRESS:BOT_API_KEY", authentication_line) - print("Testing %s ..." % (curl_command_text.split("\n")[0],)) + print("Testing {} ...".format(curl_command_text.split("\n")[0])) # Turn the text into an arguments list. 
generated_curl_command = [ diff --git a/zerver/templatetags/app_filters.py b/zerver/templatetags/app_filters.py index 9683a340e2..094c98c9ee 100644 --- a/zerver/templatetags/app_filters.py +++ b/zerver/templatetags/app_filters.py @@ -44,16 +44,16 @@ def display_list(values: List[str], display_limit: int) -> str: """ if len(values) == 1: # One value, show it. - display_string = "%s" % (values[0],) + display_string = f"{values[0]}" elif len(values) <= display_limit: # Fewer than `display_limit` values, show all of them. display_string = ", ".join( - "%s" % (value,) for value in values[:-1]) - display_string += " and %s" % (values[-1],) + f"{value}" for value in values[:-1]) + display_string += f" and {values[-1]}" else: # More than `display_limit` values, only mention a few. display_string = ", ".join( - "%s" % (value,) for value in values[:display_limit]) + f"{value}" for value in values[:display_limit]) display_string += and_n_others(values, display_limit) return display_string diff --git a/zerver/tests/test_audit_log.py b/zerver/tests/test_audit_log.py index 6d11dac0f5..34ed744067 100644 --- a/zerver/tests/test_audit_log.py +++ b/zerver/tests/test_audit_log.py @@ -97,7 +97,7 @@ class TestRealmAuditLog(ZulipTestCase): # Test the RealmAuditLog stringification audit_entry = RealmAuditLog.objects.get(event_type=RealmAuditLog.USER_EMAIL_CHANGED, event_time__gte=now) - self.assertTrue(str(audit_entry).startswith(" %s " % (user.email, user.realm, RealmAuditLog.USER_EMAIL_CHANGED))) + self.assertTrue(str(audit_entry).startswith(f" {RealmAuditLog.USER_EMAIL_CHANGED} ")) def test_change_avatar_source(self) -> None: now = timezone_now() diff --git a/zerver/tests/test_auth_backends.py b/zerver/tests/test_auth_backends.py index 239f61ee07..aeffd24eb7 100644 --- a/zerver/tests/test_auth_backends.py +++ b/zerver/tests/test_auth_backends.py @@ -621,7 +621,7 @@ class DesktopFlowTestingLib(ZulipTestCase): browser_url = soup.find("a", href=True)["href"] decrypted_key = self.verify_desktop_data_and_return_key(desktop_data, desktop_flow_otp) - self.assertEqual(browser_url, 'http://zulip.testserver/accounts/login/subdomain/%s' % (decrypted_key,)) + self.assertEqual(browser_url, f'http://zulip.testserver/accounts/login/subdomain/{decrypted_key}') result = self.client_get(browser_url) self.assertEqual(result.status_code, 302) @@ -706,7 +706,7 @@ class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase): params['next'] = next params['multiuse_object_key'] = multiuse_object_key if len(params) > 0: - url += "?%s" % (urllib.parse.urlencode(params),) + url += f"?{urllib.parse.urlencode(params)}" if user_agent is not None: headers['HTTP_USER_AGENT'] = user_agent @@ -773,7 +773,7 @@ class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase): result = self.client_get(url, **headers) - expected_result_url_prefix = 'http://testserver/login/%s/' % (self.backend.name,) + expected_result_url_prefix = f'http://testserver/login/{self.backend.name}/' if settings.SOCIAL_AUTH_SUBDOMAIN is not None: expected_result_url_prefix = ('http://%s.testserver/login/%s/' % (settings.SOCIAL_AUTH_SUBDOMAIN, self.backend.name,)) @@ -1405,10 +1405,10 @@ class SAMLAuthBackendTest(SocialAuthBase): result = self.client_get(url, **headers) - expected_result_url_prefix = 'http://testserver/login/%s/' % (self.backend.name,) + expected_result_url_prefix = f'http://testserver/login/{self.backend.name}/' if settings.SOCIAL_AUTH_SUBDOMAIN is not None: expected_result_url_prefix = ( - 'http://%s.testserver/login/%s/' % (settings.SOCIAL_AUTH_SUBDOMAIN, 
self.backend.name) + f'http://{settings.SOCIAL_AUTH_SUBDOMAIN}.testserver/login/{self.backend.name}/' ) if result.status_code != 302 or not result.url.startswith(expected_result_url_prefix): @@ -1600,7 +1600,7 @@ class SAMLAuthBackendTest(SocialAuthBase): self.assertEqual(result.status_code, 302) self.assertEqual('/login/', result.url) self.assertEqual(m.output, [self.logger_output( - "/complete/saml/: Can't figure out subdomain for this authentication request. relayed_params: %s" % ("{}",), + "/complete/saml/: Can't figure out subdomain for this authentication request. relayed_params: {}".format("{}"), 'info' )]) @@ -1648,13 +1648,13 @@ class SAMLAuthBackendTest(SocialAuthBase): result = self.client_get('/login/saml/') self.assertEqual(result.status_code, 302) self.assertEqual('/login/', result.url) - self.assertEqual(m.output, [self.logger_output("/login/saml/ : Bad idp param: KeyError: %s." % ("'idp'",), 'info')]) + self.assertEqual(m.output, [self.logger_output("/login/saml/ : Bad idp param: KeyError: {}.".format("'idp'"), 'info')]) with self.assertLogs(self.logger_string, level='INFO') as m: result = self.client_get('/login/saml/?idp=bad_idp') self.assertEqual(result.status_code, 302) self.assertEqual('/login/', result.url) - self.assertEqual(m.output, [self.logger_output("/login/saml/ : Bad idp param: KeyError: %s." % ("'bad_idp'",), 'info')]) + self.assertEqual(m.output, [self.logger_output("/login/saml/ : Bad idp param: KeyError: {}.".format("'bad_idp'"), 'info')]) def test_social_auth_invalid_email(self) -> None: """ @@ -2125,7 +2125,7 @@ class GitHubAuthBackendTest(SocialAuthBase): self.assertEqual(result.status_code, 302) self.assertEqual(result.url, "/login/") self.assertEqual(m.output, [self.logger_output( - "Social auth (%s) failed because user has no verified emails" % ('GitHub',), + "Social auth ({}) failed because user has no verified emails".format('GitHub'), "warning" )]) @@ -2454,7 +2454,7 @@ class GitHubAuthBackendTest(SocialAuthBase): self.assertEqual(result.status_code, 302) self.assertEqual(result.url, "/login/") self.assertEqual(m.output, [self.logger_output( - "Social auth (%s) failed because user has no verified emails associated with the account" % ("GitHub",), + "Social auth ({}) failed because user has no verified emails associated with the account".format("GitHub"), "warning" )]) @@ -2504,7 +2504,7 @@ class GoogleAuthBackendTest(SocialAuthBase): self.assertEqual(result.status_code, 302) self.assertEqual(result.url, "/login/") self.assertEqual(m.output, [self.logger_output( - "Social auth (%s) failed because user has no verified emails" % ("Google",), + "Social auth ({}) failed because user has no verified emails".format("Google"), "warning" )]) @@ -2946,7 +2946,7 @@ class ExternalMethodDictsTests(ZulipTestCase): 'id="{}_auth_button_google"' ] for name in saml_idp_names: - expected_button_id_strings.append('id="{}_auth_button_saml:%s"' % (name,)) + expected_button_id_strings.append(f'id="{{}}_auth_button_saml:{name}"') result = self.client_get("/login/") self.assert_in_success_response([string.format("login") for string in expected_button_id_strings], diff --git a/zerver/tests/test_bots.py b/zerver/tests/test_bots.py index b8edc1327d..9c31e1cdec 100644 --- a/zerver/tests/test_bots.py +++ b/zerver/tests/test_bots.py @@ -741,7 +741,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin): # A regular user cannot reactivate a generic bot self.assert_num_bots_equal(0) - result = self.client_post("/json/users/%s/reactivate" % (bot_user.id,)) + result = 
self.client_post(f"/json/users/{bot_user.id}/reactivate") self.assert_json_error(result, 'Must be an organization administrator') self.assert_num_bots_equal(0) diff --git a/zerver/tests/test_decorators.py b/zerver/tests/test_decorators.py index ca58fe8a6a..45cd481cc0 100644 --- a/zerver/tests/test_decorators.py +++ b/zerver/tests/test_decorators.py @@ -578,7 +578,7 @@ body: def test_authenticated_rest_api_view_errors(self) -> None: user_profile = self.example_user("hamlet") api_key = get_api_key(user_profile) - credentials = "%s:%s" % (user_profile.email, api_key) + credentials = f"{user_profile.email}:{api_key}" api_auth = 'Digest ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8') result = self.client_post('/api/v1/external/zendesk', {}, HTTP_AUTHORIZATION=api_auth) @@ -771,7 +771,7 @@ class ValidatorTestCase(TestCase): self.assertEqual(to_non_negative_int('-1')) with self.assertRaisesRegex(ValueError, re.escape('5 is too large (max 4)')): self.assertEqual(to_non_negative_int('5', max_int_size=4)) - with self.assertRaisesRegex(ValueError, re.escape('%s is too large (max %s)' % (2**32, 2**32-1))): + with self.assertRaisesRegex(ValueError, re.escape(f'{2**32} is too large (max {2**32-1})')): self.assertEqual(to_non_negative_int(str(2**32))) def test_to_positive_or_allowed_int(self) -> None: @@ -1055,7 +1055,7 @@ class DeactivatedRealmTest(ZulipTestCase): do_deactivate_realm(get_realm("zulip")) user_profile = self.example_user("hamlet") api_key = get_api_key(user_profile) - url = "/api/v1/external/jira?api_key=%s&stream=jira_custom" % (api_key,) + url = f"/api/v1/external/jira?api_key={api_key}&stream=jira_custom" data = self.webhook_fixture_data('jira', 'created_v2') result = self.client_post(url, data, content_type="application/json") @@ -1234,7 +1234,7 @@ class InactiveUserTest(ZulipTestCase): do_deactivate_user(user_profile) api_key = get_api_key(user_profile) - url = "/api/v1/external/jira?api_key=%s&stream=jira_custom" % (api_key,) + url = f"/api/v1/external/jira?api_key={api_key}&stream=jira_custom" data = self.webhook_fixture_data('jira', 'created_v2') result = self.client_post(url, data, content_type="application/json") @@ -1725,7 +1725,7 @@ class CacheTestCase(ZulipTestCase): @cachify def greet(first_name: str, last_name: str) -> str: - msg = '%s %s %s' % (greeting, first_name, last_name) + msg = f'{greeting} {first_name} {last_name}' work_log.append(msg) return msg diff --git a/zerver/tests/test_email_mirror.py b/zerver/tests/test_email_mirror.py index 3621a8dd5a..d4b8023f8d 100644 --- a/zerver/tests/test_email_mirror.py +++ b/zerver/tests/test_email_mirror.py @@ -339,8 +339,8 @@ class TestStreamEmailMessagesSuccess(ZulipTestCase): process_message(incoming_valid_message) message = most_recent_message(user_profile) - self.assertEqual(message.content, "From: %s\n%s" % (self.example_email('hamlet'), - "TestStreamEmailMessages Body")) + self.assertEqual(message.content, "From: {}\n{}".format(self.example_email('hamlet'), + "TestStreamEmailMessages Body")) self.assertEqual(get_display_recipient(message.recipient), stream.name) self.assertEqual(message.topic_name(), incoming_valid_message['Subject']) @@ -364,8 +364,8 @@ class TestStreamEmailMessagesSuccess(ZulipTestCase): process_message(incoming_valid_message) message = most_recent_message(user_profile) - self.assertEqual(message.content, "From: %s\n%s" % ('Test Useróąę ', - "TestStreamEmailMessages Body")) + self.assertEqual(message.content, "From: {}\n{}".format('Test Useróąę ', + "TestStreamEmailMessages Body")) 
self.assertEqual(get_display_recipient(message.recipient), stream.name) self.assertEqual(message.topic_name(), incoming_valid_message['Subject']) @@ -494,7 +494,7 @@ class TestEmailMirrorMessagesWithAttachments(ZulipTestCase): target_realm=user_profile.realm) message = most_recent_message(user_profile) - self.assertEqual(message.content, "Test body\n[%s](https://test_url)" % (utf8_filename,)) + self.assertEqual(message.content, f"Test body\n[{utf8_filename}](https://test_url)") def test_message_with_valid_nested_attachment(self) -> None: user_profile = self.example_user('hamlet') diff --git a/zerver/tests/test_email_notifications.py b/zerver/tests/test_email_notifications.py index 8e3dfef2e3..281fad1835 100644 --- a/zerver/tests/test_email_notifications.py +++ b/zerver/tests/test_email_notifications.py @@ -237,7 +237,7 @@ class TestMissedMessages(ZulipTestCase): from_email = formataddr(("Zulip missed messages", FromAddress.NOREPLY)) self.assertEqual(len(mail.outbox), 1) if send_as_user: - from_email = '"%s" <%s>' % (othello.full_name, othello.email) + from_email = f'"{othello.full_name}" <{othello.email}>' self.assertEqual(msg.from_email, from_email) self.assertEqual(msg.subject, email_subject) self.assertEqual(len(msg.reply_to), 1) @@ -678,9 +678,8 @@ class TestMissedMessages(ZulipTestCase): self.example_user('othello'), self.example_user('hamlet'), 'Extremely personal message with a realm emoji :green_tick:!') realm_emoji_id = realm.get_active_emoji()['green_tick']['id'] - realm_emoji_url = "http://zulip.testserver/user_avatars/%s/emoji/images/%s.png" % ( - realm.id, realm_emoji_id,) - verify_body_include = [':green_tick:' % (realm_emoji_url,)] + realm_emoji_url = f"http://zulip.testserver/user_avatars/{realm.id}/emoji/images/{realm_emoji_id}.png" + verify_body_include = [f':green_tick:'] email_subject = 'PMs with Othello, the Moor of Venice' self._test_cases(msg_id, verify_body_include, email_subject, send_as_user=False, verify_html_body=True) diff --git a/zerver/tests/test_events.py b/zerver/tests/test_events.py index 04ceedd56c..d37eecfb16 100644 --- a/zerver/tests/test_events.py +++ b/zerver/tests/test_events.py @@ -1295,7 +1295,7 @@ class EventsRegisterTest(ZulipTestCase): error = realm_user_add_checker('events[0]', events[0]) self.assert_on_error(error) new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm) - self.assertEqual(new_user_profile.email, "user%s@zulip.testserver" % (new_user_profile.id,)) + self.assertEqual(new_user_profile.email, f"user{new_user_profile.id}@zulip.testserver") def test_alert_words_events(self) -> None: alert_words_checker = self.check_events_dict([ @@ -1646,7 +1646,7 @@ class EventsRegisterTest(ZulipTestCase): elif property_type == (int, type(None)): validator = check_int else: - raise AssertionError("Unexpected property type %s" % (property_type,)) + raise AssertionError(f"Unexpected property type {property_type}") schema_checker = self.check_events_dict([ ('type', equals('realm')), ('op', equals('update')), @@ -1655,7 +1655,7 @@ class EventsRegisterTest(ZulipTestCase): ]) if vals is None: - raise AssertionError('No test created for %s' % (name,)) + raise AssertionError(f'No test created for {name}') do_set_realm_property(self.user_profile.realm, name, vals[0]) for val in vals[1:]: state_change_expected = True @@ -1903,7 +1903,7 @@ class EventsRegisterTest(ZulipTestCase): elif property_type is int: validator = check_int else: - raise AssertionError("Unexpected property type %s" % (property_type,)) + raise 
AssertionError(f"Unexpected property type {property_type}") num_events = 1 if setting_name == "timezone": @@ -1915,7 +1915,7 @@ class EventsRegisterTest(ZulipTestCase): else: values = [False, True, False] if values is None: - raise AssertionError('No test created for %s' % (setting_name,)) + raise AssertionError(f'No test created for {setting_name}') for value in values: events = self.do_test(lambda: do_set_user_display_setting( @@ -2844,7 +2844,7 @@ class EventsRegisterTest(ZulipTestCase): ]) events = self.do_test( - lambda: self.client_delete("/json/attachments/%s" % (entry.id,)), + lambda: self.client_delete(f"/json/attachments/{entry.id}"), num_events=1, state_change_expected=False) error = schema_checker('events[0]', events[0]) self.assert_on_error(error) diff --git a/zerver/tests/test_external.py b/zerver/tests/test_external.py index 0071ff1c16..38a1886f34 100644 --- a/zerver/tests/test_external.py +++ b/zerver/tests/test_external.py @@ -93,7 +93,7 @@ class RateLimitTests(ZulipTestCase): start_time = time.time() for i in range(6): with mock.patch('time.time', return_value=(start_time + i * 0.1)): - result = self.send_api_message(user, "some stuff %s" % (i,)) + result = self.send_api_message(user, f"some stuff {i}") self.assertEqual(result.status_code, 429) json = result.json() @@ -122,5 +122,5 @@ class RateLimitTests(ZulipTestCase): self.assertEqual(result.status_code, 429) mock_warn.assert_called_with( "Deadlock trying to incr_ratelimit for %s", - "RateLimitedUser:%s:api_by_user" % (user.id,), + f"RateLimitedUser:{user.id}:api_by_user", ) diff --git a/zerver/tests/test_home.py b/zerver/tests/test_home.py index 3b546bd8e6..e5232cb5a7 100644 --- a/zerver/tests/test_home.py +++ b/zerver/tests/test_home.py @@ -257,7 +257,7 @@ class HomeTest(ZulipTestCase): for html_bit in html_bits: if html_bit not in html: - raise AssertionError('%s not in result' % (html_bit,)) + raise AssertionError(f'{html_bit} not in result') page_params = self._get_page_params(result) @@ -772,23 +772,23 @@ class HomeTest(ZulipTestCase): page_params = {"night_mode": True} add_realm_logo_fields(page_params, user_profile.realm) self.assertEqual(compute_navbar_logo_url(page_params), - "/user_avatars/%s/realm/logo.png?version=2" % (user_profile.realm_id,)) + f"/user_avatars/{user_profile.realm_id}/realm/logo.png?version=2") page_params = {"night_mode": False} add_realm_logo_fields(page_params, user_profile.realm) self.assertEqual(compute_navbar_logo_url(page_params), - "/user_avatars/%s/realm/logo.png?version=2" % (user_profile.realm_id,)) + f"/user_avatars/{user_profile.realm_id}/realm/logo.png?version=2") do_change_logo_source(user_profile.realm, Realm.LOGO_UPLOADED, night=True) page_params = {"night_mode": True} add_realm_logo_fields(page_params, user_profile.realm) self.assertEqual(compute_navbar_logo_url(page_params), - "/user_avatars/%s/realm/night_logo.png?version=2" % (user_profile.realm_id,)) + f"/user_avatars/{user_profile.realm_id}/realm/night_logo.png?version=2") page_params = {"night_mode": False} add_realm_logo_fields(page_params, user_profile.realm) self.assertEqual(compute_navbar_logo_url(page_params), - "/user_avatars/%s/realm/logo.png?version=2" % (user_profile.realm_id,)) + f"/user_avatars/{user_profile.realm_id}/realm/logo.png?version=2") # This configuration isn't super supported in the UI and is a # weird choice, but we have a test for it anyway. 
@@ -796,7 +796,7 @@ class HomeTest(ZulipTestCase): page_params = {"night_mode": True} add_realm_logo_fields(page_params, user_profile.realm) self.assertEqual(compute_navbar_logo_url(page_params), - "/user_avatars/%s/realm/night_logo.png?version=2" % (user_profile.realm_id,)) + f"/user_avatars/{user_profile.realm_id}/realm/night_logo.png?version=2") page_params = {"night_mode": False} add_realm_logo_fields(page_params, user_profile.realm) diff --git a/zerver/tests/test_management_commands.py b/zerver/tests/test_management_commands.py index c35d920332..cb47a55667 100644 --- a/zerver/tests/test_management_commands.py +++ b/zerver/tests/test_management_commands.py @@ -61,7 +61,7 @@ class TestZulipBaseCommand(ZulipTestCase): self.assertEqual(self.command.get_user(email, self.zulip_realm), user_profile) self.assertEqual(self.command.get_user(email, None), user_profile) - error_message = "The realm '%s' does not contain a user with email" % (mit_realm,) + error_message = f"The realm '{mit_realm}' does not contain a user with email" with self.assertRaisesRegex(CommandError, error_message): self.command.get_user(email, mit_realm) @@ -106,7 +106,7 @@ class TestZulipBaseCommand(ZulipTestCase): user_emails = ','.join(u.delivery_email for u in expected_user_profiles) user_profiles = self.get_users_sorted(dict(users=user_emails), None) self.assertEqual(user_profiles, expected_user_profiles) - error_message = "The realm '%s' does not contain a user with email" % (self.zulip_realm,) + error_message = f"The realm '{self.zulip_realm}' does not contain a user with email" with self.assertRaisesRegex(CommandError, error_message): self.command.get_users(dict(users=user_emails), self.zulip_realm) @@ -340,7 +340,7 @@ class TestRealmReactivationEmail(ZulipTestCase): def test_if_realm_not_deactivated(self) -> None: realm = get_realm('zulip') - with self.assertRaisesRegex(CommandError, "The realm %s is already active." 
% (realm.name,)): + with self.assertRaisesRegex(CommandError, f"The realm {realm.name} is already active."): call_command(self.COMMAND_NAME, "--realm=zulip") class TestSendToEmailMirror(ZulipTestCase): diff --git a/zerver/tests/test_messages.py b/zerver/tests/test_messages.py index 219ee98b7c..465e7b1949 100644 --- a/zerver/tests/test_messages.py +++ b/zerver/tests/test_messages.py @@ -279,7 +279,7 @@ class TopicHistoryTest(ZulipTestCase): 'mit_stream', realm=get_realm('zephyr') ) - endpoint = '/json/users/me/%s/topics' % (bad_stream.id,) + endpoint = f'/json/users/me/{bad_stream.id}/topics' result = self.client_get(endpoint, dict()) self.assert_json_error(result, 'Invalid stream id') @@ -288,7 +288,7 @@ class TopicHistoryTest(ZulipTestCase): 'private_stream', invite_only=True ) - endpoint = '/json/users/me/%s/topics' % (private_stream.id,) + endpoint = f'/json/users/me/{private_stream.id}/topics' result = self.client_get(endpoint, dict()) self.assert_json_error(result, 'Invalid stream id') @@ -3458,11 +3458,11 @@ class EditMessageTest(ZulipTestCase): messages = get_topic_messages(user_profile, old_stream, "test") self.assertEqual(len(messages), 1) - self.assertEqual(messages[0].content, "This topic was moved by @_**Iago|%s** to #**new stream>test**" % (user_profile.id,)) + self.assertEqual(messages[0].content, f"This topic was moved by @_**Iago|{user_profile.id}** to #**new stream>test**") messages = get_topic_messages(user_profile, new_stream, "test") self.assertEqual(len(messages), 4) - self.assertEqual(messages[3].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%s**" % (user_profile.id,)) + self.assertEqual(messages[3].content, f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**") def test_move_message_to_stream_change_later(self) -> None: (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics( @@ -3478,7 +3478,7 @@ class EditMessageTest(ZulipTestCase): messages = get_topic_messages(user_profile, old_stream, "test") self.assertEqual(len(messages), 2) self.assertEqual(messages[0].id, msg_id) - self.assertEqual(messages[1].content, "This topic was moved by @_**Iago|%s** to #**new stream>test**" % (user_profile.id,)) + self.assertEqual(messages[1].content, f"This topic was moved by @_**Iago|{user_profile.id}** to #**new stream>test**") messages = get_topic_messages(user_profile, new_stream, "test") self.assertEqual(len(messages), 3) @@ -3535,11 +3535,11 @@ class EditMessageTest(ZulipTestCase): messages = get_topic_messages(user_profile, old_stream, "test") self.assertEqual(len(messages), 1) - self.assertEqual(messages[0].content, "This topic was moved by @_**Iago|%s** to #**new stream>new topic**" % (user_profile.id,)) + self.assertEqual(messages[0].content, f"This topic was moved by @_**Iago|{user_profile.id}** to #**new stream>new topic**") messages = get_topic_messages(user_profile, new_stream, "new topic") self.assertEqual(len(messages), 4) - self.assertEqual(messages[3].content, "This topic was moved here from #**test move stream>test** by @_**Iago|%s**" % (user_profile.id,)) + self.assertEqual(messages[3].content, f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**") self.assert_json_success(result) def test_no_notify_move_message_to_stream(self) -> None: @@ -3599,7 +3599,7 @@ class EditMessageTest(ZulipTestCase): messages = get_topic_messages(user_profile, old_stream, "test") self.assertEqual(len(messages), 1) - 
self.assertEqual(messages[0].content, "This topic was moved by @_**Iago|%s** to #**new stream>test**" % (user_profile.id,)) + self.assertEqual(messages[0].content, f"This topic was moved by @_**Iago|{user_profile.id}** to #**new stream>test**") messages = get_topic_messages(user_profile, new_stream, "test") self.assertEqual(len(messages), 3) @@ -4144,9 +4144,9 @@ class MessageHasKeywordsTest(ZulipTestCase): sample_size = 10 realm_id = user_profile.realm_id dummy_files = [ - ('zulip.txt', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt' % (realm_id,), sample_size), - ('temp_file.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py' % (realm_id,), sample_size), - ('abc.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py' % (realm_id,), sample_size) + ('zulip.txt', f'{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt', sample_size), + ('temp_file.py', f'{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py', sample_size), + ('abc.py', f'{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py', sample_size) ] for file_name, path_id, size in dummy_files: diff --git a/zerver/tests/test_middleware.py b/zerver/tests/test_middleware.py index f62c281964..dbdf138ead 100644 --- a/zerver/tests/test_middleware.py +++ b/zerver/tests/test_middleware.py @@ -168,13 +168,13 @@ class OpenGraphTest(ZulipTestCase): decoded = response.content.decode('utf-8') bs = BeautifulSoup(decoded, features='lxml') open_graph_image = bs.select_one('meta[property="og:image"]').get('content') - self.assertEqual(open_graph_image, '%s%s' % (realm.uri, realm_icon)) + self.assertEqual(open_graph_image, f'{realm.uri}{realm_icon}') def test_login_page_realm_icon_absolute_url(self) -> None: realm = get_realm('zulip') realm.icon_source = 'U' realm.save(update_fields=['icon_source']) - icon_url = "https://foo.s3.amazonaws.com/%s/realm/icon.png?version=%s" % (realm.id, 1) + icon_url = f"https://foo.s3.amazonaws.com/{realm.id}/realm/icon.png?version={1}" with patch('zerver.lib.realm_icon.upload_backend.get_realm_icon_url', return_value=icon_url): response = self.client_get('/login/') self.assertEqual(response.status_code, 200) diff --git a/zerver/tests/test_narrow.py b/zerver/tests/test_narrow.py index 9d59452b14..c258bc2396 100644 --- a/zerver/tests/test_narrow.py +++ b/zerver/tests/test_narrow.py @@ -2259,7 +2259,7 @@ class GetOldMessagesTest(ZulipTestCase): post_params = dict(required_args[:i] + required_args[i + 1:]) result = self.client_get("/json/messages", post_params) self.assert_json_error(result, - "Missing '%s' argument" % (required_args[i][0],)) + f"Missing '{required_args[i][0]}' argument") def test_get_messages_limits(self) -> None: """ @@ -2295,7 +2295,7 @@ class GetOldMessagesTest(ZulipTestCase): ) result = self.client_get("/json/messages", post_params) self.assert_json_error(result, - "Bad value for '%s': %s" % (param, type)) + f"Bad value for '{param}': {type}") def test_bad_narrow_type(self) -> None: """ @@ -2313,7 +2313,7 @@ class GetOldMessagesTest(ZulipTestCase): post_params = dict(other_params + [("narrow", type)]) result = self.client_get("/json/messages", post_params) self.assert_json_error(result, - "Bad value for 'narrow': %s" % (type,)) + f"Bad value for 'narrow': {type}") def test_bad_narrow_operator(self) -> None: """ @@ -2535,7 +2535,7 @@ class GetOldMessagesTest(ZulipTestCase): queries = [q for q in all_queries if '/* get_messages */' in q['sql']] self.assertEqual(len(queries), 1) sql = queries[0]['sql'] - self.assertNotIn('AND message_id = %s' % (LARGER_THAN_MAX_MESSAGE_ID,), sql) + self.assertNotIn(f'AND message_id = 
{LARGER_THAN_MAX_MESSAGE_ID}', sql) self.assertIn('ORDER BY message_id ASC', sql) cond = 'WHERE user_profile_id = %d AND message_id >= %d' % ( @@ -2583,7 +2583,7 @@ class GetOldMessagesTest(ZulipTestCase): queries = [q for q in all_queries if '/* get_messages */' in q['sql']] self.assertEqual(len(queries), 1) sql = queries[0]['sql'] - self.assertNotIn('AND message_id = %s' % (LARGER_THAN_MAX_MESSAGE_ID,), sql) + self.assertNotIn(f'AND message_id = {LARGER_THAN_MAX_MESSAGE_ID}', sql) self.assertIn('ORDER BY message_id ASC', sql) cond = 'WHERE user_profile_id = %d AND message_id <= %d' % ( user_profile.id, first_unread_message_id - 1 @@ -2795,19 +2795,19 @@ recipient_id = %(recipient_id_3)s AND upper(subject) = upper(%(param_2)s))\ sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND (sender_id = {othello_id} AND recipient_id = {hamlet_recipient} OR sender_id = {hamlet_id} AND recipient_id = {othello_recipient}) AND message_id = 0) AS anon_1 ORDER BY message_id ASC' sql = sql_template.format(**query_ids) self.common_check_get_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 0, - 'narrow': '[["pm-with", "%s"]]' % (othello_email,)}, + 'narrow': f'[["pm-with", "{othello_email}"]]'}, sql) sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND (sender_id = {othello_id} AND recipient_id = {hamlet_recipient} OR sender_id = {hamlet_id} AND recipient_id = {othello_recipient}) AND message_id = 0) AS anon_1 ORDER BY message_id ASC' sql = sql_template.format(**query_ids) self.common_check_get_messages_query({'anchor': 0, 'num_before': 1, 'num_after': 0, - 'narrow': '[["pm-with", "%s"]]' % (othello_email,)}, + 'narrow': f'[["pm-with", "{othello_email}"]]'}, sql) sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND (sender_id = {othello_id} AND recipient_id = {hamlet_recipient} OR sender_id = {hamlet_id} AND recipient_id = {othello_recipient}) ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC' sql = sql_template.format(**query_ids) self.common_check_get_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 9, - 'narrow': '[["pm-with", "%s"]]' % (othello_email,)}, + 'narrow': f'[["pm-with", "{othello_email}"]]'}, sql) sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND (flags & 2) != 0 ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC' @@ -2819,7 +2819,7 @@ recipient_id = %(recipient_id_3)s AND upper(subject) = upper(%(param_2)s))\ sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND sender_id = {othello_id} ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC' sql = sql_template.format(**query_ids) self.common_check_get_messages_query({'anchor': 0, 'num_before': 0, 
'num_after': 9, - 'narrow': '[["sender", "%s"]]' % (othello_email,)}, + 'narrow': f'[["sender", "{othello_email}"]]'}, sql) sql_template = 'SELECT anon_1.message_id \nFROM (SELECT id AS message_id \nFROM zerver_message \nWHERE recipient_id = {scotland_recipient} ORDER BY zerver_message.id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC' @@ -2856,7 +2856,7 @@ recipient_id = %(recipient_id_3)s AND upper(subject) = upper(%(param_2)s))\ sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND sender_id = {hamlet_id} AND recipient_id = {hamlet_recipient} ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC' sql = sql_template.format(**query_ids) self.common_check_get_messages_query({'anchor': 0, 'num_before': 0, 'num_after': 9, - 'narrow': '[["pm-with", "%s"]]' % (hamlet_email,)}, + 'narrow': f'[["pm-with", "{hamlet_email}"]]'}, sql) sql_template = 'SELECT anon_1.message_id, anon_1.flags \nFROM (SELECT message_id, flags \nFROM zerver_usermessage JOIN zerver_message ON zerver_usermessage.message_id = zerver_message.id \nWHERE user_profile_id = {hamlet_id} AND recipient_id = {scotland_recipient} AND (flags & 2) != 0 ORDER BY message_id ASC \n LIMIT 10) AS anon_1 ORDER BY message_id ASC' diff --git a/zerver/tests/test_push_notifications.py b/zerver/tests/test_push_notifications.py index 457c3aec29..537169c682 100644 --- a/zerver/tests/test_push_notifications.py +++ b/zerver/tests/test_push_notifications.py @@ -209,7 +209,7 @@ class PushBouncerNotificationTest(BouncerTestCase): del self.API_KEYS[self.server_uuid] - credentials = "%s:%s" % ("5678-efgh", 'invalid') + credentials = "{}:{}".format("5678-efgh", 'invalid') api_auth = 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8') result = self.client_post(endpoint, {'user_id': user_id, 'token_kind': token_kind, @@ -1900,20 +1900,17 @@ class TestPushNotificationsContent(ZulipTestCase): fixtures = [ { 'name': 'realm_emoji', - 'rendered_content': '
<p>Testing <img alt=":green_tick:" class="emoji" src="/user_avatars/%s/emoji/images/green_tick.png" title="green tick"> realm emoji.</p>' % ( - realm.id,), + 'rendered_content': f'<p>Testing <img alt=":green_tick:" class="emoji" src="/user_avatars/{realm.id}/emoji/images/green_tick.png" title="green tick"> realm emoji.</p>', 'expected_output': 'Testing :green_tick: realm emoji.', }, { 'name': 'mentions', - 'rendered_content': '<p>Mentioning <span class="user-mention" data-user-id="%s">@Cordelia Lear</span>.</p>' % ( - cordelia.id,), + 'rendered_content': f'<p>Mentioning <span class="user-mention" data-user-id="{cordelia.id}">@Cordelia Lear</span>.</p>', 'expected_output': 'Mentioning @Cordelia Lear.', }, { 'name': 'stream_names', - 'rendered_content': '<p>Testing stream names <a class="stream" data-stream-id="%s" href="/#narrow/stream/Verona">#Verona</a>.</p>' % ( - stream.id,), + 'rendered_content': f'<p>Testing stream names <a class="stream" data-stream-id="{stream.id}" href="/#narrow/stream/Verona">#Verona</a>.</p>
', 'expected_output': 'Testing stream names #Verona.', }, ] diff --git a/zerver/tests/test_queue_worker.py b/zerver/tests/test_queue_worker.py index 592a876a02..5fea7fdf85 100644 --- a/zerver/tests/test_queue_worker.py +++ b/zerver/tests/test_queue_worker.py @@ -384,7 +384,7 @@ class WorkerTest(ZulipTestCase): self.assertEqual(mock_mirror_email.call_count, 4) mock_warn.assert_called_with( "Deadlock trying to incr_ratelimit for %s", - "RateLimitedRealmMirror:%s" % (realm.string_id,), + f"RateLimitedRealmMirror:{realm.string_id}", ) def test_email_sending_worker_retries(self) -> None: diff --git a/zerver/tests/test_reactions.py b/zerver/tests/test_reactions.py index 6450b51f4d..528fe087fa 100644 --- a/zerver/tests/test_reactions.py +++ b/zerver/tests/test_reactions.py @@ -154,7 +154,7 @@ class ReactionEmojiTest(ZulipTestCase): 'emoji_name': 'smile' } - result = self.api_post(sender, '/api/v1/messages/%s/reactions' % (message_id,), + result = self.api_post(sender, f'/api/v1/messages/{message_id}/reactions', reaction_info) self.assert_json_success(result) @@ -279,7 +279,7 @@ class ReactionMessageIDTest(ZulipTestCase): 'emoji_name': 'smile' } - result = self.api_post(reaction_sender, '/api/v1/messages/%s/reactions' % (pm_id,), + result = self.api_post(reaction_sender, f'/api/v1/messages/{pm_id}/reactions', reaction_info) self.assert_json_error(result, "Invalid message(s)") @@ -305,11 +305,11 @@ class ReactionTest(ZulipTestCase): 'emoji_name': 'smile' } - first = self.api_post(reaction_sender, '/api/v1/messages/%s/reactions' % (pm_id,), + first = self.api_post(reaction_sender, f'/api/v1/messages/{pm_id}/reactions', reaction_info) self.assert_json_success(first) - second = self.api_post(reaction_sender, '/api/v1/messages/%s/reactions' % (pm_id,), + second = self.api_post(reaction_sender, f'/api/v1/messages/{pm_id}/reactions', reaction_info) self.assert_json_error(second, "Reaction already exists.") @@ -333,15 +333,15 @@ class ReactionTest(ZulipTestCase): 'emoji_name': 'smile' } - add = self.api_post(reaction_sender, '/api/v1/messages/%s/reactions' % (pm_id,), + add = self.api_post(reaction_sender, f'/api/v1/messages/{pm_id}/reactions', reaction_info) self.assert_json_success(add) - first = self.api_delete(reaction_sender, '/api/v1/messages/%s/reactions' % (pm_id,), + first = self.api_delete(reaction_sender, f'/api/v1/messages/{pm_id}/reactions', reaction_info) self.assert_json_success(first) - second = self.api_delete(reaction_sender, '/api/v1/messages/%s/reactions' % (pm_id,), + second = self.api_delete(reaction_sender, f'/api/v1/messages/{pm_id}/reactions', reaction_info) self.assert_json_error(second, "Reaction doesn't exist.") @@ -414,7 +414,7 @@ class ReactionEventTest(ZulipTestCase): events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): - result = self.api_post(reaction_sender, '/api/v1/messages/%s/reactions' % (pm_id,), + result = self.api_post(reaction_sender, f'/api/v1/messages/{pm_id}/reactions', reaction_info) self.assert_json_success(result) self.assertEqual(len(events), 1) @@ -452,13 +452,13 @@ class ReactionEventTest(ZulipTestCase): 'emoji_name': 'smile' } - add = self.api_post(reaction_sender, '/api/v1/messages/%s/reactions' % (pm_id,), + add = self.api_post(reaction_sender, f'/api/v1/messages/{pm_id}/reactions', reaction_info) self.assert_json_success(add) events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): - result = self.api_delete(reaction_sender, '/api/v1/messages/%s/reactions' % (pm_id,), + result = 
self.api_delete(reaction_sender, f'/api/v1/messages/{pm_id}/reactions', reaction_info) self.assert_json_success(result) self.assertEqual(len(events), 1) @@ -486,7 +486,7 @@ class EmojiReactionBase(ZulipTestCase): if 'reaction_type' not in reaction_info: reaction_info['reaction_type'] = self.reaction_type sender = self.example_user(sender) - result = self.api_post(sender, '/api/v1/messages/%s/reactions' % (message_id,), + result = self.api_post(sender, f'/api/v1/messages/{message_id}/reactions', reaction_info) return result @@ -505,7 +505,7 @@ class EmojiReactionBase(ZulipTestCase): if 'reaction_type' not in reaction_info: reaction_info['reaction_type'] = self.reaction_type sender = self.example_user(sender) - result = self.api_delete(sender, '/api/v1/messages/%s/reactions' % (message_id,), + result = self.api_delete(sender, f'/api/v1/messages/{message_id}/reactions', reaction_info) return result @@ -852,7 +852,7 @@ class RealmEmojiReactionTests(EmojiReactionBase): } sender = self.example_user("hamlet") message_id = 1 - result = self.api_post(sender, '/api/v1/messages/%s/reactions' % (message_id,), + result = self.api_post(sender, f'/api/v1/messages/{message_id}/reactions', reaction_info) self.assert_json_error(result, "Invalid emoji type.") diff --git a/zerver/tests/test_realm.py b/zerver/tests/test_realm.py index 6c6514c5ef..3334b27f61 100644 --- a/zerver/tests/test_realm.py +++ b/zerver/tests/test_realm.py @@ -347,7 +347,7 @@ class RealmTest(ZulipTestCase): invalid_lang = "invalid_lang" req = dict(default_language=ujson.dumps(invalid_lang)) result = self.client_patch('/json/realm', req) - self.assert_json_error(result, "Invalid language '%s'" % (invalid_lang,)) + self.assert_json_error(result, f"Invalid language '{invalid_lang}'") realm = get_realm('zulip') self.assertNotEqual(realm.default_language, invalid_lang) @@ -405,20 +405,20 @@ class RealmTest(ZulipTestCase): self.assertEqual(realm.email_address_visibility, Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS) edited_user_profile = get_user_profile_by_id(user_profile.id) - self.assertEqual(edited_user_profile.email, "user%s@zulip.testserver" % (edited_user_profile.id,)) + self.assertEqual(edited_user_profile.email, f"user{edited_user_profile.id}@zulip.testserver") # Check normal user cannot access email - result = self.api_get(cordelia, "/api/v1/users/%s" % (hamlet.id,)) + result = self.api_get(cordelia, f"/api/v1/users/{hamlet.id}") self.assert_json_success(result) self.assertEqual(result.json()['user']['email'], - 'user%s@zulip.testserver' % (hamlet.id,)) + f'user{hamlet.id}@zulip.testserver') self.assertEqual(result.json()['user'].get('delivery_email'), None) # Check administrator gets delivery_email with EMAIL_ADDRESS_VISIBILITY_ADMINS - result = self.api_get(user_profile, "/api/v1/users/%s" % (hamlet.id,)) + result = self.api_get(user_profile, f"/api/v1/users/{hamlet.id}") self.assert_json_success(result) self.assertEqual(result.json()['user']['email'], - 'user%s@zulip.testserver' % (hamlet.id,)) + f'user{hamlet.id}@zulip.testserver') self.assertEqual(result.json()['user'].get('delivery_email'), hamlet.delivery_email) @@ -429,14 +429,14 @@ class RealmTest(ZulipTestCase): realm = get_realm("zulip") self.assertEqual(realm.email_address_visibility, Realm.EMAIL_ADDRESS_VISIBILITY_NOBODY) edited_user_profile = get_user_profile_by_id(user_profile.id) - self.assertEqual(edited_user_profile.email, "user%s@zulip.testserver" % (edited_user_profile.id,)) + self.assertEqual(edited_user_profile.email, f"user{edited_user_profile.id}@zulip.testserver") # 
Check even administrator doesn't get delivery_email with # EMAIL_ADDRESS_VISIBILITY_NOBODY - result = self.api_get(user_profile, "/api/v1/users/%s" % (hamlet.id,)) + result = self.api_get(user_profile, f"/api/v1/users/{hamlet.id}") self.assert_json_success(result) self.assertEqual(result.json()['user']['email'], - 'user%s@zulip.testserver' % (hamlet.id,)) + f'user{hamlet.id}@zulip.testserver') self.assertEqual(result.json()['user'].get('delivery_email'), None) def test_change_stream_creation_policy(self) -> None: @@ -511,7 +511,7 @@ class RealmTest(ZulipTestCase): for name in integer_values: invalid_value = invalid_values.get(name) if invalid_value is None: - raise AssertionError('No test created for %s' % (name,)) + raise AssertionError(f'No test created for {name}') self.do_test_invalid_integer_attribute_value(name, invalid_value) @@ -724,7 +724,7 @@ class RealmAPITest(ZulipTestCase): if Realm.property_types[name] is bool: vals = bool_tests if vals is None: - raise AssertionError('No test created for %s' % (name,)) + raise AssertionError(f'No test created for {name}') if name == 'video_chat_provider': self.set_up_db(name, vals[0][name]) diff --git a/zerver/tests/test_realm_emoji.py b/zerver/tests/test_realm_emoji.py index d7d590d019..43430d25e2 100644 --- a/zerver/tests/test_realm_emoji.py +++ b/zerver/tests/test_realm_emoji.py @@ -86,7 +86,7 @@ class RealmEmojiTest(ZulipTestCase): file_name = str(realm_emoji.id) + '.png' self.assertEqual( str(realm_emoji), - '' % (realm_emoji.id, file_name) + f'' ) def test_upload_exception(self) -> None: diff --git a/zerver/tests/test_retention.py b/zerver/tests/test_retention.py index d3f6457a8c..28a1199230 100644 --- a/zerver/tests/test_retention.py +++ b/zerver/tests/test_retention.py @@ -135,9 +135,9 @@ class ArchiveMessagesTestingBase(RetentionTestingBase): host = user_profile.realm.host realm_id = get_realm("zulip").id dummy_files = [ - ('zulip.txt', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt' % (realm_id,), sample_size), - ('temp_file.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py' % (realm_id,), sample_size), - ('abc.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py' % (realm_id,), sample_size) + ('zulip.txt', f'{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt', sample_size), + ('temp_file.py', f'{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py', sample_size), + ('abc.py', f'{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py', sample_size) ] for file_name, path_id, size in dummy_files: @@ -483,11 +483,11 @@ class MoveMessageToArchiveBase(RetentionTestingBase): sample_size = 10 realm_id = get_realm("zulip").id dummy_files = [ - ('zulip.txt', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt' % (realm_id,), sample_size), - ('temp_file.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py' % (realm_id,), sample_size), - ('abc.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py' % (realm_id,), sample_size), - ('hello.txt', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/hello.txt' % (realm_id,), sample_size), - ('new.py', '%s/31/4CBjtTLYZhk66pZrF8hnYGwc/new.py' % (realm_id,), sample_size) + ('zulip.txt', f'{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt', sample_size), + ('temp_file.py', f'{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py', sample_size), + ('abc.py', f'{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py', sample_size), + ('hello.txt', f'{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/hello.txt', sample_size), + ('new.py', f'{realm_id}/31/4CBjtTLYZhk66pZrF8hnYGwc/new.py', sample_size) ] user_profile = self.example_user('hamlet') for file_name, path_id, size in dummy_files: diff --git 
a/zerver/tests/test_settings.py b/zerver/tests/test_settings.py index bba51f2c75..e42516bd69 100644 --- a/zerver/tests/test_settings.py +++ b/zerver/tests/test_settings.py @@ -330,7 +330,7 @@ class ChangeSettingsTest(ZulipTestCase): test_value = test_changes.get(setting_name) # Error if a setting in UserProfile.property_types does not have test values if test_value is None: - raise AssertionError('No test created for %s' % (setting_name,)) + raise AssertionError(f'No test created for {setting_name}') if isinstance(test_value, int): invalid_value: Any = 100 @@ -350,7 +350,7 @@ class ChangeSettingsTest(ZulipTestCase): result = self.client_patch("/json/settings/display", data) # the json error for multiple word setting names (ex: default_language) # displays as 'Invalid language'. Using setting_name.split('_') to format. - self.assert_json_error(result, "Invalid %s" % (setting_name,)) + self.assert_json_error(result, f"Invalid {setting_name}") user_profile = self.example_user('hamlet') self.assertNotEqual(getattr(user_profile, setting_name), invalid_value) diff --git a/zerver/tests/test_signup.py b/zerver/tests/test_signup.py index 1fa8d5186a..ea90c5d7c8 100644 --- a/zerver/tests/test_signup.py +++ b/zerver/tests/test_signup.py @@ -1111,7 +1111,7 @@ class InviteUserTest(InviteUserBase): earl-test@zulip.com""", ["Denmark"])) for user in ("bob", "carol", "dave", "earl"): - self.assertTrue(find_key_by_email("%s-test@zulip.com" % (user,))) + self.assertTrue(find_key_by_email(f"{user}-test@zulip.com")) self.check_sent_emails(["bob-test@zulip.com", "carol-test@zulip.com", "dave-test@zulip.com", "earl-test@zulip.com"]) @@ -1926,7 +1926,7 @@ class MultiuseInviteTest(ZulipTestCase): result = self.client_post(invite_link, {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -2158,7 +2158,7 @@ class RealmCreationTest(ZulipTestCase): result = self.client_post('/new/', {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/new/send_confirm/%s" % (email,))) + f"/accounts/new/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -2255,7 +2255,7 @@ class RealmCreationTest(ZulipTestCase): result = self.client_post('/new/', {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/new/send_confirm/%s" % (email,))) + f"/accounts/new/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -2296,7 +2296,7 @@ class RealmCreationTest(ZulipTestCase): result = self.client_post('/new/', {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/new/send_confirm/%s" % (email,))) + f"/accounts/new/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -2508,7 +2508,7 @@ class UserSignUpTest(InviteUserBase): result = self.client_post('/accounts/home/', {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + 
f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -2540,7 +2540,7 @@ class UserSignUpTest(InviteUserBase): result = self.client_post('/accounts/home/', {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -2608,7 +2608,7 @@ class UserSignUpTest(InviteUserBase): result = self.client_post('/accounts/home/', {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -2635,7 +2635,7 @@ class UserSignUpTest(InviteUserBase): result = self.client_post('/accounts/home/', {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -2667,7 +2667,7 @@ class UserSignUpTest(InviteUserBase): result = self.client_post('/accounts/home/', {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -2698,7 +2698,7 @@ class UserSignUpTest(InviteUserBase): result = self.client_post('/accounts/home/', {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -2726,7 +2726,7 @@ class UserSignUpTest(InviteUserBase): result = self.client_post('/accounts/home/', {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -2969,7 +2969,7 @@ class UserSignUpTest(InviteUserBase): result = self.client_post('/accounts/home/', {'email': email}) self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -3081,7 +3081,7 @@ class UserSignUpTest(InviteUserBase): self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) # Visit the confirmation link. 
@@ -3149,7 +3149,7 @@ class UserSignUpTest(InviteUserBase): self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) # Visit the confirmation link. @@ -3212,7 +3212,7 @@ class UserSignUpTest(InviteUserBase): self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -3273,7 +3273,7 @@ class UserSignUpTest(InviteUserBase): self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -3393,7 +3393,7 @@ class UserSignUpTest(InviteUserBase): self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -3437,7 +3437,7 @@ class UserSignUpTest(InviteUserBase): self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -3536,7 +3536,7 @@ class UserSignUpTest(InviteUserBase): self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -3575,7 +3575,7 @@ class UserSignUpTest(InviteUserBase): self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) with self.settings( @@ -3690,7 +3690,7 @@ class UserSignUpTest(InviteUserBase): self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) @@ -3718,7 +3718,7 @@ class UserSignUpTest(InviteUserBase): self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"]) self.assert_in_response("Check your email so we can get started.", result) # Visit the confirmation link. 
@@ -3773,7 +3773,7 @@ class UserSignUpTest(InviteUserBase): self.assertEqual(result.status_code, 302) self.assertTrue(result["Location"].endswith( - "/accounts/send_confirm/%s" % (email,))) + f"/accounts/send_confirm/{email}")) result = self.client_get(result["Location"], subdomain="zephyr") self.assert_in_response("Check your email so we can get started.", result) # Visit the confirmation link. diff --git a/zerver/tests/test_slack_message_conversion.py b/zerver/tests/test_slack_message_conversion.py index 9c59808221..c3809c96de 100644 --- a/zerver/tests/test_slack_message_conversion.py +++ b/zerver/tests/test_slack_message_conversion.py @@ -74,7 +74,7 @@ class SlackMessageConversion(ZulipTestCase): self.assertEqual(full_name, 'John Doe') self.assertEqual(get_user_full_name(users[2]), 'Jane') - self.assertEqual(text, 'Hi @**%s**: How are you? #**general**' % (full_name,)) + self.assertEqual(text, f'Hi @**{full_name}**: How are you? #**general**') self.assertEqual(mentioned_users, [540]) # multiple mentioning diff --git a/zerver/tests/test_subs.py b/zerver/tests/test_subs.py index 84908abbba..a672a286a0 100644 --- a/zerver/tests/test_subs.py +++ b/zerver/tests/test_subs.py @@ -922,7 +922,7 @@ class StreamAdminTest(ZulipTestCase): "/json/users/me/subscriptions", {"subscriptions": ujson.dumps([{"name": deactivated_stream_name}])}) self.assert_json_error( - result, "Unable to access stream (%s)." % (deactivated_stream_name,)) + result, f"Unable to access stream ({deactivated_stream_name}).") def test_you_must_be_realm_admin(self) -> None: """ @@ -1804,7 +1804,7 @@ class SubscriptionPropertiesTest(ZulipTestCase): "value": "bad", "stream_id": subs[0]["stream_id"]}])}) self.assert_json_error(result, - '%s is not a boolean' % (property_name,)) + f'{property_name} is not a boolean') property_name = "in_home_view" result = self.api_post(test_user, "/api/v1/users/me/subscriptions/properties", @@ -1812,7 +1812,7 @@ class SubscriptionPropertiesTest(ZulipTestCase): "value": "bad", "stream_id": subs[0]["stream_id"]}])}) self.assert_json_error(result, - '%s is not a boolean' % (property_name,)) + f'{property_name} is not a boolean') property_name = "desktop_notifications" result = self.api_post(test_user, "/api/v1/users/me/subscriptions/properties", @@ -1820,7 +1820,7 @@ class SubscriptionPropertiesTest(ZulipTestCase): "value": "bad", "stream_id": subs[0]["stream_id"]}])}) self.assert_json_error(result, - '%s is not a boolean' % (property_name,)) + f'{property_name} is not a boolean') property_name = "audible_notifications" result = self.api_post(test_user, "/api/v1/users/me/subscriptions/properties", @@ -1828,7 +1828,7 @@ class SubscriptionPropertiesTest(ZulipTestCase): "value": "bad", "stream_id": subs[0]["stream_id"]}])}) self.assert_json_error(result, - '%s is not a boolean' % (property_name,)) + f'{property_name} is not a boolean') property_name = "push_notifications" result = self.api_post(test_user, "/api/v1/users/me/subscriptions/properties", @@ -1836,7 +1836,7 @@ class SubscriptionPropertiesTest(ZulipTestCase): "value": "bad", "stream_id": subs[0]["stream_id"]}])}) self.assert_json_error(result, - '%s is not a boolean' % (property_name,)) + f'{property_name} is not a boolean') property_name = "email_notifications" result = self.api_post(test_user, "/api/v1/users/me/subscriptions/properties", @@ -1844,7 +1844,7 @@ class SubscriptionPropertiesTest(ZulipTestCase): "value": "bad", "stream_id": subs[0]["stream_id"]}])}) self.assert_json_error(result, - '%s is not a boolean' % (property_name,)) + 
f'{property_name} is not a boolean') property_name = "wildcard_mentions_notify" result = self.api_post(test_user, "/api/v1/users/me/subscriptions/properties", @@ -1853,7 +1853,7 @@ class SubscriptionPropertiesTest(ZulipTestCase): "stream_id": subs[0]["stream_id"]}])}) self.assert_json_error(result, - "%s is not a boolean" % (property_name,)) + f"{property_name} is not a boolean") property_name = "color" result = self.api_post(test_user, "/api/v1/users/me/subscriptions/properties", @@ -1861,7 +1861,7 @@ class SubscriptionPropertiesTest(ZulipTestCase): "value": False, "stream_id": subs[0]["stream_id"]}])}) self.assert_json_error(result, - '%s is not a string' % (property_name,)) + f'{property_name} is not a string') def test_json_subscription_property_invalid_stream(self) -> None: test_user = self.example_user("hamlet") @@ -2024,7 +2024,7 @@ class SubscriptionRestApiTest(ZulipTestCase): } result = self.api_patch(user, "/api/v1/users/me/subscriptions", request) self.assert_json_error(result, - "Invalid stream name '%s'" % (invalid_stream_name,)) + f"Invalid stream name '{invalid_stream_name}'") def test_stream_name_too_long(self) -> None: user = self.example_user('hamlet') @@ -2048,7 +2048,7 @@ class SubscriptionRestApiTest(ZulipTestCase): } result = self.api_patch(user, "/api/v1/users/me/subscriptions", request) self.assert_json_error(result, - "Stream name '%s' contains NULL (0x00) characters." % (stream_name,)) + f"Stream name '{stream_name}' contains NULL (0x00) characters.") def test_compose_views_rollback(self) -> None: ''' @@ -2348,7 +2348,7 @@ class SubscriptionAPITest(ZulipTestCase): stream_name = "abc\000" result = self.common_subscribe_to_streams(self.test_user, [stream_name]) self.assert_json_error(result, - "Stream name '%s' contains NULL (0x00) characters." 
% (stream_name,)) + f"Stream name '{stream_name}' contains NULL (0x00) characters.") def test_user_settings_for_adding_streams(self) -> None: with mock.patch('zerver.models.UserProfile.can_create_streams', return_value=False): @@ -2457,7 +2457,7 @@ class SubscriptionAPITest(ZulipTestCase): invalid_stream_name = "" result = self.common_subscribe_to_streams(self.test_user, [invalid_stream_name]) self.assert_json_error(result, - "Invalid stream name '%s'" % (invalid_stream_name,)) + f"Invalid stream name '{invalid_stream_name}'") def assert_adding_subscriptions_for_principal(self, invitee_data: Union[str, int], invitee_realm: Realm, streams: List[str], invite_only: bool=False) -> None: @@ -2821,7 +2821,7 @@ class SubscriptionAPITest(ZulipTestCase): mit_user = self.mit_user('starnine') realm = get_realm("zephyr") - stream_names = ["stream_%s" % (i,) for i in range(40)] + stream_names = [f"stream_{i}" for i in range(40)] streams = [ self.make_stream(stream_name, realm=realm) for stream_name in stream_names] @@ -2857,7 +2857,7 @@ class SubscriptionAPITest(ZulipTestCase): def test_bulk_subscribe_many(self) -> None: # Create a whole bunch of streams - streams = ["stream_%s" % (i,) for i in range(20)] + streams = [f"stream_{i}" for i in range(20)] for stream_name in streams: self.make_stream(stream_name) @@ -3015,7 +3015,7 @@ class SubscriptionAPITest(ZulipTestCase): streams_to_remove = random_streams[:1] # pick only one fake stream, to make checking the error message easy result = self.client_delete("/json/users/me/subscriptions", {"subscriptions": ujson.dumps(streams_to_remove)}) - self.assert_json_error(result, "Stream(s) (%s) do not exist" % (random_streams[0],)) + self.assert_json_error(result, f"Stream(s) ({random_streams[0]}) do not exist") def helper_subscriptions_exists(self, stream: str, expect_success: bool, subscribed: bool) -> None: """ @@ -3473,7 +3473,7 @@ class StreamIdTest(ZulipTestCase): user = self.example_user('hamlet') self.login_user(user) stream = gather_subscriptions(user)[0][0] - result = self.client_get("/json/get_stream_id?stream=%s" % (stream['name'],)) + result = self.client_get("/json/get_stream_id?stream={}".format(stream['name'])) self.assert_json_success(result) self.assertEqual(result.json()['stream_id'], stream['stream_id']) @@ -3631,7 +3631,7 @@ class GetSubscribersTest(ZulipTestCase): (We also use this test to verify subscription notifications to folks who get subscribed to streams.) """ - streams = ["stream_%s" % (i,) for i in range(10)] + streams = [f"stream_{i}" for i in range(10)] for stream_name in streams: self.make_stream(stream_name) diff --git a/zerver/tests/test_thumbnail.py b/zerver/tests/test_thumbnail.py index 593bbf144a..9b09d6b4a6 100644 --- a/zerver/tests/test_thumbnail.py +++ b/zerver/tests/test_thumbnail.py @@ -23,7 +23,7 @@ class ThumbnailTest(ZulipTestCase): url_in_result = 'smart/filters:no_upscale()%s/%s/source_type/s3' sharpen_filter = '' if size: - url_in_result = '/%s/%s' % (size, url_in_result) + url_in_result = f'/{size}/{url_in_result}' sharpen_filter = ':sharpen(0.5,0.2,true)' hex_uri = base64.urlsafe_b64encode(uri.encode()).decode('utf-8') return url_in_result % (sharpen_filter, hex_uri) @@ -48,13 +48,13 @@ class ThumbnailTest(ZulipTestCase): quoted_uri = urllib.parse.quote(uri[1:], safe='') # Test full size image. 
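
In the get_file_path_urlpart helper above, only the prefix with a known value becomes an f-string; the %s slots are filled later by the trailing "% (sharpen_filter, hex_uri)" and therefore have to stay. A small sketch of that two-stage template, with made-up size and hex_uri values:

    size = "0x300"                                            # placeholder thumbnail size
    template = "smart/filters:no_upscale()%s/%s/source_type/s3"
    if size:
        template = f"/{size}/{template}"                      # the f-string only touches {...}; the %s slots survive
    part = template % (":sharpen(0.5,0.2,true)", "aGVsbG8=")  # deferred fill, with a made-up hex_uri
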
- result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 302, result) expected_part_url = get_file_path_urlpart(uri) self.assertIn(expected_part_url, result.url) # Test thumbnail size. - result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=thumbnail") self.assertEqual(result.status_code, 302, result) expected_part_url = get_file_path_urlpart(uri, '0x300') self.assertIn(expected_part_url, result.url) @@ -72,7 +72,7 @@ class ThumbnailTest(ZulipTestCase): quoted_emoji_url = urllib.parse.quote(custom_emoji_url[1:], safe='') # Test full size custom emoji image (for emoji link in messages case). - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_emoji_url,)) + result = self.client_get(f"/thumbnail?url={quoted_emoji_url}&size=full") self.assertEqual(result.status_code, 302, result) self.assertIn(custom_emoji_url, result.url) @@ -88,7 +88,7 @@ class ThumbnailTest(ZulipTestCase): # Test with another user trying to access image using thumbor. self.login('iago') - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 403, result) self.assert_in_response("You are not authorized to view this file.", result) @@ -98,13 +98,13 @@ class ThumbnailTest(ZulipTestCase): self.login('hamlet') quoted_url = urllib.parse.quote(image_url, safe='') encoded_url = base64.urlsafe_b64encode(image_url.encode()).decode('utf-8') - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_url,)) + result = self.client_get(f"/thumbnail?url={quoted_url}&size=full") self.assertEqual(result.status_code, 302, result) expected_part_url = '/smart/filters:no_upscale()/' + encoded_url + '/source_type/external' self.assertIn(expected_part_url, result.url) # Test thumbnail size. - result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_url,)) + result = self.client_get(f"/thumbnail?url={quoted_url}&size=thumbnail") self.assertEqual(result.status_code, 302, result) expected_part_url = '/0x300/smart/filters:no_upscale():sharpen(0.5,0.2,true)/' + encoded_url + '/source_type/external' self.assertIn(expected_part_url, result.url) @@ -113,23 +113,21 @@ class ThumbnailTest(ZulipTestCase): self.logout() user_profile = self.example_user("hamlet") result = self.api_get(user_profile, - "/thumbnail?url=%s&size=thumbnail" % (quoted_url,)) + f"/thumbnail?url={quoted_url}&size=thumbnail") self.assertEqual(result.status_code, 302, result) expected_part_url = '/0x300/smart/filters:no_upscale():sharpen(0.5,0.2,true)/' + encoded_url + '/source_type/external' self.assertIn(expected_part_url, result.url) # Test api endpoint with legacy API authentication. 
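
For reference, a minimal sketch of how these thumbnail tests assemble the request URL and the expected thumbor path fragment (the upload path here is made up):

    import base64
    import urllib.parse

    uri = "/user_uploads/1/aa/bb/example.png"          # made-up upload path
    quoted_uri = urllib.parse.quote(uri[1:], safe="")  # drop the leading slash, then percent-encode
    hex_uri = base64.urlsafe_b64encode(uri.encode()).decode("utf-8")
    request_url = f"/thumbnail?url={quoted_uri}&size=thumbnail"
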
user_profile = self.example_user("hamlet") - result = self.client_get("/thumbnail?url=%s&size=thumbnail&api_key=%s" % ( - quoted_url, get_api_key(user_profile))) + result = self.client_get(f"/thumbnail?url={quoted_url}&size=thumbnail&api_key={get_api_key(user_profile)}") self.assertEqual(result.status_code, 302, result) expected_part_url = '/0x300/smart/filters:no_upscale():sharpen(0.5,0.2,true)/' + encoded_url + '/source_type/external' self.assertIn(expected_part_url, result.url) # Test a second logged-in user; they should also be able to access it user_profile = self.example_user("iago") - result = self.client_get("/thumbnail?url=%s&size=thumbnail&api_key=%s" % ( - quoted_url, get_api_key(user_profile))) + result = self.client_get(f"/thumbnail?url={quoted_url}&size=thumbnail&api_key={get_api_key(user_profile)}") self.assertEqual(result.status_code, 302, result) expected_part_url = '/0x300/smart/filters:no_upscale():sharpen(0.5,0.2,true)/' + encoded_url + '/source_type/external' self.assertIn(expected_part_url, result.url) @@ -137,7 +135,7 @@ class ThumbnailTest(ZulipTestCase): # Test with another user trying to access image using thumbor. # File should be always accessible to user in case of external source self.login('iago') - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_url,)) + result = self.client_get(f"/thumbnail?url={quoted_url}&size=full") self.assertEqual(result.status_code, 302, result) expected_part_url = '/smart/filters:no_upscale()/' + encoded_url + '/source_type/external' self.assertIn(expected_part_url, result.url) @@ -156,7 +154,7 @@ class ThumbnailTest(ZulipTestCase): url_in_result = 'smart/filters:no_upscale()%s/%s/source_type/local_file' sharpen_filter = '' if size: - url_in_result = '/%s/%s' % (size, url_in_result) + url_in_result = f'/{size}/{url_in_result}' sharpen_filter = ':sharpen(0.5,0.2,true)' hex_uri = base64.urlsafe_b64encode(uri.encode()).decode('utf-8') return url_in_result % (sharpen_filter, hex_uri) @@ -177,13 +175,13 @@ class ThumbnailTest(ZulipTestCase): # We remove the forward slash infront of the `/user_uploads/` to match # bugdown behaviour. quoted_uri = urllib.parse.quote(uri[1:], safe='') - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 302, result) expected_part_url = get_file_path_urlpart(uri) self.assertIn(expected_part_url, result.url) # Test thumbnail size. - result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=thumbnail") self.assertEqual(result.status_code, 302, result) expected_part_url = get_file_path_urlpart(uri, '0x300') self.assertIn(expected_part_url, result.url) @@ -201,7 +199,7 @@ class ThumbnailTest(ZulipTestCase): # We remove the forward slash infront of the `/user_uploads/` to match # bugdown behaviour. quoted_uri = urllib.parse.quote(uri[1:], safe='') - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 302, result) expected_part_url = get_file_path_urlpart(uri) self.assertIn(expected_part_url, result.url) @@ -219,7 +217,7 @@ class ThumbnailTest(ZulipTestCase): quoted_emoji_url = urllib.parse.quote(custom_emoji_url[1:], safe='') # Test full size custom emoji image (for emoji link in messages case). 
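
The legacy-API hunk above also shows that an f-string can embed an arbitrary expression, here a function call, which is why the old two-value % tuple collapses onto one line. A sketch with a stubbed-out get_api_key and a placeholder URL:

    def get_api_key(user_profile):        # stand-in for the real helper used by the tests
        return "abcdefghijklmnop"

    quoted_url = "https%3A%2F%2Fexample.com%2Fphoto.png"   # placeholder, already percent-encoded
    url = f"/thumbnail?url={quoted_url}&size=thumbnail&api_key={get_api_key(None)}"
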
- result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_emoji_url,)) + result = self.client_get(f"/thumbnail?url={quoted_emoji_url}&size=full") self.assertEqual(result.status_code, 302, result) self.assertIn(custom_emoji_url, result.url) @@ -246,7 +244,7 @@ class ThumbnailTest(ZulipTestCase): # Test with another user trying to access image using thumbor. self.login('iago') - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 403, result) self.assert_in_response("You are not authorized to view this file.", result) @@ -255,7 +253,7 @@ class ThumbnailTest(ZulipTestCase): self.login('hamlet') uri = '/static/images/cute/turtle.png' quoted_uri = urllib.parse.quote(uri[1:], safe='') - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 302, result) self.assertEqual(uri, result.url) @@ -275,14 +273,14 @@ class ThumbnailTest(ZulipTestCase): quoted_uri = urllib.parse.quote(uri[1:], safe='') with self.settings(THUMBOR_URL=''): - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 302, result) self.assertEqual(uri, result.url) uri = 'https://www.google.com/images/srpr/logo4w.png' quoted_uri = urllib.parse.quote(uri, safe='') with self.settings(THUMBOR_URL=''): - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 302, result) base = 'https://external-content.zulipcdn.net/external_content/56c362a24201593891955ff526b3b412c0f9fcd2/68747470733a2f2f7777772e676f6f676c652e636f6d2f696d616765732f737270722f6c6f676f34772e706e67' self.assertEqual(base, result.url) @@ -290,7 +288,7 @@ class ThumbnailTest(ZulipTestCase): uri = 'http://www.google.com/images/srpr/logo4w.png' quoted_uri = urllib.parse.quote(uri, safe='') with self.settings(THUMBOR_URL=''): - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 302, result) base = 'https://external-content.zulipcdn.net/external_content/7b6552b60c635e41e8f6daeb36d88afc4eabde79/687474703a2f2f7777772e676f6f676c652e636f6d2f696d616765732f737270722f6c6f676f34772e706e67' self.assertEqual(base, result.url) @@ -298,7 +296,7 @@ class ThumbnailTest(ZulipTestCase): uri = '//www.google.com/images/srpr/logo4w.png' quoted_uri = urllib.parse.quote(uri, safe='') with self.settings(THUMBOR_URL=''): - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 302, result) base = 'https://external-content.zulipcdn.net/external_content/676530cf4b101d56f56cc4a37c6ef4d4fd9b0c03/2f2f7777772e676f6f676c652e636f6d2f696d616765732f737270722f6c6f676f34772e706e67' self.assertEqual(base, result.url) @@ -319,7 +317,7 @@ class ThumbnailTest(ZulipTestCase): quoted_uri = urllib.parse.quote(uri[1:], safe='') hex_uri = base64.urlsafe_b64encode(uri.encode()).decode('utf-8') with self.settings(THUMBOR_URL='http://test-thumborhost.com'): - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = 
self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 302, result) base = 'http://test-thumborhost.com/' self.assertEqual(base, result.url[:len(base)]) @@ -331,7 +329,7 @@ class ThumbnailTest(ZulipTestCase): url_in_result = 'smart/filters:no_upscale()%s/%s/source_type/local_file' sharpen_filter = '' if size: - url_in_result = '/%s/%s' % (size, url_in_result) + url_in_result = f'/{size}/{url_in_result}' sharpen_filter = ':sharpen(0.5,0.2,true)' hex_uri = base64.urlsafe_b64encode(uri.encode()).decode('utf-8') return url_in_result % (sharpen_filter, hex_uri) @@ -352,23 +350,23 @@ class ThumbnailTest(ZulipTestCase): # size=thumbnail should return a 0x300 sized image. # size=full should return the original resolution image. quoted_uri = urllib.parse.quote(uri[1:], safe='') - result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=thumbnail") self.assertEqual(result.status_code, 302, result) expected_part_url = get_file_path_urlpart(uri, '0x300') self.assertIn(expected_part_url, result.url) - result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=full") self.assertEqual(result.status_code, 302, result) expected_part_url = get_file_path_urlpart(uri) self.assertIn(expected_part_url, result.url) # Test with size supplied as a query parameter where size is anything # else than 'full' or 'thumbnail'. Result should be an error message. - result = self.client_get("/thumbnail?url=%s&size=480x360" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}&size=480x360") self.assertEqual(result.status_code, 403, result) self.assert_in_response("Invalid size.", result) # Test with no size param supplied. In this case as well we show an # error message. 
- result = self.client_get("/thumbnail?url=%s" % (quoted_uri,)) + result = self.client_get(f"/thumbnail?url={quoted_uri}") self.assertEqual(result.status_code, 400, "Missing 'size' argument") diff --git a/zerver/tests/test_upload.py b/zerver/tests/test_upload.py index 95a31b919b..213dd9f722 100644 --- a/zerver/tests/test_upload.py +++ b/zerver/tests/test_upload.py @@ -295,8 +295,7 @@ class FileUploadTest(UploadSerializeMixin, ZulipTestCase): ''' hamlet = self.example_user("hamlet") self.login_user(hamlet) - response = self.client_get("http://localhost:9991/user_uploads/%s/ff/gg/abc.py" % ( - hamlet.realm_id,)) + response = self.client_get(f"http://localhost:9991/user_uploads/{hamlet.realm_id}/ff/gg/abc.py") self.assertEqual(response.status_code, 404) self.assert_in_response('File not found.', response) @@ -335,8 +334,7 @@ class FileUploadTest(UploadSerializeMixin, ZulipTestCase): def test_attachment_url_without_upload(self) -> None: hamlet = self.example_user("hamlet") self.login_user(hamlet) - body = "Test message ...[zulip.txt](http://localhost:9991/user_uploads/%s/64/fake_path_id.txt)" % ( - hamlet.realm_id,) + body = f"Test message ...[zulip.txt](http://localhost:9991/user_uploads/{hamlet.realm_id}/64/fake_path_id.txt)" self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test") self.assertFalse(Attachment.objects.filter(path_id = "1/64/fake_path_id.txt").exists()) @@ -686,7 +684,7 @@ class FileUploadTest(UploadSerializeMixin, ZulipTestCase): def test_multiple_message_attachment_file_download(self) -> None: hamlet = self.example_user("hamlet") for i in range(0, 5): - stream_name = "test-subscribe %s" % (i,) + stream_name = f"test-subscribe {i}" self.make_stream(stream_name, realm=hamlet.realm, invite_only=True, history_public_to_subscribers=True) self.subscribe(hamlet, stream_name) @@ -698,7 +696,7 @@ class FileUploadTest(UploadSerializeMixin, ZulipTestCase): fp_path_id = re.sub('/user_uploads/', '', uri) for i in range(20): body = f"First message ...[zulip.txt](http://{hamlet.realm.host}/user_uploads/" + fp_path_id + ")" - self.send_stream_message(self.example_user("hamlet"), "test-subscribe %s" % (i % 5,), body, "test") + self.send_stream_message(self.example_user("hamlet"), f"test-subscribe {i % 5}", body, "test") self.logout() user = self.example_user("aaron") @@ -940,7 +938,7 @@ class AvatarTest(UploadSerializeMixin, ZulipTestCase): redirect_url = response['Location'] self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + '&foo=bar')) - response = self.client_get("/avatar/%s?foo=bar" % (cordelia.id,)) + response = self.client_get(f"/avatar/{cordelia.id}?foo=bar") redirect_url = response['Location'] self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + '&foo=bar')) @@ -954,7 +952,7 @@ class AvatarTest(UploadSerializeMixin, ZulipTestCase): redirect_url = response['Location'] self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + '&foo=bar')) - response = self.api_get(hamlet, "/avatar/%s?foo=bar" % (cordelia.id,)) + response = self.api_get(hamlet, f"/avatar/{cordelia.id}?foo=bar") redirect_url = response['Location'] self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + '&foo=bar')) @@ -964,7 +962,7 @@ class AvatarTest(UploadSerializeMixin, ZulipTestCase): self.assertTrue(redirect_url.endswith(str(avatar_url(cross_realm_bot)) + '&foo=bar')) # Test cross_realm_bot avatar access using id. 
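
As the avatar hunks above and the realm-logo hunks that follow show, attribute access and method chains are allowed inside the braces, so expressions like cordelia.id or str(self.night).lower() move directly into the string. A sketch with a stand-in user object:

    class FakeUser:
        def __init__(self, id: int) -> None:
            self.id = id

    cordelia = FakeUser(10)          # placeholder stand-in for the test user
    night = True
    assert f"/avatar/{cordelia.id}?foo=bar" == "/avatar/10?foo=bar"   # ints are converted like %s
    assert f"&night={str(night).lower()}" == "&night=true"           # method chains work in the braces
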
- response = self.api_get(hamlet, "/avatar/%s?foo=bar" % (cross_realm_bot.id,)) + response = self.api_get(hamlet, f"/avatar/{cross_realm_bot.id}?foo=bar") redirect_url = response['Location'] self.assertTrue(redirect_url.endswith(str(avatar_url(cross_realm_bot)) + '&foo=bar')) @@ -985,7 +983,7 @@ class AvatarTest(UploadSerializeMixin, ZulipTestCase): redirect_url = response['Location'] self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + '&foo=bar')) - response = self.client_get("/avatar/%s/medium?foo=bar" % (cordelia.id,)) + response = self.client_get(f"/avatar/{cordelia.id}/medium?foo=bar") redirect_url = response['Location'] self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + '&foo=bar')) @@ -996,7 +994,7 @@ class AvatarTest(UploadSerializeMixin, ZulipTestCase): redirect_url = response['Location'] self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + '&foo=bar')) - response = self.api_get(hamlet, "/avatar/%s/medium?foo=bar" % (cordelia.id,)) + response = self.api_get(hamlet, f"/avatar/{cordelia.id}/medium?foo=bar") redirect_url = response['Location'] self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + '&foo=bar')) @@ -1269,7 +1267,7 @@ class RealmIconTest(UploadSerializeMixin, ZulipTestCase): realm = get_realm('zulip') self.assert_json_success(result) self.assertIn("icon_url", result.json()) - base = '/user_avatars/%s/realm/icon.png' % (realm.id,) + base = f'/user_avatars/{realm.id}/realm/icon.png' url = result.json()['icon_url'] self.assertEqual(base, url[:len(base)]) @@ -1381,7 +1379,7 @@ class RealmLogoTest(UploadSerializeMixin, ZulipTestCase): response = self.client_get("/json/realm/logo", {'night': ujson.dumps(self.night)}) redirect_url = response['Location'] self.assertEqual(redirect_url, get_realm_logo_url(realm, self.night) + - '&night=%s' % (str(self.night).lower(),)) + f'&night={str(self.night).lower()}') def test_get_realm_logo(self) -> None: self.login('hamlet') @@ -1390,7 +1388,7 @@ class RealmLogoTest(UploadSerializeMixin, ZulipTestCase): response = self.client_get("/json/realm/logo", {'night': ujson.dumps(self.night)}) redirect_url = response['Location'] self.assertTrue(redirect_url.endswith(get_realm_logo_url(realm, self.night) + - '&night=%s' % (str(self.night).lower(),))) + f'&night={str(self.night).lower()}')) def test_valid_logos(self) -> None: """ @@ -1796,12 +1794,12 @@ class S3Test(ZulipTestCase): image_file = get_test_image_file("img.png") zerver.lib.upload.upload_backend.upload_realm_logo_image(image_file, user_profile, night) - original_path_id = os.path.join(str(user_profile.realm.id), "realm", "%s.original" % (file_name,)) + original_path_id = os.path.join(str(user_profile.realm.id), "realm", f"{file_name}.original") original_key = bucket.Object(original_path_id) image_file.seek(0) self.assertEqual(image_file.read(), original_key.get()['Body'].read()) - resized_path_id = os.path.join(str(user_profile.realm.id), "realm", "%s.png" % (file_name,)) + resized_path_id = os.path.join(str(user_profile.realm.id), "realm", f"{file_name}.png") resized_data = bucket.Object(resized_path_id).get()['Body'].read() resized_image = Image.open(io.BytesIO(resized_data)).size self.assertEqual(resized_image, (DEFAULT_AVATAR_SIZE, DEFAULT_AVATAR_SIZE)) diff --git a/zerver/tests/test_urls.py b/zerver/tests/test_urls.py index 287ff70251..60b4555fa0 100644 --- a/zerver/tests/test_urls.py +++ b/zerver/tests/test_urls.py @@ -113,7 +113,7 @@ class URLResolutionTest(TestCase): def check_function_exists(self, 
module_name: str, view: str) -> None: module = importlib.import_module(module_name) - self.assertTrue(hasattr(module, view), "View %s.%s does not exist" % (module_name, view)) + self.assertTrue(hasattr(module, view), f"View {module_name}.{view} does not exist") # Tests that all views in urls.v1_api_and_json_patterns exist def test_rest_api_url_resolution(self) -> None: diff --git a/zerver/tests/test_users.py b/zerver/tests/test_users.py index 8d10855b0b..432f41e614 100644 --- a/zerver/tests/test_users.py +++ b/zerver/tests/test_users.py @@ -57,7 +57,7 @@ def find_dict(lst: Iterable[Dict[K, V]], k: K, v: V) -> Dict[K, V]: for dct in lst: if dct[k] == v: return dct - raise AssertionError('Cannot find element in list where key %s == %s' % (k, v)) + raise AssertionError(f'Cannot find element in list where key {k} == {v}') class PermissionTest(ZulipTestCase): def test_role_setters(self) -> None: @@ -263,7 +263,7 @@ class PermissionTest(ZulipTestCase): self.assert_json_success(result) members = result.json()['members'] hamlet = find_dict(members, 'user_id', user.id) - self.assertEqual(hamlet['email'], "user%s@zulip.testserver" % (user.id,)) + self.assertEqual(hamlet['email'], f"user{user.id}@zulip.testserver") # Note that the Gravatar URL should still be computed from the # `delivery_email`; otherwise, we won't be able to serve the # user's Gravatar. @@ -291,7 +291,7 @@ class PermissionTest(ZulipTestCase): self.assert_json_success(result) members = result.json()['members'] hamlet = find_dict(members, 'user_id', user.id) - self.assertEqual(hamlet['email'], "user%s@zulip.testserver" % (user.id,)) + self.assertEqual(hamlet['email'], f"user{user.id}@zulip.testserver") self.assertEqual(hamlet['avatar_url'], get_gravatar_url(user.email, 1)) self.assertEqual(hamlet['delivery_email'], self.example_email("hamlet")) diff --git a/zerver/tornado/autoreload.py b/zerver/tornado/autoreload.py index 64ffd60a0b..a9d7a98e54 100644 --- a/zerver/tornado/autoreload.py +++ b/zerver/tornado/autoreload.py @@ -189,7 +189,7 @@ def _check_file(modify_times, module, path): try: importlib.reload(module) except Exception: - gen_log.error("Error importing %s, not reloading" % (path,)) + gen_log.error(f"Error importing {path}, not reloading") traceback.print_exc() return False return True diff --git a/zerver/tornado/event_queue.py b/zerver/tornado/event_queue.py index 9cc7b1b2d2..7e537d8ead 100644 --- a/zerver/tornado/event_queue.py +++ b/zerver/tornado/event_queue.py @@ -117,7 +117,7 @@ class ClientDescriptor: client_type_name=self.client_type_name) def __repr__(self) -> str: - return "ClientDescriptor<%s>" % (self.event_queue.id,) + return f"ClientDescriptor<{self.event_queue.id}>" @classmethod def from_dict(cls, d: MutableMapping[str, Any]) -> 'ClientDescriptor': @@ -161,7 +161,7 @@ class ClientDescriptor: def finish_current_handler(self) -> bool: if self.current_handler_id is not None: - err_msg = "Got error finishing handler for queue %s" % (self.event_queue.id,) + err_msg = f"Got error finishing handler for queue {self.event_queue.id}" try: finish_handler(self.current_handler_id, self.event_queue.id, self.event_queue.contents(), self.apply_markdown) @@ -232,8 +232,8 @@ def compute_full_event_type(event: Mapping[str, Any]) -> str: if event["type"] == "update_message_flags": if event["all"]: # Put the "all" case in its own category - return "all_flags/%s/%s" % (event["flag"], event["operation"]) - return "flags/%s/%s" % (event["operation"], event["flag"]) + return "all_flags/{}/{}".format(event["flag"], event["operation"]) 
+ return "flags/{}/{}".format(event["operation"], event["flag"]) return event["type"] class EventQueue: @@ -561,10 +561,10 @@ def fetch_events(query: Mapping[str, Any]) -> Dict[str, Any]: if orig_queue_id is None: response['queue_id'] = queue_id if len(response["events"]) == 1: - extra_log_data = "[%s/%s/%s]" % (queue_id, len(response["events"]), - response["events"][0]["type"]) + extra_log_data = "[{}/{}/{}]".format(queue_id, len(response["events"]), + response["events"][0]["type"]) else: - extra_log_data = "[%s/%s]" % (queue_id, len(response["events"])) + extra_log_data = "[{}/{}]".format(queue_id, len(response["events"])) if was_connected: extra_log_data += " [was connected]" return dict(type="response", response=response, extra_log_data=extra_log_data) diff --git a/zerver/tornado/handlers.py b/zerver/tornado/handlers.py index f71322f749..e1af1f8a5c 100644 --- a/zerver/tornado/handlers.py +++ b/zerver/tornado/handlers.py @@ -35,11 +35,11 @@ def clear_handler_by_id(handler_id: int) -> None: del handlers[handler_id] def handler_stats_string() -> str: - return "%s handlers, latest ID %s" % (len(handlers), current_handler_id) + return f"{len(handlers)} handlers, latest ID {current_handler_id}" def finish_handler(handler_id: int, event_queue_id: str, contents: List[Dict[str, Any]], apply_markdown: bool) -> None: - err_msg = "Got error finishing handler for queue %s" % (event_queue_id,) + err_msg = f"Got error finishing handler for queue {event_queue_id}" try: # We call async_request_timer_restart here in case we are # being finished without any events (because another @@ -48,9 +48,9 @@ def finish_handler(handler_id: int, event_queue_id: str, request = handler._request async_request_timer_restart(request) if len(contents) != 1: - request._log_data['extra'] = "[%s/1]" % (event_queue_id,) + request._log_data['extra'] = f"[{event_queue_id}/1]" else: - request._log_data['extra'] = "[%s/1/%s]" % (event_queue_id, contents[0]["type"]) + request._log_data['extra'] = "[{}/1/{}]".format(event_queue_id, contents[0]["type"]) handler.zulip_finish(dict(result='success', msg='', events=contents, @@ -82,7 +82,7 @@ class AsyncDjangoHandler(tornado.web.RequestHandler, base.BaseHandler): def __repr__(self) -> str: descriptor = get_descriptor_by_handler_id(self.handler_id) - return "AsyncDjangoHandler<%s, %s>" % (self.handler_id, descriptor) + return f"AsyncDjangoHandler<{self.handler_id}, {descriptor}>" def convert_tornado_request_to_django_request(self) -> HttpRequest: # This takes the WSGI environment that Tornado received (which diff --git a/zerver/tornado/views.py b/zerver/tornado/views.py index 701ffe02df..5e61ea6f00 100644 --- a/zerver/tornado/views.py +++ b/zerver/tornado/views.py @@ -29,7 +29,7 @@ def cleanup_event_queue(request: HttpRequest, user_profile: UserProfile, raise BadEventQueueIdError(queue_id) if user_profile.id != client.user_profile_id: return json_error(_("You are not authorized to access this queue")) - request._log_data['extra'] = "[%s]" % (queue_id,) + request._log_data['extra'] = f"[{queue_id}]" client.cleanup() return json_success() diff --git a/zerver/views/auth.py b/zerver/views/auth.py index aaa2f8ffd6..2b07424737 100644 --- a/zerver/views/auth.py +++ b/zerver/views/auth.py @@ -402,7 +402,7 @@ def remote_user_jwt(request: HttpRequest) -> HttpResponse: if email_domain is None: raise JsonableError(_("No organization specified in JSON web token claims")) - email = "%s@%s" % (remote_user, email_domain) + email = f"{remote_user}@{email_domain}" try: realm = get_realm(subdomain) diff 
--git a/zerver/views/development/registration.py b/zerver/views/development/registration.py index 5c188f16d5..ee8a8047ba 100644 --- a/zerver/views/development/registration.py +++ b/zerver/views/development/registration.py @@ -29,7 +29,7 @@ def register_development_user(request: HttpRequest) -> HttpResponse: request.META['HTTP_HOST'] = settings.REALM_HOSTS['zulip'] count = UserProfile.objects.count() name = 'user-%d' % (count,) - email = '%s@zulip.com' % (name,) + email = f'{name}@zulip.com' prereg = create_preregistration_user(email, request, realm_creation=False, password_required=False) activation_url = create_confirmation_link(prereg, request.get_host(), @@ -44,7 +44,7 @@ def register_development_user(request: HttpRequest) -> HttpResponse: def register_development_realm(request: HttpRequest) -> HttpResponse: count = UserProfile.objects.count() name = 'user-%d' % (count,) - email = '%s@zulip.com' % (name,) + email = f'{name}@zulip.com' realm_name = 'realm-%d' % (count,) prereg = create_preregistration_user(email, request, realm_creation=True, password_required=False) diff --git a/zerver/views/documentation.py b/zerver/views/documentation.py index 452b84784b..f26c49b4bd 100644 --- a/zerver/views/documentation.py +++ b/zerver/views/documentation.py @@ -110,11 +110,11 @@ class MarkdownDirectoryView(ApiURLView): # Strip the header and then use the first line to get the article title article_title = first_line.lstrip("#").strip() if context["not_index_page"]: - context["OPEN_GRAPH_TITLE"] = "%s (%s)" % (article_title, title_base) + context["OPEN_GRAPH_TITLE"] = f"{article_title} ({title_base})" else: context["OPEN_GRAPH_TITLE"] = title_base self.request.placeholder_open_graph_description = ( - "REPLACMENT_OPEN_GRAPH_DESCRIPTION_%s" % (int(2**24 * random.random()),)) + f"REPLACMENT_OPEN_GRAPH_DESCRIPTION_{int(2**24 * random.random())}") context["OPEN_GRAPH_DESCRIPTION"] = self.request.placeholder_open_graph_description context["sidebar_index"] = sidebar_index diff --git a/zerver/views/home.py b/zerver/views/home.py index ada3020002..749da6c1d8 100644 --- a/zerver/views/home.py +++ b/zerver/views/home.py @@ -301,7 +301,7 @@ def home_real(request: HttpRequest) -> HttpResponse: if user_profile.realm.plan_type == Realm.LIMITED: show_plans = True - request._log_data['extra'] = "[%s]" % (register_ret["queue_id"],) + request._log_data['extra'] = "[{}]".format(register_ret["queue_id"]) page_params['translation_data'] = {} if request_language != 'en': diff --git a/zerver/views/messages.py b/zerver/views/messages.py index 3295c2c541..f1133dc880 100644 --- a/zerver/views/messages.py +++ b/zerver/views/messages.py @@ -95,7 +95,7 @@ TS_STOP = "" def ts_locs_array( config: ColumnElement, text: ColumnElement, tsquery: ColumnElement ) -> ColumnElement: - options = "HighlightAll = TRUE, StartSel = %s, StopSel = %s" % (TS_START, TS_STOP) + options = f"HighlightAll = TRUE, StartSel = {TS_START}, StopSel = {TS_STOP}" delimited = func.ts_headline(config, text, tsquery, options) parts = func.unnest(func.string_to_array(delimited, TS_START)).alias() part = column(parts.name) @@ -255,7 +255,7 @@ class NarrowBuilder: base_stream_name = m.group(1) matching_streams = get_active_streams(self.user_profile.realm).filter( - name__iregex=r'^(un)*%s(\.d)*$' % (self._pg_re_escape(base_stream_name),)) + name__iregex=fr'^(un)*{self._pg_re_escape(base_stream_name)}(\.d)*$') recipient_ids = [matching_stream.recipient_id for matching_stream in matching_streams] cond = column("recipient_id").in_(recipient_ids) return 
query.where(maybe_negate(cond)) @@ -851,7 +851,7 @@ def get_messages_backend(request: HttpRequest, user_profile: UserProfile, verbose_operators.append("is:" + term['operand']) else: verbose_operators.append(term['operator']) - request._log_data['extra'] = "[%s]" % (",".join(verbose_operators),) + request._log_data['extra'] = "[{}]".format(",".join(verbose_operators)) sa_conn = get_sqlalchemy_connection() @@ -1127,7 +1127,7 @@ def update_message_flags(request: HttpRequest, user_profile: UserProfile, count = do_update_message_flags(user_profile, request.client, operation, flag, messages) target_count_str = str(len(messages)) - log_data_str = "[%s %s/%s] actually %s" % (operation, flag, target_count_str, count) + log_data_str = f"[{operation} {flag}/{target_count_str}] actually {count}" request._log_data["extra"] = log_data_str return json_success({'result': 'success', @@ -1138,7 +1138,7 @@ def update_message_flags(request: HttpRequest, user_profile: UserProfile, def mark_all_as_read(request: HttpRequest, user_profile: UserProfile) -> HttpResponse: count = do_mark_all_as_read(user_profile, request.client) - log_data_str = "[%s updated]" % (count,) + log_data_str = f"[{count} updated]" request._log_data["extra"] = log_data_str return json_success({'result': 'success', @@ -1151,7 +1151,7 @@ def mark_stream_as_read(request: HttpRequest, stream, recipient, sub = access_stream_by_id(user_profile, stream_id) count = do_mark_stream_messages_as_read(user_profile, request.client, stream) - log_data_str = "[%s updated]" % (count,) + log_data_str = f"[{count} updated]" request._log_data["extra"] = log_data_str return json_success({'result': 'success', @@ -1176,7 +1176,7 @@ def mark_topic_as_read(request: HttpRequest, count = do_mark_stream_messages_as_read(user_profile, request.client, stream, topic_name) - log_data_str = "[%s updated]" % (count,) + log_data_str = f"[{count} updated]" request._log_data["extra"] = log_data_str return json_success({'result': 'success', @@ -1616,7 +1616,7 @@ def update_message_backend(request: HttpRequest, user_profile: UserMessage, mention_user_ids, mention_data) # Include the number of messages changed in the logs - request._log_data['extra'] = "[%s]" % (number_changed,) + request._log_data['extra'] = f"[{number_changed}]" if links_for_embed: event_data = { 'message_id': message.id, diff --git a/zerver/views/pointer.py b/zerver/views/pointer.py index c01ba35489..924382098d 100644 --- a/zerver/views/pointer.py +++ b/zerver/views/pointer.py @@ -19,7 +19,7 @@ def update_pointer_backend(request: HttpRequest, user_profile: UserProfile, if get_usermessage_by_message_id(user_profile, pointer) is None: raise JsonableError(_("Invalid message ID")) - request._log_data["extra"] = "[%s]" % (pointer,) + request._log_data["extra"] = f"[{pointer}]" update_flags = (request.client.name.lower() in ['android', "zulipandroid"]) do_update_pointer(user_profile, request.client, pointer, update_flags=update_flags) diff --git a/zerver/views/report.py b/zerver/views/report.py index 3ce197d914..981f82c102 100644 --- a/zerver/views/report.py +++ b/zerver/views/report.py @@ -51,11 +51,11 @@ def report_send_times(request: HttpRequest, user_profile: UserProfile, % (time, received_str, displayed_str, locally_echoed, rendered_content_disparity) base_key = statsd_key(user_profile.realm.string_id, clean_periods=True) - statsd.timing("endtoend.send_time.%s" % (base_key,), time) + statsd.timing(f"endtoend.send_time.{base_key}", time) if received > 0: - statsd.timing("endtoend.receive_time.%s" % (base_key,), 
received) + statsd.timing(f"endtoend.receive_time.{base_key}", received) if displayed > 0: - statsd.timing("endtoend.displayed_time.%s" % (base_key,), displayed) + statsd.timing(f"endtoend.displayed_time.{base_key}", displayed) if locally_echoed: statsd.incr('locally_echoed') if rendered_content_disparity: @@ -68,11 +68,11 @@ def report_narrow_times(request: HttpRequest, user_profile: UserProfile, initial_core: int=REQ(converter=to_non_negative_int), initial_free: int=REQ(converter=to_non_negative_int), network: int=REQ(converter=to_non_negative_int)) -> HttpResponse: - request._log_data["extra"] = "[%sms/%sms/%sms]" % (initial_core, initial_free, network) + request._log_data["extra"] = f"[{initial_core}ms/{initial_free}ms/{network}ms]" base_key = statsd_key(user_profile.realm.string_id, clean_periods=True) - statsd.timing("narrow.initial_core.%s" % (base_key,), initial_core) - statsd.timing("narrow.initial_free.%s" % (base_key,), initial_free) - statsd.timing("narrow.network.%s" % (base_key,), network) + statsd.timing(f"narrow.initial_core.{base_key}", initial_core) + statsd.timing(f"narrow.initial_free.{base_key}", initial_free) + statsd.timing(f"narrow.network.{base_key}", network) return json_success() @human_users_only @@ -80,10 +80,10 @@ def report_narrow_times(request: HttpRequest, user_profile: UserProfile, def report_unnarrow_times(request: HttpRequest, user_profile: UserProfile, initial_core: int=REQ(converter=to_non_negative_int), initial_free: int=REQ(converter=to_non_negative_int)) -> HttpResponse: - request._log_data["extra"] = "[%sms/%sms]" % (initial_core, initial_free) + request._log_data["extra"] = f"[{initial_core}ms/{initial_free}ms]" base_key = statsd_key(user_profile.realm.string_id, clean_periods=True) - statsd.timing("unnarrow.initial_core.%s" % (base_key,), initial_core) - statsd.timing("unnarrow.initial_free.%s" % (base_key,), initial_free) + statsd.timing(f"unnarrow.initial_core.{base_key}", initial_core) + statsd.timing(f"unnarrow.initial_free.{base_key}", initial_free) return json_success() @has_request_variables diff --git a/zerver/views/streams.py b/zerver/views/streams.py index b2ec544a65..4876f04f72 100644 --- a/zerver/views/streams.py +++ b/zerver/views/streams.py @@ -312,7 +312,7 @@ def you_were_just_subscribed_message(acting_user: UserProfile, {"full_name": acting_user.full_name} message += "\n\n" for stream_name in subscriptions: - message += "* #**%s**\n" % (stream_name,) + message += f"* #**{stream_name}**\n" return message @require_non_guest_user @@ -443,7 +443,7 @@ def add_subscriptions_backend( content = content % { 'user_name': user_profile.full_name, 'user_id': user_profile.id, - 'stream_str': ", ".join('#**%s**' % (s.name,) for s in created_streams)} + 'stream_str': ", ".join(f'#**{s.name}**' for s in created_streams)} sender = get_system_bot(settings.NOTIFICATION_BOT) topic = _('new streams') diff --git a/zerver/views/upload.py b/zerver/views/upload.py index f7f1c06295..22ff8389b4 100644 --- a/zerver/views/upload.py +++ b/zerver/views/upload.py @@ -70,7 +70,7 @@ def serve_file_url_backend(request: HttpRequest, user_profile: UserProfile, def serve_file(request: HttpRequest, user_profile: UserProfile, realm_id_str: str, filename: str, url_only: bool=False) -> HttpResponse: - path_id = "%s/%s" % (realm_id_str, filename) + path_id = f"{realm_id_str}/{filename}" is_authorized = validate_attachment_request(user_profile, path_id) if is_authorized is None: diff --git a/zerver/views/users.py b/zerver/views/users.py index 3c7696c8c8..26b08d5e8a 100644 
--- a/zerver/views/users.py +++ b/zerver/views/users.py @@ -268,7 +268,7 @@ def add_bot_backend( short_name += "-bot" full_name = check_full_name(full_name_raw) try: - email = '%s@%s' % (short_name, user_profile.realm.get_bot_domain()) + email = f'{short_name}@{user_profile.realm.get_bot_domain()}' except InvalidFakeEmailDomain: return json_error(_("Can't create bots until FAKE_EMAIL_DOMAIN is correctly configured.\n" "Please contact your server administrator.")) diff --git a/zerver/webhooks/beanstalk/view.py b/zerver/webhooks/beanstalk/view.py index d70e74d363..2567bf102f 100644 --- a/zerver/webhooks/beanstalk/view.py +++ b/zerver/webhooks/beanstalk/view.py @@ -53,7 +53,7 @@ def beanstalk_decoder(view_func: ViewFuncT) -> ViewFuncT: if auth_type.lower() == "basic": email, api_key = base64.b64decode(encoded_value).decode('utf-8').split(":") email = email.replace('%40', '@') - credentials = "%s:%s" % (email, api_key) + credentials = f"{email}:{api_key}" encoded_credentials: str = base64.b64encode(credentials.encode('utf-8')).decode('utf8') request.META['HTTP_AUTHORIZATION'] = "Basic " + encoded_credentials @@ -89,8 +89,8 @@ def api_beanstalk_webhook(request: HttpRequest, user_profile: UserProfile, revision = payload.get('revision') (short_commit_msg, _, _) = payload['message'].partition("\n") - subject = "svn r%s" % (revision,) - content = "%s pushed [revision %s](%s):\n\n> %s" % (author, revision, url, short_commit_msg) + subject = f"svn r{revision}" + content = f"{author} pushed [revision {revision}]({url}):\n\n> {short_commit_msg}" check_send_webhook_message(request, user_profile, subject, content) return json_success() diff --git a/zerver/webhooks/bitbucket3/view.py b/zerver/webhooks/bitbucket3/view.py index a6a6fab9ca..16a469ecd9 100644 --- a/zerver/webhooks/bitbucket3/view.py +++ b/zerver/webhooks/bitbucket3/view.py @@ -65,7 +65,7 @@ def repo_comment_handler(payload: Dict[str, Any], action: str) -> List[Dict[str, subject = BITBUCKET_TOPIC_TEMPLATE.format(repository_name=repo_name) sha = payload["commit"] commit_url = payload["repository"]["links"]["self"][0]["href"][:-6] # remove the "browse" at the end - commit_url += "commits/%s" % (sha,) + commit_url += f"commits/{sha}" message = payload["comment"]["text"] if action == "deleted their comment": message = f"~~{message}~~" @@ -125,7 +125,7 @@ def repo_push_branch_data(payload: Dict[str, Any], change: Dict[str, Any]) -> Di elif event_type == "DELETE": body = get_remove_branch_event_message(user_name, branch_name) else: - message = "%s.%s" % (payload["eventKey"], event_type) # nocoverage + message = "{}.{}".format(payload["eventKey"], event_type) # nocoverage raise UnexpectedWebhookEventType("BitBucket Server", message) subject = TOPIC_WITH_BRANCH_TEMPLATE.format(repo=repo_name, branch=branch_name) @@ -141,7 +141,7 @@ def repo_push_tag_data(payload: Dict[str, Any], change: Dict[str, Any]) -> Dict[ elif event_type == "DELETE": action = "removed" else: - message = "%s.%s" % (payload["eventKey"], event_type) # nocoverage + message = "{}.{}".format(payload["eventKey"], event_type) # nocoverage raise UnexpectedWebhookEventType("BitBucket Server", message) subject = BITBUCKET_TOPIC_TEMPLATE.format(repository_name=repo_name) @@ -162,7 +162,7 @@ def repo_push_handler(payload: Dict[str, Any], branches: Optional[str]=None elif event_target_type == "TAG": data.append(repo_push_tag_data(payload, change)) else: - message = "%s.%s" % (payload["eventKey"], event_target_type) # nocoverage + message = "{}.{}".format(payload["eventKey"], 
event_target_type) # nocoverage raise UnexpectedWebhookEventType("BitBucket Server", message) return data @@ -171,7 +171,7 @@ def get_assignees_string(pr: Dict[str, Any]) -> Optional[str]: for reviewer in pr["reviewers"]: name = reviewer["user"]["name"] link = reviewer["user"]["links"]["self"][0]["href"] - reviewers.append("[%s](%s)" % (name, link)) + reviewers.append(f"[{name}]({link})") if len(reviewers) == 0: assignees = None elif len(reviewers) == 1: diff --git a/zerver/webhooks/front/view.py b/zerver/webhooks/front/view.py index 40659f4505..819bb41b96 100644 --- a/zerver/webhooks/front/view.py +++ b/zerver/webhooks/front/view.py @@ -20,12 +20,12 @@ def get_message_data(payload: Dict[str, Any]) -> Tuple[str, str, str, str]: def get_source_name(payload: Dict[str, Any]) -> str: first_name = payload['source']['data']['first_name'] last_name = payload['source']['data']['last_name'] - return "%s %s" % (first_name, last_name) + return f"{first_name} {last_name}" def get_target_name(payload: Dict[str, Any]) -> str: first_name = payload['target']['data']['first_name'] last_name = payload['target']['data']['last_name'] - return "%s %s" % (first_name, last_name) + return f"{first_name} {last_name}" def get_inbound_message_body(payload: Dict[str, Any]) -> str: link, outbox, inbox, subject = get_message_data(payload) diff --git a/zerver/webhooks/jira/tests.py b/zerver/webhooks/jira/tests.py index 755fe20b39..435d42519b 100644 --- a/zerver/webhooks/jira/tests.py +++ b/zerver/webhooks/jira/tests.py @@ -10,7 +10,7 @@ class JiraHookTests(WebhookTestCase): def test_custom_stream(self) -> None: api_key = get_api_key(self.test_user) - url = "/api/v1/external/jira?api_key=%s&stream=jira_custom" % (api_key,) + url = f"/api/v1/external/jira?api_key={api_key}&stream=jira_custom" msg = self.send_json_payload(self.test_user, url, self.get_body('created_v2'), diff --git a/zerver/webhooks/newrelic/view.py b/zerver/webhooks/newrelic/view.py index df3f611d86..6760c38e8b 100644 --- a/zerver/webhooks/newrelic/view.py +++ b/zerver/webhooks/newrelic/view.py @@ -40,7 +40,7 @@ def api_newrelic_webhook(request: HttpRequest, user_profile: UserProfile, subject = alert['message'] content = ALERT_TEMPLATE.format(**alert) elif deployment: - subject = "%s deploy" % (deployment['application_name'],) + subject = "{} deploy".format(deployment['application_name']) content = DEPLOY_TEMPLATE.format(**deployment) else: raise UnexpectedWebhookEventType('New Relic', 'Unknown Event Type') diff --git a/zerver/webhooks/pivotal/view.py b/zerver/webhooks/pivotal/view.py index 31965e3e32..3a56f4b2dd 100644 --- a/zerver/webhooks/pivotal/view.py +++ b/zerver/webhooks/pivotal/view.py @@ -33,7 +33,7 @@ def api_pivotal_webhook_v3(request: HttpRequest, user_profile: UserProfile) -> T story_id = get_text(['stories', 'story', 'id']) # Ugh, the URL in the XML data is not a clickable url that works for the user # so we try to build one that the user can actually click on - url = "https://www.pivotaltracker.com/s/projects/%s/stories/%s" % (project_id, story_id) + url = f"https://www.pivotaltracker.com/s/projects/{project_id}/stories/{story_id}" # Pivotal doesn't tell us the name of the story, but it's usually in the # description in quotes as the first quoted string @@ -43,7 +43,7 @@ def api_pivotal_webhook_v3(request: HttpRequest, user_profile: UserProfile) -> T name = match.group(1) else: name = "Story changed" # Failed for an unknown reason, show something - more_info = " [(view)](%s)." % (url,) + more_info = f" [(view)]({url})." 
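
Several hunks in this patch (event_queue, the BitBucket Server and Pivotal webhooks) switch % formatting to str.format rather than to an f-string when the values are dict subscripts written with double quotes. Reusing the enclosing quote character inside an f-string expression is a syntax error before Python 3.12, which may be why these were left as .format; an equivalent f-string spelling simply needs single quotes inside the braces. A sketch with a placeholder event dict:

    event = {"flag": "read", "operation": "add"}     # placeholder event payload

    formatted = "flags/{}/{}".format(event["operation"], event["flag"])
    inlined = f"flags/{event['operation']}/{event['flag']}"   # single quotes inside the braces
    assert formatted == inlined == "flags/add/read"
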
if event_type == 'story_update': subject = name @@ -57,15 +57,9 @@ def api_pivotal_webhook_v3(request: HttpRequest, user_profile: UserProfile) -> T issue_status = get_text(['stories', 'story', 'current_state']) estimate = get_text(['stories', 'story', 'estimate']) if estimate != '': - estimate = " worth %s story points" % (estimate,) + estimate = f" worth {estimate} story points" subject = name - content = "%s (%s %s%s):\n\n~~~ quote\n%s\n~~~\n\n%s" % ( - description, - issue_status, - issue_type, - estimate, - issue_desc, - more_info) + content = f"{description} ({issue_status} {issue_type}{estimate}):\n\n~~~ quote\n{issue_desc}\n~~~\n\n{more_info}" return subject, content UNSUPPORTED_EVENT_TYPES = [ @@ -95,13 +89,12 @@ def api_pivotal_webhook_v5(request: HttpRequest, user_profile: UserProfile) -> T performed_by = payload.get("performed_by", {}).get("name", "") - story_info = "[%s](https://www.pivotaltracker.com/s/projects/%s): [%s](%s)" % ( - project_name, project_id, story_name, story_url) + story_info = f"[{project_name}](https://www.pivotaltracker.com/s/projects/{project_id}): [{story_name}]({story_url})" changes = payload.get("changes", []) content = "" - subject = "#%s: %s" % (story_id, story_name) + subject = f"#{story_id}: {story_name}" def extract_comment(change: Dict[str, Any]) -> Optional[str]: if change.get("kind") == "comment": @@ -110,52 +103,51 @@ def api_pivotal_webhook_v5(request: HttpRequest, user_profile: UserProfile) -> T if event_type == "story_update_activity": # Find the changed valued and build a message - content += "%s updated %s:\n" % (performed_by, story_info) + content += f"{performed_by} updated {story_info}:\n" for change in changes: old_values = change.get("original_values", {}) new_values = change["new_values"] if "current_state" in old_values and "current_state" in new_values: - content += "* state changed from **%s** to **%s**\n" % ( + content += "* state changed from **{}** to **{}**\n".format( old_values["current_state"], new_values["current_state"]) if "estimate" in old_values and "estimate" in new_values: old_estimate = old_values.get("estimate", None) if old_estimate is None: estimate = "is now" else: - estimate = "changed from %s to" % (old_estimate,) + estimate = f"changed from {old_estimate} to" new_estimate = new_values["estimate"] if new_values["estimate"] is not None else "0" - content += "* estimate %s **%s points**\n" % (estimate, new_estimate) + content += f"* estimate {estimate} **{new_estimate} points**\n" if "story_type" in old_values and "story_type" in new_values: - content += "* type changed from **%s** to **%s**\n" % ( + content += "* type changed from **{}** to **{}**\n".format( old_values["story_type"], new_values["story_type"]) comment = extract_comment(change) if comment is not None: - content += "* Comment added:\n~~~quote\n%s\n~~~\n" % (comment,) + content += f"* Comment added:\n~~~quote\n{comment}\n~~~\n" elif event_type == "comment_create_activity": for change in changes: comment = extract_comment(change) if comment is not None: - content += "%s added a comment to %s:\n~~~quote\n%s\n~~~" % ( - performed_by, story_info, comment) + content += f"{performed_by} added a comment to {story_info}:\n~~~quote\n{comment}\n~~~" elif event_type == "story_create_activity": - content += "%s created %s: %s\n" % (performed_by, story_type, story_info) + content += f"{performed_by} created {story_type}: {story_info}\n" for change in changes: new_values = change.get("new_values", {}) if "current_state" in new_values: - content += "* State is 
**%s**\n" % (new_values["current_state"],) + content += "* State is **{}**\n".format(new_values["current_state"]) if "description" in new_values: - content += "* Description is\n\n> %s" % (new_values["description"],) + content += "* Description is\n\n> {}".format(new_values["description"]) elif event_type == "story_move_activity": - content = "%s moved %s" % (performed_by, story_info) + content = f"{performed_by} moved {story_info}" for change in changes: old_values = change.get("original_values", {}) new_values = change["new_values"] if "current_state" in old_values and "current_state" in new_values: - content += " from **%s** to **%s**." % (old_values["current_state"], - new_values["current_state"]) + content += " from **{}** to **{}**.".format(old_values["current_state"], + new_values["current_state"]) elif event_type in UNSUPPORTED_EVENT_TYPES: # Known but unsupported Pivotal event types pass diff --git a/zerver/webhooks/zendesk/view.py b/zerver/webhooks/zendesk/view.py index 50a3c305cd..4292ba150d 100644 --- a/zerver/webhooks/zendesk/view.py +++ b/zerver/webhooks/zendesk/view.py @@ -23,6 +23,6 @@ def api_zendesk_webhook(request: HttpRequest, user_profile: UserProfile, ticket_id and ticket_title to create a subject. And passes with zendesk user's configured message to zulip. """ - subject = truncate('#%s: %s' % (ticket_id, ticket_title), 60) + subject = truncate(f'#{ticket_id}: {ticket_title}', 60) check_send_webhook_message(request, user_profile, subject, message) return json_success() diff --git a/zerver/worker/queue_processors.py b/zerver/worker/queue_processors.py index 924f32e2b3..1e76dbf760 100644 --- a/zerver/worker/queue_processors.py +++ b/zerver/worker/queue_processors.py @@ -150,7 +150,7 @@ class QueueProcessingWorker(ABC): os.makedirs(settings.QUEUE_STATS_DIR, exist_ok=True) - fname = '%s.stats' % (self.queue_name,) + fname = f'{self.queue_name}.stats' fn = os.path.join(settings.QUEUE_STATS_DIR, fname) with lockfile(fn + '.lock'): tmp_fn = fn + '.tmp' @@ -204,9 +204,9 @@ class QueueProcessingWorker(ABC): self._log_problem() if not os.path.exists(settings.QUEUE_ERROR_DIR): os.mkdir(settings.QUEUE_ERROR_DIR) # nocoverage - fname = '%s.errors' % (self.queue_name,) + fname = f'{self.queue_name}.errors' fn = os.path.join(settings.QUEUE_ERROR_DIR, fname) - line = '%s\t%s\n' % (time.asctime(), ujson.dumps(events)) + line = f'{time.asctime()}\t{ujson.dumps(events)}\n' lock_fn = fn + '.lock' with lockfile(lock_fn): with open(fn, 'ab') as f: @@ -214,7 +214,7 @@ class QueueProcessingWorker(ABC): check_and_send_restart_signal() def _log_problem(self) -> None: - logging.exception("Problem handling data on queue %s" % (self.queue_name,)) + logging.exception(f"Problem handling data on queue {self.queue_name}") def setup(self) -> None: self.q = SimpleQueueClient() @@ -685,8 +685,7 @@ class DeferredWorker(QueueProcessingWorker): # Send a private message notification letting the user who # triggered the export know the export finished. 
- content = "Your data export is complete and has been uploaded here:\n\n%s" % ( - public_url,) + content = f"Your data export is complete and has been uploaded here:\n\n{public_url}" internal_send_private_message( realm=user_profile.realm, sender=get_system_bot(settings.NOTIFICATION_BOT), diff --git a/zilencer/management/commands/add_new_realm.py b/zilencer/management/commands/add_new_realm.py index 7570e0c75e..a78723114d 100644 --- a/zilencer/management/commands/add_new_realm.py +++ b/zilencer/management/commands/add_new_realm.py @@ -17,7 +17,7 @@ class Command(ZulipBaseCommand): name = '%02d-user' % ( UserProfile.objects.filter(email__contains='user@').count(),) - user = do_create_user('%s@%s.zulip.com' % (name, string_id), + user = do_create_user(f'{name}@{string_id}.zulip.com', 'password', realm, name, name, role=UserProfile.ROLE_REALM_ADMINISTRATOR) bulk_add_subscriptions([realm.signup_notifications_stream], [user]) diff --git a/zilencer/management/commands/add_new_user.py b/zilencer/management/commands/add_new_user.py index 1ed1e179af..e6fdfce4af 100644 --- a/zilencer/management/commands/add_new_user.py +++ b/zilencer/management/commands/add_new_user.py @@ -30,4 +30,4 @@ and will otherwise fall back to the zulip realm.""" domain = realm.string_id + '.zulip.com' name = '%02d-user' % (UserProfile.objects.filter(email__contains='user@').count(),) - do_create_user('%s@%s' % (name, domain), 'password', valid_realm, name, name) + do_create_user(f'{name}@{domain}', 'password', valid_realm, name, name) diff --git a/zilencer/management/commands/populate_db.py b/zilencer/management/commands/populate_db.py index e1b5b921d2..f74ebfdae0 100644 --- a/zilencer/management/commands/populate_db.py +++ b/zilencer/management/commands/populate_db.py @@ -366,7 +366,7 @@ class Command(BaseCommand): for profile in profiles: email = profile.delivery_email if email not in subscriptions_map: - raise Exception('Subscriptions not listed for user %s' % (email,)) + raise Exception(f'Subscriptions not listed for user {email}') for stream_name in subscriptions_map[email]: stream = Stream.objects.get(name=stream_name) diff --git a/zilencer/management/commands/print_initial_password.py b/zilencer/management/commands/print_initial_password.py index 3c728a5f33..aab724290a 100644 --- a/zilencer/management/commands/print_initial_password.py +++ b/zilencer/management/commands/print_initial_password.py @@ -21,7 +21,7 @@ class Command(ZulipBaseCommand): print(self.fmt % ('email', 'password', 'API key')) for email in options['emails']: if '@' not in email: - print('ERROR: %s does not look like an email address' % (email,)) + print(f'ERROR: {email} does not look like an email address') continue user = self.get_user(email, realm) print(self.fmt % (email, initial_password(email), get_api_key(user))) diff --git a/zilencer/management/commands/sync_api_key.py b/zilencer/management/commands/sync_api_key.py index 6cfade03ac..619781aaa3 100644 --- a/zilencer/management/commands/sync_api_key.py +++ b/zilencer/management/commands/sync_api_key.py @@ -26,4 +26,4 @@ class Command(BaseCommand): user_profile.api_key = api_key user_profile.save(update_fields=["api_key"]) except UserProfile.DoesNotExist: - print("User %s does not exist; not syncing API key" % (email,)) + print(f"User {email} does not exist; not syncing API key") diff --git a/zilencer/models.py b/zilencer/models.py index b913b64759..91f93606cb 100644 --- a/zilencer/models.py +++ b/zilencer/models.py @@ -21,7 +21,7 @@ class RemoteZulipServer(models.Model): last_updated: 
     def __str__(self) -> str:
-        return "<RemoteZulipServer %s %s>" % (self.hostname, self.uuid[0:12])
+        return f"<RemoteZulipServer {self.hostname} {self.uuid[0:12]}>"
 
     def format_requestor_for_logs(self) -> str:
         return "zulip-server:" + self.uuid
@@ -36,7 +36,7 @@ class RemotePushDeviceToken(AbstractPushDeviceToken):
         unique_together = ("server", "user_id", "kind", "token")
 
     def __str__(self) -> str:
-        return "<RemotePushDeviceToken %s %s>" % (self.server, self.user_id)
+        return f"<RemotePushDeviceToken {self.server} {self.user_id}>"
 
 class RemoteRealmAuditLog(AbstractRealmAuditLog):
     """Synced audit data from a remote Zulip server, used primarily for
@@ -48,8 +48,7 @@ class RemoteRealmAuditLog(AbstractRealmAuditLog):
     remote_id: int = models.IntegerField(db_index=True)
 
     def __str__(self) -> str:
-        return "<RemoteRealmAuditLog: %s %s %s %s>" % (
-            self.server, self.event_type, self.event_time, self.id)
+        return f"<RemoteRealmAuditLog: {self.server} {self.event_type} {self.event_time} {self.id}>"
 
 class RemoteInstallationCount(BaseCount):
     server: RemoteZulipServer = models.ForeignKey(RemoteZulipServer, on_delete=models.CASCADE)
@@ -63,7 +62,7 @@ class RemoteInstallationCount(BaseCount):
         ]
 
     def __str__(self) -> str:
-        return "<RemoteInstallationCount: %s %s %s>" % (self.property, self.subgroup, self.value)
+        return f"<RemoteInstallationCount: {self.property} {self.subgroup} {self.value}>"
 
 # We can't subclass RealmCount because we only have a realm_id here, not a foreign key.
 class RemoteRealmCount(BaseCount):
@@ -80,4 +79,4 @@ class RemoteRealmCount(BaseCount):
         ]
 
     def __str__(self) -> str:
-        return "%s %s %s %s %s" % (self.server, self.realm_id, self.property, self.subgroup, self.value)
+        return f"{self.server} {self.realm_id} {self.property} {self.subgroup} {self.value}"
diff --git a/zproject/backends.py b/zproject/backends.py
index e23d84bb60..04cfcfdd83 100644
--- a/zproject/backends.py
+++ b/zproject/backends.py
@@ -143,7 +143,7 @@ def any_social_backend_enabled(realm: Optional[Realm]=None) -> bool:
     return auth_enabled_helper(social_backend_names, realm)
 
 def redirect_to_config_error(error_type: str) -> HttpResponseRedirect:
-    return HttpResponseRedirect("/config-error/%s" % (error_type,))
+    return HttpResponseRedirect(f"/config-error/{error_type}")
 
 def require_email_format_usernames(realm: Optional[Realm]=None) -> bool:
     if ldap_auth_enabled(realm):
@@ -445,8 +445,7 @@ class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend):
         if settings.LDAP_APPEND_DOMAIN:
             if is_valid_email(username):
                 if not username.endswith("@" + settings.LDAP_APPEND_DOMAIN):
-                    raise ZulipLDAPExceptionOutsideDomain("Email %s does not match LDAP domain %s." % (
-                        username, settings.LDAP_APPEND_DOMAIN))
+                    raise ZulipLDAPExceptionOutsideDomain(f"Email {username} does not match LDAP domain {settings.LDAP_APPEND_DOMAIN}.")
                 result = email_to_username(username)
         else:
             # We can use find_ldap_users_by_email
@@ -485,8 +484,7 @@ class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend):
         if settings.LDAP_EMAIL_ATTR is not None:
             # Get email from ldap attributes.
             if settings.LDAP_EMAIL_ATTR not in ldap_user.attrs:
-                raise ZulipLDAPException("LDAP user doesn't have the needed %s attribute" % (
-                    settings.LDAP_EMAIL_ATTR,))
+                raise ZulipLDAPException(f"LDAP user doesn't have the needed {settings.LDAP_EMAIL_ATTR} attribute")
             else:
                 return ldap_user.attrs[settings.LDAP_EMAIL_ATTR][0]
 
@@ -611,12 +609,12 @@ class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend):
             try:
                 field = fields_by_var_name[var_name]
             except KeyError:
-                raise ZulipLDAPException('Custom profile field with name %s not found.' % (var_name,))
+                raise ZulipLDAPException(f'Custom profile field with name {var_name} not found.')
             if existing_values.get(var_name) == value:
                 continue
             result = validate_user_custom_profile_field(user_profile.realm.id, field, value)
             if result is not None:
-                raise ZulipLDAPException('Invalid data for %s field: %s' % (var_name, result))
+                raise ZulipLDAPException(f'Invalid data for {var_name} field: {result}')
             profile_data.append({
                 'id': field.id,
                 'value': value,
@@ -876,9 +874,9 @@ def query_ldap(email: str) -> List[str]:
             if django_field == "avatar":
                 if isinstance(value, bytes):
                     value = "(An avatar image file)"
-            values.append("%s: %s" % (django_field, value))
+            values.append(f"{django_field}: {value}")
         if settings.LDAP_EMAIL_ATTR is not None:
-            values.append("%s: %s" % ('email', ldap_attrs[settings.LDAP_EMAIL_ATTR][0]))
+            values.append("{}: {}".format('email', ldap_attrs[settings.LDAP_EMAIL_ATTR][0]))
     else:
         values.append("LDAP backend not configured on this server.")
     return values
@@ -1680,7 +1678,7 @@ class SAMLAuthBackend(SocialAuthMixin, SAMLAuth):
             if idps_without_limit_to_subdomains:
                 self.logger.error("SAML_REQUIRE_LIMIT_TO_SUBDOMAINS is enabled and the following " +
                                   "IdPs don't have limit_to_subdomains specified and will be ignored: " +
-                                  "%s" % (idps_without_limit_to_subdomains,))
+                                  f"{idps_without_limit_to_subdomains}")
                 for idp_name in idps_without_limit_to_subdomains:
                     del settings.SOCIAL_AUTH_SAML_ENABLED_IDPS[idp_name]
         super().__init__(*args, **kwargs)
@@ -1874,7 +1872,7 @@ class SAMLAuthBackend(SocialAuthMixin, SAMLAuth):
     def validate_idp_for_subdomain(cls, idp_name: str, subdomain: str) -> bool:
         idp_dict = settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.get(idp_name)
         if idp_dict is None:
-            raise AssertionError("IdP: %s not found" % (idp_name,))
+            raise AssertionError(f"IdP: {idp_name} not found")
         if 'limit_to_subdomains' in idp_dict and subdomain not in idp_dict['limit_to_subdomains']:
             return False
 
diff --git a/zproject/urls.py b/zproject/urls.py
index 1081f51a12..48686c3b94 100644
--- a/zproject/urls.py
+++ b/zproject/urls.py
@@ -696,7 +696,7 @@ urls += [
 for app_name in settings.EXTRA_INSTALLED_APPS:
     app_dir = os.path.join(settings.DEPLOY_ROOT, app_name)
     if os.path.exists(os.path.join(app_dir, 'urls.py')):
-        urls += [url(r'^', include('%s.urls' % (app_name,)))]
+        urls += [url(r'^', include(f'{app_name}.urls'))]
         i18n_urls += import_string(f"{app_name}.urls.i18n_urlpatterns")
 
 # Tornado views
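The conversion pattern in the hunks above is mechanical: a %-interpolation over simple names becomes an f-string, while call sites whose arguments are double-quoted subscripts such as old_values["current_state"] are rewritten to str.format() instead, most likely because an f-string (before Python 3.12) cannot reuse the enclosing quote character inside a replacement field. The snippet below is an illustrative sketch only, not part of the diff; the variable values are made up for the example.

# Minimal sketch of the two conversion cases seen throughout this diff.
queue_name = "user_activity"
old_values = {"current_state": "started"}
new_values = {"current_state": "finished"}

# Case 1: %-interpolation of simple names becomes an f-string.
fname = "%s.errors" % (queue_name,)
fname = f"{queue_name}.errors"
assert fname == "user_activity.errors"

# Case 2: arguments with double-quoted subscripts stay on str.format():
# before Python 3.12 an f-string cannot repeat the enclosing quote character
# inside {...}, so converting these would also have meant re-quoting the keys.
line = "* state changed from **%s** to **%s**\n" % (
    old_values["current_state"], new_values["current_state"])
line = "* state changed from **{}** to **{}**\n".format(
    old_values["current_state"], new_values["current_state"])
assert line == "* state changed from **started** to **finished**\n"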