mirror of https://github.com/zulip/zulip.git
python: Convert "".format to Python 3.6 f-strings.

Generated automatically by pyupgrade.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
parent 96234016f5
commit 5483ebae37
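The rewrite is mechanical: each str.format call whose arguments pyupgrade can safely inline is replaced by the equivalent f-string, with no change in behavior. A minimal standalone sketch of the pattern, mirroring one of the chown lines changed below (illustrative only, not code from this commit):

import os

# Before: explicit str.format call, as on the removed lines in the diff.
owner_format = "{}:{}".format(os.getuid(), os.getgid())

# After: the equivalent Python 3.6+ f-string, as emitted by pyupgrade.
owner_fstring = f"{os.getuid()}:{os.getgid()}"

# Both spellings produce the same "uid:gid" string.
assert owner_format == owner_fstring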
@@ -67,7 +67,7 @@ def setup_node_modules(
     if not os.path.exists(success_stamp):
         do_yarn_install(target_path, yarn_args, success_stamp)

-    print("Using cached node modules from {}".format(cached_node_modules))
+    print(f"Using cached node modules from {cached_node_modules}")
     if os.path.islink("node_modules"):
         os.remove("node_modules")
     elif os.path.isdir("node_modules"):
@@ -109,7 +109,7 @@ def get_package_names(requirements_file: str) -> List[str]:
         if package.startswith("git+https://") and "#egg=" in package:
             split_package = package.split("#egg=")
             if len(split_package) != 2:
-                raise Exception("Unexpected duplicate #egg in package {}".format(package))
+                raise Exception(f"Unexpected duplicate #egg in package {package}")
             # Extract the package name from Git requirements entries
             package = split_package[1]

@@ -197,8 +197,8 @@ def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
     # Here, we select the old virtualenv with the largest overlap
     overlaps = sorted(overlaps)
     _, source_venv_path, copied_packages = overlaps[-1]
-    print("Copying packages from {}".format(source_venv_path))
-    clone_ve = "{}/bin/virtualenv-clone".format(source_venv_path)
+    print(f"Copying packages from {source_venv_path}")
+    clone_ve = f"{source_venv_path}/bin/virtualenv-clone"
     cmd = [clone_ve, source_venv_path, venv_path]

     try:
@@ -222,7 +222,7 @@ def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
     success_stamp_path = os.path.join(venv_path, "success-stamp")
     run_as_root(["rm", "-f", success_stamp_path])

-    run_as_root(["chown", "-R", "{}:{}".format(os.getuid(), os.getgid()), venv_path])
+    run_as_root(["chown", "-R", f"{os.getuid()}:{os.getgid()}", venv_path])
     source_log = get_logfile_name(source_venv_path)
     copy_parent_log(source_log, target_log)
     create_log_entry(
@@ -234,7 +234,7 @@ def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:


 def get_logfile_name(venv_path: str) -> str:
-    return "{}/setup-venv.log".format(venv_path)
+    return f"{venv_path}/setup-venv.log"


 def create_log_entry(
@@ -246,14 +246,14 @@ def create_log_entry(

     venv_path = os.path.dirname(target_log)
     with open(target_log, "a") as writer:
-        writer.write("{}\n".format(venv_path))
+        writer.write(f"{venv_path}\n")
         if copied_packages:
-            writer.write("Copied from {}:\n".format(parent))
-            writer.write("\n".join("- {}".format(p) for p in sorted(copied_packages)))
+            writer.write(f"Copied from {parent}:\n")
+            writer.write("\n".join(f"- {p}" for p in sorted(copied_packages)))
             writer.write("\n")

         writer.write("New packages:\n")
-        writer.write("\n".join("- {}".format(p) for p in sorted(new_packages)))
+        writer.write("\n".join(f"- {p}" for p in sorted(new_packages)))
         writer.write("\n\n")


@@ -274,7 +274,7 @@ def do_patch_activate_script(venv_path: str) -> None:
         lines = f.readlines()
     for i, line in enumerate(lines):
         if line.startswith("VIRTUAL_ENV="):
-            lines[i] = 'VIRTUAL_ENV="{}"\n'.format(venv_path)
+            lines[i] = f'VIRTUAL_ENV="{venv_path}"\n'

     with open(script_path, "w") as f:
         f.write("".join(lines))
@@ -306,7 +306,7 @@ def setup_virtualenv(
         with open(success_stamp, "w") as f:
             f.close()

-    print("Using cached Python venv from {}".format(cached_venv_path))
+    print(f"Using cached Python venv from {cached_venv_path}")
     if target_venv_path is not None:
         run_as_root(["ln", "-nsf", cached_venv_path, target_venv_path])
     if patch_activate_script:
@@ -331,7 +331,7 @@ def do_setup_virtualenv(venv_path: str, requirements_file: str) -> None:
     # Create new virtualenv.
     run_as_root(["mkdir", "-p", venv_path])
     run_as_root(["virtualenv", "-p", "python3", venv_path])
-    run_as_root(["chown", "-R", "{}:{}".format(os.getuid(), os.getgid()), venv_path])
+    run_as_root(["chown", "-R", f"{os.getuid()}:{os.getgid()}", venv_path])
     create_log_entry(get_logfile_name(venv_path), "", set(), new_packages)

     create_requirements_index_file(venv_path, requirements_file)
@@ -1358,10 +1358,10 @@ def check_token_access(token: str) -> None:
         logging.info("This is a Slack user token, which grants all rights the user has!")
     elif token.startswith("xoxb-"):
         data = requests.get(
-            "https://slack.com/api/team.info", headers={"Authorization": "Bearer {}".format(token)}
+            "https://slack.com/api/team.info", headers={"Authorization": f"Bearer {token}"}
         )
         if data.status_code != 200 or not data.json()["ok"]:
-            raise ValueError("Invalid Slack token: {}".format(token))
+            raise ValueError(f"Invalid Slack token: {token}")
         has_scopes = set(data.headers.get("x-oauth-scopes", "").split(","))
         required_scopes = {"emoji:read", "users:read", "users:read.email", "team:read"}
         missing_scopes = required_scopes - has_scopes
@@ -1379,9 +1379,7 @@ def get_slack_api_data(slack_api_url: str, get_param: str, **kwargs: Any) -> Any
     if not kwargs.get("token"):
         raise AssertionError("Slack token missing in kwargs")
     token = kwargs.pop("token")
-    data = requests.get(
-        slack_api_url, headers={"Authorization": "Bearer {}".format(token)}, **kwargs
-    )
+    data = requests.get(slack_api_url, headers={"Authorization": f"Bearer {token}"}, **kwargs)

     if data.status_code == requests.codes.ok:
         result = data.json()
@@ -8,7 +8,7 @@ def generate_camo_url(url: str) -> str:
     encoded_url = url.encode()
     encoded_camo_key = settings.CAMO_KEY.encode()
     digest = hmac.new(encoded_camo_key, encoded_url, hashlib.sha1).hexdigest()
-    return "{}/{}".format(digest, encoded_url.hex())
+    return f"{digest}/{encoded_url.hex()}"


 # Encodes the provided URL using the same algorithm used by the camo
@@ -83,7 +83,7 @@ class TextTestResult(runner.TextTestResult):
     def addSkip(self, test: TestCase, reason: str) -> None:
         TestResult.addSkip(self, test, reason)
         self.stream.writeln(  # type: ignore[attr-defined]  # https://github.com/python/typeshed/issues/3139
-            "** Skipping {}: {}".format(test.id(), reason)
+            f"** Skipping {test.id()}: {reason}"
         )
         self.stream.flush()

@@ -1136,7 +1136,7 @@ class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase):
             m.output,
             [
                 self.logger_output(
-                    "{} got invalid email argument.".format(self.backend.auth_backend_name),
+                    f"{self.backend.auth_backend_name} got invalid email argument.",
                     "warning",
                 )
             ],
@@ -3929,7 +3929,7 @@ class GoogleAuthBackendTest(SocialAuthBase):
         self.assertEqual(result.status_code, 400)
         self.assert_in_response("Invalid or expired login session.", result)
         self.assertEqual(
-            m.output, ["WARNING:root:log_into_subdomain: Invalid token given: {}".format(token)]
+            m.output, [f"WARNING:root:log_into_subdomain: Invalid token given: {token}"]
         )

     def test_prevent_duplicate_signups(self) -> None:
@@ -4088,7 +4088,7 @@ class GoogleAuthBackendTest(SocialAuthBase):
         result = self.get_log_into_subdomain(data, force_token=token)
         self.assertEqual(result.status_code, 400)
         self.assertEqual(
-            m.output, ["WARNING:root:log_into_subdomain: Invalid token given: {}".format(token)]
+            m.output, [f"WARNING:root:log_into_subdomain: Invalid token given: {token}"]
         )

     def test_user_cannot_log_into_wrong_subdomain(self) -> None:
@@ -22,7 +22,7 @@ class MutedUsersTests(ZulipTestCase):
         mute_time = datetime(2021, 1, 1, tzinfo=timezone.utc)

         with mock.patch("zerver.views.muting.timezone_now", return_value=mute_time):
-            url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+            url = f"/api/v1/users/me/muted_users/{cordelia.id}"
             result = self.api_post(hamlet, url)
             self.assert_json_success(result)

@@ -41,7 +41,7 @@ class MutedUsersTests(ZulipTestCase):
         hamlet = self.example_user("hamlet")
         self.login_user(hamlet)

-        url = "/api/v1/users/me/muted_users/{}".format(hamlet.id)
+        url = f"/api/v1/users/me/muted_users/{hamlet.id}"
         result = self.api_post(hamlet, url)
         self.assert_json_error(result, "Cannot mute self")

@@ -58,7 +58,7 @@ class MutedUsersTests(ZulipTestCase):
         self.assert_json_success(result)
         muted_id = result.json()["user_id"]

-        url = "/api/v1/users/me/muted_users/{}".format(muted_id)
+        url = f"/api/v1/users/me/muted_users/{muted_id}"
         result = self.api_post(hamlet, url)
         # Currently we do not allow muting bots. This is the error message
         # from `access_user_by_id`.
@@ -69,11 +69,11 @@ class MutedUsersTests(ZulipTestCase):
         self.login_user(hamlet)
         cordelia = self.example_user("cordelia")

-        url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+        url = f"/api/v1/users/me/muted_users/{cordelia.id}"
         result = self.api_post(hamlet, url)
         self.assert_json_success(result)

-        url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+        url = f"/api/v1/users/me/muted_users/{cordelia.id}"
         result = self.api_post(hamlet, url)
         self.assert_json_error(result, "User already muted")

@@ -87,7 +87,7 @@ class MutedUsersTests(ZulipTestCase):
         do_deactivate_user(cordelia, acting_user=None)

         with mock.patch("zerver.views.muting.timezone_now", return_value=mute_time):
-            url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+            url = f"/api/v1/users/me/muted_users/{cordelia.id}"
             result = self.api_post(hamlet, url)
             self.assert_json_success(result)

@@ -127,7 +127,7 @@ class MutedUsersTests(ZulipTestCase):
         self.login_user(hamlet)
         cordelia = self.example_user("cordelia")

-        url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+        url = f"/api/v1/users/me/muted_users/{cordelia.id}"
         result = self.api_delete(hamlet, url)
         self.assert_json_error(result, "User is not muted")

@@ -141,13 +141,13 @@ class MutedUsersTests(ZulipTestCase):
         do_deactivate_user(cordelia, acting_user=None)

         with mock.patch("zerver.views.muting.timezone_now", return_value=mute_time):
-            url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+            url = f"/api/v1/users/me/muted_users/{cordelia.id}"
             result = self.api_post(hamlet, url)
             self.assert_json_success(result)

         with mock.patch("zerver.lib.actions.timezone_now", return_value=mute_time):
             # To test that `RealmAuditLog` entry has correct `event_time`.
-            url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+            url = f"/api/v1/users/me/muted_users/{cordelia.id}"
             result = self.api_delete(hamlet, url)

         self.assert_json_success(result)
@@ -191,14 +191,14 @@ class MutedUsersTests(ZulipTestCase):
         self.assertEqual(set(), get_muting_users(cordelia.id))
         self.assertEqual(set(), cache_get(get_muting_users_cache_key(cordelia.id))[0])

-        url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+        url = f"/api/v1/users/me/muted_users/{cordelia.id}"
         result = self.api_post(hamlet, url)
         self.assert_json_success(result)
         self.assertEqual(None, cache_get(get_muting_users_cache_key(cordelia.id)))
         self.assertEqual({hamlet.id}, get_muting_users(cordelia.id))
         self.assertEqual({hamlet.id}, cache_get(get_muting_users_cache_key(cordelia.id))[0])

-        url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+        url = f"/api/v1/users/me/muted_users/{cordelia.id}"
         result = self.api_delete(hamlet, url)
         self.assert_json_success(result)
         self.assertEqual(None, cache_get(get_muting_users_cache_key(cordelia.id)))
@@ -224,7 +224,7 @@ class MutedUsersTests(ZulipTestCase):
         self.subscribe(othello, "general")

         # Hamlet mutes Cordelia.
-        url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+        url = f"/api/v1/users/me/muted_users/{cordelia.id}"
         result = self.api_post(hamlet, url)
         self.assert_json_success(result)

@@ -271,7 +271,7 @@ class MutedUsersTests(ZulipTestCase):
         self.assert_usermessage_read_flag(othello, pm_to_othello, False)

         # Hamlet mutes Cordelia.
-        url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+        url = f"/api/v1/users/me/muted_users/{cordelia.id}"
         result = self.api_post(hamlet, url)
         self.assert_json_success(result)

@@ -296,7 +296,7 @@ class MutedUsersTests(ZulipTestCase):
         m.assert_called_once()

         # Hamlet mutes Cordelia.
-        url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+        url = f"/api/v1/users/me/muted_users/{cordelia.id}"
         result = self.api_post(hamlet, url)
         self.assert_json_success(result)

@@ -335,7 +335,7 @@ class MutedUsersTests(ZulipTestCase):

         # Hamlet mutes Cordelia.
         self.login("hamlet")
-        url = "/api/v1/users/me/muted_users/{}".format(cordelia.id)
+        url = f"/api/v1/users/me/muted_users/{cordelia.id}"
         result = self.api_post(hamlet, url)
         self.assert_json_success(result)

@@ -21,7 +21,7 @@ class RequestMockWithProxySupport(responses.RequestsMock):
         if proxy_uri is not None:
             request = requests.Request(
                 method="GET",
-                url="{}/".format(proxy_uri),
+                url=f"{proxy_uri}/",
                 headers=adapter.proxy_headers(proxy_uri),
             ).prepare()
         return super()._on_request(  # type: ignore[misc]  # This is an undocumented internal API
@@ -27,9 +27,7 @@ class TestBuildEmail(ZulipTestCase):
             from_address=FromAddress.NOREPLY,
             language="en",
         )
-        self.assertEqual(
-            mail.extra_headers["From"], "{} <{}>".format(limit_length_name, FromAddress.NOREPLY)
-        )
+        self.assertEqual(mail.extra_headers["From"], f"{limit_length_name} <{FromAddress.NOREPLY}>")

     def test_build_and_send_SES_incompatible_From_address(self) -> None:
         hamlet = self.example_user("hamlet")
@@ -142,9 +140,7 @@ class TestSendEmail(ZulipTestCase):
             from_address=FromAddress.NOREPLY,
             language="en",
         )
-        self.assertEqual(
-            mail.extra_headers["From"], "{} <{}>".format(from_name, FromAddress.NOREPLY)
-        )
+        self.assertEqual(mail.extra_headers["From"], f"{from_name} <{FromAddress.NOREPLY}>")

         # We test the two cases that should raise an EmailNotDeliveredException
         errors = {
@@ -194,7 +194,7 @@ def home_real(request: HttpRequest) -> HttpResponse:

     log_data = get_request_notes(request).log_data
     assert log_data is not None
-    log_data["extra"] = "[{}]".format(queue_id)
+    log_data["extra"] = f"[{queue_id}]"

     csp_nonce = secrets.token_hex(24)

@@ -61,7 +61,7 @@ def get_outbound_reply_body(payload: Dict[str, Any]) -> str:
 def get_comment_body(payload: Dict[str, Any]) -> str:
     name = get_source_name(payload)
     comment = payload["target"]["data"]["body"]
-    return "**{name}** left a comment:\n```quote\n{comment}\n```".format(name=name, comment=comment)
+    return f"**{name}** left a comment:\n```quote\n{comment}\n```"


 def get_conversation_assigned_body(payload: Dict[str, Any]) -> str:
@@ -69,7 +69,7 @@ def get_conversation_assigned_body(payload: Dict[str, Any]) -> str:
     target_name = get_target_name(payload)

     if source_name == target_name:
-        return "**{source_name}** assigned themselves.".format(source_name=source_name)
+        return f"**{source_name}** assigned themselves."

     return "**{source_name}** assigned **{target_name}**.".format(
         source_name=source_name, target_name=target_name
@@ -458,7 +458,7 @@ class Command(BaseCommand):
                 else:
                     full_name += " " + random.choice(mnames)
                 if random.random() < 0.1:
-                    full_name += " {} ".format(random.choice(raw_emojis))
+                    full_name += f" {random.choice(raw_emojis)} "
             else:
                 full_name += " " + random.choice(lnames)
             email = fname.lower() + "@zulip.com"
@@ -806,9 +806,7 @@ class Command(BaseCommand):
         zulip_stream_dict: Dict[str, Dict[str, Any]] = {
             "devel": {"description": "For developing"},
             # ビデオゲーム - VideoGames (japanese)
-            "ビデオゲーム": {
-                "description": "Share your favorite video games! {}".format(raw_emojis[2])
-            },
+            "ビデオゲーム": {"description": f"Share your favorite video games! {raw_emojis[2]}"},
             "announce": {
                 "description": "For announcements",
                 "stream_post_policy": Stream.STREAM_POST_POLICY_ADMINS,