mirror of https://github.com/zulip/zulip.git
lint: Fix code that evaded our lint checks for string % non-tuple.
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
commit 643bd18b9f
parent d1b8497afb
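Why the lint rule exists: with %-formatting, a bare right-hand operand is treated
as a tuple of arguments whenever it happens to be one, so "%s" % x silently changes
meaning depending on the runtime type of x. Always writing the one-element tuple,
"%s" % (x,), as this commit does, keeps the behavior uniform. A minimal standalone
sketch of the pitfall (illustrative only, not taken from the Zulip codebase):

    # Illustrative example; not from the Zulip codebase.
    value = ("a", "b")

    # The bare form unpacks the tuple into two format arguments and raises
    # "TypeError: not all arguments converted during string formatting".
    try:
        print("value is %s" % value)
    except TypeError as err:
        print("failed: %s" % (err,))

    # Wrapping in a one-element tuple always formats the object itself.
    print("value is %s" % (value,))  # -> value is ('a', 'b')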
@@ -38,5 +38,5 @@ class Command(BaseCommand):
 for user_profile in user_profiles:
 print("%35s" % (user_profile.email,), end=' ')
 for week in range(10):
-print("%5d" % (self.messages_sent_by(user_profile, week)), end=' ')
+print("%5d" % (self.messages_sent_by(user_profile, week),), end=' ')
 print("")
@@ -122,7 +122,7 @@ def stats_for_installation(request: HttpRequest) -> HttpResponse:
 def stats_for_remote_installation(request: HttpRequest, remote_server_id: str) -> HttpResponse:
 server = RemoteZulipServer.objects.get(id=remote_server_id)
 return render_stats(request, '/remote/%s/installation' % (server.id,),
-'remote Installation %s' % (server.hostname), True, True)
+'remote Installation %s' % (server.hostname,), True, True)

 @require_server_admin_api
 @has_request_variables
@@ -17,7 +17,7 @@ if len(sys.argv) < 2:
 print("Please pass the name of the consumer file to check")
 exit(1)

-RESULTS_FILE = "/var/lib/nagios_state/check-rabbitmq-consumers-%s" % (sys.argv[1])
+RESULTS_FILE = "/var/lib/nagios_state/check-rabbitmq-consumers-%s" % (sys.argv[1],)

 ret, result = nagios_from_file(RESULTS_FILE)
@@ -114,7 +114,7 @@ def get_zulips():
 global queue_id, last_event_id
 res = zulip_recipient.get_events(queue_id=queue_id, last_event_id=last_event_id)
 if 'error' in res.get('result', {}):
-report("CRITICAL", msg="Error receiving Zulips, error was: %s" % (res["msg"]))
+report("CRITICAL", msg="Error receiving Zulips, error was: %s" % (res["msg"],))
 for event in res['events']:
 last_event_id = max(last_event_id, int(event['id']))
 # If we get a heartbeat event, that means we've been hanging for
@@ -154,10 +154,10 @@ zulip_recipient = zulip.Client(
 try:
 res = zulip_recipient.register(event_types=["message"])
 if 'error' in res.get('result', {}):
-report("CRITICAL", msg="Error subscribing to Zulips: %s" % (res['msg']))
+report("CRITICAL", msg="Error subscribing to Zulips: %s" % (res['msg'],))
 queue_id, last_event_id = (res['queue_id'], res['last_event_id'])
 except Exception:
-report("CRITICAL", msg="Error subscribing to Zulips:\n%s" % (traceback.format_exc()))
+report("CRITICAL", msg="Error subscribing to Zulips:\n%s" % (traceback.format_exc(),))
 msg_to_send = str(random.getrandbits(64))
 time_start = time.time()
@@ -71,7 +71,7 @@ if recv_diff > 5 * 16 * 1024**2:
 report('CRITICAL', 'secondary is %d bytes behind on receiving xlog' % (recv_diff,))

 if replay_diff > 5 * 16 * 1024**2:
-report('CRITICAL', 'secondary is %d bytes behind on applying received xlog' % (replay_diff))
+report('CRITICAL', 'secondary is %d bytes behind on applying received xlog' % (replay_diff,))

 if recv_diff < 0:
 report('CRITICAL', 'secondary is %d bytes ahead on receiving xlog' % (recv_diff,))
@@ -83,7 +83,7 @@ if recv_diff > 16 * 1024**2:
 report('WARNING', 'secondary is %d bytes behind on receiving xlog' % (recv_diff,))

 if replay_diff > 16 * 1024**2:
-report('WARNING', 'secondary is %d bytes behind on applying received xlog' % (replay_diff))
+report('WARNING', 'secondary is %d bytes behind on applying received xlog' % (replay_diff,))

 report('OK', ('secondary is %d bytes behind on receiving and %d bytes behind on applying xlog'
 % (recv_diff, replay_diff)))
@@ -89,9 +89,9 @@ if config_file.has_option('machine', 'pgroonga'):
 if remote_postgres_host != '':
 postgres_password = ''
 if settings.DATABASES['default']['PASSWORD'] is not None:
-postgres_password = "password='%s'" % settings.DATABASES['default']['PASSWORD']
-postgres_user = "user='%s'" % settings.DATABASES['default']['USER']
-postgres_dbname = "dbname='%s'" % settings.DATABASES['default']['NAME']
+postgres_password = "password='%s'" % (settings.DATABASES['default']['PASSWORD'],)
+postgres_user = "user='%s'" % (settings.DATABASES['default']['USER'],)
+postgres_dbname = "dbname='%s'" % (settings.DATABASES['default']['NAME'],)
 if settings.REMOTE_POSTGRES_SSLMODE != '':
 postgres_sslmode = settings.REMOTE_POSTGRES_SSLMODE
 else:
@@ -57,7 +57,7 @@ if False:
 def address_of(device_id):
 # type: (int) -> Optional[str]
 try:
-return netifaces.ifaddresses("ens%i" % device_id)[netifaces.AF_INET][0]['addr']
+return netifaces.ifaddresses("ens%i" % (device_id,))[netifaces.AF_INET][0]['addr']
 except KeyError:
 return None
@@ -112,7 +112,7 @@ for device in macs.values():
 if address is None:
 # If the device was not autoconfigured, do so now.
 log.info("Device ens%i not configured, starting dhcpd" % (device_number,))
-subprocess.check_call(['/sbin/dhcpcd', 'ens%i' % device_number])
+subprocess.check_call(['/sbin/dhcpcd', 'ens%i' % (device_number,)])

 dev_num = str(device_number)
 address = address_of(device_number)
@@ -126,7 +126,7 @@ for device in macs.values():
 ['/sbin/ip', 'rule', 'add', 'fwmark', dev_num, 'table', dev_num])
 subprocess.check_call(
 ['/sbin/ip', 'route', 'add', '0.0.0.0/0', 'table', dev_num, 'dev',
-'ens%i' % device_number, 'via', gateway])
+'ens%i' % (device_number,), 'via', gateway])
 subprocess.check_call(
 ['/sbin/iptables', '-t', 'mangle', '-A', 'OUTPUT', '-m', 'conntrack', '--ctorigdst',
 address, '-j', 'MARK', '--set-mark', dev_num])
@@ -12,8 +12,8 @@
 {% endblock %}

 {% block commonjs %}
-{{ render_bundle('app', attrs='nonce="%s"' % (csp_nonce)) }}
-{{ render_bundle('katex', attrs='nonce="%s"' % (csp_nonce)) }}
+{{ render_bundle('app', attrs='nonce="%s"' % (csp_nonce,)) }}
+{{ render_bundle('katex', attrs='nonce="%s"' % (csp_nonce,)) }}
 {% endblock %}

 {% block customhead %}
@@ -11,8 +11,8 @@
 }
 </style>

-{{ render_bundle('translations', attrs='nonce="%s"' % (csp_nonce)) }}
-{{ render_bundle('katex', attrs='nonce="%s"' % (csp_nonce)) }}
+{{ render_bundle('translations', attrs='nonce="%s"' % (csp_nonce,)) }}
+{{ render_bundle('katex', attrs='nonce="%s"' % (csp_nonce,)) }}
 {{ render_bundle('portico') }}
 {{ render_bundle('archive') }}
 {{ render_bundle('archive-styles') }}
@@ -28,7 +28,7 @@
 This allows pages requiring common files via webpack to override
 this block -->
 {% block commonjs %}
-{{ render_bundle('common', attrs='nonce="%s"' % (csp_nonce)) }}
+{{ render_bundle('common', attrs='nonce="%s"' % (csp_nonce,)) }}
 {% endblock %}
 {% block customhead %}
 {% endblock %}
@@ -69,7 +69,7 @@ def check_issue_labels():
 if args.force:
 response = requests.get(next_page_url)
 else:
-response = requests.get(next_page_url, headers={'Authorization': 'token %s' % token})
+response = requests.get(next_page_url, headers={'Authorization': 'token %s' % (token,)})
 if response.status_code == 401:
 sys.exit("Error. Please check the token.")
 if response.status_code == 403:
@@ -197,7 +197,7 @@ def validate_indent_html(fn):
 file.close()
 if not html.split('\n') == phtml.split('\n'):
 print('Invalid Indentation detected in file: '
-'%s\nDiff for the file against expected indented file:' % (fn), flush=True)
+'%s\nDiff for the file against expected indented file:' % (fn,), flush=True)
 with subprocess.Popen(
 ['diff', fn, '-'],
 stdin=subprocess.PIPE,
@@ -6,7 +6,7 @@ import sys
 def clean_html(filenames):
 # type: (List[str]) -> None
 for fn in filenames:
-print('Prettifying: %s' % (fn))
+print('Prettifying: %s' % (fn,))
 file = open(fn)
 html = file.read()
 phtml = pretty_print_html(html)
@@ -28,7 +28,7 @@ return exports;
 }());
 if (typeof module !== 'undefined') {
 module.exports = pygments_data;
-}''' % json.dumps(langs)
+}''' % (json.dumps(langs),)

 with open(JS_PATH, 'w') as f:
 f.write(template)
@@ -109,14 +109,14 @@ class TestHtmlBranches(unittest.TestCase):

 self.assertEqual(set(template_id_dict.keys()), {'below_navbar', 'hello_{{ message }}', 'intro'})
 self.assertEqual(template_id_dict['hello_{{ message }}'], [
-'Line 12:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH),
-'Line 12:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH)])
+'Line 12:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH,),
+'Line 12:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH,)])
 self.assertEqual(template_id_dict['intro'], [
-'Line 10:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH),
-'Line 11:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH),
-'Line 11:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH)])
+'Line 10:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH,),
+'Line 11:%s/tools/tests/test_template_data/test_template1.html' % (ZULIP_PATH,),
+'Line 11:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH,)])
 self.assertEqual(template_id_dict['below_navbar'], [
-'Line 10:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH)])
+'Line 10:%s/tools/tests/test_template_data/test_template2.html' % (ZULIP_PATH,)])

 def test_split_for_id_and_class(self) -> None:
 id1 = "{{ red|blue }}"
@@ -95,7 +95,7 @@ def build_userprofile(timestamp: Any, domain_name: str,

 def get_user_email(user_data: ZerverFieldsT, domain_name: str) -> str:
 # TODO Get user email from github
-email = ("%s@users.noreply.github.com" % user_data['username'])
+email = ("%s@users.noreply.github.com" % (user_data['username'],))
 return email

 def build_stream_and_defaultstream(timestamp: Any) -> Tuple[List[ZerverFieldsT],
@@ -213,10 +213,10 @@ def get_usermentions(message: Dict[str, Any], user_map: Dict[str, int],
 if 'mentions' in message:
 for mention in message['mentions']:
 if mention.get('userId') in user_map:
-gitter_mention = '@%s' % (mention['screenName'])
+gitter_mention = '@%s' % (mention['screenName'],)
 if mention['screenName'] not in user_short_name_to_full_name:
 logging.info("Mentioned user %s never sent any messages, so has no full name data" %
-mention['screenName'])
+(mention['screenName'],))
 full_name = mention['screenName']
 else:
 full_name = user_short_name_to_full_name[mention['screenName']]
@@ -277,7 +277,7 @@ def do_convert_data(gitter_data_file: str, output_dir: str, threads: int=6) -> N
 subprocess.check_call(["tar", "-czf", output_dir + '.tar.gz', output_dir, '-P'])

 logging.info('######### DATA CONVERSION FINISHED #########\n')
-logging.info("Zulip data dump created at %s" % (output_dir))
+logging.info("Zulip data dump created at %s" % (output_dir,))

 def write_data_to_file(output_file: str, data: Any) -> None:
 with open(output_file, "w") as f:
@@ -37,7 +37,7 @@ def build_zerver_realm(realm_id: int, realm_subdomain: str, time: float,
 other_product: str) -> List[ZerverFieldsT]:
 realm = Realm(id=realm_id, date_created=time,
 name=realm_subdomain, string_id=realm_subdomain,
-description=("Organization imported from %s!" % (other_product)))
+description="Organization imported from %s!" % (other_product,))
 auth_methods = [[flag[0], flag[1]] for flag in realm.authentication_methods]
 realm_dict = model_to_dict(realm, exclude='authentication_methods')
 realm_dict['authentication_methods'] = auth_methods
@@ -461,8 +461,8 @@ def process_avatars(avatar_list: List[ZerverFieldsT], avatar_dir: str, realm_id:
 avatar_url = avatar['path']
 avatar_original = dict(avatar)

-image_path = ('%s.png' % (avatar_hash))
-original_image_path = ('%s.original' % (avatar_hash))
+image_path = '%s.png' % (avatar_hash,)
+original_image_path = '%s.original' % (avatar_hash,)

 avatar_upload_list.append([avatar_url, image_path, original_image_path])
 # We don't add the size field here in avatar's records.json,
@@ -226,7 +226,7 @@ def build_customprofile_field(customprofile_field: List[ZerverFieldsT], fields:
 if field in slack_custom_fields:
 field_name = field
 else:
-field_name = ("slack custom field %s" % str(customprofilefield_id + 1))
+field_name = "slack custom field %s" % (str(customprofilefield_id + 1),)
 customprofilefield = CustomProfileField(
 id=customprofilefield_id,
 name=field_name,
@@ -539,7 +539,7 @@ def convert_slack_workspace_messages(slack_data_dir: str, users: List[ZerverFiel
 zerver_usermessage=zerver_usermessage)

 message_file = "/messages-%06d.json" % (dump_file_id,)
-logging.info("Writing Messages to %s\n" % (output_dir + message_file))
+logging.info("Writing Messages to %s\n" % (output_dir + message_file,))
 create_converted_data_files(message_json, output_dir, message_file)

 total_reactions += reactions
@@ -658,7 +658,7 @@ def channel_message_to_zerver_message(realm_id: int,
 # For example "sh_room_created" has the message 'started a call'
 # which should be displayed as '/me started a call'
 if subtype in ["bot_add", "sh_room_created", "me_message"]:
-content = ('/me %s' % (content))
+content = '/me %s' % (content,)
 if subtype == 'file_comment':
 # The file_comment message type only indicates the
 # responsible user in a subfield.
@@ -789,7 +789,7 @@ def get_attachment_path_and_content(fileinfo: ZerverFieldsT, realm_id: int) -> T
 random_name(18),
 sanitize_name(fileinfo['name'])
 ])
-attachment_path = ('/user_uploads/%s' % (s3_path))
+attachment_path = '/user_uploads/%s' % (s3_path,)
 content = '[%s](%s)' % (fileinfo['title'], attachment_path)

 return s3_path, content
@@ -919,7 +919,7 @@ def do_convert_data(slack_zip_file: str, output_dir: str, token: str, threads: i
 subprocess.check_call(["tar", "-czf", output_dir + '.tar.gz', output_dir, '-P'])

 logging.info('######### DATA CONVERSION FINISHED #########\n')
-logging.info("Zulip data dump created at %s" % (output_dir))
+logging.info("Zulip data dump created at %s" % (output_dir,))

 def get_data_file(path: str) -> Any:
 with open(path, "r") as fp:
@@ -831,7 +831,7 @@ def rate_limit(domain: str='all') -> Callable[[ViewFuncT], ViewFuncT]:

 if not user: # nocoverage # See comments below
 logging.error("Requested rate-limiting on %s but user is not authenticated!" %
-func.__name__)
+(func.__name__,))
 return func(request, *args, **kwargs)

 if isinstance(user, AnonymousUser): # nocoverage
@@ -52,7 +52,7 @@ def email_is_not_mit_mailing_list(email: str) -> None:
 username = email.rsplit("@", 1)[0]
 # Check whether the user exists and can get mail.
 try:
-DNS.dnslookup("%s.pobox.ns.athena.mit.edu" % username, DNS.Type.TXT)
+DNS.dnslookup("%s.pobox.ns.athena.mit.edu" % (username,), DNS.Type.TXT)
 except DNS.Base.ServerError as e:
 if e.rcode == DNS.Status.NXDOMAIN:
 raise ValidationError(mark_safe(MIT_VALIDATION_ERROR))
@@ -290,7 +290,7 @@ def notify_invites_changed(user_profile: UserProfile) -> None:
 def notify_new_user(user_profile: UserProfile, internal: bool=False) -> None:
 if settings.NOTIFICATION_BOT is not None:
 send_signup_message(settings.NOTIFICATION_BOT, "signups", user_profile, internal)
-statsd.gauge("users.signups.%s" % (user_profile.realm.string_id), 1, delta=True)
+statsd.gauge("users.signups.%s" % (user_profile.realm.string_id,), 1, delta=True)

 # We also clear any scheduled invitation emails to prevent them
 # from being sent after the user is created.
@@ -2050,23 +2050,23 @@ def check_schedule_message(sender: UserProfile, client: Client,

 def check_stream_name(stream_name: str) -> None:
 if stream_name.strip() == "":
-raise JsonableError(_("Invalid stream name '%s'" % (stream_name)))
+raise JsonableError(_("Invalid stream name '%s'" % (stream_name,)))
 if len(stream_name) > Stream.MAX_NAME_LENGTH:
-raise JsonableError(_("Stream name too long (limit: %s characters)." % (Stream.MAX_NAME_LENGTH)))
+raise JsonableError(_("Stream name too long (limit: %s characters)." % (Stream.MAX_NAME_LENGTH,)))
 for i in stream_name:
 if ord(i) == 0:
-raise JsonableError(_("Stream name '%s' contains NULL (0x00) characters." % (stream_name)))
+raise JsonableError(_("Stream name '%s' contains NULL (0x00) characters." % (stream_name,)))

 def check_default_stream_group_name(group_name: str) -> None:
 if group_name.strip() == "":
-raise JsonableError(_("Invalid default stream group name '%s'" % (group_name)))
+raise JsonableError(_("Invalid default stream group name '%s'" % (group_name,)))
 if len(group_name) > DefaultStreamGroup.MAX_NAME_LENGTH:
 raise JsonableError(_("Default stream group name too long (limit: %s characters)"
-% (DefaultStreamGroup.MAX_NAME_LENGTH)))
+% (DefaultStreamGroup.MAX_NAME_LENGTH,)))
 for i in group_name:
 if ord(i) == 0:
 raise JsonableError(_("Default stream group name '%s' contains NULL (0x00) characters."
-% (group_name)))
+% (group_name,)))

 def send_rate_limited_pm_notification_to_bot_owner(sender: UserProfile,
 realm: Realm,
@@ -92,13 +92,13 @@ EMOJI_REGEX = r'(?P<syntax>:[\w\-\+]+:)'

 def verbose_compile(pattern: str) -> Any:
 return re.compile(
-"^(.*?)%s(.*?)$" % pattern,
+"^(.*?)%s(.*?)$" % (pattern,),
 re.DOTALL | re.UNICODE | re.VERBOSE
 )

 def normal_compile(pattern: str) -> Any:
 return re.compile(
-r"^(.*?)%s(.*)$" % pattern,
+r"^(.*?)%s(.*)$" % (pattern,),
 re.DOTALL | re.UNICODE
 )
@@ -1869,7 +1869,7 @@ class Bugdown(markdown.Markdown):
 def register_realm_filters(self, inlinePatterns: markdown.util.Registry) -> markdown.util.Registry:
 for (pattern, format_string, id) in self.getConfig("realm_filters"):
 inlinePatterns.register(RealmFilterPattern(pattern, format_string, self),
-'realm_filters/%s' % (pattern), 45)
+'realm_filters/%s' % (pattern,), 45)
 return inlinePatterns

 def build_treeprocessors(self) -> markdown.util.Registry:
@@ -295,7 +295,7 @@ def cache(func: Callable[..., ReturnT]) -> Callable[..., ReturnT]:
 return cache_with_key(keyfunc)(func)

 def preview_url_cache_key(url: str) -> str:
-return "preview_url:%s" % (make_safe_digest(url))
+return "preview_url:%s" % (make_safe_digest(url),)

 def display_recipient_cache_key(recipient_id: int) -> str:
 return "display_recipient_dict:%d" % (recipient_id,)
@@ -313,7 +313,7 @@ def user_profile_cache_key(email: str, realm: 'Realm') -> str:
 return user_profile_cache_key_id(email, realm.id)

 def bot_profile_cache_key(email: str) -> str:
-return "bot_profile:%s" % (make_safe_digest(email.strip()))
+return "bot_profile:%s" % (make_safe_digest(email.strip()),)

 def user_profile_by_id_cache_key(user_profile_id: int) -> str:
 return "user_profile_by_id:%s" % (user_profile_id,)
@@ -474,7 +474,7 @@ def to_dict_cache_key(message: 'Message') -> str:
 return to_dict_cache_key_id(message.id)

 def open_graph_description_cache_key(content: Any, request: HttpRequest) -> str:
-return 'open_graph_description_path:%s' % (make_safe_digest(request.META['PATH_INFO']))
+return 'open_graph_description_path:%s' % (make_safe_digest(request.META['PATH_INFO']),)

 def flush_message(sender: Any, **kwargs: Any) -> None:
 message = kwargs['instance']
@@ -21,7 +21,7 @@ def wrapper_execute(self: CursorObj,
 stop = time.time()
 duration = stop - start
 self.connection.queries.append({
-'time': "%.3f" % duration,
+'time': "%.3f" % (duration,),
 })

 class TimeTrackingCursor(cursor):
@@ -248,7 +248,7 @@ def extract_body(message: message.Message, remove_quotations: bool=True) -> str:
 if plaintext_content is not None or html_content is not None:
 raise ZulipEmailForwardUserError("Email has no nonempty body sections; ignoring.")

-logging.warning("Content types: %s" % ([part.get_content_type() for part in message.walk()]))
+logging.warning("Content types: %s" % ([part.get_content_type() for part in message.walk()],))
 raise ZulipEmailForwardUserError("Unable to find plaintext or HTML message body")

 def filter_footer(text: str) -> str:
@@ -274,7 +274,7 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile,
 if len(recipients) != 1:
 raise ValueError(
 'All missed_messages must have the same recipient and topic %r' %
-recipients
+(recipients,)
 )

 unsubscribe_link = one_click_unsubscribe_link(user_profile, "missed_messages")
@@ -324,7 +324,7 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile,
 if r['id'] != user_profile.id]
 context.update({'group_pm': True})
 if len(other_recipients) == 2:
-huddle_display_name = "%s" % (" and ".join(other_recipients))
+huddle_display_name = " and ".join(other_recipients)
 context.update({'huddle_display_name': huddle_display_name})
 elif len(other_recipients) == 3:
 huddle_display_name = "%s, %s, and %s" % (
@@ -20,11 +20,11 @@ def format_email_subject(email_subject: str) -> str:

 def logger_repr(report: Dict[str, Any]) -> str:
 return ("Logger %(logger_name)s, from module %(log_module)s line %(log_lineno)d:"
-% report)
+% dict(report))

 def user_info_str(report: Dict[str, Any]) -> str:
 if report['user_full_name'] and report['user_email']:
-user_info = "%(user_full_name)s (%(user_email)s)" % (report)
+user_info = "%(user_full_name)s (%(user_email)s)" % dict(report)
 else:
 user_info = "Anonymous user (not logged in)"
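The dict(report) changes above cover the mapping form of %-formatting: with
%(name)s placeholders the right-hand operand must be a mapping rather than a
tuple, and wrapping the report in an explicit dict() keeps the behavior identical
while making the mapping intent clear. A minimal standalone sketch (illustrative
only, not taken from the Zulip codebase):

    # Illustrative example; not from the Zulip codebase.
    report = {"node": "staging.example.com", "message": "something broke"}

    # Named placeholders look their keys up in a mapping on the right-hand side.
    email_subject = "%(node)s: %(message)s" % dict(report)
    print(email_subject)  # -> staging.example.com: something broke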
@@ -48,7 +48,7 @@ def notify_browser_error(report: Dict[str, Any]) -> None:
 email_browser_error(report)

 def email_browser_error(report: Dict[str, Any]) -> None:
-email_subject = "Browser error for %s" % (user_info_str(report))
+email_subject = "Browser error for %s" % (user_info_str(report),)

 body = ("User: %(user_full_name)s <%(user_email)s> on %(deployment)s\n\n"
 "Message:\n%(message)s\n\nStacktrace:\n%(stacktrace)s\n\n"
@@ -57,7 +57,7 @@ def email_browser_error(report: Dict[str, Any]) -> None:
 "href: %(href)s\n"
 "Server path: %(server_path)s\n"
 "Deployed version: %(version)s\n"
-% (report))
+% dict(report))

 more_info = report['more_info']
 if more_info is not None:
@@ -76,7 +76,7 @@ def zulip_browser_error(report: Dict[str, Any]) -> None:

 body = "User: %s\n" % (user_info,)
 body += ("Message: %(message)s\n"
-% (report))
+% dict(report))

 realm = get_system_bot(settings.ERROR_BOT).realm
 internal_send_message(realm, settings.ERROR_BOT,
@@ -89,7 +89,7 @@ def notify_server_error(report: Dict[str, Any], skip_error_zulip: Optional[bool]
 zulip_server_error(report)

 def zulip_server_error(report: Dict[str, Any]) -> None:
-email_subject = '%(node)s: %(message)s' % (report)
+email_subject = '%(node)s: %(message)s' % dict(report)

 logger_str = logger_repr(report)
 user_info = user_info_str(report)
@@ -99,7 +99,7 @@ def zulip_server_error(report: Dict[str, Any]) -> None:
 request_repr = (
 "Request info:\n~~~~\n"
 "- path: %(path)s\n"
-"- %(method)s: %(data)s\n") % (report)
+"- %(method)s: %(data)s\n") % dict(report)
 for field in ["REMOTE_ADDR", "QUERY_STRING", "SERVER_NAME"]:
 val = report.get(field.lower())
 if field == "QUERY_STRING":
@@ -117,7 +117,7 @@ def zulip_server_error(report: Dict[str, Any]) -> None:
 format_email_subject(email_subject), message)

 def email_server_error(report: Dict[str, Any]) -> None:
-email_subject = '%(node)s: %(message)s' % (report)
+email_subject = '%(node)s: %(message)s' % dict(report)

 logger_str = logger_repr(report)
 user_info = user_info_str(report)
@@ -127,7 +127,7 @@ def email_server_error(report: Dict[str, Any]) -> None:
 request_repr = (
 "Request info:\n"
 "- path: %(path)s\n"
-"- %(method)s: %(data)s\n") % (report)
+"- %(method)s: %(data)s\n") % dict(report)
 for field in ["REMOTE_ADDR", "QUERY_STRING", "SERVER_NAME"]:
 val = report.get(field.lower())
 if field == "QUERY_STRING":
@@ -1165,7 +1165,7 @@ def _save_s3_object_to_file(
 else:
 fields = key.name.split('/')
 if len(fields) != 3:
-raise AssertionError("Suspicious key with invalid format %s" % (key.name))
+raise AssertionError("Suspicious key with invalid format %s" % (key.name,))
 filename = os.path.join(output_dir, key.name)

 dirname = os.path.dirname(filename)
@@ -1180,7 +1180,7 @@ def export_files_from_s3(realm: Realm, bucket_name: str, output_dir: Path,
 bucket = conn.get_bucket(bucket_name, validate=True)
 records = []

-logging.info("Downloading uploaded files from %s" % (bucket_name))
+logging.info("Downloading uploaded files from %s" % (bucket_name,))

 avatar_hash_values = set()
 user_ids = set()
@@ -1352,7 +1352,7 @@ def do_write_stats_file_for_realm_export(output_dir: Path) -> None:
 f.write(fn+'\n')
 payload = open(fn).read()
 data = ujson.loads(payload)
-f.write('%5d records\n' % len(data))
+f.write('%5d records\n' % (len(data),))
 f.write('\n')

 def do_export_realm(realm: Realm, output_dir: Path, threads: int,
@@ -1392,7 +1392,7 @@ def do_export_realm(realm: Realm, output_dir: Path, threads: int,
 logging.info("Exporting .partial files messages")
 message_ids = export_partial_message_files(realm, response, output_dir=output_dir,
 public_only=public_only)
-logging.info('%d messages were exported' % (len(message_ids)))
+logging.info('%d messages were exported' % (len(message_ids),))

 # zerver_reaction
 zerver_reaction = {}  # type: TableData
@@ -1413,7 +1413,7 @@ def do_export_realm(realm: Realm, output_dir: Path, threads: int,
 # Start parallel jobs to export the UserMessage objects.
 launch_user_message_subprocesses(threads=threads, output_dir=output_dir)

-logging.info("Finished exporting %s" % (realm.string_id))
+logging.info("Finished exporting %s" % (realm.string_id,))
 create_soft_link(source=output_dir, in_progress=False)

 def export_attachment_table(realm: Realm, output_dir: Path, message_ids: Set[int]) -> None:
@@ -279,7 +279,7 @@ def fix_message_rendered_content(realm: Realm,
 # * rendering markdown failing with the exception being
 # caught in bugdown (which then returns None, causing the the
 # rendered_content assert above to fire).
-logging.warning("Error in markdown rendering for message ID %s; continuing" % (message['id']))
+logging.warning("Error in markdown rendering for message ID %s; continuing" % (message['id'],))

 def current_table_ids(data: TableData, table: TableName) -> List[int]:
 """
@@ -662,7 +662,7 @@ def import_uploads(import_dir: Path, processes: int, processing_avatars: bool=Fa
 upload_backend.ensure_basic_avatar_image(user_profile=user_profile)
 except BadImageError:
 logging.warning("Could not thumbnail avatar image for user %s; ignoring" % (
-user_profile.id))
+user_profile.id,))
 # Delete the record of the avatar to avoid 404s.
 do_change_avatar_fields(user_profile, UserProfile.AVATAR_FROM_GRAVATAR)
 return 0
@@ -23,7 +23,7 @@ class _RateLimitFilter:

 # Track duplicate errors
 duplicate = False
-rate = getattr(settings, '%s_LIMIT' % self.__class__.__name__.upper(),
+rate = getattr(settings, '%s_LIMIT' % (self.__class__.__name__.upper(),),
 600) # seconds
 if rate > 0:
 # Test if the cache works
@@ -15,7 +15,7 @@ def xor_hex_strings(bytes_a: str, bytes_b: str) -> str:
 """Given two hex strings of equal length, return a hex string with
 the bitwise xor of the two hex strings."""
 assert len(bytes_a) == len(bytes_b)
-return ''.join(["%x" % (int(x, 16) ^ int(y, 16))
+return ''.join(["%x" % (int(x, 16) ^ int(y, 16),)
 for x, y in zip(bytes_a, bytes_b)])

 def ascii_to_hex(input_string: str) -> str:
@@ -277,7 +277,7 @@ def do_rest_call(base_url: str,
 % {'message_url': get_message_url(event),
 'status_code': response.status_code,
 'response': response.content})
-failure_message = "Third party responded with %d" % (response.status_code)
+failure_message = "Third party responded with %d" % (response.status_code,)
 fail_with_message(event, failure_message)
 notify_bot_owner(event, request_data, response.status_code, response.content)
@@ -643,7 +643,7 @@ def handle_remove_push_notification(user_profile_id: int, message_ids: List[int]
 def failure_processor(event: Dict[str, Any]) -> None:
 logger.warning(
 "Maximum retries exceeded for trigger:%s event:push_notification" % (
-event['user_profile_id']))
+event['user_profile_id'],))
 else:
 android_devices = list(PushDeviceToken.objects.filter(
 user=user_profile, kind=PushDeviceToken.GCM))
@@ -721,7 +721,7 @@ def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any
 def failure_processor(event: Dict[str, Any]) -> None:
 logger.warning(
 "Maximum retries exceeded for trigger:%s event:push_notification" % (
-event['user_profile_id']))
+event['user_profile_id'],))
 retry_event('missedmessage_mobile_notifications', missed_message,
 failure_processor)
 return
@@ -51,7 +51,7 @@ def filter_by_subscription_history(user_profile: UserProfile,
 if stream_messages[-1]['id'] <= log_entry.event_last_message_id:
 stream_messages = []
 else:
-raise AssertionError('%s is not a Subscription Event.' % (log_entry.event_type))
+raise AssertionError('%s is not a Subscription Event.' % (log_entry.event_type,))

 if len(stream_messages) > 0:
 # We do this check for last event since if the last subscription
@@ -38,11 +38,11 @@ def run_db_migrations(platform: str) -> None:
 # what the database is as runtime.
 # Also we export DB_NAME which is ignored by dev platform but
 # recognised by test platform and used to migrate correct db.
-run(['env', ('DJANGO_SETTINGS_MODULE=%s' % settings), db_name,
+run(['env', ('DJANGO_SETTINGS_MODULE=%s' % (settings,)), db_name,
 './manage.py', 'migrate', '--no-input'])
-run(['env', ('DJANGO_SETTINGS_MODULE=%s' % settings), db_name,
+run(['env', ('DJANGO_SETTINGS_MODULE=%s' % (settings,)), db_name,
 './manage.py', 'get_migration_status',
-'--output=%s' % (migration_status_file)])
+'--output=%s' % (migration_status_file,)])

 def run_generate_fixtures_if_required(use_force: bool=False) -> None:
 generate_fixtures_command = ['tools/setup/generate-fixtures']
@@ -162,7 +162,7 @@ def queries_captured(include_savepoints: Optional[bool]=False) -> Generator[
 if include_savepoints or ('SAVEPOINT' not in sql):
 queries.append({
 'sql': self.mogrify(sql, params).decode('utf-8'),
-'time': "%.3f" % duration,
+'time': "%.3f" % (duration,),
 })

 old_execute = TimeTrackingCursor.execute
@@ -215,7 +215,7 @@ def _replacement_destroy_test_db(self: DatabaseCreation,
 unnecessary sleep(1)."""
 with self.connection._nodb_connection.cursor() as cursor:
 cursor.execute("DROP DATABASE %s"
-% self.connection.ops.quote_name(test_database_name))
+% (self.connection.ops.quote_name(test_database_name),))
 DatabaseCreation._destroy_test_db = _replacement_destroy_test_db

 def destroy_test_databases(database_id: Optional[int]=None) -> None:
@@ -148,7 +148,7 @@ def user_ids_to_users(user_ids: List[int], realm: Realm) -> List[UserProfile]:
 found_user_ids = user_profiles_by_id.keys()
 missed_user_ids = [user_id for user_id in user_ids if user_id not in found_user_ids]
 if missed_user_ids:
-raise JsonableError(_("Invalid user ID: %s" % (missed_user_ids[0])))
+raise JsonableError(_("Invalid user ID: %s" % (missed_user_ids[0],)))

 user_profiles = list(user_profiles_by_id.values())
 for user_profile in user_profiles:
@@ -174,7 +174,7 @@ def check_dict(required_keys: Iterable[Tuple[str, Validator]]=[],
 optional_keys_set = set(x[0] for x in optional_keys)
 delta_keys = set(val.keys()) - required_keys_set - optional_keys_set
 if len(delta_keys) != 0:
-return _("Unexpected arguments: %s" % (", ".join(list(delta_keys))))
+return _("Unexpected arguments: %s" % (", ".join(list(delta_keys)),))

 return None
@@ -191,7 +191,7 @@ class Command(makemessages.Command):
 exclude = self.frontend_exclude
 process_all = self.frontend_all

-paths = glob.glob('%s/*' % self.default_locale_path,)
+paths = glob.glob('%s/*' % (self.default_locale_path,),)
 all_locales = [os.path.basename(path) for path in paths if os.path.isdir(path)]

 # Account for excluded locales
@@ -84,7 +84,7 @@ class Command(BaseCommand):

 print("Validating Django models.py...")
 self.check(display_num_errors=True)
-print("\nDjango version %s" % (django.get_version()))
+print("\nDjango version %s" % (django.get_version(),))
 print("Tornado server is running at http://%s:%s/" % (addr, port))
 print("Quit the server with %s." % (quit_command,))
@@ -38,7 +38,7 @@ class Command(sendtestemail.Command):
 print(" * %s" % (noreply_sender,))
 send_mail("Zulip noreply email test", message, noreply_sender, kwargs['email'])
 print()
-print("Successfully sent 2 emails to %s!" % (", ".join(kwargs['email'])))
+print("Successfully sent 2 emails to %s!" % (", ".join(kwargs['email']),))

 if kwargs['managers']:
 mail_managers("Zulip manager email test", "This email was sent to the site managers.")
@@ -57,11 +57,11 @@ approach shown above.
 custom_headers_dict = ujson.loads(custom_headers)
 for header in custom_headers_dict:
 if len(header.split(" ")) > 1:
-raise ValueError("custom header '%s' contains a space." % (header))
+raise ValueError("custom header '%s' contains a space." % (header,))
 headers["HTTP_" + header.upper().replace("-", "_")] = str(custom_headers_dict[header])
 return headers
 except ValueError as ve:
-print('Encountered an error while attempting to parse custom headers: %s' % (ve))
+print('Encountered an error while attempting to parse custom headers: %s' % (ve,))
 print('Note: all strings must be enclosed within "" instead of \'\'')
 exit(1)
@@ -20,7 +20,7 @@ def get_users_from_emails(emails: Any,
 user_emails_found = {user.email for user in users}
 user_emails_not_found = '\n'.join(set(emails) - user_emails_found)
 raise CommandError('Users with the following emails were not found:\n\n%s\n\n'
-'Check if they are correct.' % (user_emails_not_found))
+'Check if they are correct.' % (user_emails_not_found,))
 return users

 class Command(ZulipBaseCommand):
@@ -67,7 +67,7 @@ class Command(ZulipBaseCommand):

 users_to_activate = get_users_from_emails(user_emails, filter_kwargs)
 users_activated = do_soft_activate_users(users_to_activate)
-logger.info('Soft Reactivated %d user(s)' % (len(users_activated)))
+logger.info('Soft Reactivated %d user(s)' % (len(users_activated),))

 elif deactivate:
 if user_emails:
@@ -77,7 +77,7 @@ class Command(ZulipBaseCommand):
 else:
 users_deactivated = do_auto_soft_deactivate_users(int(options['inactive_for']),
 realm)
-logger.info('Soft Deactivated %d user(s)' % (len(users_deactivated)))
+logger.info('Soft Deactivated %d user(s)' % (len(users_deactivated),))

 else:
 self.print_help("./manage.py", "soft_deactivate_users")
@@ -80,7 +80,7 @@ def timedelta_ms(timedelta: float) -> float:

 def format_timedelta(timedelta: float) -> str:
 if (timedelta >= 1):
-return "%.1fs" % (timedelta)
+return "%.1fs" % (timedelta,)
 return "%.0fms" % (timedelta_ms(timedelta),)

 def is_slow_query(time_delta: float, path: str) -> bool:
@@ -160,7 +160,7 @@ def write_log_line(log_data: MutableMapping[str, Any], path: str, method: str, r

 startup_output = ""
 if 'startup_time_delta' in log_data and log_data["startup_time_delta"] > 0.005:
-startup_output = " (+start: %s)" % (format_timedelta(log_data["startup_time_delta"]))
+startup_output = " (+start: %s)" % (format_timedelta(log_data["startup_time_delta"]),)

 bugdown_output = ""
 if 'bugdown_time_start' in log_data:
@@ -484,7 +484,7 @@ class SocialAuthBase(ZulipTestCase):
             params['next'] = next
             params['multiuse_object_key'] = multiuse_object_key
         if len(params) > 0:
-            url += "?%s" % (urllib.parse.urlencode(params))
+            url += "?%s" % (urllib.parse.urlencode(params),)

         result = self.client_get(url, **headers)

@@ -983,7 +983,7 @@ class GoogleOAuthTest(ZulipTestCase):
             params['next'] = next
             params['multiuse_object_key'] = multiuse_object_key
         if len(params) > 0:
-            url += "?%s" % (urllib.parse.urlencode(params))
+            url += "?%s" % (urllib.parse.urlencode(params),)

         result = self.client_get(url, **headers)
         if result.status_code != 302 or '/accounts/login/google/send/' not in result.url:
@@ -618,25 +618,25 @@ class BugdownTest(ZulipTestCase):

         msg = 'http://www.twitter.com'
         converted = bugdown_convert(msg)
-        self.assertEqual(converted, '<p>%s</p>' % make_link('http://www.twitter.com'))
+        self.assertEqual(converted, '<p>%s</p>' % (make_link('http://www.twitter.com'),))

         msg = 'http://www.twitter.com/wdaher/'
         converted = bugdown_convert(msg)
-        self.assertEqual(converted, '<p>%s</p>' % make_link('http://www.twitter.com/wdaher/'))
+        self.assertEqual(converted, '<p>%s</p>' % (make_link('http://www.twitter.com/wdaher/'),))

         msg = 'http://www.twitter.com/wdaher/status/3'
         converted = bugdown_convert(msg)
-        self.assertEqual(converted, '<p>%s</p>' % make_link('http://www.twitter.com/wdaher/status/3'))
+        self.assertEqual(converted, '<p>%s</p>' % (make_link('http://www.twitter.com/wdaher/status/3'),))

         # id too long
         msg = 'http://www.twitter.com/wdaher/status/2879779692873154569'
         converted = bugdown_convert(msg)
-        self.assertEqual(converted, '<p>%s</p>' % make_link('http://www.twitter.com/wdaher/status/2879779692873154569'))
+        self.assertEqual(converted, '<p>%s</p>' % (make_link('http://www.twitter.com/wdaher/status/2879779692873154569'),))

         # id too large (i.e. tweet doesn't exist)
         msg = 'http://www.twitter.com/wdaher/status/999999999999999999'
         converted = bugdown_convert(msg)
-        self.assertEqual(converted, '<p>%s</p>' % make_link('http://www.twitter.com/wdaher/status/999999999999999999'))
+        self.assertEqual(converted, '<p>%s</p>' % (make_link('http://www.twitter.com/wdaher/status/999999999999999999'),))

         msg = 'http://www.twitter.com/wdaher/status/287977969287315456'
         converted = bugdown_convert(msg)
@@ -726,7 +726,7 @@ class BugdownTest(ZulipTestCase):
         realm_emoji = RealmEmoji.objects.filter(realm=realm,
                                                 name='green_tick',
                                                 deactivated=False).get()
-        self.assertEqual(converted, '<p>%s</p>' % (emoji_img(':green_tick:', realm_emoji.file_name, realm.id)))
+        self.assertEqual(converted, '<p>%s</p>' % (emoji_img(':green_tick:', realm_emoji.file_name, realm.id),))

         # Deactivate realm emoji.
         do_remove_realm_emoji(realm, 'green_tick')
@@ -1238,7 +1238,7 @@ class BugdownTest(ZulipTestCase):
         self.assertEqual(render_markdown(msg, content),
                          '<p><span class="user-mention" '
                          'data-user-id="%s">'
-                         '@King Hamlet</span></p>' % (user_id))
+                         '@King Hamlet</span></p>' % (user_id,))
         self.assertEqual(msg.mentions_user_ids, set([user_profile.id]))

     def test_mention_silent(self) -> None:
@@ -1251,7 +1251,7 @@ class BugdownTest(ZulipTestCase):
         self.assertEqual(render_markdown(msg, content),
                          '<p><span class="user-mention silent" '
                          'data-user-id="%s">'
-                         'King Hamlet</span></p>' % (user_id))
+                         'King Hamlet</span></p>' % (user_id,))
         self.assertEqual(msg.mentions_user_ids, set())

     def test_possible_mentions(self) -> None:
@@ -1308,7 +1308,7 @@ class BugdownTest(ZulipTestCase):
         # Both fenced quote and > quote should be identical for both silent and regular syntax.
         expected = ('<blockquote>\n<p>'
                     '<span class="user-mention silent" data-user-id="%s">King Hamlet</span>'
-                    '</p>\n</blockquote>' % (hamlet.id))
+                    '</p>\n</blockquote>' % (hamlet.id,))
         content = "```quote\n@**King Hamlet**\n```"
         self.assertEqual(render_markdown(msg, content), expected)
         self.assertEqual(msg.mentions_user_ids, set())
@@ -351,7 +351,7 @@ class CustomProfileFieldTest(ZulipTestCase):
             'data': ujson.dumps([invalid_field_id])
         })
         self.assert_json_error(result,
-                               u'Field id %d not found.' % (invalid_field_id))
+                               u'Field id %d not found.' % (invalid_field_id,))

         field = CustomProfileField.objects.get(name="Mentor", realm=realm)
         data = [{'id': field.id,
@@ -395,7 +395,7 @@ class CustomProfileFieldTest(ZulipTestCase):
         invalid_user_id = 1000
         self.assert_error_update_invalid_value(field_name, [invalid_user_id],
                                                u"Invalid user ID: %d"
-                                               % (invalid_user_id))
+                                               % (invalid_user_id,))

     def test_create_field_of_type_user(self) -> None:
         self.login(self.example_email("iago"))
@@ -183,7 +183,7 @@ class TestMissedMessages(ZulipTestCase):
         hamlet = self.example_user('hamlet')
         handle_missedmessage_emails(hamlet.id, [{'message_id': msg_id, 'trigger': trigger}])
         if settings.EMAIL_GATEWAY_PATTERN != "":
-            reply_to_addresses = [settings.EMAIL_GATEWAY_PATTERN % (u'mm' + t) for t in tokens]
+            reply_to_addresses = [settings.EMAIL_GATEWAY_PATTERN % (u'mm' + t,) for t in tokens]
             reply_to_emails = [formataddr(("Zulip", address)) for address in reply_to_addresses]
         else:
             reply_to_emails = ["noreply@testserver"]
@@ -1264,7 +1264,7 @@ class EventsRegisterTest(ZulipTestCase):
         error = realm_user_add_checker('events[0]', events[0])
         self.assert_on_error(error)
         new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm)
-        self.assertEqual(new_user_profile.email, "user%s@zulip.testserver" % (new_user_profile.id))
+        self.assertEqual(new_user_profile.email, "user%s@zulip.testserver" % (new_user_profile.id,))

     def test_alert_words_events(self) -> None:
         alert_words_checker = self.check_events_dict([
@@ -1602,7 +1602,7 @@ class EventsRegisterTest(ZulipTestCase):
         ])

         if vals is None:
-            raise AssertionError('No test created for %s' % (name))
+            raise AssertionError('No test created for %s' % (name,))
         do_set_realm_property(self.user_profile.realm, name, vals[0])
         for val in vals[1:]:
             state_change_expected = True
@@ -1779,7 +1779,7 @@ class EventsRegisterTest(ZulipTestCase):
         else:
             values = [False, True, False]
         if values is None:
-            raise AssertionError('No test created for %s' % (setting_name))
+            raise AssertionError('No test created for %s' % (setting_name,))

         for value in values:
             events = self.do_test(lambda: do_set_user_display_setting(
@@ -392,7 +392,7 @@ class ImportExportTest(ZulipTestCase):

         def get_set(table: str, field: str) -> Set[str]:
             values = set(r[field] for r in data[table])
-            # print('set(%s)' % sorted(values))
+            # print('set(%s)' % (sorted(values),))
             return values

         def find_by_id(table: str, db_id: int) -> Dict[str, Any]:
@@ -457,7 +457,7 @@ class ImportExportTest(ZulipTestCase):

         def get_set(data: List[Dict[str, Any]], field: str) -> Set[str]:
             values = set(r[field] for r in data)
-            # print('set(%s)' % sorted(values))
+            # print('set(%s)' % (sorted(values),))
             return values

         messages = read_file('messages-000001.json')
@@ -1180,7 +1180,7 @@ class MessageDictTest(ZulipTestCase):
             message = Message(
                 sender=sender,
                 recipient=recipient,
-                content='whatever %d' % i,
+                content='whatever %d' % (i,),
                 rendered_content='DOES NOT MATTER',
                 rendered_content_version=bugdown.version,
                 pub_date=timezone_now(),
@@ -1328,7 +1328,7 @@ class SewMessageAndReactionTest(ZulipTestCase):
             message = Message(
                 sender=sender,
                 recipient=recipient,
-                content='whatever %d' % i,
+                content='whatever %d' % (i,),
                 pub_date=timezone_now(),
                 sending_client=sending_client,
                 last_edit_time=timezone_now(),
@@ -193,4 +193,4 @@ class TestNotifyNewUser(ZulipTestCase):
         self.assertEqual(message.recipient.type, Recipient.STREAM)
         actual_stream = Stream.objects.get(id=message.recipient.type_id)
         self.assertEqual(actual_stream.name, Realm.INITIAL_PRIVATE_STREAM_NAME)
-        self.assertIn('@_**Cordelia Lear|%d** just signed up for Zulip.' % (new_user.id), message.content)
+        self.assertIn('@_**Cordelia Lear|%d** just signed up for Zulip.' % (new_user.id,), message.content)
@@ -561,7 +561,7 @@ class RealmAPITest(ZulipTestCase):
         if Realm.property_types[name] is bool:
             vals = bool_tests
         if vals is None:
-            raise AssertionError('No test created for %s' % (name))
+            raise AssertionError('No test created for %s' % (name,))

         self.set_up_db(name, vals[0])
         realm = self.update_with_api(name, vals[1])
@@ -267,7 +267,7 @@ class ChangeSettingsTest(ZulipTestCase):
         test_value = test_changes.get(setting_name)
         # Error if a setting in UserProfile.property_types does not have test values
         if test_value is None:
-            raise AssertionError('No test created for %s' % (setting_name))
+            raise AssertionError('No test created for %s' % (setting_name,))
         invalid_value = 'invalid_' + setting_name

         data = {setting_name: ujson.dumps(test_value)}
@@ -307,7 +307,7 @@ class ChangeSettingsTest(ZulipTestCase):

         for emojiset in banned_emojisets:
             result = self.do_change_emojiset(emojiset)
-            self.assert_json_error(result, "Invalid emojiset '%s'" % (emojiset))
+            self.assert_json_error(result, "Invalid emojiset '%s'" % (emojiset,))

         for emojiset in valid_emojisets:
             result = self.do_change_emojiset(emojiset)
@@ -71,7 +71,7 @@ class SlackMessageConversion(ZulipTestCase):
         self.assertEqual(full_name, 'John Doe')
         self.assertEqual(get_user_full_name(users[2]), 'Jane')

-        self.assertEqual(text, 'Hi @**%s**: How are you? #**general**' % (full_name))
+        self.assertEqual(text, 'Hi @**%s**: How are you? #**general**' % (full_name,))
         self.assertEqual(mentioned_users, [540])

         # multiple mentioning
@@ -700,7 +700,7 @@ class StreamAdminTest(ZulipTestCase):
         result = self.client_patch('/json/streams/%d' % (stream_id,),
                                    {'description': ujson.dumps('a' * 1025)})
         self.assert_json_error(result, "description is too long (limit: %s characters)"
-                               % (Stream.MAX_DESCRIPTION_LENGTH))
+                               % (Stream.MAX_DESCRIPTION_LENGTH,))

         result = self.client_patch('/json/streams/%d' % (stream_id,),
                                    {'description': ujson.dumps('a\nmulti\nline\ndescription')})
@@ -1115,7 +1115,7 @@ class DefaultStreamTest(ZulipTestCase):
         self.make_stream(stream_name, invite_only=True)
         self.subscribe(self.example_user('iago'), stream_name)
         result = self.client_post('/json/default_streams', dict(stream_name=stream_name))
-        self.assert_json_error(result, "Invalid stream name '%s'" % (stream_name))
+        self.assert_json_error(result, "Invalid stream name '%s'" % (stream_name,))

         self.subscribe(user_profile, stream_name)
         result = self.client_post('/json/default_streams', dict(stream_name=stream_name))
@@ -1638,7 +1638,7 @@ class SubscriptionRestApiTest(ZulipTestCase):

         self.login(test_email)
         subs = gather_subscriptions(test_user)[0]
-        result = self.api_patch(test_email, "/api/v1/users/me/subscriptions/%d" % subs[0]["stream_id"],
+        result = self.api_patch(test_email, "/api/v1/users/me/subscriptions/%d" % (subs[0]["stream_id"],),
                                 {'property': 'color', 'value': '#c2c2c2'})
         self.assert_json_success(result)

@@ -1653,7 +1653,7 @@ class SubscriptionRestApiTest(ZulipTestCase):
         self.login(test_email)
         subs = gather_subscriptions(test_user)[0]

-        result = self.api_patch(test_email, "/api/v1/users/me/subscriptions/%d" % subs[0]["stream_id"],
+        result = self.api_patch(test_email, "/api/v1/users/me/subscriptions/%d" % (subs[0]["stream_id"],),
                                 {'property': 'invalid', 'value': 'somevalue'})
         self.assert_json_error(result,
                                "Unknown subscription property: invalid")
@@ -1750,7 +1750,7 @@ class SubscriptionRestApiTest(ZulipTestCase):
         }
         result = self.api_patch(email, "/api/v1/users/me/subscriptions", request)
         self.assert_json_error(result,
-                               "Stream name '%s' contains NULL (0x00) characters." % (stream_name))
+                               "Stream name '%s' contains NULL (0x00) characters." % (stream_name,))

     def test_compose_views_rollback(self) -> None:
         '''
@@ -2051,7 +2051,7 @@ class SubscriptionAPITest(ZulipTestCase):
         stream_name = "abc\000"
         result = self.common_subscribe_to_streams(self.test_email, [stream_name])
         self.assert_json_error(result,
                                "Stream name '%s' contains NULL (0x00) characters." % (stream_name))
-                               "Stream name '%s' contains NULL (0x00) characters." % (stream_name))
+                               "Stream name '%s' contains NULL (0x00) characters." % (stream_name,))

     def test_user_settings_for_adding_streams(self) -> None:
         with mock.patch('zerver.models.UserProfile.can_create_streams', return_value=False):
@@ -2478,7 +2478,7 @@ class SubscriptionAPITest(ZulipTestCase):
         mit_user = self.mit_user('starnine')

         realm = get_realm("zephyr")
-        stream_names = ["stream_%s" % i for i in range(40)]
+        stream_names = ["stream_%s" % (i,) for i in range(40)]
         streams = [
             self.make_stream(stream_name, realm=realm)
             for stream_name in stream_names]
@@ -2514,7 +2514,7 @@ class SubscriptionAPITest(ZulipTestCase):
     def test_bulk_subscribe_many(self) -> None:

         # Create a whole bunch of streams
-        streams = ["stream_%s" % i for i in range(20)]
+        streams = ["stream_%s" % (i,) for i in range(20)]
         for stream_name in streams:
             self.make_stream(stream_name)

@@ -3256,7 +3256,7 @@ class GetSubscribersTest(ZulipTestCase):
         (We also use this test to verify subscription notifications to
         folks who get subscribed to streams.)
         """
-        streams = ["stream_%s" % i for i in range(10)]
+        streams = ["stream_%s" % (i,) for i in range(10)]
         for stream_name in streams:
             self.make_stream(stream_name)

@@ -48,13 +48,13 @@ class ThumbnailTest(ZulipTestCase):
         quoted_uri = urllib.parse.quote(uri[1:], safe='')

         # Test full size image.
-        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         expected_part_url = get_file_path_urlpart(uri)
         self.assertIn(expected_part_url, result.url)

         # Test thumbnail size.
-        result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         expected_part_url = get_file_path_urlpart(uri, '0x300')
         self.assertIn(expected_part_url, result.url)
@@ -72,7 +72,7 @@ class ThumbnailTest(ZulipTestCase):
         quoted_emoji_url = urllib.parse.quote(custom_emoji_url[1:], safe='')

         # Test full size custom emoji image (for emoji link in messages case).
-        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_emoji_url))
+        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_emoji_url,))
         self.assertEqual(result.status_code, 302, result)
         self.assertIn(custom_emoji_url, result.url)

@@ -88,7 +88,7 @@ class ThumbnailTest(ZulipTestCase):

         # Test with another user trying to access image using thumbor.
         self.login(self.example_email("iago"))
-        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,))
         self.assertEqual(result.status_code, 403, result)
         self.assert_in_response("You are not authorized to view this file.", result)

@@ -98,13 +98,13 @@ class ThumbnailTest(ZulipTestCase):
         self.login(self.example_email("hamlet"))
         quoted_url = urllib.parse.quote(image_url, safe='')
         encoded_url = base64.urlsafe_b64encode(image_url.encode()).decode('utf-8')
-        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_url))
+        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_url,))
         self.assertEqual(result.status_code, 302, result)
         expected_part_url = '/smart/filters:no_upscale()/' + encoded_url + '/source_type/external'
         self.assertIn(expected_part_url, result.url)

         # Test thumbnail size.
-        result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_url))
+        result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_url,))
         self.assertEqual(result.status_code, 302, result)
         expected_part_url = '/0x300/smart/filters:no_upscale():sharpen(0.5,0.2,true)/' + encoded_url + '/source_type/external'
         self.assertIn(expected_part_url, result.url)
@@ -137,7 +137,7 @@ class ThumbnailTest(ZulipTestCase):
         # Test with another user trying to access image using thumbor.
         # File should be always accessible to user in case of external source
         self.login(self.example_email("iago"))
-        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_url))
+        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_url,))
         self.assertEqual(result.status_code, 302, result)
         expected_part_url = '/smart/filters:no_upscale()/' + encoded_url + '/source_type/external'
         self.assertIn(expected_part_url, result.url)
@@ -174,13 +174,13 @@ class ThumbnailTest(ZulipTestCase):
         # We remove the forward slash infront of the `/user_uploads/` to match
         # bugdown behaviour.
         quoted_uri = urllib.parse.quote(uri[1:], safe='')
-        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         expected_part_url = get_file_path_urlpart(uri)
         self.assertIn(expected_part_url, result.url)

         # Test thumbnail size.
-        result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         expected_part_url = get_file_path_urlpart(uri, '0x300')
         self.assertIn(expected_part_url, result.url)
@@ -198,7 +198,7 @@ class ThumbnailTest(ZulipTestCase):
         # We remove the forward slash infront of the `/user_uploads/` to match
         # bugdown behaviour.
         quoted_uri = urllib.parse.quote(uri[1:], safe='')
-        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         expected_part_url = get_file_path_urlpart(uri)
         self.assertIn(expected_part_url, result.url)
@@ -216,7 +216,7 @@ class ThumbnailTest(ZulipTestCase):
         quoted_emoji_url = urllib.parse.quote(custom_emoji_url[1:], safe='')

         # Test full size custom emoji image (for emoji link in messages case).
-        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_emoji_url))
+        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_emoji_url,))
         self.assertEqual(result.status_code, 302, result)
         self.assertIn(custom_emoji_url, result.url)

@@ -243,7 +243,7 @@ class ThumbnailTest(ZulipTestCase):

         # Test with another user trying to access image using thumbor.
         self.login(self.example_email("iago"))
-        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,))
         self.assertEqual(result.status_code, 403, result)
         self.assert_in_response("You are not authorized to view this file.", result)

@@ -252,7 +252,7 @@ class ThumbnailTest(ZulipTestCase):
         self.login(self.example_email("hamlet"))
         uri = '/static/images/cute/turtle.png'
         quoted_uri = urllib.parse.quote(uri[1:], safe='')
-        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         self.assertEqual(uri, result.url)

@@ -272,21 +272,21 @@ class ThumbnailTest(ZulipTestCase):
         quoted_uri = urllib.parse.quote(uri[1:], safe='')

         with self.settings(THUMBOR_URL=''):
-            result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
+            result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         self.assertEqual(uri, result.url)

         uri = 'https://www.google.com/images/srpr/logo4w.png'
         quoted_uri = urllib.parse.quote(uri, safe='')
         with self.settings(THUMBOR_URL=''):
-            result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
+            result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         self.assertEqual(uri, result.url)

         uri = 'http://www.google.com/images/srpr/logo4w.png'
         quoted_uri = urllib.parse.quote(uri, safe='')
         with self.settings(THUMBOR_URL=''):
-            result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
+            result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         base = 'https://external-content.zulipcdn.net/external_content/7b6552b60c635e41e8f6daeb36d88afc4eabde79/687474703a2f2f7777772e676f6f676c652e636f6d2f696d616765732f737270722f6c6f676f34772e706e67'
         self.assertEqual(base, result.url)
@@ -307,7 +307,7 @@ class ThumbnailTest(ZulipTestCase):
         quoted_uri = urllib.parse.quote(uri[1:], safe='')
         hex_uri = base64.urlsafe_b64encode(uri.encode()).decode('utf-8')
         with self.settings(THUMBOR_URL='http://test-thumborhost.com'):
-            result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
+            result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         base = 'http://test-thumborhost.com/'
         self.assertEqual(base, result.url[:len(base)])
@@ -340,23 +340,23 @@ class ThumbnailTest(ZulipTestCase):
         # size=thumbnail should return a 0x300 sized image.
         # size=full should return the original resolution image.
         quoted_uri = urllib.parse.quote(uri[1:], safe='')
-        result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s&size=thumbnail" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         expected_part_url = get_file_path_urlpart(uri, '0x300')
         self.assertIn(expected_part_url, result.url)

-        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s&size=full" % (quoted_uri,))
         self.assertEqual(result.status_code, 302, result)
         expected_part_url = get_file_path_urlpart(uri)
         self.assertIn(expected_part_url, result.url)

         # Test with size supplied as a query parameter where size is anything
         # else than 'full' or 'thumbnail'. Result should be an error message.
-        result = self.client_get("/thumbnail?url=%s&size=480x360" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s&size=480x360" % (quoted_uri,))
         self.assertEqual(result.status_code, 403, result)
         self.assert_in_response("Invalid size.", result)

         # Test with no size param supplied. In this case as well we show an
         # error message.
-        result = self.client_get("/thumbnail?url=%s" % (quoted_uri))
+        result = self.client_get("/thumbnail?url=%s" % (quoted_uri,))
         self.assertEqual(result.status_code, 400, "Missing 'size' argument")
@@ -838,7 +838,7 @@ class AvatarTest(UploadSerializeMixin, ZulipTestCase):
         redirect_url = response['Location']
         self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + '&foo=bar'))

-        response = self.client_get("/avatar/%s?foo=bar" % (cordelia.id))
+        response = self.client_get("/avatar/%s?foo=bar" % (cordelia.id,))
         redirect_url = response['Location']
         self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + '&foo=bar'))

@@ -852,7 +852,7 @@ class AvatarTest(UploadSerializeMixin, ZulipTestCase):
         redirect_url = response['Location']
         self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + '&foo=bar'))

-        response = self.api_get(hamlet, "/avatar/%s?foo=bar" % (cordelia.id))
+        response = self.api_get(hamlet, "/avatar/%s?foo=bar" % (cordelia.id,))
         redirect_url = response['Location']
         self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + '&foo=bar'))

@@ -862,7 +862,7 @@ class AvatarTest(UploadSerializeMixin, ZulipTestCase):
         self.assertTrue(redirect_url.endswith(str(avatar_url(cross_realm_bot)) + '&foo=bar'))

         # Test cross_realm_bot avatar access using id.
-        response = self.api_get(hamlet, "/avatar/%s?foo=bar" % (cross_realm_bot.id))
+        response = self.api_get(hamlet, "/avatar/%s?foo=bar" % (cross_realm_bot.id,))
         redirect_url = response['Location']
         self.assertTrue(redirect_url.endswith(str(avatar_url(cross_realm_bot)) + '&foo=bar'))

@@ -1269,7 +1269,7 @@ class RealmLogoTest(UploadSerializeMixin, ZulipTestCase):
         response = self.client_get("/json/realm/logo", {'night': ujson.dumps(self.night)})
         redirect_url = response['Location']
         self.assertEqual(redirect_url, realm_logo_url(realm, self.night) +
-                         '&night=%s' % (str(self.night).lower()))
+                         '&night=%s' % (str(self.night).lower(),))

     def test_get_realm_logo(self) -> None:
         self.login(self.example_email("hamlet"))
@@ -1278,7 +1278,7 @@ class RealmLogoTest(UploadSerializeMixin, ZulipTestCase):
         response = self.client_get("/json/realm/logo", {'night': ujson.dumps(self.night)})
         redirect_url = response['Location']
         self.assertTrue(redirect_url.endswith(realm_logo_url(realm, self.night) +
-                                              '&night=%s' % (str(self.night).lower())))
+                                              '&night=%s' % (str(self.night).lower(),)))

     def test_valid_logos(self) -> None:
         """
@@ -1642,12 +1642,12 @@ class S3Test(ZulipTestCase):
         image_file = get_test_image_file("img.png")
         zerver.lib.upload.upload_backend.upload_realm_logo_image(image_file, user_profile, night)

-        original_path_id = os.path.join(str(user_profile.realm.id), "realm", "%s.original" % (file_name))
+        original_path_id = os.path.join(str(user_profile.realm.id), "realm", "%s.original" % (file_name,))
         original_key = bucket.get_key(original_path_id)
         image_file.seek(0)
         self.assertEqual(image_file.read(), original_key.get_contents_as_string())

-        resized_path_id = os.path.join(str(user_profile.realm.id), "realm", "%s.png" % (file_name))
+        resized_path_id = os.path.join(str(user_profile.realm.id), "realm", "%s.png" % (file_name,))
         resized_data = bucket.get_key(resized_path_id).read()
         resized_image = Image.open(io.BytesIO(resized_data)).size
         self.assertEqual(resized_image, (DEFAULT_AVATAR_SIZE, DEFAULT_AVATAR_SIZE))
@@ -586,7 +586,7 @@ class UserProfileTest(ZulipTestCase):
         self.assertEqual(check_valid_user_ids(realm.id, invalid_uid),
                          "User IDs is not a list")
         self.assertEqual(check_valid_user_ids(realm.id, [invalid_uid]),
-                         "Invalid user ID: %d" % (invalid_uid))
+                         "Invalid user ID: %d" % (invalid_uid,))

         invalid_uid = "abc"
         self.assertEqual(check_valid_user_ids(realm.id, [invalid_uid]),
|
||||||
|
|
||||||
# User is in different realm
|
# User is in different realm
|
||||||
self.assertEqual(check_valid_user_ids(get_realm("zephyr").id, [hamlet.id]),
|
self.assertEqual(check_valid_user_ids(get_realm("zephyr").id, [hamlet.id]),
|
||||||
"Invalid user ID: %d" % (hamlet.id))
|
"Invalid user ID: %d" % (hamlet.id,))
|
||||||
|
|
||||||
# User is not active
|
# User is not active
|
||||||
hamlet.is_active = False
|
hamlet.is_active = False
|
||||||
hamlet.save()
|
hamlet.save()
|
||||||
self.assertEqual(check_valid_user_ids(realm.id, [hamlet.id]),
|
self.assertEqual(check_valid_user_ids(realm.id, [hamlet.id]),
|
||||||
"User with ID %d is deactivated" % (hamlet.id))
|
"User with ID %d is deactivated" % (hamlet.id,))
|
||||||
self.assertEqual(check_valid_user_ids(realm.id, [hamlet.id], allow_deactivated=True),
|
self.assertEqual(check_valid_user_ids(realm.id, [hamlet.id], allow_deactivated=True),
|
||||||
None)
|
None)
|
||||||
|
|
||||||
# User is a bot
|
# User is a bot
|
||||||
self.assertEqual(check_valid_user_ids(realm.id, [bot.id]),
|
self.assertEqual(check_valid_user_ids(realm.id, [bot.id]),
|
||||||
"User with ID %d is a bot" % (bot.id))
|
"User with ID %d is a bot" % (bot.id,))
|
||||||
|
|
||||||
# Succesfully get non-bot, active user belong to your realm
|
# Succesfully get non-bot, active user belong to your realm
|
||||||
self.assertEqual(check_valid_user_ids(realm.id, [othello.id]), None)
|
self.assertEqual(check_valid_user_ids(realm.id, [othello.id]), None)
|
||||||
|
|
|
@@ -391,10 +391,10 @@ def add_subscriptions_backend(
     notifications_stream = user_profile.realm.get_notifications_stream()
     if notifications_stream is not None:
         if len(created_streams) > 1:
-            stream_strs = ", ".join('#**%s**' % s.name for s in created_streams)
+            stream_strs = ", ".join('#**%s**' % (s.name,) for s in created_streams)
             stream_msg = "the following streams: %s" % (stream_strs,)
         else:
-            stream_msg = "a new stream #**%s**." % created_streams[0].name
+            stream_msg = "a new stream #**%s**." % (created_streams[0].name,)
         msg = ("@_**%s|%d** just created %s" % (user_profile.full_name, user_profile.id, stream_msg))

         sender = get_system_bot(settings.NOTIFICATION_BOT)
@@ -82,7 +82,9 @@ def format_freshdesk_property_change_message(ticket: TicketDict, event_info: Lis
     """
     content = "%s <%s> updated [ticket #%s](%s):\n\n" % (
         ticket.requester_name, ticket.requester_email, ticket.id, ticket.url)
-    # Why not `"%s %s %s" % event_info`? Because the linter doesn't like it.
+    # Why not `"%s %s %s" % event_info`? Because the linter doesn't like it?
+    # No, because it doesn't work: `event_info` is a list, not a tuple. But you
+    # could write `"%s %s %s" % (*event_info,)`.
     content += "%s: **%s** => **%s**" % (
         event_info[0].capitalize(), event_info[1], event_info[2])

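The comment added in the hunk above reasons about why `"%s %s %s" % event_info` cannot be used directly. A small hedged sketch of that behaviour (illustrative values, not part of the commit):

    event_info = ["status", "Open", "Closed"]
    # "%s %s %s" % event_info         -> TypeError: the list counts as a single
    #                                    value, so there are not enough arguments.
    # "%s %s %s" % tuple(event_info)  -> "status Open Closed"
    # "%s %s %s" % (*event_info,)     -> same result, the spelling the comment suggests
    print("%s %s %s" % tuple(event_info))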
@@ -40,7 +40,7 @@ def api_newrelic_webhook(request: HttpRequest, user_profile: UserProfile,
         subject = alert['message']
         content = ALERT_TEMPLATE.format(**alert)
     elif deployment:
-        subject = "%s deploy" % (deployment['application_name'])
+        subject = "%s deploy" % (deployment['application_name'],)
         content = DEPLOY_TEMPLATE.format(**deployment)
     else:
         raise UnexpectedWebhookEventType('New Relic', 'Unknown Event Type')
@@ -256,7 +256,7 @@ class UserActivityIntervalWorker(QueueProcessingWorker):
 @assign_queue('user_presence')
 class UserPresenceWorker(QueueProcessingWorker):
     def consume(self, event: Mapping[str, Any]) -> None:
-        logging.debug("Received presence event: %s" % (event),)
+        logging.debug("Received presence event: %s" % (event,),)
         user_profile = get_user_profile_by_id(event["user_profile_id"])
         client = get_client(event["client"])
         log_time = timestamp_to_datetime(event["time"])
@@ -392,7 +392,7 @@ class SlowQueryWorker(LoopQueueProcessingWorker):
     # creates conflicts with other users in the file.
     def consume_batch(self, slow_queries: List[Any]) -> None:
         for query in slow_queries:
-            logging.info("Slow query: %s" % (query))
+            logging.info("Slow query: %s" % (query,))

         if settings.SLOW_QUERY_LOGS_STREAM is None:
             return
@@ -155,7 +155,7 @@ def validate_count_stats(server: RemoteZulipServer, model: Any,
     last_id = get_last_id_from_server(server, model)
     for item in counts:
         if item['property'] not in COUNT_STATS:
-            raise JsonableError(_("Invalid property %s" % item['property']))
+            raise JsonableError(_("Invalid property %s" % (item['property'],)))
         if item['id'] <= last_id:
             raise JsonableError(_("Data is out of order."))
         last_id = item['id']