pep8: Fix E225 pep8 violations.

Rafid Aslam 2016-11-29 05:29:01 +07:00 committed by Tim Abbott
parent f036a59b4f
commit 7a2282986a
24 changed files with 55 additions and 55 deletions
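
E225 is pycodestyle's "missing whitespace around operator" check. It fires on operators written with no surrounding spaces (e.g. count=150) and also on asymmetric spacing such as "%s" %(x,), which is why the string-formatting fixes below are part of this commit. A minimal before/after illustration (not taken from the diff):

    count=150    # flagged: E225 missing whitespace around operator
    count = 150  # fixed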

@@ -84,7 +84,7 @@ def check_permissions():
 def build_message(event):
     if not ('bucket' in event and 'creator' in event and 'html_url' in event):
         logging.error("Perhaps the Basecamp API changed behavior? "
-                      "This event doesn't have the expected format:\n%s" %(event,))
+                      "This event doesn't have the expected format:\n%s" % (event,))
         return None
     # adjust the topic length to be bounded to 60 characters
     topic = event['bucket']['name']
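
A note on the idiom in the hunk above: the trailing comma in % (event,) builds a one-element tuple, which keeps %-formatting safe even when the interpolated value is itself a tuple. A standalone sketch, not part of the commit:

    event = ('a', 'b')
    print("event: %s" % (event,))  # prints: event: ('a', 'b')
    # "event: %s" % event would raise TypeError: not all arguments converted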

@@ -230,28 +230,28 @@ class Client(object):
         self.client_name = client
         if insecure:
-            self.tls_verification=False
+            self.tls_verification = False
         elif cert_bundle is not None:
             if not os.path.isfile(cert_bundle):
                 raise RuntimeError("tls bundle '%s' does not exist"
-                                   %(cert_bundle,))
-            self.tls_verification=cert_bundle
+                                   % (cert_bundle,))
+            self.tls_verification = cert_bundle
         else:
             # Default behavior: verify against system CA certificates
-            self.tls_verification=True
+            self.tls_verification = True
         if client_cert is None:
             if client_cert_key is not None:
                 raise RuntimeError("client cert key '%s' specified, but no client cert public part provided"
-                                   %(client_cert_key,))
+                                   % (client_cert_key,))
         else:  # we have a client cert
             if not os.path.isfile(client_cert):
                 raise RuntimeError("client cert '%s' does not exist"
-                                   %(client_cert,))
+                                   % (client_cert,))
             if client_cert_key is not None:
                 if not os.path.isfile(client_cert_key):
                     raise RuntimeError("client cert key '%s' does not exist"
-                                       %(client_cert_key,))
+                                       % (client_cert_key,))
         self.client_cert = client_cert
         self.client_cert_key = client_cert_key

@@ -1,5 +1,5 @@
-TIME_ZONE="America/New_York"
-ALLOWED_HOSTS=['graphite.humbughq.com', 'graphite.zulip.net', 'stats1.zulip.net']
+TIME_ZONE = "America/New_York"
+ALLOWED_HOSTS = ['graphite.humbughq.com', 'graphite.zulip.net', 'stats1.zulip.net']
 DATABASES = {
     'default': {

@@ -6,7 +6,7 @@ import sys
 import subprocess
 import logging
-TARBALL_ARCHIVE_PATH="/home/zulip/archives"
+TARBALL_ARCHIVE_PATH = "/home/zulip/archives"
 os.environ["PYTHONUNBUFFERED"] = "y"
 sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))

@@ -140,7 +140,7 @@ def create_user_docs():
         groups['other'] = patterns
     for name in ['api', 'legacy', 'webhooks', 'other']:
-        f.write(name +' endpoints:\n\n')
+        f.write(name + ' endpoints:\n\n')
         f.write('<ul>\n')
         for pattern in sorted(groups[name]):
             href = pattern.replace('/', '-') + '.html'

@@ -31,7 +31,7 @@ def get_data_url(buckets, realm):
     # This is the slightly-cleaned up JSON api version of https://graphiti.zulip.net/graphs/945c7aafc2d
     #
     # Fetches 1 month worth of data
-    DATA_URL="https://stats1.zulip.net:444/render/?from=-1000d&format=json"
+    DATA_URL = "https://stats1.zulip.net:444/render/?from=-1000d&format=json"
     for bucket in buckets:
         if realm != 'all':
             statsd_target = "stats.gauges.staging.users.active.%s.%s" % (realm_key, bucket)

@@ -152,7 +152,7 @@ def html_tag_tree(text):
         # Add tokens to the Node tree first (conditionally).
         if token.kind in ('html_start', 'html_singleton'):
             parent = stack[-1]
-            node= Node(token=token, parent=parent)
+            node = Node(token=token, parent=parent)
             parent.children.append(node)
         # Then update the stack to have the next node that

@@ -238,7 +238,7 @@ def is_django_block_tag(tag):
 def get_handlebars_tag(text, i):
     # type: (str, int) -> str
     end = i + 2
-    while end < len(text) -1 and text[end] != '}':
+    while end < len(text) - 1 and text[end] != '}':
         end += 1
     if text[end] != '}' or text[end+1] != '}':
         raise TemplateParserException('Tag missing }}')
@@ -248,7 +248,7 @@ def get_handlebars_tag(text, i):
 def get_django_tag(text, i):
     # type: (str, int) -> str
     end = i + 2
-    while end < len(text) -1 and text[end] != '%':
+    while end < len(text) - 1 and text[end] != '%':
         end += 1
     if text[end] != '%' or text[end+1] != '}':
         raise TemplateParserException('Tag missing %}')

@@ -84,9 +84,9 @@ def check_pep8(files):
     ignored_rules = [
         'E402', 'E501', 'W503', 'E711', 'E201', 'E203', 'E202', 'E128', 'E226', 'E124', 'E125',
         'E126', 'E127', 'E121', 'E122', 'E123', 'E266', 'E265', 'E261', 'E301', 'E221', 'E303',
-        'E241', 'E712', 'E225', 'E401', 'E115', 'E114', 'E111', 'E222', 'E731', 'E302', 'E129',
+        'E241', 'E712', 'E702', 'E401', 'E115', 'E114', 'E111', 'E222', 'E731', 'E302', 'E129',
         'E741', 'E714', 'W391', 'E211', 'E713', 'E502', 'E131', 'E305', 'E251', 'E306', 'E231',
-        'E701', 'E702',
+        'E701',
     ]
     pep8 = subprocess.Popen(
         ['pycodestyle'] + files + ['--ignore={rules}'.format(rules=','.join(ignored_rules))],
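
This hunk is what turns the check on: 'E225' leaves ignored_rules ('E702' merely moves up to take its slot), so the linter now reports operator-spacing violations. A minimal sketch of the equivalent invocation, assuming pycodestyle is installed and example.py is a file to check:

    import subprocess

    # Abbreviated ignore list, for illustration only.
    ignored_rules = ['E402', 'E501']
    subprocess.call(['pycodestyle', 'example.py',
                     '--ignore={rules}'.format(rules=','.join(ignored_rules))])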

@@ -109,7 +109,7 @@ def list_files(targets=[], ftypes=[], use_shebang=True, modified_only=False,
     else:
         return result_list
-if __name__=="__main__":
+if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="List files tracked by git and optionally filter by type")
     parser.add_argument('targets', nargs='*', default=[],
                         help='''files and directories to include in the result.

@@ -82,10 +82,10 @@ def update_user_activity(request, user_profile):
     else:
         query = request.META['PATH_INFO']
-    event={'query': query,
+    event = {'query': query,
            'user_profile_id': user_profile.id,
            'time': datetime_to_timestamp(now()),
            'client': request.client.name}
     queue_json_publish("user_activity", event, lambda event: None)
 # Based on django.views.decorators.http.require_http_methods

@@ -544,7 +544,7 @@ def do_set_realm_default_language(realm, default_language):
         # NB: remove this once we upgrade to Django 1.9
         # zh-cn and zh-tw will be replaced by zh-hans and zh-hant in
         # Django 1.9
-        default_language= 'zh_HANS'
+        default_language = 'zh_HANS'
     realm.default_language = default_language
     realm.save(update_fields=['default_language'])
@@ -2441,18 +2441,18 @@ def do_update_user_presence(user_profile, client, log_time, status):
 def update_user_activity_interval(user_profile, log_time):
     # type: (UserProfile, datetime.datetime) -> None
-    event={'user_profile_id': user_profile.id,
+    event = {'user_profile_id': user_profile.id,
            'time': datetime_to_timestamp(log_time)}
     queue_json_publish("user_activity_interval", event,
                        lambda e: do_update_user_activity_interval(user_profile, log_time))
 def update_user_presence(user_profile, client, log_time, status,
                          new_user_input):
     # type: (UserProfile, Client, datetime.datetime, int, bool) -> None
-    event={'user_profile_id': user_profile.id,
+    event = {'user_profile_id': user_profile.id,
            'status': status,
            'time': datetime_to_timestamp(log_time),
            'client': client.name}
     queue_json_publish("user_presence", event,
                        lambda e: do_update_user_presence(user_profile, client,
@@ -3302,7 +3302,7 @@ def handle_push_notification(user_profile_id, missed_message):
             send_android_push_notification(user_profile, android_data)
     except UserMessage.DoesNotExist:
-        logging.error("Could not find UserMessage with message_id %s" %(missed_message['message_id'],))
+        logging.error("Could not find UserMessage with message_id %s" % (missed_message['message_id'],))
 def is_inactive(email):
     # type: (text_type) -> None

@@ -259,7 +259,7 @@ def get_tweet_id(url):
     to_match = parsed_url.path
     # In old-style twitter.com/#!/wdaher/status/1231241234-style URLs, we need to look at the fragment instead
     if parsed_url.path == '/' and len(parsed_url.fragment) > 5:
-        to_match= parsed_url.fragment
+        to_match = parsed_url.fragment
     tweet_id_match = re.match(r'^!?/.*?/status(es)?/(?P<tweetid>\d{10,18})(/photo/[0-9])?/?$', to_match)
     if not tweet_id_match:

@@ -159,7 +159,7 @@ def send_digest_email(user_profile, html_content, text_content):
 def handle_digest_email(user_profile_id, cutoff):
     # type: (int, float) -> None
-    user_profile=UserProfile.objects.get(id=user_profile_id)
+    user_profile = UserProfile.objects.get(id=user_profile_id)
     # Convert from epoch seconds to a datetime object.
     cutoff_date = datetime.datetime.utcfromtimestamp(int(cutoff))

@@ -223,7 +223,7 @@ class Config(object):
         self.post_process_data = post_process_data
         self.concat_and_destroy = concat_and_destroy
         self.id_source = id_source
-        self.source_filter= source_filter
+        self.source_filter = source_filter
         self.children = []  # type: List[Config]
         if normal_parent:
@@ -556,7 +556,7 @@ def fetch_user_profile(response, config, context):
     exportable_user_ids = context['exportable_user_ids']
     query = UserProfile.objects.filter(realm_id=realm.id)
-    exclude=['password', 'api_key']
+    exclude = ['password', 'api_key']
     rows = make_raw(list(query), exclude=exclude)
     normal_rows = []  # type: List[Record]
@@ -1005,7 +1005,7 @@ def do_write_stats_file_for_realm_export(output_dir):
     logging.info('Writing stats file: %s\n' % (stats_file,))
     with open(stats_file, 'w') as f:
         for fn in fns:
-            f.write(os.path.basename(fn) +'\n')
+            f.write(os.path.basename(fn) + '\n')
             payload = open(fn).read()
             data = ujson.loads(payload)
             for k in sorted(data):

@@ -102,7 +102,7 @@ def act_on_message_ranges(db, orm, tasks, batch_size=5000, sleep=0.5):
     max_id = all_objects.all().order_by('-id')[0].id
     print("max_id = %d" % (max_id,))
-    overhead = int((max_id + 1 - min_id)/ batch_size * sleep / 60)
+    overhead = int((max_id + 1 - min_id) / batch_size * sleep / 60)
     print("Expect this to take at least %d minutes, just due to sleeps alone." % (overhead,))
     while min_id <= max_id:

@@ -78,7 +78,7 @@ Usage: ./manage.py create_realm --string_id=acme --name='Acme'"""
     def handle(self, *args, **options):
         # type: (*Any, **Any) -> None
-        string_id=options["string_id"]
+        string_id = options["string_id"]
         name = options["name"]
         domain = options["domain"]

@@ -114,7 +114,7 @@ class Command(BaseCommand):
                 if django.conf.settings.DEBUG:
                     instance.set_blocking_log_threshold(5)
-                    instance.handle_callback_exception=handle_callback_exception
+                    instance.handle_callback_exception = handle_callback_exception
                 instance.start()
             except KeyboardInterrupt:
                 sys.exit(0)

@@ -363,7 +363,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('social.backends.github.GithubOAuth2.do_auth',
                         side_effect=self.do_auth), \
                 mock.patch('zerver.views.auth.login'):
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             result = self.backend.do_auth(response=response)
             self.assertNotIn('subdomain=1', result.url)
@@ -372,7 +372,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('social.backends.github.GithubOAuth2.do_auth',
                         side_effect=self.do_auth):
             with self.settings(REALMS_HAVE_SUBDOMAINS=True):
-                response=dict(email=self.email, name=self.name)
+                response = dict(email=self.email, name=self.name)
                 result = self.backend.do_auth(response=response)
                 self.assertIn('subdomain=1', result.url)
@@ -381,7 +381,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('social.backends.github.GithubOAuth2.do_auth',
                         side_effect=self.do_auth), \
                 mock.patch('zproject.backends.SocialAuthMixin.process_do_auth') as result:
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             self.backend.do_auth('fake-access-token', response=response)
             kwargs = {'realm_subdomain': 'acme',
@@ -394,7 +394,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('social.backends.github.GithubTeamOAuth2.do_auth',
                         side_effect=self.do_auth), \
                 mock.patch('zproject.backends.SocialAuthMixin.process_do_auth') as result:
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             with self.settings(SOCIAL_AUTH_GITHUB_TEAM_ID='zulip-webapp'):
                 self.backend.do_auth('fake-access-token', response=response)
@@ -409,7 +409,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
                         side_effect=AuthFailed('Not found')), \
                 mock.patch('logging.info'), \
                 mock.patch('zproject.backends.SocialAuthMixin.process_do_auth') as result:
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             with self.settings(SOCIAL_AUTH_GITHUB_TEAM_ID='zulip-webapp'):
                 self.backend.do_auth('fake-access-token', response=response)
             kwargs = {'realm_subdomain': 'acme',
@@ -422,7 +422,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('social.backends.github.GithubOrganizationOAuth2.do_auth',
                         side_effect=self.do_auth), \
                 mock.patch('zproject.backends.SocialAuthMixin.process_do_auth') as result:
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             with self.settings(SOCIAL_AUTH_GITHUB_ORG_NAME='Zulip'):
                 self.backend.do_auth('fake-access-token', response=response)
@@ -437,7 +437,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
                         side_effect=AuthFailed('Not found')), \
                 mock.patch('logging.info'), \
                 mock.patch('zproject.backends.SocialAuthMixin.process_do_auth') as result:
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             with self.settings(SOCIAL_AUTH_GITHUB_ORG_NAME='Zulip'):
                 self.backend.do_auth('fake-access-token', response=response)
             kwargs = {'realm_subdomain': 'acme',
@@ -449,7 +449,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         # type: () -> None
         with mock.patch('zproject.backends.get_user_profile_by_email',
                         side_effect=UserProfile.DoesNotExist("Do not exist")):
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             return_data = dict()  # type: Dict[str, Any]
             user = self.backend.authenticate(return_data=return_data, response=response)
             self.assertIs(user, None)
@@ -466,7 +466,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('zerver.views.auth.login_or_register_remote_user') as result, \
                 mock.patch('social.backends.github.GithubOAuth2.do_auth',
                            side_effect=do_auth_inactive):
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             user = self.backend.do_auth(response=response)
             result.assert_not_called()
             self.assertIs(user, None)
@@ -487,7 +487,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('social.backends.github.GithubOAuth2.do_auth',
                         side_effect=do_auth):
-            response=dict(email='nonexisting@phantom.com', name='Ghost')
+            response = dict(email='nonexisting@phantom.com', name='Ghost')
             result = self.backend.do_auth(response=response)
             self.assert_in_response('action="/register/"', result)
             self.assert_in_response('Your email address does not correspond to any '

@@ -419,7 +419,7 @@ class BugdownTest(TestCase):
         # Needs to mock an actual message because that's how bugdown obtains the realm
         msg = Message(sender=get_user_profile_by_email("hamlet@zulip.com"))
         converted = bugdown.convert(":test:", "zulip.com", msg)
-        self.assertEqual(converted, '<p>%s</p>' %(emoji_img(':test:', url)))
+        self.assertEqual(converted, '<p>%s</p>' % (emoji_img(':test:', url)))
         do_remove_realm_emoji(zulip_realm, 'test')
         converted = bugdown.convert(":test:", "zulip.com", msg)

@@ -31,7 +31,7 @@ class ActivityTest(ZulipTestCase):
         client, _ = Client.objects.get_or_create(name='website')
         query = '/json/users/me/pointer'
         last_visit = timezone.now()
-        count=150
+        count = 150
         for user_profile in UserProfile.objects.all():
             UserActivity.objects.get_or_create(
                 user_profile=user_profile,

@@ -541,8 +541,8 @@ so we didn't send them an invitation. We did send invitations to everyone else!"
         # type: () -> None
         self.login("hamlet@zulip.com")
         user = get_user_profile_by_email('hamlet@zulip.com')
-        user.invites_granted=1
-        user.invites_used=0
+        user.invites_granted = 1
+        user.invites_used = 0
         user.save()
         invitee = "alice-test@zulip.com"

@@ -122,7 +122,7 @@ class SignupWorker(QueueProcessingWorker):
     def consume(self, data):
         # type: (Mapping[str, Any]) -> None
-        merge_vars=data['merge_vars']
+        merge_vars = data['merge_vars']
         # This should clear out any invitation reminder emails
         clear_followup_emails_queue(data["EMAIL"])
         if settings.MAILCHIMP_API_KEY and settings.PRODUCTION:

@@ -77,7 +77,7 @@ TERMS_OF_SERVICE = 'corporate/terms.md'
 # Buckets used for Amazon S3 integration for storing files and user avatars.
 S3_AUTH_UPLOADS_BUCKET = "zulip-user-uploads"
-S3_AVATAR_BUCKET="humbug-user-avatars"
+S3_AVATAR_BUCKET = "humbug-user-avatars"
 APNS_SANDBOX = False
 APNS_FEEDBACK = "feedback_production"