mirror of https://github.com/zulip/zulip.git

pep8: Fix E225 pep8 violations.

This commit is contained in:
parent f036a59b4f
commit 7a2282986a
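For context, E225 is pycodestyle's "missing whitespace around operator" check. Nearly every hunk below adds the missing space around an operator such as =, +, -, / or ==; a representative pair, in the diff's own notation:

-        count=150
+        count = 150

The one exception is the linting-tool hunk, which removes 'E225' from the linter's ignore list so the rule is enforced from now on.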
@@ -84,7 +84,7 @@ def check_permissions():
 def build_message(event):
     if not ('bucket' in event and 'creator' in event and 'html_url' in event):
         logging.error("Perhaps the Basecamp API changed behavior? "
-                      "This event doesn't have the expected format:\n%s" %(event,))
+                      "This event doesn't have the expected format:\n%s" % (event,))
         return None
     # adjust the topic length to be bounded to 60 characters
     topic = event['bucket']['name']
@@ -230,28 +230,28 @@ class Client(object):
         self.client_name = client

         if insecure:
-            self.tls_verification=False
+            self.tls_verification = False
         elif cert_bundle is not None:
             if not os.path.isfile(cert_bundle):
                 raise RuntimeError("tls bundle '%s' does not exist"
-                                   %(cert_bundle,))
-            self.tls_verification=cert_bundle
+                                   % (cert_bundle,))
+            self.tls_verification = cert_bundle
         else:
             # Default behavior: verify against system CA certificates
-            self.tls_verification=True
+            self.tls_verification = True

         if client_cert is None:
             if client_cert_key is not None:
                 raise RuntimeError("client cert key '%s' specified, but no client cert public part provided"
-                                   %(client_cert_key,))
+                                   % (client_cert_key,))
         else: # we have a client cert
             if not os.path.isfile(client_cert):
                 raise RuntimeError("client cert '%s' does not exist"
-                                   %(client_cert,))
+                                   % (client_cert,))
             if client_cert_key is not None:
                 if not os.path.isfile(client_cert_key):
                     raise RuntimeError("client cert key '%s' does not exist"
-                                       %(client_cert_key,))
+                                       % (client_cert_key,))
         self.client_cert = client_cert
         self.client_cert_key = client_cert_key

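An aside on the value normalized in the hunk above: self.tls_verification ends up as either a boolean or a CA-bundle path, which is exactly the union the requests library accepts for its verify parameter, so presumably the attribute is later handed straight to the HTTP layer, along the lines of requests.get(url, verify=self.tls_verification). The downstream call is an assumption; only the assignment logic appears in this hunk.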
@@ -1,5 +1,5 @@
-TIME_ZONE="America/New_York"
-ALLOWED_HOSTS=['graphite.humbughq.com', 'graphite.zulip.net', 'stats1.zulip.net']
+TIME_ZONE = "America/New_York"
+ALLOWED_HOSTS = ['graphite.humbughq.com', 'graphite.zulip.net', 'stats1.zulip.net']

 DATABASES = {
     'default': {
@@ -6,7 +6,7 @@ import sys
 import subprocess
 import logging

-TARBALL_ARCHIVE_PATH="/home/zulip/archives"
+TARBALL_ARCHIVE_PATH = "/home/zulip/archives"
 os.environ["PYTHONUNBUFFERED"] = "y"

 sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
@@ -140,7 +140,7 @@ def create_user_docs():
     groups['other'] = patterns

     for name in ['api', 'legacy', 'webhooks', 'other']:
-        f.write(name +' endpoints:\n\n')
+        f.write(name + ' endpoints:\n\n')
         f.write('<ul>\n')
         for pattern in sorted(groups[name]):
             href = pattern.replace('/', '-') + '.html'
@@ -31,7 +31,7 @@ def get_data_url(buckets, realm):
     # This is the slightly-cleaned up JSON api version of https://graphiti.zulip.net/graphs/945c7aafc2d
     #
     # Fetches 1 month worth of data
-    DATA_URL="https://stats1.zulip.net:444/render/?from=-1000d&format=json"
+    DATA_URL = "https://stats1.zulip.net:444/render/?from=-1000d&format=json"
     for bucket in buckets:
         if realm != 'all':
             statsd_target = "stats.gauges.staging.users.active.%s.%s" % (realm_key, bucket)
@@ -152,7 +152,7 @@ def html_tag_tree(text):
         # Add tokens to the Node tree first (conditionally).
         if token.kind in ('html_start', 'html_singleton'):
             parent = stack[-1]
-            node= Node(token=token, parent=parent)
+            node = Node(token=token, parent=parent)
             parent.children.append(node)

         # Then update the stack to have the next node that
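A note on the structure in that hunk: stack[-1] is the current innermost open tag, so each html_start or html_singleton token hangs its new Node off that parent. Judging from the trailing comment, the stack is then adjusted so a start tag becomes the new top while a singleton does not; the adjustment itself is outside the hunk, so this is a hedged reading. For a hypothetical input '<p><br></p>', the '<p>' Node would gain a child Node for '<br>' this way.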
@@ -238,7 +238,7 @@ def is_django_block_tag(tag):
 def get_handlebars_tag(text, i):
     # type: (str, int) -> str
     end = i + 2
-    while end < len(text) -1 and text[end] != '}':
+    while end < len(text) - 1 and text[end] != '}':
         end += 1
     if text[end] != '}' or text[end+1] != '}':
         raise TemplateParserException('Tag missing }}')
@@ -248,7 +248,7 @@ def get_handlebars_tag(text, i):
 def get_django_tag(text, i):
     # type: (str, int) -> str
     end = i + 2
-    while end < len(text) -1 and text[end] != '%':
+    while end < len(text) - 1 and text[end] != '%':
         end += 1
     if text[end] != '%' or text[end+1] != '}':
         raise TemplateParserException('Tag missing %}')
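For intuition about both scanners above: end starts at i + 2, just past the two-character opener, and advances to the first closing '}' (or '%'); the guard then requires the very next character to complete the closing pair. Assuming the function goes on to return the spanned slice, which is not shown in these hunks, a hypothetical call get_handlebars_tag('{{#if foo}} body', 0) would stop at the '}}' and yield '{{#if foo}}', while get_django_tag does the same for '{% ... %}' delimiters.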
@@ -84,9 +84,9 @@ def check_pep8(files):
     ignored_rules = [
         'E402', 'E501', 'W503', 'E711', 'E201', 'E203', 'E202', 'E128', 'E226', 'E124', 'E125',
         'E126', 'E127', 'E121', 'E122', 'E123', 'E266', 'E265', 'E261', 'E301', 'E221', 'E303',
-        'E241', 'E712', 'E225', 'E401', 'E115', 'E114', 'E111', 'E222', 'E731', 'E302', 'E129',
+        'E241', 'E712', 'E702', 'E401', 'E115', 'E114', 'E111', 'E222', 'E731', 'E302', 'E129',
         'E741', 'E714', 'W391', 'E211', 'E713', 'E502', 'E131', 'E305', 'E251', 'E306', 'E231',
-        'E701', 'E702',
+        'E701',
     ]
     pep8 = subprocess.Popen(
         ['pycodestyle'] + files + ['--ignore={rules}'.format(rules=','.join(ignored_rules))],
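The two edits in this hunk work together: 'E225' drops out of ignored_rules (the 'E702' change only moves that code up a row), so the pycodestyle subprocess assembled at the bottom of the hunk now flags missing whitespace around operators. An illustrative standalone invocation, with a hypothetical filename and an abridged ignore list:

    pycodestyle some_file.py --ignore=E402,E501,W503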
@@ -109,7 +109,7 @@ def list_files(targets=[], ftypes=[], use_shebang=True, modified_only=False,
     else:
         return result_list

-if __name__=="__main__":
+if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="List files tracked by git and optionally filter by type")
     parser.add_argument('targets', nargs='*', default=[],
                         help='''files and directories to include in the result.
@@ -82,10 +82,10 @@ def update_user_activity(request, user_profile):
     else:
         query = request.META['PATH_INFO']

-    event={'query': query,
-           'user_profile_id': user_profile.id,
-           'time': datetime_to_timestamp(now()),
-           'client': request.client.name}
+    event = {'query': query,
+             'user_profile_id': user_profile.id,
+             'time': datetime_to_timestamp(now()),
+             'client': request.client.name}
     queue_json_publish("user_activity", event, lambda event: None)

 # Based on django.views.decorators.http.require_http_methods
@@ -544,7 +544,7 @@ def do_set_realm_default_language(realm, default_language):
         # NB: remove this once we upgrade to Django 1.9
         # zh-cn and zh-tw will be replaced by zh-hans and zh-hant in
         # Django 1.9
-        default_language= 'zh_HANS'
+        default_language = 'zh_HANS'

     realm.default_language = default_language
     realm.save(update_fields=['default_language'])
@@ -2441,18 +2441,18 @@ def do_update_user_presence(user_profile, client, log_time, status):

 def update_user_activity_interval(user_profile, log_time):
     # type: (UserProfile, datetime.datetime) -> None
-    event={'user_profile_id': user_profile.id,
-           'time': datetime_to_timestamp(log_time)}
+    event = {'user_profile_id': user_profile.id,
+             'time': datetime_to_timestamp(log_time)}
     queue_json_publish("user_activity_interval", event,
                        lambda e: do_update_user_activity_interval(user_profile, log_time))

 def update_user_presence(user_profile, client, log_time, status,
                          new_user_input):
     # type: (UserProfile, Client, datetime.datetime, int, bool) -> None
-    event={'user_profile_id': user_profile.id,
-           'status': status,
-           'time': datetime_to_timestamp(log_time),
-           'client': client.name}
+    event = {'user_profile_id': user_profile.id,
+             'status': status,
+             'time': datetime_to_timestamp(log_time),
+             'client': client.name}

     queue_json_publish("user_presence", event,
                        lambda e: do_update_user_presence(user_profile, client,
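Both helpers in this hunk follow the same shape: build a plain-dict event, then hand it to queue_json_publish along with a callback. A hedged reading, since the helper's body is not shown here: the lambda is presumably a fallback processor used when no real queue broker is running, which would explain why update_user_activity earlier passes a do-nothing lambda while these two forward to their do_* counterparts.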
@@ -3302,7 +3302,7 @@ def handle_push_notification(user_profile_id, missed_message):
             send_android_push_notification(user_profile, android_data)

     except UserMessage.DoesNotExist:
-        logging.error("Could not find UserMessage with message_id %s" %(missed_message['message_id'],))
+        logging.error("Could not find UserMessage with message_id %s" % (missed_message['message_id'],))

 def is_inactive(email):
     # type: (text_type) -> None
@@ -259,7 +259,7 @@ def get_tweet_id(url):
     to_match = parsed_url.path
     # In old-style twitter.com/#!/wdaher/status/1231241234-style URLs, we need to look at the fragment instead
     if parsed_url.path == '/' and len(parsed_url.fragment) > 5:
-        to_match= parsed_url.fragment
+        to_match = parsed_url.fragment

     tweet_id_match = re.match(r'^!?/.*?/status(es)?/(?P<tweetid>\d{10,18})(/photo/[0-9])?/?$', to_match)
     if not tweet_id_match:
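A worked example of the regex above: for the old-style URL named in the comment, parsed_url.fragment is '!/wdaher/status/1231241234', which the pattern accepts as the optional '!', a lazily matched user segment, 'status', and the ten-digit tweetid group, so get_tweet_id would yield '1231241234' (assuming the function returns the named group, which is outside this hunk).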
@@ -159,7 +159,7 @@ def send_digest_email(user_profile, html_content, text_content):

 def handle_digest_email(user_profile_id, cutoff):
     # type: (int, float) -> None
-    user_profile=UserProfile.objects.get(id=user_profile_id)
+    user_profile = UserProfile.objects.get(id=user_profile_id)
     # Convert from epoch seconds to a datetime object.
     cutoff_date = datetime.datetime.utcfromtimestamp(int(cutoff))

@@ -223,7 +223,7 @@ class Config(object):
         self.post_process_data = post_process_data
         self.concat_and_destroy = concat_and_destroy
         self.id_source = id_source
-        self.source_filter= source_filter
+        self.source_filter = source_filter
         self.children = [] # type: List[Config]

         if normal_parent:
@@ -556,7 +556,7 @@ def fetch_user_profile(response, config, context):
     exportable_user_ids = context['exportable_user_ids']

     query = UserProfile.objects.filter(realm_id=realm.id)
-    exclude=['password', 'api_key']
+    exclude = ['password', 'api_key']
     rows = make_raw(list(query), exclude=exclude)

     normal_rows = [] # type: List[Record]
@@ -1005,7 +1005,7 @@ def do_write_stats_file_for_realm_export(output_dir):
     logging.info('Writing stats file: %s\n' % (stats_file,))
     with open(stats_file, 'w') as f:
         for fn in fns:
-            f.write(os.path.basename(fn) +'\n')
+            f.write(os.path.basename(fn) + '\n')
             payload = open(fn).read()
             data = ujson.loads(payload)
             for k in sorted(data):
@@ -102,7 +102,7 @@ def act_on_message_ranges(db, orm, tasks, batch_size=5000, sleep=0.5):

     max_id = all_objects.all().order_by('-id')[0].id
     print("max_id = %d" % (max_id,))
-    overhead = int((max_id + 1 - min_id)/ batch_size * sleep / 60)
+    overhead = int((max_id + 1 - min_id) / batch_size * sleep / 60)
     print("Expect this to take at least %d minutes, just due to sleeps alone." % (overhead,))

     while min_id <= max_id:
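To sanity-check the fixed expression with the signature's defaults (batch_size=5000, sleep=0.5): a table of one million rows means 200 batches, 200 * 0.5 = 100 seconds spent sleeping, and overhead = int(100 / 60) = 1 minute, matching the "at least %d minutes" message.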
@@ -78,7 +78,7 @@ Usage: ./manage.py create_realm --string_id=acme --name='Acme'"""

     def handle(self, *args, **options):
         # type: (*Any, **Any) -> None
-        string_id=options["string_id"]
+        string_id = options["string_id"]
         name = options["name"]
         domain = options["domain"]

@@ -114,7 +114,7 @@ class Command(BaseCommand):

             if django.conf.settings.DEBUG:
                 instance.set_blocking_log_threshold(5)
-                instance.handle_callback_exception=handle_callback_exception
+                instance.handle_callback_exception = handle_callback_exception
             instance.start()
         except KeyboardInterrupt:
             sys.exit(0)
@@ -363,7 +363,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('social.backends.github.GithubOAuth2.do_auth',
                         side_effect=self.do_auth), \
                 mock.patch('zerver.views.auth.login'):
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             result = self.backend.do_auth(response=response)
             self.assertNotIn('subdomain=1', result.url)

@@ -372,7 +372,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('social.backends.github.GithubOAuth2.do_auth',
                         side_effect=self.do_auth):
             with self.settings(REALMS_HAVE_SUBDOMAINS=True):
-                response=dict(email=self.email, name=self.name)
+                response = dict(email=self.email, name=self.name)
                 result = self.backend.do_auth(response=response)
                 self.assertIn('subdomain=1', result.url)

@@ -381,7 +381,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('social.backends.github.GithubOAuth2.do_auth',
                         side_effect=self.do_auth), \
                 mock.patch('zproject.backends.SocialAuthMixin.process_do_auth') as result:
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             self.backend.do_auth('fake-access-token', response=response)

             kwargs = {'realm_subdomain': 'acme',
@@ -394,7 +394,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('social.backends.github.GithubTeamOAuth2.do_auth',
                         side_effect=self.do_auth), \
                 mock.patch('zproject.backends.SocialAuthMixin.process_do_auth') as result:
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             with self.settings(SOCIAL_AUTH_GITHUB_TEAM_ID='zulip-webapp'):
                 self.backend.do_auth('fake-access-token', response=response)

@@ -409,7 +409,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
                         side_effect=AuthFailed('Not found')), \
                 mock.patch('logging.info'), \
                 mock.patch('zproject.backends.SocialAuthMixin.process_do_auth') as result:
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             with self.settings(SOCIAL_AUTH_GITHUB_TEAM_ID='zulip-webapp'):
                 self.backend.do_auth('fake-access-token', response=response)
             kwargs = {'realm_subdomain': 'acme',
@@ -422,7 +422,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('social.backends.github.GithubOrganizationOAuth2.do_auth',
                         side_effect=self.do_auth), \
                 mock.patch('zproject.backends.SocialAuthMixin.process_do_auth') as result:
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             with self.settings(SOCIAL_AUTH_GITHUB_ORG_NAME='Zulip'):
                 self.backend.do_auth('fake-access-token', response=response)

@@ -437,7 +437,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
                         side_effect=AuthFailed('Not found')), \
                 mock.patch('logging.info'), \
                 mock.patch('zproject.backends.SocialAuthMixin.process_do_auth') as result:
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             with self.settings(SOCIAL_AUTH_GITHUB_ORG_NAME='Zulip'):
                 self.backend.do_auth('fake-access-token', response=response)
             kwargs = {'realm_subdomain': 'acme',
@@ -449,7 +449,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         # type: () -> None
         with mock.patch('zproject.backends.get_user_profile_by_email',
                         side_effect=UserProfile.DoesNotExist("Do not exist")):
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             return_data = dict() # type: Dict[str, Any]
             user = self.backend.authenticate(return_data=return_data, response=response)
             self.assertIs(user, None)
@@ -466,7 +466,7 @@ class GitHubAuthBackendTest(ZulipTestCase):
         with mock.patch('zerver.views.auth.login_or_register_remote_user') as result, \
                 mock.patch('social.backends.github.GithubOAuth2.do_auth',
                            side_effect=do_auth_inactive):
-            response=dict(email=self.email, name=self.name)
+            response = dict(email=self.email, name=self.name)
             user = self.backend.do_auth(response=response)
             result.assert_not_called()
             self.assertIs(user, None)
@@ -487,7 +487,7 @@ class GitHubAuthBackendTest(ZulipTestCase):

         with mock.patch('social.backends.github.GithubOAuth2.do_auth',
                         side_effect=do_auth):
-            response=dict(email='nonexisting@phantom.com', name='Ghost')
+            response = dict(email='nonexisting@phantom.com', name='Ghost')
             result = self.backend.do_auth(response=response)
             self.assert_in_response('action="/register/"', result)
             self.assert_in_response('Your email address does not correspond to any '
@@ -419,7 +419,7 @@ class BugdownTest(TestCase):
         # Needs to mock an actual message because that's how bugdown obtains the realm
         msg = Message(sender=get_user_profile_by_email("hamlet@zulip.com"))
         converted = bugdown.convert(":test:", "zulip.com", msg)
-        self.assertEqual(converted, '<p>%s</p>' %(emoji_img(':test:', url)))
+        self.assertEqual(converted, '<p>%s</p>' % (emoji_img(':test:', url)))

         do_remove_realm_emoji(zulip_realm, 'test')
         converted = bugdown.convert(":test:", "zulip.com", msg)
@@ -31,7 +31,7 @@ class ActivityTest(ZulipTestCase):
         client, _ = Client.objects.get_or_create(name='website')
         query = '/json/users/me/pointer'
         last_visit = timezone.now()
-        count=150
+        count = 150
         for user_profile in UserProfile.objects.all():
             UserActivity.objects.get_or_create(
                 user_profile=user_profile,
@@ -541,8 +541,8 @@ so we didn't send them an invitation. We did send invitations to everyone else!"
         # type: () -> None
         self.login("hamlet@zulip.com")
         user = get_user_profile_by_email('hamlet@zulip.com')
-        user.invites_granted=1
-        user.invites_used=0
+        user.invites_granted = 1
+        user.invites_used = 0
         user.save()

         invitee = "alice-test@zulip.com"
@@ -122,7 +122,7 @@ class SignupWorker(QueueProcessingWorker):

     def consume(self, data):
         # type: (Mapping[str, Any]) -> None
-        merge_vars=data['merge_vars']
+        merge_vars = data['merge_vars']
         # This should clear out any invitation reminder emails
         clear_followup_emails_queue(data["EMAIL"])
         if settings.MAILCHIMP_API_KEY and settings.PRODUCTION:
@@ -77,7 +77,7 @@ TERMS_OF_SERVICE = 'corporate/terms.md'

 # Buckets used for Amazon S3 integration for storing files and user avatars.
 S3_AUTH_UPLOADS_BUCKET = "zulip-user-uploads"
-S3_AVATAR_BUCKET="humbug-user-avatars"
+S3_AVATAR_BUCKET = "humbug-user-avatars"

 APNS_SANDBOX = False
 APNS_FEEDBACK = "feedback_production"