lint: Clean up W503 PEP-8 warning.

Tim Abbott 2017-01-23 20:50:04 -08:00
parent bde2da7dfd
commit 22d1aa396b
22 changed files with 94 additions and 95 deletions
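
For context: pycodestyle's W503 check flags a line break placed before a binary operator. This commit stops ignoring that check, so every wrapped boolean or string-concatenation expression in the diff below is rewritten to put the operator at the end of the continued line rather than at the start of the next one. A minimal sketch of the two styles (the condition and names here are made up for illustration, not taken from the diff):

# Old style, now flagged as W503 (operator starts the continuation line):
if (user is not None
        and user.is_active):
    do_something()

# Style adopted by this commit (operator trails the line being continued):
if (user is not None and
        user.is_active):
    do_something()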

@@ -79,9 +79,9 @@ class CountingBackoff(object):
     def _check_success_timeout(self):
         # type: () -> None
-        if (self.timeout_success_equivalent is not None
-                and self.last_attempt_time != 0
-                and time.time() - self.last_attempt_time > self.timeout_success_equivalent):
+        if (self.timeout_success_equivalent is not None and
+                self.last_attempt_time != 0 and
+                time.time() - self.last_attempt_time > self.timeout_success_equivalent):
             self.number_of_retries = 0
 
 class RandomExponentialBackoff(CountingBackoff):

@@ -75,9 +75,9 @@ def jid_to_zulip(jid):
 def zulip_to_jid(email, jabber_domain):
     # type: (str, str) -> JID
     jid = JID(email, domain=jabber_domain)
-    if (options.zulip_email_suffix
-            and options.zulip_email_suffix in jid.username
-            and not jid.username.endswith("-bot")):
+    if (options.zulip_email_suffix and
+            options.zulip_email_suffix in jid.username and
+            not jid.username.endswith("-bot")):
         jid.username = jid.username.rpartition(options.zulip_email_suffix)[0]
     return jid
@@ -366,10 +366,10 @@ option does not affect login credentials.'''.replace("\n", " "))
                             default=None,
                             action='store',
                             help="Your Jabber JID. If a resource is specified, "
-                            + "it will be used as the nickname when joining MUCs. "
-                            + "Specifying the nickname is mostly useful if you want "
-                            + "to run the public mirror from a regular user instead of "
-                            + "from a dedicated account.")
+                            "it will be used as the nickname when joining MUCs. "
+                            "Specifying the nickname is mostly useful if you want "
+                            "to run the public mirror from a regular user instead of "
+                            "from a dedicated account.")
     jabber_group.add_option('--jabber-password',
                             default=None,
                             action='store',
@@ -378,7 +378,7 @@ option does not affect login credentials.'''.replace("\n", " "))
                             default=None,
                             action='store',
                             help="Your Jabber conference domain (E.g. conference.jabber.example.com). "
-                            + "If not specifed, \"conference.\" will be prepended to your JID's domain.")
+                            "If not specifed, \"conference.\" will be prepended to your JID's domain.")
     jabber_group.add_option('--no-use-tls',
                             default=None,
                             action='store_true')
@@ -413,8 +413,8 @@ option does not affect login credentials.'''.replace("\n", " "))
         pass
     for option in ("jid", "jabber_password", "conference_domain", "mode", "zulip_email_suffix",
                    "jabber_server_address", "jabber_server_port"):
-        if (getattr(options, option) is None
-                and config.has_option("jabber_mirror", option)):
+        if (getattr(options, option) is None and
+                config.has_option("jabber_mirror", option)):
             setattr(options, option, config.get("jabber_mirror", option))
 
     for option in ("no_use_tls",):
@@ -435,7 +435,7 @@ option does not affect login credentials.'''.replace("\n", " "))
     if None in (options.jid, options.jabber_password):
         config_error("You must specify your Jabber JID and Jabber password either "
-                     + "in the Zulip configuration file or on the commandline")
+                     "in the Zulip configuration file or on the commandline")
 
     zulipToJabber = ZulipToJabberBot(zulip.init_from_options(options, "JabberMirror/" + __version__))
     # This won't work for open realms that don't have a consistent domain

@@ -104,8 +104,8 @@ def unwrap_lines(body):
     previous_line = lines[0]
     for line in lines[1:]:
         line = line.rstrip()
-        if (re.match(r'^\W', line, flags=re.UNICODE)
-                and re.match(r'^\W', previous_line, flags=re.UNICODE)):
+        if (re.match(r'^\W', line, flags=re.UNICODE) and
+                re.match(r'^\W', previous_line, flags=re.UNICODE)):
             result += previous_line + "\n"
         elif (line == "" or
               previous_line == "" or

@@ -171,16 +171,16 @@ class CommuteHandler(object):
         # determines if fare information is available
         try:
-            fare = ('Fare: ' + variable_list["fare"]["currency"]
-                    + variable_list["fare"]["text"])
+            fare = ('Fare: ' + variable_list["fare"]["currency"] +
+                    variable_list["fare"]["text"])
             output += '\n' + fare
         except (KeyError, IndexError):
             pass
         # determines if traffic duration information is available
         try:
-            traffic_duration = ('Duration in traffic: '
-                                + variable_list["duration_in_traffic"]
+            traffic_duration = ('Duration in traffic: ' +
+                                variable_list["duration_in_traffic"]
                                 ["text"])
             output += '\n' + traffic_duration
         except (KeyError, IndexError):
@@ -208,13 +208,13 @@ class CommuteHandler(object):
             return request.json()
         else:
             self.send_info(message,
-                           "Something went wrong. Please try again."
-                           + " Error: {error_num}.\n{error_text}"
+                           "Something went wrong. Please try again." +
+                           " Error: {error_num}.\n{error_text}"
                            .format(error_num=request.status_code,
                                    error_text=request.text), client)
             return
-        r = requests.get('https://maps.googleapis.com/maps/api/'
-                         + 'distancematrix/json', params=params)
+        r = requests.get('https://maps.googleapis.com/maps/api/' +
+                         'distancematrix/json', params=params)
         result = validate_requests(r)
         return result

@@ -120,9 +120,9 @@ Example Inputs:
             return
         if received_json['meta']['code'] == 200:
-            response_msg = ('Food nearby ' + params['near']
-                            + ' coming right up:\n'
-                            + self.format_json(received_json['response']['venues']))
+            response_msg = ('Food nearby ' + params['near'] +
+                            ' coming right up:\n' +
+                            self.format_json(received_json['response']['venues']))
             self.send_info(message, response_msg, client)
             return

@@ -67,18 +67,18 @@ def get_deployment_lock(error_rerun_script):
             got_lock = True
             break
         except OSError:
-            print(WARNING + "Another deployment in progress; waiting for lock... "
-                  + "(If no deployment is running, rmdir %s)" % (LOCK_DIR,) + ENDC)
+            print(WARNING + "Another deployment in progress; waiting for lock... " +
+                  "(If no deployment is running, rmdir %s)" % (LOCK_DIR,) + ENDC)
             sys.stdout.flush()
             time.sleep(3)
 
     if not got_lock:
-        print(FAIL + "Deployment already in progress. Please run\n"
-              + " %s\n" % (error_rerun_script,)
-              + "manually when the previous deployment finishes, or run\n"
-              + " rmdir %s\n" % (LOCK_DIR,)
-              + "if the previous deployment crashed."
-              + ENDC)
+        print(FAIL + "Deployment already in progress. Please run\n" +
+              " %s\n" % (error_rerun_script,) +
+              "manually when the previous deployment finishes, or run\n" +
+              " rmdir %s\n" % (LOCK_DIR,) +
+              "if the previous deployment crashed." +
+              ENDC)
         sys.exit(1)
 
 def release_deployment_lock():
@@ -98,9 +98,9 @@ def run(args, **kwargs):
         subprocess.check_call(args, **kwargs)
     except subprocess.CalledProcessError:
         print()
-        print(WHITEONRED + "Error running a subcommand of %s: %s" % (sys.argv[0], " ".join(args))
-              + ENDC)
-        print(WHITEONRED + "Actual error output for the subcommand is just above this."
-              + ENDC)
+        print(WHITEONRED + "Error running a subcommand of %s: %s" % (sys.argv[0], " ".join(args)) +
+              ENDC)
+        print(WHITEONRED + "Actual error output for the subcommand is just above this." +
+              ENDC)
         print()
         raise

@@ -83,14 +83,14 @@ def check_html_templates(templates, modified_only, all_dups):
                           if ids in IGNORE_IDS and len(fns) > 1}
     for ids, fns in ignorable_ids_dict.items():
-        logging.warning("Duplicate ID(s) detected :Id '" + ids
-                        + "' present at following files:")
+        logging.warning("Duplicate ID(s) detected :Id '" + ids +
+                        "' present at following files:")
         for fn in fns:
             print(fn)
     for ids, fns in bad_ids_dict.items():
-        logging.error("Duplicate ID(s) detected :Id '" + ids
-                      + "' present at following files:")
+        logging.error("Duplicate ID(s) detected :Id '" + ids +
+                      "' present at following files:")
         for fn in fns:
             print(fn)

@@ -24,10 +24,10 @@ def get_templates():
 def run():
     # type: () -> None
-    subprocess.check_call(['node', 'node_modules/.bin/handlebars']
-                          + get_templates()
-                          + ['--output', os.path.join(STATIC_PATH, 'templates/compiled.js'),
-                             '--known', 'if,unless,each,with'])
+    subprocess.check_call(['node', 'node_modules/.bin/handlebars'] +
+                          get_templates() +
+                          ['--output', os.path.join(STATIC_PATH, 'templates/compiled.js'),
+                           '--known', 'if,unless,each,with'])
 
 def run_forever():
     # type: () -> None

@@ -87,7 +87,6 @@ def check_pep8(files):
         # ignored. It either doesn't fit with the style of the project or should
         # actually be cleaned up.
         #
-        'W503',
         'E114', 'E115', 'E121', 'E123', 'E126', 'E226', 'E241', 'E261', 'E302',
         'E305', 'E306', 'E401', 'E501', 'E702', 'E711', 'E712', 'E713', 'E714',
         'E741',
@@ -622,8 +621,8 @@ def run():
         # type: () -> int
         if len(by_lang['js']) == 0:
             return 0
-        result = subprocess.call(['node', 'node_modules/.bin/eslint', '--quiet']
-                                 + by_lang['js'])
+        result = subprocess.call(['node', 'node_modules/.bin/eslint', '--quiet'] +
+                                 by_lang['js'])
         return result
 
     @lint
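
The first hunk above removes 'W503' from the list of pycodestyle codes the linter suppresses, which is what turns the check on across the tree. As a rough sketch of how an ignore list like this can drive a check (using pycodestyle's documented Python API rather than Zulip's actual lint wrapper, with a hypothetical target file):

import pycodestyle

# All codes except W503 remain suppressed; W503 itself is now reported.
IGNORED_CODES = ['E114', 'E115', 'E121', 'E123', 'E126', 'E226', 'E241', 'E261', 'E302',
                 'E305', 'E306', 'E401', 'E501', 'E702', 'E711', 'E712', 'E713', 'E714',
                 'E741']

style = pycodestyle.StyleGuide(ignore=IGNORED_CODES)
report = style.check_files(['example_module.py'])  # hypothetical file to check
if report.total_errors:
    print('pycodestyle reported %d issue(s)' % (report.total_errors,))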

@@ -109,8 +109,8 @@ for js_group, filespec in six.iteritems(JS_SPECS):
                 for filename in filespec.get('minifed_source_filenames', [])]
     out_file = os.path.join(MIN_DIR, os.path.basename(filespec['output_filename']))
-    map_file = os.path.join(MAP_DIR, os.path.basename(filespec['output_filename'])
-                            + '.map')
+    map_file = os.path.join(MAP_DIR, os.path.basename(filespec['output_filename']) +
+                            '.map')
 
     if (not 'force_minify' in filespec) and \
        (prev_deploy and len(set(in_files) & changed_files) == 0):

@@ -32,8 +32,8 @@ fp = open('var/log/update-prod-static.log', 'w')
 setup_node_modules(npm_args=['--production'], stdout=fp, stderr=fp)
 
 # Compile Handlebars templates and minify JavaScript.
-subprocess.check_call(['./tools/minify-js']
-                      + (['--prev-deploy', prev_deploy] if prev_deploy else []),
+subprocess.check_call(['./tools/minify-js'] +
+                      (['--prev-deploy', prev_deploy] if prev_deploy else []),
                       stdout=fp, stderr=fp)
 
 # Build emoji

@@ -94,9 +94,9 @@ def require_post(func):
     @wraps(func)
     def wrapper(request, *args, **kwargs):
         # type: (HttpRequest, *Any, **Any) -> HttpResponse
-        if (request.method != "POST"
-            and not (request.method == "SOCKET"
-                     and request.META['zulip.emulated_method'] == "POST")):
+        if (request.method != "POST" and
+            not (request.method == "SOCKET" and
+                 request.META['zulip.emulated_method'] == "POST")):
             if request.method == "SOCKET":
                 err_method = "SOCKET/%s" % (request.META['zulip.emulated_method'],)
             else:
@@ -191,11 +191,11 @@ def validate_api_key(request, role, api_key, is_webhook=False):
     except AttributeError:
         # Deployment objects don't have realms
         pass
-    if (not check_subdomain(get_subdomain(request), profile.realm.subdomain)
+    if (not check_subdomain(get_subdomain(request), profile.realm.subdomain) and
         # Allow access to localhost for Tornado
-        and not (settings.RUNNING_INSIDE_TORNADO and
-                 request.META["SERVER_NAME"] == "127.0.0.1" and
-                 request.META["REMOTE_ADDR"] == "127.0.0.1")):
+        not (settings.RUNNING_INSIDE_TORNADO and
+             request.META["SERVER_NAME"] == "127.0.0.1" and
+             request.META["REMOTE_ADDR"] == "127.0.0.1")):
         logging.warning("User %s attempted to access API on wrong subdomain %s" % (
             profile.email, get_subdomain(request)))
         raise JsonableError(_("Account is not associated with this subdomain"))
@@ -486,17 +486,17 @@ def is_local_addr(addr):
 # secret, and also the originating IP (for now).
 def authenticate_notify(request):
     # type: (HttpRequest) -> bool
-    return (is_local_addr(request.META['REMOTE_ADDR'])
-            and request.POST.get('secret') == settings.SHARED_SECRET)
+    return (is_local_addr(request.META['REMOTE_ADDR']) and
+            request.POST.get('secret') == settings.SHARED_SECRET)
 
 def client_is_exempt_from_rate_limiting(request):
     # type: (HttpRequest) -> bool
     # Don't rate limit requests from Django that come from our own servers,
     # and don't rate-limit dev instances
-    return ((request.client and request.client.name.lower() == 'internal')
-            and (is_local_addr(request.META['REMOTE_ADDR']) or
-                 settings.DEBUG_RATE_LIMITING))
+    return ((request.client and request.client.name.lower() == 'internal') and
+            (is_local_addr(request.META['REMOTE_ADDR']) or
+             settings.DEBUG_RATE_LIMITING))
 
 def internal_notify_view(view_func):
     # type: (ViewFuncT) -> ViewFuncT

@@ -122,8 +122,8 @@ def log_event(event):
         os.mkdir(settings.EVENT_LOG_DIR)
     template = os.path.join(settings.EVENT_LOG_DIR,
-                            '%s.' + platform.node()
-                            + datetime.datetime.now().strftime('.%Y-%m-%d'))
+                            '%s.' + platform.node() +
+                            datetime.datetime.now().strftime('.%Y-%m-%d'))
 
     with lockfile(template % ('lock',)):
         with open(template % ('events',), 'a') as log:
@@ -1128,8 +1128,8 @@ def recipient_for_emails(emails, not_forged_mirror_message,
     # If the private message is just between the sender and
     # another person, force it to be a personal internally
-    if (len(recipient_profile_ids) == 2
-            and sender.id in recipient_profile_ids):
+    if (len(recipient_profile_ids) == 2 and
+            sender.id in recipient_profile_ids):
         recipient_profile_ids.remove(sender.id)
 
     if len(recipient_profile_ids) > 1:

@@ -55,12 +55,12 @@ def add_bool_columns(db, table, cols):
     coltype = 'boolean'
     val = 'false'
-    stmt = (('ALTER TABLE %s ' % (table,))
-            + ', '.join(['ADD %s %s' % (col, coltype) for col in cols]))
+    stmt = (('ALTER TABLE %s ' % (table,)) +
+            ', '.join(['ADD %s %s' % (col, coltype) for col in cols]))
     timed_ddl(db, stmt)
-    stmt = (('ALTER TABLE %s ' % (table,))
-            + ', '.join(['ALTER %s SET DEFAULT %s' % (col, val) for col in cols]))
+    stmt = (('ALTER TABLE %s ' % (table,)) +
+            ', '.join(['ALTER %s SET DEFAULT %s' % (col, val) for col in cols]))
     timed_ddl(db, stmt)
 
     vals = [val] * len(cols)
@@ -69,8 +69,8 @@ def add_bool_columns(db, table, cols):
     stmt = 'ANALYZE %s' % (table,)
     timed_ddl(db, stmt)
-    stmt = (('ALTER TABLE %s ' % (table,))
-            + ', '.join(['ALTER %s SET NOT NULL' % (col,) for col in cols]))
+    stmt = (('ALTER TABLE %s ' % (table,)) +
+            ', '.join(['ALTER %s SET NOT NULL' % (col,) for col in cols]))
     timed_ddl(db, stmt)
 
 def create_index_if_nonexistant(db, table, col, index):

@@ -84,8 +84,8 @@ def rest_dispatch(request, **kwargs):
         # for some special views (e.g. serving a file that has been
         # uploaded), we support using the same url for web and API clients.
-        if ('override_api_url_scheme' in view_flags
-                and request.META.get('HTTP_AUTHORIZATION', None) is not None):
+        if ('override_api_url_scheme' in view_flags and
+                request.META.get('HTTP_AUTHORIZATION', None) is not None):
             # This request API based authentication.
             target_function = authenticated_rest_api_view()(target_function)
         # /json views (web client) validate with a session token (cookie)

@@ -350,8 +350,8 @@ class SessionHostDomainMiddleware(SessionMiddleware):
     def process_response(self, request, response):
         # type: (HttpRequest, HttpResponse) -> HttpResponse
         if settings.REALMS_HAVE_SUBDOMAINS:
-            if (not request.path.startswith("/static/") and not request.path.startswith("/api/")
-                    and not request.path.startswith("/json/")):
+            if (not request.path.startswith("/static/") and not request.path.startswith("/api/") and
+                    not request.path.startswith("/json/")):
                 subdomain = get_subdomain(request)
                 if (request.get_host() == "127.0.0.1:9991" or request.get_host() == "localhost:9991"):
                     return redirect("%s%s" % (settings.EXTERNAL_URI_SCHEME,

@@ -470,8 +470,8 @@ class GetOldMessagesTest(ZulipTestCase):
             return ','.join(sorted(set([r['email'] for r in dr] + [me])))
 
         personals = [m for m in get_user_messages(get_user_profile_by_email(me))
-                     if m.recipient.type == Recipient.PERSONAL
-                     or m.recipient.type == Recipient.HUDDLE]
+                     if m.recipient.type == Recipient.PERSONAL or
+                     m.recipient.type == Recipient.HUDDLE]
         if not personals:
             # FIXME: This is bad. We should use test data that is guaranteed
             # to contain some personals for every user. See #617.

@@ -16,8 +16,8 @@ def create_tornado_application():
     )
     # Application is an instance of Django's standard wsgi handler.
-    return tornado.web.Application([(url, AsyncDjangoHandler) for url in urls]
-                                   + get_sockjs_router().urls,
+    return tornado.web.Application(([(url, AsyncDjangoHandler) for url in urls] +
+                                    get_sockjs_router().urls),
                                    debug=settings.DEBUG,
                                    # Disable Tornado's own request logging, since we have our own
                                    log_function=lambda x: None)

@@ -169,8 +169,8 @@ class ClientDescriptor(object):
         if not hasattr(self, 'queue_timeout'):
             self.queue_timeout = IDLE_EVENT_QUEUE_TIMEOUT_SECS
-        return (self.current_handler_id is None
-                and now - self.last_connection_time >= self.queue_timeout)
+        return (self.current_handler_id is None and
+                now - self.last_connection_time >= self.queue_timeout)
 
     def connect_handler(self, handler_id, client_name):
         # type: (int, Text) -> None
@@ -408,8 +408,8 @@ def gc_event_queues():
     # not have a current handler.
     do_gc_event_queues(to_remove, affected_users, affected_realms)
 
-    logging.info(('Tornado removed %d idle event queues owned by %d users in %.3fs.'
-                  + ' Now %d active queues, %s')
+    logging.info(('Tornado removed %d idle event queues owned by %d users in %.3fs.' +
+                  ' Now %d active queues, %s')
                  % (len(to_remove), len(affected_users), time.time() - start,
                     len(clients), handler_stats_string()))
     statsd.gauge('tornado.active_queues', len(clients))

@@ -373,9 +373,9 @@ def narrow_parameter(json):
         # We have to support a legacy tuple format.
         if isinstance(elem, list):
-            if (len(elem) != 2
-                    or any(not isinstance(x, str) and not isinstance(x, Text)
-                           for x in elem)):
+            if (len(elem) != 2 or
+                    any(not isinstance(x, str) and not isinstance(x, Text)
+                        for x in elem)):
                 raise ValueError("element is not a string pair")
             return dict(operator=elem[0], operand=elem[1])

@@ -121,8 +121,8 @@ def principal_to_user_profile(agent, principal):
     except UserProfile.DoesNotExist:
         principal_doesnt_exist = True
 
-    if (principal_doesnt_exist
-            or agent.realm != principal_user_profile.realm):
+    if (principal_doesnt_exist or
+            agent.realm != principal_user_profile.realm):
         # We have to make sure we don't leak information about which users
         # are registered for Zulip in a different realm. We could do
         # something a little more clever and check the domain part of the

@@ -22,8 +22,8 @@ PRODUCTION = config_file.has_option('machine', 'deploy_type')
 # Zulip run by Zulip, Inc. We will eventually be able to get rid of
 # them and just have the PRODUCTION flag, but we need them for now.
 ZULIP_COM_STAGING = PRODUCTION and config_file.get('machine', 'deploy_type') == 'zulip.com-staging'
-ZULIP_COM = ((PRODUCTION and config_file.get('machine', 'deploy_type') == 'zulip.com-prod')
-             or ZULIP_COM_STAGING)
+ZULIP_COM = ((PRODUCTION and config_file.get('machine', 'deploy_type') == 'zulip.com-prod') or
+             ZULIP_COM_STAGING)
 if not ZULIP_COM:
     raise Exception("You should create your own local settings from prod_settings_template.")