mirror of https://github.com/zulip/zulip.git

lint: Clean up E126 PEP-8 rule.

Commit: 4e171ce787
Parent: 31e7dcd86b
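E126 is the pep8/pycodestyle check for "continuation line over-indented for hanging indent"; this commit stops ignoring it (see the check_pep8 hunk below, which drops 'E126' from ignored_rules) and re-indents the call sites that violated it. A rough, illustrative sketch only — the model and field names below are placeholders, not lines taken from this diff:

    # Before: continuation lines pushed well past the hanging indent (flagged as E126)
    records = UserActivity.objects.filter(
                    user_profile=user_profile,
                    client=client)

    # After: a plain four-space hanging indent, which the checker accepts
    records = UserActivity.objects.filter(
        user_profile=user_profile,
        client=client)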
@@ -68,13 +68,13 @@ Usage examples:
# Report activity for a user.
user_profile = get_user_profile_by_email(arg)
self.compute_activity(UserActivity.objects.filter(
    user_profile=user_profile))
except UserProfile.DoesNotExist:
    try:
        # Report activity for a realm.
        realm = get_realm(arg)
        self.compute_activity(UserActivity.objects.filter(
            user_profile__realm=realm))
    except Realm.DoesNotExist:
        print("Unknown user or realm %s" % (arg,))
        exit(1)
@@ -31,11 +31,11 @@ class Command(BaseCommand):
# Has been active (on the website, for now) in the last 7 days.
activity_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=7)
return [activity.user_profile for activity in (
    UserActivity.objects.filter(user_profile__realm=realm,
                                user_profile__is_active=True,
                                last_visit__gt=activity_cutoff,
                                query="/json/users/me/pointer",
                                client__name="website"))]

def messages_sent_by(self, user, days_ago):
    # type: (UserProfile, int) -> int
@@ -699,9 +699,9 @@ def get_user_activity_records_for_realm(realm, is_bot):
]

records = UserActivity.objects.filter(
    user_profile__realm__string_id=realm,
    user_profile__is_active=True,
    user_profile__is_bot=is_bot
)
records = records.order_by("user_profile__email", "-last_visit")
records = records.select_related('user_profile', 'client').only(*fields)
@@ -718,7 +718,7 @@ def get_user_activity_records_for_email(email):
]

records = UserActivity.objects.filter(
    user_profile__email=email
)
records = records.order_by("-last_visit")
records = records.select_related('user_profile', 'client').only(*fields)
@@ -736,10 +736,10 @@ def raw_user_activity_table(records):
def row(record):
    # type: (QuerySet) -> List[Any]
    return [
        record.query,
        record.client.name,
        record.count,
        format_date_for_activity_reports(record.last_visit)
    ]

rows = list(map(row, records))
@@ -759,14 +759,14 @@ def get_user_activity_summary(records):
# type: (str, QuerySet) -> None
if action not in summary:
    summary[action] = dict(
        count=record.count,
        last_visit=record.last_visit
    )
else:
    summary[action]['count'] += record.count
    summary[action]['last_visit'] = max(
        summary[action]['last_visit'],
        record.last_visit
    )

if records:
@@ -820,13 +820,13 @@ def realm_activity_link(realm_str):
def realm_client_table(user_summaries):
    # type: (Dict[str, Dict[str, Dict[str, Any]]]) -> str
    exclude_keys = [
        'internal',
        'name',
        'use',
        'send',
        'pointer',
        'website',
        'desktop',
    ]

    rows = []
@@ -840,22 +840,22 @@ def realm_client_table(user_summaries):
count = v['count']
last_visit = v['last_visit']
row = [
    format_date_for_activity_reports(last_visit),
    client,
    name,
    email_link,
    count,
]
rows.append(row)

rows = sorted(rows, key=lambda r: r[0], reverse=True)

cols = [
    'Last visit',
    'Client',
    'Name',
    'Email',
    'Count',
]

title = 'Clients'
@@ -872,18 +872,18 @@ def user_activity_summary_table(user_summary):
count = v['count']
last_visit = v['last_visit']
row = [
    format_date_for_activity_reports(last_visit),
    client,
    count,
]
rows.append(row)

rows = sorted(rows, key=lambda r: r[0], reverse=True)

cols = [
    'last_visit',
    'client',
    'count',
]

title = 'User Activity'
@@ -944,15 +944,15 @@ def realm_user_summary_table(all_records, admin_emails):
rows = sorted(rows, key=by_used_time, reverse=True)

cols = [
    'Name',
    'Email',
    'Total sent',
    'Heard from',
    'Message sent',
    'Pointer motion',
    'Desktop',
    'ZulipiOS',
    'Android',
]

title = 'Summary'
@@ -48,8 +48,8 @@ parser.add_option('--new-short-name')
client = zulip.init_from_options(options)

print(client.create_user({
    'email': options.new_email,
    'password': options.new_password,
    'full_name': options.new_full_name,
    'short_name': options.new_short_name
}))
@@ -76,10 +76,10 @@ def trac_subject(ticket):
def send_update(ticket, content):
    # type: (Any, str) -> None
    client.send_message({
        "type": "stream",
        "to": config.STREAM_FOR_NOTIFICATIONS,
        "content": content,
        "subject": trac_subject(ticket)
    })

class ZulipPlugin(Component):
@@ -285,9 +285,9 @@ class Client(object):
vendor_version = platform.mac_ver()[0]

return "{client_name} ({vendor}; {vendor_version})".format(
    client_name=self.client_name,
    vendor=vendor,
    vendor_version=vendor_version,
)

def do_api_query(self, orig_request, url, method="POST", longpolling=False, files=None):
@@ -357,15 +357,15 @@ class Client(object):
client_cert = self.client_cert

res = requests.request(
    method,
    urllib.parse.urljoin(self.base_url, url),
    auth=requests.auth.HTTPBasicAuth(self.email,
                                     self.api_key),
    verify=self.tls_verification,
    cert=client_cert,
    timeout=90,
    headers={"User-agent": self.get_user_agent()},
    **kwargs)

# On 50x errors, try again after a short sleep
if str(res.status_code).startswith('5'):
@@ -252,16 +252,16 @@ logger.info("Sent Zephyr messages!")
for key, (stream, test) in hzkeys.items():
    if stream == "message":
        send_zulip({
            "type": "private",
            "content": str(key),
            "to": zulip_user,
        })
    else:
        send_zulip({
            "type": "stream",
            "subject": "test",
            "content": str(key),
            "to": stream,
        })
    receive_zephyrs()
@@ -65,10 +65,10 @@ class IRCBot(irc.bot.SingleServerIRCBot):

# Forward the PM to Zulip
print(zulip_client.send_message({
    "sender": sender,
    "type": "private",
    "to": "username@example.com",
    "content": content,
}))

def on_pubmsg(self, c, e):
@@ -81,12 +81,12 @@ class IRCBot(irc.bot.SingleServerIRCBot):

# Forward the stream message to Zulip
print(zulip_client.send_message({
    "forged": "yes",
    "sender": sender,
    "type": "stream",
    "to": stream,
    "subject": "IRC",
    "content": content,
}))

def on_dccmsg(self, c, e):
@@ -332,7 +332,7 @@ zulip configuration file under the jabber_mirror section (exceptions are noted
in their help sections). Keys have the same name as options with hyphens
replaced with underscores. Zulip configuration options go in the api section,
as normal.'''.replace("\n", " ")
)
parser.add_option(
    '--mode',
    default=None,
@@ -22,22 +22,22 @@ def test():
''' % (cmd, expected_response, client_dummy.output))
def sample_conversation():
    return [
        ('@convert 2 m cm', '2.0 m = 200.0 cm\n'),
        ('@converter 2 m cm', ''),
        ('@convert 12 celsius fahrenheit',
         '12.0 celsius = 53.600054 fahrenheit\n'),
        ('@convert 0.002 kilometer millimile',
         '0.002 kilometer = 1.2427424 millimile\n'),
        ('@convert 3 megabyte kilobit',
         '3.0 megabyte = 24576.0 kilobit\n'),
        (('foo @convert 120.5 g lb bar baz.\n'
          'baz bar bar @convert 22 k c lorem ipsum dolor'),
         ('1. conversion: 120.5 g = 0.26565703 lb\n'
          '2. conversion: 22.0 k = -251.15 c\n')),
        ('@convert foo bar',
         ('Too few arguments given. Enter `@convert help` '
          'for help on using the converter.\n')),
    ]

if __name__ == '__main__':
    test()
@@ -85,12 +85,12 @@ class IssueHandler(object):

# Creates the issue json, that is transmitted to the github api servers
issue = {
    'title': new_issue_title,
    'body': '{} **Sent by [{}](https://chat.zulip.org/#) from zulip**'.format(issue_content, original_sender),
    'assignee': '',
    'milestone': 'none',
    'labels': [''],
}
# Sends the HTTP post request
r = session.post(url_new, json.dumps(issue))
@@ -110,13 +110,13 @@ for device in macs.values():
# Horrible hack to route return packets on the correct interface
# See http://unix.stackexchange.com/a/4421/933
subprocess.check_call(
    ['/sbin/ip', 'rule', 'add', 'fwmark', str(device_number), 'table', str(device_number)])
subprocess.check_call(
    ['/sbin/ip', 'route', 'add', '0.0.0.0/0', 'table', str(device_number), 'dev',
     'ens%i' % device_number, 'via', guess_gateway(device_number)])
subprocess.check_call(
    ['/sbin/iptables', '-t', 'mangle', '-A', 'OUTPUT', '-m', 'conntrack', '--ctorigdst',
     address_of(device_number), '-j', 'MARK', '--set-mark', str(device_number)])

to_configure.remove(address_of(device_number))
@@ -126,8 +126,8 @@ for device in macs.values():
log.info("Configuring %s with IP %s" % (device, ip))
subprocess.check_call(['/sbin/ifconfig', device, ip])
subprocess.check_call(
    ['/sbin/iptables', '-t', 'mangle', '-A', 'OUTPUT', '-m', 'conntrack', '--ctorigdst',
     ip, '-j', 'MARK', '--set-mark', str(device_number)])

for throwaway in range(2):
    # Don't freak out if this doens't work.
@@ -60,7 +60,7 @@ if not set(warn_queues) - set(("missedmessage_emails", "digest_emails")) and \
now_struct.tm_hour == 15 and now_struct.tm_min < 25:
    status = 0
    print("%s|%s|%s|processing digests, not alerting on elevated mail queues" % (
        now, status, states[status]))
    exit(0)

if status > 0:
@@ -10,7 +10,7 @@ from six.moves import range
tree = ET.parse('orig.svg')
elems = [tree.getroot().findall(
    ".//*[@id='%s']/{http://www.w3.org/2000/svg}tspan" % (name,))[0]
    for name in ('number_back', 'number_front')]

for i in range(1, 100):
    # Prepare a modified SVG
@@ -11,7 +11,7 @@ root_dir = os.path.dirname(tools_dir)
sys.path.insert(0, root_dir)

from tools.lib.test_script import (
    get_provisioning_status,
)

def run():
@@ -84,10 +84,6 @@ def check_pep8(files):
ignored_rules = [
    # Each of these rules are ignored for the explained reason.

    # 'continuation line over-indented for hanging indent'
    # Most of these we should probably clean up.
    'E126',

    # "multiple spaces before operator"
    # There are several typos here, but also several instances that are
    # being used for alignment in dict keys/values using the `dict`
@@ -130,8 +130,8 @@ for js_group, filespec in six.iteritems(JS_SPECS):
# (N.B. we include STATIC_HEADER_FILE before the JavaScripts.
# This way it doesn't throw off the source map.)
cmd = '%s --language_in ECMASCRIPT5 --create_source_map %s %s %s' % (
    CLOSURE_BINARY, map_file,
    settings.STATIC_HEADER_FILE, ' '.join(in_files))
js = subprocess.check_output(cmd, shell=True)

# Write out the JS
@@ -18,7 +18,7 @@ old_shebang_bytes = old_shebang.encode()
new_shebang_bytes = new_shebang.encode()

with tarfile.open(fileobj=stdin, mode='r|*') as in_tar, \
        tarfile.open(fileobj=stdout, mode='w', format=tarfile.PAX_FORMAT, pax_headers=in_tar.pax_headers) as out_tar:
    for info in in_tar:  # type: ignore # https://github.com/python/typeshed/pull/693
        if info.isfile():
            file = in_tar.extractfile(info)
@@ -17,7 +17,7 @@ from zerver.lib.timestamp import datetime_to_timestamp, timestamp_to_datetime
from zerver.lib.utils import statsd, get_subdomain, check_subdomain
from zerver.exceptions import RateLimited
from zerver.lib.rate_limiter import incr_ratelimit, is_ratelimited, \
    api_calls_left
from zerver.lib.request import REQ, has_request_variables, JsonableError, RequestVariableMissingError
from django.core.handlers import base
@@ -74,7 +74,7 @@ from zerver.lib.html_diff import highlight_html_differences
from zerver.lib.alert_words import user_alert_words, add_user_alert_words, \
    remove_user_alert_words, set_user_alert_words
from zerver.lib.push_notifications import num_push_devices_for_user, \
    send_apple_push_notification, send_android_push_notification
from zerver.lib.notifications import clear_followup_emails_queue
from zerver.lib.narrow import check_supported_events_narrow_filter
from zerver.lib.request import JsonableError
@@ -574,10 +574,10 @@ def do_set_realm_default_language(realm, default_language):
realm.default_language = default_language
realm.save(update_fields=['default_language'])
event = dict(
    type="realm",
    op="update",
    property="default_language",
    value=default_language
)
send_event(event, active_user_ids(realm))
@@ -789,7 +789,7 @@ def get_recipient_user_profiles(recipient, sender_id):
    'user_profile__realm__domain'
]
query = Subscription.objects.select_related("user_profile", "user_profile__realm").only(*fields).filter(
    recipient=recipient, active=True)
recipients = [s.user_profile for s in query]
else:
    raise ValueError('Bad recipient type')
@@ -939,9 +939,9 @@ def do_send_messages(messages):
message['message'].recipient.type == Recipient.PERSONAL and
settings.FEEDBACK_BOT in [up.email for up in message['recipients']]):
    queue_json_publish(
        'feedback_messages',
        message_to_dict(message['message'], apply_markdown=False),
        lambda x: None
    )

# Note that this does not preserve the order of message ids
@@ -1015,10 +1015,10 @@ def do_send_typing_notification(notification):
# Include a list of recipients in the event body to help identify where the typing is happening
recipient_dicts = [{'user_id': profile.id, 'email': profile.email} for profile in recipient_user_profiles]
event = dict(
    type = 'typing',
    op = notification['op'],
    sender = sender_dict,
    recipients = recipient_dicts)

send_event(event, user_ids_to_notify)
@@ -12,9 +12,9 @@ from zerver.lib.upload import upload_backend, MEDIUM_AVATAR_SIZE
def avatar_url(user_profile, medium=False):
    # type: (UserProfile, bool) -> Text
    return get_avatar_url(
        user_profile.avatar_source,
        user_profile.email,
        medium=medium)

def get_avatar_url(avatar_source, email, medium=False):
    # type: (Text, Text, bool) -> Text
@@ -1205,16 +1205,16 @@ def make_md_engine(key, opts):
md_engines[key] = markdown.Markdown(
    output_format = 'html',
    extensions = [
        'markdown.extensions.nl2br',
        'markdown.extensions.tables',
        codehilite.makeExtension(
            linenums=False,
            guess_lang=False
        ),
        fenced_code.makeExtension(),
        EscapeHtml(),
        Bugdown(realm_filters=opts["realm_filters"][0],
                realm=opts["realm"][0])])

def subject_links(realm_filters_key, subject):
    # type: (int, Text) -> List[Text]
@@ -1362,9 +1362,9 @@ def do_convert(content, message=None, message_realm=None, possible_words=None):
error_bot_realm = get_user_profile_by_email(settings.ERROR_BOT).realm
internal_send_message(error_bot_realm, settings.ERROR_BOT, "stream",
                      "errors", subject, "Markdown parser failed, email sent with details.")
mail.mail_admins(
    subject, "Failed message: %s\n\n%s\n\n" % (cleaned, traceback.format_exc()),
    fail_silently=False)
raise BugdownRenderingException()
finally:
    current_message = None
@@ -122,17 +122,17 @@ def der_encode_ticket(tkt):
[der_encode_integer(5),  # tktVno
 der_encode_string(tkt["realm"]),
 der_encode_sequence(  # PrincipalName
     [der_encode_int32(tkt["sname"]["nameType"]),
      der_encode_sequence([der_encode_string(c)
                           for c in tkt["sname"]["nameString"]],
                          tagged=False)]),
 der_encode_sequence(  # EncryptedData
     [der_encode_int32(tkt["encPart"]["etype"]),
      (der_encode_uint32(tkt["encPart"]["kvno"])
       if "kvno" in tkt["encPart"]
       else None),
      der_encode_octet_string(
          base64.b64decode(tkt["encPart"]["cipher"]))])]))

# Kerberos ccache writing code. Using format documentation from here:
# http://www.gnu.org/software/shishi/manual/html_node/The-Credential-Cache-Binary-File-Format.html
@@ -107,8 +107,8 @@ def gather_new_users(user_profile, threshold):
new_users = []  # type: List[UserProfile]
else:
    new_users = list(UserProfile.objects.filter(
        realm=user_profile.realm, date_joined__gt=threshold,
        is_bot=False))
user_names = [user.full_name for user in new_users]

return len(user_names), user_names
@@ -119,7 +119,7 @@ def gather_new_streams(user_profile, threshold):
new_streams = []  # type: List[Stream]
else:
    new_streams = list(get_active_streams(user_profile.realm).filter(
        invite_only=False, date_created__gt=threshold))

base_url = u"%s/#narrow/stream/" % (user_profile.realm.uri,)
@@ -189,12 +189,12 @@ class ZulipEmailForwardError(Exception):
def send_zulip(sender, stream, topic, content):
    # type: (Text, Stream, Text, Text) -> None
    internal_send_message(
        stream.realm,
        sender,
        "stream",
        stream.name,
        topic[:60],
        content[:2000])

def valid_stream(stream_name, token):
    # type: (Text, Text) -> bool
@@ -257,9 +257,9 @@ class Config(object):
the ordering correctly. You may simply
need to assign a virtual_parent, or there
may be deeper issues going on.''' % (
    self.table,
    self.id_source[0],
    self.virtual_parent.table))


def export_from_config(response, config, seed_object=None, context=None):
@@ -61,29 +61,29 @@ class MessageDict(object):
def to_dict_uncached_helper(message, apply_markdown):
    # type: (Message, bool) -> Dict[str, Any]
    return MessageDict.build_message_dict(
        apply_markdown = apply_markdown,
        message = message,
        message_id = message.id,
        last_edit_time = message.last_edit_time,
        edit_history = message.edit_history,
        content = message.content,
        subject = message.subject,
        pub_date = message.pub_date,
        rendered_content = message.rendered_content,
        rendered_content_version = message.rendered_content_version,
        sender_id = message.sender.id,
        sender_email = message.sender.email,
        sender_realm_id = message.sender.realm_id,
        sender_realm_domain = message.sender.realm.domain,
        sender_full_name = message.sender.full_name,
        sender_short_name = message.sender.short_name,
        sender_avatar_source = message.sender.avatar_source,
        sender_is_mirror_dummy = message.sender.is_mirror_dummy,
        sending_client_name = message.sending_client.name,
        recipient_id = message.recipient.id,
        recipient_type = message.recipient.type,
        recipient_type_id = message.recipient.type_id,
        reactions = Reaction.get_raw_db_rows([message.id])
    )

@staticmethod
@@ -94,29 +94,29 @@ class MessageDict(object):
all the relevant fields populated
'''
return MessageDict.build_message_dict(
    apply_markdown = apply_markdown,
    message = None,
    message_id = row['id'],
    last_edit_time = row['last_edit_time'],
    edit_history = row['edit_history'],
    content = row['content'],
    subject = row['subject'],
    pub_date = row['pub_date'],
    rendered_content = row['rendered_content'],
    rendered_content_version = row['rendered_content_version'],
    sender_id = row['sender_id'],
    sender_email = row['sender__email'],
    sender_realm_id = row['sender__realm__id'],
    sender_realm_domain = row['sender__realm__domain'],
    sender_full_name = row['sender__full_name'],
    sender_short_name = row['sender__short_name'],
    sender_avatar_source = row['sender__avatar_source'],
    sender_is_mirror_dummy = row['sender__is_mirror_dummy'],
    sending_client_name = row['sending_client__name'],
    recipient_id = row['recipient_id'],
    recipient_type = row['recipient__type'],
    recipient_type_id = row['recipient__type_id'],
    reactions=row['reactions']
)

@staticmethod
@@ -150,9 +150,9 @@ class MessageDict(object):
avatar_url = get_avatar_url(sender_avatar_source, sender_email)

display_recipient = get_display_recipient_by_id(
    recipient_id,
    recipient_type,
    recipient_type_id
)

if recipient_type == Recipient.STREAM:
@@ -438,7 +438,7 @@ def send_future_email(recipients, email_html, email_text, subject,
user_profile = get_user_profile_by_email(bounce_email)
do_change_enable_digest_emails(user_profile, False)
log_digest_event("%s\nTurned off digest emails for %s" % (
    str(problems), bounce_email))
continue
elif problem["reject_reason"] == "soft-bounce":
    # A soft bounce is temporary; let it try to resolve itself.
@@ -226,10 +226,10 @@ def send_android_push_notification(user, data):
#
# That said, recovery is easy: just update the current PDT object to use the new ID.
logging.warning(
    "GCM: Got canonical ref %s replacing %s but new ID not registered! Updating." %
    (new_reg_id, reg_id))
PushDeviceToken.objects.filter(
    token=reg_id, kind=PushDeviceToken.GCM).update(token=new_reg_id)
else:
    # Since we know the new ID is registered in our system we can just drop the old one.
    logging.info("GCM: Got canonical ref %s, dropping %s" % (new_reg_id, reg_id))
@@ -99,10 +99,10 @@ class SimpleQueueClient(object):
def do_publish():
    # type: () -> None
    self.channel.basic_publish(
        exchange='',
        routing_key=queue_name,
        properties=pika.BasicProperties(delivery_mode=2),
        body=body)

statsd.incr("rabbitmq.publish.%s" % (queue_name,))
@@ -7,7 +7,7 @@ from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_exempt, csrf_protect

from zerver.decorator import authenticated_json_view, authenticated_rest_api_view, \
    process_as_post
from zerver.lib.response import json_method_not_allowed, json_unauthorized
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.conf import settings
@@ -26,8 +26,8 @@ def median(data):
return (data[before] + data[after]) / 2.0

users_who_sent_query = Message.objects.select_related("sender") \
    .exclude(sending_client__name__contains="mirror") \
    .exclude(sending_client__name__contains="API")

def active_users():
    # type: () -> Sequence[UserProfile]
@@ -198,11 +198,11 @@ class S3UploadBackend(ZulipUploadBackend):
url = "/user_uploads/%s" % (s3_file_name,)

upload_image_to_s3(
    bucket_name,
    s3_file_name,
    content_type,
    user_profile,
    file_data
)

create_attachment(uploaded_file_name, s3_file_name, user_profile)
@@ -36,10 +36,10 @@ class Command(BaseCommand):
print(session.expire_date, session.get_decoded())
print("")
print("%s has %s active bots that will also be deactivated." % (
    user_profile.email,
    UserProfile.objects.filter(
        is_bot=True, is_active=True, bot_owner=user_profile
    ).count()
))

if not options["for_real"]:
@@ -128,12 +128,12 @@ class Command(BaseCommand):
exit(posix.EX_DATAERR)  # type: ignore # There are no stubs for posix in python 3

queue_json_publish(
    "email_mirror",
    {
        "message": msg_text,
        "rcpt_to": rcpt_to
    },
    lambda x: None
)
else:
    # We're probably running from cron, try to batch-process mail
@@ -31,7 +31,7 @@ def inactive_since(user_profile, cutoff):
# Hasn't used the app in the last 24 business-day hours.
most_recent_visit = [row.last_visit for row in
                     UserActivity.objects.filter(
                         user_profile=user_profile)]

if not most_recent_visit:
    # This person has never used the app.
@@ -110,4 +110,4 @@ in a while.
if inactive_since(user_profile, cutoff):
    queue_digest_recipient(user_profile, cutoff)
    logger.info("%s is inactive, queuing for potential digest" % (
        user_profile.email,))
@@ -63,9 +63,9 @@ def get_display_recipient_by_id(recipient_id, recipient_type, recipient_type_id)
def get_display_recipient(recipient):
    # type: (Recipient) -> Union[Text, List[Dict[str, Any]]]
    return get_display_recipient_by_id(
        recipient.id,
        recipient.type,
        recipient.type_id
    )

def flush_per_request_caches():
@@ -542,8 +542,8 @@ class UserProfile(ModelReprMixin, AbstractBaseUser, PermissionsMixin):
AVATAR_FROM_GRAVATAR = u'G'
AVATAR_FROM_USER = u'U'
AVATAR_SOURCES = (
    (AVATAR_FROM_GRAVATAR, 'Hosted by Gravatar'),
    (AVATAR_FROM_USER, 'Uploaded by user'),
)
avatar_source = models.CharField(default=AVATAR_FROM_GRAVATAR, choices=AVATAR_SOURCES, max_length=1)  # type: Text
avatar_version = models.PositiveSmallIntegerField(default=1)  # type: int
@@ -708,10 +708,10 @@ class Stream(ModelReprMixin, models.Model):
def num_subscribers(self):
    # type: () -> int
    return Subscription.objects.filter(
        recipient__type=Recipient.STREAM,
        recipient__type_id=self.id,
        user_profile__is_active=True,
        active=True
    ).count()

# This is stream information that is sent to clients
@@ -980,7 +980,7 @@ class Message(ModelReprMixin, models.Model):

return (sending_client in ('zulipandroid', 'zulipios', 'zulipdesktop',
                           'website', 'ios', 'android')) or (
    'desktop app' in sending_client)

@staticmethod
def content_has_attachment(content):
@@ -1295,33 +1295,33 @@ class UserPresence(models.Model):
user_statuses = defaultdict(dict)  # type: defaultdict[Any, Dict[Any, Any]]

query = UserPresence.objects.filter(
    user_profile__realm_id=realm_id,
    user_profile__is_active=True,
    user_profile__is_bot=False
).values(
    'client__name',
    'status',
    'timestamp',
    'user_profile__email',
    'user_profile__id',
    'user_profile__enable_offline_push_notifications',
    'user_profile__is_mirror_dummy',
)

mobile_user_ids = [row['user'] for row in PushDeviceToken.objects.filter(
    user__realm_id=1,
    user__is_active=True,
    user__is_bot=False,
).distinct("user").values("user")]

for row in query:
    info = UserPresence.to_presence_dict(
        client_name=row['client__name'],
        status=row['status'],
        dt=row['timestamp'],
        push_enabled=row['user_profile__enable_offline_push_notifications'],
        has_push_devices=row['user_profile__id'] in mobile_user_ids,
        is_mirror_dummy=row['user_profile__is_mirror_dummy'],
    )
    user_statuses[row['user_profile__email']][row['client__name']] = info
@@ -1335,18 +1335,18 @@ class UserPresence(models.Model):

timestamp = datetime_to_timestamp(dt)
return dict(
    client=client_name,
    status=presence_val,
    timestamp=timestamp,
    pushable=(push_enabled and has_push_devices),
)

def to_dict(self):
    # type: () -> Dict[str, Any]
    return UserPresence.to_presence_dict(
        client_name=self.client.name,
        status=self.status,
        dt=self.timestamp
    )

@staticmethod
@@ -228,22 +228,22 @@ class AuthBackendTest(TestCase):
# Test LDAP auth fails when LDAP server rejects password
with mock.patch('django_auth_ldap.backend._LDAPUser._authenticate_user_dn',
                side_effect=_LDAPUser.AuthenticationFailed("Failed")), (
        mock.patch('django_auth_ldap.backend._LDAPUser._check_requirements')), (
        mock.patch('django_auth_ldap.backend._LDAPUser._get_user_attrs',
                   return_value=dict(full_name=['Hamlet']))):
    self.assertIsNone(backend.authenticate(email, password))

# For this backend, we mock the internals of django_auth_ldap
with mock.patch('django_auth_ldap.backend._LDAPUser._authenticate_user_dn'), (
        mock.patch('django_auth_ldap.backend._LDAPUser._check_requirements')), (
        mock.patch('django_auth_ldap.backend._LDAPUser._get_user_attrs',
                   return_value=dict(full_name=['Hamlet']))):
    self.verify_backend(backend, good_kwargs=dict(password=password))

with mock.patch('django_auth_ldap.backend._LDAPUser._authenticate_user_dn'), (
        mock.patch('django_auth_ldap.backend._LDAPUser._check_requirements')), (
        mock.patch('django_auth_ldap.backend._LDAPUser._get_user_attrs',
                   return_value=dict(full_name=['Hamlet']))):
    self.verify_backend(backend, good_kwargs=dict(password=password,
                                                  realm_subdomain='acme'))
@@ -251,9 +251,9 @@ class AuthBackendTest(TestCase):
# With subdomains, authenticating with the right subdomain
# works; using the wrong subdomain doesn't
with mock.patch('django_auth_ldap.backend._LDAPUser._authenticate_user_dn'), (
        mock.patch('django_auth_ldap.backend._LDAPUser._check_requirements')), (
        mock.patch('django_auth_ldap.backend._LDAPUser._get_user_attrs',
                   return_value=dict(full_name=['Hamlet']))):
    self.verify_backend(backend,
                        bad_kwargs=dict(password=password,
                                        realm_subdomain='acme'),
@@ -541,7 +541,7 @@ class GoogleOAuthTest(ZulipTestCase):
csrf_state = urllib.parse.parse_qs(parsed_url.query)['state']

with mock.patch("requests.post", return_value=token_response), (
        mock.patch("requests.get", return_value=account_response)):
    result = self.client_get("/accounts/login/google/done/",
                             dict(state=csrf_state))
return result
@@ -643,8 +643,8 @@ class GoogleSubdomainLoginTest(GoogleOAuthTest):
# type: () -> None
"""If the user doesn't exist yet, Google auth can be used to register an account"""
with self.settings(REALMS_HAVE_SUBDOMAINS=True), (
        mock.patch('zerver.views.auth.get_subdomain', return_value='zulip')), (
        mock.patch('zerver.views.registration.get_subdomain', return_value='zulip')):

    email = "newuser@zulip.com"
    token_response = ResponseMock(200, {'access_token': "unique_token"})
@@ -28,7 +28,7 @@ class TranslationTestCase(ZulipTestCase):
response = getattr(self.client, method)(url, **kwargs)
self.assertEqual(response.status_code, expected_status,
                 msg="Expected %d, received %d for %s to %s" % (
                     expected_status, response.status_code, method, url))
return response

def test_accept_language_header(self):
@@ -83,7 +83,7 @@ class TestGenerateRealmCreationLink(ZulipTestCase):
result = self.client_post(generated_link, {'email': email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(
    "/accounts/send_confirm/%s" % (email,)))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
@@ -1558,10 +1558,10 @@ class AttachmentTest(ZulipTestCase):
sender_email = "hamlet@zulip.com"
user_profile = get_user_profile_by_email(sender_email)
dummy_files = [
    ('zulip.txt', '1/31/4CBjtTLYZhk66pZrF8hnYGwc/zulip.txt'),
    ('temp_file.py', '1/31/4CBjtTLYZhk66pZrF8hnYGwc/temp_file.py'),
    ('abc.py', '1/31/4CBjtTLYZhk66pZrF8hnYGwc/abc.py')
]

for file_name, path_id in dummy_files:
    create_attachment(file_name, path_id, user_profile)
@@ -1630,14 +1630,14 @@ class CheckMessageTest(ZulipTestCase):
an unsubscribed stream"""
parent = get_user_profile_by_email('othello@zulip.com')
bot = do_create_user(
    email='othello-bot@zulip.com',
    password='',
    realm=parent.realm,
    full_name='',
    short_name='',
    active=True,
    bot_type=UserProfile.DEFAULT_BOT,
    bot_owner=parent
)
bot.last_reminder = None
@@ -61,5 +61,5 @@ class RealmAliasTest(ZulipTestCase):
self.assertEqual(get_realm_by_email_domain('user@zulip.com').string_id, 'zulip')
self.assertEqual(get_realm_by_email_domain('user@fakedomain.com'), None)
with self.settings(REALMS_HAVE_SUBDOMAINS = True), (
        self.assertRaises(GetRealmByDomainException)):
    get_realm_by_email_domain('user@zulip.com')
@@ -184,7 +184,7 @@ class PasswordResetTest(ZulipTestCase):
# check the redirect link telling you to check mail for password reset link
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(
    "/accounts/password/reset/done/"))
result = self.client_get(result["Location"])

self.assert_in_response("Check your email to finish the process.", result)
@@ -687,7 +687,7 @@ class EmailUnsubscribeTests(ZulipTestCase):
# Simulate a new user signing up, which enqueues 2 welcome e-mails.
enqueue_welcome_emails(email, "King Hamlet")
self.assertEqual(2, len(ScheduledJob.objects.filter(
    type=ScheduledJob.EMAIL, filter_string__iexact=email)))

# Simulate unsubscribing from the welcome e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "welcome")
@@ -696,7 +696,7 @@ class EmailUnsubscribeTests(ZulipTestCase):
# The welcome email jobs are no longer scheduled.
self.assertEqual(result.status_code, 200)
self.assertEqual(0, len(ScheduledJob.objects.filter(
    type=ScheduledJob.EMAIL, filter_string__iexact=email)))

def test_digest_unsubscribe(self):
    # type: () -> None
@@ -714,7 +714,7 @@ class EmailUnsubscribeTests(ZulipTestCase):
# Enqueue a fake digest email.
send_digest_email(user_profile, "", "", "")
self.assertEqual(1, len(ScheduledJob.objects.filter(
    type=ScheduledJob.EMAIL, filter_string__iexact=email)))

# Simulate unsubscribing from digest e-mails.
unsubscribe_link = one_click_unsubscribe_link(user_profile, "digest")
@@ -726,7 +726,7 @@ class EmailUnsubscribeTests(ZulipTestCase):
user_profile = UserProfile.objects.get(email="hamlet@zulip.com")
self.assertFalse(user_profile.enable_digest_emails)
self.assertEqual(0, len(ScheduledJob.objects.filter(
    type=ScheduledJob.EMAIL, filter_string__iexact=email)))

class RealmCreationTest(ZulipTestCase):
@@ -745,7 +745,7 @@ class RealmCreationTest(ZulipTestCase):
result = self.client_post('/create_realm/', {'email': email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(
    "/accounts/send_confirm/%s" % (email,)))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
@@ -785,7 +785,7 @@ class RealmCreationTest(ZulipTestCase):
result = self.client_post('/create_realm/', {'email': email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(
    "/accounts/send_confirm/%s" % (email,)))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
@@ -865,7 +865,7 @@ class UserSignUpTest(ZulipTestCase):
result = self.client_post('/accounts/home/', {'email': email})
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(
    "/accounts/send_confirm/%s" % (email,)))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
@@ -903,7 +903,7 @@ class UserSignUpTest(ZulipTestCase):

self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(
    "/accounts/send_confirm/%s" % (email,)))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
@@ -944,7 +944,7 @@ class UserSignUpTest(ZulipTestCase):

self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(
    "/accounts/send_confirm/%s" % (email,)))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
@@ -1023,7 +1023,7 @@ class UserSignUpTest(ZulipTestCase):

self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(
    "/accounts/send_confirm/%s" % (email,)))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
@@ -1097,7 +1097,7 @@ class UserSignUpTest(ZulipTestCase):

self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith(
    "/accounts/send_confirm/%s" % (email,)))
result = self.client_get(result["Location"])
self.assert_in_response("Check your email so we can get started.", result)
# Visit the confirmation link.
@@ -415,9 +415,9 @@ class StreamAdminTest(ZulipTestCase):
deletion_events = [e['event'] for e in events if e['event']['type'] == 'subscription']
if subscribed:
    self.assertEqual(deletion_events[0], dict(
        op='remove',
        type='subscription',
        subscriptions=[{'name': active_name, 'stream_id': stream.id}]
    ))
else:
    # You could delete the stream, but you weren't on it so you don't
@@ -435,7 +435,7 @@ class StreamAdminTest(ZulipTestCase):
self.assertTrue(deactivated_stream.invite_only)
self.assertEqual(deactivated_stream.name, deactivated_stream_name)
subscribers = self.users_subscribed_to_stream(
    deactivated_stream_name, realm)
self.assertEqual(subscribers, [])

# It doesn't show up in the list of public streams anymore.
@@ -1478,8 +1478,8 @@ class SubscriptionAPITest(ZulipTestCase):
if isinstance(ev['event']['subscriptions'][0], dict):
    self.assertEqual(ev['event']['op'], 'add')
    self.assertEqual(
        set(ev['event']['subscriptions'][0]['subscribers']),
        set([email1, email2])
    )
else:
    # Check "peer_add" events for streams users were
@@ -1495,9 +1495,9 @@ class SubscriptionAPITest(ZulipTestCase):
with tornado_redirected_to_list(events):
    with queries_captured() as queries:
        self.common_subscribe_to_streams(
            self.test_email,
            streams_to_sub,
            dict(principals=ujson.dumps([self.test_email])),
        )
self.assert_max_length(queries, 8)
@@ -1507,8 +1507,8 @@ class SubscriptionAPITest(ZulipTestCase):
self.assertEqual(add_event['event']['op'], 'add')
self.assertEqual(add_event['users'], [get_user_profile_by_email(self.test_email).id])
self.assertEqual(
    set(add_event['event']['subscriptions'][0]['subscribers']),
    set([email1, email2, self.test_email])
)

self.assertEqual(len(add_peer_event['users']), 16)
@@ -1534,8 +1534,8 @@ class SubscriptionAPITest(ZulipTestCase):
self.assertEqual(add_event['event']['op'], 'add')
self.assertEqual(add_event['users'], [get_user_profile_by_email(email3).id])
self.assertEqual(
    set(add_event['event']['subscriptions'][0]['subscribers']),
    set([email1, email2, email3, self.test_email])
)

# We don't send a peer_add event to othello
@@ -1561,9 +1561,9 @@ class SubscriptionAPITest(ZulipTestCase):
events = []  # type: List[Dict[str, Any]]
with tornado_redirected_to_list(events):
    self.common_subscribe_to_streams(
        self.test_email,
        streams_to_sub,
        dict(principals=ujson.dumps(new_users_to_subscribe)),
    )

add_peer_events = [events[2], events[3]]
@@ -1667,9 +1667,9 @@ class SubscriptionAPITest(ZulipTestCase):
with tornado_redirected_to_list(events):
    with queries_captured() as queries:
        self.common_subscribe_to_streams(
            'starnine@mit.edu',
            streams,
            dict(principals=ujson.dumps(['starnine@mit.edu'])),
        )
# Make sure Zephyr mirroring realms such as MIT do not get
# any tornado subscription events
@@ -1686,9 +1686,9 @@ class SubscriptionAPITest(ZulipTestCase):

with queries_captured() as queries:
    self.common_subscribe_to_streams(
        self.test_email,
        streams,
        dict(principals=ujson.dumps([self.test_email])),
    )
# Make sure we don't make O(streams) queries
self.assert_max_length(queries, 10)
@@ -2120,7 +2120,7 @@ class GetSubscribersTest(ZulipTestCase):
self.assertIn("subscribers", result)
self.assertIsInstance(result["subscribers"], list)
true_subscribers = [user_profile.email for user_profile in self.users_subscribed_to_stream(
    stream_name, realm)]
self.assertEqual(sorted(result["subscribers"]), sorted(true_subscribers))

def make_subscriber_request(self, stream_id, email=None):
@@ -80,9 +80,10 @@ class PointerTest(ZulipTestCase):
class UnreadCountTests(ZulipTestCase):
    def setUp(self):
        # type: () -> None
        self.unread_msg_ids = [self.send_message(
        self.unread_msg_ids = [
            self.send_message(
                "iago@zulip.com", "hamlet@zulip.com", Recipient.PERSONAL, "hello"),
            self.send_message(
            self.send_message(
                "iago@zulip.com", "hamlet@zulip.com", Recipient.PERSONAL, "hello2")]

    # Sending a new message results in unread UserMessages being created
@@ -542,15 +542,15 @@ class WorkerTest(TestCase):

        user = get_user_profile_by_email('hamlet@zulip.com')
        UserActivity.objects.filter(
                user_profile = user.id,
                client = get_client('ios')
            user_profile = user.id,
            client = get_client('ios')
        ).delete()

        data = dict(
                user_profile_id = user.id,
                client = 'ios',
                time = time.time(),
                query = 'send_message'
            user_profile_id = user.id,
            client = 'ios',
            time = time.time(),
            query = 'send_message'
        )
        fake_client.queue.append(('user_activity', data))
@@ -559,8 +559,8 @@ class WorkerTest(TestCase):
        worker.setup()
        worker.start()
        activity_records = UserActivity.objects.filter(
                user_profile = user.id,
                client = get_client('ios')
            user_profile = user.id,
            client = get_client('ios')
        )
        self.assertTrue(len(activity_records), 1)
        self.assertTrue(activity_records[0].count, 1)
@@ -984,9 +984,9 @@ class BotTest(ZulipTestCase):
        do_make_stream_private(user_profile.realm, "Denmark")

        bot_info = {
                'full_name': 'The Bot of Hamlet',
                'short_name': 'hambot',
                'default_sending_stream': 'Denmark',
            'full_name': 'The Bot of Hamlet',
            'short_name': 'hambot',
            'default_sending_stream': 'Denmark',
        }
        result = self.client_post("/json/bots", bot_info)
        self.assert_json_error(result, 'Insufficient permission')
@@ -1047,9 +1047,9 @@ class BotTest(ZulipTestCase):

        self.assert_num_bots_equal(0)
        bot_info = {
                'full_name': 'The Bot of Hamlet',
                'short_name': 'hambot',
                'default_events_register_stream': 'Denmark',
            'full_name': 'The Bot of Hamlet',
            'short_name': 'hambot',
            'default_events_register_stream': 'Denmark',
        }
        result = self.client_post("/json/bots", bot_info)
        self.assert_json_error(result, 'Insufficient permission')
@@ -812,8 +812,8 @@ def send_notification_http(data):
    # type: (Mapping[str, Any]) -> None
    if settings.TORNADO_SERVER and not settings.RUNNING_INSIDE_TORNADO:
        requests_client.post(settings.TORNADO_SERVER + '/notify_tornado', data=dict(
                data = ujson.dumps(data),
                secret = settings.SHARED_SECRET))
            data = ujson.dumps(data),
            secret = settings.SHARED_SECRET))
    else:
        process_notification(data)
@@ -305,7 +305,7 @@ def respond_send_message(data):
# authentication scheme.
sockjs_router = sockjs.tornado.SockJSRouter(SocketConnection, "/sockjs",
                                            {'sockjs_url': 'https://%s/static/third/sockjs/sockjs-0.3.4.js' % (
                                                    settings.EXTERNAL_HOST,),
                                                settings.EXTERNAL_HOST,),
                                             'disabled_transports': ['eventsource', 'htmlfile']})

def get_sockjs_router():
    # type: () -> sockjs.tornado.SockJSRouter
@@ -61,7 +61,7 @@ def approximate_unread_count(user_profile):
    # type: (UserProfile) -> int
    not_in_home_view_recipients = [sub.recipient.id for sub in
                                   Subscription.objects.filter(
                                           user_profile=user_profile, in_home_view=False)]
                                       user_profile=user_profile, in_home_view=False)]

    # TODO: We may want to exclude muted messages from this count.
    # It was attempted in the past, but the original attempt
@@ -126,8 +126,8 @@ def api_endpoint_docs(request):
            for lang in call.get('example_' + example_type, []):
                langs.add(lang)
    return render_to_response(
            'zerver/api_endpoints.html', {
                'content': calls,
                'langs': langs,
            },
            request=request)
        'zerver/api_endpoints.html', {
            'content': calls,
            'langs': langs,
        },
        request=request)
@@ -132,7 +132,7 @@ def accounts_register(request):
            # zephyr mirroring realm.
            hesiod_name = compute_mit_user_fullname(email)
            form = RegistrationForm(
                    initial={'full_name': hesiod_name if "@" not in hesiod_name else ""})
                initial={'full_name': hesiod_name if "@" not in hesiod_name else ""})
            name_validated = True
        elif settings.POPULATE_PROFILE_VIA_LDAP:
            for backend in get_backends():
@@ -251,7 +251,7 @@ def remove_subscriptions_backend(request, user_profile,

    if principals:
        people_to_unsub = set(principal_to_user_profile(
                user_profile, principal) for principal in principals)
            user_profile, principal) for principal in principals)
    else:
        people_to_unsub = set([user_profile])
@@ -484,8 +484,8 @@ def stream_exists_backend(request, user_profile, stream_id, autosubscribe):
        if autosubscribe:
            bulk_add_subscriptions([stream], [user_profile])
        result["subscribed"] = is_active_subscriber(
                user_profile=user_profile,
                recipient=recipient)
            user_profile=user_profile,
            recipient=recipient)

        return json_success(result)  # results are ignored for HEAD requests
    return json_response(data=result, status=404)
@@ -265,11 +265,11 @@ def add_bot_backend(request, user_profile, full_name=REQ(), short_name=REQ(),
                                 default_events_register_stream=default_events_register_stream,
                                 default_all_public_streams=default_all_public_streams)
    json_result = dict(
            api_key=bot_profile.api_key,
            avatar_url=avatar_url(bot_profile),
            default_sending_stream=get_stream_name(bot_profile.default_sending_stream),
            default_events_register_stream=get_stream_name(bot_profile.default_events_register_stream),
            default_all_public_streams=bot_profile.default_all_public_streams,
        api_key=bot_profile.api_key,
        avatar_url=avatar_url(bot_profile),
        default_sending_stream=get_stream_name(bot_profile.default_sending_stream),
        default_events_register_stream=get_stream_name(bot_profile.default_events_register_stream),
        default_all_public_streams=bot_profile.default_all_public_streams,
    )
    return json_success(json_result)
@@ -45,10 +45,10 @@ def github_pull_request_content(payload):
        get_pull_request_or_issue_assignee(pull_request)
    )
    return get_pull_request_event_message(
            payload['sender']['login'],
            action,
            pull_request['html_url'],
            pull_request['number']
        payload['sender']['login'],
        action,
        pull_request['html_url'],
        pull_request['number']
    )

def github_issues_content(payload):
@@ -66,10 +66,10 @@ def github_issues_content(payload):
        get_pull_request_or_issue_assignee(issue)
    )
    return get_issue_event_message(
            payload['sender']['login'],
            action,
            issue['html_url'],
            issue['number'],
        payload['sender']['login'],
        action,
        issue['html_url'],
        issue['number'],
    )

def github_object_commented_content(payload, type):
@@ -100,10 +100,10 @@ def get_pull_request_or_issue_assignee(object_payload):
def get_pull_request_or_issue_subject(repository, payload_object, type):
    # type: (Mapping[Text, Any], Mapping[Text, Any], Text) -> Text
    return SUBJECT_WITH_PR_OR_ISSUE_INFO_TEMPLATE.format(
            repo=repository['name'],
            type=type,
            id=payload_object['number'],
            title=payload_object['title']
        repo=repository['name'],
        type=type,
        id=payload_object['number'],
        title=payload_object['title']
    )

def github_generic_subject(noun, topic_focus, blob):
@@ -30,11 +30,11 @@ def guess_zulip_user_from_jira(jira_username, realm):
        # We search a user's full name, short name,
        # and beginning of email address
        user = UserProfile.objects.filter(
                Q(full_name__iexact=jira_username) |
                Q(short_name__iexact=jira_username) |
                Q(email__istartswith=jira_username),
                is_active=True,
                realm=realm).order_by("id")[0]
            Q(full_name__iexact=jira_username) |
            Q(short_name__iexact=jira_username) |
            Q(email__istartswith=jira_username),
            is_active=True,
            realm=realm).order_by("id")[0]
        return user
    except IndexError:
        return None
@@ -148,8 +148,8 @@ class LibratoWebhookHandler(LibratoWebhookParser):
        metric_name, recorded_at = self.parse_violation(violation)
        metric_condition_template = u"\n>Metric `{metric_name}`, {summary_function} was {condition_type} {threshold}"
        content = metric_condition_template.format(
                metric_name=metric_name, summary_function=summary_function, condition_type=condition_type,
                threshold=threshold)
            metric_name=metric_name, summary_function=summary_function, condition_type=condition_type,
            threshold=threshold)
        if duration:
            content += u" by {duration}s".format(duration=duration)
        content += u", recorded at {recorded_at}".format(recorded_at=recorded_at)
@@ -37,7 +37,7 @@ def api_stash_webhook(request, user_profile, payload=REQ(argument_type='body'),
    content = "`%s` was pushed to **%s** in **%s/%s** with:\n\n" % (
        head_ref, branch_name, project_name, repo_name)
    content += "\n".join("* `%s`: %s" % (
            commit[0], commit[1]) for commit in commits)
        commit[0], commit[1]) for commit in commits)

    check_send_message(user_profile, get_client("ZulipStashWebhook"), "stream",
                       [stream], subject, content)
@@ -21,11 +21,11 @@ def guess_zulip_user_from_teamcity(teamcity_username, realm):
        # We search a user's full name, short name,
        # and beginning of email address
        user = UserProfile.objects.filter(
                Q(full_name__iexact=teamcity_username) |
                Q(short_name__iexact=teamcity_username) |
                Q(email__istartswith=teamcity_username),
                is_active=True,
                realm=realm).order_by("id")[0]
            Q(full_name__iexact=teamcity_username) |
            Q(short_name__iexact=teamcity_username) |
            Q(email__istartswith=teamcity_username),
            is_active=True,
            realm=realm).order_by("id")[0]
        return user
    except IndexError:
        return None
@@ -386,7 +386,7 @@ def send_messages(data):
            stream = Stream.objects.get(id=message.recipient.type_id)
            # Pick a random subscriber to the stream
            message.sender = random.choice(Subscription.objects.filter(
                    recipient=message.recipient)).user_profile
                recipient=message.recipient)).user_profile
            message.subject = stream.name + Text(random.randint(1, 3))
            saved_data['subject'] = message.subject
@@ -278,7 +278,7 @@ TEMPLATES = [
    {
        'BACKEND': 'zproject.jinja2.backends.Jinja2',
        'DIRS': [
                os.path.join(DEPLOY_ROOT, 'templates'),
            os.path.join(DEPLOY_ROOT, 'templates'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
@@ -299,7 +299,7 @@ TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
                os.path.join(DEPLOY_ROOT, 'django_templates'),
            os.path.join(DEPLOY_ROOT, 'django_templates'),
        ],
        'APP_DIRS': False,
        'OPTIONS': {
@@ -384,12 +384,12 @@ DATABASES = {"default": {
if DEVELOPMENT:
    LOCAL_DATABASE_PASSWORD = get_secret("local_database_password")
    DATABASES["default"].update({
            'PASSWORD': LOCAL_DATABASE_PASSWORD,
            'HOST': 'localhost'
        'PASSWORD': LOCAL_DATABASE_PASSWORD,
        'HOST': 'localhost'
    })
elif REMOTE_POSTGRES_HOST != '':
    DATABASES['default'].update({
            'HOST': REMOTE_POSTGRES_HOST,
        'HOST': REMOTE_POSTGRES_HOST,
    })
    if get_secret("postgres_password") is not None:
        DATABASES['default'].update({
@@ -488,9 +488,9 @@ if DEVELOPMENT:
    # Use fast password hashing for creating testing users when not
    # PRODUCTION. Saves a bunch of time.
    PASSWORD_HASHERS = (
            'django.contrib.auth.hashers.SHA1PasswordHasher',
            'django.contrib.auth.hashers.PBKDF2PasswordHasher'
            )
        'django.contrib.auth.hashers.SHA1PasswordHasher',
        'django.contrib.auth.hashers.PBKDF2PasswordHasher'
    )
    # Also we auto-generate passwords for the default users which you
    # can query using ./manage.py print_initial_password
    INITIAL_PASSWORD_SALT = get_secret("initial_password_salt")
@@ -551,12 +551,12 @@ INTERNAL_BOTS = [{'var_name': 'NOTIFICATION_BOT',

if PRODUCTION:
    INTERNAL_BOTS += [
            {'var_name': 'NAGIOS_STAGING_SEND_BOT',
             'email_template': 'nagios-staging-send-bot@%s',
             'name': 'Nagios Staging Send Bot'},
            {'var_name': 'NAGIOS_STAGING_RECEIVE_BOT',
             'email_template': 'nagios-staging-receive-bot@%s',
             'name': 'Nagios Staging Receive Bot'},
        {'var_name': 'NAGIOS_STAGING_SEND_BOT',
         'email_template': 'nagios-staging-send-bot@%s',
         'name': 'Nagios Staging Send Bot'},
        {'var_name': 'NAGIOS_STAGING_RECEIVE_BOT',
         'email_template': 'nagios-staging-receive-bot@%s',
         'name': 'Nagios Staging Receive Bot'},
    ]

INTERNAL_BOT_DOMAIN = "zulip.com"
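For context: E126 is pycodestyle's "continuation line over-indented for hanging indent" check, and nearly every hunk above makes the same mechanical change, reducing over-indented continuation lines to an accepted hanging or visual indent. A minimal sketch of the pattern, using hypothetical names (pep8/pycodestyle typically keeps E126 in its default-ignore list, so a project has to select the check explicitly to enforce it):

def some_function(first_argument, second_argument):
    return (first_argument, second_argument)

# Before: the continuation lines carry an extra indentation level, which
# pep8 reports as E126 when the check is enabled.
result = some_function(
        "first value",
        "second value")

# After: a single 4-space hanging indent, the style this commit applies tree-wide.
result = some_function(
    "first value",
    "second value")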