mirror of https://github.com/zulip/zulip.git

commit d2fa058cc1 (parent 6eb39c8668)

    Fix some typos (most of them found and fixed by codespell).

    Signed-off-by: Stefan Weil <sw@weilnetz.de>
@@ -214,7 +214,7 @@ def do_replace_payment_source(user: UserProfile, stripe_token: str,
 for stripe_invoice in stripe.Invoice.list(
 billing='charge_automatically', customer=stripe_customer.id, status='open'):
 # The user will get either a receipt or a "failed payment" email, but the in-app
-# messaging could be clearer here (e.g. it could explictly tell the user that there
+# messaging could be clearer here (e.g. it could explicitly tell the user that there
 # were payment(s) and that they succeeded or failed).
 # Worth fixing if we notice that a lot of cards end up failing at this step.
 stripe.Invoice.pay(stripe_invoice)
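The hunk above sits in a loop that walks a customer's open, automatically-charged invoices and pays each one. As a rough standalone sketch of the same idea, assuming the official stripe Python package and leaving out the error handling and the billing-mode filter real billing code needs (the helper name is illustrative, not Zulip's actual code):

    import stripe  # assumes the stripe-python package and a configured API key

    def pay_open_invoices(customer_id: str) -> None:
        # Illustrative sketch: list the customer's open invoices and try to
        # pay each one; production code would also catch card/payment errors.
        for invoice in stripe.Invoice.list(customer=customer_id, status='open'):
            invoice.pay()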
@@ -1093,7 +1093,7 @@ class BillingHelpersTest(ZulipTestCase):
 datetime(2021, 2, 28, 1, 2, 3).replace(tzinfo=timezone_utc)]
 with self.assertRaises(AssertionError):
 add_months(anchor, -1)
-# Explictly test add_months for each value of MAX_DAY_FOR_MONTH and
+# Explicitly test add_months for each value of MAX_DAY_FOR_MONTH and
 # for crossing a year boundary
 for i, boundary in enumerate(period_boundaries):
 self.assertEqual(add_months(anchor, i), boundary)
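The test above drives an add_months helper across a list of expected period boundaries, checks that negative offsets are rejected, and exercises every value of MAX_DAY_FOR_MONTH plus a year rollover. A minimal sketch of how such a helper typically behaves, assuming the month-end clamping those boundaries imply (an illustration, not Zulip's actual implementation):

    import calendar
    from datetime import datetime

    def add_months(anchor: datetime, months: int) -> datetime:
        # Illustrative sketch: move `anchor` forward by `months`, clamping the
        # day to the last valid day of the target month (Jan 31 + 1 -> Feb 28).
        assert months >= 0
        year = anchor.year + (anchor.month - 1 + months) // 12
        month = (anchor.month - 1 + months) % 12 + 1
        day = min(anchor.day, calendar.monthrange(year, month)[1])
        return anchor.replace(year=year, month=month, day=day)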
@@ -328,8 +328,8 @@ run_test('validate_stream_message', () => {
 });

 run_test('test_validate_stream_message_post_policy', () => {
-// This test is in continuation with test_validate but it has been seperated out
-// for better readabilty. Their relative position of execution should not be changed.
+// This test is in continuation with test_validate but it has been separated out
+// for better readability. Their relative position of execution should not be changed.
 // Although the position with respect to test_validate_stream_message does not matter
 // as `get_stream_post_policy` is reset at the end.
 page_params.is_admin = false;

@@ -766,7 +766,7 @@ with_overrides(function (override) {
 });

 with_overrides(function (override) {
-// attachements
+// attachments
 const event = event_fixtures.attachment;
 global.with_stub(function (stub) {
 override('attachments_ui.update_attachments', stub.f);

@@ -1524,7 +1524,7 @@ with_overrides(function (override) {
 });

 with_overrides(function (override) {
-// attachements
+// attachments
 let event = event_fixtures.user_status__set_away;
 global.with_stub(function (stub) {
 override('activity.on_set_away', stub.f);

@@ -303,7 +303,7 @@ run_test('narrow_to_compose_target', () => {
 {operator: 'pm-with', operand: 'alice@example.com,ray@example.com'},
 ]);

-// Test with some inavlid persons
+// Test with some invalid persons
 global.compose_state.private_message_recipient = () => 'alice@example.com,random,ray@example.com';
 args.called = false;
 narrow.to_compose_target();

@@ -312,7 +312,7 @@ run_test('narrow_to_compose_target', () => {
 {operator: 'is', operand: 'private'},
 ]);

-// Test with all inavlid persons
+// Test with all invalid persons
 global.compose_state.private_message_recipient = () => 'alice,random,ray';
 args.called = false;
 narrow.to_compose_target();

@@ -78,7 +78,7 @@ run_test('message_is_notifiable', () => {
 // Not notifiable because it was sent by the current user
 assert.equal(notifications.message_is_notifiable(message), false);

-// Case 2: If the user has already been sent a notificaton about this message,
+// Case 2: If the user has already been sent a notification about this message,
 // DO NOT notify the user
 // In this test, all other circumstances should trigger notification
 // EXCEPT notification_sent, which should trump them

@@ -763,7 +763,7 @@ run_test('get_people_for_search_bar', () => {

 // As long as there are 5+ results among the user_ids
 // in message_store, we will get a small result and not
-// seach all people.
+// search all people.
 assert.equal(small_results.length, 6);
 });

@@ -794,7 +794,7 @@ run_test('set_up', () => {
 const stub_render_notifications_stream_ui = settings_org.render_notifications_stream_ui;
 settings_org.render_notifications_stream_ui = noop;
 $("#id_realm_message_content_edit_limit_minutes").set_parent($.create('<stub edit limit parent>'));
-$("#id_realm_message_content_delete_limit_minutes").set_parent($.create('<stub delete limti parent>'));
+$("#id_realm_message_content_delete_limit_minutes").set_parent($.create('<stub delete limit parent>'));
 $("#message_content_in_email_notifications_label").set_parent($.create('<stub in-content setting checkbox>'));
 $("#enable_digest_emails_label").set_parent($.create('<stub digest setting checkbox>'));
 $("#id_realm_digest_weekday").set_parent($.create('<stub digest weekday setting dropdown>'));

@@ -529,7 +529,7 @@ run_test('separators_only_pinned_and_dormant', () => {
 subscribed: true,
 };
 add_row(RomeSub);
-// dorment stream
+// dormant stream
 const DenmarkSub = {
 name: 'Denmark',
 stream_id: 3000,

@@ -250,7 +250,7 @@ run_test('sort_recipients', () => {
 stream_data.update_calculated_fields(dev_sub);
 stream_data.update_calculated_fields(linux_sub);

-// For spliting based on whether a PM was sent
+// For splitting based on whether a PM was sent
 global.pm_conversations.set_partner(5);
 global.pm_conversations.set_partner(6);
 global.pm_conversations.set_partner(2);

@@ -502,7 +502,7 @@ run_test('highlight_with_escaping', () => {
 });

 run_test('render_person when emails hidden', () => {
-// Test render_person with regular person, under hidden email visiblity case
+// Test render_person with regular person, under hidden email visibility case
 page_params.is_admin = false;
 let rendered = false;
 global.stub_templates(function (template_name, args) {

@@ -274,7 +274,7 @@ run_test('basics', () => {
 });
 }

-// User ids of poeple in compose narrow doesn't change and is same as stat.current_recipent
+// User ids of people in compose narrow doesn't change and is same as stat.current_recipient
 // so counts of function should increase except stop_last_notification
 typing_status.update(worker, typing.get_recipient());
 assert.deepEqual(call_count.maybe_ping_server, 1);

@@ -60,7 +60,7 @@ run_test('basics', () => {
 blueslip.clear_test_data();

 // Let's repeat the above procedue with warnings. Unlike errors,
-// warnings shoudln't stop the code execution, and thus, the
+// warnings shouldn't stop the code execution, and thus, the
 // behaviour is slightly different.

 function throw_a_warning() {

@@ -167,7 +167,7 @@ run_test('events', () => {
 stopPropagation: function () {},
 };

-// Now call the hander.
+// Now call the handler.
 red_handler_func(stub_event);

 // And verify it did what it was supposed to do.

@@ -12,7 +12,7 @@
 * assert.notEqual().
 *
 * There is a default _output_formatter used to create the
-* AssertionError error message; this function can be overriden using
+* AssertionError error message; this function can be overridden using
 * the exported setFormatter() function below.
 *
 * The HTML passed to the _output_formatter is not the original HTML, but

@@ -1,6 +1,6 @@
 # This class includes all the modules you need to install/run a Zulip installation
 # in a single container (without the database, memcached, redis services).
-# The database, memcached, redis services need to be run in seperate containers.
+# The database, memcached, redis services need to be run in separate containers.
 # Through this split of services, it is easier to scale the services to the needs.
 class zulip::dockervoyager {
 include zulip::base

@@ -26,7 +26,7 @@ Unattended-Upgrade::Package-Blacklist {
 // is running is possible (with a small delay)
 //Unattended-Upgrade::MinimalSteps "true";

-// Install all unattended-upgrades when the machine is shuting down
+// Install all unattended-upgrades when the machine is shutting down
 // instead of doing it in the background while the machine is running
 // This will (obviously) make shutdown slower
 //Unattended-Upgrade::InstallOnShutdown "true";

@@ -14,8 +14,8 @@
 #%# family=auto
 #%# capabilities=autoconf

-# If run with the "autoconf"-parameter, give our opinion on wether we
-# should be run on this system or not. This is optinal, and only used by
+# If run with the "autoconf"-parameter, give our opinion on whether we
+# should be run on this system or not. This is optional, and only used by
 # munin-config. In the case of this plugin, we should most probably
 # always be included.


@@ -14,8 +14,8 @@
 #%# family=auto
 #%# capabilities=autoconf

-# If run with the "autoconf"-parameter, give our opinion on wether we
-# should be run on this system or not. This is optinal, and only used by
+# If run with the "autoconf"-parameter, give our opinion on whether we
+# should be run on this system or not. This is optional, and only used by
 # munin-config. In the case of this plugin, we should most probably
 # always be included.


@@ -14,8 +14,8 @@
 #%# family=auto
 #%# capabilities=autoconf

-# If run with the "autoconf"-parameter, give our opinion on wether we
-# should be run on this system or not. This is optinal, and only used by
+# If run with the "autoconf"-parameter, give our opinion on whether we
+# should be run on this system or not. This is optional, and only used by
 # munin-config. In the case of this plugin, we should most probably
 # always be included.


@@ -14,8 +14,8 @@
 #%# family=auto
 #%# capabilities=autoconf

-# If run with the "autoconf"-parameter, give our opinion on wether we
-# should be run on this system or not. This is optinal, and only used by
+# If run with the "autoconf"-parameter, give our opinion on whether we
+# should be run on this system or not. This is optional, and only used by
 # munin-config. In the case of this plugin, we should most probably
 # always be included.


@@ -14,8 +14,8 @@
 #%# family=auto
 #%# capabilities=autoconf

-# If run with the "autoconf"-parameter, give our opinion on wether we
-# should be run on this system or not. This is optinal, and only used by
+# If run with the "autoconf"-parameter, give our opinion on whether we
+# should be run on this system or not. This is optional, and only used by
 # munin-config. In the case of this plugin, we should most probably
 # always be included.


@@ -14,8 +14,8 @@
 #%# family=auto
 #%# capabilities=autoconf

-# If run with the "autoconf"-parameter, give our opinion on wether we
-# should be run on this system or not. This is optinal, and only used by
+# If run with the "autoconf"-parameter, give our opinion on whether we
+# should be run on this system or not. This is optional, and only used by
 # munin-config. In the case of this plugin, we should most probably
 # always be included.


@@ -12,8 +12,8 @@
 #%# family=auto
 #%# capabilities=autoconf

-# If run with the "autoconf"-parameter, give our opinion on wether we
-# should be run on this system or not. This is optinal, and only used by
+# If run with the "autoconf"-parameter, give our opinion on whether we
+# should be run on this system or not. This is optional, and only used by
 # munin-config. In the case of this plugin, we should most probably
 # always be included.


@@ -877,7 +877,7 @@ process_performance_data=0
 # performance data files are opened in write ("w") or append ("a")
 # mode. If you want to use named pipes, you should use the special
 # pipe ("p") mode which avoid blocking at startup, otherwise you will
-# likely want the defult append ("a") mode.
+# likely want the default append ("a") mode.

 #host_perfdata_file_mode=a
 #service_perfdata_file_mode=a

@@ -906,7 +906,7 @@ process_performance_data=0


 # HOST AND SERVICE PERFORMANCE DATA PROCESS EMPTY RESULTS
-# THese options determine wether the core will process empty perfdata
+# THese options determine whether the core will process empty perfdata
 # results or not. This is needed for distributed monitoring, and intentionally
 # turned on by default.
 # If you don't require empty perfdata - saving some cpu cycles

@@ -142,7 +142,7 @@ for device in macs.values():
 ip, '-j', 'MARK', '--set-mark', str(device_number)])

 for throwaway in range(2):
-# Don't freak out if this doens't work.
+# Don't freak out if this doesn't work.
 subprocess.call(
 ['/sbin/ip', 'route', 'del', '10.0.0.0/8'])


@@ -399,7 +399,7 @@ def file_or_package_hash_updated(paths, hash_name, is_force, package_versions=[]
 with open(path, 'rb') as file_to_hash:
 sha1sum.update(file_to_hash.read())

-# The ouput of tools like build_pygments_data depends
+# The output of tools like build_pygments_data depends
 # on the version of some pip packages as well.
 for package_version in package_versions:
 sha1sum.update(package_version.encode("utf-8"))
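The hunk above is from a cache-invalidation helper that folds both file contents and the versions of relevant pip packages into one digest, so that changing either invalidates the cache. A self-contained sketch of that idea with hashlib (the function name and signature are illustrative):

    import hashlib
    from typing import List

    def combined_digest(paths: List[str], package_versions: List[str]) -> str:
        # Illustrative sketch: the digest changes if any listed file's contents
        # change or if any relevant package version changes.
        sha1sum = hashlib.sha1()
        for path in paths:
            with open(path, 'rb') as file_to_hash:
                sha1sum.update(file_to_hash.read())
        for package_version in package_versions:
            sha1sum.update(package_version.encode("utf-8"))
        return sha1sum.hexdigest()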
@@ -271,7 +271,7 @@ exports.get_item = function (user_id) {
 };

 function user_is_recently_active(user_id) {
-// return true if the user has a green/orange cirle
+// return true if the user has a green/orange circle
 return exports.level(user_id) <= 2;
 }


@@ -685,10 +685,10 @@ exports.handle_keydown = function (event, textarea) {
 const position = textarea.caret();
 const txt = document.getElementById(textarea[0].id);

-// Include selected text in between [] parantheses and insert '(url)'
+// Include selected text in between [] parentheses and insert '(url)'
 // where "url" should be automatically selected.
 // Position of cursor depends on whether browser supports exec
-// command or not. So set cursor position accrodingly.
+// command or not. So set cursor position accordingly.
 if (range.length > 0) {
 if (document.queryCommandEnabled('insertText')) {
 txt.selectionStart = position - 4;

@@ -1039,7 +1039,7 @@ exports.initialize = function () {
 e.preventDefault();

 let target_textarea;
-// The data-message-id atribute is only present in the video
+// The data-message-id attribute is only present in the video
 // call icon present in the message edit form. If present,
 // the request is for the edit UI; otherwise, it's for the
 // compose box.

@@ -777,7 +777,7 @@ exports.content_typeahead_selected = function (item, event) {
 beginning = beginning.substring(0, backticks) + item;
 }
 } else if (this.completing === 'topic_jump') {
-// Put the cursor at the end of immediately preceeding stream mention syntax,
+// Put the cursor at the end of immediately preceding stream mention syntax,
 // just before where the `**` at the end of the syntax. This will delete that
 // final ** and set things up for the topic_list typeahead.
 const index = beginning.lastIndexOf('**');

@@ -644,12 +644,12 @@ exports.register_click_handlers = function () {
 $("body").on("click", "#emoji_map", function (e) {
 e.preventDefault();
 e.stopPropagation();
-// The data-message-id atribute is only present in the emoji icon present in
+// The data-message-id attribute is only present in the emoji icon present in
 // the message edit form. So the following check will return false if this
 // event was not fired from message edit form.
 if ($(this).attr("data-message-id") !== undefined) {
 // Store data-message-id value in global variable edit_message_id so that
-// its value can be further used to correclty find the message textarea element.
+// its value can be further used to correctly find the message textarea element.
 edit_message_id = $(this).attr("data-message-id");
 } else {
 edit_message_id = null;

@@ -212,7 +212,7 @@ exports.create = function (opts) {

 // this sends a flag that the operation wasn't completely successful,
 // which in this case is defined as some of the pills not autofilling
-// correclty.
+// correctly.
 if (drafts.length > 0) {
 return false;
 }

@@ -79,7 +79,7 @@ const funcs = {
 canvas.addEventListener("wheel", function (e) {
 e.preventDefault();

-// this is to reverese scrolling directions for the image.
+// this is to reverse scrolling directions for the image.
 let delta = meta.direction * e.deltaY;

 if (e.deltaMode === DELTA_MODE.LINE) {

@@ -86,7 +86,7 @@ exports.create = function ($container, list, opts) {
 const prototype = {
 // Reads the provided list (in the scope directly above)
 // and renders the next block of messages automatically
-// into the specified contianer.
+// into the specified container.
 render: function (load_count) {
 load_count = load_count || opts.load_count || DEFAULTS.LOAD_COUNT;


@@ -263,7 +263,7 @@ exports.get_frontfill_anchor = function (msg_list) {
 exports.maybe_load_older_messages = function (opts) {
 // This function gets called when you scroll to the top
 // of your window, and you want to get messages older
-// than what the browers originally fetched.
+// than what the browsers originally fetched.
 const msg_list = opts.msg_list;
 if (!msg_list.fetch_status.can_load_older_messages()) {
 // We may already be loading old messages or already

@@ -301,7 +301,7 @@ exports.do_backfill = function (opts) {
 exports.maybe_load_newer_messages = function (opts) {
 // This function gets called when you scroll to the top
 // of your window, and you want to get messages newer
-// than what the browers originally fetched.
+// than what the browsers originally fetched.
 const msg_list = opts.msg_list;

 if (!msg_list.fetch_status.can_load_newer_messages()) {

@@ -872,7 +872,7 @@ function show_search_query() {

 // if query contains stop words, it is enclosed by a <del> tag
 if (page_params.stop_words.includes(query_word)) {
-// stop_words do not need sanitization so this is unnecesary but it is fail-safe.
+// stop_words do not need sanitization so this is unnecessary but it is fail-safe.
 search_string_display.append($('<del>').text(query_word));
 query_contains_stop_words = true;
 } else {

@@ -90,7 +90,7 @@ exports.page_up_the_right_amount = function () {
 // because we can't rely on the browser to account for certain
 // page elements, like the compose box, that sit in fixed
 // positions above the message pane. For other scrolling
-// related adjustements, try to make those happen in the
+// related adjustments, try to make those happen in the
 // scroll handlers, not here.
 const delta = amount_to_paginate();
 message_viewport.scrollTop(message_viewport.scrollTop() - delta);

@@ -267,7 +267,7 @@ exports.notify_above_composebox = function (note, link_class, link_msg_id, link_

 if (window.electron_bridge !== undefined) {
 // The code below is for sending a message received from notification reply which
-// is often refered to as inline reply feature. This is done so desktop app doesn't
+// is often referred to as inline reply feature. This is done so desktop app doesn't
 // have to depend on channel.post for setting crsf_token and narrow.by_topic
 // to narrow to the message being sent.
 window.electron_bridge.send_notification_reply_message_supported = true;

@@ -100,7 +100,7 @@ exports.open_modal = function (name) {
 $("#" + name).modal("show").attr("aria-hidden", false);
 // Disable background mouse events when modal is active
 $('.overlay.show').attr("style", "pointer-events: none");
-// Remove previous alert messsages from modal, if exists.
+// Remove previous alert messages from modal, if exists.
 $("#" + name).find(".alert").hide();
 $("#" + name).find(".alert-notification").html("");
 };

@@ -143,7 +143,7 @@ function update_url() {
 /* Construct the URL that the webhook should be targeting, using
 the bot's API key and the integration name. The stream and topic
 are both optional, and for the sake of completeness, it should be
-noted that the topic is irrelavent without specifying the
+noted that the topic is irrelevant without specifying the
 stream. */
 const url_field = $("#URL")[0];


@@ -74,7 +74,7 @@ function update_ui_and_send_reaction_ajax(message_id, reaction_info) {
 success: function () {},
 error: function (xhr) {
 const response = channel.xhr_error_message("Error sending reaction", xhr);
-// Errors are somewhat commmon here, due to race conditions
+// Errors are somewhat common here, due to race conditions
 // where the user tries to add/remove the reaction when there is already
 // an in-flight request. We eventually want to make this a blueslip
 // error, rather than a warning, but we need to implement either

@@ -453,7 +453,7 @@ exports.on_load_success = function (realm_people_data) {
 } else {
 const new_profile_data = [];
 $("#user-info-form-modal .custom_user_field_value").each(function () {
-// Remove duplicate datepicker input element genearted flatpicker library
+// Remove duplicate datepicker input element generated flatpicker library
 if (!$(this).hasClass("form-control")) {
 new_profile_data.push({
 id: parseInt($(this).closest(".custom_user_field").attr("data-field-id"), 10),

@@ -240,7 +240,7 @@ exports.new_stream_clicked = function (stream_name) {
 // focus the button on that page, the entire app view jumps over to
 // the other tab, and the animation breaks.
 // it is unclear whether this is a browser bug or "feature", however what
-// is clear is that this shoudn't be touched unless you're also changing
+// is clear is that this shouldn't be touched unless you're also changing
 // the mobile @media query at 700px.
 if (window.innerWidth > 700) {
 $('#create_stream_name').focus();

@@ -23,7 +23,7 @@ exports.build_widget = function (
 upload_button, // jQuery button to open file dialog
 max_file_upload_size
 ) {
-// default value of max upladed file size
+// default value of max uploaded file size
 max_file_upload_size = max_file_upload_size || default_max_file_size;

 function accept(file) {

@@ -109,7 +109,7 @@ exports.build_direct_upload_widget = function (
 upload_function,
 max_file_upload_size
 ) {
-// default value of max upladed file size
+// default value of max uploaded file size
 max_file_upload_size = max_file_upload_size || default_max_file_size;
 function accept() {
 input_error.hide();

@@ -378,7 +378,7 @@ on a dark background, and don't change the dark labels dark either. */
 }

 #invite_user_form .modal-footer {
-// no transperancy prevents overlap issues
+// no transparency prevents overlap issues
 background-color: hsl(211, 28%, 14%);
 }


@@ -297,7 +297,7 @@ $ cat hello.html

 The point of marked was to create a markdown compiler where it was possible to
 frequently parse huge chunks of markdown without having to worry about
-caching the compiled output somehow...or blocking for an unnecesarily long time.
+caching the compiled output somehow...or blocking for an unnecessarily long time.

 marked is very concise and still implements all markdown features. It is also
 now fully compatible with the client-side.

@@ -59,7 +59,7 @@ You may pass the `client_gravatar` query parameter as follows:
 * `email`: The email address of the user or bot.
 * `is_bot`: A boolean specifying whether the user is a bot or not.
 * `avatar_url`: URL to the user's gravatar. `None` if the `client_gravatar`
-query paramater was set to `True`.
+query parameter was set to `True`.
 * `full_name`: Full name of the user or bot.
 * `is_admin`: A boolean specifying whether the user is an admin or not.
 * `bot_type`: `None` if the user isn't a bot. `1` for a `Generic` bot.

@@ -81,7 +81,7 @@ endpoint and a queue would be registered in the absence of a `queue_id`.
 #### Return values

 * `events`: An array (possibly zero-length if `dont_block` is set) of events
-with IDs newer than `last_event_id`. Event IDs are guaranted to be increasing,
+with IDs newer than `last_event_id`. Event IDs are guaranteed to be increasing,
 but they are not guaranteed to be consecutive.

 #### Example response
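The documentation hunk above describes the long-polling events endpoint: each call returns the events whose IDs are newer than last_event_id, and IDs are guaranteed to increase but not to be consecutive. A sketch of the consumer loop that behaviour implies, using the requests library (the /api/v1/events path is an assumption based on the surrounding page, handle_event is a placeholder, and error handling and dont_block are omitted):

    import requests

    def handle_event(event: dict) -> None:
        # Placeholder for application logic.
        print(event["type"], event["id"])

    def poll_events(base_url: str, auth, queue_id: str) -> None:
        # Illustrative sketch: always advance last_event_id to the highest ID
        # seen so far, since IDs increase but may skip values.
        last_event_id = -1
        while True:
            response = requests.get(
                base_url + "/api/v1/events",
                params={"queue_id": queue_id, "last_event_id": last_event_id},
                auth=auth,
            )
            response.raise_for_status()
            for event in response.json()["events"]:
                handle_event(event)
                last_event_id = max(last_event_id, event["id"])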
@@ -57,11 +57,11 @@ You may pass the `include_subscribers` query parameter as follows:
 Only people who have been invited can access a private stream.
 * `subscribers`: A list of email addresses of users who are also subscribed
 to a given stream. Included only if `include_subscribers` is `true`.
-* `desktop_notifications`: A boolean specifiying whether desktop notifications
+* `desktop_notifications`: A boolean specifying whether desktop notifications
 are enabled for the given stream.
-* `push_notifications`: A boolean specifiying whether push notifications
+* `push_notifications`: A boolean specifying whether push notifications
 are enabled for the given stream.
-* `audible_notifications`: A boolean specifiying whether audible notifications
+* `audible_notifications`: A boolean specifying whether audible notifications
 are enabled for the given stream.
 * `pin_to_top`: A boolean specifying whether the given stream has been pinned
 to the top.

@@ -39,7 +39,7 @@ You may pass the `client_gravatar` or `include_custom_profile_fields` query para
 * `email`: The email address of the user or bot.
 * `is_bot`: A boolean specifying whether the user is a bot or not.
 * `avatar_url`: URL to the user's gravatar. `None` if the `client_gravatar`
-query paramater was set to `True`.
+query parameter was set to `True`.
 * `full_name`: Full name of the user or bot.
 * `is_admin`: A boolean specifying whether the user is an admin or not.
 * `bot_type`: `None` if the user isn't a bot. `1` for a `Generic` bot.

@@ -90,7 +90,7 @@ below are for a webhook named `MyWebHook`.
 platform/server/product you are integrating. Used on the documentation
 pages as well as the sender's avatar for messages sent by the integration.
 * `static/images/integrations/mywebbook/001.svg`: A screenshot of a message
-sent by the integration, used on the documenation page.
+sent by the integration, used on the documentation page.

 ### Files that need to be updated


@@ -131,7 +131,7 @@
 notifications? Enjoy Zulip on your desktop.</p>
 </a>
 <!--Hack: These two pseudo elements are here to ensure the flex
-arrangment uses the proper cell size with 4 elements in 2 rows.-->
+arrangement uses the proper cell size with 4 elements in 2 rows.-->
 <div class="feature-block"></div>
 <div class="feature-block"></div>
 </section>

@@ -301,7 +301,7 @@
 your use case is missing, you can make it happen!</p>
 </a>
 <!--Hack: These two pseudo elements are here to ensure the flex
-arrangment uses the proper cell size with 4 elements in 2 rows.-->
+arrangement uses the proper cell size with 4 elements in 2 rows.-->
 <div class="feature-block"></div>
 </section>
 </div>

@@ -2,7 +2,7 @@

 By default, anyone can change their email address at any time.

-Organization adminstrators can
+Organization administrators can
 [restrict users to certain email domains](/help/change-a-users-name), or
 [prevent users from changing their email](/help/restrict-name-and-email-changes).


@@ -103,7 +103,7 @@ Get a peek at new features before they're released!

 Start by finding the latest version marked "Pre-release" on the
 [release list page][release-list]. There may or may not be a "Pre-release"
-later than the latest release. If there is, download the approriate Zulip
+later than the latest release. If there is, download the appropriate Zulip
 installer or app from there, and follow the instructions for your operating
 system above.


@@ -22,7 +22,7 @@ apt-mark hold initramfs-tools initramfs-tools-bin udev base-files linux-firmware
 apt-mark hold accountsservice apparmor apport apt apt-transport-https apt-utils bash bash-completion bind9-host binutils binutils-doc bsdutils bzr cloud-guest-utils cloud-init coreutils cpio dbus dnsutils dosfstools dpkg dpkg-dev e2fslibs e2fsprogs eject gcc-4.9-base git-core grub-common grub-pc grub-pc-bin grub2-common icu-devtools ifupdown imagemagick imagemagick-common init-system-helpers initscripts irqbalance isc-dhcp-client isc-dhcp-common klibc-utils krb5-locales krb5-multidev libaccountsservice0 libapparmor-perl libapparmor1 libblkid1 libc-bin libc-dev-bin libc6 libc6-dev libcdt5 libcgmanager0 libcgraph6 libcups2 libcurl3-gnutls libdbus-1-3 libdpkg-perl libdrm-intel1 libdrm-nouveau2 libdrm-radeon1 libdrm2 libevent-2.0-5 libexpat1 libexpat1-dev libgc1c2 libgcc1 libgd3 libgl1-mesa-dri libgl1-mesa-glx libglapi-mesa libgnutls-openssl27 libgraphite2-3 libgraphviz-dev libgssapi-krb5-2 libgssrpc4 libgtk2.0-0 libgtk2.0-common libgvc6 libgvpr2 libicu-dev libjasper-dev libjasper1 libk5crypto3 libkadm5clnt-mit9 libkadm5srv-mit9 libklibc libkrb5-3 libkrb5-dev libkrb5support0 liblcms2-2 liblcms2-dev libmagickwand-dev libmount1 libmysqlclient-dev libnl-3-200 libnl-genl-3-200 libnspr4 libnss3 libnss3-nssdb libnuma1 libpam-modules libpam-modules-bin libpam-runtime libpam-systemd libpam0g libpam0g-dev libpathplan4 libpci3 libpcre3 libpcre3-dev libpcsclite1 libpixman-1-0 libpixman-1-dev libpng12-0 libpng12-dev libpolkit-agent-1-0 libpolkit-backend-1-0 libpolkit-gobject-1-0 libpython3.4 libsndfile1 libss2 libssl-dev libssl1.0.0 libtasn1-6 libtiff5 libtiff5-dev libtiffxx5 libuuid1 libxdot4 libxml2 libxml2-dev libxpm4 linux-libc-dev login lsb-base lshw makedev mongodb-org mongodb-org-mongos mongodb-org-server mongodb-org-shell mongodb-org-tools mount multiarch-support mysql-client-5.7 mysql-client-core-5.7 mysql-common mysql-server-5.7 mysql-server-core-5.7 ntpdate openssh-client openssh-server openssh-sftp-server os-prober overlayroot passwd pciutils perl perl-base pgdg-keyring policykit-1 pollinate postgresql-client postgresql-client-common postgresql-common python-apt python-apt-common python-bzrlib python-urllib3 python3-apport python3-apt python3-distupgrade python3-gdbm python3-problem-report python3-software-properties python3-update-manager python3.4 python3.4-minimal rsync software-properties-common sudo sysv-rc sysvinit-utils tar tcpdump tzdata ubuntu-release-upgrader-core unzip update-manager-core usbutils util-linux uuid-runtime w3m

 if ! apt-get dist-upgrade -y "${APT_OPTIONS[@]}"; then
-echo "\`apt-get dist-upgrade\`: Failure occured while trying to perform distribution upgrade, Retrying..."
+echo "\`apt-get dist-upgrade\`: Failure occurred while trying to perform distribution upgrade, Retrying..."
 apt-get dist-upgrade -y "${APT_OPTIONS[@]}"
 fi


@@ -9,7 +9,7 @@ set -e
 # usage: clean-branches --reviews
 # Deletes all the above mentioned branches as well as branches
 # created by the scripts like `fetch-rebase-pull-request`. Be careful
-# as this would also remove other branches woth names like review-*
+# as this would also remove other branches with names like review-*

 review=0
 if [ $# -ne 0 ] && [ "$1" == "--reviews" ]; then

@@ -15,7 +15,7 @@ EMOJI_NAME_MAPS = {
 # laughter_tears from https://beebom.com/emoji-meanings/
 '1f602': {'canonical_name': 'joy', 'aliases': ['tears', 'laughter_tears']},
 '1f923': {'canonical_name': 'rolling_on_the_floor_laughing', 'aliases': ['rofl']},
-# not sure how the glpyhs match relaxed, but both iamcal and gemoji have it
+# not sure how the glyphs match relaxed, but both iamcal and gemoji have it
 '263a': {'canonical_name': 'smile', 'aliases': ['relaxed']},
 '1f60a': {'canonical_name': 'blush', 'aliases': []},
 # halo comes from gemoji/unicode

@@ -894,7 +894,7 @@ EMOJI_NAME_MAPS = {
 '26f5': {'canonical_name': 'boat', 'aliases': ['sailboat']},
 '1f6e5': {'canonical_name': 'motor_boat', 'aliases': []},
 '1f6a4': {'canonical_name': 'speedboat', 'aliases': []},
-# yatch and cruise seem like reasonable additions
+# yacht and cruise seem like reasonable additions
 '1f6f3': {'canonical_name': 'passenger_ship', 'aliases': ['yacht', 'cruise']},
 '26f4': {'canonical_name': 'ferry', 'aliases': []},
 '1f6a2': {'canonical_name': 'ship', 'aliases': []},

@@ -92,7 +92,7 @@ def generate_emoji_catalog(emoji_data: List[Dict[str, Any]],
 return dict(emoji_catalog)

 # Use only those names for which images are present in all
-# the emoji sets so that we can switch emoji sets seemlessly.
+# the emoji sets so that we can switch emoji sets seamlessly.
 def emoji_is_universal(emoji_dict: Dict[str, Any]) -> bool:
 for emoji_set in EMOJISETS:
 if not emoji_dict['has_img_' + emoji_set]:

@@ -113,7 +113,7 @@ def run_production() -> None:
 contribs_list[username] = contrib_data

 # remove duplicate contributions count
-# find commits at the time of split and substract from zulip-server
+# find commits at the time of split and subtract from zulip-server
 with open(duplicate_commits_file, 'r') as f:
 duplicate_commits = json.load(f)
 for committer in duplicate_commits:

@@ -528,7 +528,7 @@ def process_avatars(avatar_list: List[ZerverFieldsT], avatar_dir: str, realm_id:
 avatar_original['s3_path'] = original_image_path
 avatar_original_list.append(avatar_original)

-# Run downloads parallely
+# Run downloads in parallel
 output = []
 for (status, job) in run_parallel_wrapper(get_avatar, avatar_upload_list, threads=threads):
 output.append(job)
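The comment corrected here ("Run downloads in parallel") recurs in several import helpers in this commit (avatars here, uploads and emojis in the next two hunks), all of which push download jobs through a run_parallel_wrapper helper and collect (status, job) pairs. A generic sketch of the same fan-out pattern with concurrent.futures, not the actual wrapper used in the Zulip codebase:

    from concurrent.futures import ThreadPoolExecutor
    from typing import Callable, List, Tuple, TypeVar

    T = TypeVar("T")

    def run_parallel_sketch(func: Callable[[T], int],
                            jobs: List[T],
                            threads: int) -> List[Tuple[int, T]]:
        # Illustrative sketch: run `func` over each job on a thread pool and
        # pair every job with the status code it returned, mirroring the
        # (status, job) tuples consumed in the surrounding hunks.
        with ThreadPoolExecutor(max_workers=threads) as executor:
            statuses = list(executor.map(func, jobs))
        return list(zip(statuses, jobs))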
@ -617,7 +617,7 @@ def process_uploads(upload_list: List[ZerverFieldsT], upload_dir: str,
|
||||||
upload_url_list.append([upload_url, upload_s3_path])
|
upload_url_list.append([upload_url, upload_s3_path])
|
||||||
upload['path'] = upload_s3_path
|
upload['path'] = upload_s3_path
|
||||||
|
|
||||||
# Run downloads parallely
|
# Run downloads in parallel
|
||||||
output = []
|
output = []
|
||||||
for (status, job) in run_parallel_wrapper(get_uploads, upload_url_list, threads=threads):
|
for (status, job) in run_parallel_wrapper(get_uploads, upload_url_list, threads=threads):
|
||||||
output.append(job)
|
output.append(job)
|
||||||
|
@@ -678,7 +678,7 @@ def process_emojis(zerver_realmemoji: List[ZerverFieldsT], emoji_dir: str,

        emoji_records.append(emoji_record)

-   # Run downloads parallely
+   # Run downloads in parallel
    output = []
    for (status, job) in run_parallel_wrapper(get_emojis, upload_emoji_list, threads=threads):
        output.append(job)
@@ -651,7 +651,7 @@ def label_mirror_dummy_users(num_teams: int, team_name: str, mattermost_data: Di
                             username_to_user: Dict[str, Dict[str, Any]]) -> None:
    # This function might looks like a great place to label admin users. But
    # that won't be fully correct since we are iterating only though posts and
-   # it covers only users that has sent atleast one message.
+   # it covers only users that have sent at least one message.
    for post in mattermost_data["post"]["channel_post"]:
        post_team = post["team"]
        if post_team == team_name:
@@ -493,7 +493,7 @@ def process_long_term_idle_users(slack_data_dir: str, users: List[ZerverFieldsT]
    """Algorithmically, we treat users who have sent at least 10 messages
    or have sent a message within the last 60 days as active.
    Everyone else is treated as long-term idle, which means they will
-   have a slighly slower first page load when coming back to
+   have a slightly slower first page load when coming back to
    Zulip.
    """
    all_messages = get_messages_iterator(slack_data_dir, added_channels, added_mpims, dm_members)
@@ -826,7 +826,7 @@ def do_deactivate_stream(stream: Stream, log: bool=True) -> None:
    new_name = ("!DEACTIVATED:" + old_name)[:Stream.MAX_NAME_LENGTH]
    for i in range(20):
        if stream_name_in_use(new_name, stream.realm_id):
-           # This stream has alrady been deactivated, keep prepending !s until
+           # This stream has already been deactivated, keep prepending !s until
            # we have a unique stream name or you've hit a rename limit.
            new_name = ("!" + new_name)[:Stream.MAX_NAME_LENGTH]
        else:
@@ -1223,7 +1223,7 @@ def get_service_bot_events(sender: UserProfile, service_bot_tuples: List[Tuple[i
        # Mention triggers, for stream messages
        if is_stream and user_profile_id in mentioned_user_ids:
            trigger = 'mention'
-       # PM triggers for personal and huddle messsages
+       # PM triggers for personal and huddle messages
        elif (not is_stream) and (user_profile_id in active_user_ids):
            trigger = 'private_message'
        else:
@@ -5239,7 +5239,7 @@ def do_revoke_user_invite(prereg_user: PreregistrationUser) -> None:
    email = prereg_user.email

    # Delete both the confirmation objects and the prereg_user object.
-   # TODO: Probably we actaully want to set the confirmation objects
+   # TODO: Probably we actually want to set the confirmation objects
    # to a "revoked" status so that we can give the invited user a better
    # error message.
    content_type = ContentType.objects.get_for_model(PreregistrationUser)
@@ -2091,7 +2091,7 @@ def privacy_clean_markdown(content: str) -> str:

 def log_bugdown_error(msg: str) -> None:
     """We use this unusual logging approach to log the bugdown error, in
-    order to prevent AdminNotifyHandler from sending the santized
+    order to prevent AdminNotifyHandler from sending the sanitized
     original markdown formatting into another Zulip message, which
     could cause an infinite exception loop."""
     bugdown_logger.error(msg)
@@ -99,7 +99,7 @@ def bulk_set_users_or_streams_recipient_fields(model: Model,
        # we take adventage of this, instead of calling save individually.
        result.save(update_fields=['recipient'])

-# This is only sed in populate_db, so doesn't realy need tests
+# This is only sed in populate_db, so doesn't really need tests
 def bulk_create_streams(realm: Realm,
                         stream_dict: Dict[str, Dict[str, Any]]) -> None: # nocoverage
     existing_streams = frozenset([name.lower() for name in
@@ -656,7 +656,7 @@ def ignore_unhashable_lru_cache(maxsize: int=128, typed: bool=False) -> DECORATO
            pass

            # Deliberately calling this function from outside of exception
-           # handler to get a more descriptive traceback. Otherise traceback
+           # handler to get a more descriptive traceback. Otherwise traceback
            # can include the exception from cached_enabled_user_function as
            # well.
            return user_function(*args, **kwargs)
@@ -8,7 +8,7 @@ from zerver.lib.validator import check_required_string, \
     check_external_account_url_pattern, check_dict_only
 from zerver.lib.types import ProfileFieldData

-# Default external account fields are by default avaliable
+# Default external account fields are by default available
 # to realm admins, where realm admin only need to select
 # the default field and other values(i.e. name, url) will be
 # fetch from this dictionary.
@@ -48,7 +48,7 @@ realm_tables = [("zerver_defaultstream", DefaultStream, "defaultstream"),
 # that map old ids to new ids. We use this in
 # re_map_foreign_keys and other places.
 #
-# We explicity initialize ID_MAP with the tables that support
+# We explicitly initialize ID_MAP with the tables that support
 # id re-mapping.
 #
 # Code reviewers: give these tables extra scrutiny, as we need to
@@ -170,7 +170,7 @@ class MessageDict:
    @staticmethod
    def wide_dict(message: Message) -> Dict[str, Any]:
        '''
-       The next two lines get the cachable field related
+       The next two lines get the cacheable field related
        to our message object, with the side effect of
        populating the cache.
        '''
@@ -238,7 +238,7 @@ def list_to_streams(streams_raw: Iterable[Mapping[str, Any]],

    @param streams_raw The list of stream dictionaries to process;
      names should already be stripped of whitespace by the caller.
-   @param user_profile The user for whom we are retreiving the streams
+   @param user_profile The user for whom we are retrieving the streams
    @param autocreate Whether we should create streams if they don't already exist
    """
    # Validate all streams, getting extant ones, then get-or-creating the rest.
@@ -382,7 +382,7 @@ class ParallelTestSuite(django_runner.ParallelTestSuite):

 def check_import_error(test_name: str) -> None:
     try:
-        # Directly using __import__ is not recommeded, but here it gives
+        # Directly using __import__ is not recommended, but here it gives
         # clearer traceback as compared to importlib.import_module.
         __import__(test_name)
     except ImportError as exc:
@@ -18,7 +18,7 @@ class Command(ZulipBaseCommand):

    def handle(self, *args: Any, **options: Any) -> None:
        try:
-           # first check if the db has been initalized
+           # first check if the db has been initialized
            Realm.objects.first()
        except ProgrammingError:
            raise CommandError("The Zulip database does not appear to exist. "
@@ -20,7 +20,7 @@ Zulip's message retention and deletion features.
 Examples:
 To restore all recently deleted messages:
 ./manage.py restore_messages
-To restore a specfic ArchiveTransaction:
+To restore a specific ArchiveTransaction:
 ./manage.py restore_messages --transaction-id=1
 """

@@ -23,7 +23,7 @@ from zerver.models import Realm, get_realm, get_stream
 # See zerver/tests/fixtures/email/1.txt for a very simple example,
 # but anything that the message_from_binary_file function
 # from the email library can parse should work.
-# Value of the TO: header doesn't matter, as it is overriden
+# Value of the TO: header doesn't matter, as it is overridden
 # by the command in order for the email to be sent to the correct stream.

 class Command(ZulipBaseCommand):
@@ -377,7 +377,7 @@ class Realm(models.Model):
                                   max_length=1) # type: str
    icon_version = models.PositiveSmallIntegerField(default=1) # type: int

-   # Logo is the horizonal logo we show in top-left of webapp navbar UI.
+   # Logo is the horizontal logo we show in top-left of webapp navbar UI.
    LOGO_DEFAULT = u'D'
    LOGO_UPLOADED = u'U'
    LOGO_SOURCES = (
@@ -795,7 +795,7 @@ class UserProfile(AbstractBaseUser, PermissionsMixin):
    # with EMAIL_ADDRESS_VISIBILITY_EVERYONE. For other
    # organizations, it will be a unique value of the form
    # user1234@example.com. This field exists for backwards
-   # compatibility in Zulip APIs where users are refered to by their
+   # compatibility in Zulip APIs where users are referred to by their
    # email address, not their ID; it should be used in all API use cases.
    #
    # Both fields are unique within a realm (in a case-insensitive fashion).
@@ -1668,7 +1668,7 @@ class Message(AbstractMessage):
        Find out whether a message is a stream message by
        looking up its recipient.type. TODO: Make this
        an easier operation by denormalizing the message
-       type onto Message, either explicity (message.type)
+       type onto Message, either explicitly (message.type)
        or implicitly (message.stream_id is not None).
        '''
        return self.recipient.type == Recipient.STREAM
@@ -2061,7 +2061,7 @@ class Subscription(models.Model):

    # Whether the user has since unsubscribed. We mark Subscription
    # objects as inactive, rather than deleting them, when a user
-   # unsubscribes, so we can preseve user customizations like
+   # unsubscribes, so we can preserve user customizations like
    # notification settings, stream color, etc., if the user later
    # resubscribes.
    active = models.BooleanField(default=True) # type: bool
@@ -117,7 +117,7 @@ def validate_against_openapi_schema(content: Dict[str, Any], endpoint: str,
            raise SchemaError('Expected to find the "{}" required key')

 def to_python_type(py_type: str) -> type:
-    """Transform an OpenAPI-like type to a Pyton one.
+    """Transform an OpenAPI-like type to a Python one.
     https://swagger.io/docs/specification/data-models/data-types
     """
     TYPES = {
@@ -125,7 +125,7 @@ paths:
                 description: An array of `event` objects (possibly
                   zero-length if `dont_block` is set) of events with
                   IDs newer than `last_event_id`. Event IDs are
-                  guaranted to be increasing, but they are not
+                  guaranteed to be increasing, but they are not
                   guaranteed to be consecutive.
               - example:
                   {
@@ -8,7 +8,7 @@ from zerver.models import get_realm

 class GlobalPublicStreamTest(ZulipTestCase):
     def test_non_existant_stream_id(self) -> None:
-        # Here we use a relatively big number as stream id assumming such an id
+        # Here we use a relatively big number as stream id assuming such an id
         # won't exist in the test DB.
         result = self.client_get("/archive/streams/100000000/topics/TopicGlobal")
         self.assert_in_success_response(["This stream does not exist."], result)
@@ -646,7 +646,7 @@ class DesktopFlowTestingLib(ZulipTestCase):

 class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase):
     """This is a base class for testing social-auth backends. These
-    methods are often overriden by subclasses:
+    methods are often overridden by subclasses:

     register_extra_endpoints() - If the backend being tested calls some extra
     endpoints then they can be added here.
@@ -1576,7 +1576,7 @@ class SAMLAuthBackendTest(SocialAuthBase):
        self.assert_in_success_response(["Log in with Second Test IdP"], result)
        self.assert_in_success_response(["/accounts/login/social/saml/test_idp2"], result)

-       # Try succesful authentication with the regular idp from all previous tests:
+       # Try successful authentication with the regular idp from all previous tests:
        self.test_social_auth_success()

        # Now test with the second idp:
@@ -143,7 +143,7 @@ class CreateCustomProfileFieldTest(CustomProfileFieldTestCase):
            name=invalid_field_name,
        ))
        self.assert_json_success(result)
-       # Sliently overwrite name and hint with values set in default fields dict
+       # Silently overwrite name and hint with values set in default fields dict
        # for default custom external account fields.
        with self.assertRaises(CustomProfileField.DoesNotExist):
            field = CustomProfileField.objects.get(name=invalid_field_name, realm=realm)
@@ -365,7 +365,7 @@ class PlansPageTest(ZulipTestCase):
        # Test root domain
        result = self.client_get("/plans/", subdomain="")
        self.assert_in_success_response(["Sign up now"], result)
-       # Test non-existant domain
+       # Test non-existent domain
        result = self.client_get("/plans/", subdomain="moo")
        self.assertEqual(result.status_code, 404)
        self.assert_in_response("does not exist", result)
@@ -56,7 +56,7 @@ class EmailTranslationTestCase(ZulipTestCase):

 class TranslationTestCase(ZulipTestCase):
     """
-    Tranlations strings should change with locale. URLs should be locale
+    Translations strings should change with locale. URLs should be locale
     aware.
     """

@@ -233,7 +233,7 @@ class TestIntegrationsDevPanel(ZulipTestCase):
        # We have to use this roundabout manner since the order may vary each time. This is not
        # an issue. Basically, we're trying to compare 2 lists and since we're not resorting to
        # using sets or a sorted order, we're sticking with O(n*m) time complexity for this
-       # comparision (where n and m are the lengths of the two lists respectively). But since
+       # comparison (where n and m are the lengths of the two lists respectively). But since
        # this is just a unit test and more importantly n = m = some-low-number we don't really
        # care about the time complexity being what it is.
        self.assertTrue(r in expected_responses)
@@ -4333,14 +4333,14 @@ class DeleteMessageTest(ZulipTestCase):
        result = test_delete_message_by_admin(msg_id=msg_id_2)
        self.assert_json_success(result)

-       # Test mulitple delete requests with no latency issues
+       # Test multiple delete requests with no latency issues
        msg_id = self.send_stream_message(hamlet, "Scotland")
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "Invalid message(s)")

-       # Test handling of 500 error caused by mulitple delete requests due to latency.
+       # Test handling of 500 error caused by multiple delete requests due to latency.
        # see issue #11219.
        with mock.patch("zerver.views.messages.do_delete_messages") as m, \
                mock.patch("zerver.views.messages.validate_can_delete_message", return_value=None), \
@@ -376,7 +376,7 @@ class PushBouncerNotificationTest(BouncerTestCase):
                                                    server=server))
        self.assertEqual(len(tokens), 2)

-       # Now we succesfully remove them:
+       # Now we successfully remove them:
        do_regenerate_api_key(user, user)
        tokens = list(RemotePushDeviceToken.objects.filter(user_id=user.id,
                                                           server=server))
@@ -309,7 +309,7 @@ class TestArchiveMessagesGeneral(ArchiveMessagesTestingBase):
        expired_usermsg_ids = self._get_usermessage_ids(expired_msg_ids)

        archive_messages(chunk_size=2) # Specify low chunk_size to test batching.
-       # Make sure we archived what neeeded:
+       # Make sure we archived what needed:
        self._verify_archive_data(expired_msg_ids, expired_usermsg_ids)

        restore_all_data_from_archive()
@@ -3186,7 +3186,7 @@ class UserSignUpTest(InviteUserBase):
            self.assertEqual(result.url, "/accounts/login/?email=newuser%40zulip.com")
            self.assertFalse(UserProfile.objects.filter(delivery_email=email).exists())

-       # If the email is outside of LDAP_APPEND_DOMAIN, we succesfully create a non-ldap account,
+       # If the email is outside of LDAP_APPEND_DOMAIN, we successfully create a non-ldap account,
        # with the password managed in the zulip database.
        with self.settings(
                POPULATE_PROFILE_VIA_LDAP=True,
@@ -438,7 +438,7 @@ class SlackImporter(ZulipTestCase):
        self.assertDictEqual(test_dm_members, dm_members)

        # We can't do an assertDictEqual since during the construction of Personal
-       # recipients, slack_user_id_to_zulip_user_id are iterated in diffrent order in Python 3.5 and 3.6.
+       # recipients, slack_user_id_to_zulip_user_id are iterated in different order in Python 3.5 and 3.6.
        self.assertEqual(set(slack_recipient_name_to_zulip_recipient_id.keys()), slack_recipient_names)
        self.assertEqual(set(slack_recipient_name_to_zulip_recipient_id.values()), set(i for i in range(11)))

@@ -87,7 +87,7 @@ class FileUploadTest(UploadSerializeMixin, ZulipTestCase):
        data = b"".join(response.streaming_content)
        self.assertEqual(b"zulip!", data)

-       # Files uploaded through the API should be accesible via the web client
+       # Files uploaded through the API should be accessible via the web client
        self.login('hamlet')
        self.assert_url_serves_contents_of_file(uri, b"zulip!")

@@ -529,7 +529,7 @@ class FileUploadTest(UploadSerializeMixin, ZulipTestCase):
        self.logout()
        self.assertEqual(len(queries), 5)

-       # Subscribed user who recieved the message should be able to view file
+       # Subscribed user who received the message should be able to view file
        self.login_user(cordelia)
        with queries_captured() as queries:
            response = self.client_get(uri)
@@ -500,7 +500,7 @@ class PermissionTest(ZulipTestCase):
                                 {'profile_data': ujson.dumps(new_profile_data)})
        self.assert_json_error(result, error_msg)

-       # non-existant field and no data
+       # non-existent field and no data
        invalid_profile_data = [{
            'id': 9001,
            'value': ''
@@ -509,7 +509,7 @@ class PermissionTest(ZulipTestCase):
                                 {'profile_data': ujson.dumps(invalid_profile_data)})
        self.assert_json_error(result, 'Field id 9001 not found.')

-       # non-existant field and data
+       # non-existent field and data
        invalid_profile_data = [{
            'id': 9001,
            'value': 'some data'
@@ -776,7 +776,7 @@ class UserProfileTest(ZulipTestCase):
        self.assertEqual(check_valid_user_ids(realm.id, [bot.id]),
                         "User with ID %d is a bot" % (bot.id,))

-       # Succesfully get non-bot, active user belong to your realm
+       # Successfully get non-bot, active user belong to your realm
        self.assertEqual(check_valid_user_ids(realm.id, [othello.id]), None)

    def test_cache_invalidation(self) -> None:
@@ -50,7 +50,7 @@ def add_reaction(request: HttpRequest, user_profile: UserProfile, message_id: in
    # If another user has already reacted to this message with
    # same emoji code, we treat the new reaction as a vote for the
    # existing reaction. So the emoji name used by that earlier
-   # reaction takes precendence over whatever was passed in this
+   # reaction takes precedence over whatever was passed in this
    # request. This is necessary to avoid a message having 2
    # "different" emoji reactions with the same emoji code (and
    # thus same image) on the same message, which looks ugly.
@@ -379,7 +379,7 @@ def get_bots_backend(request: HttpRequest, user_profile: UserProfile) -> HttpRes
        default_events_register_stream = get_stream_name(bot_profile.default_events_register_stream)

        # Bots are supposed to have only one API key, at least for now.
-       # Therefore we can safely asume that one and only valid API key will be
+       # Therefore we can safely assume that one and only valid API key will be
        # the first one.
        api_key = get_api_key(bot_profile)

@@ -1,6 +1,6 @@
 Front lets you manage all of your communication channels in one place,
 and helps your team collaborate around every message. Follow these steps
-to recieve Front notifications without leaving Zulip!
+to receive Front notifications without leaving Zulip!

 1. {!create-stream.md!}

@@ -43,7 +43,7 @@
        "created_at": "2015-05-05T23:40:28Z",
        "updated_at": "2015-05-05T23:40:28Z",
        "closed_at": null,
-       "body": "It looks like you accidently spelled 'commit' with two 't's."
+       "body": "It looks like you accidentally spelled 'commit' with two 't's."
    },
    "comment": {
        "url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/comments/99262140",
@@ -43,7 +43,7 @@
        "created_at": "2015-05-05T23:40:28Z",
        "updated_at": "2015-05-05T23:40:28Z",
        "closed_at": null,
-       "body": "It looks like you accidently spelled 'commit' with two 't's."
+       "body": "It looks like you accidentally spelled 'commit' with two 't's."
    },
    "repository": {
        "id": 35129377,
@@ -131,13 +131,13 @@ class GithubWebhookTest(WebhookTestCase):
        self.send_and_test_stream_message('issue_comment', expected_topic, expected_message)

    def test_issue_msg(self) -> None:
-       expected_message = u"baxterthehacker opened [Issue #2](https://github.com/baxterthehacker/public-repo/issues/2):\n\n~~~ quote\nIt looks like you accidently spelled 'commit' with two 't's.\n~~~"
+       expected_message = u"baxterthehacker opened [Issue #2](https://github.com/baxterthehacker/public-repo/issues/2):\n\n~~~ quote\nIt looks like you accidentally spelled 'commit' with two 't's.\n~~~"
        self.send_and_test_stream_message('issues', self.EXPECTED_TOPIC_ISSUE_EVENTS, expected_message)

    def test_issue_msg_with_custom_topic_in_url(self) -> None:
        self.url = self.build_webhook_url(topic='notifications')
        expected_topic = u"notifications"
-       expected_message = u"baxterthehacker opened [Issue #2 Spelling error in the README file](https://github.com/baxterthehacker/public-repo/issues/2):\n\n~~~ quote\nIt looks like you accidently spelled 'commit' with two 't's.\n~~~"
+       expected_message = u"baxterthehacker opened [Issue #2 Spelling error in the README file](https://github.com/baxterthehacker/public-repo/issues/2):\n\n~~~ quote\nIt looks like you accidentally spelled 'commit' with two 't's.\n~~~"
        self.send_and_test_stream_message('issues', expected_topic, expected_message)

    def test_membership_msg(self) -> None:
@@ -210,7 +210,7 @@ class GithubWebhookTest(WebhookTestCase):
        self.send_and_test_stream_message('release', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)

    def test_page_build_msg(self) -> None:
-       expected_message = u"Github Pages build, trigerred by baxterthehacker, has finished building."
+       expected_message = u"Github Pages build, triggered by baxterthehacker, has finished building."
        self.send_and_test_stream_message('page_build', self.EXPECTED_TOPIC_REPO_EVENTS, expected_message)

    def test_status_msg(self) -> None:
@@ -257,7 +257,7 @@ def get_page_build_body(payload: Dict[str, Any]) -> str:
            CONTENT_MESSAGE_TEMPLATE.format(message=build['error']['message'])
        )

-   return u"Github Pages build, trigerred by {}, {}.".format(
+   return u"Github Pages build, triggered by {}, {}.".format(
        payload['build']['pusher']['login'],
        action
    )
@@ -73,7 +73,7 @@ def replied_body(payload: Dict[str, Any], actor: str, action: str) -> str:
    return body

 def get_event_handler(event: str) -> Callable[..., str]:
-    # The main reason for this function existance is because of mypy
+    # The main reason for this function existence is because of mypy
     handler = EVENTS_FUNCTION_MAPPER.get(event) # type: Any
     if handler is None:
         raise UnexpectedWebhookEventType("Groove", event)
@@ -4,7 +4,7 @@
        "name": "ToHighTemeprature",
        "runbook_url": "http://www.use.water.pl",
        "version": 2,
-       "description": "Measurment of temperature in your computer"
+       "description": "Measurement of temperature in your computer"
    },
    "account": "lizonr@gmail.com",
    "trigger_time": 1460407214,
@@ -10,7 +10,7 @@ Get Zulip notifications for your PagerDuty services!

 1. Set **Extension Type** to **Generic V1 Webhook**. Set **Name** to a name
    of your choice, such as `Zulip`. Under **Details**, set **URL** to the
-   URL contructed above, and click **Save**.
+   URL constructed above, and click **Save**.

 {!congrats.md!}

@@ -8,7 +8,7 @@ Get ReviewBoard notifications in Zulip!
   corner, and click **Team administration**. Select **WebHooks** on the
   left sidebar, and click **+ Create a WebHook**.

-1. Make sure the **Enabled** option is checked. Set **URL** to the URL constucted
+1. Make sure the **Enabled** option is checked. Set **URL** to the URL constructed
   above, and select the events you'd like to be notified about. Set **Encoding**
   to **JSON**, and click **Create WebHook**.
