2020-02-13 22:34:29 +01:00
|
|
|
const util = require("./util");
|
2019-12-26 15:34:17 +01:00
|
|
|
const FoldDict = require('./fold_dict').FoldDict;
|
2019-12-27 13:16:22 +01:00
|
|
|
const LazySet = require('./lazy_set').LazySet;
|
2020-02-21 14:26:11 +01:00
|
|
|
const settings_config = require("./settings_config");
|
2019-02-08 11:56:33 +01:00
|
|
|
|
2019-12-27 15:56:46 +01:00
|
|
|
const BinaryDict = function (pred) {
|
|
|
|
/*
|
|
|
|
A dictionary that keeps track of which objects had the predicate
|
|
|
|
return true or false for efficient lookups and iteration.
|
|
|
|
|
|
|
|
This class is an optimization for managing subscriptions.
|
|
|
|
Typically you only subscribe to a small minority of streams, and
|
|
|
|
most common operations want to efficiently iterate through only
|
|
|
|
streams where the current user is subscribed:
|
|
|
|
|
|
|
|
- search bar search
|
|
|
|
- build left sidebar
|
|
|
|
- autocomplete #stream_links
|
|
|
|
- autocomplete stream in compose
|
|
|
|
*/
|
|
|
|
|
|
|
|
const self = {};
|
2019-12-26 15:34:17 +01:00
|
|
|
self.trues = new FoldDict();
|
|
|
|
self.falses = new FoldDict();
|
2019-12-27 15:56:46 +01:00
|
|
|
|
|
|
|
self.true_values = function () {
|
|
|
|
return self.trues.values();
|
|
|
|
};
|
|
|
|
|
|
|
|
self.num_true_items = function () {
|
2020-02-03 07:58:50 +01:00
|
|
|
return self.trues.size;
|
2019-12-27 15:56:46 +01:00
|
|
|
};
|
|
|
|
|
|
|
|
self.false_values = function () {
|
|
|
|
return self.falses.values();
|
|
|
|
};
|
|
|
|
|
2020-02-03 08:51:09 +01:00
|
|
|
self.values = function* () {
|
|
|
|
for (const value of self.trues.values()) {
|
|
|
|
yield value;
|
|
|
|
}
|
|
|
|
for (const value of self.falses.values()) {
|
|
|
|
yield value;
|
|
|
|
}
|
2019-12-27 15:56:46 +01:00
|
|
|
};
|
|
|
|
|
|
|
|
self.get = function (k) {
|
|
|
|
const res = self.trues.get(k);
|
|
|
|
|
|
|
|
if (res !== undefined) {
|
|
|
|
return res;
|
|
|
|
}
|
|
|
|
|
|
|
|
return self.falses.get(k);
|
|
|
|
};
|
|
|
|
|
|
|
|
self.set = function (k, v) {
|
|
|
|
if (pred(v)) {
|
|
|
|
self.set_true(k, v);
|
|
|
|
} else {
|
|
|
|
self.set_false(k, v);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
self.set_true = function (k, v) {
|
2020-02-03 07:41:38 +01:00
|
|
|
self.falses.delete(k);
|
2019-12-27 15:56:46 +01:00
|
|
|
self.trues.set(k, v);
|
|
|
|
};
|
|
|
|
|
|
|
|
self.set_false = function (k, v) {
|
2020-02-03 07:41:38 +01:00
|
|
|
self.trues.delete(k);
|
2019-12-27 15:56:46 +01:00
|
|
|
self.falses.set(k, v);
|
|
|
|
};
|
|
|
|
|
2020-02-03 07:41:38 +01:00
|
|
|
self.delete = function (k) {
|
|
|
|
self.trues.delete(k);
|
|
|
|
self.falses.delete(k);
|
2019-12-27 15:56:46 +01:00
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
return self;
|
|
|
|
};
|
2013-09-16 23:47:05 +02:00
|
|
|
|
|
|
|
// The stream_info variable maps stream names to stream properties objects
|
|
|
|
// Call clear_subscriptions() to initialize it.
|
2019-11-02 00:06:25 +01:00
|
|
|
let stream_info;
|
|
|
|
let subs_by_stream_id;
|
|
|
|
let filter_out_inactives = false;
|
2013-09-16 23:47:05 +02:00
|
|
|
|
2019-12-26 15:34:17 +01:00
|
|
|
const stream_ids_by_name = new FoldDict();
|
2019-12-30 13:20:45 +01:00
|
|
|
const default_stream_ids = new Set();
|
2017-05-11 21:49:38 +02:00
|
|
|
|
2020-02-04 21:50:55 +01:00
|
|
|
exports.stream_post_policy_values = {
|
|
|
|
everyone: {
|
|
|
|
code: 1,
|
|
|
|
description: i18n.t("All stream members can post"),
|
|
|
|
},
|
|
|
|
admins: {
|
|
|
|
code: 2,
|
|
|
|
description: i18n.t("Only organization administrators can post"),
|
|
|
|
},
|
|
|
|
non_new_members: {
|
|
|
|
code: 3,
|
|
|
|
description: i18n.t("Only organization full members can post"),
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
2013-09-16 23:47:05 +02:00
|
|
|
exports.clear_subscriptions = function () {
|
2019-12-27 15:56:46 +01:00
|
|
|
stream_info = new BinaryDict(function (sub) {
|
|
|
|
return sub.subscribed;
|
|
|
|
});
|
2020-02-03 09:42:50 +01:00
|
|
|
subs_by_stream_id = new Map();
|
2013-09-16 23:47:05 +02:00
|
|
|
};
|
2016-06-04 22:40:25 +02:00
|
|
|
|
2013-09-16 23:47:05 +02:00
|
|
|
exports.clear_subscriptions();
|
2013-08-15 21:11:07 +02:00
|
|
|
|
2019-03-17 14:48:51 +01:00
|
|
|
exports.set_filter_out_inactives = function () {
|
|
|
|
if (page_params.demote_inactive_streams ===
|
2020-02-21 14:26:11 +01:00
|
|
|
settings_config.demote_inactive_streams_values.automatic.code) {
|
2019-12-27 15:56:46 +01:00
|
|
|
filter_out_inactives = exports.num_subscribed_subs() >= 30;
|
2019-03-17 14:48:51 +01:00
|
|
|
} else if (page_params.demote_inactive_streams ===
|
2020-02-21 14:26:11 +01:00
|
|
|
settings_config.demote_inactive_streams_values.always.code) {
|
2019-03-17 14:48:51 +01:00
|
|
|
filter_out_inactives = true;
|
|
|
|
} else {
|
|
|
|
filter_out_inactives = false;
|
|
|
|
}
|
2019-02-11 22:05:59 +01:00
|
|
|
};
|
|
|
|
|
|
|
|
// for testing:
|
|
|
|
exports.is_filtering_inactives = function () {
|
|
|
|
return filter_out_inactives;
|
|
|
|
};
|
|
|
|
|
2017-04-28 15:38:02 +02:00
|
|
|
exports.is_active = function (sub) {
|
2019-03-17 14:48:51 +01:00
|
|
|
if (!filter_out_inactives || sub.pin_to_top) {
|
2019-02-11 22:05:59 +01:00
|
|
|
// If users don't want to filter inactive streams
|
|
|
|
// to the bottom, we respect that setting and don't
|
|
|
|
// treat any streams as dormant.
|
|
|
|
//
|
|
|
|
// Currently this setting is automatically determined
|
|
|
|
// by the number of streams. See the callers
|
|
|
|
// to set_filter_out_inactives.
|
|
|
|
return true;
|
|
|
|
}
|
2020-03-22 18:40:05 +01:00
|
|
|
return stream_topic_history.stream_has_topics(sub.stream_id) || sub.newly_subscribed;
|
2016-10-28 17:18:56 +02:00
|
|
|
};
|
2013-08-15 21:11:07 +02:00
|
|
|
|
2017-03-05 00:35:45 +01:00
|
|
|
exports.rename_sub = function (sub, new_name) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const old_name = sub.name;
|
2017-05-11 21:49:38 +02:00
|
|
|
|
|
|
|
stream_ids_by_name.set(old_name, sub.stream_id);
|
|
|
|
|
2016-10-30 17:33:23 +01:00
|
|
|
sub.name = new_name;
|
2020-02-03 07:41:38 +01:00
|
|
|
stream_info.delete(old_name);
|
2016-10-30 17:33:23 +01:00
|
|
|
stream_info.set(new_name, sub);
|
|
|
|
};
|
|
|
|
|
2017-01-20 23:04:40 +01:00
|
|
|
exports.subscribe_myself = function (sub) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const user_id = people.my_current_user_id();
|
2017-01-20 23:04:40 +01:00
|
|
|
exports.add_subscriber(sub.name, user_id);
|
|
|
|
sub.subscribed = true;
|
2017-04-28 15:59:30 +02:00
|
|
|
sub.newly_subscribed = true;
|
2019-12-27 15:56:46 +01:00
|
|
|
stream_info.set_true(sub.name, sub);
|
2017-01-20 23:04:40 +01:00
|
|
|
};
|
|
|
|
|
refactor: Extract is_subscriber_subset().
Extracting the function makes it a bit easier to
test and use in a generic way.
Also, I wanted this to live in stream_data, so that
it's easier to find if we change how we model
subscriber data.
Finally, I use _.every to do the subset check
instead of `_.difference`, since _.difference
is actually N-squared:
_.difference = restArguments(function(array, rest) {
rest = flatten(rest, true, true);
return _.filter(array, function(value){
return !_.contains(rest, value);
});
});
And we don't actually want to build a list only
to check that it's zero vs. nonzero length.
We now do this, which short circuits as soon
as it finds any key that is only in sub1:
return _.every(sub1.subscribers.keys(), (key) => {
return sub2_set.has(key);
});
2020-01-14 19:35:33 +01:00
|
|
|
exports.is_subscriber_subset = function (sub1, sub2) {
|
|
|
|
if (sub1.subscribers && sub2.subscribers) {
|
|
|
|
const sub2_set = sub2.subscribers;
|
|
|
|
|
2020-02-08 05:11:31 +01:00
|
|
|
return Array.from(sub1.subscribers.keys()).every(key => sub2_set.has(key));
|
refactor: Extract is_subscriber_subset().
Extracting the function makes it a bit easier to
test and use in a generic way.
Also, I wanted this to live in stream_data, so that
it's easier to find if we change how we model
subscriber data.
Finally, I use _.every to do the subset check
instead of `_.difference`, since _.difference
is actually N-squared:
_.difference = restArguments(function(array, rest) {
rest = flatten(rest, true, true);
return _.filter(array, function(value){
return !_.contains(rest, value);
});
});
And we don't actually want to build a list only
to check that it's zero vs. nonzero length.
We now do this, which short circuits as soon
as it finds any key that is only in sub1:
return _.every(sub1.subscribers.keys(), (key) => {
return sub2_set.has(key);
});
2020-01-14 19:35:33 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
return false;
|
|
|
|
};
|
|
|
|
|
2016-11-09 16:26:35 +01:00
|
|
|
exports.unsubscribe_myself = function (sub) {
|
|
|
|
// Remove user from subscriber's list
|
2019-11-02 00:06:25 +01:00
|
|
|
const user_id = people.my_current_user_id();
|
2016-11-09 16:05:30 +01:00
|
|
|
exports.remove_subscriber(sub.name, user_id);
|
2016-11-09 16:26:35 +01:00
|
|
|
sub.subscribed = false;
|
2017-04-28 15:59:30 +02:00
|
|
|
sub.newly_subscribed = false;
|
2019-12-27 15:56:46 +01:00
|
|
|
stream_info.set_false(sub.name, sub);
|
2016-11-09 16:26:35 +01:00
|
|
|
};
|
|
|
|
|
2020-02-09 22:02:55 +01:00
|
|
|
exports.add_sub = function (sub) {
|
2020-02-09 04:29:30 +01:00
|
|
|
if (!Object.prototype.hasOwnProperty.call(sub, 'subscribers')) {
|
2020-02-03 07:48:50 +01:00
|
|
|
sub.subscribers = new LazySet([]);
|
2016-03-14 06:38:43 +01:00
|
|
|
}
|
|
|
|
|
2020-02-09 22:02:55 +01:00
|
|
|
stream_info.set(sub.name, sub);
|
2014-02-05 20:35:16 +01:00
|
|
|
subs_by_stream_id.set(sub.stream_id, sub);
|
2013-08-15 21:11:07 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
exports.get_sub = function (stream_name) {
|
2013-09-16 23:47:05 +02:00
|
|
|
return stream_info.get(stream_name);
|
2013-08-15 21:11:07 +02:00
|
|
|
};
|
|
|
|
|
2014-02-05 20:35:16 +01:00
|
|
|
exports.get_sub_by_id = function (stream_id) {
|
|
|
|
return subs_by_stream_id.get(stream_id);
|
|
|
|
};
|
|
|
|
|
2017-05-11 21:49:38 +02:00
|
|
|
exports.get_stream_id = function (name) {
|
|
|
|
// Note: Only use this function for situations where
|
|
|
|
// you are comfortable with a user dealing with an
|
|
|
|
// old name of a stream (from prior to a rename).
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = stream_info.get(name);
|
2017-05-11 21:49:38 +02:00
|
|
|
|
|
|
|
if (sub) {
|
|
|
|
return sub.stream_id;
|
|
|
|
}
|
|
|
|
|
2019-11-02 00:06:25 +01:00
|
|
|
const stream_id = stream_ids_by_name.get(name);
|
2017-05-11 21:49:38 +02:00
|
|
|
return stream_id;
|
|
|
|
};
|
|
|
|
|
|
|
|
exports.get_sub_by_name = function (name) {
|
|
|
|
// Note: Only use this function for situations where
|
|
|
|
// you are comfortable with a user dealing with an
|
|
|
|
// old name of a stream (from prior to a rename).
|
|
|
|
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = stream_info.get(name);
|
2017-05-11 21:49:38 +02:00
|
|
|
|
|
|
|
if (sub) {
|
|
|
|
return sub;
|
|
|
|
}
|
|
|
|
|
2019-11-02 00:06:25 +01:00
|
|
|
const stream_id = stream_ids_by_name.get(name);
|
2017-05-11 21:49:38 +02:00
|
|
|
|
|
|
|
if (!stream_id) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
return subs_by_stream_id.get(stream_id);
|
|
|
|
};
|
|
|
|
|
2018-12-14 19:02:26 +01:00
|
|
|
exports.id_to_slug = function (stream_id) {
|
2019-11-02 00:06:25 +01:00
|
|
|
let name = exports.maybe_get_stream_name(stream_id) || 'unknown';
|
2018-12-14 19:02:26 +01:00
|
|
|
|
|
|
|
// The name part of the URL doesn't really matter, so we try to
|
|
|
|
// make it pretty.
|
|
|
|
name = name.replace(' ', '-');
|
|
|
|
|
|
|
|
return stream_id + '-' + name;
|
|
|
|
};
|
|
|
|
|
2018-02-15 21:02:47 +01:00
|
|
|
exports.name_to_slug = function (name) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const stream_id = exports.get_stream_id(name);
|
2018-02-15 21:02:47 +01:00
|
|
|
|
|
|
|
if (!stream_id) {
|
|
|
|
return name;
|
|
|
|
}
|
|
|
|
|
|
|
|
// The name part of the URL doesn't really matter, so we try to
|
|
|
|
// make it pretty.
|
|
|
|
name = name.replace(' ', '-');
|
|
|
|
|
|
|
|
return stream_id + '-' + name;
|
|
|
|
};
|
|
|
|
|
|
|
|
exports.slug_to_name = function (slug) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const m = /^([\d]+)-/.exec(slug);
|
2018-02-15 21:02:47 +01:00
|
|
|
if (m) {
|
2019-12-30 12:51:16 +01:00
|
|
|
const stream_id = parseInt(m[1], 10);
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = subs_by_stream_id.get(stream_id);
|
2018-02-15 21:02:47 +01:00
|
|
|
if (sub) {
|
|
|
|
return sub.name;
|
|
|
|
}
|
|
|
|
// if nothing was found above, we try to match on the stream
|
|
|
|
// name in the somewhat unlikely event they had a historical
|
|
|
|
// link to a stream like 4-horsemen
|
|
|
|
}
|
|
|
|
|
|
|
|
return slug;
|
|
|
|
};
|
|
|
|
|
|
|
|
|
2017-02-16 03:47:08 +01:00
|
|
|
exports.delete_sub = function (stream_id) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = subs_by_stream_id.get(stream_id);
|
2017-02-16 03:47:08 +01:00
|
|
|
if (!sub) {
|
|
|
|
blueslip.warn('Failed to delete stream ' + stream_id);
|
|
|
|
return;
|
|
|
|
}
|
2020-02-03 07:41:38 +01:00
|
|
|
subs_by_stream_id.delete(stream_id);
|
|
|
|
stream_info.delete(sub.name);
|
2013-08-21 23:21:31 +02:00
|
|
|
};
|
|
|
|
|
2017-08-22 20:00:09 +02:00
|
|
|
exports.get_non_default_stream_names = function () {
|
2020-02-04 23:46:56 +01:00
|
|
|
let subs = Array.from(stream_info.values());
|
2020-02-08 03:51:18 +01:00
|
|
|
subs = subs.filter(
|
|
|
|
sub => !exports.is_default_stream_id(sub.stream_id) && (sub.subscribed || !sub.invite_only)
|
|
|
|
);
|
2020-02-09 04:39:08 +01:00
|
|
|
const names = subs.map(sub => sub.name);
|
2017-08-22 20:00:09 +02:00
|
|
|
return names;
|
|
|
|
};
|
|
|
|
|
2018-07-30 15:27:18 +02:00
|
|
|
exports.get_unsorted_subs = function () {
|
2020-02-04 23:46:56 +01:00
|
|
|
return Array.from(stream_info.values());
|
2018-07-30 15:27:18 +02:00
|
|
|
};
|
|
|
|
|
2018-07-30 19:44:57 +02:00
|
|
|
exports.get_updated_unsorted_subs = function () {
|
|
|
|
// This function is expensive in terms of calculating
|
|
|
|
// some values (particularly stream counts) but avoids
|
|
|
|
// prematurely sorting subs.
|
2020-02-04 23:46:56 +01:00
|
|
|
let all_subs = Array.from(stream_info.values());
|
2018-07-30 19:44:57 +02:00
|
|
|
|
|
|
|
// Add in admin options and stream counts.
|
js: Automatically convert _.each to for…of.
This commit was automatically generated by the following script,
followed by lint --fix and a few small manual lint-related cleanups.
import * as babelParser from "recast/parsers/babel";
import * as recast from "recast";
import * as tsParser from "recast/parsers/typescript";
import { builders as b, namedTypes as n } from "ast-types";
import { Context } from "ast-types/lib/path-visitor";
import K from "ast-types/gen/kinds";
import { NodePath } from "ast-types/lib/node-path";
import assert from "assert";
import fs from "fs";
import path from "path";
import process from "process";
const checkExpression = (node: n.Node): node is K.ExpressionKind =>
n.Expression.check(node);
const checkStatement = (node: n.Node): node is K.StatementKind =>
n.Statement.check(node);
for (const file of process.argv.slice(2)) {
console.log("Parsing", file);
const ast = recast.parse(fs.readFileSync(file, { encoding: "utf8" }), {
parser: path.extname(file) === ".ts" ? tsParser : babelParser,
});
let changed = false;
let inLoop = false;
let replaceReturn = false;
const visitLoop = (...args: string[]) =>
function(this: Context, path: NodePath) {
for (const arg of args) {
this.visit(path.get(arg));
}
const old = { inLoop };
inLoop = true;
this.visit(path.get("body"));
inLoop = old.inLoop;
return false;
};
recast.visit(ast, {
visitDoWhileStatement: visitLoop("test"),
visitExpressionStatement(path) {
const { expression, comments } = path.node;
let valueOnly;
if (
n.CallExpression.check(expression) &&
n.MemberExpression.check(expression.callee) &&
!expression.callee.computed &&
n.Identifier.check(expression.callee.object) &&
expression.callee.object.name === "_" &&
n.Identifier.check(expression.callee.property) &&
["each", "forEach"].includes(expression.callee.property.name) &&
[2, 3].includes(expression.arguments.length) &&
checkExpression(expression.arguments[0]) &&
(n.FunctionExpression.check(expression.arguments[1]) ||
n.ArrowFunctionExpression.check(expression.arguments[1])) &&
[1, 2].includes(expression.arguments[1].params.length) &&
n.Identifier.check(expression.arguments[1].params[0]) &&
((valueOnly = expression.arguments[1].params[1] === undefined) ||
n.Identifier.check(expression.arguments[1].params[1])) &&
(expression.arguments[2] === undefined ||
n.ThisExpression.check(expression.arguments[2]))
) {
const old = { inLoop, replaceReturn };
inLoop = false;
replaceReturn = true;
this.visit(
path
.get("expression")
.get("arguments")
.get(1)
.get("body")
);
inLoop = old.inLoop;
replaceReturn = old.replaceReturn;
const [right, { body, params }] = expression.arguments;
const loop = b.forOfStatement(
b.variableDeclaration("let", [
b.variableDeclarator(
valueOnly ? params[0] : b.arrayPattern([params[1], params[0]])
),
]),
valueOnly
? right
: b.callExpression(
b.memberExpression(right, b.identifier("entries")),
[]
),
checkStatement(body) ? body : b.expressionStatement(body)
);
loop.comments = comments;
path.replace(loop);
changed = true;
}
this.traverse(path);
},
visitForStatement: visitLoop("init", "test", "update"),
visitForInStatement: visitLoop("left", "right"),
visitForOfStatement: visitLoop("left", "right"),
visitFunction(path) {
this.visit(path.get("params"));
const old = { replaceReturn };
replaceReturn = false;
this.visit(path.get("body"));
replaceReturn = old.replaceReturn;
return false;
},
visitReturnStatement(path) {
if (replaceReturn) {
assert(!inLoop); // could use labeled continue if this ever fires
const { argument, comments } = path.node;
if (argument === null) {
const s = b.continueStatement();
s.comments = comments;
path.replace(s);
} else {
const s = b.expressionStatement(argument);
s.comments = comments;
path.replace(s, b.continueStatement());
}
return false;
}
this.traverse(path);
},
visitWhileStatement: visitLoop("test"),
});
if (changed) {
console.log("Writing", file);
fs.writeFileSync(file, recast.print(ast).code, { encoding: "utf8" });
}
}
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-02-06 06:19:47 +01:00
|
|
|
for (const sub of all_subs) {
|
2018-07-30 19:44:57 +02:00
|
|
|
exports.update_calculated_fields(sub);
|
js: Automatically convert _.each to for…of.
This commit was automatically generated by the following script,
followed by lint --fix and a few small manual lint-related cleanups.
import * as babelParser from "recast/parsers/babel";
import * as recast from "recast";
import * as tsParser from "recast/parsers/typescript";
import { builders as b, namedTypes as n } from "ast-types";
import { Context } from "ast-types/lib/path-visitor";
import K from "ast-types/gen/kinds";
import { NodePath } from "ast-types/lib/node-path";
import assert from "assert";
import fs from "fs";
import path from "path";
import process from "process";
const checkExpression = (node: n.Node): node is K.ExpressionKind =>
n.Expression.check(node);
const checkStatement = (node: n.Node): node is K.StatementKind =>
n.Statement.check(node);
for (const file of process.argv.slice(2)) {
console.log("Parsing", file);
const ast = recast.parse(fs.readFileSync(file, { encoding: "utf8" }), {
parser: path.extname(file) === ".ts" ? tsParser : babelParser,
});
let changed = false;
let inLoop = false;
let replaceReturn = false;
const visitLoop = (...args: string[]) =>
function(this: Context, path: NodePath) {
for (const arg of args) {
this.visit(path.get(arg));
}
const old = { inLoop };
inLoop = true;
this.visit(path.get("body"));
inLoop = old.inLoop;
return false;
};
recast.visit(ast, {
visitDoWhileStatement: visitLoop("test"),
visitExpressionStatement(path) {
const { expression, comments } = path.node;
let valueOnly;
if (
n.CallExpression.check(expression) &&
n.MemberExpression.check(expression.callee) &&
!expression.callee.computed &&
n.Identifier.check(expression.callee.object) &&
expression.callee.object.name === "_" &&
n.Identifier.check(expression.callee.property) &&
["each", "forEach"].includes(expression.callee.property.name) &&
[2, 3].includes(expression.arguments.length) &&
checkExpression(expression.arguments[0]) &&
(n.FunctionExpression.check(expression.arguments[1]) ||
n.ArrowFunctionExpression.check(expression.arguments[1])) &&
[1, 2].includes(expression.arguments[1].params.length) &&
n.Identifier.check(expression.arguments[1].params[0]) &&
((valueOnly = expression.arguments[1].params[1] === undefined) ||
n.Identifier.check(expression.arguments[1].params[1])) &&
(expression.arguments[2] === undefined ||
n.ThisExpression.check(expression.arguments[2]))
) {
const old = { inLoop, replaceReturn };
inLoop = false;
replaceReturn = true;
this.visit(
path
.get("expression")
.get("arguments")
.get(1)
.get("body")
);
inLoop = old.inLoop;
replaceReturn = old.replaceReturn;
const [right, { body, params }] = expression.arguments;
const loop = b.forOfStatement(
b.variableDeclaration("let", [
b.variableDeclarator(
valueOnly ? params[0] : b.arrayPattern([params[1], params[0]])
),
]),
valueOnly
? right
: b.callExpression(
b.memberExpression(right, b.identifier("entries")),
[]
),
checkStatement(body) ? body : b.expressionStatement(body)
);
loop.comments = comments;
path.replace(loop);
changed = true;
}
this.traverse(path);
},
visitForStatement: visitLoop("init", "test", "update"),
visitForInStatement: visitLoop("left", "right"),
visitForOfStatement: visitLoop("left", "right"),
visitFunction(path) {
this.visit(path.get("params"));
const old = { replaceReturn };
replaceReturn = false;
this.visit(path.get("body"));
replaceReturn = old.replaceReturn;
return false;
},
visitReturnStatement(path) {
if (replaceReturn) {
assert(!inLoop); // could use labeled continue if this ever fires
const { argument, comments } = path.node;
if (argument === null) {
const s = b.continueStatement();
s.comments = comments;
path.replace(s);
} else {
const s = b.expressionStatement(argument);
s.comments = comments;
path.replace(s, b.continueStatement());
}
return false;
}
this.traverse(path);
},
visitWhileStatement: visitLoop("test"),
});
if (changed) {
console.log("Writing", file);
fs.writeFileSync(file, recast.print(ast).code, { encoding: "utf8" });
}
}
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-02-06 06:19:47 +01:00
|
|
|
}
|
2018-07-30 19:44:57 +02:00
|
|
|
|
2019-05-08 08:57:23 +02:00
|
|
|
// We don't display unsubscribed streams to guest users.
|
|
|
|
if (page_params.is_guest) {
|
2020-02-08 03:51:18 +01:00
|
|
|
all_subs = all_subs.filter(sub => sub.subscribed);
|
2019-05-08 08:57:23 +02:00
|
|
|
}
|
|
|
|
|
2018-07-30 19:44:57 +02:00
|
|
|
return all_subs;
|
|
|
|
};
|
|
|
|
|
2019-12-27 15:56:46 +01:00
|
|
|
exports.num_subscribed_subs = function () {
|
|
|
|
return stream_info.num_true_items();
|
|
|
|
};
|
|
|
|
|
2013-10-25 16:59:52 +02:00
|
|
|
exports.subscribed_subs = function () {
|
2020-02-04 23:46:56 +01:00
|
|
|
return Array.from(stream_info.true_values());
|
2013-10-25 16:59:52 +02:00
|
|
|
};
|
2013-08-15 21:11:07 +02:00
|
|
|
|
2016-10-25 21:45:19 +02:00
|
|
|
exports.unsubscribed_subs = function () {
|
2020-02-04 23:46:56 +01:00
|
|
|
return Array.from(stream_info.false_values());
|
2016-10-25 21:45:19 +02:00
|
|
|
};
|
|
|
|
|
2013-08-15 21:11:07 +02:00
|
|
|
exports.subscribed_streams = function () {
|
2020-02-09 04:39:08 +01:00
|
|
|
return exports.subscribed_subs().map(sub => sub.name);
|
2013-08-15 21:11:07 +02:00
|
|
|
};
|
|
|
|
|
2019-01-10 17:57:35 +01:00
|
|
|
exports.get_invite_stream_data = function () {
|
2020-03-22 17:31:47 +01:00
|
|
|
function get_data(sub) {
|
2019-01-10 17:57:35 +01:00
|
|
|
return {
|
|
|
|
name: sub.name,
|
|
|
|
stream_id: sub.stream_id,
|
|
|
|
invite_only: sub.invite_only,
|
2020-03-22 17:04:47 +01:00
|
|
|
default_stream: default_stream_ids.has(sub.stream_id),
|
2019-01-10 17:57:35 +01:00
|
|
|
};
|
2020-03-22 17:31:47 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
const streams = [];
|
|
|
|
|
|
|
|
// Invite users to all default streams...
|
|
|
|
for (const stream_id of default_stream_ids) {
|
|
|
|
const sub = subs_by_stream_id.get(stream_id);
|
|
|
|
streams.push(get_data(sub));
|
|
|
|
}
|
|
|
|
|
|
|
|
// ...plus all your subscribed streams (avoiding repeats).
|
|
|
|
for (const sub of exports.subscribed_subs()) {
|
|
|
|
if (!default_stream_ids.has(sub.stream_id)) {
|
|
|
|
streams.push(get_data(sub));
|
2019-01-10 17:57:35 +01:00
|
|
|
}
|
2020-03-22 17:31:47 +01:00
|
|
|
}
|
|
|
|
|
2019-01-10 17:57:35 +01:00
|
|
|
return streams;
|
|
|
|
};
|
|
|
|
|
2013-08-15 21:11:07 +02:00
|
|
|
exports.get_colors = function () {
|
2020-02-09 04:39:08 +01:00
|
|
|
return exports.subscribed_subs().map(sub => sub.color);
|
2013-08-15 21:11:07 +02:00
|
|
|
};
|
|
|
|
|
2016-03-14 06:38:43 +01:00
|
|
|
exports.update_subscribers_count = function (sub) {
|
2020-02-03 07:58:50 +01:00
|
|
|
const count = sub.subscribers.size;
|
2016-03-14 06:38:43 +01:00
|
|
|
sub.subscriber_count = count;
|
|
|
|
};
|
|
|
|
|
2020-03-21 15:22:40 +01:00
|
|
|
exports.potential_subscribers = function (sub) {
|
|
|
|
/*
|
|
|
|
This is a list of unsubscribed users
|
|
|
|
for the current stream, who the current
|
|
|
|
user could potentially subscribe to the
|
|
|
|
stream. This may include some bots.
|
|
|
|
|
|
|
|
We currently use it for typeahead in
|
|
|
|
stream_edit.js.
|
|
|
|
|
|
|
|
This may be a superset of the actual
|
|
|
|
subscribers that you can change in some cases
|
|
|
|
(like if you're a guest?); we should refine this
|
|
|
|
going forward, especially if we use it for something
|
|
|
|
other than typeahead. (The guest use case
|
|
|
|
may be moot now for other reasons.)
|
|
|
|
*/
|
|
|
|
|
|
|
|
function is_potential_subscriber(person) {
|
|
|
|
// Use verbose style to force better test
|
|
|
|
// coverage, plus we may add more conditions over
|
|
|
|
// time.
|
|
|
|
if (sub.subscribers.has(person.user_id)) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
return people.filter_all_users(is_potential_subscriber);
|
|
|
|
|
|
|
|
};
|
|
|
|
|
2018-04-05 19:58:27 +02:00
|
|
|
exports.update_stream_email_address = function (sub, email) {
|
|
|
|
sub.email_address = email;
|
|
|
|
};
|
|
|
|
|
2017-06-29 15:35:34 +02:00
|
|
|
exports.get_subscriber_count = function (stream_name) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub_by_name(stream_name);
|
2017-06-29 15:35:34 +02:00
|
|
|
if (sub === undefined) {
|
|
|
|
blueslip.warn('We got a get_subscriber_count count call for a non-existent stream.');
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
if (!sub.subscribers) {
|
|
|
|
return 0;
|
|
|
|
}
|
2020-02-03 07:58:50 +01:00
|
|
|
return sub.subscribers.size;
|
2017-06-29 15:35:34 +02:00
|
|
|
};
|
|
|
|
|
2020-02-04 21:50:55 +01:00
|
|
|
exports.update_stream_post_policy = function (sub, stream_post_policy) {
|
|
|
|
sub.stream_post_policy = stream_post_policy;
|
2019-05-07 07:12:14 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
exports.update_stream_privacy = function (sub, values) {
|
|
|
|
sub.invite_only = values.invite_only;
|
|
|
|
sub.history_public_to_subscribers = values.history_public_to_subscribers;
|
|
|
|
};
|
|
|
|
|
2019-06-14 19:41:26 +02:00
|
|
|
exports.receives_notifications = function (stream_name, notification_name) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub(stream_name);
|
2019-06-14 19:41:26 +02:00
|
|
|
if (sub === undefined) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
if (sub[notification_name] !== null) {
|
|
|
|
return sub[notification_name];
|
|
|
|
}
|
2019-11-26 02:37:12 +01:00
|
|
|
if (notification_name === 'wildcard_mentions_notify') {
|
|
|
|
return page_params[notification_name];
|
|
|
|
}
|
2019-06-14 19:41:26 +02:00
|
|
|
return page_params["enable_stream_" + notification_name];
|
|
|
|
};
|
|
|
|
|
2017-04-28 17:55:22 +02:00
|
|
|
exports.update_calculated_fields = function (sub) {
|
|
|
|
sub.is_admin = page_params.is_admin;
|
2018-02-12 16:02:19 +01:00
|
|
|
// Admin can change any stream's name & description either stream is public or
|
|
|
|
// private, subscribed or unsubscribed.
|
|
|
|
sub.can_change_name_description = page_params.is_admin;
|
2018-01-04 04:47:51 +01:00
|
|
|
// If stream is public then any user can subscribe. If stream is private then only
|
|
|
|
// subscribed users can unsubscribe.
|
2018-10-31 18:52:40 +01:00
|
|
|
// Guest users can't subscribe themselves to any stream.
|
|
|
|
sub.should_display_subscription_button = sub.subscribed ||
|
|
|
|
!page_params.is_guest && !sub.invite_only;
|
2018-04-30 15:03:23 +02:00
|
|
|
sub.should_display_preview_button = sub.subscribed || !sub.invite_only ||
|
|
|
|
sub.previously_subscribed;
|
2018-05-22 01:14:18 +02:00
|
|
|
sub.can_change_stream_permissions = page_params.is_admin && (
|
2018-05-22 01:12:50 +02:00
|
|
|
!sub.invite_only || sub.subscribed);
|
2018-02-14 17:59:01 +01:00
|
|
|
// User can add other users to stream if stream is public or user is subscribed to stream.
|
2018-11-05 13:30:40 +01:00
|
|
|
// Guest users can't access subscribers of any(public or private) non-subscribed streams.
|
|
|
|
sub.can_access_subscribers = page_params.is_admin || sub.subscribed || !page_params.is_guest &&
|
|
|
|
!sub.invite_only;
|
2018-12-14 19:18:24 +01:00
|
|
|
sub.preview_url = hash_util.by_stream_uri(sub.stream_id);
|
2018-10-31 15:35:34 +01:00
|
|
|
sub.can_add_subscribers = !page_params.is_guest && (!sub.invite_only || sub.subscribed);
|
2019-02-05 16:39:46 +01:00
|
|
|
if (sub.rendered_description !== undefined) {
|
|
|
|
sub.rendered_description = sub.rendered_description.replace('<p>', '').replace('</p>', '');
|
|
|
|
}
|
2017-04-28 17:55:22 +02:00
|
|
|
exports.update_subscribers_count(sub);
|
2019-02-13 10:22:16 +01:00
|
|
|
|
|
|
|
// Apply the defaults for our notification settings for rendering.
|
2020-03-28 18:03:43 +01:00
|
|
|
for (const setting of settings_config.stream_specific_notification_settings) {
|
2019-06-14 19:08:36 +02:00
|
|
|
sub[setting + "_display"] = exports.receives_notifications(sub.name, setting);
|
js: Automatically convert _.each to for…of.
This commit was automatically generated by the following script,
followed by lint --fix and a few small manual lint-related cleanups.
import * as babelParser from "recast/parsers/babel";
import * as recast from "recast";
import * as tsParser from "recast/parsers/typescript";
import { builders as b, namedTypes as n } from "ast-types";
import { Context } from "ast-types/lib/path-visitor";
import K from "ast-types/gen/kinds";
import { NodePath } from "ast-types/lib/node-path";
import assert from "assert";
import fs from "fs";
import path from "path";
import process from "process";
const checkExpression = (node: n.Node): node is K.ExpressionKind =>
n.Expression.check(node);
const checkStatement = (node: n.Node): node is K.StatementKind =>
n.Statement.check(node);
for (const file of process.argv.slice(2)) {
console.log("Parsing", file);
const ast = recast.parse(fs.readFileSync(file, { encoding: "utf8" }), {
parser: path.extname(file) === ".ts" ? tsParser : babelParser,
});
let changed = false;
let inLoop = false;
let replaceReturn = false;
const visitLoop = (...args: string[]) =>
function(this: Context, path: NodePath) {
for (const arg of args) {
this.visit(path.get(arg));
}
const old = { inLoop };
inLoop = true;
this.visit(path.get("body"));
inLoop = old.inLoop;
return false;
};
recast.visit(ast, {
visitDoWhileStatement: visitLoop("test"),
visitExpressionStatement(path) {
const { expression, comments } = path.node;
let valueOnly;
if (
n.CallExpression.check(expression) &&
n.MemberExpression.check(expression.callee) &&
!expression.callee.computed &&
n.Identifier.check(expression.callee.object) &&
expression.callee.object.name === "_" &&
n.Identifier.check(expression.callee.property) &&
["each", "forEach"].includes(expression.callee.property.name) &&
[2, 3].includes(expression.arguments.length) &&
checkExpression(expression.arguments[0]) &&
(n.FunctionExpression.check(expression.arguments[1]) ||
n.ArrowFunctionExpression.check(expression.arguments[1])) &&
[1, 2].includes(expression.arguments[1].params.length) &&
n.Identifier.check(expression.arguments[1].params[0]) &&
((valueOnly = expression.arguments[1].params[1] === undefined) ||
n.Identifier.check(expression.arguments[1].params[1])) &&
(expression.arguments[2] === undefined ||
n.ThisExpression.check(expression.arguments[2]))
) {
const old = { inLoop, replaceReturn };
inLoop = false;
replaceReturn = true;
this.visit(
path
.get("expression")
.get("arguments")
.get(1)
.get("body")
);
inLoop = old.inLoop;
replaceReturn = old.replaceReturn;
const [right, { body, params }] = expression.arguments;
const loop = b.forOfStatement(
b.variableDeclaration("let", [
b.variableDeclarator(
valueOnly ? params[0] : b.arrayPattern([params[1], params[0]])
),
]),
valueOnly
? right
: b.callExpression(
b.memberExpression(right, b.identifier("entries")),
[]
),
checkStatement(body) ? body : b.expressionStatement(body)
);
loop.comments = comments;
path.replace(loop);
changed = true;
}
this.traverse(path);
},
visitForStatement: visitLoop("init", "test", "update"),
visitForInStatement: visitLoop("left", "right"),
visitForOfStatement: visitLoop("left", "right"),
visitFunction(path) {
this.visit(path.get("params"));
const old = { replaceReturn };
replaceReturn = false;
this.visit(path.get("body"));
replaceReturn = old.replaceReturn;
return false;
},
visitReturnStatement(path) {
if (replaceReturn) {
assert(!inLoop); // could use labeled continue if this ever fires
const { argument, comments } = path.node;
if (argument === null) {
const s = b.continueStatement();
s.comments = comments;
path.replace(s);
} else {
const s = b.expressionStatement(argument);
s.comments = comments;
path.replace(s, b.continueStatement());
}
return false;
}
this.traverse(path);
},
visitWhileStatement: visitLoop("test"),
});
if (changed) {
console.log("Writing", file);
fs.writeFileSync(file, recast.print(ast).code, { encoding: "utf8" });
}
}
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-02-06 06:19:47 +01:00
|
|
|
}
|
2017-04-28 17:55:22 +02:00
|
|
|
};
|
|
|
|
|
2013-08-15 21:11:07 +02:00
|
|
|
exports.all_subscribed_streams_are_in_home_view = function () {
|
2020-02-08 05:11:31 +01:00
|
|
|
return exports.subscribed_subs().every(sub => !sub.is_muted);
|
2013-08-15 21:11:07 +02:00
|
|
|
};
|
|
|
|
|
2014-01-15 20:59:31 +01:00
|
|
|
exports.home_view_stream_names = function () {
|
2020-02-08 03:33:46 +01:00
|
|
|
const home_view_subs = exports.subscribed_subs().filter(sub => !sub.is_muted);
|
js: Convert _.map(a, …) to a.map(…).
And convert the corresponding function expressions to arrow style
while we’re here.
import * as babelParser from "recast/parsers/babel";
import * as recast from "recast";
import * as tsParser from "recast/parsers/typescript";
import { builders as b, namedTypes as n } from "ast-types";
import K from "ast-types/gen/kinds";
import fs from "fs";
import path from "path";
import process from "process";
const checkExpression = (node: n.Node): node is K.ExpressionKind =>
n.Expression.check(node);
for (const file of process.argv.slice(2)) {
console.log("Parsing", file);
const ast = recast.parse(fs.readFileSync(file, { encoding: "utf8" }), {
parser: path.extname(file) === ".ts" ? tsParser : babelParser,
});
let changed = false;
recast.visit(ast, {
visitCallExpression(path) {
const { callee, arguments: args } = path.node;
if (
n.MemberExpression.check(callee) &&
!callee.computed &&
n.Identifier.check(callee.object) &&
callee.object.name === "_" &&
n.Identifier.check(callee.property) &&
callee.property.name === "map" &&
args.length === 2 &&
checkExpression(args[0]) &&
checkExpression(args[1])
) {
const [arr, fn] = args;
path.replace(
b.callExpression(b.memberExpression(arr, b.identifier("map")), [
n.FunctionExpression.check(fn) ||
n.ArrowFunctionExpression.check(fn)
? b.arrowFunctionExpression(
fn.params,
n.BlockStatement.check(fn.body) &&
fn.body.body.length === 1 &&
n.ReturnStatement.check(fn.body.body[0])
? fn.body.body[0].argument || b.identifier("undefined")
: fn.body
)
: fn,
])
);
changed = true;
}
this.traverse(path);
},
});
if (changed) {
console.log("Writing", file);
fs.writeFileSync(file, recast.print(ast).code, { encoding: "utf8" });
}
}
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-02-08 02:43:49 +01:00
|
|
|
return home_view_subs.map(sub => sub.name);
|
2014-01-15 20:59:31 +01:00
|
|
|
};
|
|
|
|
|
2013-08-15 21:11:07 +02:00
|
|
|
exports.canonicalized_name = function (stream_name) {
|
2018-05-06 21:43:17 +02:00
|
|
|
return stream_name.toString().toLowerCase();
|
2013-08-15 21:11:07 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
exports.get_color = function (stream_name) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub(stream_name);
|
2013-08-15 21:11:07 +02:00
|
|
|
if (sub === undefined) {
|
|
|
|
return stream_color.default_color;
|
|
|
|
}
|
|
|
|
return sub.color;
|
|
|
|
};
|
|
|
|
|
2019-05-21 09:33:21 +02:00
|
|
|
exports.is_muted = function (stream_id) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub_by_id(stream_id);
|
2019-05-21 09:33:21 +02:00
|
|
|
// Return true for undefined streams
|
|
|
|
if (sub === undefined) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return sub.is_muted;
|
2017-05-13 20:54:53 +02:00
|
|
|
};
|
|
|
|
|
2019-05-21 09:33:21 +02:00
|
|
|
exports.is_stream_muted_by_name = function (stream_name) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub(stream_name);
|
2019-05-21 09:33:21 +02:00
|
|
|
// Return true for undefined streams
|
|
|
|
if (sub === undefined) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return sub.is_muted;
|
2013-08-15 21:11:07 +02:00
|
|
|
};
|
|
|
|
|
2019-05-21 09:33:21 +02:00
|
|
|
exports.is_notifications_stream_muted = function () {
|
2020-04-14 12:55:18 +02:00
|
|
|
return exports.is_muted(page_params.realm_notifications_stream_id);
|
2017-05-13 20:54:53 +02:00
|
|
|
};
|
|
|
|
|
2013-08-15 21:11:07 +02:00
|
|
|
exports.is_subscribed = function (stream_name) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub(stream_name);
|
2013-08-15 21:11:07 +02:00
|
|
|
return sub !== undefined && sub.subscribed;
|
|
|
|
};
|
|
|
|
|
2017-05-14 16:32:18 +02:00
|
|
|
exports.id_is_subscribed = function (stream_id) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = subs_by_stream_id.get(stream_id);
|
2017-05-14 16:32:18 +02:00
|
|
|
return sub !== undefined && sub.subscribed;
|
|
|
|
};
|
|
|
|
|
2013-08-15 21:11:07 +02:00
|
|
|
exports.get_invite_only = function (stream_name) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub(stream_name);
|
2013-08-15 21:11:07 +02:00
|
|
|
if (sub === undefined) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
return sub.invite_only;
|
|
|
|
};
|
|
|
|
|
2020-02-04 21:50:55 +01:00
|
|
|
exports.get_stream_post_policy = function (stream_name) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub(stream_name);
|
2018-05-14 12:06:56 +02:00
|
|
|
if (sub === undefined) {
|
|
|
|
return false;
|
|
|
|
}
|
2020-02-04 21:50:55 +01:00
|
|
|
return sub.stream_post_policy;
|
2018-05-14 12:06:56 +02:00
|
|
|
};
|
|
|
|
|
2019-03-13 13:47:57 +01:00
|
|
|
exports.all_topics_in_cache = function (sub) {
|
2019-03-15 17:34:18 +01:00
|
|
|
// Checks whether this browser's cache of contiguous messages
|
|
|
|
// (used to locally render narrows) in message_list.all has all
|
2019-03-13 13:47:57 +01:00
|
|
|
// messages from a given stream, and thus all historical topics
|
2019-03-15 17:34:18 +01:00
|
|
|
// for it. Because message_list.all is a range, we just need to
|
|
|
|
// compare it to the range of history on the stream.
|
2019-03-13 13:47:57 +01:00
|
|
|
|
|
|
|
// If the cache isn't initialized, it's a clear false.
|
|
|
|
if (message_list.all === undefined || message_list.all.empty()) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
// If the cache doesn't have the latest messages, we can't be sure
|
|
|
|
// we have all topics.
|
2020-05-30 09:45:12 +02:00
|
|
|
if (!message_list.all.data.fetch_status.has_found_newest()) {
|
2019-03-13 13:47:57 +01:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (sub.first_message_id === null) {
|
|
|
|
// If the stream has no message history, we have it all
|
|
|
|
// vacuously. This should be a very rare condition, since
|
|
|
|
// stream creation sends a message.
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Now, we can just compare the first cached message to the first
|
|
|
|
// message ID in the stream; if it's older, we're good, otherwise,
|
|
|
|
// we might be missing the oldest topics in this stream in our
|
|
|
|
// cache.
|
2019-11-02 00:06:25 +01:00
|
|
|
const first_cached_message = message_list.all.first();
|
2019-03-13 13:47:57 +01:00
|
|
|
return first_cached_message.id <= sub.first_message_id;
|
|
|
|
};
|
|
|
|
|
2017-08-22 18:20:00 +02:00
|
|
|
exports.set_realm_default_streams = function (realm_default_streams) {
|
|
|
|
default_stream_ids.clear();
|
|
|
|
|
|
|
|
realm_default_streams.forEach(function (stream) {
|
2019-12-30 13:20:45 +01:00
|
|
|
default_stream_ids.add(stream.stream_id);
|
2017-08-22 18:20:00 +02:00
|
|
|
});
|
|
|
|
};
|
|
|
|
|
2020-03-22 13:28:16 +01:00
|
|
|
exports.get_default_stream_ids = function () {
|
|
|
|
return Array.from(default_stream_ids);
|
2018-07-22 11:30:38 +02:00
|
|
|
};
|
|
|
|
|
2017-08-22 18:20:00 +02:00
|
|
|
exports.is_default_stream_id = function (stream_id) {
|
|
|
|
return default_stream_ids.has(stream_id);
|
2017-03-21 21:10:20 +01:00
|
|
|
};
|
|
|
|
|
2013-08-19 19:25:44 +02:00
|
|
|
exports.get_name = function (stream_name) {
|
|
|
|
// This returns the actual name of a stream if we are subscribed to
|
|
|
|
// it (i.e "Denmark" vs. "denmark"), while falling thru to
|
|
|
|
// stream_name if we don't have a subscription. (Stream names
|
|
|
|
// are case-insensitive, but we try to display the actual name
|
|
|
|
// when we know it.)
|
2017-05-11 23:25:42 +02:00
|
|
|
//
|
|
|
|
// This function will also do the right thing if we have
|
|
|
|
// an old stream name in memory for a recently renamed stream.
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub_by_name(stream_name);
|
2013-08-19 19:25:44 +02:00
|
|
|
if (sub === undefined) {
|
|
|
|
return stream_name;
|
|
|
|
}
|
|
|
|
return sub.name;
|
|
|
|
};
|
|
|
|
|
2017-07-15 08:31:55 +02:00
|
|
|
exports.maybe_get_stream_name = function (stream_id) {
|
|
|
|
if (!stream_id) {
|
|
|
|
return;
|
|
|
|
}
|
2019-11-02 00:06:25 +01:00
|
|
|
const stream = exports.get_sub_by_id(stream_id);
|
2017-07-15 08:31:55 +02:00
|
|
|
|
|
|
|
if (!stream) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
return stream.name;
|
|
|
|
};
|
|
|
|
|
2016-10-30 15:47:20 +01:00
|
|
|
exports.set_subscribers = function (sub, user_ids) {
|
2020-02-03 07:48:50 +01:00
|
|
|
sub.subscribers = new LazySet(user_ids || []);
|
2016-10-30 15:47:20 +01:00
|
|
|
};
|
|
|
|
|
2016-11-08 16:06:50 +01:00
|
|
|
exports.add_subscriber = function (stream_name, user_id) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub(stream_name);
|
2016-10-15 21:13:09 +02:00
|
|
|
if (typeof sub === 'undefined') {
|
|
|
|
blueslip.warn("We got an add_subscriber call for a non-existent stream.");
|
2017-03-03 23:14:06 +01:00
|
|
|
return false;
|
2013-09-07 02:48:44 +02:00
|
|
|
}
|
2020-02-05 14:30:59 +01:00
|
|
|
const person = people.get_by_user_id(user_id);
|
2016-11-08 16:06:50 +01:00
|
|
|
if (person === undefined) {
|
|
|
|
blueslip.error("We tried to add invalid subscriber: " + user_id);
|
2017-03-03 23:14:06 +01:00
|
|
|
return false;
|
2016-10-30 15:47:20 +01:00
|
|
|
}
|
2019-12-27 13:16:22 +01:00
|
|
|
sub.subscribers.add(user_id);
|
2017-03-03 23:14:06 +01:00
|
|
|
|
|
|
|
return true;
|
2013-09-07 02:48:44 +02:00
|
|
|
};
|
|
|
|
|
2016-11-09 16:05:30 +01:00
|
|
|
exports.remove_subscriber = function (stream_name, user_id) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub(stream_name);
|
2016-10-25 20:06:25 +02:00
|
|
|
if (typeof sub === 'undefined') {
|
|
|
|
blueslip.warn("We got a remove_subscriber call for a non-existent stream " + stream_name);
|
2017-03-03 23:14:06 +01:00
|
|
|
return false;
|
2013-09-07 02:48:44 +02:00
|
|
|
}
|
2016-11-09 16:05:30 +01:00
|
|
|
if (!sub.subscribers.has(user_id)) {
|
|
|
|
blueslip.warn("We tried to remove invalid subscriber: " + user_id);
|
2017-03-03 23:14:06 +01:00
|
|
|
return false;
|
2016-10-30 15:47:20 +01:00
|
|
|
}
|
|
|
|
|
2020-02-03 07:41:38 +01:00
|
|
|
sub.subscribers.delete(user_id);
|
2017-03-03 23:14:06 +01:00
|
|
|
|
|
|
|
return true;
|
2013-09-07 02:48:44 +02:00
|
|
|
};
|
|
|
|
|
2018-04-06 05:22:07 +02:00
|
|
|
exports.is_user_subscribed = function (stream_name, user_id) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub(stream_name);
|
2018-03-19 05:52:37 +01:00
|
|
|
if (typeof sub === 'undefined' || !sub.can_access_subscribers) {
|
2018-04-03 00:36:31 +02:00
|
|
|
// If we don't know about the stream, or we ourselves cannot access subscriber list,
|
2013-09-07 04:22:18 +02:00
|
|
|
// so we return undefined (treated as falsy if not explicitly handled).
|
2018-04-06 05:22:07 +02:00
|
|
|
blueslip.warn("We got a is_user_subscribed call for a non-existent or inaccessible stream.");
|
2018-03-13 13:04:16 +01:00
|
|
|
return;
|
2013-09-07 02:48:44 +02:00
|
|
|
}
|
2018-04-06 05:22:07 +02:00
|
|
|
if (typeof user_id === "undefined") {
|
|
|
|
blueslip.warn("Undefined user_id passed to function is_user_subscribed");
|
|
|
|
return;
|
2016-10-30 15:47:20 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
return sub.subscribers.has(user_id);
|
2013-09-07 02:48:44 +02:00
|
|
|
};
|
|
|
|
|
2016-10-15 21:10:10 +02:00
|
|
|
exports.create_streams = function (streams) {
|
js: Automatically convert _.each to for…of.
This commit was automatically generated by the following script,
followed by lint --fix and a few small manual lint-related cleanups.
import * as babelParser from "recast/parsers/babel";
import * as recast from "recast";
import * as tsParser from "recast/parsers/typescript";
import { builders as b, namedTypes as n } from "ast-types";
import { Context } from "ast-types/lib/path-visitor";
import K from "ast-types/gen/kinds";
import { NodePath } from "ast-types/lib/node-path";
import assert from "assert";
import fs from "fs";
import path from "path";
import process from "process";
const checkExpression = (node: n.Node): node is K.ExpressionKind =>
n.Expression.check(node);
const checkStatement = (node: n.Node): node is K.StatementKind =>
n.Statement.check(node);
for (const file of process.argv.slice(2)) {
console.log("Parsing", file);
const ast = recast.parse(fs.readFileSync(file, { encoding: "utf8" }), {
parser: path.extname(file) === ".ts" ? tsParser : babelParser,
});
let changed = false;
let inLoop = false;
let replaceReturn = false;
const visitLoop = (...args: string[]) =>
function(this: Context, path: NodePath) {
for (const arg of args) {
this.visit(path.get(arg));
}
const old = { inLoop };
inLoop = true;
this.visit(path.get("body"));
inLoop = old.inLoop;
return false;
};
recast.visit(ast, {
visitDoWhileStatement: visitLoop("test"),
visitExpressionStatement(path) {
const { expression, comments } = path.node;
let valueOnly;
if (
n.CallExpression.check(expression) &&
n.MemberExpression.check(expression.callee) &&
!expression.callee.computed &&
n.Identifier.check(expression.callee.object) &&
expression.callee.object.name === "_" &&
n.Identifier.check(expression.callee.property) &&
["each", "forEach"].includes(expression.callee.property.name) &&
[2, 3].includes(expression.arguments.length) &&
checkExpression(expression.arguments[0]) &&
(n.FunctionExpression.check(expression.arguments[1]) ||
n.ArrowFunctionExpression.check(expression.arguments[1])) &&
[1, 2].includes(expression.arguments[1].params.length) &&
n.Identifier.check(expression.arguments[1].params[0]) &&
((valueOnly = expression.arguments[1].params[1] === undefined) ||
n.Identifier.check(expression.arguments[1].params[1])) &&
(expression.arguments[2] === undefined ||
n.ThisExpression.check(expression.arguments[2]))
) {
const old = { inLoop, replaceReturn };
inLoop = false;
replaceReturn = true;
this.visit(
path
.get("expression")
.get("arguments")
.get(1)
.get("body")
);
inLoop = old.inLoop;
replaceReturn = old.replaceReturn;
const [right, { body, params }] = expression.arguments;
const loop = b.forOfStatement(
b.variableDeclaration("let", [
b.variableDeclarator(
valueOnly ? params[0] : b.arrayPattern([params[1], params[0]])
),
]),
valueOnly
? right
: b.callExpression(
b.memberExpression(right, b.identifier("entries")),
[]
),
checkStatement(body) ? body : b.expressionStatement(body)
);
loop.comments = comments;
path.replace(loop);
changed = true;
}
this.traverse(path);
},
visitForStatement: visitLoop("init", "test", "update"),
visitForInStatement: visitLoop("left", "right"),
visitForOfStatement: visitLoop("left", "right"),
visitFunction(path) {
this.visit(path.get("params"));
const old = { replaceReturn };
replaceReturn = false;
this.visit(path.get("body"));
replaceReturn = old.replaceReturn;
return false;
},
visitReturnStatement(path) {
if (replaceReturn) {
assert(!inLoop); // could use labeled continue if this ever fires
const { argument, comments } = path.node;
if (argument === null) {
const s = b.continueStatement();
s.comments = comments;
path.replace(s);
} else {
const s = b.expressionStatement(argument);
s.comments = comments;
path.replace(s, b.continueStatement());
}
return false;
}
this.traverse(path);
},
visitWhileStatement: visitLoop("test"),
});
if (changed) {
console.log("Writing", file);
fs.writeFileSync(file, recast.print(ast).code, { encoding: "utf8" });
}
}
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-02-06 06:19:47 +01:00
|
|
|
for (const stream of streams) {
|
2016-10-30 15:47:20 +01:00
|
|
|
// We handle subscriber stuff in other events.
|
2020-02-09 04:21:30 +01:00
|
|
|
const attrs = {
|
2016-10-30 15:47:20 +01:00
|
|
|
subscribers: [],
|
2017-01-12 00:17:43 +01:00
|
|
|
subscribed: false,
|
2020-02-09 04:21:30 +01:00
|
|
|
...stream,
|
|
|
|
};
|
2016-10-15 21:10:10 +02:00
|
|
|
exports.create_sub_from_server_data(stream.name, attrs);
|
js: Automatically convert _.each to for…of.
This commit was automatically generated by the following script,
followed by lint --fix and a few small manual lint-related cleanups.
import * as babelParser from "recast/parsers/babel";
import * as recast from "recast";
import * as tsParser from "recast/parsers/typescript";
import { builders as b, namedTypes as n } from "ast-types";
import { Context } from "ast-types/lib/path-visitor";
import K from "ast-types/gen/kinds";
import { NodePath } from "ast-types/lib/node-path";
import assert from "assert";
import fs from "fs";
import path from "path";
import process from "process";
const checkExpression = (node: n.Node): node is K.ExpressionKind =>
n.Expression.check(node);
const checkStatement = (node: n.Node): node is K.StatementKind =>
n.Statement.check(node);
for (const file of process.argv.slice(2)) {
console.log("Parsing", file);
const ast = recast.parse(fs.readFileSync(file, { encoding: "utf8" }), {
parser: path.extname(file) === ".ts" ? tsParser : babelParser,
});
let changed = false;
let inLoop = false;
let replaceReturn = false;
const visitLoop = (...args: string[]) =>
function(this: Context, path: NodePath) {
for (const arg of args) {
this.visit(path.get(arg));
}
const old = { inLoop };
inLoop = true;
this.visit(path.get("body"));
inLoop = old.inLoop;
return false;
};
recast.visit(ast, {
visitDoWhileStatement: visitLoop("test"),
visitExpressionStatement(path) {
const { expression, comments } = path.node;
let valueOnly;
if (
n.CallExpression.check(expression) &&
n.MemberExpression.check(expression.callee) &&
!expression.callee.computed &&
n.Identifier.check(expression.callee.object) &&
expression.callee.object.name === "_" &&
n.Identifier.check(expression.callee.property) &&
["each", "forEach"].includes(expression.callee.property.name) &&
[2, 3].includes(expression.arguments.length) &&
checkExpression(expression.arguments[0]) &&
(n.FunctionExpression.check(expression.arguments[1]) ||
n.ArrowFunctionExpression.check(expression.arguments[1])) &&
[1, 2].includes(expression.arguments[1].params.length) &&
n.Identifier.check(expression.arguments[1].params[0]) &&
((valueOnly = expression.arguments[1].params[1] === undefined) ||
n.Identifier.check(expression.arguments[1].params[1])) &&
(expression.arguments[2] === undefined ||
n.ThisExpression.check(expression.arguments[2]))
) {
const old = { inLoop, replaceReturn };
inLoop = false;
replaceReturn = true;
this.visit(
path
.get("expression")
.get("arguments")
.get(1)
.get("body")
);
inLoop = old.inLoop;
replaceReturn = old.replaceReturn;
const [right, { body, params }] = expression.arguments;
const loop = b.forOfStatement(
b.variableDeclaration("let", [
b.variableDeclarator(
valueOnly ? params[0] : b.arrayPattern([params[1], params[0]])
),
]),
valueOnly
? right
: b.callExpression(
b.memberExpression(right, b.identifier("entries")),
[]
),
checkStatement(body) ? body : b.expressionStatement(body)
);
loop.comments = comments;
path.replace(loop);
changed = true;
}
this.traverse(path);
},
visitForStatement: visitLoop("init", "test", "update"),
visitForInStatement: visitLoop("left", "right"),
visitForOfStatement: visitLoop("left", "right"),
visitFunction(path) {
this.visit(path.get("params"));
const old = { replaceReturn };
replaceReturn = false;
this.visit(path.get("body"));
replaceReturn = old.replaceReturn;
return false;
},
visitReturnStatement(path) {
if (replaceReturn) {
assert(!inLoop); // could use labeled continue if this ever fires
const { argument, comments } = path.node;
if (argument === null) {
const s = b.continueStatement();
s.comments = comments;
path.replace(s);
} else {
const s = b.expressionStatement(argument);
s.comments = comments;
path.replace(s, b.continueStatement());
}
return false;
}
this.traverse(path);
},
visitWhileStatement: visitLoop("test"),
});
if (changed) {
console.log("Writing", file);
fs.writeFileSync(file, recast.print(ast).code, { encoding: "utf8" });
}
}
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-02-06 06:19:47 +01:00
|
|
|
}
|
2016-10-15 21:10:10 +02:00
|
|
|
};
|
|
|
|
|
2016-10-15 20:17:32 +02:00
|
|
|
exports.create_sub_from_server_data = function (stream_name, attrs) {
|
2019-11-02 00:06:25 +01:00
|
|
|
let sub = exports.get_sub(stream_name);
|
2016-10-15 20:17:32 +02:00
|
|
|
if (sub !== undefined) {
|
|
|
|
// We've already created this subscription, no need to continue.
|
|
|
|
return sub;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!attrs.stream_id) {
|
|
|
|
// fail fast (blueslip.fatal will throw an error on our behalf)
|
|
|
|
blueslip.fatal("We cannot create a sub without a stream_id");
|
|
|
|
}
|
|
|
|
|
|
|
|
// Our internal data structure for subscriptions is mostly plain dictionaries,
|
|
|
|
// so we just reuse the attrs that are passed in to us, but we encapsulate how
|
2018-11-29 21:50:20 +01:00
|
|
|
// we handle subscribers. We defensively remove the `subscribers` field from
|
|
|
|
// the original `attrs` object, which will get thrown away. (We used to make
|
|
|
|
// a copy of the object with `_.omit(attrs, 'subscribers')`, but `_.omit` is
|
|
|
|
// slow enough to show up in timings when you have 1000s of streams.
|
|
|
|
|
2019-11-02 00:06:25 +01:00
|
|
|
const subscriber_user_ids = attrs.subscribers;
|
2016-10-15 20:17:32 +02:00
|
|
|
|
2018-11-29 21:50:20 +01:00
|
|
|
delete attrs.subscribers;
|
|
|
|
|
2020-02-09 04:21:30 +01:00
|
|
|
sub = {
|
2016-10-15 20:17:32 +02:00
|
|
|
name: stream_name,
|
2017-04-20 08:03:44 +02:00
|
|
|
render_subscribers: !page_params.realm_is_zephyr_mirror_realm || attrs.invite_only === true,
|
2016-10-15 20:17:32 +02:00
|
|
|
subscribed: true,
|
2017-04-28 15:59:30 +02:00
|
|
|
newly_subscribed: false,
|
2019-05-15 08:54:25 +02:00
|
|
|
is_muted: false,
|
2016-10-15 20:17:32 +02:00
|
|
|
invite_only: false,
|
2017-04-29 07:01:46 +02:00
|
|
|
desktop_notifications: page_params.enable_stream_desktop_notifications,
|
2019-06-11 08:47:49 +02:00
|
|
|
audible_notifications: page_params.enable_stream_audible_notifications,
|
2017-08-17 16:55:32 +02:00
|
|
|
push_notifications: page_params.enable_stream_push_notifications,
|
2017-11-21 05:58:26 +01:00
|
|
|
email_notifications: page_params.enable_stream_email_notifications,
|
2020-06-06 20:26:56 +02:00
|
|
|
wildcard_mentions_notify: page_params.wildcard_mentions_notify,
|
2017-01-12 00:17:43 +01:00
|
|
|
description: '',
|
2019-02-05 16:39:46 +01:00
|
|
|
rendered_description: '',
|
2019-03-13 13:47:57 +01:00
|
|
|
first_message_id: attrs.first_message_id,
|
2020-02-09 04:21:30 +01:00
|
|
|
...attrs,
|
|
|
|
};
|
2016-10-15 20:17:32 +02:00
|
|
|
|
2016-10-30 15:47:20 +01:00
|
|
|
exports.set_subscribers(sub, subscriber_user_ids);
|
2016-10-15 20:17:32 +02:00
|
|
|
|
|
|
|
if (!sub.color) {
|
2018-11-28 23:12:40 +01:00
|
|
|
sub.color = color_data.pick_color();
|
2016-10-15 20:17:32 +02:00
|
|
|
}
|
|
|
|
|
2018-03-20 22:28:57 +01:00
|
|
|
exports.update_calculated_fields(sub);
|
|
|
|
|
2020-02-09 22:02:55 +01:00
|
|
|
exports.add_sub(sub);
|
2016-10-15 20:17:32 +02:00
|
|
|
|
|
|
|
return sub;
|
|
|
|
};
|
|
|
|
|
2020-02-27 13:57:11 +01:00
|
|
|
exports.get_unmatched_streams_for_notification_settings = function () {
|
|
|
|
const subscribed_rows = exports.subscribed_subs();
|
|
|
|
subscribed_rows.sort((a, b) => util.strcmp(a.name, b.name));
|
|
|
|
|
|
|
|
const notification_settings = [];
|
|
|
|
for (const row of subscribed_rows) {
|
|
|
|
const settings_values = {};
|
|
|
|
let make_table_row = false;
|
2020-03-28 18:03:43 +01:00
|
|
|
for (const notification_name of settings_config.stream_specific_notification_settings) {
|
2020-02-27 13:57:11 +01:00
|
|
|
const prepend = notification_name === 'wildcard_mentions_notify' ? "" : "enable_stream_";
|
|
|
|
const default_setting = page_params[prepend + notification_name];
|
|
|
|
const stream_setting = exports.receives_notifications(row.name, notification_name);
|
|
|
|
|
|
|
|
settings_values[notification_name] = stream_setting;
|
|
|
|
if (stream_setting !== default_setting) {
|
|
|
|
make_table_row = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// We do not need to display the streams whose settings
|
|
|
|
// match with the global settings defined by the user.
|
|
|
|
if (make_table_row) {
|
|
|
|
settings_values.stream_name = row.name;
|
|
|
|
settings_values.stream_id = row.stream_id;
|
|
|
|
settings_values.invite_only = row.invite_only;
|
|
|
|
settings_values.is_web_public = row.is_web_public;
|
|
|
|
|
|
|
|
notification_settings.push(settings_values);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return notification_settings;
|
|
|
|
};
|
|
|
|
|
2016-10-25 21:45:19 +02:00
|
|
|
exports.get_streams_for_settings_page = function () {
|
2018-07-30 19:44:57 +02:00
|
|
|
// TODO: This function is only used for copy-from-stream, so
|
|
|
|
// the current name is slightly misleading now, plus
|
|
|
|
// it's not entirely clear we need unsubscribed streams
|
|
|
|
// for that. Also we may be revisiting that UI.
|
|
|
|
|
2016-10-17 17:48:56 +02:00
|
|
|
// Build up our list of subscribed streams from the data we already have.
|
2019-11-02 00:06:25 +01:00
|
|
|
const subscribed_rows = exports.subscribed_subs();
|
|
|
|
const unsubscribed_rows = exports.unsubscribed_subs();
|
2016-10-17 17:48:56 +02:00
|
|
|
|
|
|
|
// Sort and combine all our streams.
|
2018-12-07 21:21:39 +01:00
|
|
|
function by_name(a, b) {
|
2016-10-17 17:48:56 +02:00
|
|
|
return util.strcmp(a.name, b.name);
|
|
|
|
}
|
|
|
|
subscribed_rows.sort(by_name);
|
|
|
|
unsubscribed_rows.sort(by_name);
|
2019-11-02 00:06:25 +01:00
|
|
|
const all_subs = unsubscribed_rows.concat(subscribed_rows);
|
2016-10-17 17:48:56 +02:00
|
|
|
|
2016-03-14 06:38:43 +01:00
|
|
|
// Add in admin options and stream counts.
|
js: Automatically convert _.each to for…of.
This commit was automatically generated by the following script,
followed by lint --fix and a few small manual lint-related cleanups.
import * as babelParser from "recast/parsers/babel";
import * as recast from "recast";
import * as tsParser from "recast/parsers/typescript";
import { builders as b, namedTypes as n } from "ast-types";
import { Context } from "ast-types/lib/path-visitor";
import K from "ast-types/gen/kinds";
import { NodePath } from "ast-types/lib/node-path";
import assert from "assert";
import fs from "fs";
import path from "path";
import process from "process";
const checkExpression = (node: n.Node): node is K.ExpressionKind =>
n.Expression.check(node);
const checkStatement = (node: n.Node): node is K.StatementKind =>
n.Statement.check(node);
for (const file of process.argv.slice(2)) {
console.log("Parsing", file);
const ast = recast.parse(fs.readFileSync(file, { encoding: "utf8" }), {
parser: path.extname(file) === ".ts" ? tsParser : babelParser,
});
let changed = false;
let inLoop = false;
let replaceReturn = false;
const visitLoop = (...args: string[]) =>
function(this: Context, path: NodePath) {
for (const arg of args) {
this.visit(path.get(arg));
}
const old = { inLoop };
inLoop = true;
this.visit(path.get("body"));
inLoop = old.inLoop;
return false;
};
recast.visit(ast, {
visitDoWhileStatement: visitLoop("test"),
visitExpressionStatement(path) {
const { expression, comments } = path.node;
let valueOnly;
if (
n.CallExpression.check(expression) &&
n.MemberExpression.check(expression.callee) &&
!expression.callee.computed &&
n.Identifier.check(expression.callee.object) &&
expression.callee.object.name === "_" &&
n.Identifier.check(expression.callee.property) &&
["each", "forEach"].includes(expression.callee.property.name) &&
[2, 3].includes(expression.arguments.length) &&
checkExpression(expression.arguments[0]) &&
(n.FunctionExpression.check(expression.arguments[1]) ||
n.ArrowFunctionExpression.check(expression.arguments[1])) &&
[1, 2].includes(expression.arguments[1].params.length) &&
n.Identifier.check(expression.arguments[1].params[0]) &&
((valueOnly = expression.arguments[1].params[1] === undefined) ||
n.Identifier.check(expression.arguments[1].params[1])) &&
(expression.arguments[2] === undefined ||
n.ThisExpression.check(expression.arguments[2]))
) {
const old = { inLoop, replaceReturn };
inLoop = false;
replaceReturn = true;
this.visit(
path
.get("expression")
.get("arguments")
.get(1)
.get("body")
);
inLoop = old.inLoop;
replaceReturn = old.replaceReturn;
const [right, { body, params }] = expression.arguments;
const loop = b.forOfStatement(
b.variableDeclaration("let", [
b.variableDeclarator(
valueOnly ? params[0] : b.arrayPattern([params[1], params[0]])
),
]),
valueOnly
? right
: b.callExpression(
b.memberExpression(right, b.identifier("entries")),
[]
),
checkStatement(body) ? body : b.expressionStatement(body)
);
loop.comments = comments;
path.replace(loop);
changed = true;
}
this.traverse(path);
},
visitForStatement: visitLoop("init", "test", "update"),
visitForInStatement: visitLoop("left", "right"),
visitForOfStatement: visitLoop("left", "right"),
visitFunction(path) {
this.visit(path.get("params"));
const old = { replaceReturn };
replaceReturn = false;
this.visit(path.get("body"));
replaceReturn = old.replaceReturn;
return false;
},
visitReturnStatement(path) {
if (replaceReturn) {
assert(!inLoop); // could use labeled continue if this ever fires
const { argument, comments } = path.node;
if (argument === null) {
const s = b.continueStatement();
s.comments = comments;
path.replace(s);
} else {
const s = b.expressionStatement(argument);
s.comments = comments;
path.replace(s, b.continueStatement());
}
return false;
}
this.traverse(path);
},
visitWhileStatement: visitLoop("test"),
});
if (changed) {
console.log("Writing", file);
fs.writeFileSync(file, recast.print(ast).code, { encoding: "utf8" });
}
}
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-02-06 06:19:47 +01:00
|
|
|
for (const sub of all_subs) {
|
2017-04-28 17:55:22 +02:00
|
|
|
exports.update_calculated_fields(sub);
|
js: Automatically convert _.each to for…of.
This commit was automatically generated by the following script,
followed by lint --fix and a few small manual lint-related cleanups.
import * as babelParser from "recast/parsers/babel";
import * as recast from "recast";
import * as tsParser from "recast/parsers/typescript";
import { builders as b, namedTypes as n } from "ast-types";
import { Context } from "ast-types/lib/path-visitor";
import K from "ast-types/gen/kinds";
import { NodePath } from "ast-types/lib/node-path";
import assert from "assert";
import fs from "fs";
import path from "path";
import process from "process";
const checkExpression = (node: n.Node): node is K.ExpressionKind =>
n.Expression.check(node);
const checkStatement = (node: n.Node): node is K.StatementKind =>
n.Statement.check(node);
for (const file of process.argv.slice(2)) {
console.log("Parsing", file);
const ast = recast.parse(fs.readFileSync(file, { encoding: "utf8" }), {
parser: path.extname(file) === ".ts" ? tsParser : babelParser,
});
let changed = false;
let inLoop = false;
let replaceReturn = false;
const visitLoop = (...args: string[]) =>
function(this: Context, path: NodePath) {
for (const arg of args) {
this.visit(path.get(arg));
}
const old = { inLoop };
inLoop = true;
this.visit(path.get("body"));
inLoop = old.inLoop;
return false;
};
recast.visit(ast, {
visitDoWhileStatement: visitLoop("test"),
visitExpressionStatement(path) {
const { expression, comments } = path.node;
let valueOnly;
if (
n.CallExpression.check(expression) &&
n.MemberExpression.check(expression.callee) &&
!expression.callee.computed &&
n.Identifier.check(expression.callee.object) &&
expression.callee.object.name === "_" &&
n.Identifier.check(expression.callee.property) &&
["each", "forEach"].includes(expression.callee.property.name) &&
[2, 3].includes(expression.arguments.length) &&
checkExpression(expression.arguments[0]) &&
(n.FunctionExpression.check(expression.arguments[1]) ||
n.ArrowFunctionExpression.check(expression.arguments[1])) &&
[1, 2].includes(expression.arguments[1].params.length) &&
n.Identifier.check(expression.arguments[1].params[0]) &&
((valueOnly = expression.arguments[1].params[1] === undefined) ||
n.Identifier.check(expression.arguments[1].params[1])) &&
(expression.arguments[2] === undefined ||
n.ThisExpression.check(expression.arguments[2]))
) {
const old = { inLoop, replaceReturn };
inLoop = false;
replaceReturn = true;
this.visit(
path
.get("expression")
.get("arguments")
.get(1)
.get("body")
);
inLoop = old.inLoop;
replaceReturn = old.replaceReturn;
const [right, { body, params }] = expression.arguments;
const loop = b.forOfStatement(
b.variableDeclaration("let", [
b.variableDeclarator(
valueOnly ? params[0] : b.arrayPattern([params[1], params[0]])
),
]),
valueOnly
? right
: b.callExpression(
b.memberExpression(right, b.identifier("entries")),
[]
),
checkStatement(body) ? body : b.expressionStatement(body)
);
loop.comments = comments;
path.replace(loop);
changed = true;
}
this.traverse(path);
},
visitForStatement: visitLoop("init", "test", "update"),
visitForInStatement: visitLoop("left", "right"),
visitForOfStatement: visitLoop("left", "right"),
visitFunction(path) {
this.visit(path.get("params"));
const old = { replaceReturn };
replaceReturn = false;
this.visit(path.get("body"));
replaceReturn = old.replaceReturn;
return false;
},
visitReturnStatement(path) {
if (replaceReturn) {
assert(!inLoop); // could use labeled continue if this ever fires
const { argument, comments } = path.node;
if (argument === null) {
const s = b.continueStatement();
s.comments = comments;
path.replace(s);
} else {
const s = b.expressionStatement(argument);
s.comments = comments;
path.replace(s, b.continueStatement());
}
return false;
}
this.traverse(path);
},
visitWhileStatement: visitLoop("test"),
});
if (changed) {
console.log("Writing", file);
fs.writeFileSync(file, recast.print(ast).code, { encoding: "utf8" });
}
}
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-02-06 06:19:47 +01:00
|
|
|
}
|
2016-10-17 17:48:56 +02:00
|
|
|
|
2017-04-28 17:55:22 +02:00
|
|
|
return all_subs;
|
2016-10-17 17:48:56 +02:00
|
|
|
};
|
2016-10-17 16:38:15 +02:00
|
|
|
|
2020-03-31 17:13:01 +02:00
|
|
|
exports.sort_for_stream_settings = function (stream_ids, order) {
|
2018-07-29 15:26:45 +02:00
|
|
|
// TODO: We may want to simply use util.strcmp here,
|
|
|
|
// which uses Intl.Collator() when possible.
|
|
|
|
|
|
|
|
function name(stream_id) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const sub = exports.get_sub_by_id(stream_id);
|
2018-07-29 15:26:45 +02:00
|
|
|
if (!sub) {
|
|
|
|
return '';
|
|
|
|
}
|
|
|
|
return sub.name.toLocaleLowerCase();
|
|
|
|
}
|
|
|
|
|
2020-03-31 17:13:01 +02:00
|
|
|
function weekly_traffic(stream_id) {
|
|
|
|
const sub = exports.get_sub_by_id(stream_id);
|
|
|
|
if (sub && sub.is_old_stream) {
|
|
|
|
return sub.stream_weekly_traffic;
|
|
|
|
}
|
|
|
|
// don't intersperse new streams with zero-traffic existing streams
|
|
|
|
return -1;
|
|
|
|
}
|
|
|
|
|
|
|
|
function by_stream_name(id_a, id_b) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const stream_a_name = name(id_a);
|
|
|
|
const stream_b_name = name(id_b);
|
2018-07-29 15:26:45 +02:00
|
|
|
return String.prototype.localeCompare.call(stream_a_name, stream_b_name);
|
|
|
|
}
|
|
|
|
|
2020-03-31 17:13:01 +02:00
|
|
|
function by_subscriber_count(id_a, id_b) {
|
|
|
|
const out = exports.get_sub_by_id(id_b).subscribers.size
|
|
|
|
- exports.get_sub_by_id(id_a).subscribers.size;
|
|
|
|
if (out === 0) {
|
|
|
|
return by_stream_name(id_a, id_b);
|
|
|
|
}
|
|
|
|
return out;
|
|
|
|
}
|
|
|
|
|
|
|
|
function by_weekly_traffic(id_a, id_b) {
|
|
|
|
const out = weekly_traffic(id_b) - weekly_traffic(id_a);
|
|
|
|
if (out === 0) {
|
|
|
|
return by_stream_name(id_a, id_b);
|
|
|
|
}
|
|
|
|
return out;
|
|
|
|
}
|
|
|
|
|
2020-05-27 04:06:28 +02:00
|
|
|
const orders = new Map([
|
|
|
|
["by-stream-name", by_stream_name],
|
|
|
|
["by-subscriber-count", by_subscriber_count],
|
|
|
|
["by-weekly-traffic", by_weekly_traffic],
|
|
|
|
]);
|
2020-03-31 17:13:01 +02:00
|
|
|
|
2020-05-27 04:06:28 +02:00
|
|
|
if (order === undefined || !orders.has(order)) {
|
2020-03-31 17:13:01 +02:00
|
|
|
order = "by-stream-name";
|
|
|
|
}
|
|
|
|
|
2020-05-27 04:06:28 +02:00
|
|
|
stream_ids.sort(orders.get(order));
|
2018-07-29 15:26:45 +02:00
|
|
|
};
|
|
|
|
|
2017-08-22 20:51:41 +02:00
|
|
|
exports.get_streams_for_admin = function () {
|
|
|
|
// Sort and combine all our streams.
|
2018-12-07 21:21:39 +01:00
|
|
|
function by_name(a, b) {
|
2017-08-22 20:51:41 +02:00
|
|
|
return util.strcmp(a.name, b.name);
|
|
|
|
}
|
|
|
|
|
2020-02-04 23:46:56 +01:00
|
|
|
const subs = Array.from(stream_info.values());
|
2017-08-22 20:51:41 +02:00
|
|
|
|
|
|
|
subs.sort(by_name);
|
|
|
|
|
|
|
|
return subs;
|
|
|
|
};
|
|
|
|
|
2020-04-15 18:29:26 +02:00
|
|
|
/*
|
|
|
|
This module provides a common helper for finding the notification
|
|
|
|
stream, but we don't own the data. The `page_params` structure
|
|
|
|
is the authoritative source of this data, and it will be updated by
|
|
|
|
server_events_dispatch in case of changes.
|
|
|
|
*/
|
|
|
|
exports.realm_has_notifications_stream = () => page_params.realm_notifications_stream_id !== -1;
|
|
|
|
|
2020-04-14 12:55:18 +02:00
|
|
|
exports.get_notifications_stream = function () {
|
|
|
|
const stream_id = page_params.realm_notifications_stream_id;
|
|
|
|
if (stream_id !== -1) {
|
|
|
|
const stream_obj = exports.get_sub_by_id(stream_id);
|
|
|
|
if (stream_obj) {
|
|
|
|
return stream_obj.name;
|
|
|
|
}
|
|
|
|
// We reach here when the notifications stream is a private
|
|
|
|
// stream the current user is not subscribed to.
|
|
|
|
}
|
|
|
|
return '';
|
|
|
|
};
|
|
|
|
|
2020-02-25 12:16:26 +01:00
|
|
|
exports.initialize = function (params) {
|
|
|
|
/*
|
|
|
|
We get `params` data, which is data that we "own"
|
|
|
|
and which has already been removed from `page_params`.
|
|
|
|
We only use it in this function to populate other
|
|
|
|
data structures.
|
|
|
|
*/
|
|
|
|
|
|
|
|
const subscriptions = params.subscriptions;
|
|
|
|
const unsubscribed = params.unsubscribed;
|
|
|
|
const never_subscribed = params.never_subscribed;
|
2020-03-22 17:32:31 +01:00
|
|
|
const realm_default_streams = params.realm_default_streams;
|
2020-02-25 12:16:26 +01:00
|
|
|
|
|
|
|
/*
|
|
|
|
We also consume some data directly from `page_params`.
|
|
|
|
This data can be accessed by any other module,
|
|
|
|
and we consider the authoritative source to be
|
|
|
|
`page_params`. Some of this data should eventually
|
2020-03-22 17:32:31 +01:00
|
|
|
be fully handled by stream_data.
|
2020-02-25 12:16:26 +01:00
|
|
|
*/
|
|
|
|
|
|
|
|
color_data.claim_colors(subscriptions);
|
2018-11-28 23:12:40 +01:00
|
|
|
|
2018-02-06 09:42:41 +01:00
|
|
|
function populate_subscriptions(subs, subscribed, previously_subscribed) {
|
Clean up startup code for streams.
The startup code in subs.js used to intermingle data
stuff and UI stuff in a loop inside a called function,
which made the code hard to reason about.
Now there is a clear separation of concerns, with these methods
being called in succession:
stream_data.initialize_from_page_params();
stream_list.create_initial_sidebar_rows();
The first method was mostly extracted from subs.js, but I simplified
some things, like not needing to make a copy of the hashes
we were passed in, plus I now garbage collect email_dict. Also,
the code path that initialize_from_page_params() mostly replaces
used to call create_sub(), which fired a trigger, but now it
just does data stuff.
Once the data structure is built up, it's a very simple matter
to build the initial sidebar rows, and that's what the second
method does.
2016-10-17 19:34:58 +02:00
|
|
|
subs.forEach(function (sub) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const stream_name = sub.name;
|
Clean up startup code for streams.
The startup code in subs.js used to intermingle data
stuff and UI stuff in a loop inside a called function,
which made the code hard to reason about.
Now there is a clear separation of concerns, with these methods
being called in succession:
stream_data.initialize_from_page_params();
stream_list.create_initial_sidebar_rows();
The first method was mostly extracted from subs.js, but I simplified
some things, like not needing to make a copy of the hashes
we were passed in, plus I now garbage collect email_dict. Also,
the code path that initialize_from_page_params() mostly replaces
used to call create_sub(), which fired a trigger, but now it
just does data stuff.
Once the data structure is built up, it's a very simple matter
to build the initial sidebar rows, and that's what the second
method does.
2016-10-17 19:34:58 +02:00
|
|
|
sub.subscribed = subscribed;
|
2018-02-06 09:42:41 +01:00
|
|
|
sub.previously_subscribed = previously_subscribed;
|
Clean up startup code for streams.
The startup code in subs.js used to intermingle data
stuff and UI stuff in a loop inside a called function,
which made the code hard to reason about.
Now there is a clear separation of concerns, with these methods
being called in succession:
stream_data.initialize_from_page_params();
stream_list.create_initial_sidebar_rows();
The first method was mostly extracted from subs.js, but I simplified
some things, like not needing to make a copy of the hashes
we were passed in, plus I now garbage collect email_dict. Also,
the code path that initialize_from_page_params() mostly replaces
used to call create_sub(), which fired a trigger, but now it
just does data stuff.
Once the data structure is built up, it's a very simple matter
to build the initial sidebar rows, and that's what the second
method does.
2016-10-17 19:34:58 +02:00
|
|
|
|
|
|
|
exports.create_sub_from_server_data(stream_name, sub);
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
2020-03-22 17:32:31 +01:00
|
|
|
exports.set_realm_default_streams(realm_default_streams);
|
2017-03-21 21:10:20 +01:00
|
|
|
|
2020-02-25 12:16:26 +01:00
|
|
|
populate_subscriptions(subscriptions, true, true);
|
|
|
|
populate_subscriptions(unsubscribed, false, true);
|
|
|
|
populate_subscriptions(never_subscribed, false, false);
|
Clean up startup code for streams.
The startup code in subs.js used to intermingle data
stuff and UI stuff in a loop inside a called function,
which made the code hard to reason about.
Now there is a clear separation of concerns, with these methods
being called in succession:
stream_data.initialize_from_page_params();
stream_list.create_initial_sidebar_rows();
The first method was mostly extracted from subs.js, but I simplified
some things, like not needing to make a copy of the hashes
we were passed in, plus I now garbage collect email_dict. Also,
the code path that initialize_from_page_params() mostly replaces
used to call create_sub(), which fired a trigger, but now it
just does data stuff.
Once the data structure is built up, it's a very simple matter
to build the initial sidebar rows, and that's what the second
method does.
2016-10-17 19:34:58 +02:00
|
|
|
|
2019-03-17 14:48:51 +01:00
|
|
|
exports.set_filter_out_inactives();
|
Clean up startup code for streams.
The startup code in subs.js used to intermingle data
stuff and UI stuff in a loop inside a called function,
which made the code hard to reason about.
Now there is a clear separation of concerns, with these methods
being called in succession:
stream_data.initialize_from_page_params();
stream_list.create_initial_sidebar_rows();
The first method was mostly extracted from subs.js, but I simplified
some things, like not needing to make a copy of the hashes
we were passed in, plus I now garbage collect email_dict. Also,
the code path that initialize_from_page_params() mostly replaces
used to call create_sub(), which fired a trigger, but now it
just does data stuff.
Once the data structure is built up, it's a very simple matter
to build the initial sidebar rows, and that's what the second
method does.
2016-10-17 19:34:58 +02:00
|
|
|
};
|
|
|
|
|
2017-02-18 20:38:19 +01:00
|
|
|
exports.remove_default_stream = function (stream_id) {
|
2019-12-30 13:20:45 +01:00
|
|
|
default_stream_ids.delete(stream_id);
|
2017-02-18 20:38:19 +01:00
|
|
|
};
|
|
|
|
|
2019-10-25 09:45:13 +02:00
|
|
|
window.stream_data = exports;
|