zrequire('unread');
zrequire('stream_data');
zrequire('stream_topic_history');

set_global('channel', {});
set_global('message_list', {});
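
// stream_topic_history tracks, per stream, which topic names we know about
// and the highest message id we have seen in each topic.  It also records
// whether we have fetched a stream's full topic history from the server.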
run_test('basics', () => {
    const stream_id = 55;

    stream_topic_history.add_message({
        stream_id: stream_id,
        message_id: 101,
        topic_name: 'toPic1',
    });

    let history = stream_topic_history.get_recent_topic_names(stream_id);
    let max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(history, ['toPic1']);
    assert.deepEqual(max_message_id, 101);
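
    // Adding a message to the same topic under a different capitalization
    // does not create a new topic; the most recent spelling wins.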
    stream_topic_history.add_message({
        stream_id: stream_id,
        message_id: 102,
        topic_name: 'Topic1',
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(history, ['Topic1']);
    assert.deepEqual(max_message_id, 102);

    stream_topic_history.add_message({
        stream_id: stream_id,
        message_id: 103,
        topic_name: 'topic2',
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(history, ['topic2', 'Topic1']);
    assert.deepEqual(max_message_id, 103);

    // Removing the first topic1 message has no effect.
    stream_topic_history.remove_messages({
        stream_id: stream_id,
        topic_name: 'toPic1',
        num_messages: 1,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['topic2', 'Topic1']);

    // Removing a topic message shouldn't affect the max_message_id.
    max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(max_message_id, 103);

    // Removing the second topic1 message removes the topic.
    stream_topic_history.remove_messages({
        stream_id: stream_id,
        topic_name: 'Topic1',
        num_messages: 1,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['topic2']);

    // Test that a duplicate remove does not crash us.
    stream_topic_history.remove_messages({
        stream_id: stream_id,
        topic_name: 'Topic1',
        num_messages: 1,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['topic2']);

    // Get to 100% coverage for the defensive code by removing
    // messages from a stream we have never tracked.
    stream_topic_history.remove_messages({
        stream_id: 9999999,
        num_messages: 1,
    });
});

run_test('is_complete_for_stream_id', () => {
    const sub = {
        name: 'devel',
        stream_id: 444,
        first_message_id: 1000,
    };
    stream_data.add_sub(sub);

    message_list.all = {
        empty: () => false,
        data: {
            fetch_status: {
                has_found_newest: () => true,
            },
        },
        first: () => ({id: 5}),
    };
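
    // We have fetched the newest messages, and the oldest fetched message
    // (id 5) predates the stream's first_message_id, so the local message
    // data covers the stream's entire history.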
    assert.equal(
        stream_topic_history.is_complete_for_stream_id(sub.stream_id),
        true);

    // Now simulate a more recent message id.
    message_list.all.first = () => ({id: sub.first_message_id + 1});

    // Note that we'll return `true` here due to
    // fetched_stream_ids having the stream_id now.
    assert.equal(
        stream_topic_history.is_complete_for_stream_id(sub.stream_id),
        true);

    // But now clear the data to see what we'd have without
    // the previous call.
    stream_topic_history.reset();

    assert.equal(
        stream_topic_history.is_complete_for_stream_id(sub.stream_id),
        false);
});

run_test('server_history', () => {
    const sub = {
        name: 'devel',
        stream_id: 66,
    };
    const stream_id = sub.stream_id;
    stream_data.add_sub(sub);

    message_list.all.data.fetch_status.has_found_newest = () => false;

    assert.equal(
        stream_topic_history.is_complete_for_stream_id(stream_id),
        false);

    stream_topic_history.add_message({
        stream_id: stream_id,
        message_id: 501,
        topic_name: 'local',
    });
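
    // 'local' above is known from a locally received message; 'hist1' and
    // 'hist2' below are only known from the server's topic history.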
    function add_server_history() {
        stream_topic_history.add_history(stream_id, [
            { name: 'local', max_id: 501 },
            { name: 'hist2', max_id: 31 },
            { name: 'hist1', max_id: 30 },
        ]);
    }

    add_server_history();

    // Since we added history, now subsequent calls
    // to is_complete_for_stream_id will return true.
    assert.equal(
        stream_topic_history.is_complete_for_stream_id(stream_id),
        true);

    let history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['local', 'hist2', 'hist1']);

    // If new activity comes in for historical messages,
    // they can bump to the front of the list.
    stream_topic_history.add_message({
        stream_id: stream_id,
        message_id: 502,
        topic_name: 'hist1',
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['hist1', 'local', 'hist2']);

    // Server history is allowed to backdate hist1.
    add_server_history();
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['local', 'hist2', 'hist1']);

    // Removing a local message removes the topic if we have
    // our counts right.
    stream_topic_history.remove_messages({
        stream_id: stream_id,
        topic_name: 'local',
        num_messages: 1,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['hist2', 'hist1']);

    // We can try to remove a historical message, but it should
    // have no effect.
    stream_topic_history.remove_messages({
        stream_id: stream_id,
        topic_name: 'hist2',
        num_messages: 1,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['hist2', 'hist1']);

    // If we call back to the server for history, the
    // effect is always additive.  We may decide to prune old
    // topics in the future, if they dropped off due to renames,
    // but that is probably an edge case we can ignore for now.
    stream_topic_history.add_history(stream_id, [
        { name: 'hist2', max_id: 931 },
        { name: 'hist3', max_id: 5 },
    ]);
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['hist2', 'hist1', 'hist3']);
});

run_test('test_unread_logic', () => {
    const stream_id = 77;

    stream_topic_history.add_message({
        stream_id: stream_id,
        message_id: 201,
        topic_name: 'toPic1',
    });

    stream_topic_history.add_message({
        stream_id: stream_id,
        message_id: 45,
        topic_name: 'topic2',
    });

    let history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['toPic1', 'topic2']);

    const msgs = [
        { id: 150, topic: 'TOPIC2' }, // will be ignored
        { id: 61, topic: 'unread1' },
        { id: 60, topic: 'unread1' },
        { id: 20, topic: 'UNREAD2' },
    ];
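
    // Processing these as unread messages adds unread1 and UNREAD2 to the
    // topic history; the TOPIC2 message is ignored because topic2 is
    // already tracked.  Topics stay ordered by their highest message id.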
    for (const msg of msgs) {
        msg.type = 'stream';
        msg.stream_id = stream_id;
        msg.unread = true;
    }

    unread.process_loaded_messages(msgs);

    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['toPic1', 'unread1', 'topic2', 'UNREAD2']);
});

run_test('test_stream_has_topics', () => {
    const stream_id = 88;

    assert.equal(stream_topic_history.stream_has_topics(stream_id), false);

    stream_topic_history.find_or_create(stream_id);

    // This was a bug before--just creating a bucket does not
    // mean we have actual topics.
    assert.equal(stream_topic_history.stream_has_topics(stream_id), false);

    stream_topic_history.add_message({
        stream_id: stream_id,
        message_id: 888,
        topic_name: 'whatever',
    });

    assert.equal(stream_topic_history.stream_has_topics(stream_id), true);
});

run_test('server_history_end_to_end', () => {
    stream_topic_history.reset();

    const stream_id = 99;

    const topics = [
        { name: 'topic3', max_id: 501 },
        { name: 'topic2', max_id: 31 },
        { name: 'topic1', max_id: 30 },
    ];

    let get_success_callback;
    let on_success_called;
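
    // Stub channel.get to capture the success callback, so the test can
    // deliver the fake response for /json/users/me/99/topics synchronously.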
    channel.get = function (opts) {
        assert.equal(opts.url, '/json/users/me/99/topics');
        assert.deepEqual(opts.data, {});
        get_success_callback = opts.success;
    };

    stream_topic_history.get_server_history(stream_id, () => {
        on_success_called = true;
    });

    get_success_callback({topics: topics});

    assert(on_success_called);

    const history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ['topic3', 'topic2', 'topic1']);

    // Try getting server history a second time; the stream is now in
    // fetched_stream_ids, so we should call on_success without asking
    // the server for more data.
    channel.get = () => {
        throw Error('We should not get more data.');
    };

    on_success_called = false;
    stream_topic_history.get_server_history(stream_id, () => {
        on_success_called = true;
    });
    assert(on_success_called);
});