// zulip/frontend_tests/node_tests/stream_topic_history.js
"use strict";
const {strict: assert} = require("assert");
const {mock_esm, zrequire} = require("../zjsunit/namespace");
const {run_test} = require("../zjsunit/test");
const channel = mock_esm("../../static/js/channel");
const message_util = mock_esm("../../static/js/message_util");
const all_messages_data = zrequire("all_messages_data");
const unread = zrequire("unread");
const stream_data = zrequire("stream_data");
const stream_topic_history = zrequire("stream_topic_history");
const stream_topic_history_util = zrequire("stream_topic_history_util");
// Run a test body via run_test, first resetting the global state these
// tests share (unread counts and the stream/topic history cache).
function test(label, test_body) {
    const wrapped = (helpers) => {
        unread.declare_bankruptcy();
        stream_topic_history.reset();
        test_body(helpers);
    };
    run_test(label, wrapped);
}
test("basics", () => {
    const stream_id = 55;

    // Convenience accessors for the state under test.
    const topics = () => stream_topic_history.get_recent_topic_names(stream_id);
    const max_id = () => stream_topic_history.get_max_message_id(stream_id);

    stream_topic_history.add_message({
        stream_id,
        message_id: 101,
        topic_name: "toPic1",
    });
    assert.deepEqual(topics(), ["toPic1"]);
    assert.deepEqual(max_id(), 101);

    // A case variant of an existing topic replaces the stored name.
    stream_topic_history.add_message({
        stream_id,
        message_id: 102,
        topic_name: "Topic1",
    });
    assert.deepEqual(topics(), ["Topic1"]);
    assert.deepEqual(max_id(), 102);

    stream_topic_history.add_message({
        stream_id,
        message_id: 103,
        topic_name: "topic2",
    });
    assert.deepEqual(topics(), ["topic2", "Topic1"]);
    assert.deepEqual(max_id(), 103);

    stream_topic_history.add_message({
        stream_id,
        message_id: 104,
        topic_name: "Topic1",
    });
    assert.deepEqual(topics(), ["Topic1", "topic2"]);
    assert.deepEqual(max_id(), 104);

    message_util.get_messages_in_topic = () => [{id: 101}, {id: 102}];
    message_util.get_max_message_id_in_stream = () => 103;

    // Removing the last msg in topic1 changes the order.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "Topic1",
        num_messages: 1,
        max_removed_msg_id: 104,
    });
    assert.deepEqual(topics(), ["topic2", "Topic1"]);
    // Check that the stream's max_message_id was updated too.
    assert.deepEqual(max_id(), 103);

    delete message_util.get_messages_in_topic;

    // Removing the first topic1 message has no effect.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "toPic1",
        num_messages: 1,
        max_removed_msg_id: 101,
    });
    assert.deepEqual(topics(), ["topic2", "Topic1"]);
    assert.deepEqual(max_id(), 103);

    // Removing the second topic1 message removes the topic entirely.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "Topic1",
        num_messages: 1,
        max_removed_msg_id: 102,
    });
    assert.deepEqual(topics(), ["topic2"]);

    // A duplicate removal must not crash us.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "Topic1",
        num_messages: 1,
        max_removed_msg_id: 0,
    });
    assert.deepEqual(topics(), ["topic2"]);

    // Exercise the defensive code for an unknown stream
    // (gets us to 100% coverage).
    stream_topic_history.remove_messages({
        stream_id: 9999999,
        num_messages: 1,
    });
});
test("is_complete_for_stream_id", ({override_rewire}) => {
    const sub = {
        name: "devel",
        stream_id: 444,
        first_message_id: 1000,
    };
    stream_data.add_sub(sub);

    override_rewire(all_messages_data, "all_messages_data", {
        empty: () => false,
        fetch_status: {
            has_found_newest: () => true,
        },
        first: () => ({id: 5}),
    });

    const is_complete = () => stream_topic_history.is_complete_for_stream_id(sub.stream_id);

    assert.equal(is_complete(), true);

    // Now simulate a more recent first-cached message id.
    all_messages_data.all_messages_data.first = () => ({id: sub.first_message_id + 1});

    // Still `true`, because the previous call added the stream to
    // fetched_stream_ids.
    assert.equal(is_complete(), true);

    // Clear the cached data to see what we'd get without that
    // earlier call.
    stream_topic_history.reset();
    assert.equal(is_complete(), false);
});
// NOTE(review): this block had GitHub blame-view timestamp lines pasted
// into the middle of the code, which made the file syntactically invalid;
// they have been removed, leaving the code otherwise unchanged.
test("server_history", () => {
    const sub = {
        name: "devel",
        stream_id: 66,
    };
    const stream_id = sub.stream_id;
    stream_data.add_sub(sub);

    assert.equal(stream_topic_history.is_complete_for_stream_id(stream_id), false);

    stream_topic_history.add_message({
        stream_id,
        message_id: 501,
        topic_name: "local",
    });

    function add_server_history() {
        stream_topic_history.add_history(stream_id, [
            {name: "local", max_id: 501},
            {name: "hist2", max_id: 31},
            {name: "hist1", max_id: 30},
        ]);
    }

    add_server_history();

    // Since we added history, now subsequent calls
    // to is_complete_for_stream_id will return true.
    assert.equal(stream_topic_history.is_complete_for_stream_id(stream_id), true);

    let history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["local", "hist2", "hist1"]);

    // If new activity comes in for historical messages,
    // they can bump to the front of the list.
    stream_topic_history.add_message({
        stream_id,
        message_id: 502,
        topic_name: "hist1",
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["hist1", "local", "hist2"]);

    // Server history is allowed to backdate hist1.
    add_server_history();
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["local", "hist2", "hist1"]);

    // Removing a local message removes the topic if we have
    // our counts right.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "local",
        num_messages: 1,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["hist2", "hist1"]);

    // We can try to remove a historical message, but it should
    // have no effect.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "hist2",
        num_messages: 1,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["hist2", "hist1"]);

    // If we call back to the server for history, the
    // effect is always additive. We may decide to prune old
    // topics in the future, if they dropped off due to renames,
    // but that is probably an edge case we can ignore for now.
    stream_topic_history.add_history(stream_id, [
        {name: "hist2", max_id: 931},
        {name: "hist3", max_id: 5},
    ]);
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["hist2", "hist1", "hist3"]);
});
// NOTE(review): this block had GitHub blame-view artifacts (timestamps
// and two full commit-message blobs) pasted into the middle of the code,
// which made the file syntactically invalid; they have been removed,
// leaving the code otherwise unchanged.
test("test_unread_logic", () => {
    const stream_id = 77;

    stream_topic_history.add_message({
        stream_id,
        message_id: 201,
        topic_name: "toPic1",
    });
    stream_topic_history.add_message({
        stream_id,
        message_id: 45,
        topic_name: "topic2",
    });

    let history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["toPic1", "topic2"]);

    const msgs = [
        // This will be ignored as a case variant of `topic2` above.
        {id: 150, topic: "TOPIC2"},
        {id: 61, topic: "unread1"},
        {id: 60, topic: "unread1"},
        {id: 20, topic: "UNREAD2"},
        // We're going to mark this as read; this will verify the logic
        // in unreads.js for only including topics with nonzero unreads.
        {id: 79, topic: "to_mark_as_read"},
    ];

    for (const msg of msgs) {
        msg.type = "stream";
        msg.stream_id = stream_id;
        msg.unread = true;
    }

    unread.process_loaded_messages(msgs);
    unread.mark_as_read(79);

    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["toPic1", "unread1", "topic2", "UNREAD2"]);
});
// NOTE(review): a stray blame-view timestamp line preceding this block
// was removed; the code is otherwise unchanged.
test("test_stream_has_topics", () => {
    const stream_id = 88;

    assert.equal(stream_topic_history.stream_has_topics(stream_id), false);

    stream_topic_history.find_or_create(stream_id);

    // This was a bug before--just creating a bucket does not
    // mean we have actual topics.
    assert.equal(stream_topic_history.stream_has_topics(stream_id), false);

    stream_topic_history.add_message({
        stream_id,
        message_id: 888,
        topic_name: "whatever",
    });

    assert.equal(stream_topic_history.stream_has_topics(stream_id), true);
});
test("server_history_end_to_end", () => {
    stream_topic_history.reset();

    const stream_id = 99;
    const server_topics = [
        {name: "topic3", max_id: 501},
        {name: "topic2", max_id: 31},
        {name: "topic1", max_id: 30},
    ];

    let deliver_response;
    let callback_called;

    const record_callback = () => {
        callback_called = true;
    };

    // Stub the network layer, capturing the success handler so the
    // test can deliver the payload at a time of its choosing.
    channel.get = (opts) => {
        assert.equal(opts.url, "/json/users/me/99/topics");
        assert.deepEqual(opts.data, {});
        deliver_response = opts.success;
    };

    stream_topic_history_util.get_server_history(stream_id, record_callback);
    deliver_response({topics: server_topics});
    assert.ok(callback_called);

    assert.deepEqual(stream_topic_history.get_recent_topic_names(stream_id), [
        "topic3",
        "topic2",
        "topic1",
    ]);

    // On a second fetch the data is already cached, so hitting the
    // network again would be a bug.
    /* istanbul ignore next */
    channel.get = () => {
        throw new Error("We should not get more data.");
    };

    callback_called = false;
    stream_topic_history_util.get_server_history(stream_id, record_callback);
    assert.ok(callback_called);
});
test("all_topics_in_cache", ({override}) => {
    // A new stream whose first_message_id is initially unknown (null).
    const general = {
        name: "general",
        stream_id: 21,
        first_message_id: null,
    };
    const messages = [
        {id: 1, stream_id: 21},
        {id: 2, stream_id: 21},
        {id: 3, stream_id: 21},
    ];
    const sub = stream_data.create_sub_from_server_data(general);

    const in_cache = () => stream_topic_history.all_topics_in_cache(sub);

    assert.equal(in_cache(), false);

    all_messages_data.all_messages_data.clear();
    all_messages_data.all_messages_data.add_messages(messages);

    let has_found_newest = false;
    override(
        all_messages_data.all_messages_data.fetch_status,
        "has_found_newest",
        () => has_found_newest,
    );

    // Without the newest messages fetched, the cache is incomplete.
    assert.equal(in_cache(), false);

    has_found_newest = true;
    assert.equal(in_cache(), true);

    // The cache covers the stream only when it contains the stream's
    // first message.
    sub.first_message_id = 0;
    assert.equal(in_cache(), false);

    sub.first_message_id = 2;
    assert.equal(in_cache(), true);
});