2021-03-30 06:23:09 +02:00
|
|
|
import {all_messages_data} from "./all_messages_data";
|
2021-02-28 00:54:32 +01:00
|
|
|
import {FoldDict} from "./fold_dict";
|
2021-02-28 01:10:31 +01:00
|
|
|
import * as message_util from "./message_util";
|
2021-04-15 17:02:54 +02:00
|
|
|
import * as sub_store from "./sub_store";
|
2021-02-28 21:30:38 +01:00
|
|
|
import * as unread from "./unread";
|
2019-02-08 11:56:33 +01:00
|
|
|
|
2020-07-23 02:59:59 +02:00
|
|
|
// Per-stream topic history: stream_id -> PerStreamHistory object.
const stream_dict = new Map(); // stream_id -> PerStreamHistory object
|
2020-01-23 17:05:31 +01:00
|
|
|
// IDs of streams whose full topic history we know we have, either
// because the server sent it (add_history) or because our local
// message cache covers the whole stream (is_complete_for_stream_id).
const fetched_stream_ids = new Set();
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2021-03-29 16:13:39 +02:00
|
|
|
export function all_topics_in_cache(sub) {
    // Checks whether this browser's cache of contiguous messages
    // (used to locally render narrows) in all_messages_data has all
    // messages from a given stream, and thus all historical topics
    // for it.  Because all_messages_data is a range, we just need to
    // compare it to the range of history on the stream.

    // An uninitialized or empty cache clearly covers nothing.
    if (all_messages_data === undefined || all_messages_data.empty()) {
        return false;
    }

    // Without the newest messages fetched, we cannot be sure we have
    // every topic.
    if (!all_messages_data.fetch_status.has_found_newest()) {
        return false;
    }

    if (sub.first_message_id === null) {
        // A stream with no message history is covered vacuously.
        // This should be a very rare condition, since stream creation
        // sends a message.
        return true;
    }

    // The cache covers the stream exactly when its oldest cached
    // message is no newer than the stream's first message; otherwise
    // we might be missing the oldest topics of this stream.
    return all_messages_data.first().id <= sub.first_message_id;
}
|
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
export function is_complete_for_stream_id(stream_id) {
    if (fetched_stream_ids.has(stream_id)) {
        return true;
    }

    const sub = sub_store.get(stream_id);
    if (!all_topics_in_cache(sub)) {
        return false;
    }

    // The local message cache covers this stream, so we can record it
    // in fetched_stream_ids.  Note that we never remove IDs in the
    // opposite scenario, because an uncovered stream may just be
    // waiting for the initial message fetch.
    fetched_stream_ids.add(stream_id);
    return true;
}
|
2020-01-23 16:09:55 +01:00
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
export function stream_has_topics(stream_id) {
    // True when we are tracking this stream and it has at least one
    // known topic.
    const history = stream_dict.get(stream_id);
    return history !== undefined && history.has_topics();
}
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
export class PerStreamHistory {
    /*
        Tracks the topics of a single stream in a dictionary.

        The main getter of this object is get_recent_topic_names, and
        we just sort on the fly every time we are called.

        Each topic record has these attributes:

        * message_id: The latest message ID known for the topic.  Only
          usable for imprecise applications like sorting, since the
          value cannot be fully accurate given message editing and
          deleting (we have no way to handle the latest message in a
          stream having its stream edited or deleted).

          TODO: We can probably fix this limitation by doing a
          single-message `GET /messages` query with anchor="latest",
          num_before=0, num_after=0, to update this field when its
          value becomes ambiguous.  Or, probably better to avoid a
          thundering herd (of a fast query), have the server send the
          data needed for this update in stream/topic-edit and delete
          events (just the new max_message_id for the relevant topic
          would likely suffice, though we need to think about private
          stream corner cases).

        * pretty_name: The topic name, with original case.

        * historical: Whether the user actually received any messages
          in the topic (has UserMessage rows) or is just viewing the
          stream.

        * count: Number of known messages in the topic.  Used to
          detect when the last messages in a topic were moved to other
          topics or deleted.
    */

    // Case-insensitive map from topic name to topic record.
    topics = new FoldDict();
    // Most recent message ID for the stream.
    max_message_id = 0;

    constructor(stream_id) {
        this.stream_id = stream_id;
    }

    has_topics() {
        return this.topics.size !== 0;
    }

    update_stream_max_message_id(message_id) {
        // Bump the stream-wide maximum; never lower it.
        if (this.max_message_id < message_id) {
            this.max_message_id = message_id;
        }
    }

    add_or_update({topic_name, message_id = 0}) {
        // Normalize to an integer (Number.parseInt tolerates string
        // input here).
        const id = Number.parseInt(message_id, 10);
        this.update_stream_max_message_id(id);

        const existing = this.topics.get(topic_name);
        if (existing === undefined) {
            // First time we hear about this topic.
            this.topics.set(topic_name, {
                message_id: id,
                pretty_name: topic_name,
                historical: false,
                count: 1,
            });
            return;
        }

        // Only non-historical records carry a trustworthy count.
        if (!existing.historical) {
            existing.count += 1;
        }

        if (existing.message_id < id) {
            // A newer message also refreshes the canonical casing of
            // the topic name.
            existing.message_id = id;
            existing.pretty_name = topic_name;
        }
    }

    maybe_remove(topic_name, num_messages) {
        const existing = this.topics.get(topic_name);
        if (existing === undefined) {
            return;
        }

        if (existing.historical) {
            // We can't trust that a topic rename applied to the
            // entire history of a historical topic, so we will always
            // leave it in the sidebar.
            return;
        }

        if (existing.count > num_messages) {
            existing.count -= num_messages;
            return;
        }

        // All known messages for the topic are gone.
        this.topics.delete(topic_name);
    }

    add_history(server_history) {
        // This method populates historical topics from the server.
        // We have less data about these than the client can maintain
        // for newer topics.
        for (const {name, max_id} of server_history) {
            const existing = this.topics.get(name);

            if (existing && !existing.historical) {
                // Trust our local data more, since it maintains
                // counts.
                continue;
            }

            // If we get here, we are either finding out about the
            // topic for the first time, or we are getting more
            // current data for it.
            this.topics.set(name, {
                message_id: max_id,
                pretty_name: name,
                historical: true,
            });
            this.update_stream_max_message_id(max_id);
        }
    }

    get_recent_topic_names() {
        const local_topics = [...this.topics.values()];

        /* Add any older topics with unreads that may not be present
         * in our local cache. */
        const missing_topics = unread.get_missing_topics({
            stream_id: this.stream_id,
            topic_dict: this.topics,
        });

        const recents = local_topics.concat(missing_topics);
        // Newest-first by latest known message ID.
        recents.sort((a, b) => b.message_id - a.message_id);

        return recents.map((obj) => obj.pretty_name);
    }

    get_max_message_id() {
        return this.max_message_id;
    }
}
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
export function remove_messages(opts) {
    const {stream_id, topic_name, num_messages, max_removed_msg_id} = opts;

    // This is the special case of "removing" a message from a topic,
    // which happens when we edit topics.
    const history = stream_dict.get(stream_id);
    if (!history) {
        return;
    }

    // This is the normal case of an incoming message.
    history.maybe_remove(topic_name, num_messages);

    const existing_topic = history.topics.get(topic_name);
    if (existing_topic === undefined) {
        return;
    }

    // Update max_message_id in topic: if the removed messages may
    // have included the topic's newest, recompute it from the
    // messages we still have cached for the topic.
    if (existing_topic.message_id <= max_removed_msg_id) {
        const msgs_in_topic = message_util.get_messages_in_topic(stream_id, topic_name);
        existing_topic.message_id = msgs_in_topic.reduce(
            (max_id, msg) => Math.max(max_id, msg.id),
            0,
        );
    }

    // Update max_message_id in stream, for the same reason.
    if (history.max_message_id <= max_removed_msg_id) {
        history.max_message_id = message_util.get_max_message_id_in_stream(stream_id);
    }
}
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
export function find_or_create(stream_id) {
    // Return the stream's history object, creating (and registering)
    // one on first use.
    const existing = stream_dict.get(stream_id);
    if (existing) {
        return existing;
    }

    const history = new PerStreamHistory(stream_id);
    stream_dict.set(stream_id, history);
    return history;
}
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
export function add_message(opts) {
    // Record a single (possibly new) message in its stream's topic
    // history.
    const {stream_id, message_id, topic_name} = opts;

    find_or_create(stream_id).add_or_update({
        topic_name,
        message_id,
    });
}
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
export function add_history(stream_id, server_history) {
    // Ingest server-provided topic history and mark the stream as
    // fully fetched.
    const history = find_or_create(stream_id);
    history.add_history(server_history);
    fetched_stream_ids.add(stream_id);
}
|
2017-07-27 12:57:37 +02:00
|
|
|
|
2021-04-15 18:51:44 +02:00
|
|
|
// Whether we have already fetched (or locally cover) the complete
// topic history for this stream.
export function has_history_for(stream_id) {
    return fetched_stream_ids.has(stream_id);
}
|
2017-08-08 19:54:07 +02:00
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
export function get_recent_topic_names(stream_id) {
    // Topic names for the stream, newest-first by latest message ID.
    return find_or_create(stream_id).get_recent_topic_names();
}
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
export function get_max_message_id(stream_id) {
    // Most recent known message ID in the stream (0 if none known).
    return find_or_create(stream_id).get_max_message_id();
}
|
2020-04-30 12:11:18 +02:00
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
export function reset() {
    // This is only used by tests.
    stream_dict.clear();
    fetched_stream_ids.clear();
}
|