2024-01-02 07:03:01 +01:00
|
|
|
import assert from "minimalistic-assert";
|
|
|
|
|
2021-03-30 06:23:09 +02:00
|
|
|
import {all_messages_data} from "./all_messages_data";
|
2024-08-03 14:16:14 +02:00
|
|
|
import * as echo_state from "./echo_state";
|
2021-02-28 00:54:32 +01:00
|
|
|
import {FoldDict} from "./fold_dict";
|
2021-02-28 01:10:31 +01:00
|
|
|
import * as message_util from "./message_util";
|
2021-04-15 17:02:54 +02:00
|
|
|
import * as sub_store from "./sub_store";
|
2024-01-02 07:03:01 +01:00
|
|
|
import type {StreamSubscription} from "./sub_store";
|
2021-02-28 21:30:38 +01:00
|
|
|
import * as unread from "./unread";
|
2019-02-08 11:56:33 +01:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
// stream_id -> PerStreamHistory object
const stream_dict = new Map<number, PerStreamHistory>();

// Streams for which we have fetched (and recorded) complete server-side
// topic history; see add_history / is_complete_for_stream_id.
const fetched_stream_ids = new Set<number>();

// Streams with a topic-history request currently in flight, used to
// avoid issuing duplicate requests.
const request_pending_stream_ids = new Set<number>();
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2024-06-26 06:16:20 +02:00
|
|
|
// This is stream_topic_history_util.get_server_history.
|
|
|
|
// We have to indirectly set it to avoid a circular dependency.
|
|
|
|
let update_topic_last_message_id: (stream_id: number, topic_name: string) => void;
|
|
|
|
export function set_update_topic_last_message_id(
|
|
|
|
f: (stream_id: number, topic_name: string) => void,
|
|
|
|
): void {
|
|
|
|
update_topic_last_message_id = f;
|
|
|
|
}
|
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function all_topics_in_cache(sub: StreamSubscription): boolean {
|
2021-03-29 16:13:39 +02:00
|
|
|
// Checks whether this browser's cache of contiguous messages
|
2021-03-30 06:23:09 +02:00
|
|
|
// (used to locally render narrows) in all_messages_data has all
|
2024-06-26 05:35:44 +02:00
|
|
|
// messages from a given stream. Because all_messages_data is a range,
|
|
|
|
// we just need to compare it to the range of history on the stream.
|
2021-03-29 16:13:39 +02:00
|
|
|
|
|
|
|
// If the cache isn't initialized, it's a clear false.
|
2021-03-30 06:23:09 +02:00
|
|
|
if (all_messages_data === undefined || all_messages_data.empty()) {
|
2021-03-29 16:13:39 +02:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
// If the cache doesn't have the latest messages, we can't be sure
|
|
|
|
// we have all topics.
|
2021-03-30 06:23:09 +02:00
|
|
|
if (!all_messages_data.fetch_status.has_found_newest()) {
|
2021-03-29 16:13:39 +02:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (sub.first_message_id === null) {
|
|
|
|
// If the stream has no message history, we have it all
|
|
|
|
// vacuously. This should be a very rare condition, since
|
|
|
|
// stream creation sends a message.
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Now, we can just compare the first cached message to the first
|
|
|
|
// message ID in the stream; if it's older, we're good, otherwise,
|
|
|
|
// we might be missing the oldest topics in this stream in our
|
|
|
|
// cache.
|
2021-03-30 06:23:09 +02:00
|
|
|
const first_cached_message = all_messages_data.first();
|
2024-05-24 01:17:25 +02:00
|
|
|
return first_cached_message!.id <= sub.first_message_id;
|
2021-03-29 16:13:39 +02:00
|
|
|
}
|
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function is_complete_for_stream_id(stream_id: number): boolean {
|
2020-01-23 17:05:31 +01:00
|
|
|
if (fetched_stream_ids.has(stream_id)) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2021-04-15 17:02:54 +02:00
|
|
|
const sub = sub_store.get(stream_id);
|
2024-01-02 07:03:01 +01:00
|
|
|
const in_cache = sub !== undefined && all_topics_in_cache(sub);
|
2020-01-23 17:05:31 +01:00
|
|
|
|
|
|
|
if (in_cache) {
|
|
|
|
/*
|
|
|
|
If the stream is cached, we can add it to
|
|
|
|
fetched_stream_ids. Note that for the opposite
|
|
|
|
scenario, we don't delete from
|
|
|
|
fetched_stream_ids, because we may just be
|
|
|
|
waiting for the initial message fetch.
|
|
|
|
*/
|
|
|
|
fetched_stream_ids.add(stream_id);
|
|
|
|
}
|
|
|
|
|
|
|
|
return in_cache;
|
2021-02-28 00:54:32 +01:00
|
|
|
}
|
2020-01-23 16:09:55 +01:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function stream_has_topics(stream_id: number): boolean {
|
2018-05-21 21:29:39 +02:00
|
|
|
if (!stream_dict.has(stream_id)) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2019-11-02 00:06:25 +01:00
|
|
|
const history = stream_dict.get(stream_id);
|
2024-01-02 07:03:01 +01:00
|
|
|
assert(history !== undefined);
|
2018-05-21 21:29:39 +02:00
|
|
|
|
|
|
|
return history.has_topics();
|
2021-02-28 00:54:32 +01:00
|
|
|
}
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2024-06-26 05:35:44 +02:00
|
|
|
// Client-side record of what we know about one topic.
export type TopicHistoryEntry = {
    // Number of known messages in the topic; used to detect when the
    // last messages in a topic were moved elsewhere or deleted.
    count: number;
    // Latest known message ID in the topic (best-effort; may lag reality).
    message_id: number;
    // The topic name with its original capitalization preserved.
    pretty_name: string;
};

// Shape of one entry in the server's topic-history response.
type ServerTopicHistoryEntry = {
    name: string;
    max_id: number;
};
|
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
export class PerStreamHistory {
|
2020-01-23 15:36:11 +01:00
|
|
|
/*
|
2020-04-30 12:11:18 +02:00
|
|
|
For a given stream, this structure has a dictionary of topics.
|
|
|
|
The main getter of this object is get_recent_topic_names, and
|
|
|
|
we just sort on the fly every time we are called.
|
|
|
|
|
|
|
|
Attributes for a topic are:
|
2024-06-26 06:16:20 +02:00
|
|
|
* message_id: The latest message_id in the topic. This is to
|
|
|
|
the best of our knowledge, and may not be accurate if
|
|
|
|
we have not seen all the messages in the topic.
|
2020-04-30 12:11:18 +02:00
|
|
|
* pretty_name: The topic_name, with original case.
|
|
|
|
* count: Number of known messages in the topic. Used to detect
|
|
|
|
when the last messages in a topic were moved to other topics or
|
|
|
|
deleted.
|
2020-01-23 15:36:11 +01:00
|
|
|
*/
|
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
topics = new FoldDict<TopicHistoryEntry>();
|
2020-04-30 12:11:18 +02:00
|
|
|
// Most recent message ID for the stream.
|
2020-07-23 02:59:59 +02:00
|
|
|
max_message_id = 0;
|
2024-01-02 07:03:01 +01:00
|
|
|
stream_id: number;
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
constructor(stream_id: number) {
|
2020-07-23 02:59:59 +02:00
|
|
|
this.stream_id = stream_id;
|
|
|
|
}
|
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
has_topics(): boolean {
|
2020-07-23 02:59:59 +02:00
|
|
|
return this.topics.size !== 0;
|
|
|
|
}
|
2018-05-21 21:29:39 +02:00
|
|
|
|
2024-06-27 10:33:08 +02:00
|
|
|
update_stream_with_message_id(message_id: number): void {
|
2020-07-23 02:59:59 +02:00
|
|
|
if (message_id > this.max_message_id) {
|
|
|
|
this.max_message_id = message_id;
|
2020-04-30 12:11:18 +02:00
|
|
|
}
|
2024-06-27 10:33:08 +02:00
|
|
|
|
|
|
|
// Update the first_message_id for the stream.
|
|
|
|
// It is fine if `first_message_id` changes to be higher
|
|
|
|
// due to removal of messages since it will not cause to
|
|
|
|
// display wrong list of topics. So, we don't update it here.
|
|
|
|
// On the other hand, if it changes to be lower
|
|
|
|
// we may miss some topics in autocomplete in the range
|
|
|
|
// of outdated-`first_message_id` to new-`message_id`.
|
|
|
|
// Note that this can only happen if a user moves old
|
|
|
|
// messages to the stream from another stream.
|
|
|
|
const sub = sub_store.get(this.stream_id);
|
|
|
|
if (!sub) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (sub.first_message_id === null || sub.first_message_id === undefined) {
|
|
|
|
fetched_stream_ids.delete(this.stream_id);
|
|
|
|
sub.first_message_id = message_id;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (sub.first_message_id > message_id) {
|
|
|
|
fetched_stream_ids.delete(this.stream_id);
|
|
|
|
sub.first_message_id = message_id;
|
|
|
|
}
|
2020-07-23 02:59:59 +02:00
|
|
|
}
|
2020-04-30 12:11:18 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
add_or_update(topic_name: string, message_id: number): void {
|
2024-06-26 06:16:20 +02:00
|
|
|
// The `message_id` provided here can easily be far from the latest
|
|
|
|
// message in the topic, but it is more important for us to cache the topic
|
|
|
|
// for autocomplete purposes than to have an accurate max message ID.
|
2024-06-27 10:33:08 +02:00
|
|
|
this.update_stream_with_message_id(message_id);
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2020-07-23 02:59:59 +02:00
|
|
|
const existing = this.topics.get(topic_name);
|
2017-07-24 22:16:13 +02:00
|
|
|
|
|
|
|
if (!existing) {
|
2021-03-24 21:44:43 +01:00
|
|
|
this.topics.set(topic_name, {
|
2020-07-20 22:18:43 +02:00
|
|
|
message_id,
|
2020-03-22 18:40:05 +01:00
|
|
|
pretty_name: topic_name,
|
2017-07-24 22:16:13 +02:00
|
|
|
count: 1,
|
|
|
|
});
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2024-06-26 05:35:44 +02:00
|
|
|
existing.count += 1;
|
2017-07-27 12:57:37 +02:00
|
|
|
|
2017-07-24 22:16:13 +02:00
|
|
|
if (message_id > existing.message_id) {
|
|
|
|
existing.message_id = message_id;
|
2020-03-22 18:40:05 +01:00
|
|
|
existing.pretty_name = topic_name;
|
2017-07-24 22:16:13 +02:00
|
|
|
}
|
2020-07-23 02:59:59 +02:00
|
|
|
}
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
maybe_remove(topic_name: string, num_messages: number): void {
|
2020-07-23 02:59:59 +02:00
|
|
|
const existing = this.topics.get(topic_name);
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2024-06-29 12:00:56 +02:00
|
|
|
if (!existing) {
|
2017-07-24 22:16:13 +02:00
|
|
|
return;
|
2017-07-24 18:22:37 +02:00
|
|
|
}
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2020-06-11 12:12:12 +02:00
|
|
|
if (existing.count <= num_messages) {
|
2020-07-23 02:59:59 +02:00
|
|
|
this.topics.delete(topic_name);
|
2024-06-29 12:00:56 +02:00
|
|
|
// Verify if this topic still has messages from the server.
|
|
|
|
update_topic_last_message_id(this.stream_id, topic_name);
|
2017-07-24 22:16:13 +02:00
|
|
|
}
|
|
|
|
|
2020-06-11 12:12:12 +02:00
|
|
|
existing.count -= num_messages;
|
2020-07-23 02:59:59 +02:00
|
|
|
}
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
add_history(server_history: ServerTopicHistoryEntry[]): void {
|
2024-06-26 05:35:44 +02:00
|
|
|
// This method populates list of topics from the server.
|
2017-07-27 12:57:37 +02:00
|
|
|
|
js: Automatically convert _.each to for…of.
This commit was automatically generated by the following script,
followed by lint --fix and a few small manual lint-related cleanups.
import * as babelParser from "recast/parsers/babel";
import * as recast from "recast";
import * as tsParser from "recast/parsers/typescript";
import { builders as b, namedTypes as n } from "ast-types";
import { Context } from "ast-types/lib/path-visitor";
import K from "ast-types/gen/kinds";
import { NodePath } from "ast-types/lib/node-path";
import assert from "assert";
import fs from "fs";
import path from "path";
import process from "process";
const checkExpression = (node: n.Node): node is K.ExpressionKind =>
n.Expression.check(node);
const checkStatement = (node: n.Node): node is K.StatementKind =>
n.Statement.check(node);
for (const file of process.argv.slice(2)) {
console.log("Parsing", file);
const ast = recast.parse(fs.readFileSync(file, { encoding: "utf8" }), {
parser: path.extname(file) === ".ts" ? tsParser : babelParser,
});
let changed = false;
let inLoop = false;
let replaceReturn = false;
const visitLoop = (...args: string[]) =>
function(this: Context, path: NodePath) {
for (const arg of args) {
this.visit(path.get(arg));
}
const old = { inLoop };
inLoop = true;
this.visit(path.get("body"));
inLoop = old.inLoop;
return false;
};
recast.visit(ast, {
visitDoWhileStatement: visitLoop("test"),
visitExpressionStatement(path) {
const { expression, comments } = path.node;
let valueOnly;
if (
n.CallExpression.check(expression) &&
n.MemberExpression.check(expression.callee) &&
!expression.callee.computed &&
n.Identifier.check(expression.callee.object) &&
expression.callee.object.name === "_" &&
n.Identifier.check(expression.callee.property) &&
["each", "forEach"].includes(expression.callee.property.name) &&
[2, 3].includes(expression.arguments.length) &&
checkExpression(expression.arguments[0]) &&
(n.FunctionExpression.check(expression.arguments[1]) ||
n.ArrowFunctionExpression.check(expression.arguments[1])) &&
[1, 2].includes(expression.arguments[1].params.length) &&
n.Identifier.check(expression.arguments[1].params[0]) &&
((valueOnly = expression.arguments[1].params[1] === undefined) ||
n.Identifier.check(expression.arguments[1].params[1])) &&
(expression.arguments[2] === undefined ||
n.ThisExpression.check(expression.arguments[2]))
) {
const old = { inLoop, replaceReturn };
inLoop = false;
replaceReturn = true;
this.visit(
path
.get("expression")
.get("arguments")
.get(1)
.get("body")
);
inLoop = old.inLoop;
replaceReturn = old.replaceReturn;
const [right, { body, params }] = expression.arguments;
const loop = b.forOfStatement(
b.variableDeclaration("let", [
b.variableDeclarator(
valueOnly ? params[0] : b.arrayPattern([params[1], params[0]])
),
]),
valueOnly
? right
: b.callExpression(
b.memberExpression(right, b.identifier("entries")),
[]
),
checkStatement(body) ? body : b.expressionStatement(body)
);
loop.comments = comments;
path.replace(loop);
changed = true;
}
this.traverse(path);
},
visitForStatement: visitLoop("init", "test", "update"),
visitForInStatement: visitLoop("left", "right"),
visitForOfStatement: visitLoop("left", "right"),
visitFunction(path) {
this.visit(path.get("params"));
const old = { replaceReturn };
replaceReturn = false;
this.visit(path.get("body"));
replaceReturn = old.replaceReturn;
return false;
},
visitReturnStatement(path) {
if (replaceReturn) {
assert(!inLoop); // could use labeled continue if this ever fires
const { argument, comments } = path.node;
if (argument === null) {
const s = b.continueStatement();
s.comments = comments;
path.replace(s);
} else {
const s = b.expressionStatement(argument);
s.comments = comments;
path.replace(s, b.continueStatement());
}
return false;
}
this.traverse(path);
},
visitWhileStatement: visitLoop("test"),
});
if (changed) {
console.log("Writing", file);
fs.writeFileSync(file, recast.print(ast).code, { encoding: "utf8" });
}
}
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-02-06 06:19:47 +01:00
|
|
|
for (const obj of server_history) {
|
2020-04-17 22:08:17 +02:00
|
|
|
const topic_name = obj.name;
|
2019-11-02 00:06:25 +01:00
|
|
|
const message_id = obj.max_id;
|
2017-07-27 12:57:37 +02:00
|
|
|
|
2020-07-23 02:59:59 +02:00
|
|
|
const existing = this.topics.get(topic_name);
|
2017-07-27 12:57:37 +02:00
|
|
|
|
2024-06-26 05:35:44 +02:00
|
|
|
if (existing) {
|
2024-06-27 08:01:50 +02:00
|
|
|
// If we have a topic in our cache, we update
|
|
|
|
// the message_id to accurately reflect the latest
|
|
|
|
// message in the topic.
|
|
|
|
existing.message_id = message_id;
|
2020-12-22 11:26:39 +01:00
|
|
|
continue;
|
2017-07-27 12:57:37 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
// If we get here, we are either finding out about
|
|
|
|
// the topic for the first time, or we are getting
|
|
|
|
// more current data for it.
|
|
|
|
|
2020-07-23 02:59:59 +02:00
|
|
|
this.topics.set(topic_name, {
|
2020-07-20 22:18:43 +02:00
|
|
|
message_id,
|
2020-03-22 18:40:05 +01:00
|
|
|
pretty_name: topic_name,
|
2024-06-26 05:35:44 +02:00
|
|
|
count: 0,
|
2017-07-27 12:57:37 +02:00
|
|
|
});
|
2024-06-27 10:33:08 +02:00
|
|
|
this.update_stream_with_message_id(message_id);
|
js: Automatically convert _.each to for…of.
This commit was automatically generated by the following script,
followed by lint --fix and a few small manual lint-related cleanups.
import * as babelParser from "recast/parsers/babel";
import * as recast from "recast";
import * as tsParser from "recast/parsers/typescript";
import { builders as b, namedTypes as n } from "ast-types";
import { Context } from "ast-types/lib/path-visitor";
import K from "ast-types/gen/kinds";
import { NodePath } from "ast-types/lib/node-path";
import assert from "assert";
import fs from "fs";
import path from "path";
import process from "process";
const checkExpression = (node: n.Node): node is K.ExpressionKind =>
n.Expression.check(node);
const checkStatement = (node: n.Node): node is K.StatementKind =>
n.Statement.check(node);
for (const file of process.argv.slice(2)) {
console.log("Parsing", file);
const ast = recast.parse(fs.readFileSync(file, { encoding: "utf8" }), {
parser: path.extname(file) === ".ts" ? tsParser : babelParser,
});
let changed = false;
let inLoop = false;
let replaceReturn = false;
const visitLoop = (...args: string[]) =>
function(this: Context, path: NodePath) {
for (const arg of args) {
this.visit(path.get(arg));
}
const old = { inLoop };
inLoop = true;
this.visit(path.get("body"));
inLoop = old.inLoop;
return false;
};
recast.visit(ast, {
visitDoWhileStatement: visitLoop("test"),
visitExpressionStatement(path) {
const { expression, comments } = path.node;
let valueOnly;
if (
n.CallExpression.check(expression) &&
n.MemberExpression.check(expression.callee) &&
!expression.callee.computed &&
n.Identifier.check(expression.callee.object) &&
expression.callee.object.name === "_" &&
n.Identifier.check(expression.callee.property) &&
["each", "forEach"].includes(expression.callee.property.name) &&
[2, 3].includes(expression.arguments.length) &&
checkExpression(expression.arguments[0]) &&
(n.FunctionExpression.check(expression.arguments[1]) ||
n.ArrowFunctionExpression.check(expression.arguments[1])) &&
[1, 2].includes(expression.arguments[1].params.length) &&
n.Identifier.check(expression.arguments[1].params[0]) &&
((valueOnly = expression.arguments[1].params[1] === undefined) ||
n.Identifier.check(expression.arguments[1].params[1])) &&
(expression.arguments[2] === undefined ||
n.ThisExpression.check(expression.arguments[2]))
) {
const old = { inLoop, replaceReturn };
inLoop = false;
replaceReturn = true;
this.visit(
path
.get("expression")
.get("arguments")
.get(1)
.get("body")
);
inLoop = old.inLoop;
replaceReturn = old.replaceReturn;
const [right, { body, params }] = expression.arguments;
const loop = b.forOfStatement(
b.variableDeclaration("let", [
b.variableDeclarator(
valueOnly ? params[0] : b.arrayPattern([params[1], params[0]])
),
]),
valueOnly
? right
: b.callExpression(
b.memberExpression(right, b.identifier("entries")),
[]
),
checkStatement(body) ? body : b.expressionStatement(body)
);
loop.comments = comments;
path.replace(loop);
changed = true;
}
this.traverse(path);
},
visitForStatement: visitLoop("init", "test", "update"),
visitForInStatement: visitLoop("left", "right"),
visitForOfStatement: visitLoop("left", "right"),
visitFunction(path) {
this.visit(path.get("params"));
const old = { replaceReturn };
replaceReturn = false;
this.visit(path.get("body"));
replaceReturn = old.replaceReturn;
return false;
},
visitReturnStatement(path) {
if (replaceReturn) {
assert(!inLoop); // could use labeled continue if this ever fires
const { argument, comments } = path.node;
if (argument === null) {
const s = b.continueStatement();
s.comments = comments;
path.replace(s);
} else {
const s = b.expressionStatement(argument);
s.comments = comments;
path.replace(s, b.continueStatement());
}
return false;
}
this.traverse(path);
},
visitWhileStatement: visitLoop("test"),
});
if (changed) {
console.log("Writing", file);
fs.writeFileSync(file, recast.print(ast).code, { encoding: "utf8" });
}
}
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-02-06 06:19:47 +01:00
|
|
|
}
|
2020-07-23 02:59:59 +02:00
|
|
|
}
|
2017-07-27 12:57:37 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
get_recent_topic_names(): string[] {
|
2024-08-03 14:16:14 +02:00
|
|
|
// Combines several data sources to produce a complete picture
|
|
|
|
// of topics the client knows about.
|
|
|
|
//
|
|
|
|
// This data source is this module's own data structures.
|
2023-03-02 01:58:25 +01:00
|
|
|
const my_recents = [...this.topics.values()];
|
2024-08-03 14:16:14 +02:00
|
|
|
// This data source is older topics that we know exist because
|
|
|
|
// we have unread messages in the topic, even if we don't have
|
|
|
|
// any messages from the topic in our local cache.
|
2019-11-02 00:06:25 +01:00
|
|
|
const missing_topics = unread.get_missing_topics({
|
2020-07-23 02:59:59 +02:00
|
|
|
stream_id: this.stream_id,
|
|
|
|
topic_dict: this.topics,
|
2018-05-13 12:17:00 +02:00
|
|
|
});
|
|
|
|
|
2024-08-03 14:16:14 +02:00
|
|
|
// This data source is locally echoed messages, which should
|
|
|
|
// are treated as newer than all delivered messages.
|
|
|
|
const local_echo_topics = [
|
|
|
|
...echo_state.get_waiting_for_ack_local_ids_by_topic(this.stream_id).entries(),
|
|
|
|
].map(([topic, local_id]) => ({pretty_name: topic, message_id: local_id}));
|
|
|
|
const local_echo_set = new Set<string>(
|
|
|
|
local_echo_topics.map((message_topic) => message_topic.pretty_name.toLowerCase()),
|
|
|
|
);
|
|
|
|
|
|
|
|
// We first sort the topics without locally echoed messages,
|
|
|
|
// and then prepend topics with locally echoed messages.
|
|
|
|
const server_topics = [...my_recents, ...missing_topics].filter(
|
|
|
|
(message_topic) => !local_echo_set.has(message_topic.pretty_name.toLowerCase()),
|
|
|
|
);
|
|
|
|
server_topics.sort((a, b) => b.message_id - a.message_id);
|
|
|
|
return [...local_echo_topics, ...server_topics].map((obj) => obj.pretty_name);
|
2020-07-23 02:59:59 +02:00
|
|
|
}
|
2020-04-30 12:11:18 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
get_max_message_id(): number {
|
2024-08-03 14:16:14 +02:00
|
|
|
// TODO: We probably want to migrate towards this function
|
|
|
|
// ignoring locally echoed messages, and thus returning an integer.
|
|
|
|
const unacked_message_ids_in_stream = [
|
|
|
|
...echo_state.get_waiting_for_ack_local_ids_by_topic(this.stream_id).values(),
|
|
|
|
];
|
|
|
|
const max_message_id = Math.max(...unacked_message_ids_in_stream, this.max_message_id);
|
|
|
|
return max_message_id;
|
2020-07-23 02:59:59 +02:00
|
|
|
}
|
|
|
|
}
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function remove_messages(opts: {
|
|
|
|
stream_id: number;
|
|
|
|
topic_name: string;
|
|
|
|
num_messages: number;
|
|
|
|
max_removed_msg_id: number;
|
|
|
|
}): void {
|
2019-11-02 00:06:25 +01:00
|
|
|
const stream_id = opts.stream_id;
|
2020-03-22 18:40:05 +01:00
|
|
|
const topic_name = opts.topic_name;
|
2020-06-15 19:52:00 +02:00
|
|
|
const num_messages = opts.num_messages;
|
2020-08-04 11:12:42 +02:00
|
|
|
const max_removed_msg_id = opts.max_removed_msg_id;
|
2019-11-02 00:06:25 +01:00
|
|
|
const history = stream_dict.get(stream_id);
|
2017-07-24 22:16:13 +02:00
|
|
|
|
|
|
|
// This is the special case of "removing" a message from
|
|
|
|
// a topic, which happens when we edit topics.
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2017-07-24 22:16:13 +02:00
|
|
|
if (!history) {
|
|
|
|
return;
|
2017-07-24 18:22:37 +02:00
|
|
|
}
|
|
|
|
|
2024-06-26 06:16:20 +02:00
|
|
|
// Adjust our local data structures to account for the
|
|
|
|
// removal of messages from a topic. We can also remove
|
|
|
|
// the topic if it has no messages left or if we cannot
|
|
|
|
// locally determine the current state of the topic.
|
|
|
|
// So, it is important that we return below if we don't have
|
|
|
|
// the topic cached.
|
2020-06-11 12:12:12 +02:00
|
|
|
history.maybe_remove(topic_name, num_messages);
|
2020-08-04 11:12:42 +02:00
|
|
|
const existing_topic = history.topics.get(topic_name);
|
|
|
|
if (!existing_topic) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Update max_message_id in topic
|
|
|
|
if (existing_topic.message_id <= max_removed_msg_id) {
|
|
|
|
const msgs_in_topic = message_util.get_messages_in_topic(stream_id, topic_name);
|
|
|
|
let max_message_id = 0;
|
|
|
|
for (const msg of msgs_in_topic) {
|
|
|
|
if (msg.id > max_message_id) {
|
|
|
|
max_message_id = msg.id;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
existing_topic.message_id = max_message_id;
|
|
|
|
}
|
2020-08-04 11:13:20 +02:00
|
|
|
|
|
|
|
// Update max_message_id in stream
|
|
|
|
if (history.max_message_id <= max_removed_msg_id) {
|
|
|
|
history.max_message_id = message_util.get_max_message_id_in_stream(stream_id);
|
|
|
|
}
|
2021-02-28 00:54:32 +01:00
|
|
|
}
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function find_or_create(stream_id: number): PerStreamHistory {
|
2019-11-02 00:06:25 +01:00
|
|
|
let history = stream_dict.get(stream_id);
|
2017-07-24 22:16:13 +02:00
|
|
|
|
|
|
|
if (!history) {
|
2020-07-23 02:59:59 +02:00
|
|
|
history = new PerStreamHistory(stream_id);
|
2017-07-24 22:16:13 +02:00
|
|
|
stream_dict.set(stream_id, history);
|
2017-07-24 18:22:37 +02:00
|
|
|
}
|
|
|
|
|
2017-07-24 22:16:13 +02:00
|
|
|
return history;
|
2021-02-28 00:54:32 +01:00
|
|
|
}
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function add_message(opts: {
|
|
|
|
stream_id: number;
|
|
|
|
message_id: number;
|
|
|
|
topic_name: string;
|
|
|
|
}): void {
|
2019-11-02 00:06:25 +01:00
|
|
|
const stream_id = opts.stream_id;
|
2024-01-02 07:03:01 +01:00
|
|
|
const message_id = opts.message_id;
|
2020-03-22 18:40:05 +01:00
|
|
|
const topic_name = opts.topic_name;
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2021-02-28 00:54:32 +01:00
|
|
|
const history = find_or_create(stream_id);
|
2017-07-24 22:16:13 +02:00
|
|
|
|
2024-01-03 22:25:13 +01:00
|
|
|
history.add_or_update(topic_name, message_id);
|
2021-02-28 00:54:32 +01:00
|
|
|
}
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function add_history(stream_id: number, server_history: ServerTopicHistoryEntry[]): void {
|
2021-02-28 00:54:32 +01:00
|
|
|
const history = find_or_create(stream_id);
|
2017-07-27 12:57:37 +02:00
|
|
|
history.add_history(server_history);
|
2020-01-23 17:05:31 +01:00
|
|
|
fetched_stream_ids.add(stream_id);
|
2021-02-28 00:54:32 +01:00
|
|
|
}
|
2017-07-27 12:57:37 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function has_history_for(stream_id: number): boolean {
    // True once the server's topic history for this stream has been
    // fetched and recorded via add_history (or inferred from the cache).
    return fetched_stream_ids.has(stream_id);
}
|
2017-08-08 19:54:07 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function get_recent_topic_names(stream_id: number): string[] {
|
2021-02-28 00:54:32 +01:00
|
|
|
const history = find_or_create(stream_id);
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2020-03-22 18:40:05 +01:00
|
|
|
return history.get_recent_topic_names();
|
2021-02-28 00:54:32 +01:00
|
|
|
}
|
2017-07-24 18:22:37 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function get_max_message_id(stream_id: number): number {
|
2021-02-28 00:54:32 +01:00
|
|
|
const history = find_or_create(stream_id);
|
2020-04-30 12:11:18 +02:00
|
|
|
|
|
|
|
return history.get_max_message_id();
|
2021-02-28 00:54:32 +01:00
|
|
|
}
|
2020-04-30 12:11:18 +02:00
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function reset(): void {
|
2017-07-24 18:22:37 +02:00
|
|
|
// This is only used by tests.
|
2020-01-23 15:42:43 +01:00
|
|
|
stream_dict.clear();
|
2020-01-23 17:05:31 +01:00
|
|
|
fetched_stream_ids.clear();
|
2023-10-09 16:19:56 +02:00
|
|
|
request_pending_stream_ids.clear();
|
|
|
|
}
|
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function is_request_pending_for(stream_id: number): boolean {
    // Whether a topic-history request for this stream is already in flight.
    return request_pending_stream_ids.has(stream_id);
}
|
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function add_request_pending_for(stream_id: number): void {
    // Mark a topic-history request for this stream as in flight, so
    // callers can avoid issuing duplicates.
    request_pending_stream_ids.add(stream_id);
}
|
|
|
|
|
2024-01-02 07:03:01 +01:00
|
|
|
export function remove_request_pending_for(stream_id: number): void {
    // Clear the in-flight marker once the request completes or fails.
    request_pending_stream_ids.delete(stream_id);
}
|