"use strict";

const assert = require("node:assert/strict");

const {mock_esm, zrequire} = require("./lib/namespace.cjs");
const {run_test, noop} = require("./lib/test.cjs");

const channel = mock_esm("../src/channel");
const message_util = mock_esm("../src/message_util");

const all_messages_data = zrequire("all_messages_data");
const echo_state = zrequire("echo_state");
const unread = zrequire("unread");
const message_store = zrequire("message_store");
const {set_realm} = zrequire("state_data");
const stream_data = zrequire("stream_data");
const stream_topic_history = zrequire("stream_topic_history");
const stream_topic_history_util = zrequire("stream_topic_history_util");

set_realm({});

stream_topic_history.set_update_topic_last_message_id(noop);
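
// Helper that runs each test case from a clean slate: unread state and
// stream_topic_history are reset before the test body runs.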
function test(label, f) {
    run_test(label, (helpers) => {
        unread.declare_bankruptcy();
        stream_topic_history.reset();
        f(helpers);
    });
}
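
// Exercise add_message(), remove_messages(), and per-stream
// max_message_id tracking, including case-insensitive topic names.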
test("basics", () => {
    const stream_id = 55;

    stream_topic_history.add_message({
        stream_id,
        message_id: 101,
        topic_name: "toPic1",
    });

    let history = stream_topic_history.get_recent_topic_names(stream_id);
    let max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(history, ["toPic1"]);
    assert.deepEqual(max_message_id, 101);

    stream_topic_history.add_message({
        stream_id,
        message_id: 102,
        topic_name: "Topic1",
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(history, ["Topic1"]);
    assert.deepEqual(max_message_id, 102);

    stream_topic_history.add_message({
        stream_id,
        message_id: 103,
        topic_name: "topic2",
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(history, ["topic2", "Topic1"]);
    assert.deepEqual(max_message_id, 103);

    stream_topic_history.add_message({
        stream_id,
        message_id: 104,
        topic_name: "Topic1",
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(history, ["Topic1", "topic2"]);
    assert.deepEqual(max_message_id, 104);

    message_util.get_messages_in_topic = () => [{id: 101}, {id: 102}];
    message_util.get_max_message_id_in_stream = () => 103;
    // Removing the last message in topic1 changes the order.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "Topic1",
        num_messages: 1,
        max_removed_msg_id: 104,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["topic2", "Topic1"]);
    // Check that the stream's max_message_id is updated.
    max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(max_message_id, 103);

    delete message_util.get_messages_in_topic;
    // Removing the first topic1 message has no effect.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "toPic1",
        num_messages: 1,
        max_removed_msg_id: 101,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["topic2", "Topic1"]);
    max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(max_message_id, 103);

    // Removing the second topic1 message removes the topic.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "Topic1",
        num_messages: 1,
        max_removed_msg_id: 102,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["topic2"]);

    // Test that a duplicate remove does not crash us.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "Topic1",
        num_messages: 1,
        max_removed_msg_id: 0,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["topic2"]);

    // Get to 100% coverage for defensive code.
    stream_topic_history.remove_messages({
        stream_id: 9999999,
        num_messages: 1,
    });
});
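
// is_complete_for_stream_id() should be true once we know we have all
// of the stream's topics, and the result is cached in fetched_stream_ids.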
test("is_complete_for_stream_id", ({override_rewire}) => {
    const sub = {
        name: "devel",
        stream_id: 444,
        first_message_id: 1000,
    };
    stream_data.add_sub(sub);

    override_rewire(all_messages_data, "all_messages_data", {
        empty: () => false,
        fetch_status: {
            has_found_newest: () => true,
        },
        first: () => ({id: 5}),
    });

    assert.equal(stream_topic_history.is_complete_for_stream_id(sub.stream_id), true);

    // Now simulate a more recent message id.
    all_messages_data.all_messages_data.first = () => ({id: sub.first_message_id + 1});

    // Note that we'll return `true` here due to
    // fetched_stream_ids having the stream_id now.
    assert.equal(stream_topic_history.is_complete_for_stream_id(sub.stream_id), true);

    // But now clear the data to see what we'd have without
    // the previous call.
    stream_topic_history.reset();

    assert.equal(stream_topic_history.is_complete_for_stream_id(sub.stream_id), false);
});
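
// Merge topics fetched from the server (add_history) with locally
// observed messages (add_message).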
test("server_history", () => {
    const sub = {
        name: "devel",
        stream_id: 66,
    };
    const stream_id = sub.stream_id;
    stream_data.add_sub(sub);

    assert.equal(stream_topic_history.is_complete_for_stream_id(stream_id), false);

    stream_topic_history.add_message({
        stream_id,
        message_id: 501,
        topic_name: "local",
    });

    function add_server_history() {
        stream_topic_history.add_history(stream_id, [
            {name: "local", max_id: 501},
            {name: "hist2", max_id: 31},
            {name: "hist1", max_id: 30},
        ]);
    }

    add_server_history();

    // Since we added history, subsequent calls to
    // is_complete_for_stream_id will return true.
    assert.equal(stream_topic_history.is_complete_for_stream_id(stream_id), true);

    let history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["local", "hist2", "hist1"]);

    // If new activity comes in for historical messages,
    // they can bump to the front of the list.
    stream_topic_history.add_message({
        stream_id,
        message_id: 502,
        topic_name: "hist1",
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["hist1", "local", "hist2"]);

    // Server history is allowed to backdate hist1.
    add_server_history();
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["local", "hist2", "hist1"]);

    // Removing a local message removes the topic if we have
    // our counts right.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "local",
        num_messages: 1,
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["hist2", "hist1"]);

    // Removing a message from a topic fetched from server history sends
    // a query to the server to get the latest message id in the topic.
    let update_topic_called = false;
    stream_topic_history.set_update_topic_last_message_id((stream_id, topic_name) => {
        assert.equal(stream_id, 66);
        assert.equal(topic_name, "hist2");
        update_topic_called = true;
    });
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "hist2",
        num_messages: 1,
    });
    assert.equal(update_topic_called, true);
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["hist1"]);
    stream_topic_history.set_update_topic_last_message_id(noop);

    // If we call back to the server for history, the
    // effect is always additive. We may decide to prune old
    // topics in the future, if they dropped off due to renames,
    // but that is probably an edge case we can ignore for now.
    stream_topic_history.add_history(stream_id, [
        {name: "hist2", max_id: 931},
        {name: "hist3", max_id: 5},
    ]);
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["hist2", "hist1", "hist3"]);
});
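
// Topics that only have unread messages should still show up in the
// recent-topic list, but only while they have nonzero unreads.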
test("test_unread_logic", () => {
    const stream_id = 77;

    stream_topic_history.add_message({
        stream_id,
        message_id: 201,
        topic_name: "toPic1",
    });

    stream_topic_history.add_message({
        stream_id,
        message_id: 45,
        topic_name: "topic2",
    });

    let history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["toPic1", "topic2"]);

    const msgs = [
        // This will be ignored as a case variant of `topic2` above.
        {id: 150, topic: "TOPIC2"},
        {id: 61, topic: "unread1"},
        {id: 60, topic: "unread1"},
        {id: 20, topic: "UNREAD2"},
        // We're going to mark this as read; this will verify the logic
        // in unreads.js for only including topics with nonzero unreads.
        {id: 79, topic: "to_mark_as_read"},
    ];

    for (const msg of msgs) {
        msg.type = "stream";
        msg.stream_id = stream_id;
        msg.unread = true;
        message_store.update_message_cache(msg);
    }

    unread.process_loaded_messages(msgs);
    unread.mark_as_read(79);

    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["toPic1", "unread1", "topic2", "UNREAD2"]);
});
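
// stream_has_topics() should stay false until an actual message is
// recorded, even if a bucket for the stream already exists.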
test("test_stream_has_topics", () => {
    const stream_id = 88;

    assert.equal(stream_topic_history.stream_has_topics(stream_id), false);

    stream_topic_history.find_or_create(stream_id);

    // This was a bug before--just creating a bucket does not
    // mean we have actual topics.
    assert.equal(stream_topic_history.stream_has_topics(stream_id), false);

    stream_topic_history.add_message({
        stream_id,
        message_id: 888,
        topic_name: "whatever",
    });

    assert.equal(stream_topic_history.stream_has_topics(stream_id), true);
});
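
// Drive get_server_history() end to end through the mocked channel.get():
// pending-request tracking, the error path, the success path, and the
// cached second call that avoids another request.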
test("server_history_end_to_end", () => {
    stream_topic_history.reset();

    const stream_id = 99;

    const topics = [
        {name: "topic3", max_id: 501},
        {name: "topic2", max_id: 31},
        {name: "topic1", max_id: 30},
    ];

    let get_success_callback;
    let get_error_callback;
    let on_success_called;

    channel.get = (opts) => {
        assert.equal(opts.url, "/json/users/me/99/topics");
        assert.deepEqual(opts.data, {});
        assert.ok(stream_topic_history.is_request_pending_for(stream_id));
        get_success_callback = opts.success;
        get_error_callback = opts.error;
    };

    stream_topic_history_util.get_server_history(stream_id, noop);

    // Another call returns early because a request is already in progress
    // for stream_id = 99. This call adds coverage.
    stream_topic_history_util.get_server_history(stream_id, noop);
    assert.ok(stream_topic_history.is_request_pending_for(stream_id));

    get_error_callback();
    assert.ok(!stream_topic_history.is_request_pending_for(stream_id));

    stream_topic_history_util.get_server_history(stream_id, () => {
        on_success_called = true;
    });

    get_success_callback({topics});
    assert.ok(on_success_called);
    assert.ok(!stream_topic_history.is_request_pending_for(stream_id));

    const history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, ["topic3", "topic2", "topic1"]);

    // Try getting server history for a second time.

    /* istanbul ignore next */
    channel.get = () => {
        throw new Error("We should not get more data.");
    };

    on_success_called = false;
    stream_topic_history_util.get_server_history(stream_id, () => {
        on_success_called = true;
    });
    assert.ok(on_success_called);
});
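
// all_topics_in_cache() depends on whether all_messages_data has found
// the newest message and reaches back to the stream's first_message_id.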
test("all_topics_in_cache", ({override}) => {
    // Add a new stream with first_message_id set.
    const general = {
        name: "general",
        stream_id: 21,
        first_message_id: null,
    };
    const messages = [
        {id: 1, stream_id: 21},
        {id: 2, stream_id: 21},
        {id: 3, stream_id: 21},
    ];
    const sub = stream_data.create_sub_from_server_data(general);

    assert.equal(stream_topic_history.all_topics_in_cache(sub), false);

    all_messages_data.all_messages_data.clear();
    all_messages_data.all_messages_data.add_messages(messages);

    let has_found_newest = false;

    override(
        all_messages_data.all_messages_data.fetch_status,
        "has_found_newest",
        () => has_found_newest,
    );

    assert.equal(stream_topic_history.all_topics_in_cache(sub), false);
    has_found_newest = true;
    assert.equal(stream_topic_history.all_topics_in_cache(sub), true);

    sub.first_message_id = 0;
    assert.equal(stream_topic_history.all_topics_in_cache(sub), false);

    sub.first_message_id = 2;
    assert.equal(stream_topic_history.all_topics_in_cache(sub), true);
});
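
// When all cached messages in a topic are removed, the update callback
// asks the server (via /json/messages) for the topic's latest message.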
test("ask_server_for_latest_topic_data", () => {
    stream_topic_history.set_update_topic_last_message_id((stream_id, topic_name) => {
        stream_topic_history_util.update_topic_last_message_id(stream_id, topic_name, noop);
    });
    const stream_id = 1080;

    let success_callback;
    let get_message_request_triggered = false;
    channel.get = (opts) => {
        get_message_request_triggered = true;
        assert.equal(opts.url, "/json/messages");
        assert.deepEqual(opts.data, {
            anchor: "newest",
            narrow: '[{"operator":"stream","operand":1080},{"operator":"topic","operand":"Topic1"}]',
            num_after: 0,
            num_before: 1,
        });
        success_callback = opts.success;
    };

    stream_topic_history.add_message({
        stream_id,
        message_id: 101,
        topic_name: "topic1",
    });

    let history = stream_topic_history.get_recent_topic_names(stream_id);
    let max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(history, ["topic1"]);
    assert.deepEqual(max_message_id, 101);

    // Remove all cached messages from the topic. This sends a request to the server
    // to check for the latest message id in the topic.
    stream_topic_history.remove_messages({
        stream_id,
        topic_name: "Topic1",
        num_messages: 1,
        max_removed_msg_id: 104,
    });
    assert.equal(get_message_request_triggered, true);
    get_message_request_triggered = false;

    // Until we process the response from the server,
    // the topic is not available.
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, []);

    // Simulate the server responses.
    // Topic is empty.
    success_callback({
        messages: [],
    });
    history = stream_topic_history.get_recent_topic_names(stream_id);
    assert.deepEqual(history, []);

    // Topic has a different max_message_id.
    success_callback({
        messages: [{id: 102}],
    });

    // The topic is now available.
    history = stream_topic_history.get_recent_topic_names(stream_id);
    max_message_id = stream_topic_history.get_max_message_id(stream_id);
    assert.deepEqual(history, ["Topic1"]);
    assert.deepEqual(max_message_id, 102);
});

// Test that when a local unacked message is sent, get_max_message_id
// also considers this unacked message. However, the unacked message is
// not added to the stream's max_message_id or to the per-topic
// message_id histories.
test("test_max_message_ids_in_channel_and_topics", () => {
    const general_sub = {
        stream_id: 101,
        name: "general",
        subscribed: true,
    };

    const history = stream_topic_history.find_or_create(general_sub.stream_id);

    stream_topic_history.add_message({
        stream_id: general_sub.stream_id,
        message_id: 45,
        topic_name: "topic 1",
    });

    assert.equal(stream_topic_history.get_max_message_id(general_sub.stream_id), 45);
    assert.equal(history.max_message_id, 45);

    stream_topic_history.add_message({
        stream_id: general_sub.stream_id,
        message_id: 47,
        topic_name: "topic 1",
    });

    assert.equal(stream_topic_history.get_max_message_id(general_sub.stream_id), 47);
    assert.equal(history.max_message_id, 47);

    const local_message = {
        type: "stream",
        stream_id: general_sub.stream_id,
        topic: "topic 2",
        sender_email: "iago@zulip.com",
        sender_full_name: "Iago",
        sender_id: 123,
        id: 49.01,
    };
    echo_state.set_message_waiting_for_ack("49.01", local_message);

    assert.equal(stream_topic_history.get_max_message_id(general_sub.stream_id), 49.01);
    assert.equal(history.max_message_id, 47);
    assert.equal(history.topics.get("topic 2"), undefined);

    assert.deepEqual(stream_topic_history.get_recent_topic_names(general_sub.stream_id), [
        "topic 2",
        "topic 1",
    ]);
});