mirror of https://github.com/zulip/zulip.git
message_list_data_cache: Cache MessageListData objects.
We start caching MessageListData objects for the narrows which user has visited. We restore the cached objects if the filters match. Also, the cached objects are updated based on events. One major piece is update path the is pending implementation is the message move code path.
This commit is contained in:
parent
259e77fbf6
commit
5b9a2584c5
|
@ -144,6 +144,7 @@ EXEMPT_FILES = make_set(
|
|||
"web/src/message_fetch.ts",
|
||||
"web/src/message_list.js",
|
||||
"web/src/message_list_data.ts",
|
||||
"web/src/message_list_data_cache.ts",
|
||||
"web/src/message_list_hover.js",
|
||||
"web/src/message_list_tooltips.ts",
|
||||
"web/src/message_list_view.js",
|
||||
|
|
|
@ -3,7 +3,6 @@ import assert from "minimalistic-assert";
|
|||
import {z} from "zod";
|
||||
|
||||
import * as alert_words from "./alert_words";
|
||||
import {all_messages_data} from "./all_messages_data";
|
||||
import * as blueslip from "./blueslip";
|
||||
import * as compose_notifications from "./compose_notifications";
|
||||
import * as compose_ui from "./compose_ui";
|
||||
|
@ -448,13 +447,15 @@ export function reify_message_id(local_id: string, server_id: number): void {
|
|||
}
|
||||
|
||||
export function update_message_lists({old_id, new_id}: {old_id: number; new_id: number}): void {
|
||||
if (all_messages_data !== undefined) {
|
||||
all_messages_data.change_message_id(old_id, new_id);
|
||||
}
|
||||
// Update the rendered data first since it is most user visible.
|
||||
for (const msg_list of message_lists.all_rendered_message_lists()) {
|
||||
msg_list.change_message_id(old_id, new_id);
|
||||
msg_list.view.change_message_id(old_id, new_id);
|
||||
}
|
||||
|
||||
for (const msg_list_data of message_lists.non_rendered_data()) {
|
||||
msg_list_data.change_message_id(old_id, new_id);
|
||||
}
|
||||
}
|
||||
|
||||
export function process_from_server(messages: ServerMessage[]): ServerMessage[] {
|
||||
|
@ -555,11 +556,14 @@ export function message_send_error(message_id: number, error_response: string):
|
|||
}
|
||||
|
||||
function abort_message(message: Message): void {
|
||||
// Remove in all lists in which it exists
|
||||
all_messages_data.remove([message.id]);
|
||||
// Update the rendered data first since it is most user visible.
|
||||
for (const msg_list of message_lists.all_rendered_message_lists()) {
|
||||
msg_list.remove_and_rerender([message.id]);
|
||||
}
|
||||
|
||||
for (const msg_list_data of message_lists.non_rendered_data()) {
|
||||
msg_list_data.remove([message.id]);
|
||||
}
|
||||
}
|
||||
|
||||
export function display_slow_send_loading_spinner(message: Message): void {
|
||||
|
|
|
@ -14,6 +14,7 @@ import * as message_edit from "./message_edit";
|
|||
import * as message_edit_history from "./message_edit_history";
|
||||
import * as message_events_util from "./message_events_util";
|
||||
import * as message_helper from "./message_helper";
|
||||
import * as message_list_data_cache from "./message_list_data_cache";
|
||||
import * as message_lists from "./message_lists";
|
||||
import * as message_notifications from "./message_notifications";
|
||||
import * as message_store from "./message_store";
|
||||
|
@ -119,6 +120,17 @@ export function update_messages(events) {
|
|||
let any_stream_changed = false;
|
||||
let local_cache_missing_messages = false;
|
||||
|
||||
// Clear message list data cache since the local data for the
|
||||
// filters might no longer be accurate.
|
||||
//
|
||||
// TODO: Add logic to update the message list data cache.
|
||||
// Special care needs to be taken to ensure that the cache is
|
||||
// updated correctly when the message is moved to a different
|
||||
// stream or topic. Also, we need to update message lists like
|
||||
// `is:starred`, `is:mentioned`, etc. when the message flags are
|
||||
// updated.
|
||||
message_list_data_cache.clear();
|
||||
|
||||
for (const event of events) {
|
||||
const anchor_message = message_store.get(event.message_id);
|
||||
if (anchor_message !== undefined) {
|
||||
|
@ -552,10 +564,15 @@ export function update_messages(events) {
|
|||
}
|
||||
|
||||
export function remove_messages(message_ids) {
|
||||
all_messages_data.remove(message_ids);
|
||||
// Update the rendered data first since it is most user visible.
|
||||
for (const list of message_lists.all_rendered_message_lists()) {
|
||||
list.remove_and_rerender(message_ids);
|
||||
}
|
||||
|
||||
for (const msg_list_data of message_lists.non_rendered_data()) {
|
||||
msg_list_data.remove(message_ids);
|
||||
}
|
||||
|
||||
recent_senders.update_topics_of_deleted_message_ids(message_ids);
|
||||
recent_view_ui.update_topics_of_deleted_message_ids(message_ids);
|
||||
starred_messages.remove(message_ids);
|
||||
|
|
|
@ -0,0 +1,90 @@
|
|||
import * as all_messages_data from "./all_messages_data";
|
||||
import type {Filter} from "./filter";
|
||||
import type {MessageListData} from "./message_list_data";
|
||||
|
||||
// LRU cache for message list data.
//
// While it's unlikely that the user will narrow to the empty filter,
// we still need to keep all_messages_data in the cache, since it is
// used as the superset for populating other views.
|
||||
let cache = new Map<number, MessageListData>([[0, all_messages_data.all_messages_data]]);
|
||||
let latest_key = 0;
|
||||
|
||||
// Maximum number of data items to cache.
|
||||
const CACHE_STORAGE_LIMIT = 100;
|
||||
|
||||
function move_to_end(key: number, cached_data: MessageListData): void {
|
||||
// Move the item to the end of the cache.
|
||||
// Map remembers the original insertion order of the keys.
|
||||
cache.delete(key);
|
||||
latest_key += 1;
|
||||
cache.set(latest_key, cached_data);
|
||||
|
||||
// In theory, a cache like this might need to consider integer
|
||||
// overflow on latest_key, but that's not a realistic possibility
|
||||
// with how these data structures are used given the lifetime on a
|
||||
// Zulip web app window.
|
||||
}
|
||||
|
||||
export function get(filter: Filter): MessageListData | undefined {
|
||||
for (const [key, cached_data] of cache.entries()) {
|
||||
if (cached_data.filter.equals(filter)) {
|
||||
move_to_end(key, cached_data);
|
||||
return cached_data;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function add(message_list_data: MessageListData): void {
|
||||
for (const [key, cached_data] of cache.entries()) {
|
||||
if (cached_data.filter.equals(message_list_data.filter)) {
|
||||
// We could chose to maintain in the cache the
|
||||
// message_list_data passed in, or the one already in the
|
||||
// cache. These can be different primarily when they
|
||||
// represent non-overlapping ID ranges.
|
||||
//
|
||||
// Saving the more current of the two message lists seems
|
||||
// like a better tiebreak, but we may be able to eliminate
|
||||
// this class of case if we implement the plan in #16697.
|
||||
move_to_end(key, message_list_data);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (cache.size >= CACHE_STORAGE_LIMIT) {
|
||||
// Remove the oldest item from the cache.
|
||||
for (const [key, cached_data] of cache.entries()) {
|
||||
// We never want to remove the all_messages_data from the cache.
|
||||
if (cached_data.filter.equals(all_messages_data.all_messages_data.filter)) {
|
||||
continue;
|
||||
}
|
||||
cache.delete(key);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
latest_key += 1;
|
||||
cache.set(latest_key, message_list_data);
|
||||
}
|
||||
|
||||
export function all(): MessageListData[] {
|
||||
return [...cache.values()];
|
||||
}
|
||||
|
||||
export function clear(): void {
|
||||
cache = new Map([[0, all_messages_data.all_messages_data]]);
|
||||
latest_key = 0;
|
||||
}
|
||||
|
||||
export function get_superset_datasets(filter: Filter): MessageListData[] {
|
||||
const superset_datasets = [];
|
||||
// Try to get exact match first.
|
||||
const superset_data = get(filter);
|
||||
if (superset_data !== undefined) {
|
||||
// TODO: Search for additional superset datasets.
|
||||
superset_datasets.push(superset_data);
|
||||
}
|
||||
|
||||
return [...superset_datasets, all_messages_data.all_messages_data];
|
||||
}
|
|
@ -2,6 +2,7 @@ import $ from "jquery";
|
|||
|
||||
import * as inbox_util from "./inbox_util";
|
||||
import type {MessageListData} from "./message_list_data";
|
||||
import * as message_list_data_cache from "./message_list_data_cache";
|
||||
import type {Message} from "./message_store";
|
||||
import * as ui_util from "./ui_util";
|
||||
|
||||
|
@ -132,6 +133,7 @@ export function update_current_message_list(msg_list: MessageList | undefined):
|
|||
current = msg_list;
|
||||
if (current !== undefined) {
|
||||
rendered_message_lists.set(current.id, current);
|
||||
message_list_data_cache.add(current.data);
|
||||
current.view.$list.addClass("focused-message-list");
|
||||
}
|
||||
}
|
||||
|
@ -140,6 +142,16 @@ export function all_rendered_message_lists(): MessageList[] {
|
|||
return [...rendered_message_lists.values()];
|
||||
}
|
||||
|
||||
export function non_rendered_data(): MessageListData[] {
|
||||
const rendered_data = new Set(rendered_message_lists.keys());
|
||||
return message_list_data_cache.all().filter((data) => {
|
||||
if (data.rendered_message_list_id === undefined) {
|
||||
return true;
|
||||
}
|
||||
return !rendered_data.has(data.rendered_message_list_id);
|
||||
});
|
||||
}
|
||||
|
||||
export function add_rendered_message_list(msg_list: MessageList): void {
|
||||
rendered_message_lists.set(msg_list.id, msg_list);
|
||||
}
|
||||
|
|
|
@ -29,6 +29,7 @@ import * as message_fetch from "./message_fetch";
|
|||
import * as message_helper from "./message_helper";
|
||||
import * as message_list from "./message_list";
|
||||
import {MessageListData} from "./message_list_data";
|
||||
import * as message_list_data_cache from "./message_list_data_cache";
|
||||
import * as message_lists from "./message_lists";
|
||||
import * as message_scroll_state from "./message_scroll_state";
|
||||
import * as message_store from "./message_store";
|
||||
|
@ -141,6 +142,8 @@ function create_and_update_message_list(filter, id_info, opts) {
|
|||
}
|
||||
|
||||
if (!restore_rendered_list) {
|
||||
// If we don't have a cached message list for the narrow, we
|
||||
// need to construct one from scratch.
|
||||
let msg_data = new MessageListData({
|
||||
filter,
|
||||
excludes_muted_topics,
|
||||
|
@ -150,10 +153,24 @@ function create_and_update_message_list(filter, id_info, opts) {
|
|||
// with no server help) and we have the message we want to select.
|
||||
// Also update id_info accordingly.
|
||||
if (!filter.requires_adjustment_for_moved_with_target) {
|
||||
const superset_datasets = message_list_data_cache.get_superset_datasets(filter);
|
||||
for (const superset_data of superset_datasets) {
|
||||
maybe_add_local_messages({
|
||||
id_info,
|
||||
msg_data,
|
||||
superset_data,
|
||||
});
|
||||
|
||||
if (id_info.local_select_id) {
|
||||
// We have the message we want to select.
|
||||
break;
|
||||
}
|
||||
|
||||
msg_data = new MessageListData({
|
||||
filter,
|
||||
excludes_muted_topics,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!id_info.local_select_id) {
|
||||
|
@ -849,13 +866,13 @@ function min_defined(a, b) {
|
|||
return a < b ? a : b;
|
||||
}
|
||||
|
||||
function load_local_messages(msg_data) {
|
||||
function load_local_messages(msg_data, superset_data) {
|
||||
// This little helper loads messages into our narrow message
|
||||
// data and returns true unless it's visibly empty. We use this for
|
||||
// cases when our local cache (all_messages_data) has at least
|
||||
// cases when our local cache (superset_data) has at least
|
||||
// one message the user will expect to see in the new narrow.
|
||||
|
||||
const in_msgs = all_messages_data.all_messages();
|
||||
const in_msgs = superset_data.all_messages();
|
||||
msg_data.add_messages(in_msgs);
|
||||
|
||||
return !msg_data.visibly_empty();
|
||||
|
@ -883,6 +900,7 @@ export function maybe_add_local_messages(opts) {
|
|||
// - add messages into our message list from our local cache
|
||||
const id_info = opts.id_info;
|
||||
const msg_data = opts.msg_data;
|
||||
const superset_data = opts.superset_data;
|
||||
const filter = msg_data.filter;
|
||||
const unread_info = narrow_state.get_first_unread_info(filter);
|
||||
|
||||
|
@ -933,7 +951,7 @@ export function maybe_add_local_messages(opts) {
|
|||
// need to look at unread here.
|
||||
id_info.final_select_id = min_defined(id_info.target_id, unread_info.msg_id);
|
||||
|
||||
if (!load_local_messages(msg_data)) {
|
||||
if (!load_local_messages(msg_data, superset_data)) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -959,16 +977,18 @@ export function maybe_add_local_messages(opts) {
|
|||
// Without unread messages or a target ID, we're narrowing to
|
||||
// the very latest message or first unread if matching the narrow allows.
|
||||
|
||||
if (!all_messages_data.fetch_status.has_found_newest()) {
|
||||
// If all_messages_data is not caught up, then we cannot
|
||||
if (!superset_data.fetch_status.has_found_newest()) {
|
||||
// If superset_data is not caught up, then we cannot
|
||||
// populate the latest messages for the target narrow
|
||||
// correctly from there, so we must go to the server.
|
||||
return;
|
||||
}
|
||||
if (!load_local_messages(msg_data)) {
|
||||
|
||||
if (!load_local_messages(msg_data, superset_data)) {
|
||||
return;
|
||||
}
|
||||
// Otherwise, we have matching messages, and all_messages_data
|
||||
|
||||
// Otherwise, we have matching messages, and superset_data
|
||||
// is caught up, so the last message in our now-populated
|
||||
// msg_data object must be the last message matching the
|
||||
// narrow the server could give us, so we can render locally.
|
||||
|
@ -985,20 +1005,20 @@ export function maybe_add_local_messages(opts) {
|
|||
|
||||
// TODO: We could improve on this next condition by considering
|
||||
// cases where
|
||||
// `all_messages_data.fetch_status.has_found_oldest()`; which
|
||||
// `superset_data.fetch_status.has_found_oldest()`; which
|
||||
// would come up with e.g. `near: 0` in a small organization.
|
||||
//
|
||||
// And similarly for `near: max_int` with has_found_newest.
|
||||
if (
|
||||
all_messages_data.visibly_empty() ||
|
||||
id_info.target_id < all_messages_data.first().id ||
|
||||
id_info.target_id > all_messages_data.last().id
|
||||
superset_data.visibly_empty() ||
|
||||
id_info.target_id < superset_data.first().id ||
|
||||
id_info.target_id > superset_data.last().id
|
||||
) {
|
||||
// If the target message is outside the range that we had
|
||||
// available for local population, we must go to the server.
|
||||
return;
|
||||
}
|
||||
if (!load_local_messages(msg_data)) {
|
||||
if (!load_local_messages(msg_data, superset_data)) {
|
||||
return;
|
||||
}
|
||||
if (msg_data.get(id_info.target_id)) {
|
||||
|
|
|
@ -65,6 +65,7 @@ const home_msg_list = {
|
|||
change_message_id: noop,
|
||||
};
|
||||
message_lists.all_rendered_message_lists = () => [home_msg_list, message_lists.current];
|
||||
message_lists.non_rendered_data = () => [];
|
||||
|
||||
const echo = zrequire("echo");
|
||||
const echo_state = zrequire("echo_state");
|
||||
|
|
|
@ -197,6 +197,9 @@ run_test("basics", ({override, override_rewire}) => {
|
|||
visibly_empty: () => false,
|
||||
first: () => ({id: 900}),
|
||||
last: () => ({id: 1100}),
|
||||
filter: {
|
||||
equals: () => false,
|
||||
},
|
||||
};
|
||||
|
||||
$("#navbar-fixed-container").set_height(40);
|
||||
|
|
|
@ -2,10 +2,10 @@
|
|||
|
||||
const {strict: assert} = require("assert");
|
||||
|
||||
const {mock_esm, zrequire} = require("./lib/namespace");
|
||||
const {zrequire} = require("./lib/namespace");
|
||||
const {run_test} = require("./lib/test");
|
||||
|
||||
const all_messages_data = mock_esm("../src/all_messages_data");
|
||||
const all_messages_data = zrequire("../src/all_messages_data");
|
||||
|
||||
const {Filter} = zrequire("../src/filter");
|
||||
const {MessageListData} = zrequire("../src/message_list_data");
|
||||
|
@ -44,7 +44,7 @@ function test_with(fixture) {
|
|||
final_select_id: undefined,
|
||||
};
|
||||
|
||||
all_messages_data.all_messages_data = {
|
||||
all_messages_data.__Rewire__("all_messages_data", {
|
||||
fetch_status: {
|
||||
has_found_newest: () => fixture.has_found_newest,
|
||||
},
|
||||
|
@ -61,13 +61,14 @@ function test_with(fixture) {
|
|||
assert.notEqual(fixture.all_messages, undefined);
|
||||
return fixture.all_messages.at(-1);
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
narrow_state.__Rewire__("get_first_unread_info", () => fixture.unread_info);
|
||||
|
||||
message_view.maybe_add_local_messages({
|
||||
id_info,
|
||||
msg_data,
|
||||
superset_data: all_messages_data.all_messages_data,
|
||||
});
|
||||
|
||||
assert.deepEqual(id_info, fixture.expected_id_info);
|
||||
|
|
Loading…
Reference in New Issue