2020-08-01 03:43:15 +02:00
|
|
|
"use strict";
|
|
|
|
|
2020-05-26 13:58:18 +02:00
|
|
|
const huddle_data = require("./huddle_data");
|
|
|
|
|
2019-11-02 00:06:25 +01:00
|
|
|
// Tuning knobs for message fetching. Times are in milliseconds
// (error_retry_time feeds setTimeout below); sizes are message counts.
const consts = {
    // How long the user must be idle (10s) before we start
    // backfilling older home-view history in the background.
    backfill_idle_time: 10 * 1000,
    // Delay before retrying after a failed /json/messages request.
    error_retry_time: 5000,
    // Number of older messages fetched per idle-time backfill request.
    backfill_batch_size: 1000,
    // Messages fetched before/after the anchor when loading a narrow.
    narrow_before: 50,
    narrow_after: 50,
    // Messages fetched before/after the anchor for the initial
    // home-view fetch in initialize().
    num_before_home_anchor: 200,
    num_after_home_anchor: 200,
    // Batch sizes for scroll-triggered fetches of older/newer messages.
    backward_batch_size: 100,
    forward_batch_size: 100,
    // Batch size used while catching up to the newest message at startup.
    catch_up_batch_size: 1000,
};
|
|
|
|
|
2018-03-16 13:05:54 +01:00
|
|
|
// Integrate a fetched batch of messages into our data structures and
// UI (unread counts, message lists, sidebars, recent topics), then
// invoke the caller's continuation, if any.
function process_result(data, opts) {
    let msgs = data.messages;

    // A successful fetch means the connection works again, so clear
    // any stale connection-error banner — unless it is flagged as a
    // get-events error, which is managed elsewhere.
    const $banner = $("#connection-error");
    if (!$banner.hasClass("get-events-error")) {
        ui_report.hide_error($banner);
    }

    const narrow_still_empty =
        msgs.length === 0 &&
        current_msg_list === message_list.narrowed &&
        message_list.narrowed.empty();
    if (narrow_still_empty) {
        // Even after trying to load more messages, we have no
        // messages to display in this narrow.
        narrow.show_empty_narrow_message();
    }

    for (const message of msgs) {
        message_store.set_message_booleans(message);
    }
    msgs = msgs.map(message_store.add_message_metadata);

    // In case any of the newly fetched messages are new, add them to
    // our unread data structures. It's important that this run even
    // when fetching in a narrow, since we might return unread
    // messages that aren't in the home view data set (e.g. on a muted
    // stream).
    message_util.do_unread_count_updates(msgs);

    // If we're loading more messages into the home view, save them to
    // the message_list.all as well, as the home_msg_list is
    // reconstructed from message_list.all.
    if (opts.msg_list === home_msg_list) {
        message_util.add_old_messages(msgs, message_list.all);
    }

    if (msgs.length > 0) {
        message_util.add_old_messages(msgs, opts.msg_list);
    }

    huddle_data.process_loaded_messages(msgs);
    stream_list.update_streams_sidebar();
    pm_list.update_private_messages();
    recent_topics.process_messages(msgs);

    stream_list.maybe_scroll_narrow_into_view();

    if (opts.cont !== undefined) {
        opts.cont(data, opts);
    }
}
|
|
|
|
|
2018-03-09 02:17:54 +01:00
|
|
|
function get_messages_success(data, opts) {
|
2020-06-15 11:53:00 +02:00
|
|
|
const update_loading_indicator = opts.msg_list === current_msg_list;
|
2018-12-13 00:57:40 +01:00
|
|
|
if (opts.num_before > 0) {
|
2020-05-30 09:45:12 +02:00
|
|
|
opts.msg_list.data.fetch_status.finish_older_batch({
|
2020-07-20 22:18:43 +02:00
|
|
|
update_loading_indicator,
|
2018-12-13 00:57:40 +01:00
|
|
|
found_oldest: data.found_oldest,
|
|
|
|
history_limited: data.history_limited,
|
|
|
|
});
|
|
|
|
if (opts.msg_list === home_msg_list) {
|
2020-06-15 11:53:00 +02:00
|
|
|
// When we update home_msg_list, we need to also update
|
|
|
|
// the fetch_status data structure for message_list.all,
|
|
|
|
// which is never rendered (and just used for
|
|
|
|
// prepopulating narrowed views).
|
2020-05-30 09:45:12 +02:00
|
|
|
message_list.all.data.fetch_status.finish_older_batch({
|
2020-06-15 12:47:11 +02:00
|
|
|
update_loading_indicator: false,
|
2018-12-13 00:57:40 +01:00
|
|
|
found_oldest: data.found_oldest,
|
|
|
|
history_limited: data.history_limited,
|
|
|
|
});
|
|
|
|
}
|
2020-06-14 12:33:12 +02:00
|
|
|
message_scroll.update_top_of_narrow_notices(opts.msg_list);
|
2018-12-13 00:57:40 +01:00
|
|
|
}
|
|
|
|
|
2018-12-13 01:13:29 +01:00
|
|
|
if (opts.num_after > 0) {
|
2020-07-15 00:34:28 +02:00
|
|
|
opts.fetch_again = opts.msg_list.data.fetch_status.finish_newer_batch(data.messages, {
|
2020-07-20 22:18:43 +02:00
|
|
|
update_loading_indicator,
|
2020-07-15 00:34:28 +02:00
|
|
|
found_newest: data.found_newest,
|
|
|
|
});
|
2018-12-13 01:13:29 +01:00
|
|
|
if (opts.msg_list === home_msg_list) {
|
2020-06-15 11:53:00 +02:00
|
|
|
// When we update home_msg_list, we need to also update
|
|
|
|
// the fetch_status data structure for message_list.all,
|
|
|
|
// which is never rendered (and just used for
|
|
|
|
// prepopulating narrowed views).
|
2020-06-16 17:58:37 +02:00
|
|
|
opts.fetch_again = message_list.all.data.fetch_status.finish_newer_batch(
|
2020-07-15 00:34:28 +02:00
|
|
|
data.messages,
|
|
|
|
{
|
2020-06-16 17:58:37 +02:00
|
|
|
update_loading_indicator: false,
|
|
|
|
found_newest: data.found_newest,
|
2020-07-15 00:34:28 +02:00
|
|
|
},
|
|
|
|
);
|
2018-12-13 01:13:29 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-03-19 18:19:48 +01:00
|
|
|
if (opts.msg_list.narrowed && opts.msg_list !== current_msg_list) {
|
|
|
|
// We unnarrowed before receiving new messages so
|
|
|
|
// don't bother processing the newly arrived messages.
|
|
|
|
return;
|
|
|
|
}
|
2018-06-04 21:09:11 +02:00
|
|
|
if (!data) {
|
2017-11-09 16:26:38 +01:00
|
|
|
// The server occasionally returns no data during a
|
2017-03-19 18:19:48 +01:00
|
|
|
// restart. Ignore those responses and try again
|
2020-07-02 01:45:54 +02:00
|
|
|
setTimeout(() => {
|
2018-03-09 02:17:54 +01:00
|
|
|
exports.load_messages(opts);
|
2017-03-19 18:19:48 +01:00
|
|
|
}, 0);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2018-03-16 13:05:54 +01:00
|
|
|
process_result(data, opts);
|
2017-03-19 18:19:48 +01:00
|
|
|
}
|
|
|
|
|
2019-07-11 18:54:28 +02:00
|
|
|
// Rewrite the data.narrow filters to refer to users/streams by ID
// rather than by email/name string, for the operators whose server
// API supports that. We deliberately do the conversion here rather
// than inside the Filter code, since various modules still expect the
// string form and would break otherwise.
function handle_operators_supporting_id_based_api(data) {
    // Operators whose operand becomes a list of user IDs.
    const operators_supporting_ids = ["pm-with"];
    // Operators whose operand becomes a single ID.
    const operators_supporting_id = ["sender", "group-pm-with", "stream"];

    if (data.narrow === undefined) {
        return data;
    }

    const converted = JSON.parse(data.narrow).map((filter) => {
        if (operators_supporting_ids.includes(filter.operator)) {
            filter.operand = people.emails_strings_to_user_ids_array(filter.operand);
        } else if (operators_supporting_id.includes(filter.operator)) {
            if (filter.operator === "stream") {
                const stream_id = stream_data.get_stream_id(filter.operand);
                if (stream_id !== undefined) {
                    filter.operand = stream_id;
                }
            } else {
                // The other operands supporting object IDs all work
                // with user objects.
                const person = people.get_by_email(filter.operand);
                if (person !== undefined) {
                    filter.operand = person.user_id;
                }
            }
        }
        return filter;
    });

    data.narrow = JSON.stringify(converted);
    return data;
}
|
2017-03-19 18:19:48 +01:00
|
|
|
|
2018-03-09 02:17:54 +01:00
|
|
|
// Core fetch entry point: requests opts.num_before/opts.num_after
// messages around opts.anchor from /json/messages for opts.msg_list,
// updating fetch_status loading state before the request and calling
// get_messages_success (and ultimately opts.cont) on success.
// Retries automatically on non-400 errors.
exports.load_messages = function (opts) {
    if (typeof opts.anchor === "number") {
        // Locally echoed messages have floating-point temporary IDs,
        // which is intended to be a completely client-side detail. We
        // need to round these to the nearest integer before sending a
        // request to the server. (Note toFixed() returns a string,
        // which is fine for a request parameter.)
        opts.anchor = opts.anchor.toFixed();
    }
    let data = {anchor: opts.anchor, num_before: opts.num_before, num_after: opts.num_after};

    // For a narrowed fetch, send the narrow's public operators (plus
    // any server-enforced narrow from page_params).
    if (opts.msg_list.narrowed && narrow_state.active()) {
        let operators = narrow_state.public_operators();
        if (page_params.narrow !== undefined) {
            operators = operators.concat(page_params.narrow);
        }
        data.narrow = JSON.stringify(operators);
    }
    // In the single-stream "narrow_stream" mode, the home view itself
    // is narrowed server-side.
    if (opts.msg_list === home_msg_list && page_params.narrow_stream !== undefined) {
        data.narrow = JSON.stringify(page_params.narrow);
    }

    // Only show loading indicators on the list currently on screen.
    let update_loading_indicator = opts.msg_list === current_msg_list;
    if (opts.num_before > 0) {
        opts.msg_list.data.fetch_status.start_older_batch({
            update_loading_indicator,
        });
        // Keep message_list.all's fetch_status in sync when fetching
        // for the home view.
        if (opts.msg_list === home_msg_list) {
            message_list.all.data.fetch_status.start_older_batch({
                update_loading_indicator,
            });
        }
    }

    if (opts.num_after > 0) {
        // We hide the bottom loading indicator when we're fetching both top and bottom messages.
        update_loading_indicator = update_loading_indicator && opts.num_before === 0;
        opts.msg_list.data.fetch_status.start_newer_batch({
            update_loading_indicator,
        });
        if (opts.msg_list === home_msg_list) {
            message_list.all.data.fetch_status.start_newer_batch({
                update_loading_indicator,
            });
        }
    }

    data.client_gravatar = true;
    // Convert email/name operands to IDs where the API supports it.
    data = handle_operators_supporting_id_based_api(data);

    channel.get({
        url: "/json/messages",
        data,
        idempotent: true,
        success(data) {
            get_messages_success(data, opts);
        },
        error(xhr) {
            if (opts.msg_list.narrowed && opts.msg_list !== current_msg_list) {
                // We unnarrowed before getting an error so don't
                // bother trying again or doing further processing.
                return;
            }
            if (xhr.status === 400) {
                // Bad request: We probably specified a narrow operator
                // for a nonexistent stream or something. We shouldn't
                // retry or display a connection error.
                //
                // FIXME: Warn the user when this has happened?
                const data = {
                    messages: [],
                };
                // Run the normal result pipeline with an empty batch
                // so empty-narrow UI and continuations still fire.
                process_result(data, opts);
                return;
            }

            // We might want to be more clever here
            $("#connection-error").addClass("show");
            setTimeout(() => {
                exports.load_messages(opts);
            }, consts.error_retry_time);
        },
    });
};
|
|
|
|
|
2018-03-09 15:32:28 +01:00
|
|
|
exports.load_messages_for_narrow = function (opts) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const msg_list = message_list.narrowed;
|
2018-03-16 14:15:30 +01:00
|
|
|
|
2019-10-25 09:45:13 +02:00
|
|
|
exports.load_messages({
|
2020-01-28 06:57:07 +01:00
|
|
|
anchor: opts.anchor,
|
2018-03-09 15:32:28 +01:00
|
|
|
num_before: consts.narrow_before,
|
|
|
|
num_after: consts.narrow_after,
|
2020-07-20 22:18:43 +02:00
|
|
|
msg_list,
|
2020-06-16 06:22:51 +02:00
|
|
|
cont: opts.cont,
|
2018-03-09 15:32:28 +01:00
|
|
|
});
|
|
|
|
};
|
2017-03-19 18:19:48 +01:00
|
|
|
|
2018-03-21 16:03:03 +01:00
|
|
|
exports.get_backfill_anchor = function (msg_list) {
|
2018-03-21 16:19:28 +01:00
|
|
|
if (msg_list === home_msg_list) {
|
|
|
|
msg_list = message_list.all;
|
|
|
|
}
|
|
|
|
|
2020-06-06 16:35:50 +02:00
|
|
|
const oldest_msg = msg_list.first();
|
|
|
|
if (oldest_msg) {
|
|
|
|
return oldest_msg.id;
|
2018-03-21 16:03:03 +01:00
|
|
|
}
|
2020-06-06 16:35:50 +02:00
|
|
|
|
|
|
|
// msg_list is empty, which is an impossible
|
|
|
|
// case, raise a fatal error.
|
|
|
|
blueslip.fatal("There are no message available to backfill.");
|
|
|
|
return;
|
2018-03-21 16:03:03 +01:00
|
|
|
};
|
|
|
|
|
2018-03-11 20:19:30 +01:00
|
|
|
exports.get_frontfill_anchor = function (msg_list) {
|
|
|
|
if (msg_list === home_msg_list) {
|
|
|
|
msg_list = message_list.all;
|
|
|
|
}
|
|
|
|
|
2019-11-02 00:06:25 +01:00
|
|
|
const last_msg = msg_list.last();
|
2018-03-11 20:19:30 +01:00
|
|
|
|
|
|
|
if (last_msg) {
|
|
|
|
return last_msg.id;
|
|
|
|
}
|
|
|
|
|
2020-06-06 16:35:50 +02:00
|
|
|
// Although it is impossible that we reach here since we
|
|
|
|
// are already checking `msg_list.fetch_status.can_load_newer_messages`
|
|
|
|
// and user cannot be scrolling down on an empty message_list to
|
|
|
|
// fetch more data, and if user is, then the available data is wrong
|
|
|
|
// and we raise a fatal error.
|
|
|
|
blueslip.fatal("There are no message available to frontfill.");
|
|
|
|
return;
|
2018-03-11 20:19:30 +01:00
|
|
|
};
|
|
|
|
|
2018-03-09 02:23:49 +01:00
|
|
|
exports.maybe_load_older_messages = function (opts) {
|
|
|
|
// This function gets called when you scroll to the top
|
|
|
|
// of your window, and you want to get messages older
|
2020-03-28 01:25:56 +01:00
|
|
|
// than what the browsers originally fetched.
|
2019-11-02 00:06:25 +01:00
|
|
|
const msg_list = opts.msg_list;
|
2020-05-30 09:45:12 +02:00
|
|
|
if (!msg_list.data.fetch_status.can_load_older_messages()) {
|
2018-03-09 22:23:23 +01:00
|
|
|
// We may already be loading old messages or already
|
|
|
|
// got the oldest one.
|
2017-03-19 18:19:48 +01:00
|
|
|
return;
|
|
|
|
}
|
2018-03-21 16:39:03 +01:00
|
|
|
|
|
|
|
exports.do_backfill({
|
2020-07-20 22:18:43 +02:00
|
|
|
msg_list,
|
2018-03-21 16:39:03 +01:00
|
|
|
num_before: consts.backward_batch_size,
|
|
|
|
});
|
|
|
|
};
|
|
|
|
|
|
|
|
exports.do_backfill = function (opts) {
|
2019-11-02 00:06:25 +01:00
|
|
|
const msg_list = opts.msg_list;
|
2020-06-22 22:30:30 +02:00
|
|
|
const anchor = exports.get_backfill_anchor(msg_list);
|
2018-03-08 21:25:14 +01:00
|
|
|
|
2018-03-09 02:17:54 +01:00
|
|
|
exports.load_messages({
|
2020-07-20 22:18:43 +02:00
|
|
|
anchor,
|
2018-03-21 16:39:03 +01:00
|
|
|
num_before: opts.num_before,
|
2017-03-19 18:19:48 +01:00
|
|
|
num_after: 0,
|
2020-07-20 22:18:43 +02:00
|
|
|
msg_list,
|
|
|
|
cont() {
|
2018-03-21 16:39:03 +01:00
|
|
|
if (opts.cont) {
|
|
|
|
opts.cont();
|
|
|
|
}
|
2017-03-19 18:19:48 +01:00
|
|
|
},
|
|
|
|
});
|
|
|
|
};
|
|
|
|
|
2018-03-11 20:19:30 +01:00
|
|
|
exports.maybe_load_newer_messages = function (opts) {
|
2020-06-06 16:35:50 +02:00
|
|
|
// This function gets called when you scroll to the bottom
|
2018-03-11 20:19:30 +01:00
|
|
|
// of your window, and you want to get messages newer
|
2020-03-28 01:25:56 +01:00
|
|
|
// than what the browsers originally fetched.
|
2019-11-02 00:06:25 +01:00
|
|
|
const msg_list = opts.msg_list;
|
2018-03-11 20:19:30 +01:00
|
|
|
|
2020-05-30 09:45:12 +02:00
|
|
|
if (!msg_list.data.fetch_status.can_load_newer_messages()) {
|
2018-03-11 20:19:30 +01:00
|
|
|
// We may already be loading new messages or already
|
|
|
|
// got the newest one.
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2020-06-22 22:30:30 +02:00
|
|
|
const anchor = exports.get_frontfill_anchor(msg_list);
|
2018-03-11 20:19:30 +01:00
|
|
|
|
2020-06-16 17:58:37 +02:00
|
|
|
function load_more(data, args) {
|
|
|
|
if (args.fetch_again && args.msg_list === current_msg_list) {
|
2020-07-16 22:40:18 +02:00
|
|
|
exports.maybe_load_newer_messages({msg_list: current_msg_list});
|
2020-06-16 17:58:37 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-03-11 20:19:30 +01:00
|
|
|
exports.load_messages({
|
2020-07-20 22:18:43 +02:00
|
|
|
anchor,
|
2018-03-11 20:19:30 +01:00
|
|
|
num_before: 0,
|
|
|
|
num_after: consts.forward_batch_size,
|
2020-07-20 22:18:43 +02:00
|
|
|
msg_list,
|
2020-06-16 17:58:37 +02:00
|
|
|
cont: load_more,
|
2018-03-11 20:19:30 +01:00
|
|
|
});
|
|
|
|
};
|
|
|
|
|
2018-03-20 15:32:43 +01:00
|
|
|
exports.start_backfilling_messages = function () {
|
|
|
|
// backfill more messages after the user is idle
|
2020-07-15 00:34:28 +02:00
|
|
|
$(document).idle({
|
|
|
|
idle: consts.backfill_idle_time,
|
2020-07-20 22:18:43 +02:00
|
|
|
onIdle() {
|
2020-07-15 00:34:28 +02:00
|
|
|
exports.do_backfill({
|
|
|
|
num_before: consts.backfill_batch_size,
|
|
|
|
msg_list: home_msg_list,
|
|
|
|
});
|
|
|
|
},
|
|
|
|
});
|
2018-03-20 15:32:43 +01:00
|
|
|
};
|
|
|
|
|
2017-07-04 01:30:47 +02:00
|
|
|
// Kick off the initial message fetch for the home view, then keep
// fetching forward (via the recursive load_more continuation) until
// we've caught up to the newest message, at which point we notify
// server_events and start idle-time backfilling.
exports.initialize = function () {
    // get the initial message list
    function load_more(data) {
        // If we haven't selected a message in the home view yet, and
        // the home view isn't empty, we select the anchor message here.
        if (home_msg_list.selected_id() === -1 && !home_msg_list.empty()) {
            // We fall back to the closest selected id, as the user
            // may have removed a stream from the home view while we
            // were loading data.
            home_msg_list.select_id(data.anchor, {
                then_scroll: true,
                use_closest: true,
                target_scroll_offset: page_params.initial_offset,
            });
        }

        // Caught up to the newest message: hand off to the event
        // system and begin backfilling older history.
        if (data.found_newest) {
            server_events.home_view_loaded();
            exports.start_backfilling_messages();
            return;
        }

        // If we fall through here, we need to keep fetching more data, and
        // we'll call back to the function we're in.
        const messages = data.messages;
        const latest_id = messages[messages.length - 1].id;

        exports.load_messages({
            anchor: latest_id,
            num_before: 0,
            num_after: consts.catch_up_batch_size,
            msg_list: home_msg_list,
            cont: load_more,
        });
    }

    let anchor;
    if (page_params.initial_pointer) {
        // If we're doing a server-initiated reload, similar to a
        // near: narrow query, we want to select a specific message.
        anchor = page_params.initial_pointer;
    } else {
        // Otherwise, we should just use the first unread message in
        // the user's unmuted history as our anchor.
        anchor = "first_unread";
    }
    exports.load_messages({
        anchor,
        num_before: consts.num_before_home_anchor,
        num_after: consts.num_after_home_anchor,
        msg_list: home_msg_list,
        cont: load_more,
    });
};
|
2017-03-19 18:19:48 +01:00
|
|
|
|
2019-10-25 09:45:13 +02:00
|
|
|
// Expose this module's exports on `window` for the global-scope
// module scheme used by this codebase.
window.message_fetch = exports;
|