import $ from "jquery";

import * as alert_words from "./alert_words";
import {all_messages_data} from "./all_messages_data";
import * as channel from "./channel";
import * as compose_fade from "./compose_fade";
import * as compose_state from "./compose_state";
import * as compose_validate from "./compose_validate";
import * as condense from "./condense";
import * as drafts from "./drafts";
import * as huddle_data from "./huddle_data";
import * as message_edit from "./message_edit";
import * as message_edit_history from "./message_edit_history";
import * as message_helper from "./message_helper";
import * as message_list from "./message_list";
import * as message_lists from "./message_lists";
import * as message_store from "./message_store";
import * as message_util from "./message_util";
import * as narrow from "./narrow";
import * as narrow_state from "./narrow_state";
import * as notifications from "./notifications";
import {page_params} from "./page_params";
import * as pm_list from "./pm_list";
import * as recent_senders from "./recent_senders";
import * as recent_topics_ui from "./recent_topics_ui";
import * as resize from "./resize";
import * as stream_list from "./stream_list";
import * as stream_topic_history from "./stream_topic_history";
import * as sub_store from "./sub_store";
import * as unread from "./unread";
import * as unread_ops from "./unread_ops";
import * as unread_ui from "./unread_ui";
import * as util from "./util";
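
// Helper for the case where the current narrow's filter cannot be
// applied locally: ask the server which of `messages` belong in the
// current narrow, hand the matching ones to `callback(new_messages,
// msg_list)`, and report the rest via
// notifications.notify_messages_outside_current_search.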
function maybe_add_narrowed_messages(messages, msg_list, callback) {
    const ids = [];

    for (const elem of messages) {
        ids.push(elem.id);
    }
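
    // Ask the server which of these messages match the current
    // narrow; this path exists for filters (such as full-text search)
    // that cannot be applied locally.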
    channel.get({
        url: "/json/messages/matches_narrow",
        data: {
            msg_ids: JSON.stringify(ids),
            narrow: JSON.stringify(narrow_state.public_operators()),
        },
        timeout: 5000,
        success(data) {
            if (msg_list !== message_lists.current) {
                // We unnarrowed in the meantime
                return;
            }

            let new_messages = [];
            const elsewhere_messages = [];

            for (const elem of messages) {
                if (Object.hasOwn(data.messages, elem.id)) {
                    util.set_match_data(elem, data.messages[elem.id]);
                    new_messages.push(elem);
                } else {
                    elsewhere_messages.push(elem);
                }
            }

            // This second call to process_new_message in the
            // insert_new_messages code path is designed to replace
            // our slightly stale message object with the latest copy
            // from the message_store. This helps in very rare race
            // conditions, where e.g. the current user's name was
            // edited in between when they sent the message and when
            // we hear back from the server and can echo the new
            // message.
            new_messages = new_messages.map((message) =>
                message_helper.process_new_message(message),
            );

            callback(new_messages, msg_list);
            unread_ops.process_visible();
            notifications.notify_messages_outside_current_search(elsewhere_messages);
        },
        error() {
            // We might want to be more clever here
            setTimeout(() => {
                if (msg_list === message_lists.current) {
                    // Don't actually try again if we unnarrowed
                    // while waiting
                    maybe_add_narrowed_messages(messages, msg_list, callback);
                }
            }, 5000);
        },
    });
}
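
// Process a batch of newly arrived messages: run them through
// message_helper.process_new_message, add them to the client data
// stores and the relevant message lists, and update unread counts,
// the sidebars, recent topics, and notifications.
// `sent_by_this_client` is true if any of the messages were sent from
// this client.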
export function insert_new_messages(messages, sent_by_this_client) {
    messages = messages.map((message) => message_helper.process_new_message(message));

    unread.process_loaded_messages(messages);
    huddle_data.process_loaded_messages(messages);

    // all_messages_data is the data that we use to populate
    // other lists, so we always update this.
    message_util.add_new_messages_data(messages, all_messages_data);
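
    // render_info, when set below, reports whether the newly rendered
    // messages ended up off-screen, i.e. whether the sender may need
    // to scroll down to see their own message.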
    let render_info;

    if (narrow_state.active()) {
        // We do this NOW even though the home view is not active,
        // because we want the home view to load fast later.
        message_util.add_new_messages(messages, message_lists.home);

        if (narrow_state.filter().can_apply_locally()) {
            render_info = message_util.add_new_messages(messages, message_list.narrowed);
        } else {
            // If we cannot apply the filter locally, we have to wait
            // for this callback to happen to notify the user.
            maybe_add_narrowed_messages(
                messages,
                message_list.narrowed,
                message_util.add_new_messages,
            );
        }
    } else {
        // We're in the home view, so update its list.
        render_info = message_util.add_new_messages(messages, message_lists.home);
    }

    if (sent_by_this_client) {
        const need_user_to_scroll = render_info && render_info.need_user_to_scroll;
        // sent_by_this_client will be true if ANY of the messages
        // were sent by this client; notifications.notify_local_mixes
        // will filter out any not sent by us.
        notifications.notify_local_mixes(messages, need_user_to_scroll);
    }

    unread_ui.update_unread_counts();
    resize.resize_page_components();

    unread_ops.process_visible();
    notifications.received_messages(messages);
    stream_list.update_streams_sidebar();
    pm_list.update_private_messages();
    recent_topics_ui.process_messages(messages);
}
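
// Apply `update_message` server events: content edits, flag changes,
// and topic/stream moves. This updates the stored message objects and
// related data structures, rerenders the affected message lists, and,
// if the active narrow itself was moved, renarrows to the new
// location.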
export function update_messages(events) {
    const msgs_to_rerender = [];
    let topic_edited = false;
    let changed_narrow = false;
    let changed_compose = false;
    let message_content_edited = false;
    let stream_changed = false;
    let stream_archived = false;
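
    // Each event describes an edit to a single message
    // (event.message_id); topic/stream moves additionally list all
    // affected messages in event.message_ids.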
    for (const event of events) {
        const msg = message_store.get(event.message_id);
        if (msg === undefined) {
            continue;
        }

        delete msg.local_edit_timestamp;

        msgs_to_rerender.push(msg);

        message_store.update_booleans(msg, event.flags);

        unread.update_message_for_mention(msg);

        condense.un_cache_message_content_height(msg.id);

        if (event.rendered_content !== undefined) {
            msg.content = event.rendered_content;
        }

        if (event.is_me_message !== undefined) {
            msg.is_me_message = event.is_me_message;
        }

        const $row = message_lists.current.get_row(event.message_id);
        if ($row.length > 0) {
            message_edit.end_message_row_edit($row);
        }

        // new_topic will be undefined if the topic is unchanged.
        const new_topic = util.get_edit_event_topic(event);
        // new_stream_id will be undefined if the stream is unchanged.
        const new_stream_id = event.new_stream_id;
        // old_stream_id will be present and valid for all stream messages.
        const old_stream_id = event.stream_id;
        // old_stream will be undefined if the message was moved from
        // a stream that the current user doesn't have access to.
        const old_stream = sub_store.get(event.stream_id);

        // Save the content edit to the front end msg.edit_history
        // before topic edits to ensure that combined topic / content
        // edits have edit_history logged for both before any
        // potential narrowing as part of the topic edit loop.
        if (event.orig_content !== undefined) {
            if (page_params.realm_allow_edit_history) {
                // Note that we do this for topic edits separately, below.
                // If an event changed both content and topic, we'll generate
                // two client-side events, which is probably good for display.
                const edit_history_entry = {
                    user_id: event.user_id,
                    prev_content: event.orig_content,
                    prev_rendered_content: event.orig_rendered_content,
                    prev_rendered_content_version: event.prev_rendered_content_version,
                    timestamp: event.edit_timestamp,
                };
                // Add the entry to the message's edit_history; for
                // edited messages, edit_history needs to be maintained
                // on the frontend copy of the message object.
                if (msg.edit_history === undefined) {
                    msg.edit_history = [];
                }
                msg.edit_history = [edit_history_entry].concat(msg.edit_history);
            }
            message_content_edited = true;

            // Update raw_content, so that editing a few times in a row is fast.
            msg.raw_content = event.content;
        }

        // A topic or stream edit may affect multiple messages, listed in
        // event.message_ids. event.message_id is still the first message
        // where the user initiated the edit.
        topic_edited = new_topic !== undefined;
        stream_changed = new_stream_id !== undefined;
        stream_archived = old_stream === undefined;
        if (topic_edited || stream_changed) {
            const going_forward_change = ["change_later", "change_all"].includes(
                event.propagate_mode,
            );

            const stream_name = stream_archived ? undefined : old_stream.name;
            const compose_stream_name = compose_state.stream_name();
            const orig_topic = util.get_edit_event_orig_topic(event);

            const current_filter = narrow_state.filter();
            const current_selected_id = message_lists.current.selected_id();
            const selection_changed_topic = event.message_ids.includes(current_selected_id);
            const event_messages = [];
            for (const message_id of event.message_ids) {
                // We don't need to concern ourselves with updating data
                // structures for messages we don't have stored locally.
                const message = message_store.get(message_id);
                if (message !== undefined) {
                    event_messages.push(message);
                }
            }
            // The event.message_ids received from the server are not in sorted order.
            event_messages.sort((a, b) => a.id - b.id);

            if (
                going_forward_change &&
                stream_name &&
                compose_stream_name &&
                stream_name.toLowerCase() === compose_stream_name.toLowerCase() &&
                orig_topic === compose_state.topic()
            ) {
                changed_compose = true;
                compose_state.topic(new_topic);
                compose_validate.warn_if_topic_resolved(true);
                compose_fade.set_focused_recipient("stream");
            }

            drafts.rename_topic(old_stream_id, orig_topic, new_topic);
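
            // For each moved message that we have stored locally,
            // update the data structures keyed on stream/topic: edit
            // history, stream_topic_history, unread counts, and the
            // message object itself.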
            for (const msg of event_messages) {
                if (page_params.realm_allow_edit_history) {
                    /* Simulate the format of server-generated edit
                     * history events. This logic ensures that all
                     * messages that were moved are displayed as such
                     * without a browser reload. */
                    const edit_history_entry = {
                        user_id: event.user_id,
                        timestamp: event.edit_timestamp,
                    };
                    if (stream_changed) {
                        edit_history_entry.stream = new_stream_id;
                        edit_history_entry.prev_stream = old_stream_id;
                    }
                    if (topic_edited) {
                        edit_history_entry.topic = new_topic;
                        edit_history_entry.prev_topic = orig_topic;
                    }
                    if (msg.edit_history === undefined) {
                        msg.edit_history = [];
                    }
                    msg.edit_history = [edit_history_entry].concat(msg.edit_history);
                }
                msg.last_edit_timestamp = event.edit_timestamp;

                // Remove the recent topics entry for the old topics;
                // must be called before we call set_message_topic.
                //
                // TODO: Use a single bulk request to do this removal.
                // Note that we need to be careful to only remove IDs
                // that were present in stream_topic_history data.
                // This may not be possible to do correctly without extra
                // complexity; the present loop assumes stream_topic_history has
                // only messages in message_store, but that's been false
                // since we added the server_history feature.
                stream_topic_history.remove_messages({
                    stream_id: msg.stream_id,
                    topic_name: msg.topic,
                    num_messages: 1,
                    max_removed_msg_id: msg.id,
                });

                // Update the unread counts; again, this must be called
                // before we modify the topic field on the message.
                unread.update_unread_topics(msg, event);

                // Now edit the attributes of our message object.
                if (topic_edited) {
                    msg.topic = new_topic;
                    msg.topic_links = event.topic_links;
                }
                if (stream_changed) {
                    const new_stream_name = sub_store.get(new_stream_id).name;
                    msg.stream_id = new_stream_id;
                    msg.stream = new_stream_name;
                    msg.display_recipient = new_stream_name;
                }

                // Add the recent topics entry for the new stream/topics.
                stream_topic_history.add_message({
                    stream_id: msg.stream_id,
                    topic_name: msg.topic,
                    message_id: msg.id,
                });
            }

            if (
                going_forward_change &&
                // This logic is a bit awkward. What we're trying to
                // accomplish is two things:
                //
                // * If we're currently narrowed to a topic that was just moved,
                //   renarrow to the new location.
                // * We determine whether enough of the topic was moved to justify
                //   renarrowing by checking if the currently selected message is moved.
                //
                // Corner cases around only moving some messages in a topic
                // need to be thought about carefully when making changes.
                //
                // Code further down takes care of the actual rerendering of
                // messages within a narrow.
                selection_changed_topic &&
                current_filter &&
                current_filter.has_topic(stream_name, orig_topic)
            ) {
                let new_filter = current_filter;
                if (new_filter && stream_changed) {
                    // TODO: This logic doesn't handle the
                    // case where we're a guest user and the
                    // message moves to a stream we cannot
                    // access, which would cause the
                    // stream_data lookup here to fail.
                    //
                    // The fix is likely somewhat involved, so punting for now.
                    const new_stream_name = sub_store.get(new_stream_id).name;
                    new_filter = new_filter.filter_with_new_params({
                        operator: "stream",
                        operand: new_stream_name,
                    });
                    changed_narrow = true;
                }

                if (new_filter && topic_edited) {
                    new_filter = new_filter.filter_with_new_params({
                        operator: "topic",
                        operand: new_topic,
                    });
                    changed_narrow = true;
                }
                // NOTE: We should only change narrows after we have
                // finished updating the local data and UI. This avoids
                // conflicts between data fetched from the server (which
                // is already updated) and the data available locally
                // when we move to the new narrow.
                if (changed_narrow) {
                    const operators = new_filter.operators();
                    const opts = {
                        trigger: "stream/topic change",
                        then_select_id: current_selected_id,
                    };
                    narrow.activate(operators, opts);
                }
            }
|
|
|
|
|
2020-11-12 22:03:45 +01:00
|
|
|
// Ensure messages that are no longer part of this
|
|
|
|
// narrow are deleted and messages that are now part
|
|
|
|
// of this narrow are added to the message_list.
|
2020-06-14 10:13:14 +02:00
|
|
|
//
|
|
|
|
// Even if we end up renarrowing, the message_list_data
|
|
|
|
// part of this is important for non-rendering message
|
|
|
|
// lists, so we do this unconditionally. Most correctly,
|
|
|
|
// this should be a loop over all valid message_list_data
|
|
|
|
// objects, without the rerender (which will naturally
|
|
|
|
// happen in the following code).
|
2022-02-16 06:57:48 +01:00
|
|
|
if (!changed_narrow && current_filter) {
|
2020-11-12 22:03:45 +01:00
|
|
|
let message_ids_to_remove = [];
|
2022-02-16 06:57:48 +01:00
|
|
|
if (current_filter.can_apply_locally()) {
|
2020-11-12 22:03:45 +01:00
|
|
|
const predicate = current_filter.predicate();
|
|
|
|
message_ids_to_remove = event_messages.filter((msg) => !predicate(msg));
|
|
|
|
message_ids_to_remove = message_ids_to_remove.map((msg) => msg.id);
|
2022-02-16 06:57:48 +01:00
|
|
|
// We filter out messages that do not belong to the message
|
|
|
|
// list and then pass these to the remove messages codepath.
|
|
|
|
// While we can pass all our messages to the add messages
|
|
|
|
// codepath as the filtering is done within the method.
|
|
|
|
message_lists.current.remove_and_rerender(message_ids_to_remove);
|
|
|
|
message_lists.current.add_messages(event_messages);
|
|
|
|
} else {
|
2022-02-16 07:16:55 +01:00
|
|
|
// Remove existing message that were updated, since
|
|
|
|
// they may not be a part of the filter now. Also,
|
|
|
|
// this will help us rerender them via
|
|
|
|
// maybe_add_narrowed_messages, if they were
|
|
|
|
// simply updated.
|
|
|
|
const updated_messages = event_messages.filter(
|
|
|
|
(msg) => message_lists.current.data.get(msg.id) !== undefined,
|
|
|
|
);
|
|
|
|
message_lists.current.remove_and_rerender(
|
|
|
|
updated_messages.map((msg) => msg.id),
|
|
|
|
);
|
2022-02-16 06:57:48 +01:00
|
|
|
// For filters that cannot be processed locally, ask server.
|
|
|
|
maybe_add_narrowed_messages(
|
|
|
|
event_messages,
|
|
|
|
message_lists.current,
|
|
|
|
message_util.add_messages,
|
|
|
|
);
|
2020-11-12 22:03:45 +01:00
|
|
|
}
|
js: Automatically convert _.each to for…of.
This commit was automatically generated by the following script,
followed by lint --fix and a few small manual lint-related cleanups.
import * as babelParser from "recast/parsers/babel";
import * as recast from "recast";
import * as tsParser from "recast/parsers/typescript";
import { builders as b, namedTypes as n } from "ast-types";
            }
        }

        // Mark the message as edited for the UI. The rendering_only
        // flag is used to indicate update_message events that are
        // triggered by server latency optimizations, not user
        // interactions; these should not generate edit history updates.
        if (!event.rendering_only) {
            msg.last_edit_timestamp = event.edit_timestamp;
        }

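        // The edited content can change whether this message notifies
        // or alerts the current user, so re-run notification and
        // alert-word processing on the updated message.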
        notifications.received_messages([msg]);
        alert_words.process_message(msg);

        if (topic_edited || stream_changed) {
            // The topic and/or stream of these messages was changed.
            let pre_edit_topic = util.get_edit_event_orig_topic(event);
            let post_edit_topic = new_topic;

            if (!topic_edited) {
                pre_edit_topic = msg.topic;
                post_edit_topic = pre_edit_topic;
            }

            // new_stream_id is undefined if this is only a topic edit.
            const post_edit_stream_id = new_stream_id || old_stream_id;

            const args = [old_stream_id, pre_edit_topic, post_edit_topic, post_edit_stream_id];
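            // Update the recent-senders and recent-topics data so that
            // typeahead ordering and the "Recent topics" view reflect
            // the moved messages.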
            recent_senders.process_topic_edit({
                message_ids: event.message_ids,
                old_stream_id,
                old_topic: pre_edit_topic,
                new_stream_id: post_edit_stream_id,
                new_topic: post_edit_topic,
            });
            recent_topics_ui.process_topic_edit(...args);
        }

// Rerender "Message edit history" if it was open to the edited message.
|
2020-07-15 00:34:28 +02:00
|
|
|
if (
|
2021-07-07 11:47:18 +02:00
|
|
|
$("#message-edit-history").parents(".micromodal").hasClass("modal--open") &&
|
2020-10-07 09:17:30 +02:00
|
|
|
msg.id === Number.parseInt($("#message-history").attr("data-message-id"), 10)
|
2020-07-15 00:34:28 +02:00
|
|
|
) {
|
2020-06-07 04:50:31 +02:00
|
|
|
message_edit_history.fetch_and_render_message_history(msg);
|
2020-05-23 19:07:31 +02:00
|
|
|
}
|
    }

    // If a topic was edited, we re-render the whole view to get any
    // propagated edits to be updated (since the topic edits can have
    // changed the correct grouping of messages).
    if (topic_edited || stream_changed) {
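        // update_muting_and_rerender() refilters the list for muting
        // (a topic/stream move can change which messages are muted)
        // and then rerenders it.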
        message_lists.home.update_muting_and_rerender();
        // However, we don't need to rerender message_list.narrowed if
        // we just changed the narrow earlier in this function.
        //
        // TODO: We can potentially optimize this logic to avoid
        // calling `update_muting_and_rerender` if the muted
        // messages would not match the view before or after this
        // edit. Doing so could save significant work, since most
        // topic edits will not match the current topic narrow in
        // large organizations.
        if (!changed_narrow && message_lists.current === message_list.narrowed) {
            message_list.narrowed.update_muting_and_rerender();
        }
    } else {
        // If the content of the message was edited, we do a special animation.
        message_lists.current.view.rerender_messages(msgs_to_rerender, message_content_edited);
        if (message_lists.current === message_list.narrowed) {
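            // The edit also affects the home message list, so rerender
            // it as well (without the content-edited animation).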
            message_lists.home.view.rerender_messages(msgs_to_rerender);
        }
    }

    if (changed_compose) {
        // We need to do this after we rerender the message list, to
        // produce correct results.
        compose_fade.update_message_list();
    }

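    // Finally, refresh the unread counts and the stream/PM sidebars,
    // all of which may be affected by the edited messages.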
    unread_ui.update_unread_counts();
    stream_list.update_streams_sidebar();
    pm_list.update_private_messages();
}

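// Remove deleted messages from the cached message data and rerender
// the message lists that contained them.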
export function remove_messages(message_ids) {
    all_messages_data.remove(message_ids);
    for (const list of [message_lists.home, message_list.narrowed]) {
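        // Skip lists that don't exist; message_list.narrowed may be
        // undefined if no narrow has been rendered yet.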
        if (list === undefined) {
            continue;
        }
        list.remove_and_rerender(message_ids);
    }

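    // Keep the recent senders and recent topics data consistent with
    // the deletion.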
    recent_senders.update_topics_of_deleted_message_ids(message_ids);
    recent_topics_ui.update_topics_of_deleted_message_ids(message_ids);
}