mirror of https://github.com/zulip/zulip.git
util: Change parameter type of `sorted_ids` to `number[]`.
This function was mistyped; in practice it was called with both string[] and number[], with the implementation taking advantage of the fact that Number.parseInt(<int>, 10) === <int>. The only string[] callers were some overly defensive typing_data tests that don't match the actual typing_data interface, so we remove the string[] support and adjust the function's type, as well as those tests.
This commit is contained in:
parent 5762799d18
commit 88c23933bb
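For context, a minimal sketch (not part of the commit) of the runtime behavior the old signature relied on: Number.parseInt stringifies its argument, so parsing an integer id round-trips to the same integer, which is why number[] callers worked despite the string[] annotation. The cast below only simulates such a mistyped caller.

    // Illustration only: Number.parseInt stringifies its argument,
    // so an integer id survives the round trip unchanged.
    const mistyped_id = 15 as unknown as string; // caller actually passed a number
    console.log(Number.parseInt(mistyped_id, 10)); // 15
    console.log(Number.parseInt("15", 10));        // 15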
@@ -220,11 +220,9 @@ export function is_mobile(): boolean {
     return new RegExp(regex, "i").test(window.navigator.userAgent);
 }
 
-export function sorted_ids(ids: string[]): number[] {
-    // This mapping makes sure we are using ints, and
-    // it also makes sure we don't mutate the list.
-    let id_list = ids.map((s) => Number.parseInt(s, 10));
-    id_list = [...new Set(id_list)];
+export function sorted_ids(ids: number[]): number[] {
+    // This makes sure we don't mutate the list.
+    const id_list = [...new Set(ids)];
     id_list.sort((a, b) => a - b);
 
     return id_list;
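A quick usage sketch of the updated helper, restating the new body from the hunk above with explanatory comments; the example call is for illustration only.

    function sorted_ids(ids: number[]): number[] {
        // Copying through a Set dedupes and avoids mutating the caller's array.
        const id_list = [...new Set(ids)];
        // Numeric comparator; the default lexicographic sort would order [5, 10] as [10, 5].
        id_list.sort((a, b) => a - b);
        return id_list;
    }

    sorted_ids([15, 5, 10, 5]); // => [5, 10, 15]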
@@ -18,8 +18,7 @@ function test(label, f) {
 
 test("basics", () => {
     // The typing_data needs to be robust with lists of
-    // user ids being in arbitrary sorting order and
-    // possibly in string form instead of integer. So all
+    // user ids being in arbitrary sorting order. So all
     // the apparent randomness in these tests has a purpose.
     typing_data.add_typist([5, 10, 15], 15);
     assert.deepEqual(typing_data.get_group_typists([15, 10, 5]), [15]);
@@ -28,18 +27,18 @@ test("basics", () => {
     typing_data.add_typist([5, 10, 15], 15);
 
     // add another id to our first group
-    typing_data.add_typist([5, 10, 15], "10");
+    typing_data.add_typist([5, 10, 15], 10);
     assert.deepEqual(typing_data.get_group_typists([10, 15, 5]), [10, 15]);
 
     // start adding to a new group
     typing_data.add_typist([7, 15], 7);
-    typing_data.add_typist([7, "15"], 15);
+    typing_data.add_typist([7, 15], 15);
 
     // test get_all_typists
     assert.deepEqual(typing_data.get_all_typists(), [7, 10, 15]);
 
     // test basic removal
-    assert.ok(typing_data.remove_typist([15, 7], "7"));
+    assert.ok(typing_data.remove_typist([15, 7], 7));
     assert.deepEqual(typing_data.get_group_typists([7, 15]), [15]);
 
     // test removing an id that is not there
@@ -49,7 +48,7 @@ test("basics", () => {
 
     // remove user from one group, but "15" will still be among
     // "all typists"
-    assert.ok(typing_data.remove_typist(["15", 7], "15"));
+    assert.ok(typing_data.remove_typist([15, 7], 15));
     assert.deepEqual(typing_data.get_all_typists(), [10, 15]);
 
     // now remove from the other group
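For readers unfamiliar with the module, here is a hedged sketch of the kind of store these tests exercise, reusing the sorted_ids helper shown above. The group-key derivation and internal data structure are assumptions for illustration, not a quotation of typing_data's actual implementation.

    // Sketch only: a typing_data-like store keyed on the sorted group ids.
    // Names mirror the test calls above; internals are assumed, not quoted.
    const typists_by_group = new Map<string, Set<number>>();

    function group_key(group: number[]): string {
        // Assumed key derivation: sorting makes group identity order-independent.
        return sorted_ids(group).join(",");
    }

    function add_typist(group: number[], typist: number): void {
        const key = group_key(group);
        const typists = typists_by_group.get(key) ?? new Set<number>();
        typists.add(typist);
        typists_by_group.set(key, typists);
    }

    function get_group_typists(group: number[]): number[] {
        return sorted_ids([...(typists_by_group.get(group_key(group)) ?? [])]);
    }

Keying on the sorted id list is what makes the tests' "arbitrary sorting order" robustness meaningful: [5, 10, 15] and [15, 10, 5] name the same group.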