dict: Remove each method.

Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
This commit is contained in:
Anders Kaseorg 2020-02-03 00:26:53 -08:00 committed by Tim Abbott
parent fac2c71776
commit 4480963f5a
10 changed files with 41 additions and 45 deletions

View File

@@ -100,11 +100,11 @@ run_test('each', () => {
let unseen_keys = [...d.keys()];
let cnt = 0;
d.each(function (v, k) {
for (const [k, v] of d) {
assert.equal(v, d.get(k));
unseen_keys = _.without(unseen_keys, k);
cnt += 1;
});
}
assert.equal(cnt, d.size);
assert.equal(unseen_keys.length, 0);

View File

@@ -48,11 +48,11 @@ run_test('each', () => {
let unseen_keys = [...d.keys()];
let cnt = 0;
d.each(function (v, k) {
for (const [k, v] of d) {
assert.equal(v, d.get(k));
unseen_keys = _.without(unseen_keys, k);
cnt += 1;
});
}
assert.equal(cnt, d.size);
assert.equal(unseen_keys.length, 0);

View File

@@ -85,7 +85,7 @@ exports.update_dom_with_unread_counts = function (counts) {
// counts is just a data object that gets calculated elsewhere
// Our job is to update some DOM elements.
counts.pm_count.each(function (count, user_ids_string) {
for (const [user_ids_string, count] of counts.pm_count) {
// TODO: just use user_ids_string in our markup
const is_pm = user_ids_string.indexOf(',') < 0;
if (is_pm) {
@@ -93,7 +93,7 @@ exports.update_dom_with_unread_counts = function (counts) {
} else {
set_group_count(user_ids_string, count);
}
});
}
};
exports.process_loaded_messages = function (messages) {

View File

@@ -34,10 +34,6 @@ export class Dict<V> {
return this._items.size;
}
each(f: (v: V, k?: string) => void): void {
this._items.forEach(f);
}
clear(): void {
this._items.clear();
}

View File

@@ -60,10 +60,6 @@ export class FoldDict<V> {
return this._items.size;
}
each(f: (v: V, k?: string) => void): void {
this._items.forEach(({k, v}) => f(v, k));
}
clear(): void {
this._items.clear();
}

View File

@@ -50,6 +50,10 @@ export class IntDict<V> {
return this._map.values();
}
[Symbol.iterator](): Iterator<[number, V]> {
return this._map.entries();
}
filter_values(pred: (item: V) => boolean): V[] {
const results: V[] = [];
@@ -66,10 +70,6 @@ export class IntDict<V> {
return this._map.size;
}
each(f: (v: V, k?: number) => void): void {
this._map.forEach(f);
}
clear(): void {
this._map.clear();
}

View File

@@ -29,11 +29,11 @@ exports.is_topic_muted = function (stream_id, topic) {
exports.get_muted_topics = function () {
const topics = [];
muted_topics.each(function (sub_dict, stream_id) {
for (const [stream_id, sub_dict] of muted_topics) {
for (const topic of sub_dict.keys()) {
topics.push([stream_id, topic]);
}
});
}
return topics;
};

View File

@@ -461,7 +461,7 @@ exports.on_load_success = function (realm_people_data) {
}
});
// Append user type field values also
fields_user_pills.each(function (field_pills, field_id) {
for (const [field_id, field_pills] of fields_user_pills) {
if (field_pills) {
const user_ids = user_pill.get_user_ids(field_pills);
new_profile_data.push({
@@ -469,7 +469,7 @@ exports.on_load_success = function (realm_people_data) {
value: user_ids,
});
}
});
}
url = "/json/users/" + encodeURIComponent(user_id);
data = {

View File

@@ -317,29 +317,29 @@ exports.update_streams_sidebar = function () {
exports.update_dom_with_unread_counts = function (counts) {
// counts.stream_count maps streams to counts
counts.stream_count.each(function (count, stream_id) {
for (const [stream_id, count] of counts.stream_count) {
set_stream_unread_count(stream_id, count);
});
}
// counts.topic_count maps streams to hashes of topics to counts
counts.topic_count.each(function (topic_hash, stream_id) {
for (const [stream_id, topic_hash] of counts.topic_count) {
// Because the topic_list data structure doesn't keep track of
// which topics the "more topics" unread count came from, we
// need to compute the correct value from scratch here.
let more_topics_total = 0;
topic_hash.each(function (count, topic) {
for (const [topic, count] of topic_hash) {
const in_more_topics = topic_list.set_count(stream_id, topic, count);
if (in_more_topics === true) {
more_topics_total += count;
}
});
}
if (topic_list.active_stream_id() === stream_id) {
// Update the "more topics" unread count; we communicate
// this to the `topic_list` library by passing `null` as
// the topic.
topic_list.set_count(stream_id, null, more_topics_total);
}
});
}
};
exports.rename_stream = function (sub) {

View File

@@ -56,14 +56,18 @@ function make_bucketer(options) {
return key_to_bucket.get(bucket_key);
};
self.each = function (callback) {
key_to_bucket.each(callback);
};
self.keys = function () {
return key_to_bucket.keys();
};
self.values = function () {
return key_to_bucket.values();
};
self[Symbol.iterator] = function () {
return key_to_bucket[Symbol.iterator]();
};
return self;
}
@@ -119,11 +123,11 @@ exports.unread_pm_counter = (function () {
self.get_counts = function () {
const pm_dict = new Dict(); // Hash by user_ids_string -> count
let total_count = 0;
bucketer.each(function (id_set, user_ids_string) {
for (const [user_ids_string, id_set] of bucketer) {
const count = id_set.size;
pm_dict.set(user_ids_string, count);
total_count += count;
});
}
return {
total_count: total_count,
pm_dict: pm_dict,
@@ -146,10 +150,10 @@ exports.unread_pm_counter = (function () {
self.get_msg_ids = function () {
const lists = [];
bucketer.each(function (id_set) {
for (const id_set of bucketer.values()) {
const members = [...id_set];
lists.push(members);
});
}
const ids = [].concat.apply([], lists);
@@ -234,7 +238,7 @@ exports.unread_topic_counter = (function () {
res.stream_unread_messages = 0;
res.stream_count = new IntDict(); // hash by stream_id -> count
res.topic_count = new IntDict(); // hash of hashes (stream_id, then topic -> count)
bucketer.each(function (per_stream_bucketer, stream_id) {
for (const [stream_id, per_stream_bucketer] of bucketer) {
// We track unread counts for streams that may be currently
// unsubscribed. Since users may re-subscribe, we don't
@@ -242,24 +246,24 @@ exports.unread_topic_counter = (function () {
// so that callers have a view of the **current** world.
const sub = stream_data.get_sub_by_id(stream_id);
if (!sub || !stream_data.is_subscribed(sub.name)) {
return;
continue;
}
res.topic_count.set(stream_id, str_dict());
let stream_count = 0;
per_stream_bucketer.each(function (msgs, topic) {
for (const [topic, msgs] of per_stream_bucketer) {
const topic_count = msgs.size;
res.topic_count.get(stream_id).set(topic, topic_count);
if (!muting.is_topic_muted(stream_id, topic)) {
stream_count += topic_count;
}
});
}
res.stream_count.set(stream_id, stream_count);
if (!stream_data.is_muted(stream_id)) {
res.stream_unread_messages += stream_count;
}
});
}
return res;
};
@@ -301,11 +305,11 @@ exports.unread_topic_counter = (function () {
}
const sub = stream_data.get_sub_by_id(stream_id);
per_stream_bucketer.each(function (msgs, topic) {
for (const [topic, msgs] of per_stream_bucketer) {
if (sub && !muting.is_topic_muted(stream_id, topic)) {
stream_count += msgs.size;
}
});
}
return stream_count;
};
@@ -333,11 +337,11 @@ exports.unread_topic_counter = (function () {
const topic_lists = [];
const sub = stream_data.get_sub_by_id(stream_id);
per_stream_bucketer.each(function (msgs, topic) {
for (const [topic, msgs] of per_stream_bucketer) {
if (sub && !muting.is_topic_muted(stream_id, topic)) {
topic_lists.push([...msgs]);
}
});
}
const ids = [].concat.apply([], topic_lists);