Fix huge bandwidth usage and performance issue of the pinned message banner. (#37)

* Return only the first 100 pinned messages

* Fetch pinned messages 10 by 10
Florian Duros 2024-09-13 09:47:22 +02:00 committed by GitHub
parent 5740bdbd38
commit 6b384fe9c1
5 changed files with 100 additions and 11 deletions
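In short, the two bullets above mean the banner now caps the pinned-event ID list at 100 and resolves the event contents in sequential batches of 10, instead of firing one request per pinned event all at once. Below is a condensed, illustrative sketch of that approach, not the code from this commit; the real changes are in getPinnedEventIds, useFetchedPinnedEvents, and the new batch helper in the diffs that follow, and the simplified fetchPinnedEvent signature here is assumed for brevity.

// Illustrative sketch only - see the actual diffs below for the real implementation.
const MAX_PINNED_EVENTS = 100;
const BATCH_SIZE = 10;

async function loadPinnedEvents(
    pinnedEventIds: string[],
    // Simplified stand-in for the real fetchPinnedEvent(room, eventId, cli).
    fetchPinnedEvent: (eventId: string) => Promise<unknown>,
): Promise<unknown[]> {
    // 1. Return only the first 100 pinned messages.
    const ids = pinnedEventIds.slice(0, MAX_PINNED_EVENTS);

    // 2. Fetch them 10 at a time so at most 10 requests are in flight at once.
    const results: unknown[] = [];
    for (let i = 0; i < ids.length; i += BATCH_SIZE) {
        const chunk = ids.slice(i, i + BATCH_SIZE);
        results.push(...(await Promise.all(chunk.map((id) => fetchPinnedEvent(id)))));
    }
    return results;
}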

View File

@@ -24,19 +24,22 @@ import { ReadPinsEventId } from "../components/views/right_panel/types";
 import { useMatrixClientContext } from "../contexts/MatrixClientContext";
 import { useAsyncMemo } from "./useAsyncMemo";
 import PinningUtils from "../utils/PinningUtils";
+import { batch } from "../utils/promise.ts";
 
 /**
  * Get the pinned event IDs from a room.
+ * The number of pinned events is limited to 100.
  * @param room
  */
 function getPinnedEventIds(room?: Room): string[] {
-    return (
+    const eventIds: string[] =
         room
             ?.getLiveTimeline()
             .getState(EventTimeline.FORWARDS)
             ?.getStateEvents(EventType.RoomPinnedEvents, "")
-            ?.getContent()?.pinned ?? []
-    );
+            ?.getContent()?.pinned ?? [];
+    // Limit the number of pinned events to 100
+    return eventIds.slice(0, 100);
 }
 
 /**
@@ -173,12 +176,11 @@ export function useFetchedPinnedEvents(room: Room, pinnedEventIds: string[]): Ar
     const cli = useMatrixClientContext();
 
     return useAsyncMemo(
-        () =>
-            Promise.all(
-                pinnedEventIds.map(
-                    async (eventId): Promise<MatrixEvent | null> => fetchPinnedEvent(room, eventId, cli),
-                ),
-            ),
+        () => {
+            const fetchPromises = pinnedEventIds.map((eventId) => () => fetchPinnedEvent(room, eventId, cli));
+            // Fetch the pinned events in batches of 10
+            return batch(fetchPromises, 10);
+        },
         [cli, room, pinnedEventIds],
         null,
     );

View File

@@ -40,3 +40,18 @@ export async function retry<T, E extends Error>(
     }
     throw lastErr;
 }
+
+/**
+ * Batch promises into groups of a given size.
+ * Execute the promises in parallel, but wait for all promises in a batch to resolve before moving to the next batch.
+ * @param funcs - The promises to batch
+ * @param batchSize - The number of promises to execute in parallel
+ */
+export async function batch<T>(funcs: Array<() => Promise<T>>, batchSize: number): Promise<T[]> {
+    const results: T[] = [];
+    for (let i = 0; i < funcs.length; i += batchSize) {
+        const batch = funcs.slice(i, i + batchSize);
+        results.push(...(await Promise.all(batch.map((f) => f()))));
+    }
+    return results;
+}
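Note that batch takes thunks (functions that return promises) rather than promises themselves: a Promise starts executing the moment it is created, so deferring creation until its batch runs is what actually limits how many requests are in flight, as the useFetchedPinnedEvents change above relies on. A hypothetical usage sketch, not part of this commit (the task helper is made up for illustration):

// Hypothetical example, not part of this commit.
const task = (n: number) => () =>
    new Promise<number>((resolve) => setTimeout(() => resolve(n), 100));

async function demo(): Promise<void> {
    // Runs tasks 1 and 2 together, then 3 and 4, then 5; resolves to [1, 2, 3, 4, 5].
    const results = await batch([1, 2, 3, 4, 5].map(task), 2);
    console.log(results);
}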

View File

@@ -196,6 +196,21 @@ describe("<PinnedMessagesCard />", () => {
         expect(asFragment()).toMatchSnapshot();
     });
 
+    it("should not show more than 100 messages", async () => {
+        const events = Array.from({ length: 120 }, (_, i) =>
+            mkMessage({
+                event: true,
+                room: "!room:example.org",
+                user: "@alice:example.org",
+                msg: `The message ${i}`,
+                ts: i,
+            }),
+        );
+
+        await initPinnedMessagesCard(events, []);
+        expect(screen.queryAllByRole("listitem")).toHaveLength(100);
+    });
+
     it("should updates when messages are pinned", async () => {
         // Start with nothing pinned
         const { addLocalPinEvent, addNonLocalPinEvent } = await initPinnedMessagesCard([], []);

View File

@@ -358,7 +358,7 @@ exports[`<PinnedMessagesCard /> unpin all should not allow to unpinall 1`] = `
   aria-label="Open menu"
   class="_icon-button_bh2qc_17"
   data-state="closed"
-  id="radix-18"
+  id="radix-218"
   role="button"
   style="--cpd-icon-button-size: 24px;"
   tabindex="0"
@@ -424,7 +424,7 @@ exports[`<PinnedMessagesCard /> unpin all should not allow to unpinall 1`] = `
   aria-label="Open menu"
   class="_icon-button_bh2qc_17"
   data-state="closed"
-  id="radix-19"
+  id="radix-219"
   role="button"
   style="--cpd-icon-button-size: 24px;"
   tabindex="0"

View File

@@ -0,0 +1,57 @@
+/*
+ * Copyright 2024 New Vector Ltd.
+ *
+ * SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
+ * Please see LICENSE files in the repository root for full details.
+ *
+ */
+
+import { batch } from "../../src/utils/promise.ts";
+
+describe("promise.ts", () => {
+    describe("batch", () => {
+        afterEach(() => jest.useRealTimers());
+
+        it("should batch promises into groups of a given size", async () => {
+            const promises = [() => Promise.resolve(1), () => Promise.resolve(2), () => Promise.resolve(3)];
+            const batchSize = 2;
+            const result = await batch(promises, batchSize);
+            expect(result).toEqual([1, 2, 3]);
+        });
+
+        it("should wait for the current batch to finish to request the next one", async () => {
+            jest.useFakeTimers();
+
+            let promise1Called = false;
+            const promise1 = () =>
+                new Promise<number>((resolve) => {
+                    promise1Called = true;
+                    resolve(1);
+                });
+
+            let promise2Called = false;
+            const promise2 = () =>
+                new Promise<number>((resolve) => {
+                    promise2Called = true;
+                    setTimeout(() => {
+                        resolve(2);
+                    }, 10);
+                });
+
+            let promise3Called = false;
+            const promise3 = () =>
+                new Promise<number>((resolve) => {
+                    promise3Called = true;
+                    resolve(3);
+                });
+
+            const batchSize = 2;
+            const batchPromise = batch([promise1, promise2, promise3], batchSize);
+            expect(promise1Called).toBe(true);
+            expect(promise2Called).toBe(true);
+            expect(promise3Called).toBe(false);
+            jest.advanceTimersByTime(11);
+            expect(await batchPromise).toEqual([1, 2, 3]);
+        });
+    });
+});