merge: all upstream changes
This commit is contained in:
commit
f8f128b347
170 changed files with 4490 additions and 2218 deletions
File diff suppressed because it is too large
Load diff
|
@@ -92,6 +92,9 @@ describe('ActivityPub', () => {
|
|||
const metaInitial = {
|
||||
cacheRemoteFiles: true,
|
||||
cacheRemoteSensitiveFiles: true,
|
||||
perUserHomeTimelineCacheMax: 100,
|
||||
perLocalUserUserTimelineCacheMax: 100,
|
||||
perRemoteUserUserTimelineCacheMax: 100,
|
||||
blockedHosts: [] as string[],
|
||||
sensitiveWords: [] as string[],
|
||||
} as MiMeta;
|
||||
|
|
88
packages/backend/test/unit/misc/loader.ts
Normal file
88
packages/backend/test/unit/misc/loader.ts
Normal file
|
@@ -0,0 +1,88 @@
|
|||
import { DebounceLoader } from '@/misc/loader.js';
|
||||
|
||||
class Mock {
|
||||
loadCountByKey = new Map<number, number>();
|
||||
load = async (key: number): Promise<number> => {
|
||||
const count = this.loadCountByKey.get(key);
|
||||
if (typeof count === 'undefined') {
|
||||
this.loadCountByKey.set(key, 1);
|
||||
} else {
|
||||
this.loadCountByKey.set(key, count + 1);
|
||||
}
|
||||
return key * 2;
|
||||
};
|
||||
reset() {
|
||||
this.loadCountByKey.clear();
|
||||
}
|
||||
}
|
||||
|
||||
describe(DebounceLoader, () => {
|
||||
describe('single request', () => {
|
||||
it('loads once', async () => {
|
||||
const mock = new Mock();
|
||||
const loader = new DebounceLoader(mock.load);
|
||||
expect(await loader.load(7)).toBe(14);
|
||||
expect(mock.loadCountByKey.size).toBe(1);
|
||||
expect(mock.loadCountByKey.get(7)).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('two duplicated requests at same time', () => {
|
||||
it('loads once', async () => {
|
||||
const mock = new Mock();
|
||||
const loader = new DebounceLoader(mock.load);
|
||||
const [v1, v2] = await Promise.all([
|
||||
loader.load(7),
|
||||
loader.load(7),
|
||||
]);
|
||||
expect(v1).toBe(14);
|
||||
expect(v2).toBe(14);
|
||||
expect(mock.loadCountByKey.size).toBe(1);
|
||||
expect(mock.loadCountByKey.get(7)).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('two different requests at same time', () => {
|
||||
it('loads twice', async () => {
|
||||
const mock = new Mock();
|
||||
const loader = new DebounceLoader(mock.load);
|
||||
const [v1, v2] = await Promise.all([
|
||||
loader.load(7),
|
||||
loader.load(13),
|
||||
]);
|
||||
expect(v1).toBe(14);
|
||||
expect(v2).toBe(26);
|
||||
expect(mock.loadCountByKey.size).toBe(2);
|
||||
expect(mock.loadCountByKey.get(7)).toBe(1);
|
||||
expect(mock.loadCountByKey.get(13)).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('non-continuous same two requests', () => {
|
||||
it('loads twice', async () => {
|
||||
const mock = new Mock();
|
||||
const loader = new DebounceLoader(mock.load);
|
||||
expect(await loader.load(7)).toBe(14);
|
||||
expect(mock.loadCountByKey.size).toBe(1);
|
||||
expect(mock.loadCountByKey.get(7)).toBe(1);
|
||||
mock.reset();
|
||||
expect(await loader.load(7)).toBe(14);
|
||||
expect(mock.loadCountByKey.size).toBe(1);
|
||||
expect(mock.loadCountByKey.get(7)).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('non-continuous different two requests', () => {
|
||||
it('loads twice', async () => {
|
||||
const mock = new Mock();
|
||||
const loader = new DebounceLoader(mock.load);
|
||||
expect(await loader.load(7)).toBe(14);
|
||||
expect(mock.loadCountByKey.size).toBe(1);
|
||||
expect(mock.loadCountByKey.get(7)).toBe(1);
|
||||
mock.reset();
|
||||
expect(await loader.load(13)).toBe(26);
|
||||
expect(mock.loadCountByKey.size).toBe(1);
|
||||
expect(mock.loadCountByKey.get(13)).toBe(1);
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -99,7 +99,7 @@ export const relativeFetch = async (path: string, init?: RequestInit | undefined
|
|||
return await fetch(new URL(path, `http://127.0.0.1:${port}/`).toString(), init);
|
||||
};
|
||||
|
||||
function randomString(chars = 'abcdefghijklmnopqrstuvwxyz0123456789', length = 16) {
|
||||
export function randomString(chars = 'abcdefghijklmnopqrstuvwxyz0123456789', length = 16) {
|
||||
let randomString = '';
|
||||
for (let i = 0; i < length; i++) {
|
||||
randomString += chars[Math.floor(Math.random() * chars.length)];
|
||||
|
@@ -301,12 +301,14 @@ export const uploadFile = async (user?: UserToken, { path, name, blob }: UploadO
|
|||
};
|
||||
|
||||
export const uploadUrl = async (user: UserToken, url: string) => {
|
||||
let file: any;
|
||||
let resolve: unknown;
|
||||
const file = new Promise(ok => resolve = ok);
|
||||
const marker = Math.random().toString();
|
||||
|
||||
const ws = await connectStream(user, 'main', (msg) => {
|
||||
if (msg.type === 'urlUploadFinished' && msg.body.marker === marker) {
|
||||
file = msg.body.file;
|
||||
ws.close();
|
||||
resolve(msg.body.file);
|
||||
}
|
||||
});
|
||||
|
||||
|
@@ -316,9 +318,6 @@ export const uploadUrl = async (user: UserToken, url: string) => {
|
|||
force: true,
|
||||
}, user);
|
||||
|
||||
await sleep(7000);
|
||||
ws.close();
|
||||
|
||||
return file;
|
||||
};
|
||||
|
||||
|
@@ -458,6 +457,7 @@ export async function testPaginationConsistency<Entity extends { id: string, cre
|
|||
};
|
||||
|
||||
for (const limit of [1, 5, 10, 100, undefined]) {
|
||||
/*
|
||||
// 1. sinceId/DateとuntilId/Dateで両端を指定して取得した結果が期待通りになっていること
|
||||
if (ordering === 'desc') {
|
||||
const end = expected.at(-1)!;
|
||||
|
@@ -486,6 +486,7 @@ export async function testPaginationConsistency<Entity extends { id: string, cre
|
|||
actual.map(({ id, createdAt }) => id + ':' + createdAt),
|
||||
expected.map(({ id, createdAt }) => id + ':' + createdAt));
|
||||
}
|
||||
*/
|
||||
|
||||
// 3. untilId指定+limitで取得してつなぎ合わせた結果が期待通りになっていること
|
||||
if (ordering === 'desc') {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue