merge: improve recreateChain

Resolves #159
Mar0xy 2023-11-18 12:38:13 +01:00
parent 4929467016
commit 19fc5a74f0
No known key found for this signature in database
GPG key ID: 56569BBE47D2C828

@@ -69,53 +69,42 @@ export class ImportNotesProcessorService {
 		}
 	}
 
-	// Function was taken from Firefish and edited to remove renoteId and make it run in only one for loop instead of two
+	// Function was taken from Firefish and modified for our needs
 	@bindThis
-	private async recreateChain(arr: any[]) {
+	private async recreateChain(idField: string, replyField: string, arr: any[]): Promise<any[]> {
 		type NotesMap = {
 			[id: string]: any;
 		};
 		const notesTree: any[] = [];
-		const lookup: NotesMap = {};
+		const noteById: NotesMap = {};
+		const notesWaitingForParent: NotesMap = {};
 		for await (const note of arr) {
-			lookup[`${note.id}`] = note;
+			noteById[note[idField]] = note;
 			note.childNotes = [];
-			let parent = null;
 
-			if (note.replyId == null) {
+			const children = notesWaitingForParent[note[idField]];
+			if (children) {
+				note.childNotes.push(...children);
+			}
+
+			if (note[replyField] == null) {
 				notesTree.push(note);
-			} else {
-				parent = lookup[`${note.replyId}`];
+				continue;
 			}
 
-			if (parent) parent.childNotes.push(note);
+			const parent = noteById[note[replyField]];
+			if (parent) {
+				parent.childNotes.push(note);
+			} else {
+				notesWaitingForParent[note[replyField]] ||= [];
+				notesWaitingForParent[note[replyField]].push(note);
+			}
 		}
 		return notesTree;
 	}
 
-	@bindThis
-	private async recreateTwitChain(arr: any[]) {
-		type TweetsMap = {
-			[id: string]: any;
-		};
-		const tweetsTree: any[] = [];
-		const lookup: TweetsMap = {};
-		for await (const tweet of arr) {
-			lookup[`${tweet.id_str}`] = tweet;
-			tweet.replies = [];
-			let parent = null;
-
-			if (!tweet.in_reply_to_status_id_str) {
-				tweetsTree.push(tweet);
-			} else {
-				parent = lookup[`${tweet.in_reply_to_status_id_str}`];
-			}
-
-			if (parent) parent.replies.push(tweet);
-		}
-		return tweetsTree;
-	}
-
 	@bindThis
 	private isIterable(obj: any) {
 		if (obj == null) {
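
Note: the sketch below re-expresses the technique the new recreateChain uses, as a standalone function so it can be tried outside the service (the real method is a private, @bindThis-decorated class member; names like recreateChainSketch are illustrative only). Every item is indexed by idField, items with no reply reference become roots, and replies whose parent has not been seen yet are parked in a waiting map and adopted when the parent arrives.

// Minimal standalone sketch of the chain-rebuilding technique in the hunk above.
// Helper and variable names here are illustrative, not part of the service.
type AnyMap = { [id: string]: any };

async function recreateChainSketch(idField: string, replyField: string, arr: any[]): Promise<any[]> {
	const tree: any[] = [];
	const byId: AnyMap = {};
	const waitingForParent: AnyMap = {};
	for (const item of arr) {
		// Index the item and give it an (initially empty) list of children.
		byId[item[idField]] = item;
		item.childNotes = [];

		// Adopt any children that were encountered before this item (out-of-order input).
		const children = waitingForParent[item[idField]];
		if (children) item.childNotes.push(...children);

		if (item[replyField] == null) {
			// No parent reference: this item is a root of the reconstructed thread.
			tree.push(item);
			continue;
		}

		const parent = byId[item[replyField]];
		if (parent) {
			// Parent already indexed: attach directly.
			parent.childNotes.push(item);
		} else {
			// Parent not seen yet: park the child until the parent shows up.
			(waitingForParent[item[replyField]] ||= []).push(item);
		}
	}
	return tree;
}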
@@ -178,9 +167,7 @@ export class ImportNotesProcessorService {
 			const tweets = Object.keys(fakeWindow.window.YTD.tweets.part0).reduce((m, key, i, obj) => {
 				return m.concat(fakeWindow.window.YTD.tweets.part0[key].tweet);
 			}, []);
-			// Due to the way twitter outputs the tweets the entire array needs to be reversed for the recreate function.
-			const reversedTweets = tweets.reverse();
-			const processedTweets = await this.recreateTwitChain(reversedTweets);
+			const processedTweets = await this.recreateChain("id_str", "in_reply_to_status_id_str", tweets);
 			this.queueService.createImportTweetsToDbJob(job.data.user, processedTweets, null);
 		} finally {
 			cleanup();
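
The old code reversed the Twitter archive before calling recreateTwitChain because that helper could only attach a reply whose parent had already been indexed. With the notesWaitingForParent map, input order no longer matters, which is why the .reverse() call disappears above. A hedged illustration, reusing the recreateChainSketch stand-in from the previous sketch with made-up tweet objects:

// Hypothetical two-tweet archive in which the reply precedes its parent.
const tweets = [
	{ id_str: '2', in_reply_to_status_id_str: '1', full_text: 'reply' },
	{ id_str: '1', in_reply_to_status_id_str: null, full_text: 'root' },
];

(async () => {
	const roots = await recreateChainSketch('id_str', 'in_reply_to_status_id_str', tweets);
	console.log(roots.length);                     // 1 — only the root tweet is top-level
	console.log(roots[0].childNotes[0].full_text); // 'reply' — attached even though it arrived first
})();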
@@ -248,7 +235,7 @@ export class ImportNotesProcessorService {
 			const notesJson = fs.readFileSync(path, 'utf-8');
 			const notes = JSON.parse(notesJson);
-			const processedNotes = await this.recreateChain(notes);
+			const processedNotes = await this.recreateChain("id", "replyId", notes);
 			this.queueService.createImportKeyNotesToDbJob(job.data.user, processedNotes, null);
 			cleanup();
 		}
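
Because the id and reply field names are now parameters, the same helper serves both importers; only the field names differ per source format. A small hedged example with the recreateChainSketch stand-in and a hypothetical Misskey-style export:

// Hypothetical Misskey/Firefish-style export: same helper, different field names.
const notes = [
	{ id: 'b', replyId: 'a', text: 'child note' },
	{ id: 'a', replyId: null, text: 'root note' },
];

(async () => {
	const processedNotes = await recreateChainSketch('id', 'replyId', notes);
	// processedNotes[0].childNotes holds the child note, mirroring the call in the hunk above.
})();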
@@ -484,9 +471,9 @@ export class ImportNotesProcessorService {
 			try {
 				const date = new Date(tweet.created_at);
 				const textReplaceURLs = tweet.entities.urls && tweet.entities.urls.length > 0 ? await replaceTwitterUrls(tweet.full_text, tweet.entities.urls) : tweet.full_text;
 				const text = tweet.entities.user_mentions && tweet.entities.user_mentions.length > 0 ? await replaceTwitterMentions(textReplaceURLs, tweet.entities.user_mentions) : textReplaceURLs;
 				const files: MiDriveFile[] = [];
 				if (tweet.extended_entities && this.isIterable(tweet.extended_entities.media)) {
 					for await (const file of tweet.extended_entities.media) {
 						if (file.video_info) {
@@ -543,7 +530,7 @@ export class ImportNotesProcessorService {
 					}
 				}
 
 				const createdNote = await this.noteCreateService.import(user, { createdAt: date, reply: parentNote, text: text, files: files });
-				if (tweet.replies) this.queueService.createImportTweetsToDbJob(user, tweet.replies, createdNote.id);
+				if (tweet.childNotes) this.queueService.createImportTweetsToDbJob(user, tweet.childNotes, createdNote.id);
 			} catch (e) {
 				this.logger.warn(`Error: ${e}`);
 			}