summaryrefslogtreecommitdiff
path: root/packages/backend/src
diff options
context:
space:
mode:
authordakkar <dakkar@thenautilus.net>2023-11-30 13:13:41 +0000
committerdakkar <dakkar@thenautilus.net>2023-11-30 13:26:55 +0000
commitc59e74dfd5081603b881f6c0452596af34d7e04e (patch)
treec3f06df0294f2f9cb579ea9737ab38f1ee925f0f /packages/backend/src
parentproperly thread Mastodon imports (diff)
downloadsharkey-c59e74dfd5081603b881f6c0452596af34d7e04e.tar.gz
sharkey-c59e74dfd5081603b881f6c0452596af34d7e04e.tar.bz2
sharkey-c59e74dfd5081603b881f6c0452596af34d7e04e.zip
fix chaining for Mastodon notes
The `id` / `replyId` fields are not at the top level of Mastodon outbox items (they live under `object`), so `recreateChain` now takes a list of keys to walk, not just a single key.
Diffstat (limited to 'packages/backend/src')
-rw-r--r--packages/backend/src/queue/processors/ImportNotesProcessorService.ts18
1 file changed, 12 insertions(+), 6 deletions(-)
diff --git a/packages/backend/src/queue/processors/ImportNotesProcessorService.ts b/packages/backend/src/queue/processors/ImportNotesProcessorService.ts
index 49c8530b39..b9a3645f6d 100644
--- a/packages/backend/src/queue/processors/ImportNotesProcessorService.ts
+++ b/packages/backend/src/queue/processors/ImportNotesProcessorService.ts
@@ -74,7 +74,7 @@ export class ImportNotesProcessorService {
// Function was taken from Firefish and modified for our needs
@bindThis
- private async recreateChain(idField: string, replyField: string, arr: any[], includeOrphans: boolean): Promise<any[]> {
+ private async recreateChain(idFieldPath: string[], replyFieldPath: string[], arr: any[], includeOrphans: boolean): Promise<any[]> {
type NotesMap = {
[id: string]: any;
};
@@ -83,7 +83,10 @@ export class ImportNotesProcessorService {
const notesWaitingForParent: NotesMap = {};
for await (const note of arr) {
- const noteId = note[idField];
+ const noteId = idFieldPath.reduce(
+ (obj, step) => obj[step],
+ note,
+ );
noteById[noteId] = note;
note.childNotes = [];
@@ -94,7 +97,10 @@ export class ImportNotesProcessorService {
delete notesWaitingForParent[noteId];
}
- const noteReplyId = note[replyField];
+ const noteReplyId = replyFieldPath.reduce(
+ (obj, step) => obj[step],
+ note,
+ );
if (noteReplyId == null) {
notesTree.push(note);
continue;
@@ -184,7 +190,7 @@ export class ImportNotesProcessorService {
const tweets = Object.keys(fakeWindow.window.YTD.tweets.part0).reduce((m, key, i, obj) => {
return m.concat(fakeWindow.window.YTD.tweets.part0[key].tweet);
}, []);
- const processedTweets = await this.recreateChain('id_str', 'in_reply_to_status_id_str', tweets, false);
+ const processedTweets = await this.recreateChain(['id_str'], ['in_reply_to_status_id_str'], tweets, false);
this.queueService.createImportTweetsToDbJob(job.data.user, processedTweets, null);
} finally {
cleanup();
@@ -274,7 +280,7 @@ export class ImportNotesProcessorService {
if (fs.existsSync(outputPath + '/media_attachments/files') && mastoFolder) {
await this.uploadFiles(outputPath + '/media_attachments/files', user, mastoFolder.id);
}
- const processedToots = await this.recreateChain('id', 'inReplyTo', outbox.orderedItems.filter((x: any) => x.type === 'Create' && x.object.type === 'Note'), true);
+ const processedToots = await this.recreateChain(['object', 'id'], ['object', 'inReplyTo'], outbox.orderedItems.filter((x: any) => x.type === 'Create' && x.object.type === 'Note'), true);
this.queueService.createImportMastoToDbJob(job.data.user, processedToots, null);
}
}
@@ -298,7 +304,7 @@ export class ImportNotesProcessorService {
const notesJson = fs.readFileSync(path, 'utf-8');
const notes = JSON.parse(notesJson);
- const processedNotes = await this.recreateChain('id', 'replyId', notes, false);
+ const processedNotes = await this.recreateChain(['id'], ['replyId'], notes, false);
this.queueService.createImportKeyNotesToDbJob(job.data.user, processedNotes, null);
cleanup();
}