summaryrefslogtreecommitdiff
path: root/packages/backend/src/queue
diff options
context:
space:
mode:
authorJohann150 <johann.galle@protonmail.com>2022-05-25 09:50:22 +0200
committerGitHub <noreply@github.com>2022-05-25 16:50:22 +0900
commite27c6abaeaf0e0e0be9fba7ffc6fd165474d8592 (patch)
treeece082db386298d8a7d3451a557cda34212cc399 /packages/backend/src/queue
parentRefactor widgets and fix lint issues (#8719) (diff)
downloadsharkey-e27c6abaeaf0e0e0be9fba7ffc6fd165474d8592.tar.gz
sharkey-e27c6abaeaf0e0e0be9fba7ffc6fd165474d8592.tar.bz2
sharkey-e27c6abaeaf0e0e0be9fba7ffc6fd165474d8592.zip
refactor: temporary files (#8713)
* simplify temporary files for thumbnails Because only a single file will be written to the directory, creating a separate directory seems unnecessary. If only a temporary file is created, the code from `createTemp` can be reused here as well. * refactor: deduplicate code for temporary files/directories To follow the DRY principle, the same code should not be duplicated across different files. Instead an already existing function is used. Because temporary directories are also created in multiple locations, a function for this is also newly added to reduce duplication. * fix: clean up identicon temp files The temporary files for identicons are not reused and can be deleted after they are fully read. This condition is met when the stream is closed, so the file can be cleaned up using the events API of the stream. * fix: ensure cleanup is called when download fails * fix: ensure cleanup is called in error conditions This covers import/export queue jobs and mostly just wraps all code in a try...finally statement where the finally runs the cleanup. * fix: use correct type instead of `any`
Diffstat (limited to 'packages/backend/src/queue')
-rw-r--r--packages/backend/src/queue/processors/db/export-blocking.ts105
-rw-r--r--packages/backend/src/queue/processors/db/export-custom-emojis.ts17
-rw-r--r--packages/backend/src/queue/processors/db/export-following.ts105
-rw-r--r--packages/backend/src/queue/processors/db/export-mute.ts107
-rw-r--r--packages/backend/src/queue/processors/db/export-notes.ts117
-rw-r--r--packages/backend/src/queue/processors/db/export-user-lists.ts63
-rw-r--r--packages/backend/src/queue/processors/db/import-custom-emojis.ts10
7 files changed, 251 insertions, 273 deletions
diff --git a/packages/backend/src/queue/processors/db/export-blocking.ts b/packages/backend/src/queue/processors/db/export-blocking.ts
index 166c9e4cd3..f5e0424a79 100644
--- a/packages/backend/src/queue/processors/db/export-blocking.ts
+++ b/packages/backend/src/queue/processors/db/export-blocking.ts
@@ -1,11 +1,11 @@
import Bull from 'bull';
-import * as tmp from 'tmp';
import * as fs from 'node:fs';
import { queueLogger } from '../../logger.js';
import { addFile } from '@/services/drive/add-file.js';
import { format as dateFormat } from 'date-fns';
import { getFullApAccount } from '@/misc/convert-host.js';
+import { createTemp } from '@/misc/create-temp.js';
import { Users, Blockings } from '@/models/index.js';
import { MoreThan } from 'typeorm';
import { DbUserJobData } from '@/queue/types.js';
@@ -22,73 +22,72 @@ export async function exportBlocking(job: Bull.Job<DbUserJobData>, done: any): P
}
// Create temp file
- const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
- tmp.file((e, path, fd, cleanup) => {
- if (e) return rej(e);
- res([path, cleanup]);
- });
- });
+ const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
- const stream = fs.createWriteStream(path, { flags: 'a' });
+ try {
+ const stream = fs.createWriteStream(path, { flags: 'a' });
- let exportedCount = 0;
- let cursor: any = null;
+ let exportedCount = 0;
+ let cursor: any = null;
- while (true) {
- const blockings = await Blockings.find({
- where: {
- blockerId: user.id,
- ...(cursor ? { id: MoreThan(cursor) } : {}),
- },
- take: 100,
- order: {
- id: 1,
- },
- });
+ while (true) {
+ const blockings = await Blockings.find({
+ where: {
+ blockerId: user.id,
+ ...(cursor ? { id: MoreThan(cursor) } : {}),
+ },
+ take: 100,
+ order: {
+ id: 1,
+ },
+ });
- if (blockings.length === 0) {
- job.progress(100);
- break;
- }
+ if (blockings.length === 0) {
+ job.progress(100);
+ break;
+ }
- cursor = blockings[blockings.length - 1].id;
+ cursor = blockings[blockings.length - 1].id;
- for (const block of blockings) {
- const u = await Users.findOneBy({ id: block.blockeeId });
- if (u == null) {
- exportedCount++; continue;
- }
+ for (const block of blockings) {
+ const u = await Users.findOneBy({ id: block.blockeeId });
+ if (u == null) {
+ exportedCount++; continue;
+ }
- const content = getFullApAccount(u.username, u.host);
- await new Promise<void>((res, rej) => {
- stream.write(content + '\n', err => {
- if (err) {
- logger.error(err);
- rej(err);
- } else {
- res();
- }
+ const content = getFullApAccount(u.username, u.host);
+ await new Promise<void>((res, rej) => {
+ stream.write(content + '\n', err => {
+ if (err) {
+ logger.error(err);
+ rej(err);
+ } else {
+ res();
+ }
+ });
});
+ exportedCount++;
+ }
+
+ const total = await Blockings.countBy({
+ blockerId: user.id,
});
- exportedCount++;
- }
- const total = await Blockings.countBy({
- blockerId: user.id,
- });
+ job.progress(exportedCount / total);
+ }
- job.progress(exportedCount / total);
- }
+ stream.end();
+ logger.succ(`Exported to: ${path}`);
- stream.end();
- logger.succ(`Exported to: ${path}`);
+ const fileName = 'blocking-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
+ const driveFile = await addFile({ user, path, name: fileName, force: true });
- const fileName = 'blocking-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
- const driveFile = await addFile({ user, path, name: fileName, force: true });
+ logger.succ(`Exported to: ${driveFile.id}`);
+ } finally {
+ cleanup();
+ }
- logger.succ(`Exported to: ${driveFile.id}`);
- cleanup();
done();
}
diff --git a/packages/backend/src/queue/processors/db/export-custom-emojis.ts b/packages/backend/src/queue/processors/db/export-custom-emojis.ts
index c2467fb5f0..97ba62dcf6 100644
--- a/packages/backend/src/queue/processors/db/export-custom-emojis.ts
+++ b/packages/backend/src/queue/processors/db/export-custom-emojis.ts
@@ -1,5 +1,4 @@
import Bull from 'bull';
-import * as tmp from 'tmp';
import * as fs from 'node:fs';
import { ulid } from 'ulid';
@@ -10,6 +9,7 @@ import { addFile } from '@/services/drive/add-file.js';
import { format as dateFormat } from 'date-fns';
import { Users, Emojis } from '@/models/index.js';
import { } from '@/queue/types.js';
+import { createTempDir } from '@/misc/create-temp.js';
import { downloadUrl } from '@/misc/download-url.js';
import config from '@/config/index.js';
import { IsNull } from 'typeorm';
@@ -25,13 +25,7 @@ export async function exportCustomEmojis(job: Bull.Job, done: () => void): Promi
return;
}
- // Create temp dir
- const [path, cleanup] = await new Promise<[string, () => void]>((res, rej) => {
- tmp.dir((e, path, cleanup) => {
- if (e) return rej(e);
- res([path, cleanup]);
- });
- });
+ const [path, cleanup] = await createTempDir();
logger.info(`Temp dir is ${path}`);
@@ -98,12 +92,7 @@ export async function exportCustomEmojis(job: Bull.Job, done: () => void): Promi
metaStream.end();
// Create archive
- const [archivePath, archiveCleanup] = await new Promise<[string, () => void]>((res, rej) => {
- tmp.file((e, path, fd, cleanup) => {
- if (e) return rej(e);
- res([path, cleanup]);
- });
- });
+ const [archivePath, archiveCleanup] = await createTemp();
const archiveStream = fs.createWriteStream(archivePath);
const archive = archiver('zip', {
zlib: { level: 0 },
diff --git a/packages/backend/src/queue/processors/db/export-following.ts b/packages/backend/src/queue/processors/db/export-following.ts
index 965500ac27..4ac165567b 100644
--- a/packages/backend/src/queue/processors/db/export-following.ts
+++ b/packages/backend/src/queue/processors/db/export-following.ts
@@ -1,11 +1,11 @@
import Bull from 'bull';
-import * as tmp from 'tmp';
import * as fs from 'node:fs';
import { queueLogger } from '../../logger.js';
import { addFile } from '@/services/drive/add-file.js';
import { format as dateFormat } from 'date-fns';
import { getFullApAccount } from '@/misc/convert-host.js';
+import { createTemp } from '@/misc/create-temp.js';
import { Users, Followings, Mutings } from '@/models/index.js';
import { In, MoreThan, Not } from 'typeorm';
import { DbUserJobData } from '@/queue/types.js';
@@ -23,73 +23,72 @@ export async function exportFollowing(job: Bull.Job<DbUserJobData>, done: () =>
}
// Create temp file
- const [path, cleanup] = await new Promise<[string, () => void]>((res, rej) => {
- tmp.file((e, path, fd, cleanup) => {
- if (e) return rej(e);
- res([path, cleanup]);
- });
- });
+ const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
- const stream = fs.createWriteStream(path, { flags: 'a' });
+ try {
+ const stream = fs.createWriteStream(path, { flags: 'a' });
- let cursor: Following['id'] | null = null;
+ let cursor: Following['id'] | null = null;
- const mutings = job.data.excludeMuting ? await Mutings.findBy({
- muterId: user.id,
- }) : [];
+ const mutings = job.data.excludeMuting ? await Mutings.findBy({
+ muterId: user.id,
+ }) : [];
- while (true) {
- const followings = await Followings.find({
- where: {
- followerId: user.id,
- ...(mutings.length > 0 ? { followeeId: Not(In(mutings.map(x => x.muteeId))) } : {}),
- ...(cursor ? { id: MoreThan(cursor) } : {}),
- },
- take: 100,
- order: {
- id: 1,
- },
- }) as Following[];
+ while (true) {
+ const followings = await Followings.find({
+ where: {
+ followerId: user.id,
+ ...(mutings.length > 0 ? { followeeId: Not(In(mutings.map(x => x.muteeId))) } : {}),
+ ...(cursor ? { id: MoreThan(cursor) } : {}),
+ },
+ take: 100,
+ order: {
+ id: 1,
+ },
+ }) as Following[];
- if (followings.length === 0) {
- break;
- }
+ if (followings.length === 0) {
+ break;
+ }
- cursor = followings[followings.length - 1].id;
+ cursor = followings[followings.length - 1].id;
- for (const following of followings) {
- const u = await Users.findOneBy({ id: following.followeeId });
- if (u == null) {
- continue;
- }
+ for (const following of followings) {
+ const u = await Users.findOneBy({ id: following.followeeId });
+ if (u == null) {
+ continue;
+ }
- if (job.data.excludeInactive && u.updatedAt && (Date.now() - u.updatedAt.getTime() > 1000 * 60 * 60 * 24 * 90)) {
- continue;
- }
+ if (job.data.excludeInactive && u.updatedAt && (Date.now() - u.updatedAt.getTime() > 1000 * 60 * 60 * 24 * 90)) {
+ continue;
+ }
- const content = getFullApAccount(u.username, u.host);
- await new Promise<void>((res, rej) => {
- stream.write(content + '\n', err => {
- if (err) {
- logger.error(err);
- rej(err);
- } else {
- res();
- }
+ const content = getFullApAccount(u.username, u.host);
+ await new Promise<void>((res, rej) => {
+ stream.write(content + '\n', err => {
+ if (err) {
+ logger.error(err);
+ rej(err);
+ } else {
+ res();
+ }
+ });
});
- });
+ }
}
- }
- stream.end();
- logger.succ(`Exported to: ${path}`);
+ stream.end();
+ logger.succ(`Exported to: ${path}`);
- const fileName = 'following-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
- const driveFile = await addFile({ user, path, name: fileName, force: true });
+ const fileName = 'following-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
+ const driveFile = await addFile({ user, path, name: fileName, force: true });
+
+ logger.succ(`Exported to: ${driveFile.id}`);
+ } finally {
+ cleanup();
+ }
- logger.succ(`Exported to: ${driveFile.id}`);
- cleanup();
done();
}
diff --git a/packages/backend/src/queue/processors/db/export-mute.ts b/packages/backend/src/queue/processors/db/export-mute.ts
index 0ef81971f1..6a36cfa072 100644
--- a/packages/backend/src/queue/processors/db/export-mute.ts
+++ b/packages/backend/src/queue/processors/db/export-mute.ts
@@ -1,11 +1,11 @@
import Bull from 'bull';
-import * as tmp from 'tmp';
import * as fs from 'node:fs';
import { queueLogger } from '../../logger.js';
import { addFile } from '@/services/drive/add-file.js';
import { format as dateFormat } from 'date-fns';
import { getFullApAccount } from '@/misc/convert-host.js';
+import { createTemp } from '@/misc/create-temp.js';
import { Users, Mutings } from '@/models/index.js';
import { IsNull, MoreThan } from 'typeorm';
import { DbUserJobData } from '@/queue/types.js';
@@ -22,74 +22,73 @@ export async function exportMute(job: Bull.Job<DbUserJobData>, done: any): Promi
}
// Create temp file
- const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
- tmp.file((e, path, fd, cleanup) => {
- if (e) return rej(e);
- res([path, cleanup]);
- });
- });
+ const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
- const stream = fs.createWriteStream(path, { flags: 'a' });
+ try {
+ const stream = fs.createWriteStream(path, { flags: 'a' });
- let exportedCount = 0;
- let cursor: any = null;
+ let exportedCount = 0;
+ let cursor: any = null;
- while (true) {
- const mutes = await Mutings.find({
- where: {
- muterId: user.id,
- expiresAt: IsNull(),
- ...(cursor ? { id: MoreThan(cursor) } : {}),
- },
- take: 100,
- order: {
- id: 1,
- },
- });
+ while (true) {
+ const mutes = await Mutings.find({
+ where: {
+ muterId: user.id,
+ expiresAt: IsNull(),
+ ...(cursor ? { id: MoreThan(cursor) } : {}),
+ },
+ take: 100,
+ order: {
+ id: 1,
+ },
+ });
- if (mutes.length === 0) {
- job.progress(100);
- break;
- }
+ if (mutes.length === 0) {
+ job.progress(100);
+ break;
+ }
- cursor = mutes[mutes.length - 1].id;
+ cursor = mutes[mutes.length - 1].id;
- for (const mute of mutes) {
- const u = await Users.findOneBy({ id: mute.muteeId });
- if (u == null) {
- exportedCount++; continue;
- }
+ for (const mute of mutes) {
+ const u = await Users.findOneBy({ id: mute.muteeId });
+ if (u == null) {
+ exportedCount++; continue;
+ }
- const content = getFullApAccount(u.username, u.host);
- await new Promise<void>((res, rej) => {
- stream.write(content + '\n', err => {
- if (err) {
- logger.error(err);
- rej(err);
- } else {
- res();
- }
+ const content = getFullApAccount(u.username, u.host);
+ await new Promise<void>((res, rej) => {
+ stream.write(content + '\n', err => {
+ if (err) {
+ logger.error(err);
+ rej(err);
+ } else {
+ res();
+ }
+ });
});
+ exportedCount++;
+ }
+
+ const total = await Mutings.countBy({
+ muterId: user.id,
});
- exportedCount++;
- }
- const total = await Mutings.countBy({
- muterId: user.id,
- });
+ job.progress(exportedCount / total);
+ }
- job.progress(exportedCount / total);
- }
+ stream.end();
+ logger.succ(`Exported to: ${path}`);
- stream.end();
- logger.succ(`Exported to: ${path}`);
+ const fileName = 'mute-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
+ const driveFile = await addFile({ user, path, name: fileName, force: true });
- const fileName = 'mute-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
- const driveFile = await addFile({ user, path, name: fileName, force: true });
+ logger.succ(`Exported to: ${driveFile.id}`);
+ } finally {
+ cleanup();
+ }
- logger.succ(`Exported to: ${driveFile.id}`);
- cleanup();
done();
}
diff --git a/packages/backend/src/queue/processors/db/export-notes.ts b/packages/backend/src/queue/processors/db/export-notes.ts
index 7e12a6fac2..051fcdf385 100644
--- a/packages/backend/src/queue/processors/db/export-notes.ts
+++ b/packages/backend/src/queue/processors/db/export-notes.ts
@@ -1,5 +1,4 @@
import Bull from 'bull';
-import * as tmp from 'tmp';
import * as fs from 'node:fs';
import { queueLogger } from '../../logger.js';
@@ -10,6 +9,7 @@ import { MoreThan } from 'typeorm';
import { Note } from '@/models/entities/note.js';
import { Poll } from '@/models/entities/poll.js';
import { DbUserJobData } from '@/queue/types.js';
+import { createTemp } from '@/misc/create-temp.js';
const logger = queueLogger.createSubLogger('export-notes');
@@ -23,82 +23,81 @@ export async function exportNotes(job: Bull.Job<DbUserJobData>, done: any): Prom
}
// Create temp file
- const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
- tmp.file((e, path, fd, cleanup) => {
- if (e) return rej(e);
- res([path, cleanup]);
- });
- });
+ const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
- const stream = fs.createWriteStream(path, { flags: 'a' });
+ try {
+ const stream = fs.createWriteStream(path, { flags: 'a' });
- const write = (text: string): Promise<void> => {
- return new Promise<void>((res, rej) => {
- stream.write(text, err => {
- if (err) {
- logger.error(err);
- rej(err);
- } else {
- res();
- }
+ const write = (text: string): Promise<void> => {
+ return new Promise<void>((res, rej) => {
+ stream.write(text, err => {
+ if (err) {
+ logger.error(err);
+ rej(err);
+ } else {
+ res();
+ }
+ });
});
- });
- };
+ };
- await write('[');
+ await write('[');
- let exportedNotesCount = 0;
- let cursor: Note['id'] | null = null;
+ let exportedNotesCount = 0;
+ let cursor: Note['id'] | null = null;
- while (true) {
- const notes = await Notes.find({
- where: {
- userId: user.id,
- ...(cursor ? { id: MoreThan(cursor) } : {}),
- },
- take: 100,
- order: {
- id: 1,
- },
- }) as Note[];
+ while (true) {
+ const notes = await Notes.find({
+ where: {
+ userId: user.id,
+ ...(cursor ? { id: MoreThan(cursor) } : {}),
+ },
+ take: 100,
+ order: {
+ id: 1,
+ },
+ }) as Note[];
- if (notes.length === 0) {
- job.progress(100);
- break;
- }
+ if (notes.length === 0) {
+ job.progress(100);
+ break;
+ }
- cursor = notes[notes.length - 1].id;
+ cursor = notes[notes.length - 1].id;
- for (const note of notes) {
- let poll: Poll | undefined;
- if (note.hasPoll) {
- poll = await Polls.findOneByOrFail({ noteId: note.id });
+ for (const note of notes) {
+ let poll: Poll | undefined;
+ if (note.hasPoll) {
+ poll = await Polls.findOneByOrFail({ noteId: note.id });
+ }
+ const content = JSON.stringify(serialize(note, poll));
+ const isFirst = exportedNotesCount === 0;
+ await write(isFirst ? content : ',\n' + content);
+ exportedNotesCount++;
}
- const content = JSON.stringify(serialize(note, poll));
- const isFirst = exportedNotesCount === 0;
- await write(isFirst ? content : ',\n' + content);
- exportedNotesCount++;
- }
- const total = await Notes.countBy({
- userId: user.id,
- });
+ const total = await Notes.countBy({
+ userId: user.id,
+ });
- job.progress(exportedNotesCount / total);
- }
+ job.progress(exportedNotesCount / total);
+ }
+
+ await write(']');
- await write(']');
+ stream.end();
+ logger.succ(`Exported to: ${path}`);
- stream.end();
- logger.succ(`Exported to: ${path}`);
+ const fileName = 'notes-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.json';
+ const driveFile = await addFile({ user, path, name: fileName, force: true });
- const fileName = 'notes-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.json';
- const driveFile = await addFile({ user, path, name: fileName, force: true });
+ logger.succ(`Exported to: ${driveFile.id}`);
+ } finally {
+ cleanup();
+ }
- logger.succ(`Exported to: ${driveFile.id}`);
- cleanup();
done();
}
diff --git a/packages/backend/src/queue/processors/db/export-user-lists.ts b/packages/backend/src/queue/processors/db/export-user-lists.ts
index 45852a6038..71dd72df27 100644
--- a/packages/backend/src/queue/processors/db/export-user-lists.ts
+++ b/packages/backend/src/queue/processors/db/export-user-lists.ts
@@ -1,11 +1,11 @@
import Bull from 'bull';
-import * as tmp from 'tmp';
import * as fs from 'node:fs';
import { queueLogger } from '../../logger.js';
import { addFile } from '@/services/drive/add-file.js';
import { format as dateFormat } from 'date-fns';
import { getFullApAccount } from '@/misc/convert-host.js';
+import { createTemp } from '@/misc/create-temp.js';
import { Users, UserLists, UserListJoinings } from '@/models/index.js';
import { In } from 'typeorm';
import { DbUserJobData } from '@/queue/types.js';
@@ -26,46 +26,45 @@ export async function exportUserLists(job: Bull.Job<DbUserJobData>, done: any):
});
// Create temp file
- const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
- tmp.file((e, path, fd, cleanup) => {
- if (e) return rej(e);
- res([path, cleanup]);
- });
- });
+ const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
- const stream = fs.createWriteStream(path, { flags: 'a' });
+ try {
+ const stream = fs.createWriteStream(path, { flags: 'a' });
- for (const list of lists) {
- const joinings = await UserListJoinings.findBy({ userListId: list.id });
- const users = await Users.findBy({
- id: In(joinings.map(j => j.userId)),
- });
+ for (const list of lists) {
+ const joinings = await UserListJoinings.findBy({ userListId: list.id });
+ const users = await Users.findBy({
+ id: In(joinings.map(j => j.userId)),
+ });
- for (const u of users) {
- const acct = getFullApAccount(u.username, u.host);
- const content = `${list.name},${acct}`;
- await new Promise<void>((res, rej) => {
- stream.write(content + '\n', err => {
- if (err) {
- logger.error(err);
- rej(err);
- } else {
- res();
- }
+ for (const u of users) {
+ const acct = getFullApAccount(u.username, u.host);
+ const content = `${list.name},${acct}`;
+ await new Promise<void>((res, rej) => {
+ stream.write(content + '\n', err => {
+ if (err) {
+ logger.error(err);
+ rej(err);
+ } else {
+ res();
+ }
+ });
});
- });
+ }
}
- }
- stream.end();
- logger.succ(`Exported to: ${path}`);
+ stream.end();
+ logger.succ(`Exported to: ${path}`);
+
+ const fileName = 'user-lists-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
+ const driveFile = await addFile({ user, path, name: fileName, force: true });
- const fileName = 'user-lists-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
- const driveFile = await addFile({ user, path, name: fileName, force: true });
+ logger.succ(`Exported to: ${driveFile.id}`);
+ } finally {
+ cleanup();
+ }
- logger.succ(`Exported to: ${driveFile.id}`);
- cleanup();
done();
}
diff --git a/packages/backend/src/queue/processors/db/import-custom-emojis.ts b/packages/backend/src/queue/processors/db/import-custom-emojis.ts
index 28e0b867a4..64dfe85374 100644
--- a/packages/backend/src/queue/processors/db/import-custom-emojis.ts
+++ b/packages/backend/src/queue/processors/db/import-custom-emojis.ts
@@ -1,9 +1,9 @@
import Bull from 'bull';
-import * as tmp from 'tmp';
import * as fs from 'node:fs';
import unzipper from 'unzipper';
import { queueLogger } from '../../logger.js';
+import { createTempDir } from '@/misc/create-temp.js';
import { downloadUrl } from '@/misc/download-url.js';
import { DriveFiles, Emojis } from '@/models/index.js';
import { DbUserImportJobData } from '@/queue/types.js';
@@ -25,13 +25,7 @@ export async function importCustomEmojis(job: Bull.Job<DbUserImportJobData>, don
return;
}
- // Create temp dir
- const [path, cleanup] = await new Promise<[string, () => void]>((res, rej) => {
- tmp.dir((e, path, cleanup) => {
- if (e) return rej(e);
- res([path, cleanup]);
- });
- });
+ const [path, cleanup] = await createTempDir();
logger.info(`Temp dir is ${path}`);