summaryrefslogtreecommitdiff
path: root/src/queue/processors/export-notes.ts
diff options
context:
space:
mode:
author: syuilo <Syuilotan@yahoo.co.jp> 2019-02-05 19:50:14 +0900
committer: GitHub <noreply@github.com> 2019-02-05 19:50:14 +0900
commit5db5bbd1cd25f83640d4dd01de14e7774d9370db (patch)
tree701c27744ca0e4d6e16d5f2fb568481c1bf4baf0 /src/queue/processors/export-notes.ts
parentAdd ffmpeg package for the runner container (#4145) (diff)
downloadsharkey-5db5bbd1cd25f83640d4dd01de14e7774d9370db.tar.gz
sharkey-5db5bbd1cd25f83640d4dd01de14e7774d9370db.tar.bz2
sharkey-5db5bbd1cd25f83640d4dd01de14e7774d9370db.zip
自分の投稿情報をエクスポートできるように (#4144)
* wip * 正しいJSONを生成するように * データを整形
Diffstat (limited to 'src/queue/processors/export-notes.ts')
-rw-r--r--src/queue/processors/export-notes.ts128
1 files changed, 128 insertions, 0 deletions
diff --git a/src/queue/processors/export-notes.ts b/src/queue/processors/export-notes.ts
new file mode 100644
index 0000000000..52845a5a9c
--- /dev/null
+++ b/src/queue/processors/export-notes.ts
@@ -0,0 +1,128 @@
+import * as bq from 'bee-queue';
+import * as tmp from 'tmp';
+import * as fs from 'fs';
+import * as mongo from 'mongodb';
+
+import { queueLogger } from '../logger';
+import Note, { INote } from '../../models/note';
+import addFile from '../../services/drive/add-file';
+import User from '../../models/user';
+import dateFormat = require('dateformat');
+
+const logger = queueLogger.createSubLogger('export-notes');
+
+export async function exportNotes(job: bq.Job, done: any): Promise<void> {
+ logger.info(`Exporting notes of ${job.data.user._id} ...`);
+
+ const user = await User.findOne({
+ _id: new mongo.ObjectID(job.data.user._id.toString())
+ });
+
+ // Create temp file
+ const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
+ tmp.file((e, path, fd, cleanup) => {
+ if (e) return rej(e);
+ res([path, cleanup]);
+ });
+ });
+
+ logger.info(`Temp file is ${path}`);
+
+ const stream = fs.createWriteStream(path, { flags: 'a' });
+
+ await new Promise((res, rej) => {
+ stream.write('[', err => {
+ if (err) {
+ logger.error(err);
+ rej(err);
+ } else {
+ res();
+ }
+ });
+ });
+
+ let exportedNotesCount = 0;
+ let ended = false;
+ let cursor: any = null;
+
+ while (!ended) {
+ const notes = await Note.find({
+ userId: user._id,
+ ...(cursor ? { _id: { $gt: cursor } } : {})
+ }, {
+ limit: 100,
+ sort: {
+ _id: 1
+ }
+ });
+
+ if (notes.length === 0) {
+ ended = true;
+ job.reportProgress(100);
+ break;
+ }
+
+ cursor = notes[notes.length - 1]._id;
+
+ for (const note of notes) {
+ const content = JSON.stringify(serialize(note));
+ await new Promise((res, rej) => {
+ stream.write(exportedNotesCount === 0 ? content : ',\n' + content, err => {
+ if (err) {
+ logger.error(err);
+ rej(err);
+ } else {
+ res();
+ }
+ });
+ });
+ exportedNotesCount++;
+ }
+
+ const total = await Note.count({
+ userId: user._id,
+ });
+
+ job.reportProgress(exportedNotesCount / total);
+ }
+
+ await new Promise((res, rej) => {
+ stream.write(']', err => {
+ if (err) {
+ logger.error(err);
+ rej(err);
+ } else {
+ res();
+ }
+ });
+ });
+
+ stream.end();
+ logger.succ(`Exported to: ${path}`);
+
+ const fileName = dateFormat(new Date(), 'yyyy-mm-dd-HH-MM-ss') + '.json';
+ const driveFile = await addFile(user, path, fileName);
+
+ logger.succ(`Exported to: ${driveFile._id}`);
+ cleanup();
+ done();
+}
+
+function serialize(note: INote): any {
+ return {
+ id: note._id,
+ text: note.text,
+ createdAt: note.createdAt,
+ fileIds: note.fileIds,
+ replyId: note.replyId,
+ renoteId: note.renoteId,
+ poll: note.poll,
+ cw: note.cw,
+ viaMobile: note.viaMobile,
+ visibility: note.visibility,
+ visibleUserIds: note.visibleUserIds,
+ appId: note.appId,
+ geo: note.geo,
+ localOnly: note.localOnly
+ };
+}