This commit is contained in:
syuilo
2019-02-06 20:56:48 +09:00
parent ede70d354e
commit 27d16c6a12
9 changed files with 351 additions and 4 deletions

View File

@@ -0,0 +1,90 @@
import * as bq from 'bee-queue';
import * as tmp from 'tmp';
import * as fs from 'fs';
import * as mongo from 'mongodb';
import { queueLogger } from '../logger';
import addFile from '../../services/drive/add-file';
import User from '../../models/user';
import dateFormat = require('dateformat');
import Blocking from '../../models/blocking';
import config from '../../config';

const logger = queueLogger.createSubLogger('export-blocking');

export async function exportBlocking(job: bq.Job, done: any): Promise<void> {
	logger.info(`Exporting blocking of ${job.data.user._id} ...`);

	const user = await User.findOne({
		_id: new mongo.ObjectID(job.data.user._id.toString())
	});

	// Create temp file
	const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
		tmp.file((e, path, fd, cleanup) => {
			if (e) return rej(e);
			res([path, cleanup]);
		});
	});

	logger.info(`Temp file is ${path}`);

	const stream = fs.createWriteStream(path, { flags: 'a' });

	let exportedCount = 0;
	let ended = false;
	let cursor: any = null;

	while (!ended) {
		const blockings = await Blocking.find({
			blockerId: user._id,
			...(cursor ? { _id: { $gt: cursor } } : {})
		}, {
			limit: 100,
			sort: {
				_id: 1
			}
		});

		if (blockings.length === 0) {
			ended = true;
			job.reportProgress(100);
			break;
		}

		cursor = blockings[blockings.length - 1]._id;

		for (const block of blockings) {
			const u = await User.findOne({ _id: block.blockeeId }, { fields: { username: true, host: true } });
			const content = u.host ? `${u.username}@${u.host}` : `${u.username}@${config.host}`;
			await new Promise((res, rej) => {
				stream.write(content + '\n', err => {
					if (err) {
						logger.error(err);
						rej(err);
					} else {
						res();
					}
				});
			});
			exportedCount++;
		}

		const total = await Blocking.count({
			blockerId: user._id,
		});

		job.reportProgress(exportedCount / total);
	}

	stream.end();
	logger.succ(`Exported to: ${path}`);

	const fileName = dateFormat(new Date(), 'blocking-yyyy-mm-dd-HH-MM-ss') + '.json';
	const driveFile = await addFile(user, path, fileName);

	logger.succ(`Exported to: ${driveFile._id}`);
	cleanup();
	done();
}
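
Each processor added in this commit receives a bee-queue `Job` whose `data` carries the requesting user, plus a `done` callback it invokes once the drive file has been saved. As a rough sketch of how such a handler is typically driven — the queue name, the `type` field, and the payload below are assumptions for illustration, not taken from this commit:

import Queue = require('bee-queue');
import { exportBlocking } from './export-blocking';

// Hypothetical queue name; the real queue setup lives elsewhere in the repo
// and is not part of this diff.
const queue = new Queue('export-example');

// Register the worker. bee-queue hands each job to the handler together with
// a done callback, which exportBlocking() calls when it finishes.
queue.process((job: any, done: any) => {
	exportBlocking(job, done);
});

// Enqueue a job for some user. Only job.data.user._id is read by the handler;
// the id below is a placeholder.
queue.createJob({ type: 'exportBlocking', user: { _id: '000000000000000000000000' } })
	.save()
	.then(job => console.log(`Queued export job ${job.id}`));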

View File

@@ -0,0 +1,90 @@
import * as bq from 'bee-queue';
import * as tmp from 'tmp';
import * as fs from 'fs';
import * as mongo from 'mongodb';
import { queueLogger } from '../logger';
import addFile from '../../services/drive/add-file';
import User from '../../models/user';
import dateFormat = require('dateformat');
import Following from '../../models/following';
import config from '../../config';

const logger = queueLogger.createSubLogger('export-following');

export async function exportFollowing(job: bq.Job, done: any): Promise<void> {
	logger.info(`Exporting following of ${job.data.user._id} ...`);

	const user = await User.findOne({
		_id: new mongo.ObjectID(job.data.user._id.toString())
	});

	// Create temp file
	const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
		tmp.file((e, path, fd, cleanup) => {
			if (e) return rej(e);
			res([path, cleanup]);
		});
	});

	logger.info(`Temp file is ${path}`);

	const stream = fs.createWriteStream(path, { flags: 'a' });

	let exportedCount = 0;
	let ended = false;
	let cursor: any = null;

	while (!ended) {
		const followings = await Following.find({
			followerId: user._id,
			...(cursor ? { _id: { $gt: cursor } } : {})
		}, {
			limit: 100,
			sort: {
				_id: 1
			}
		});

		if (followings.length === 0) {
			ended = true;
			job.reportProgress(100);
			break;
		}

		cursor = followings[followings.length - 1]._id;

		for (const following of followings) {
			const u = await User.findOne({ _id: following.followeeId }, { fields: { username: true, host: true } });
			const content = u.host ? `${u.username}@${u.host}` : `${u.username}@${config.host}`;
			await new Promise((res, rej) => {
				stream.write(content + '\n', err => {
					if (err) {
						logger.error(err);
						rej(err);
					} else {
						res();
					}
				});
			});
			exportedCount++;
		}

		const total = await Following.count({
			followerId: user._id,
		});

		job.reportProgress(exportedCount / total);
	}

	stream.end();
	logger.succ(`Exported to: ${path}`);

	const fileName = dateFormat(new Date(), 'following-yyyy-mm-dd-HH-MM-ss') + '.json';
	const driveFile = await addFile(user, path, fileName);

	logger.succ(`Exported to: ${driveFile._id}`);
	cleanup();
	done();
}

View File

@@ -0,0 +1,90 @@
import * as bq from 'bee-queue';
import * as tmp from 'tmp';
import * as fs from 'fs';
import * as mongo from 'mongodb';
import { queueLogger } from '../logger';
import addFile from '../../services/drive/add-file';
import User from '../../models/user';
import dateFormat = require('dateformat');
import Mute from '../../models/mute';
import config from '../../config';

const logger = queueLogger.createSubLogger('export-mute');

export async function exportMute(job: bq.Job, done: any): Promise<void> {
	logger.info(`Exporting mute of ${job.data.user._id} ...`);

	const user = await User.findOne({
		_id: new mongo.ObjectID(job.data.user._id.toString())
	});

	// Create temp file
	const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
		tmp.file((e, path, fd, cleanup) => {
			if (e) return rej(e);
			res([path, cleanup]);
		});
	});

	logger.info(`Temp file is ${path}`);

	const stream = fs.createWriteStream(path, { flags: 'a' });

	let exportedCount = 0;
	let ended = false;
	let cursor: any = null;

	while (!ended) {
		const mutes = await Mute.find({
			muterId: user._id,
			...(cursor ? { _id: { $gt: cursor } } : {})
		}, {
			limit: 100,
			sort: {
				_id: 1
			}
		});

		if (mutes.length === 0) {
			ended = true;
			job.reportProgress(100);
			break;
		}

		cursor = mutes[mutes.length - 1]._id;

		for (const mute of mutes) {
			const u = await User.findOne({ _id: mute.muteeId }, { fields: { username: true, host: true } });
			const content = u.host ? `${u.username}@${u.host}` : `${u.username}@${config.host}`;
			await new Promise((res, rej) => {
				stream.write(content + '\n', err => {
					if (err) {
						logger.error(err);
						rej(err);
					} else {
						res();
					}
				});
			});
			exportedCount++;
		}

		const total = await Mute.count({
			muterId: user._id,
		});

		job.reportProgress(exportedCount / total);
	}

	stream.end();
	logger.succ(`Exported to: ${path}`);

	const fileName = dateFormat(new Date(), 'mute-yyyy-mm-dd-HH-MM-ss') + '.json';
	const driveFile = await addFile(user, path, fileName);

	logger.succ(`Exported to: ${driveFile._id}`);
	cleanup();
	done();
}
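
All three exporters share the same skeleton: page through the collection with an ascending `_id` cursor, 100 documents at a time, and append one `username@host` line per record, wrapping `stream.write` in a Promise so that a write error fails the job. The write half of that pattern, pulled out as a standalone helper purely for illustration (no such helper exists in this commit):

import * as fs from 'fs';

// Hypothetical helper mirroring the inline Promise wrapper used above:
// resolve once the chunk has been handed to the stream, reject on error.
function writeLine(stream: fs.WriteStream, line: string): Promise<void> {
	return new Promise<void>((res, rej) => {
		stream.write(line + '\n', err => err ? rej(err) : res());
	});
}

// Usage, matching the acct format written by the exporters:
//   await writeLine(stream, u.host ? `${u.username}@${u.host}` : `${u.username}@${config.host}`);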

View File

@@ -1,12 +1,18 @@
import deliver from './http/deliver';
import processInbox from './http/process-inbox';
import { exportNotes } from './export-notes';
import { exportFollowing } from './export-following';
import { exportMute } from './export-mute';
import { exportBlocking } from './export-blocking';
import { queueLogger } from '../logger';

const handlers: any = {
	deliver,
	processInbox,
	exportNotes,
	exportFollowing,
	exportMute,
	exportBlocking,
};

export default (job: any, done: any) => {
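
The hunk ends at the dispatcher's opening line, so its body is not shown here. Given the `handlers` map above and the `(job, done)` signature, a plausible shape — an assumption for illustration, not a reconstruction of the missing code — is:

// Assumed dispatch logic; the actual body is cut off in the diff above.
// `handlers` and `queueLogger` refer to the values defined in this file.
export default (job: any, done: any) => {
	const handler = handlers[job.data.type];

	if (handler) {
		handler(job, done);
	} else {
		queueLogger.error(`Unknown job type: ${job.data.type}`);
		done();
	}
};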