Use PostgreSQL instead of MongoDB (#4572)

* wip

* Update note.ts

* Update timeline.ts

* Update core.ts

* wip

* Update generate-visibility-query.ts

* wip

* wip

* wip

* wip

* wip

* Update global-timeline.ts

* wip

* wip

* wip

* Update vote.ts

* wip

* wip

* Update create.ts

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* Update files.ts

* wip

* wip

* Update CONTRIBUTING.md

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* Update read-notification.ts

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* Update cancel.ts

* wip

* wip

* wip

* Update show.ts

* wip

* wip

* Update gen-id.ts

* Update create.ts

* Update id.ts

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* Docker: Update files about Docker (#4599)

* Docker: Use cache if files used by `yarn install` were not updated

This patch reduces the number of times node_modules has to be installed.
For example, `yarn install` step will be skipped when only ".config/default.yml" is updated.

* Docker: Migrate MongoDB to Postgresql

Since version 11, Misskey uses PostgreSQL as its database instead of MongoDB.

* Docker: Uncomment about data persistence

This patch ensures the database data is persisted across container restarts.

* wip

* wip

* wip

* Update activitypub.ts

* wip

* wip

* wip

* Update logs.ts

* wip

* Update drive-file.ts

* Update register.ts

* wip

* wip

* Update mentions.ts

* wip

* wip

* wip

* Update recommendation.ts

* wip

* Update index.ts

* wip

* Update recommendation.ts

* Doc: Update docker.ja.md and docker.en.md (#1) (#4608)

Update how to set up misskey.

* wip

* ✌️

* wip

* Update note.ts

* Update postgre.ts

* wip

* wip

* wip

* wip

* Update add-file.ts

* wip

* wip

* wip

* Clean up

* Update logs.ts

* wip

* 🍕

* wip

* Add notes

* wip

* Update api-visibility.ts

* Update note.ts

* Update add-file.ts

* tests

* tests

* Update postgre.ts

* Update utils.ts

* wip

* wip

* Refactor

* wip

* Refactor

* wip

* wip

* Update show-users.ts

* Update update-instance.ts

* wip

* Update feed.ts

* Update outbox.ts

* Update outbox.ts

* Update user.ts

* wip

* Update list.ts

* Update update-hashtag.ts

* wip

* Update update-hashtag.ts

* Refactor

* Update update.ts

* wip

* wip

* ✌️

* clean up

* docs

* Update push.ts

* wip

* Update api.ts

* wip

* ✌️

* Update make-pagination-query.ts

* ✌️

* Delete hashtags.ts

* Update instances.ts

* Update instances.ts

* Update create.ts

* Update search.ts

* Update reversi-game.ts

* Update signup.ts

* Update user.ts

* id

* Update example.yml

* 🎨

* objectid

* fix

* reversi

* reversi

* Fix bug of chart engine

* Add test of chart engine

* Improve test

* Better testing

* Improve chart engine

* Refactor

* Add test of chart engine

* Refactor

* Add chart test

* Fix bug

* コミットし忘れ

* Refactoring

* ✌️

* Add tests

* Add test

* Extract note tests

* Refactor

* 存在しないユーザーにメンションできなくなっていた問題を修正

* Fix bug

* Update update-meta.ts

* Fix bug

* Update mention.vue

* Fix bug

* Update meta.ts

* Update CONTRIBUTING.md

* Fix bug

* Fix bug

* Fix bug

* Clean up

* Clean up

* Update notification.ts

* Clean up

* Add mute tests

* Add test

* Refactor

* Add test

* Fix test

* Refactor

* Refactor

* Add tests

* Update utils.ts

* Update utils.ts

* Fix test

* Update package.json

* Update update.ts

* Update manifest.ts

* Fix bug

* Fix bug

* Add test

* 🎨

* Update endpoint permissions

* Update permission

* Update person.ts

#4299

* データベースと同期しないように

* Fix bug

* Fix bug

* Update reversi-game.ts

* Use a feature of Node v11.7.0 to extract a public key (#4644)

* wip

* wip

* ✌️

* Refactoring

#1540

* test

* test

* test

* test

* test

* test

* test

* Fix bug

* Fix test

* 🍣

* wip

* #4471

* Add test for #4335

* Refactor

* Fix test

* Add tests

* 🕓

* Fix bug

* Add test

* Add test

* rename

* Fix bug
This commit is contained in:
syuilo
2019-04-07 21:50:36 +09:00
committed by GitHub
parent 13caf37991
commit f0a29721c9
592 changed files with 13463 additions and 14147 deletions

View File

@ -1,31 +1,27 @@
import { Buffer } from 'buffer';
import * as fs from 'fs';
import * as mongodb from 'mongodb';
import * as crypto from 'crypto';
import * as Minio from 'minio';
import * as uuid from 'uuid';
import * as sharp from 'sharp';
import DriveFile, { IMetadata, getDriveFileBucket, IDriveFile } from '../../models/drive-file';
import DriveFolder from '../../models/drive-folder';
import { pack } from '../../models/drive-file';
import { publishMainStream, publishDriveStream } from '../stream';
import { isLocalUser, IUser, IRemoteUser, isRemoteUser } from '../../models/user';
import delFile from './delete-file';
import config from '../../config';
import { getDriveFileWebpublicBucket } from '../../models/drive-file-webpublic';
import { getDriveFileThumbnailBucket } from '../../models/drive-file-thumbnail';
import driveChart from '../../services/chart/drive';
import perUserDriveChart from '../../services/chart/per-user-drive';
import instanceChart from '../../services/chart/instance';
import fetchMeta from '../../misc/fetch-meta';
import { GenerateVideoThumbnail } from './generate-video-thumbnail';
import { driveLogger } from './logger';
import { IImage, ConvertToJpeg, ConvertToWebp, ConvertToPng } from './image-processor';
import Instance from '../../models/instance';
import { contentDisposition } from '../../misc/content-disposition';
import { detectMine } from '../../misc/detect-mine';
import { DriveFiles, DriveFolders, Users, Instances } from '../../models';
import { InternalStorage } from './internal-storage';
import { DriveFile } from '../../models/entities/drive-file';
import { IRemoteUser, User } from '../../models/entities/user';
import { driveChart, perUserDriveChart, instanceChart } from '../chart';
import { genId } from '../../misc/gen-id';
import { isDuplicateKeyValueError } from '../../misc/is-duplicate-key-value-error';
const logger = driveLogger.createSubLogger('register', 'yellow');
@ -36,11 +32,10 @@ const logger = driveLogger.createSubLogger('register', 'yellow');
* @param type Content-Type for original
* @param hash Hash for original
* @param size Size for original
* @param metadata
*/
async function save(path: string, name: string, type: string, hash: string, size: number, metadata: IMetadata): Promise<IDriveFile> {
async function save(file: DriveFile, path: string, name: string, type: string, hash: string, size: number): Promise<DriveFile> {
// thunbnail, webpublic を必要なら生成
const alts = await generateAlts(path, type, !metadata.uri);
const alts = await generateAlts(path, type, !file.uri);
if (config.drive && config.drive.storage == 'minio') {
//#region ObjectStorage params
@ -60,10 +55,10 @@ async function save(path: string, name: string, type: string, hash: string, size
const url = `${ baseUrl }/${ key }`;
// for alts
let webpublicKey = null as string;
let webpublicUrl = null as string;
let thumbnailKey = null as string;
let thumbnailUrl = null as string;
let webpublicKey: string = null;
let webpublicUrl: string = null;
let thumbnailKey: string = null;
let thumbnailUrl: string = null;
//#endregion
//#region Uploads
@ -91,58 +86,52 @@ async function save(path: string, name: string, type: string, hash: string, size
await Promise.all(uploads);
//#endregion
//#region DB
Object.assign(metadata, {
withoutChunks: true,
storage: 'minio',
storageProps: {
key,
webpublicKey,
thumbnailKey,
},
url,
webpublicUrl,
thumbnailUrl,
} as IMetadata);
file.url = url;
file.thumbnailUrl = thumbnailUrl;
file.webpublicUrl = webpublicUrl;
file.accessKey = key;
file.thumbnailAccessKey = thumbnailKey;
file.webpublicAccessKey = webpublicKey;
file.name = name;
file.type = type;
file.md5 = hash;
file.size = size;
file.storedInternal = false;
const file = await DriveFile.insert({
length: size,
uploadDate: new Date(),
md5: hash,
filename: name,
metadata: metadata,
contentType: type
});
//#endregion
return await DriveFiles.save(file);
} else { // use internal storage
const accessKey = uuid.v4();
const thumbnailAccessKey = uuid.v4();
const webpublicAccessKey = uuid.v4();
return file;
} else { // use MongoDB GridFS
// #region store original
const originalDst = await getDriveFileBucket();
const url = InternalStorage.saveFromPath(accessKey, path);
// web用(Exif削除済み)がある場合はオリジナルにアクセス制限
if (alts.webpublic) metadata.accessKey = uuid.v4();
const originalFile = await storeOriginal(originalDst, name, path, type, metadata);
logger.info(`original stored to ${originalFile._id}`);
// #endregion store original
// #region store webpublic
if (alts.webpublic) {
const webDst = await getDriveFileWebpublicBucket();
const webFile = await storeAlts(webDst, name, alts.webpublic.data, alts.webpublic.type, originalFile._id);
logger.info(`web stored ${webFile._id}`);
}
// #endregion store webpublic
let thumbnailUrl: string;
let webpublicUrl: string;
if (alts.thumbnail) {
const thumDst = await getDriveFileThumbnailBucket();
const thumFile = await storeAlts(thumDst, name, alts.thumbnail.data, alts.thumbnail.type, originalFile._id);
logger.info(`web stored ${thumFile._id}`);
thumbnailUrl = InternalStorage.saveFromBuffer(thumbnailAccessKey, alts.thumbnail.data);
logger.info(`thumbnail stored: ${thumbnailAccessKey}`);
}
return originalFile;
if (alts.webpublic) {
webpublicUrl = InternalStorage.saveFromBuffer(webpublicAccessKey, alts.webpublic.data);
logger.info(`web stored: ${webpublicAccessKey}`);
}
file.storedInternal = true;
file.url = url;
file.thumbnailUrl = thumbnailUrl;
file.webpublicUrl = webpublicUrl;
file.accessKey = accessKey;
file.thumbnailAccessKey = thumbnailAccessKey;
file.webpublicAccessKey = webpublicAccessKey;
file.name = name;
file.type = type;
file.md5 = hash;
file.size = size;
return await DriveFiles.save(file);
}
}
@ -211,51 +200,14 @@ async function upload(key: string, stream: fs.ReadStream | Buffer, type: string,
await minio.putObject(config.drive.bucket, key, stream, null, metadata);
}
/**
 * Stores the original uploaded file into the given MongoDB GridFS bucket.
 *
 * @param bucket GridFS bucket to write into
 * @param name Stored filename
 * @param path Path of the local file to upload
 * @param contentType MIME type recorded on the GridFS file
 * @param metadata Arbitrary metadata document attached to the GridFS file
 * @returns Resolves with the stored file document (as emitted by the write
 *          stream's 'finish' event — callers read `._id` from it), or rejects
 *          if the write stream errors.
 */
export async function storeOriginal(bucket: mongodb.GridFSBucket, name: string, path: string, contentType: string, metadata: any) {
	return new Promise<IDriveFile>((resolve, reject) => {
		const writeStream = bucket.openUploadStream(name, {
			contentType,
			metadata
		});
		writeStream.once('finish', resolve);
		writeStream.on('error', reject);
		// Stream the file from disk into GridFS.
		fs.createReadStream(path).pipe(writeStream);
	});
}
/**
 * Stores an alternative rendition of a file (callers pass the generated
 * thumbnail or web-public version) into the given MongoDB GridFS bucket,
 * linked back to the original via `originalId` in its metadata.
 *
 * @param bucket GridFS bucket to write into
 * @param name Stored filename
 * @param data In-memory contents of the alternative rendition
 * @param contentType MIME type recorded on the GridFS file
 * @param originalId ObjectID of the original file this rendition belongs to
 * @returns Resolves with the stored file document (as emitted by the write
 *          stream's 'finish' event), or rejects if the write stream errors.
 */
export async function storeAlts(bucket: mongodb.GridFSBucket, name: string, data: Buffer, contentType: string, originalId: mongodb.ObjectID) {
	return new Promise<IDriveFile>((resolve, reject) => {
		const writeStream = bucket.openUploadStream(name, {
			contentType,
			metadata: {
				originalId
			}
		});
		writeStream.once('finish', resolve);
		writeStream.on('error', reject);
		// Write the buffer and close the stream in one call.
		writeStream.end(data);
	});
}
async function deleteOldFile(user: IRemoteUser) {
const oldFile = await DriveFile.findOne({
_id: {
$nin: [user.avatarId, user.bannerId]
},
'metadata.userId': user._id
}, {
sort: {
_id: 1
}
});
const oldFile = await DriveFiles.createQueryBuilder()
.select('file')
.where('file.id != :avatarId', { avatarId: user.avatarId })
.andWhere('file.id != :bannerId', { bannerId: user.bannerId })
.andWhere('file.userId = :userId', { userId: user.id })
.orderBy('file.id', 'DESC')
.getOne();
if (oldFile) {
delFile(oldFile, true);
@ -278,17 +230,17 @@ async function deleteOldFile(user: IRemoteUser) {
* @return Created drive file
*/
export default async function(
user: IUser,
user: User,
path: string,
name: string = null,
comment: string = null,
folderId: mongodb.ObjectID = null,
folderId: any = null,
force: boolean = false,
isLink: boolean = false,
url: string = null,
uri: string = null,
sensitive: boolean = null
): Promise<IDriveFile> {
): Promise<DriveFile> {
// Calc md5 hash
const calcHash = new Promise<string>((res, rej) => {
const readable = fs.createReadStream(path);
@ -322,51 +274,29 @@ export default async function(
if (!force) {
// Check if there is a file with the same hash
const much = await DriveFile.findOne({
const much = await DriveFiles.findOne({
md5: hash,
'metadata.userId': user._id,
'metadata.deletedAt': { $exists: false }
userId: user.id,
});
if (much) {
logger.info(`file with same hash is found: ${much._id}`);
logger.info(`file with same hash is found: ${much.id}`);
return much;
}
}
//#region Check drive usage
if (!isLink) {
const usage = await DriveFile
.aggregate([{
$match: {
'metadata.userId': user._id,
'metadata.deletedAt': { $exists: false }
}
}, {
$project: {
length: true
}
}, {
$group: {
_id: null,
usage: { $sum: '$length' }
}
}])
.then((aggregates: any[]) => {
if (aggregates.length > 0) {
return aggregates[0].usage;
}
return 0;
});
logger.debug(`drive usage is ${usage}`);
const usage = await DriveFiles.clacDriveUsageOf(user);
const instance = await fetchMeta();
const driveCapacity = 1024 * 1024 * (isLocalUser(user) ? instance.localDriveCapacityMb : instance.remoteDriveCapacityMb);
const driveCapacity = 1024 * 1024 * (Users.isLocalUser(user) ? instance.localDriveCapacityMb : instance.remoteDriveCapacityMb);
logger.debug(`drive usage is ${usage} (max: ${driveCapacity})`);
// If usage limit exceeded
if (usage + size > driveCapacity) {
if (isLocalUser(user)) {
if (Users.isLocalUser(user)) {
throw 'no-free-space';
} else {
// (アバターまたはバナーを含まず)最も古いファイルを削除する
@ -381,9 +311,9 @@ export default async function(
return null;
}
const driveFolder = await DriveFolder.findOne({
_id: folderId,
userId: user._id
const driveFolder = await DriveFolders.findOne({
id: folderId,
userId: user.id
});
if (driveFolder == null) throw 'folder-not-found';
@ -437,54 +367,48 @@ export default async function(
const [folder] = await Promise.all([fetchFolder(), Promise.all(propPromises)]);
const metadata = {
userId: user._id,
_user: {
host: user.host
},
folderId: folder !== null ? folder._id : null,
comment: comment,
properties: properties,
withoutChunks: isLink,
isRemote: isLink,
isSensitive: isLocalUser(user) && user.settings.alwaysMarkNsfw ? true :
(sensitive !== null && sensitive !== undefined)
? sensitive
: false
} as IMetadata;
let file = new DriveFile();
file.id = genId();
file.createdAt = new Date();
file.userId = user.id;
file.userHost = user.host;
file.folderId = folder !== null ? folder.id : null;
file.comment = comment;
file.properties = properties;
file.isRemote = isLink;
file.isSensitive = Users.isLocalUser(user) && user.alwaysMarkNsfw ? true :
(sensitive !== null && sensitive !== undefined)
? sensitive
: false;
if (url !== null) {
metadata.src = url;
file.src = url;
if (isLink) {
metadata.url = url;
file.url = url;
}
}
if (uri !== null) {
metadata.uri = uri;
file.uri = uri;
}
let driveFile: IDriveFile;
if (isLink) {
try {
driveFile = await DriveFile.insert({
length: 0,
uploadDate: new Date(),
md5: hash,
filename: detectedName,
metadata: metadata,
contentType: mime
});
file.size = 0;
file.md5 = hash;
file.name = detectedName;
file.type = mime;
file = await DriveFiles.save(file);
} catch (e) {
// duplicate key error (when already registered)
if (e.code === 11000) {
logger.info(`already registered ${metadata.uri}`);
if (isDuplicateKeyValueError(e)) {
logger.info(`already registered ${file.uri}`);
driveFile = await DriveFile.findOne({
'metadata.uri': metadata.uri,
'metadata.userId': user._id
file = await DriveFiles.findOne({
uri: file.uri,
userId: user.id
});
} else {
logger.error(e);
@ -492,29 +416,25 @@ export default async function(
}
}
} else {
driveFile = await (save(path, detectedName, mime, hash, size, metadata));
file = await (save(file, path, detectedName, mime, hash, size));
}
logger.succ(`drive file has been created ${driveFile._id}`);
logger.succ(`drive file has been created ${file.id}`);
pack(driveFile).then(packedFile => {
DriveFiles.pack(file).then(packedFile => {
// Publish driveFileCreated event
publishMainStream(user._id, 'driveFileCreated', packedFile);
publishDriveStream(user._id, 'fileCreated', packedFile);
publishMainStream(user.id, 'driveFileCreated', packedFile);
publishDriveStream(user.id, 'fileCreated', packedFile);
});
// 統計を更新
driveChart.update(driveFile, true);
perUserDriveChart.update(driveFile, true);
if (isRemoteUser(driveFile.metadata._user)) {
instanceChart.updateDrive(driveFile, true);
Instance.update({ host: driveFile.metadata._user.host }, {
$inc: {
driveUsage: driveFile.length,
driveFiles: 1
}
});
driveChart.update(file, true);
perUserDriveChart.update(file, true);
if (file.userHost !== null) {
instanceChart.updateDrive(file, true);
Instances.increment({ host: file.userHost }, 'driveUsage', file.size);
Instances.increment({ host: file.userHost }, 'driveFiles', 1);
}
return driveFile;
return file;
}