add-file-to-drive - Promise百烈拳とメモリ削減

This commit is contained in:
otofune 2017-11-14 03:46:30 +09:00
parent feab13c5eb
commit 64aedcaa6b
3 changed files with 269 additions and 152 deletions

View File

@ -54,13 +54,14 @@
"@types/node": "8.0.49", "@types/node": "8.0.49",
"@types/page": "1.5.32", "@types/page": "1.5.32",
"@types/proxy-addr": "2.0.0", "@types/proxy-addr": "2.0.0",
"@types/seedrandom": "2.4.27",
"@types/ratelimiter": "2.1.28", "@types/ratelimiter": "2.1.28",
"@types/redis": "2.8.1", "@types/redis": "2.8.1",
"@types/request": "2.0.7", "@types/request": "^2.0.7",
"@types/rimraf": "2.0.2", "@types/rimraf": "2.0.2",
"@types/riot": "3.6.1", "@types/riot": "3.6.1",
"@types/seedrandom": "2.4.27",
"@types/serve-favicon": "2.2.29", "@types/serve-favicon": "2.2.29",
"@types/tmp": "0.0.33",
"@types/uuid": "3.4.3", "@types/uuid": "3.4.3",
"@types/webpack": "3.8.0", "@types/webpack": "3.8.0",
"@types/webpack-stream": "3.2.8", "@types/webpack-stream": "3.2.8",
@ -111,7 +112,6 @@
"deep-equal": "1.0.1", "deep-equal": "1.0.1",
"deepcopy": "0.6.3", "deepcopy": "0.6.3",
"diskusage": "0.2.2", "diskusage": "0.2.2",
"download": "6.2.5",
"elasticsearch": "13.3.1", "elasticsearch": "13.3.1",
"escape-regexp": "0.0.1", "escape-regexp": "0.0.1",
"express": "4.15.4", "express": "4.15.4",
@ -140,7 +140,7 @@
"recaptcha-promise": "0.1.3", "recaptcha-promise": "0.1.3",
"reconnecting-websocket": "3.2.2", "reconnecting-websocket": "3.2.2",
"redis": "2.8.0", "redis": "2.8.0",
"request": "2.83.0", "request": "^2.83.0",
"rimraf": "2.6.2", "rimraf": "2.6.2",
"riot": "3.7.4", "riot": "3.7.4",
"rndstr": "1.0.0", "rndstr": "1.0.0",
@ -152,6 +152,7 @@
"syuilo-password-strength": "0.0.1", "syuilo-password-strength": "0.0.1",
"tcp-port-used": "0.1.2", "tcp-port-used": "0.1.2",
"textarea-caret": "3.0.2", "textarea-caret": "3.0.2",
"tmp": "0.0.33",
"ts-node": "3.3.0", "ts-node": "3.3.0",
"typescript": "2.6.1", "typescript": "2.6.1",
"uuid": "3.1.0", "uuid": "3.1.0",

View File

@ -9,28 +9,34 @@ import DriveFolder from '../models/drive-folder';
import serialize from '../serializers/drive-file'; import serialize from '../serializers/drive-file';
import event from '../event'; import event from '../event';
import config from '../../conf'; import config from '../../conf';
import { Duplex } from 'stream'; import { Buffer } from 'buffer';
import * as fs from 'fs';
import * as tmp from 'tmp';
import * as stream from 'stream';
const log = debug('misskey:register-drive-file'); const log = debug('misskey:register-drive-file');
const addToGridFS = (name, binary, type, metadata): Promise<any> => new Promise(async (resolve, reject) => { const tmpFile = (): Promise<string> => new Promise((resolve, reject) => {
const dataStream = new Duplex(); tmp.file((e, path) => {
dataStream.push(binary); if (e) return reject(e)
dataStream.push(null); resolve(path)
})
})
const bucket = await getGridFSBucket(); const addToGridFS = (name: string, readable: stream.Readable, type: string, metadata: any): Promise<any> =>
const writeStream = bucket.openUploadStream(name, { contentType: type, metadata }); getGridFSBucket()
writeStream.once('finish', (doc) => { resolve(doc); }); .then(bucket => new Promise((resolve, reject) => {
writeStream.on('error', reject); const writeStream = bucket.openUploadStream(name, { contentType: type, metadata });
dataStream.pipe(writeStream); writeStream.once('finish', (doc) => { resolve(doc); });
}); writeStream.on('error', reject);
readable.pipe(writeStream);
}))
/** /**
* Add file to drive * Add file to drive
* *
* @param user User who wish to add file * @param user User who wish to add file
* @param fileName File name * @param file File path, binary, or readableStream
* @param data Contents
* @param comment Comment * @param comment Comment
* @param type File type * @param type File type
* @param folderId Folder ID * @param folderId Folder ID
@ -39,139 +45,201 @@ const addToGridFS = (name, binary, type, metadata): Promise<any> => new Promise(
*/ */
export default ( export default (
user: any, user: any,
data: Buffer, file: string | Buffer | stream.Readable,
name: string = null, name: string = null,
comment: string = null, comment: string = null,
folderId: mongodb.ObjectID = null, folderId: mongodb.ObjectID = null,
force: boolean = false force: boolean = false
) => new Promise<any>(async (resolve, reject) => { ) => new Promise<any>((resolve, reject) => {
log(`registering ${name} (user: ${user.username})`); log(`registering ${name} (user: ${user.username})`);
// File size // Get file path
const size = data.byteLength; new Promise((res: (v: string) => void, rej) => {
if (typeof file === 'string') {
log(`size is ${size}`); res(file)
return
// File type
let mime = 'application/octet-stream';
const type = fileType(data);
if (type !== null) {
mime = type.mime;
if (name === null) {
name = `untitled.${type.ext}`;
} }
} else { if (file instanceof Buffer) {
if (name === null) { tmpFile()
name = 'untitled'; .then(path => {
fs.writeFile(path, file, (err) => {
if (err) rej(err)
res(path)
})
})
.catch(rej)
return
} }
} if (typeof file === 'object' && typeof file.read === 'function') {
tmpFile()
log(`type is ${mime}`); .then(path => {
const readable: stream.Readable = file
// Generate hash const writable = fs.createWriteStream(path)
const hash = crypto readable
.createHash('md5') .on('error', rej)
.update(data) .on('end', () => {
.digest('hex') as string; res(path)
})
log(`hash is ${hash}`); .pipe(writable)
.on('error', rej)
if (!force) { })
// Check if there is a file with the same hash .catch(rej)
const much = await DriveFile.findOne({
md5: hash,
'metadata.user_id': user._id
});
if (much !== null) {
log('file with same hash is found');
return resolve(much);
} else {
log('file with same hash is not found');
} }
} rej(new Error('un-compatible file.'))
})
// Calculate hash, get content type and get file size
.then(path => Promise.all([
path,
// hash
((): Promise<string> => new Promise((res, rej) => {
const readable = fs.createReadStream(path)
const hash = crypto.createHash('md5')
readable
.on('error', rej)
.on('end', () => {
res(hash.digest('hex'))
})
.pipe(hash)
.on('error', rej)
}))(),
// mime
((): Promise<[string, string | null]> => new Promise((res, rej) => {
const readable = fs.createReadStream(path)
readable
.on('error', rej)
.once('data', (buffer: Buffer) => {
readable.destroy()
const type = fileType(buffer)
if (!type) {
return res(['application/octet-stream', null])
}
return res([type.mime, type.ext])
})
}))(),
// size
((): Promise<number> => new Promise((res, rej) => {
fs.stat(path, (err, stats) => {
if (err) return rej(err)
res(stats.size)
})
}))()
]))
.then(async ([path, hash, [mime, ext], size]) => {
log(`hash: ${hash}, mime: ${mime}, ext: ${ext}, size: ${size}`)
// Calculate drive usage // detect name
const usage = ((await DriveFile const detectedName: string = name || (ext ? `untitled.${ext}` : 'untitled');
.aggregate([
{ $match: { 'metadata.user_id': user._id } },
{ $project: {
length: true
}},
{ $group: {
_id: null,
usage: { $sum: '$length' }
}}
]))[0] || {
usage: 0
}).usage;
log(`drive usage is ${usage}`); if (!force) {
// Check if there is a file with the same hash
const much = await DriveFile.findOne({
md5: hash,
'metadata.user_id': user._id
});
// If usage limit exceeded if (much !== null) {
if (usage + size > user.drive_capacity) { log('file with same hash is found');
return reject('no-free-space'); return resolve(much);
} } else {
log('file with same hash is not found');
// If the folder is specified }
let folder: any = null;
if (folderId !== null) {
folder = await DriveFolder
.findOne({
_id: folderId,
user_id: user._id
});
if (folder === null) {
return reject('folder-not-found');
}
}
let properties: any = null;
// If the file is an image
if (/^image\/.*$/.test(mime)) {
// Calculate width and height to save in property
const g = gm(data, name);
const size = await prominence(g).size();
properties = {
width: size.width,
height: size.height
};
log('image width and height is calculated');
}
// Create DriveFile document
const file = await addToGridFS(name, data, mime, {
user_id: user._id,
folder_id: folder !== null ? folder._id : null,
comment: comment,
properties: properties
});
log(`drive file has been created ${file._id}`);
resolve(file);
// Serialize
const fileObj = await serialize(file);
// Publish drive_file_created event
event(user._id, 'drive_file_created', fileObj);
// Register to search database
if (config.elasticsearch.enable) {
const es = require('../../db/elasticsearch');
es.index({
index: 'misskey',
type: 'drive_file',
id: file._id.toString(),
body: {
name: file.name,
user_id: user._id.toString()
} }
});
} const [properties, folder] = await Promise.all([
// properties
(async () => {
if (!/^image\/.*$/.test(mime)) {
return null
}
// If the file is an image, calculate width and height to save in property
const g = gm(data, name);
const size = await prominence(g).size();
const properties = {
width: size.width,
height: size.height
};
log('image width and height is calculated');
return properties
})(),
// folder
(async () => {
if (!folderId) {
return null
}
const driveFolder = await DriveFolder.findOne({
_id: folderId,
user_id: user._id
})
if (!driveFolder) {
throw 'folder-not-found'
}
return driveFolder
})(),
// usage checker
(async () => {
// Calculate drive usage
const usage = await DriveFile
.aggregate([
{ $match: { 'metadata.user_id': user._id } },
{
$project: {
length: true
}
},
{
$group: {
_id: null,
usage: { $sum: '$length' }
}
}
])
.then((aggregates: any[]) => {
if (aggregates.length > 0) {
return aggregates[0].usage
}
return 0
});
log(`drive usage is ${usage}`);
// If usage limit exceeded
if (usage + size > user.drive_capacity) {
throw 'no-free-space';
}
})()
])
const readable = fs.createReadStream(path)
return addToGridFS(name, readable, mime, {
user_id: user._id,
folder_id: folder !== null ? folder._id : null,
comment: comment,
properties: properties
})
})
.then(file => {
log(`drive file has been created ${file._id}`);
resolve(file)
return serialize(file)
})
.then(serializedFile => {
// Publish drive_file_created event
event(user._id, 'drive_file_created', fileObj);
// Register to search database
if (config.elasticsearch.enable) {
const es = require('../../db/elasticsearch');
es.index({
index: 'misskey',
type: 'drive_file',
id: file._id.toString(),
body: {
name: file.name,
user_id: user._id.toString()
}
});
}
})
.catch(reject)
}); });

View File

@ -2,11 +2,17 @@
* Module dependencies * Module dependencies
*/ */
import * as URL from 'url'; import * as URL from 'url';
const download = require('download');
import $ from 'cafy'; import $ from 'cafy';
import { validateFileName } from '../../../models/drive-file'; import { validateFileName } from '../../../models/drive-file';
import serialize from '../../../serializers/drive-file'; import serialize from '../../../serializers/drive-file';
import create from '../../../common/add-file-to-drive'; import create from '../../../common/add-file-to-drive';
import * as debug from 'debug';
import * as tmp from 'tmp';
import * as fs from 'fs';
import * as request from 'request';
import * as crypto from 'crypto';
const log = debug('misskey:endpoint:upload_from_url')
/** /**
* Create a file from a URL * Create a file from a URL
@ -15,7 +21,7 @@ import create from '../../../common/add-file-to-drive';
* @param {any} user * @param {any} user
* @return {Promise<any>} * @return {Promise<any>}
*/ */
module.exports = (params, user) => new Promise(async (res, rej) => { module.exports = (params, user) => new Promise((res, rej) => {
// Get 'url' parameter // Get 'url' parameter
// TODO: Validate this url // TODO: Validate this url
const [url, urlErr] = $(params.url).string().$; const [url, urlErr] = $(params.url).string().$;
@ -30,15 +36,57 @@ module.exports = (params, user) => new Promise(async (res, rej) => {
const [folderId = null, folderIdErr] = $(params.folder_id).optional.nullable.id().$; const [folderId = null, folderIdErr] = $(params.folder_id).optional.nullable.id().$;
if (folderIdErr) return rej('invalid folder_id param'); if (folderIdErr) return rej('invalid folder_id param');
// Download file // Create temp file
const data = await download(url); new Promise((res, rej) => {
tmp.file((e, path) => {
// Create file if (e) return rej(e)
const driveFile = await create(user, data, name, null, folderId); res(path)
})
// Serialize })
const fileObj = await serialize(driveFile); // Download file
.then((path: string) => new Promise((res, rej) => {
// Response const writable = fs.createWriteStream(path)
res(fileObj); request(url)
.on('error', rej)
.on('end', () => {
writable.close()
res(path)
})
.pipe(writable)
.on('error', rej)
}))
// Calculate hash & content-type
.then((path: string) => new Promise((res, rej) => {
const readable = fs.createReadStream(path)
const hash = crypto.createHash('md5')
readable
.on('error', rej)
.on('end', () => {
hash.end()
res([path, hash.digest('hex')])
})
.pipe(hash)
.on('error', rej)
}))
// Create file
.then((rv: string[]) => new Promise((res, rej) => {
const [path, hash] = rv
create(user, {
stream: fs.createReadStream(path),
name,
hash
}, null, folderId)
.then(driveFile => {
res(driveFile)
// clean-up
fs.unlink(path, (e) => {
if (e) log(e.stack)
})
})
.catch(rej)
}))
// Serialize
.then(serialize)
.then(res)
.catch(rej)
}); });