Commit 6ff2ccf9 authored by Anton

Move archive functionality to mbjs-archive

parent 113ed35e
......@@ -20,6 +20,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed
- Moved archive functionality to [mbjs-archive](https://gitlab.rlp.net/motionbank/mbjs/archive) module
- Updated [mbjs-data-models](https://gitlab.rlp.net/motionbank/mbjs/data-models)
to 0.1.2 ([release_0_1](https://gitlab.rlp.net/motionbank/mbjs/data-models/commits/release_0_1)
branch)
......
......@@ -2322,6 +2322,49 @@
"@arr/every": "^1.0.0"
}
},
"mbjs-archive": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/mbjs-archive/-/mbjs-archive-1.0.0.tgz",
"integrity": "sha512-h/IEh5oAxZvJNLv9TIc/sg8nr2Od5NvnLHWW/9+PqFn768aXc7ERAB5hIozqmdSPKThsy7v1P47ZexSMaRw0EA==",
"requires": {
"mbjs-utils": "0.0.6",
"mz": "^2.7.0",
"rimraf": "^2.6.3",
"yauzl": "^2.10.0",
"yazl": "^2.5.1"
},
"dependencies": {
"glob": {
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz",
"integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==",
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"rimraf": {
"version": "2.6.3",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz",
"integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==",
"requires": {
"glob": "^7.1.3"
}
},
"yazl": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/yazl/-/yazl-2.5.1.tgz",
"integrity": "sha512-phENi2PLiHnHb6QBVot+dJnaAZ0xosj7p3fWl+znIjBDlnMI2PsZCJZ306BPTFOaHf5qdDEI8x5qFrSOBN5vrw==",
"requires": {
"buffer-crc32": "~0.2.3"
}
}
}
},
"mbjs-data-models": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/mbjs-data-models/-/mbjs-data-models-0.1.2.tgz",
......
......@@ -46,7 +46,7 @@ const setup = async function () {
* Configure archives
*/
const { setupArchives } = require('./lib/archives')
const setupArchives = require('./lib/archives')
setupArchives(api, maps, annotations)
await api.start()
......
const
os = require('os'),
path = require('path'),
fs = require('mz/fs'),
multer = require('multer'),
send = require('@polka/send-type'),
createArchive = require('./create'),
readArchive = require('./read')
config = require('config'),
Minio = require('minio'),
{ ObjectUtil } = require('mbjs-utils'),
{ read, write } = require('mbjs-archive')
const setupArchives = function (api, mapService, annotationService) {
const upload = multer({ dest: os.tmpdir() })
......@@ -42,12 +46,23 @@ const setupArchives = function (api, mapService, annotationService) {
}
}
const url = await createArchive(api, data)
const
dir = path.join(os.tmpdir(), `archive_${ObjectUtil.slug(data.map.title)}_${data.map.uuid}`),
archivePath = await write(dir, data)
const opts = Object.assign({}, config.assets.client)
opts.useSSL = config.assets.client.useSSL && (config.assets.client.useSSL === true || config.assets.client.useSSL === 'true')
opts.port = config.assets.client.port ? parseInt(config.assets.client.port) : undefined
const minioClient = new Minio.Client(opts)
await minioClient.fPutObject(config.assets.archivesBucket, path.basename(archivePath), archivePath, { 'Content-Type': 'application/zip' })
await fs.unlink(archivePath)
const url = await minioClient.presignedGetObject(config.assets.archivesBucket, path.basename(archivePath))
send(res, 200, url)
})
api.app.post('/archives/maps/upload', async function (req, res) {
upload.single('file')(req, res, async () => {
const results = await readArchive(req.file.path)
const results = await read(req.file.path)
const
copy = typeof req.body.title === 'string',
overrideAuthor = req.body.overrideAuthor === 'true'
......@@ -131,8 +146,4 @@ const setupArchives = function (api, mapService, annotationService) {
})
}
module.exports = {
setupArchives,
createArchive,
readArchive
}
module.exports = setupArchives
const
yazl = require('yazl'),
path = require('path'),
fs = require('mz/fs'),
rimraf = require('rimraf'),
os = require('os'),
config = require('config'),
Minio = require('minio'),
{ Assert, ObjectUtil } = require('mbjs-utils')
/**
 * Build a ZIP archive containing a map and its annotations, upload it to the
 * configured Minio bucket, and return a presigned download URL.
 *
 * @param {object} api - API instance; used for `captureException` error reporting
 * @param {object} data - Payload with `map` (object) and `annotations` (array of objects with `uuid`)
 * @returns {Promise<string>} Presigned GET URL for the uploaded archive
 * @throws {Error} When validation, filesystem, ZIP streaming, or upload fails
 */
const createArchive = async function (api, data) {
  Assert.isType(data.map, 'object', 'data.map must be object')
  Assert.ok(Array.isArray(data.annotations), 'data.annotations must be array')
  // Staging directory in the OS tmp dir, unique per map
  const dir = path.join(os.tmpdir(), `archive_${ObjectUtil.slug(data.map.title)}_${data.map.uuid}`)
  // Promisified rimraf over the staging dir (used before AND after building the ZIP)
  const removeStagingDir = () => new Promise((resolve, reject) => {
    rimraf(dir, err => {
      if (err) {
        api.captureException(err)
        return reject(err)
      }
      resolve()
    })
  })
  // Clear leftovers from any previous (possibly failed) run
  await removeStagingDir()
  const archive = new yazl.ZipFile()
  await fs.mkdir(dir)
  await fs.mkdir(path.join(dir, 'maps'))
  await fs.mkdir(path.join(dir, 'annotations'))
  // Write the map and each annotation as individual JSON files and add them to the ZIP
  const mapfile = path.join('maps', `${data.map.uuid}.json`)
  await fs.writeFile(path.join(dir, mapfile), JSON.stringify(data.map))
  archive.addFile(path.join(dir, mapfile), mapfile)
  for (let a of data.annotations) {
    const annofile = path.join('annotations', `${a.uuid}.json`)
    await fs.writeFile(path.join(dir, annofile), JSON.stringify(a))
    archive.addFile(path.join(dir, annofile), annofile)
  }
  archive.end()
  // Stream the finished ZIP next to the staging dir
  const archivePath = `${dir}.zip`
  await new Promise((resolve, reject) => {
    archive.outputStream.pipe(fs.createWriteStream(archivePath))
      .on('error', err => {
        reject(err)
      })
      .on('close', () => {
        resolve()
      })
  })
  // FIX: remove the staging directory once the ZIP exists — previously it was
  // left behind in the tmp dir on every run (only the .zip was unlinked)
  await removeStagingDir()
  // Build Minio client options; config values may arrive as strings, coerce explicitly
  const opts = Object.assign({}, config.assets.client)
  opts.useSSL = config.assets.client.useSSL && (config.assets.client.useSSL === true || config.assets.client.useSSL === 'true')
  // FIX: explicit radix for parseInt
  opts.port = config.assets.client.port ? parseInt(config.assets.client.port, 10) : undefined
  const minioClient = new Minio.Client(opts)
  // Upload, delete the local ZIP, then hand back a presigned download link
  await minioClient.fPutObject(config.assets.archivesBucket, path.basename(archivePath), archivePath, { 'Content-Type': 'application/zip' })
  await fs.unlink(archivePath)
  const url = await minioClient.presignedGetObject(config.assets.archivesBucket, path.basename(archivePath))
  return url
}
module.exports = createArchive
const
yauzl = require('yauzl'),
path = require('path')
const readArchive = function (archivePath) {
const results = {}
const getFile = (entry, zipfile) => {
return new Promise((resolve, reject) => {
let data = ''
zipfile.openReadStream(entry, function (err, readStream) {
if (err) return reject(err)
readStream.on('data', chunk => {
data += chunk.toString()
})
readStream.on('end', () => resolve(data))
readStream.on('error', err => reject(err))
})
})
}
return new Promise((resolve, reject) => {
yauzl.open(archivePath, {lazyEntries: true}, async (err, zipfile) => {
if (err) return reject(err)
zipfile.readEntry()
zipfile.on('end', () => resolve(results))
zipfile.on('error', err => reject(err))
zipfile.on('entry', async entry => {
if (/\/$/.test(entry.fileName)) zipfile.readEntry()
else {
const type = path.dirname(entry.fileName)
const data = await getFile(entry, zipfile)
const obj = JSON.parse(data)
if (!results[type]) results[type] = []
results[type].push(obj)
zipfile.readEntry()
}
})
})
})
}
module.exports = readArchive
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment