Commit 22028ac0 authored by Anton's avatar Anton

Refactor archives

parent a15980ab
......@@ -46,8 +46,8 @@ const setup = async function () {
* Configure archives
*/
const archives = require('./lib/archives')
archives.setupArchives(api, maps, annotations)
const { setupArchives } = require('./lib/archives')
setupArchives(api, maps, annotations)
await api.start()
}
......
const
yazl = require('yazl'),
path = require('path'),
fs = require('mz/fs'),
rimraf = require('rimraf'),
os = require('os'),
config = require('config'),
Minio = require('minio'),
{ Assert, ObjectUtil } = require('mbjs-utils')
/**
 * Build a ZIP archive for a map and its annotations, upload it to the
 * configured Minio archives bucket and return a presigned download URL.
 *
 * @param {object} api  - service API object; only `captureException` is used here
 * @param {object} data - `{ map: object, annotations: object[] }`;
 *                        `map.title` and `map.uuid` are read, each annotation
 *                        must carry a `uuid` — TODO confirm against callers
 * @returns {Promise<string>} presigned GET URL for the uploaded .zip
 * @throws on assertion failure, filesystem errors, or Minio errors
 */
const createArchive = async function (api, data) {
  Assert.isType(data.map, 'object', 'data.map must be object')
  Assert.ok(Array.isArray(data.annotations), 'data.annotations must be array')

  // Scratch directory name is deterministic per map, so remove any stale
  // directory left behind by a previous (possibly failed) run.
  const dir = path.join(os.tmpdir(), `archive_${ObjectUtil.slug(data.map.title)}_${data.map.uuid}`)
  await new Promise((resolve, reject) => {
    rimraf(dir, err => {
      if (err) {
        api.captureException(err)
        return reject(err)
      }
      resolve()
    })
  })

  const archive = new yazl.ZipFile()
  await fs.mkdir(dir)
  await fs.mkdir(path.join(dir, 'maps'))
  await fs.mkdir(path.join(dir, 'annotations'))

  // One JSON file per object: maps/<uuid>.json and annotations/<uuid>.json.
  // The relative path doubles as the entry name inside the zip.
  const mapfile = path.join('maps', `${data.map.uuid}.json`)
  await fs.writeFile(path.join(dir, mapfile), JSON.stringify(data.map))
  archive.addFile(path.join(dir, mapfile), mapfile)
  for (let a of data.annotations) {
    const annofile = path.join('annotations', `${a.uuid}.json`)
    await fs.writeFile(path.join(dir, annofile), JSON.stringify(a))
    archive.addFile(path.join(dir, annofile), annofile)
  }
  archive.end()

  // Stream the zip to <dir>.zip and wait until the write stream closes.
  const archivePath = `${dir}.zip`
  await new Promise((resolve, reject) => {
    archive.outputStream.pipe(fs.createWriteStream(archivePath))
      .on('error', err => reject(err))
      .on('close', () => resolve())
  })

  const opts = Object.assign({}, config.assets.client)
  // FIX: normalize useSSL to a strict boolean — config values may arrive as
  // strings, and the previous expression could leave undefined/'' in opts.
  opts.useSSL = config.assets.client.useSSL === true || config.assets.client.useSSL === 'true'
  // FIX: always pass a radix to parseInt.
  opts.port = config.assets.client.port ? parseInt(config.assets.client.port, 10) : undefined
  const minioClient = new Minio.Client(opts)
  await minioClient.fPutObject(config.assets.archivesBucket, path.basename(archivePath), archivePath, { 'Content-Type': 'application/zip' })
  // The local zip is no longer needed once uploaded.
  await fs.unlink(archivePath)
  return minioClient.presignedGetObject(config.assets.archivesBucket, path.basename(archivePath))
}
module.exports = createArchive
const
yazl = require('yazl'),
yauzl = require('yauzl'),
path = require('path'),
fs = require('mz/fs'),
rimraf = require('rimraf'),
os = require('os'),
multer = require('multer'),
send = require('@polka/send-type'),
config = require('config'),
Minio = require('minio'),
{ Assert, ObjectUtil } = require('mbjs-utils')
createArchive = require('./create'),
readArchive = require('./read')
module.exports.setupArchives = (api, mapService, annotationService) => {
const setupArchives = function (api, mapService, annotationService) {
const upload = multer({ dest: os.tmpdir() })
api.app.post('/archives/maps', async (req, res) => {
let data = {}
......@@ -33,14 +28,14 @@ module.exports.setupArchives = (api, mapService, annotationService) => {
await annotationService.findHandler(request, async result => {
if (result.error) return send(res, result.code)
data.annotations = result.data.items
const url = await exports.createArchive(api, data)
const url = await createArchive(api, data)
send(res, 200, url)
})
})
})
api.app.post('/archives/maps/upload', async function (req, res) {
upload.single('file')(req, res, async () => {
const results = await exports.readArchive(req.file.path)
const results = await readArchive(req.file.path)
const copy = req.body.title || false
let hasDuplicates = false
if (results.maps && !copy) {
......@@ -118,94 +113,8 @@ module.exports.setupArchives = (api, mapService, annotationService) => {
})
}
// NOTE(review): pre-refactor version of createArchive, removed by this commit
// in favor of the standalone ./create module (same logic, exported directly).
// Builds a zip of one map + its annotations, uploads it to Minio and returns
// a presigned download URL.
module.exports.createArchive = async (api, data) => {
// Validate the expected payload shape before touching the filesystem.
Assert.isType(data.map, 'object', 'data.map must be object')
Assert.ok(Array.isArray(data.annotations), 'data.annotations must be array')
// Deterministic scratch dir per map (slugged title + uuid) under the OS tmpdir.
const dir = path.join(os.tmpdir(), `archive_${ObjectUtil.slug(data.map.title)}_${data.map.uuid}`)
// Remove any stale directory from a previous run; report failures upstream.
await new Promise((resolve, reject) => {
rimraf(dir, err => {
if (err) {
api.captureException(err)
return reject(err)
}
resolve()
})
})
const archive = new yazl.ZipFile()
await fs.mkdir(dir)
await fs.mkdir(path.join(dir, 'maps'))
await fs.mkdir(path.join(dir, 'annotations'))
// Write the map as maps/<uuid>.json; the relative path is reused as the
// entry name inside the zip.
const mapfile = path.join('maps', `${data.map.uuid}.json`)
await fs.writeFile(path.join(dir, mapfile), JSON.stringify(data.map))
archive.addFile(path.join(dir, mapfile), mapfile)
// One annotations/<uuid>.json entry per annotation, written sequentially.
for (let a of data.annotations) {
const annofile = path.join('annotations', `${a.uuid}.json`)
await fs.writeFile(path.join(dir, annofile), JSON.stringify(a))
archive.addFile(path.join(dir, annofile), annofile)
}
archive.end()
// Stream the finished zip to <dir>.zip and wait for the write to complete.
const archivePath = `${dir}.zip`
await new Promise((resolve, reject) => {
archive.outputStream.pipe(fs.createWriteStream(archivePath))
.on('error', err => {
reject(err)
})
.on('close', () => {
resolve()
})
})
// Build Minio client options from config; useSSL/port may be strings in config.
const opts = Object.assign({}, config.assets.client)
opts.useSSL = config.assets.client.useSSL && (config.assets.client.useSSL === true || config.assets.client.useSSL === 'true')
opts.port = config.assets.client.port ? parseInt(config.assets.client.port) : undefined
const minioClient = new Minio.Client(opts)
// Upload, delete the local copy, then hand back a presigned GET URL.
await minioClient.fPutObject(config.assets.archivesBucket, path.basename(archivePath), archivePath, { 'Content-Type': 'application/zip' })
await fs.unlink(archivePath)
const url = await minioClient.presignedGetObject(config.assets.archivesBucket, path.basename(archivePath))
return url
}
module.exports.readArchive = archivePath => {
const results = {}
const getFile = (entry, zipfile) => {
return new Promise((resolve, reject) => {
let data = ''
zipfile.openReadStream(entry, function (err, readStream) {
if (err) return reject(err)
readStream.on('data', chunk => {
data += chunk.toString()
})
readStream.on('end', () => resolve(data))
readStream.on('error', err => reject(err))
})
})
}
return new Promise((resolve, reject) => {
yauzl.open(archivePath, {lazyEntries: true}, async (err, zipfile) => {
if (err) return reject(err)
zipfile.readEntry()
zipfile.on('end', () => resolve(results))
zipfile.on('error', err => reject(err))
zipfile.on('entry', async entry => {
if (/\/$/.test(entry.fileName)) zipfile.readEntry()
else {
const type = path.dirname(entry.fileName)
const data = await getFile(entry, zipfile)
const obj = JSON.parse(data)
if (!results[type]) results[type] = []
results[type].push(obj)
zipfile.readEntry()
}
})
})
})
module.exports = {
setupArchives,
createArchive,
readArchive
}
const
yauzl = require('yauzl'),
path = require('path')
/**
 * Read a .zip archive (as produced by createArchive) and parse every JSON
 * entry, grouping results by the entry's directory name.
 *
 * @param {string} archivePath - filesystem path to the .zip file
 * @returns {Promise<object>} e.g. `{ maps: [...], annotations: [...] }` —
 *          keys are zip directory names, values arrays of parsed objects
 */
const readArchive = function (archivePath) {
  const results = {}

  // Read one zip entry fully into a string (entries are small JSON files).
  const getFile = (entry, zipfile) => {
    return new Promise((resolve, reject) => {
      let data = ''
      zipfile.openReadStream(entry, function (err, readStream) {
        if (err) return reject(err)
        readStream.on('data', chunk => {
          data += chunk.toString()
        })
        readStream.on('end', () => resolve(data))
        readStream.on('error', err => reject(err))
      })
    })
  }

  return new Promise((resolve, reject) => {
    yauzl.open(archivePath, {lazyEntries: true}, async (err, zipfile) => {
      if (err) return reject(err)
      // lazyEntries: each readEntry() call emits exactly one 'entry' event.
      zipfile.readEntry()
      zipfile.on('end', () => resolve(results))
      zipfile.on('error', err => reject(err))
      zipfile.on('entry', async entry => {
        // Directory entries end with '/': nothing to read, move on.
        if (/\/$/.test(entry.fileName)) zipfile.readEntry()
        else {
          // FIX: without this try/catch, a getFile rejection or a JSON.parse
          // error inside this async handler became an unhandled promise
          // rejection and readArchive's promise never settled.
          try {
            const type = path.dirname(entry.fileName)
            const data = await getFile(entry, zipfile)
            const obj = JSON.parse(data)
            if (!results[type]) results[type] = []
            results[type].push(obj)
            zipfile.readEntry()
          } catch (entryErr) {
            reject(entryErr)
          }
        }
      })
    })
  })
}
module.exports = readArchive
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment