Commit e091c58b authored by Anton Koch's avatar Anton Koch

Merge branch 'data-model-update' into 'master'

Data model update

See merge request !1
parents 37fdb40d 150871bf
Pipeline #9560 passed with stage
code_quality:
image: docker:stable
variables:
DOCKER_DRIVER: overlay2
allow_failure: true
services:
- docker:stable-dind
script:
- export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
- docker run
--env SOURCE_CODE="$PWD"
--volume "$PWD":/code
--volume /var/run/docker.sock:/var/run/docker.sock
"registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
artifacts:
reports:
codequality: gl-code-quality-report.json
...@@ -7,7 +7,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ...@@ -7,7 +7,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased] ## [Unreleased]
No changes yet. ### Updated
- Updated [mbjs-data-models](https://gitlab.rlp.net/motionbank/mbjs/data-models)
to 1.2.4
- Updated [mbjs-generic-api](https://gitlab.rlp.net/motionbank/mbjs/generic-api)
to 1.1.3
### Changed
- Uses api.uriPrefix in config (adds trailing slash over api.uriBase)
## [1.2.3] - 2019-03-03 ## [1.2.3] - 2019-03-03
......
const
Acl = require('acl'),
Backend = Acl.mongodbBackend,
MongoClient = require('mongodb').MongoClient,
path = require('path'),
fs = require('mz/fs'),
{ MongoDB } = require('mbjs-persistence'),
{ ObjectUtil } = require('mbjs-utils'),
config = require('config')
// Runtime configuration comes entirely from the environment:
//   FOLDER      - path to the export directory to import (required)
//   AUTHOR_UUID - if set, overrides the author id on every imported entry
//   AUTHOR_NAME - if set, overrides the author name on every imported entry
const
folder = process.env.FOLDER,
authorUUID = process.env.AUTHOR_UUID,
authorName = process.env.AUTHOR_NAME
// Fail fast before touching any database if no input folder was given.
if (!folder) throw new Error('no input folder specified')
/**
 * Normalise an entry's author field and apply environment overrides.
 *
 * A missing/falsy author becomes an empty object, a plain string author is
 * wrapped as `{ id }`, and AUTHOR_NAME / AUTHOR_UUID (when set) overwrite
 * the name and id respectively. The entry is mutated and returned.
 */
const updateAuthor = entry => {
  let author = entry.author
  if (!author) author = {}
  if (typeof author === 'string') author = { id: author }
  if (authorName) author.name = authorName
  if (authorUUID) author.id = authorUUID
  entry.author = author
  return entry
}
/**
 * Import an exported data folder into MongoDB.
 *
 * Expects the layout `<folder>/maps/*.json`, `<folder>/annotations/*.json`
 * and `<folder>/acl/*.json`. Maps and annotations are upserted by `uuid`
 * (authors normalised via updateAuthor on the way in); ACL files are named
 * `<resource>.json` and contain `{ role, permissions }`.
 */
const proc = async function (folder) {
  // Upsert every JSON file in <folder>/<name> into the collection <name>.
  // Shared by the maps and annotations imports, which are otherwise identical.
  const importCollection = async name => {
    const client = new MongoDB(
      ObjectUtil.merge({ name, logger: console },
        config.get('resources.mongodb')),
      'uuid'
    )
    await client.connect()
    const files = await fs.readdir(path.join(folder, name))
    for (let f of files) {
      if (f[0] === '.') continue // skip hidden files (.DS_Store etc.)
      const file = await fs.readFile(path.join(folder, name, f))
      const entry = JSON.parse(file)
      const existing = await client.get(entry.uuid)
      if (existing) await client.update(entry.uuid, updateAuthor(entry))
      else await client.create(updateAuthor(entry))
    }
  }
  await importCollection('maps')
  await importCollection('annotations')
  // Restore ACL permissions. The acl package is callback-based, so wrap
  // both the connect and each allow() call in a Promise.
  const cfg = config.get('acl.mongodb')
  cfg.logger = console
  const db = await new Promise((resolve, reject) => {
    MongoClient.connect(cfg.url, function (err, client) {
      if (err) return reject(err)
      cfg.logger.info(`ACL connected at ${cfg.url}/${cfg.dbName}`)
      resolve(client.db(cfg.dbName))
    })
  })
  const acl = new Acl(new Backend(db))
  const acls = await fs.readdir(path.join(folder, 'acl'))
  for (let a of acls) {
    if (a[0] === '.') continue
    const file = await fs.readFile(path.join(folder, 'acl', a))
    const entry = JSON.parse(file)
    await new Promise((resolve, reject) => {
      // The resource name is the ACL file name without its extension.
      const resource = a.replace('.json', '')
      acl.allow(entry.role, resource, entry.permissions, err => {
        if (err) reject(err)
        else resolve()
      })
    })
  }
}
// Open DB connections keep the event loop alive, so exit explicitly.
// Surface failures with a non-zero exit instead of leaving the promise
// rejection unhandled and the process hanging.
proc(folder)
  .then(() => process.exit(0))
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
const
{ MongoDB } = require('mbjs-persistence'),
{ ObjectUtil, uuid } = require('mbjs-utils'),
config = require('config'),
{ Annotation, Map } = require('mbjs-data-models/src/models')
// Legacy public app URLs that older annotations used as target ids;
// the migration below rewrites these onto the new id scheme.
const
timelinePrefix = 'https://app.motionbank.org/piecemaker/timelines/',
gridPrefix = 'https://app.motionbank.org/mosys/grids/'
// New canonical URI prefix (note the trailing slash).
const newPrefix = 'http://id.motionbank.org/'
// Bare-UUID target rewrites, keyed by target.type: where the target id is a
// plain UUID, prefix it onto the new URI scheme. `label` only feeds the log
// message, matching the original per-type console output.
const targetTypeRules = {
  Video: { path: 'annotations/', label: 'video' },
  Annotation: { path: 'annotations/', label: 'annotation' },
  Timeline: { path: 'maps/', label: 'timeline' },
  '2DGrid': { path: 'maps/', label: 'grid' }
}
/**
 * One-off data-model migration: for every map and annotation,
 *  - blank out mongo-internal ('_'-prefixed) keys,
 *  - normalise string authors to `{ id }`,
 *  - rewrite legacy target ids (full app URLs or bare UUIDs) onto the
 *    new id.motionbank.org URI scheme,
 *  - round-trip the record through its data-model class and update it.
 */
const proc = async function () {
  // Blank internal keys so they are dropped when the record is re-saved.
  const stripInternalKeys = record => {
    Object.keys(record).forEach(key => {
      if (key[0] === '_') record[key] = undefined
    })
  }
  const normalizeAuthor = record => {
    if (typeof record.author === 'string') {
      record.author = { id: record.author }
      console.log('updating author to', record.author)
    }
  }
  const rewriteTarget = anno => {
    const target = anno.target
    if (!target || typeof target.id !== 'string') return
    // Full legacy URLs: both timelines and grids became maps.
    if (target.id.indexOf(timelinePrefix) === 0) {
      target.id = target.id.replace(timelinePrefix, `${newPrefix}maps/`)
      console.log('updating timeline target to', target.id)
    }
    if (target.id.indexOf(gridPrefix) === 0) {
      target.id = target.id.replace(gridPrefix, `${newPrefix}maps/`)
      console.log('updating grid target to', target.id)
    }
    // Bare UUID targets: route by the declared target type.
    const rule = targetTypeRules[target.type]
    if (rule && uuid.isUUID(target.id)) {
      target.id = `${newPrefix}${rule.path}${target.id}`
      console.log(`updating ${rule.label} target to`, target.id)
    }
  }
  console.log('MAPS\n--------------------------\n\n')
  const mapsClient = new MongoDB(
    ObjectUtil.merge({ name: 'maps', logger: console },
      config.get('resources.mongodb')),
    'uuid'
  )
  await mapsClient.connect()
  const maps = await mapsClient.find({})
  for (let map of maps) {
    stripInternalKeys(map)
    normalizeAuthor(map)
    // NB: `Map` here is the mbjs data model, shadowing the global Map.
    const mi = new Map(map)
    await mapsClient.update(mi.uuid, mi.toObject(), {})
  }
  console.log('ANNOTATIONS\n-------------------\n\n')
  const annoClient = new MongoDB(
    ObjectUtil.merge({ name: 'annotations', logger: console },
      config.get('resources.mongodb')),
    'uuid'
  )
  await annoClient.connect()
  const annos = await annoClient.find({})
  for (let anno of annos) {
    stripInternalKeys(anno)
    normalizeAuthor(anno)
    rewriteTarget(anno)
    const ai = new Annotation(anno)
    await annoClient.update(ai.uuid, ai.toObject(), {})
  }
}
// Exit explicitly (open DB connections keep the loop alive) and report
// failures with a non-zero exit instead of an unhandled rejection.
proc()
  .then(() => process.exit(0))
  .catch(err => {
    console.error(err)
    process.exit(1)
  })
{ {
"api": { "api": {
"uriBase": "API_URI_BASE", "uriBase": "API_URI_BASE",
"uriPrefix": "API_URI_PREFIX",
"apiHost": "API_HOST", "apiHost": "API_HOST",
"transcoderHost": "API_TRANSCODER_HOST", "transcoderHost": "API_TRANSCODER_HOST",
"auth0AppMetadataPrefix": "AUTH0_APP_METADATA_PREFIX" "auth0AppMetadataPrefix": "AUTH0_APP_METADATA_PREFIX"
......
{ {
"api": { "api": {
"uriBase": "http://id.motionbank.org", "uriBase": "http://id.motionbank.org",
"uriPrefix": "http://id.motionbank.org/",
"apiHost": "https://api.motionbank.org", "apiHost": "https://api.motionbank.org",
"transcoderHost": "https://transcoder.motionbank.org", "transcoderHost": "https://transcoder.motionbank.org",
"auth0AppMetadataPrefix": "https://app.motionbank.org/app_metadata/" "auth0AppMetadataPrefix": "https://app.motionbank.org/app_metadata/"
......
{ {
"api": { "api": {
"uriBase": "http://id.motionbank.org", "uriBase": "http://id.motionbank.org",
"uriPrefix": "http://id.motionbank.org/",
"apiHost": "http://localhost:3030", "apiHost": "http://localhost:3030",
"transcoderHost": "http://localhost:4040", "transcoderHost": "http://localhost:4040",
"auth0AppMetadataPrefix": "https://app.motionbank.org/app_metadata/" "auth0AppMetadataPrefix": "https://app.motionbank.org/app_metadata/"
......
This diff is collapsed.
...@@ -5,8 +5,11 @@ ...@@ -5,8 +5,11 @@
"main": "src/index.js", "main": "src/index.js",
"scripts": { "scripts": {
"docker-build": "docker build -t motionbank/api:latest .", "docker-build": "docker build -t motionbank/api:latest .",
"docker-build-dev": "docker build -t motionbank/api:dev .",
"docker-push": "docker push motionbank/api:latest", "docker-push": "docker push motionbank/api:latest",
"docker-push-dev": "docker push motionbank/api:dev",
"docker-release": "npm run docker-build && npm run docker-push", "docker-release": "npm run docker-build && npm run docker-push",
"docker-release-dev": "npm run docker-build-dev && npm run docker-push-dev",
"lint": "eslint src/. --config .eslintrc.js", "lint": "eslint src/. --config .eslintrc.js",
"metapak": "metapak", "metapak": "metapak",
"start": "node src", "start": "node src",
...@@ -43,8 +46,8 @@ ...@@ -43,8 +46,8 @@
"axios": "^0.18.0", "axios": "^0.18.0",
"config": "^2.0.1", "config": "^2.0.1",
"luxon": "^1.3.3", "luxon": "^1.3.3",
"mbjs-data-models": "0.1.2", "mbjs-data-models": "1.2.4",
"mbjs-generic-api": "0.8.4", "mbjs-generic-api": "1.1.3",
"mbjs-persistence": "1.0.0", "mbjs-persistence": "1.0.0",
"mbjs-utils": "0.0.6", "mbjs-utils": "0.0.6",
"minio": "^7.0.1", "minio": "^7.0.1",
......
...@@ -30,6 +30,9 @@ const setup = async function () { ...@@ -30,6 +30,9 @@ const setup = async function () {
const maps = new Service('maps', api, models.Map) const maps = new Service('maps', api, models.Map)
// maps.on('message', message => api._sockets.write(message)) // maps.on('message', message => api._sockets.write(message))
const cells = new Service('cells', api, models.Cell)
// cells.on('message', message => api._sockets.write(message))
const documents = new Service('documents', api, models.Document) const documents = new Service('documents', api, models.Document)
// documents.on('message', message => api._sockets.write(message)) // documents.on('message', message => api._sockets.write(message))
...@@ -47,7 +50,7 @@ const setup = async function () { ...@@ -47,7 +50,7 @@ const setup = async function () {
*/ */
const archives = require('./lib/archives') const archives = require('./lib/archives')
archives.setupArchives(api, maps, annotations) archives.setupArchives(api, maps, annotations, cells)
await api.start() await api.start()
} }
......
...@@ -9,15 +9,16 @@ const ...@@ -9,15 +9,16 @@ const
send = require('@polka/send-type'), send = require('@polka/send-type'),
config = require('config'), config = require('config'),
Minio = require('minio'), Minio = require('minio'),
{ Assert, ObjectUtil } = require('mbjs-utils') { Assert, ObjectUtil } = require('mbjs-utils'),
parseURI = require('mbjs-data-models/src/lib/parse-uri')
module.exports.setupArchives = (api, mapService, annotationService) => { module.exports.setupArchives = (api, mapService, annotationService, cellService) => {
const upload = multer({ dest: os.tmpdir() }) const upload = multer({ dest: os.tmpdir() })
api.app.post('/archives/maps', async (req, res) => { api.app.post('/archives/maps', async (req, res) => {
let data = {} let data = {}
let request = { let request = {
params: { params: {
id: req.body.id uuid: req.body.uuid
}, },
user: req.user user: req.user
} }
...@@ -26,13 +27,26 @@ module.exports.setupArchives = (api, mapService, annotationService) => { ...@@ -26,13 +27,26 @@ module.exports.setupArchives = (api, mapService, annotationService) => {
data.map = result.data data.map = result.data
request = { request = {
query: { query: {
query: JSON.stringify({'target.id': `${config.api.uriBase}/maps/${data.map.uuid}`}) query: JSON.stringify({'target.id': data.map.id})
}, },
user: req.user user: req.user
} }
await annotationService.findHandler(request, async result => { await annotationService.findHandler(request, async result => {
if (result.error) return send(res, result.code) if (result.error) return send(res, result.code)
data.annotations = result.data.items data.annotations = result.data.items
data.cells = []
for (let annotation of data.annotations) {
if (annotation.body.type === 'Cell' && annotation.body.source) {
const cellRequest = {
query: {
uuid: parseURI(annotation.body.source.id).uuid
},
user: req.user
}
const cell = await cellService.getHandler(cellRequest)
if (cell) data.cells.push(cell)
}
}
const url = await exports.createArchive(api, data) const url = await exports.createArchive(api, data)
send(res, 200, url) send(res, 200, url)
}) })
...@@ -46,7 +60,7 @@ module.exports.setupArchives = (api, mapService, annotationService) => { ...@@ -46,7 +60,7 @@ module.exports.setupArchives = (api, mapService, annotationService) => {
if (results.maps && !copy) { if (results.maps && !copy) {
for (let map of results.maps) { for (let map of results.maps) {
const getRequest = { const getRequest = {
params: { id: map.uuid }, params: { uuid: map._uuid },
user: req.user user: req.user
} }
const item = await mapService.getHandler(getRequest) const item = await mapService.getHandler(getRequest)
...@@ -56,7 +70,7 @@ module.exports.setupArchives = (api, mapService, annotationService) => { ...@@ -56,7 +70,7 @@ module.exports.setupArchives = (api, mapService, annotationService) => {
if (results.annotations && !copy) { if (results.annotations && !copy) {
for (let annotation of results.annotations) { for (let annotation of results.annotations) {
const getRequest = { const getRequest = {
params: { id: annotation.uuid }, params: { uuid: annotation._uuid },
user: req.user user: req.user
} }
const item = await annotationService.getHandler(getRequest) const item = await annotationService.getHandler(getRequest)
...@@ -68,13 +82,14 @@ module.exports.setupArchives = (api, mapService, annotationService) => { ...@@ -68,13 +82,14 @@ module.exports.setupArchives = (api, mapService, annotationService) => {
const mappings = {} const mappings = {}
if (results.maps) { if (results.maps) {
for (let map of results.maps) { for (let map of results.maps) {
let oldId = map.uuid let oldId = map._uuid
for (let k of Object.keys(map)) { for (let k of Object.keys(map)) {
if (k[0] === '_') map[k] = undefined if (k[0] === '_') map[k] = undefined
} }
if (copy) { if (copy) {
map.title = req.body.title map.title = req.body.title
map.uuid = undefined map.id = undefined
map._uuid = undefined
} }
if (!map.author) { if (!map.author) {
map.author = { map.author = {
...@@ -87,7 +102,7 @@ module.exports.setupArchives = (api, mapService, annotationService) => { ...@@ -87,7 +102,7 @@ module.exports.setupArchives = (api, mapService, annotationService) => {
user: req.user user: req.user
} }
const result = await mapService.postHandler(postRequest) const result = await mapService.postHandler(postRequest)
if (copy) mappings[oldId] = result.data.uuid if (copy) mappings[oldId] = result.data._uuid
} }
} }
if (results.annotations) { if (results.annotations) {
...@@ -97,7 +112,8 @@ module.exports.setupArchives = (api, mapService, annotationService) => { ...@@ -97,7 +112,8 @@ module.exports.setupArchives = (api, mapService, annotationService) => {
} }
if (copy) { if (copy) {
annotation.target.id = mappings[annotation.target.id] annotation.target.id = mappings[annotation.target.id]
annotation.uuid = undefined annotation._uuid = undefined
annotation.id = undefined
} }
if (!annotation.author) { if (!annotation.author) {
annotation.author = { annotation.author = {
...@@ -122,7 +138,7 @@ module.exports.createArchive = async (api, data) => { ...@@ -122,7 +138,7 @@ module.exports.createArchive = async (api, data) => {
Assert.isType(data.map, 'object', 'data.map must be object') Assert.isType(data.map, 'object', 'data.map must be object')
Assert.ok(Array.isArray(data.annotations), 'data.annotations must be array') Assert.ok(Array.isArray(data.annotations), 'data.annotations must be array')
const dir = path.join(os.tmpdir(), `archive_${ObjectUtil.slug(data.map.title)}_${data.map.uuid}`) const dir = path.join(os.tmpdir(), `archive_${ObjectUtil.slug(data.map.title)}_${data.map._uuid}`)
await new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
rimraf(dir, err => { rimraf(dir, err => {
...@@ -140,12 +156,12 @@ module.exports.createArchive = async (api, data) => { ...@@ -140,12 +156,12 @@ module.exports.createArchive = async (api, data) => {
await fs.mkdir(path.join(dir, 'maps')) await fs.mkdir(path.join(dir, 'maps'))
await fs.mkdir(path.join(dir, 'annotations')) await fs.mkdir(path.join(dir, 'annotations'))
const mapfile = path.join('maps', `${data.map.uuid}.json`) const mapfile = path.join('maps', `${data.map._uuid}.json`)
await fs.writeFile(path.join(dir, mapfile), JSON.stringify(data.map)) await fs.writeFile(path.join(dir, mapfile), JSON.stringify(data.map))
archive.addFile(path.join(dir, mapfile), mapfile) archive.addFile(path.join(dir, mapfile), mapfile)
for (let a of data.annotations) { for (let a of data.annotations) {
const annofile = path.join('annotations', `${a.uuid}.json`) const annofile = path.join('annotations', `${a._uuid}.json`)
await fs.writeFile(path.join(dir, annofile), JSON.stringify(a)) await fs.writeFile(path.join(dir, annofile), JSON.stringify(a))
archive.addFile(path.join(dir, annofile), annofile) archive.addFile(path.join(dir, annofile), annofile)
} }
......
...@@ -125,7 +125,7 @@ class Profiles extends TinyEmitter { ...@@ -125,7 +125,7 @@ class Profiles extends TinyEmitter {
} }
_response (req, res, data = {}) { _response (req, res, data = {}) {
this.emit('message', { method: req.method, id: data.uuid }) this.emit('message', { method: req.method, id: data.id })
if (typeof res === 'function') res({ data }) if (typeof res === 'function') res({ data })
else if (typeof res === 'undefined') return Promise.resolve({ data }) else if (typeof res === 'undefined') return Promise.resolve({ data })
else send(res, 200, data) else send(res, 200, data)
......
...@@ -29,7 +29,7 @@ const resurrectAnnotation = function (annotation) { ...@@ -29,7 +29,7 @@ const resurrectAnnotation = function (annotation) {
const fetchMetaData = async (videos, req, api) => { const fetchMetaData = async (videos, req, api) => {
for (let v of videos) { for (let v of videos) {
try { try {
const meta = await axios.get(`${config.api.transcoderHost}/metadata/${v.annotation.uuid}`, { const meta = await axios.get(`${config.api.transcoderHost}/metadata/${v.annotation._uuid}`, {
headers: { headers: {
Authorization: req.headers.authorization Authorization: req.headers.authorization
} }
...@@ -41,7 +41,7 @@ const fetchMetaData = async (videos, req, api) => { ...@@ -41,7 +41,7 @@ const fetchMetaData = async (videos, req, api) => {
return videos return videos
} }
const groupBySessions = async function (annotations, req, api, secondsDist = constants.SESSION_DISTANCE_SECONDS) { const groupBySessions = async function (annotations, req, api, secondsDist = constants.config.SESSION_DISTANCE_SECONDS) {
let millisDist = secondsDist * 1000 let millisDist = secondsDist * 1000
annotations = annotations.map(annotation => resurrectAnnotation(annotation)).sort(Sorting.sortOnTarget) annotations = annotations.map(annotation => resurrectAnnotation(annotation)).sort(Sorting.sortOnTarget)
const videos = annotations.filter(anno => { return anno.body.type === 'Video' }) const videos = annotations.filter(anno => { return anno.body.type === 'Video' })
...@@ -107,7 +107,7 @@ class Sessions extends TinyEmitter { ...@@ -107,7 +107,7 @@ class Sessions extends TinyEmitter {
results = await _this._annotations.findHandler({ results = await _this._annotations.findHandler({
query: { query: {
query: JSON.stringify({ query: JSON.stringify({
'target.id': `${config.api.uriBase}/piecemaker/timelines/${map.uuid}` 'target.id': `${config.api.uriPrefix}piecemaker/timelines/${map._uuid}`
}) })
}, },
user: req.user, user: req.user,
...@@ -120,7 +120,7 @@ class Sessions extends TinyEmitter { ...@@ -120,7 +120,7 @@ class Sessions extends TinyEmitter {
} }
_response (req, res, data = {}) { _response (req, res, data = {}) {
this.emit('message', { method: req.method, id: data.uuid }) this.emit('message', { method: req.method, id: data.id })
if (typeof res === 'function') res({ data }) if (typeof res === 'function') res({ data })
else if (typeof res === 'undefined') return Promise.resolve({ data }) else if (typeof res === 'undefined') return Promise.resolve({ data })
else send(res, 200, data) else send(res, 200, data)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.