import fs from 'fs'
import assert from 'assert'
import { cloneDeep } from 'lodash'
import pify from 'pify'
import Migrator from '../../../app/scripts/lib/migrator'
import liveMigrations from '../../../app/scripts/migrations'
import data from '../../../app/scripts/first-time-state'
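
// stub migrations that only bump the schema version, so Migrator behaviour can be tested in isolation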
const stubMigrations = [
  {
    version: 1,
    migrate: (data) => {
      // clone the data just like we do in migrations
      const clonedData = cloneDeep(data)
      clonedData.meta.version = 1
      return Promise.resolve(clonedData)
    },
  },
  {
    version: 2,
    migrate: (data) => {
      const clonedData = cloneDeep(data)
      clonedData.meta.version = 2
      return Promise.resolve(clonedData)
    },
  },
  {
    version: 3,
    migrate: (data) => {
      const clonedData = cloneDeep(data)
      clonedData.meta.version = 3
      return Promise.resolve(clonedData)
    },
  },
]
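
// version-0 state blob fed to the stub migrations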
const versionedData = { meta: { version: 0 }, data: { hello: 'world' } }

// the default first-time state, wrapped in the version-0 envelope the Migrator expects
const firstTimeState = {
  meta: { version: 0 },
  data,
}
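
// covers the migration index (completeness) and the Migrator itself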
describe('migrations', function () {
  describe('liveMigrations require list', function () {
    it('should include all the migrations', async function () {
      const fileNames = await pify(cb => fs.readdir('./app/scripts/migrations/', cb))()
      // collect the numeric portion of each migration file name
      const migrationNumbers = fileNames.reduce((agg, filename) => {
        const name = filename.split('.')[0]
        if (/^\d+$/.test(name)) {
          agg.push(name)
        }
        return agg
      }, []).map((num) => parseInt(num, 10))

      // every numeric migration file on disk must be registered in the index
      migrationNumbers.forEach((num) => {
        const migration = liveMigrations.find((m) => m.version === num)
        assert(migration, `migration ${num} should be included in the index`)
      })
    })
  })
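
  // generic Migrator behaviour, exercised with the stub migrations and the live migration list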
  describe('Migrator', function () {
    const migrator = new Migrator({ migrations: stubMigrations })

    it('migratedData version should be version 3', async function () {
      const migratedData = await migrator.migrateData(versionedData)
      assert.equal(migratedData.meta.version, stubMigrations[2].version)
    })

    it('should match the last version in live migrations', async function () {
      const migrator = new Migrator({ migrations: liveMigrations })
      const migratedData = await migrator.migrateData(firstTimeState)
      const last = liveMigrations.length - 1
      assert.equal(migratedData.meta.version, liveMigrations[last].version)
    })

    it('should reject when a migration throws', async function () {
      const migrator = new Migrator({
        migrations: [{
          version: 1,
          async migrate () {
            throw new Error('test')
          },
        }],
      })
      await assert.rejects(migrator.migrateData({ meta: { version: 0 } }))
    })
  })
})