Add initial version of dijkstra backend cloudron image
106
node_modules/knex/lib/migrate/MigrationGenerator.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const mkdirp = require('mkdirp');
const { writeJsFileUsingTemplate } = require('../util/template');
const { getMergedConfig } = require('./configuration-merger');

class MigrationGenerator {
  constructor(migrationConfig) {
    this.config = getMergedConfig(migrationConfig);
  }

  // Creates a new migration, with a given name.
  async make(name, config) {
    this.config = getMergedConfig(config, this.config);
    if (!name) {
      return Promise.reject(
        new Error('A name must be specified for the generated migration')
      );
    }
    await this._ensureFolder();
    const createdMigrationFilePath = await this._writeNewMigration(name);
    return createdMigrationFilePath;
  }

  // Ensures a folder for the migrations exists, dependent on the migration
  // config settings.
  _ensureFolder() {
    const dirs = this._absoluteConfigDirs();

    const promises = dirs.map((dir) => {
      return promisify(fs.stat)(dir).catch(() => promisify(mkdirp)(dir));
    });

    return Promise.all(promises);
  }

  _getStubPath() {
    return (
      this.config.stub ||
      path.join(__dirname, 'stub', this.config.extension + '.stub')
    );
  }

  _getNewMigrationName(name) {
    if (name[0] === '-') name = name.slice(1);
    return yyyymmddhhmmss() + '_' + name + '.' + this.config.extension;
  }

  _getNewMigrationPath(name) {
    const fileName = this._getNewMigrationName(name);
    const dirs = this._absoluteConfigDirs();
    const dir = dirs.slice(-1)[0]; // Get last specified directory
    return path.join(dir, fileName);
  }

  // Write a new migration to disk, using the config and generated filename,
  // passing any `variables` given in the config to the template.
  async _writeNewMigration(name) {
    const migrationPath = this._getNewMigrationPath(name);
    await writeJsFileUsingTemplate(
      migrationPath,
      this._getStubPath(),
      { variable: 'd' },
      this.config.variables || {}
    );
    return migrationPath;
  }

  _absoluteConfigDirs() {
    const directories = Array.isArray(this.config.directory)
      ? this.config.directory
      : [this.config.directory];
    return directories.map((directory) => {
      if (!directory) {
        // eslint-disable-next-line no-console
        console.warn(
          'Failed to resolve config file, knex cannot determine where to generate migrations'
        );
      }
      return path.resolve(process.cwd(), directory);
    });
  }
}

// Ensure that we have 2 places for each of the date segments.
function padDate(segment) {
  segment = segment.toString();
  return segment[1] ? segment : `0${segment}`;
}

// Get a date string in the correct format, without requiring a full-blown
// library like "moment.js".
function yyyymmddhhmmss() {
  const d = new Date();
  return (
    d.getFullYear().toString() +
    padDate(d.getMonth() + 1) +
    padDate(d.getDate()) +
    padDate(d.getHours()) +
    padDate(d.getMinutes()) +
    padDate(d.getSeconds())
  );
}

module.exports = MigrationGenerator;
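The generator above is normally driven by the knex CLI, but it can be exercised directly. A minimal sketch, assuming a `./migrations` directory and the default `js` extension (the migration name is hypothetical):

// Sketch only: make() resolves to the absolute path of the file it wrote.
const MigrationGenerator = require('knex/lib/migrate/MigrationGenerator');

const generator = new MigrationGenerator({ directory: './migrations' });
generator.make('create_users').then((filePath) => {
  // e.g. /app/migrations/20190604213012_create_users.js -- the prefix comes
  // from yyyymmddhhmmss() above, the extension from the merged config.
  console.log(filePath);
});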
600
node_modules/knex/lib/migrate/Migrator.js
generated
vendored
Normal file
@@ -0,0 +1,600 @@
// Migrator
// -------
const {
  differenceWith,
  each,
  filter,
  get,
  isFunction,
  isBoolean,
  isEmpty,
  isUndefined,
  max,
} = require('lodash');
const inherits = require('inherits');
const {
  getLockTableName,
  getLockTableNameWithSchema,
  getTable,
  getTableName,
} = require('./table-resolver');
const { getSchemaBuilder } = require('./table-creator');
const migrationListResolver = require('./migration-list-resolver');
const MigrationGenerator = require('./MigrationGenerator');
const { getMergedConfig } = require('./configuration-merger');

function LockError(msg) {
  this.name = 'MigrationLocked';
  this.message = msg;
}

inherits(LockError, Error);

// The new migration we're performing, typically called from the `knex.migrate`
// interface on the main `knex` object. Passes the `knex` instance performing
// the migration.
class Migrator {
  constructor(knex) {
    // Clone knex instance and remove post-processing that is unnecessary
    // for internal queries from a cloned config
    if (isFunction(knex)) {
      if (!knex.isTransaction) {
        this.knex = knex.withUserParams({
          ...knex.userParams,
        });
      } else {
        this.knex = knex;
      }
    } else {
      this.knex = Object.assign({}, knex);
      this.knex.userParams = this.knex.userParams || {};
    }

    this.config = getMergedConfig(this.knex.client.config.migrations);
    this.generator = new MigrationGenerator(this.knex.client.config.migrations);
    this._activeMigration = {
      fileName: null,
    };
  }

  // Migrates to the latest configuration.
  latest(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);

    return migrationListResolver
      .listAllAndCompleted(this.config, this.knex)
      .then((value) => {
        if (!this.config.disableMigrationsListValidation) {
          validateMigrationList(this.config.migrationSource, value);
        }
        return value;
      })
      .then(([all, completed]) => {
        const migrations = getNewMigrations(
          this.config.migrationSource,
          all,
          completed
        );

        const transactionForAll =
          !this.config.disableTransactions &&
          isEmpty(
            filter(migrations, (migration) => {
              const migrationContents = this.config.migrationSource.getMigration(
                migration
              );
              return !this._useTransaction(migrationContents);
            })
          );

        if (transactionForAll) {
          return this.knex.transaction((trx) => {
            return this._runBatch(migrations, 'up', trx);
          });
        } else {
          return this._runBatch(migrations, 'up');
        }
      });
  }

  // Runs the next migration that has not yet been run
  up(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);

    return migrationListResolver
      .listAllAndCompleted(this.config, this.knex)
      .then((value) => {
        if (!this.config.disableMigrationsListValidation) {
          validateMigrationList(this.config.migrationSource, value);
        }
        return value;
      })
      .then(([all, completed]) => {
        const newMigrations = getNewMigrations(
          this.config.migrationSource,
          all,
          completed
        );

        let migrationToRun;
        const name = this.config.name;
        if (name) {
          if (!completed.includes(name)) {
            migrationToRun = newMigrations.find((migration) => {
              return (
                this.config.migrationSource.getMigrationName(migration) === name
              );
            });
            if (!migrationToRun) {
              throw new Error(`Migration "${name}" not found.`);
            }
          }
        } else {
          migrationToRun = newMigrations[0];
        }

        const migrationsToRun = [];
        if (migrationToRun) {
          migrationsToRun.push(migrationToRun);
        }

        const transactionForAll =
          !this.config.disableTransactions &&
          isEmpty(
            filter(migrationsToRun, (migration) => {
              const migrationContents = this.config.migrationSource.getMigration(
                migration
              );

              return !this._useTransaction(migrationContents);
            })
          );

        if (transactionForAll) {
          return this.knex.transaction((trx) => {
            return this._runBatch(migrationsToRun, 'up', trx);
          });
        } else {
          return this._runBatch(migrationsToRun, 'up');
        }
      });
  }

  // Rollback the last "batch", or all, of migrations that were run.
  rollback(config, all = false) {
    this._disableProcessing();
    return new Promise((resolve, reject) => {
      try {
        this.config = getMergedConfig(config, this.config);
      } catch (e) {
        // Bail out here so a failed merge doesn't fall through to the
        // listing below with a stale config.
        return reject(e);
      }
      migrationListResolver
        .listAllAndCompleted(this.config, this.knex)
        .then((value) => {
          if (!this.config.disableMigrationsListValidation) {
            validateMigrationList(this.config.migrationSource, value);
          }
          return value;
        })
        .then((val) => {
          const [allMigrations, completedMigrations] = val;

          return all
            ? allMigrations
                .filter((migration) => {
                  return completedMigrations.includes(migration.file);
                })
                .reverse()
            : this._getLastBatch(val);
        })
        .then((migrations) => {
          return this._runBatch(migrations, 'down');
        })
        .then(resolve, reject);
    });
  }

  down(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);

    return migrationListResolver
      .listAllAndCompleted(this.config, this.knex)
      .then((value) => {
        if (!this.config.disableMigrationsListValidation) {
          validateMigrationList(this.config.migrationSource, value);
        }
        return value;
      })
      .then(([all, completed]) => {
        const completedMigrations = all.filter((migration) => {
          return completed.includes(
            this.config.migrationSource.getMigrationName(migration)
          );
        });

        let migrationToRun;
        const name = this.config.name;
        if (name) {
          migrationToRun = completedMigrations.find((migration) => {
            return (
              this.config.migrationSource.getMigrationName(migration) === name
            );
          });
          if (!migrationToRun) {
            throw new Error(`Migration "${name}" was not run.`);
          }
        } else {
          migrationToRun = completedMigrations[completedMigrations.length - 1];
        }

        const migrationsToRun = [];
        if (migrationToRun) {
          migrationsToRun.push(migrationToRun);
        }

        return this._runBatch(migrationsToRun, 'down');
      });
  }

  status(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);

    return Promise.all([
      getTable(this.knex, this.config.tableName, this.config.schemaName).select(
        '*'
      ),
      migrationListResolver.listAll(this.config.migrationSource),
    ]).then(([db, code]) => db.length - code.length);
  }

  // Retrieves and returns the current migration version we're on, as a promise.
  // If no migrations have been run yet, return "none".
  currentVersion(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);

    return migrationListResolver
      .listCompleted(this.config.tableName, this.config.schemaName, this.knex)
      .then((completed) => {
        const val = max(completed.map((value) => value.split('_')[0]));
        return isUndefined(val) ? 'none' : val;
      });
  }

  // List all migrations
  async list(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);

    const [all, completed] = await migrationListResolver.listAllAndCompleted(
      this.config,
      this.knex
    );

    if (!this.config.disableMigrationsListValidation) {
      validateMigrationList(this.config.migrationSource, [all, completed]);
    }

    const newMigrations = getNewMigrations(
      this.config.migrationSource,
      all,
      completed
    );
    return [completed, newMigrations];
  }

  forceFreeMigrationsLock(config) {
    this.config = getMergedConfig(config, this.config);

    const lockTable = getLockTableName(this.config.tableName);
    return getSchemaBuilder(this.knex, this.config.schemaName)
      .hasTable(lockTable)
      .then((exist) => exist && this._freeLock());
  }

  // Creates a new migration, with a given name.
  make(name, config) {
    this.config = getMergedConfig(config, this.config);
    return this.generator.make(name, this.config);
  }

  _disableProcessing() {
    if (this.knex.disableProcessing) {
      this.knex.disableProcessing();
    }
  }

  _lockMigrations(trx) {
    const tableName = getLockTableName(this.config.tableName);
    return getTable(this.knex, tableName, this.config.schemaName)
      .transacting(trx)
      .where('is_locked', '=', 0)
      .update({ is_locked: 1 })
      .then((rowCount) => {
        if (rowCount != 1) {
          throw new Error('Migration table is already locked');
        }
      });
  }

  _getLock(trx) {
    const transact = trx ? (fn) => fn(trx) : (fn) => this.knex.transaction(fn);
    return transact((trx) => {
      return this._lockMigrations(trx);
    }).catch((err) => {
      throw new LockError(err.message);
    });
  }

  _freeLock(trx = this.knex) {
    const tableName = getLockTableName(this.config.tableName);
    return getTable(trx, tableName, this.config.schemaName).update({
      is_locked: 0,
    });
  }

  // Run a batch of current migrations, in sequence.
  _runBatch(migrations, direction, trx) {
    return (
      this._getLock(trx)
        // When there is a wrapping transaction, some migrations
        // could have been done while waiting for the lock:
        .then(() =>
          trx
            ? migrationListResolver.listCompleted(
                this.config.tableName,
                this.config.schemaName,
                trx
              )
            : []
        )
        .then(
          (completed) =>
            (migrations = getNewMigrations(
              this.config.migrationSource,
              migrations,
              completed
            ))
        )
        .then(() =>
          Promise.all(
            migrations.map(this._validateMigrationStructure.bind(this))
          )
        )
        .then(() => this._latestBatchNumber(trx))
        .then((batchNo) => {
          if (direction === 'up') batchNo++;
          return batchNo;
        })
        .then((batchNo) => {
          return this._waterfallBatch(batchNo, migrations, direction, trx);
        })
        .then(async (res) => {
          await this._freeLock(trx);
          return res;
        })
        .catch(async (error) => {
          let cleanupReady = Promise.resolve();

          if (error instanceof LockError) {
            // If locking error do not free the lock.
            this.knex.client.logger.warn(
              `Can't take lock to run migrations: ${error.message}`
            );
            this.knex.client.logger.warn(
              'If you are sure migrations are not running you can release the ' +
                'lock manually by deleting all the rows from the migrations lock ' +
                'table: ' +
                getLockTableNameWithSchema(
                  this.config.tableName,
                  this.config.schemaName
                )
            );
          } else {
            if (this._activeMigration.fileName) {
              this.knex.client.logger.warn(
                `migration file "${this._activeMigration.fileName}" failed`
              );
            }
            this.knex.client.logger.warn(
              `migration failed with error: ${error.message}`
            );
            // If the error was not due to a locking issue, then remove the lock.
            cleanupReady = this._freeLock(trx);
          }

          try {
            await cleanupReady;
            // eslint-disable-next-line no-empty
          } catch (e) {}
          throw error;
        })
    );
  }

  // Validates some migrations by requiring and checking for an `up` and `down`
  // function.
  _validateMigrationStructure(migration) {
    const migrationName = this.config.migrationSource.getMigrationName(
      migration
    );
    const migrationContent = this.config.migrationSource.getMigration(
      migration
    );
    if (
      typeof migrationContent.up !== 'function' ||
      typeof migrationContent.down !== 'function'
    ) {
      throw new Error(
        `Invalid migration: ${migrationName} must have both an up and down function`
      );
    }

    return migration;
  }

  // Get the last batch of migrations, by name, ordered by insert id in reverse
  // order.
  _getLastBatch([allMigrations]) {
    const { tableName, schemaName } = this.config;
    return getTable(this.knex, tableName, schemaName)
      .where('batch', function(qb) {
        qb.max('batch').from(getTableName(tableName, schemaName));
      })
      .orderBy('id', 'desc')
      .then((migrations) =>
        Promise.all(
          migrations.map((migration) => {
            return allMigrations.find((entry) => {
              return (
                this.config.migrationSource.getMigrationName(entry) ===
                migration.name
              );
            });
          })
        )
      );
  }

  // Returns the latest batch number.
  _latestBatchNumber(trx = this.knex) {
    return trx
      .from(getTableName(this.config.tableName, this.config.schemaName))
      .max('batch as max_batch')
      .then((obj) => obj[0].max_batch || 0);
  }

  // If transaction config for a single migration is defined, use that.
  // Otherwise, rely on the common config. This allows enabling/disabling
  // transaction for a single migration at will, regardless of the common
  // config.
  _useTransaction(migrationContent, allTransactionsDisabled) {
    const singleTransactionValue = get(migrationContent, 'config.transaction');

    return isBoolean(singleTransactionValue)
      ? singleTransactionValue
      : !allTransactionsDisabled;
  }

  // Runs a batch of `migrations` in a specified `direction`, saving the
  // appropriate database information as the migrations are run.
  _waterfallBatch(batchNo, migrations, direction, trx) {
    const trxOrKnex = trx || this.knex;
    const { tableName, schemaName, disableTransactions } = this.config;
    let current = Promise.resolve();
    const log = [];
    each(migrations, (migration) => {
      const name = this.config.migrationSource.getMigrationName(migration);
      this._activeMigration.fileName = name;
      const migrationContent = this.config.migrationSource.getMigration(
        migration
      );

      // We're going to run each of the migrations in the current "up".
      current = current
        .then(() => {
          this._activeMigration.fileName = name;
          if (
            !trx &&
            this._useTransaction(migrationContent, disableTransactions)
          ) {
            this.knex.enableProcessing();
            return this._transaction(
              this.knex,
              migrationContent,
              direction,
              name
            );
          }

          trxOrKnex.enableProcessing();
          return checkPromise(
            this.knex.client.logger,
            migrationContent[direction](trxOrKnex),
            name
          );
        })
        .then(() => {
          trxOrKnex.disableProcessing();
          this.knex.disableProcessing();
          log.push(name);
          if (direction === 'up') {
            return trxOrKnex.into(getTableName(tableName, schemaName)).insert({
              name,
              batch: batchNo,
              migration_time: new Date(),
            });
          }
          if (direction === 'down') {
            return trxOrKnex
              .from(getTableName(tableName, schemaName))
              .where({ name })
              .del();
          }
        });
    });

    return current.then(() => [batchNo, log]);
  }

  _transaction(knex, migrationContent, direction, name) {
    return knex.transaction((trx) => {
      return checkPromise(
        knex.client.logger,
        migrationContent[direction](trx),
        name,
        () => {
          trx.commit();
        }
      );
    });
  }
}

// Validates that migrations are present in the appropriate directories.
function validateMigrationList(migrationSource, migrations) {
  const all = migrations[0];
  const completed = migrations[1];
  const diff = getMissingMigrations(migrationSource, completed, all);
  if (!isEmpty(diff)) {
    throw new Error(
      `The migration directory is corrupt, the following files are missing: ${diff.join(
        ', '
      )}`
    );
  }
}

function getMissingMigrations(migrationSource, completed, all) {
  return differenceWith(completed, all, (completedMigration, allMigration) => {
    return (
      completedMigration === migrationSource.getMigrationName(allMigration)
    );
  });
}

function getNewMigrations(migrationSource, all, completed) {
  return differenceWith(all, completed, (allMigration, completedMigration) => {
    return (
      completedMigration === migrationSource.getMigrationName(allMigration)
    );
  });
}

function checkPromise(logger, migrationPromise, name, commitFn) {
  if (!migrationPromise || typeof migrationPromise.then !== 'function') {
    logger.warn(`migration ${name} did not return a promise`);
    if (commitFn) {
      commitFn();
    }
  }
  return migrationPromise;
}

module.exports = {
  Migrator,
};
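In application code this Migrator is reached through knex's public `knex.migrate` API rather than instantiated directly. A minimal sketch with placeholder connection settings:

// Sketch: driving the Migrator above through the public API.
const knex = require('knex')({
  client: 'sqlite3',
  connection: { filename: './dev.sqlite3' },
  useNullAsDefault: true,
  migrations: { directory: './migrations' },
});

knex.migrate
  .latest() // acquires the lock, then runs all pending migrations as one batch
  .then(([batchNo, log]) => console.log(`batch ${batchNo} ran:`, log))
  .then(() => knex.migrate.rollback()) // undoes that last batch via _getLastBatch()
  .then(() => knex.destroy());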
53
node_modules/knex/lib/migrate/configuration-merger.js
generated
vendored
Normal file
@@ -0,0 +1,53 @@
const {
  FsMigrations,
  DEFAULT_LOAD_EXTENSIONS,
} = require('./sources/fs-migrations');

const CONFIG_DEFAULT = Object.freeze({
  extension: 'js',
  loadExtensions: DEFAULT_LOAD_EXTENSIONS,
  tableName: 'knex_migrations',
  schemaName: null,
  directory: './migrations',
  disableTransactions: false,
  disableMigrationsListValidation: false,
  sortDirsSeparately: false,
});

function getMergedConfig(config, currentConfig) {
  // config is the user-specified config, mergedConfig has defaults and current
  // config applied to it.
  const mergedConfig = Object.assign(
    {},
    CONFIG_DEFAULT,
    currentConfig || {},
    config
  );

  if (
    config &&
    // If user specifies any FS related config,
    // clear existing FsMigrations migrationSource
    (config.directory ||
      config.sortDirsSeparately !== undefined ||
      config.loadExtensions)
  ) {
    mergedConfig.migrationSource = null;
  }

  // If the user has not specified any configs, we need to
  // default to fs migrations to maintain compatibility
  if (!mergedConfig.migrationSource) {
    mergedConfig.migrationSource = new FsMigrations(
      mergedConfig.directory,
      mergedConfig.sortDirsSeparately,
      mergedConfig.loadExtensions
    );
  }

  return mergedConfig;
}

module.exports = {
  getMergedConfig,
};
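To make the precedence concrete: defaults are overridden by the current config, which is overridden by the caller's config, and touching any filesystem-related key rebuilds the migration source. A small sketch:

// Sketch of the merge order implemented above.
const { getMergedConfig } = require('knex/lib/migrate/configuration-merger');

const first = getMergedConfig({ directory: './db/migrations' });
// first.tableName === 'knex_migrations' (default); first.migrationSource
// is an FsMigrations over './db/migrations'

const second = getMergedConfig({ directory: './other' }, first);
// second.directory === './other'; the inherited FsMigrations was cleared
// and recreated because an FS-related key changed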
17
node_modules/knex/lib/migrate/migrate-stub.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
// Stub Migrate:
// Used for now in browser builds, where filesystem access isn't
// available.
const StubMigrate = (module.exports = function() {});

const noSuchMethod = async function() {
  throw new Error('Migrations are not supported');
};

StubMigrate.prototype = {
  make: noSuchMethod,
  latest: noSuchMethod,
  rollback: noSuchMethod,
  currentVersion: noSuchMethod,
  up: noSuchMethod,
  down: noSuchMethod,
};
40
node_modules/knex/lib/migrate/migration-list-resolver.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
const Bluebird = require('bluebird');
const { getTableName } = require('./table-resolver');
const { ensureTable } = require('./table-creator');

// Lists all available migration versions, as a sorted array.
function listAll(migrationSource, loadExtensions) {
  return migrationSource.getMigrations(loadExtensions);
}

// Lists all migrations that have been completed for the current db, as an
// array.
function listCompleted(tableName, schemaName, trxOrKnex) {
  return ensureTable(tableName, schemaName, trxOrKnex)
    .then(() =>
      trxOrKnex
        .from(getTableName(tableName, schemaName))
        .orderBy('id')
        .select('name')
    )
    .then((migrations) =>
      migrations.map((migration) => {
        return migration.name;
      })
    );
}

// Gets the migration list from the migration directory specified in config,
// as well as the list of completed migrations to check what should be run.
function listAllAndCompleted(config, trxOrKnex) {
  return Bluebird.all([
    listAll(config.migrationSource, config.loadExtensions),
    listCompleted(config.tableName, config.schemaName, trxOrKnex),
  ]);
}

module.exports = {
  listAll,
  listAllAndCompleted,
  listCompleted,
};
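Callers destructure the resolved pair, as `Migrator.latest()` does above. A sketch with placeholder paths and connection settings:

// Sketch: what listAllAndCompleted() resolves to.
const knex = require('knex')({
  client: 'sqlite3',
  connection: { filename: './dev.sqlite3' },
  useNullAsDefault: true,
});
const { getMergedConfig } = require('knex/lib/migrate/configuration-merger');
const resolver = require('knex/lib/migrate/migration-list-resolver');

resolver
  .listAllAndCompleted(getMergedConfig({ directory: './migrations' }), knex)
  .then(([all, completed]) => {
    // all:       [{ file, directory }, ...] from the migration source
    // completed: ['20190604213012_create_users.js', ...] from knex_migrations
  })
  .then(() => knex.destroy());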
98
node_modules/knex/lib/migrate/sources/fs-migrations.js
generated
vendored
Normal file
@@ -0,0 +1,98 @@
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const { sortBy, filter } = require('lodash');

const readDirAsync = promisify(fs.readdir);

const DEFAULT_LOAD_EXTENSIONS = Object.freeze([
  '.co',
  '.coffee',
  '.eg',
  '.iced',
  '.js',
  '.litcoffee',
  '.ls',
  '.ts',
]);

class FsMigrations {
  constructor(migrationDirectories, sortDirsSeparately, loadExtensions) {
    this.sortDirsSeparately = sortDirsSeparately;

    if (!Array.isArray(migrationDirectories)) {
      migrationDirectories = [migrationDirectories];
    }
    this.migrationsPaths = migrationDirectories;
    this.loadExtensions = loadExtensions || DEFAULT_LOAD_EXTENSIONS;
  }

  /**
   * Gets the migration names
   * @returns Promise<string[]>
   */
  getMigrations(loadExtensions) {
    // Get a list of files in all specified migration directories
    const readMigrationsPromises = this.migrationsPaths.map((configDir) => {
      const absoluteDir = path.resolve(process.cwd(), configDir);
      return readDirAsync(absoluteDir).then((files) => ({
        files,
        configDir,
        absoluteDir,
      }));
    });

    return Promise.all(readMigrationsPromises).then((allMigrations) => {
      const migrations = allMigrations.reduce((acc, migrationDirectory) => {
        // When true, files inside the folder should be sorted
        if (this.sortDirsSeparately) {
          migrationDirectory.files = migrationDirectory.files.sort();
        }

        migrationDirectory.files.forEach((file) =>
          acc.push({ file, directory: migrationDirectory.configDir })
        );

        return acc;
      }, []);

      // If true, the migrations have already been sorted inside each folder,
      // so return them without a global sort.
      if (this.sortDirsSeparately) {
        return filterMigrations(
          this,
          migrations,
          loadExtensions || this.loadExtensions
        );
      }

      return filterMigrations(
        this,
        sortBy(migrations, 'file'),
        loadExtensions || this.loadExtensions
      );
    });
  }

  getMigrationName(migration) {
    return migration.file;
  }

  getMigration(migration) {
    const absoluteDir = path.resolve(process.cwd(), migration.directory);
    return require(path.join(absoluteDir, migration.file));
  }
}

function filterMigrations(migrationSource, migrations, loadExtensions) {
  return filter(migrations, (migration) => {
    const migrationName = migrationSource.getMigrationName(migration);
    const extension = path.extname(migrationName);
    return loadExtensions.includes(extension);
  });
}

module.exports = {
  DEFAULT_LOAD_EXTENSIONS,
  FsMigrations,
};
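FsMigrations is only the default source: the `migrationSource` key handled in configuration-merger.js above accepts any object exposing the same three methods. A hypothetical in-memory source as a sketch:

// Sketch: a custom migration source (names and shapes are illustrative).
const inMemorySource = {
  // Must resolve to an ordered list; entries may have any shape.
  getMigrations() {
    return Promise.resolve(['create_users']);
  },
  // Maps an entry to the name recorded in the knex_migrations table.
  getMigrationName(migration) {
    return migration;
  },
  // Returns the { up, down } module for an entry.
  getMigration(migration) {
    return {
      up: (knex) => knex.schema.createTable('users', (t) => t.increments()),
      down: (knex) => knex.schema.dropTable('users'),
    };
  },
};
// Hypothetical usage: pass migrations: { migrationSource: inMemorySource }
// in the knex() config instead of a directory.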
13
node_modules/knex/lib/migrate/stub/coffee.stub
generated
vendored
Normal file
@@ -0,0 +1,13 @@

exports.up = (knex) ->
<% if (d.tableName) { %>
  knex.schema.createTable "<%= d.tableName %>", (t) ->
    t.increments()
    t.timestamp()
<% } %>


exports.down = (knex) ->
<% if (d.tableName) { %>
  knex.schema.dropTable "<%= d.tableName %>"
<% } %>
14
node_modules/knex/lib/migrate/stub/eg.stub
generated
vendored
Normal file
@@ -0,0 +1,14 @@
provide: up, down

up = (knex) ->
<% if (d.tableName) { %>
  knex.schema.createTable "<%= d.tableName %>": t ->
    t.increments()
    t.timestamp()
<% } %>


down = (knex) ->
<% if (d.tableName) { %>
  knex.schema.dropTable("<%= d.tableName %>")
<% } %>
15
node_modules/knex/lib/migrate/stub/js.stub
generated
vendored
Normal file
@@ -0,0 +1,15 @@

exports.up = function(knex) {
<% if (d.tableName) { %>
  return knex.schema.createTable("<%= d.tableName %>", function(t) {
    t.increments();
    t.timestamp();
  });
<% } %>
};

exports.down = function(knex) {
<% if (d.tableName) { %>
  return knex.schema.dropTable("<%= d.tableName %>");
<% } %>
};
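The `<% %>` and `d.*` markers in the stubs are lodash template syntax: MigrationGenerator passes `{ variable: 'd' }` to writeJsFileUsingTemplate, so the config's `variables` object is exposed as `d` inside the template. A sketch of that rendering step (the stub path here is illustrative):

// Sketch: rendering a stub the way MigrationGenerator does.
const fs = require('fs');
const template = require('lodash').template;

const render = template(fs.readFileSync('js.stub', 'utf8'), { variable: 'd' });
const contents = render({ tableName: 'users' }); // d.tableName === 'users'
// contents now holds an up/down pair creating and dropping the "users" table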
34
node_modules/knex/lib/migrate/stub/knexfile-coffee.stub
generated
vendored
Normal file
@@ -0,0 +1,34 @@
# Update with your config settings.

module.exports =

  development:
    client: 'sqlite3'
    connection:
      filename: './dev.sqlite3'
    migrations:
      tableName: 'knex_migrations'

  staging:
    client: 'postgresql'
    connection:
      database: 'my_db'
      user: 'username'
      password: 'password'
    pool:
      min: 2
      max: 10
    migrations:
      tableName: 'knex_migrations'

  production:
    client: 'postgresql'
    connection:
      database: 'my_db'
      user: 'username'
      password: 'password'
    pool:
      min: 2
      max: 10
    migrations:
      tableName: 'knex_migrations'
43
node_modules/knex/lib/migrate/stub/knexfile-eg.stub
generated
vendored
Normal file
@@ -0,0 +1,43 @@
;; Update with your config settings.

module.exports = {
  development = {
    client = 'sqlite3'
    connection = {
      filename = './dev.sqlite3'
    }
    migrations = {
      tableName = 'knex_migrations'
    }
  }
  staging = {
    client = 'postgresql'
    connection = {
      database = 'my_db'
      user = 'username'
      password = 'password'
    }
    pool = {
      min = 2
      max = 10
    }
    migrations = {
      tableName = 'knex_migrations'
    }
  }
  production = {
    client = 'postgresql'
    connection = {
      database = 'my_db'
      user = 'username'
      password = 'password'
    }
    pool = {
      min = 2
      max = 10
    }
    migrations = {
      tableName = 'knex_migrations'
    }
  }
}
44
node_modules/knex/lib/migrate/stub/knexfile-js.stub
generated
vendored
Normal file
@@ -0,0 +1,44 @@
// Update with your config settings.

module.exports = {

  development: {
    client: 'sqlite3',
    connection: {
      filename: './dev.sqlite3'
    }
  },

  staging: {
    client: 'postgresql',
    connection: {
      database: 'my_db',
      user: 'username',
      password: 'password'
    },
    pool: {
      min: 2,
      max: 10
    },
    migrations: {
      tableName: 'knex_migrations'
    }
  },

  production: {
    client: 'postgresql',
    connection: {
      database: 'my_db',
      user: 'username',
      password: 'password'
    },
    pool: {
      min: 2,
      max: 10
    },
    migrations: {
      tableName: 'knex_migrations'
    }
  }

};
35
node_modules/knex/lib/migrate/stub/knexfile-ls.stub
generated
vendored
Normal file
@@ -0,0 +1,35 @@
# Update with your config settings.

module.exports =

  development:
    client: 'sqlite3'
    connection:
      filename: './dev.sqlite3'
    migrations:
      tableName: 'knex_migrations'

  staging:
    client: 'postgresql'
    connection:
      database: 'my_db'
      user: 'username'
      password: 'password'
    pool:
      min: 2
      max: 10
    migrations:
      tableName: 'knex_migrations'

  production:
    client: 'postgresql'
    connection:
      database: 'my_db'
      user: 'username'
      password: 'password'
    pool:
      min: 2
      max: 10
    migrations:
      tableName: 'knex_migrations'

44
node_modules/knex/lib/migrate/stub/knexfile-ts.stub
generated
vendored
Normal file
@@ -0,0 +1,44 @@
// Update with your config settings.

module.exports = {

  development: {
    client: "sqlite3",
    connection: {
      filename: "./dev.sqlite3"
    }
  },

  staging: {
    client: "postgresql",
    connection: {
      database: "my_db",
      user: "username",
      password: "password"
    },
    pool: {
      min: 2,
      max: 10
    },
    migrations: {
      tableName: "knex_migrations"
    }
  },

  production: {
    client: "postgresql",
    connection: {
      database: "my_db",
      user: "username",
      password: "password"
    },
    pool: {
      min: 2,
      max: 10
    },
    migrations: {
      tableName: "knex_migrations"
    }
  }

};
14
node_modules/knex/lib/migrate/stub/ls.stub
generated
vendored
Normal file
@@ -0,0 +1,14 @@

exports.up = (knex, Promise) ->
<% if (d.tableName) { %>
  knex.schema.create-table "<%= d.tableName %>", (t) ->
    t.increments!
    t.timestamp!
<% } %>


exports.down = (knex, Promise) ->
<% if (d.tableName) { %>
  knex.schema.drop-table "<%= d.tableName %>"
<% } %>

21
node_modules/knex/lib/migrate/stub/ts.stub
generated
vendored
Normal file
@@ -0,0 +1,21 @@
import * as Knex from "knex";

<% if (d.tableName) { %>
export async function up(knex: Knex): Promise<Knex.SchemaBuilder> {
  return knex.schema.createTable("<%= d.tableName %>", (t: Knex.AlterTableBuilder) => {
    t.increments();
    t.timestamps();
  });
}
<% } else { %>
export async function up(knex: Knex): Promise<any> {
}
<% } %>
<% if (d.tableName) { %>
export async function down(knex: Knex): Promise<Knex.SchemaBuilder> {
  return knex.schema.dropTable("<%= d.tableName %>");
}
<% } else { %>
export async function down(knex: Knex): Promise<any> {
}
<% } %>
67
node_modules/knex/lib/migrate/table-creator.js
generated
vendored
Normal file
@@ -0,0 +1,67 @@
const {
  getTable,
  getLockTableName,
  getLockTableNameWithSchema,
  getTableName,
} = require('./table-resolver');

function ensureTable(tableName, schemaName, trxOrKnex) {
  const lockTable = getLockTableName(tableName);
  const lockTableWithSchema = getLockTableNameWithSchema(tableName, schemaName);
  return getSchemaBuilder(trxOrKnex, schemaName)
    .hasTable(tableName)
    .then((exists) => {
      return !exists && _createMigrationTable(tableName, schemaName, trxOrKnex);
    })
    .then(() => {
      return getSchemaBuilder(trxOrKnex, schemaName).hasTable(lockTable);
    })
    .then((exists) => {
      return (
        !exists && _createMigrationLockTable(lockTable, schemaName, trxOrKnex)
      );
    })
    .then(() => {
      return getTable(trxOrKnex, lockTable, schemaName).select('*');
    })
    .then((data) => {
      return (
        !data.length &&
        trxOrKnex.into(lockTableWithSchema).insert({ is_locked: 0 })
      );
    });
}

function _createMigrationTable(tableName, schemaName, trxOrKnex) {
  return getSchemaBuilder(trxOrKnex, schemaName).createTable(
    getTableName(tableName),
    function(t) {
      t.increments();
      t.string('name');
      t.integer('batch');
      t.timestamp('migration_time');
    }
  );
}

function _createMigrationLockTable(tableName, schemaName, trxOrKnex) {
  return getSchemaBuilder(trxOrKnex, schemaName).createTable(
    tableName,
    function(t) {
      t.increments('index').primary();
      t.integer('is_locked');
    }
  );
}

// Get schema-aware schema builder for a given schema name
function getSchemaBuilder(trxOrKnex, schemaName) {
  return schemaName
    ? trxOrKnex.schema.withSchema(schemaName)
    : trxOrKnex.schema;
}

module.exports = {
  ensureTable,
  getSchemaBuilder,
};
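ensureTable() is what bootstraps the two bookkeeping tables before any listing or locking happens. A sketch of the result, with a placeholder knex instance and the default table names:

// Sketch: after ensureTable() resolves, two tables exist:
//   knex_migrations       id | name | batch | migration_time
//   knex_migrations_lock  index | is_locked   (seeded with is_locked = 0)
const knex = require('knex')({
  client: 'sqlite3',
  connection: { filename: './dev.sqlite3' },
  useNullAsDefault: true,
});
const { ensureTable } = require('knex/lib/migrate/table-creator');

ensureTable('knex_migrations', null, knex).then(() => knex.destroy());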
27
node_modules/knex/lib/migrate/table-resolver.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
// Get schema-aware table name
function getTableName(tableName, schemaName) {
  return schemaName ? `${schemaName}.${tableName}` : tableName;
}

// Get schema-aware query builder for a given table and schema name
function getTable(trxOrKnex, tableName, schemaName) {
  return schemaName
    ? trxOrKnex(tableName).withSchema(schemaName)
    : trxOrKnex(tableName);
}

function getLockTableName(tableName) {
  return tableName + '_lock';
}

function getLockTableNameWithSchema(tableName, schemaName) {
  return schemaName
    ? schemaName + '.' + getLockTableName(tableName)
    : getLockTableName(tableName);
}

module.exports = {
  getLockTableName,
  getLockTableNameWithSchema,
  getTable,
  getTableName,
};
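These helpers are pure string and builder utilities; their outputs for a schema-qualified setup:

// Examples of the return values above.
const {
  getTableName,
  getLockTableName,
  getLockTableNameWithSchema,
} = require('knex/lib/migrate/table-resolver');

getTableName('knex_migrations', 'audit');               // 'audit.knex_migrations'
getLockTableName('knex_migrations');                    // 'knex_migrations_lock'
getLockTableNameWithSchema('knex_migrations', 'audit'); // 'audit.knex_migrations_lock'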