Add initial version of dijkstra backend cloudron image

This commit is contained in:
2020-10-12 11:27:15 +02:00
commit 4f5db9ab26
4209 changed files with 448228 additions and 0 deletions

414
node_modules/knex/lib/client.js generated vendored Normal file
View File

@@ -0,0 +1,414 @@
const Bluebird = require('bluebird');
const Raw = require('./raw');
const Ref = require('./ref');
const Runner = require('./runner');
const Formatter = require('./formatter');
const Transaction = require('./transaction');
const QueryBuilder = require('./query/builder');
const QueryCompiler = require('./query/compiler');
const SchemaBuilder = require('./schema/builder');
const SchemaCompiler = require('./schema/compiler');
const TableBuilder = require('./schema/tablebuilder');
const TableCompiler = require('./schema/tablecompiler');
const ColumnBuilder = require('./schema/columnbuilder');
const ColumnCompiler = require('./schema/columncompiler');
const { Pool, TimeoutError } = require('tarn');
const inherits = require('inherits');
const { EventEmitter } = require('events');
const { promisify } = require('util');
const { makeEscape } = require('./query/string');
const { uniqueId, cloneDeep, defaults } = require('lodash');
const Logger = require('./logger');
const debug = require('debug')('knex:client');
const _debugQuery = require('debug')('knex:query');
const debugBindings = require('debug')('knex:bindings');
const debugQuery = (sql, txId) => _debugQuery(sql.replace(/%/g, '%%'), txId);
const { POOL_CONFIG_OPTIONS } = require('./constants');
// The base client provides the general structure
// for a dialect specific client object.
// Base Client constructor: resolves the dialect ('client' option), sets up
// logging, connection settings (static object or provider function), the
// driver, and the connection pool.
function Client(config = {}) {
  this.config = config;
  this.logger = new Logger(config);
  //Client is a required field, so throw error if it's not supplied.
  //If 'this.dialect' is set, then this is a 'super()' call, in which case
  //'client' does not have to be set as it's already assigned on the client prototype.
  if (this.dialect && !this.config.client) {
    this.logger.warn(
      `Using 'this.dialect' to identify the client is deprecated and support for it will be removed in the future. Please use configuration option 'client' instead.`
    );
  }
  const dbClient = this.config.client || this.dialect;
  if (!dbClient) {
    throw new Error(`knex: Required configuration option 'client' is missing.`);
  }
  if (config.version) {
    this.version = config.version;
  }
  // `connection` may be a function that (re)supplies connection settings,
  // e.g. for rotating credentials; otherwise it is copied defensively.
  if (config.connection && config.connection instanceof Function) {
    this.connectionConfigProvider = config.connection;
    this.connectionConfigExpirationChecker = () => true; // causes the provider to be called on first use
  } else {
    this.connectionSettings = cloneDeep(config.connection || {});
    this.connectionConfigExpirationChecker = null;
  }
  // Only dialect subclasses define `driverName`; pool creation is skipped
  // when the user explicitly sets `pool.max === 0` (unpooled usage).
  if (this.driverName && config.connection) {
    this.initializeDriver();
    if (!config.pool || (config.pool && config.pool.max !== 0)) {
      this.initializePool(config);
    }
  }
  // Value substituted for `undefined` bindings: SQL DEFAULT, unless
  // `useNullAsDefault` is set (e.g. required for SQLite).
  this.valueForUndefined = this.raw('DEFAULT');
  if (config.useNullAsDefault) {
    this.valueForUndefined = null;
  }
}
inherits(Client, EventEmitter);
Object.assign(Client.prototype, {
// --- Factory helpers -------------------------------------------------------
// Dialect subclasses override these to return their specialized builder /
// compiler implementations; the base client wires up the generic ones.
formatter(builder) {
  return new Formatter(this, builder);
},
queryBuilder() {
  return new QueryBuilder(this);
},
queryCompiler(builder) {
  return new QueryCompiler(this, builder);
},
schemaBuilder() {
  return new SchemaBuilder(this);
},
schemaCompiler(builder) {
  return new SchemaCompiler(this, builder);
},
tableBuilder(type, tableName, fn) {
  return new TableBuilder(this, type, tableName, fn);
},
tableCompiler(tableBuilder) {
  return new TableCompiler(this, tableBuilder);
},
columnBuilder(tableBuilder, type, args) {
  return new ColumnBuilder(this, tableBuilder, type, args);
},
columnCompiler(tableBuilder, columnBuilder) {
  return new ColumnCompiler(this, tableBuilder, columnBuilder);
},
// Runner executes a built query against a connection.
runner(builder) {
  return new Runner(this, builder);
},
transaction(container, config, outerTx) {
  return new Transaction(this, container, config, outerTx);
},
// Builds a raw SQL fragment; arguments are forwarded to Raw#set.
raw() {
  return new Raw(this).set(...arguments);
},
ref() {
  return new Ref(this, ...arguments);
},
// Interpolates bindings into `sql` for logging / error messages.
// `\\?` is an escaped literal question mark; surplus `?` placeholders
// (beyond the bindings array) are left untouched.
_formatQuery(sql, bindings, timeZone) {
  bindings = bindings == null ? [] : [].concat(bindings);
  let index = 0;
  return sql.replace(/\\?\?/g, (match) => {
    if (match === '\\?') {
      return '?';
    }
    if (index === bindings.length) {
      return match;
    }
    const value = bindings[index++];
    return this._escapeBinding(value, { timeZone });
  });
},
// Escapes one binding value for interpolation; single quotes inside
// strings are doubled per the SQL standard.
_escapeBinding: makeEscape({
  escapeString(str) {
    return `'${str.replace(/'/g, "''")}'`;
  },
}),
// Executes `obj` (a compiled query object, or a raw SQL string) on
// `connection`, emitting 'query' / 'query-error' events and prefixing any
// error message with the interpolated SQL for easier debugging.
query(connection, obj) {
  if (typeof obj === 'string') obj = { sql: obj };
  obj.bindings = this.prepBindings(obj.bindings);
  // Connection tags set by the pool / transaction machinery.
  const { __knexUid, __knexTxId } = connection;
  this.emit('query', Object.assign({ __knexUid, __knexTxId }, obj));
  debugQuery(obj.sql, __knexTxId);
  debugBindings(obj.bindings, __knexTxId);
  obj.sql = this.positionBindings(obj.sql);
  return this._query(connection, obj).catch((err) => {
    err.message =
      this._formatQuery(obj.sql, obj.bindings) + ' - ' + err.message;
    this.emit(
      'query-error',
      err,
      Object.assign({ __knexUid, __knexTxId }, obj)
    );
    throw err;
  });
},
// Same prep/telemetry as query(), but delegates to the dialect's
// streaming implementation (_stream).
stream(connection, obj, stream, options) {
  if (typeof obj === 'string') obj = { sql: obj };
  obj.bindings = this.prepBindings(obj.bindings);
  const { __knexUid, __knexTxId } = connection;
  this.emit('query', Object.assign({ __knexUid, __knexTxId }, obj));
  debugQuery(obj.sql, __knexTxId);
  debugBindings(obj.bindings, __knexTxId);
  obj.sql = this.positionBindings(obj.sql);
  return this._stream(connection, obj, stream, options);
},
// Hook: dialects transform bindings before execution (identity here).
prepBindings(bindings) {
  return bindings;
},
// Hook: dialects rewrite `?` placeholders into their native parameter
// form (identity here; MSSQL emits @p0, @p1, ...).
positionBindings(sql) {
  return sql;
},
// Applies the user-configured postProcessResponse hook, if any.
postProcessResponse(resp, queryContext) {
  if (this.config.postProcessResponse) {
    return this.config.postProcessResponse(resp, queryContext);
  }
  return resp;
},
// Wraps an identifier, honoring a user-configured wrapIdentifier hook.
wrapIdentifier(value, queryContext) {
  return this.customWrapIdentifier(
    value,
    this.wrapIdentifierImpl,
    queryContext
  );
},
// Runs `value` through the user's wrapIdentifier hook (which receives the
// dialect implementation as a fallback), or the dialect default directly.
customWrapIdentifier(value, origImpl, queryContext) {
  if (this.config.wrapIdentifier) {
    return this.config.wrapIdentifier(value, origImpl, queryContext);
  }
  return origImpl(value);
},
// ANSI double-quote identifier quoting; `*` passes through unquoted and
// embedded quotes are doubled.
wrapIdentifierImpl(value) {
  return value !== '*' ? `"${value.replace(/"/g, '""')}"` : '*';
},
// Loads the dialect's npm driver, failing with install instructions when
// the package is missing.
initializeDriver() {
  try {
    this.driver = this._driver();
  } catch (e) {
    const message = `Knex: run\n$ npm install ${this.driverName} --save`;
    this.logger.error(`${message}\n${e.message}\n${e.stack}`);
    throw new Error(`${message}\n${e.message}`);
  }
},
// Default tarn pool sizing; `propagateCreateError` makes acquire() fail
// fast when the create handler rejects.
poolDefaults() {
  return { min: 2, max: 10, propagateCreateError: true };
},
// Merges the user's pool config with the defaults and adapts it into a
// tarn.js Pool options object (create/destroy/validate handlers).
getPoolSettings(poolConfig) {
  poolConfig = defaults({}, poolConfig, this.poolDefaults());
  // Warn about generic-pool options that tarn.js no longer supports.
  POOL_CONFIG_OPTIONS.forEach((option) => {
    if (option in poolConfig) {
      this.logger.warn(
        [
          `Pool config option "${option}" is no longer supported.`,
          `See https://github.com/Vincit/tarn.js for possible pool config options.`,
        ].join(' ')
      );
    }
  });
  const timeouts = [
    this.config.acquireConnectionTimeout || 60000,
    poolConfig.acquireTimeoutMillis,
  ].filter((timeout) => timeout !== undefined);
  // acquire connection timeout can be set on config or config.pool
  // choose the smallest, positive timeout setting and set on poolConfig
  poolConfig.acquireTimeoutMillis = Math.min(...timeouts);
  // Refreshes connectionSettings from the user-supplied provider function
  // when the previous settings have expired (e.g. rotated credentials).
  const updatePoolConnectionSettingsFromProvider = async () => {
    if (!this.connectionConfigProvider) {
      return; // static configuration, nothing to update
    }
    if (
      !this.connectionConfigExpirationChecker ||
      !this.connectionConfigExpirationChecker()
    ) {
      return; // not expired, reuse existing connection
    }
    const providerResult = await this.connectionConfigProvider();
    if (providerResult.expirationChecker) {
      this.connectionConfigExpirationChecker =
        providerResult.expirationChecker;
      delete providerResult.expirationChecker; // MySQL2 driver warns on receiving extra properties
    } else {
      this.connectionConfigExpirationChecker = null;
    }
    this.connectionSettings = providerResult;
  };
  return Object.assign(poolConfig, {
    // Creates (and uid-tags) a raw connection, running the optional
    // user-supplied afterCreate hook before handing it to the pool.
    create: async () => {
      await updatePoolConnectionSettingsFromProvider();
      const connection = await this.acquireRawConnection();
      connection.__knexUid = uniqueId('__knexUid');
      if (poolConfig.afterCreate) {
        await promisify(poolConfig.afterCreate)(connection);
      }
      return connection;
    },
    destroy: (connection) => {
      if (connection !== void 0) {
        return this.destroyRawConnection(connection);
      }
    },
    // Connections flagged __knex__disposed (by dialect error handlers)
    // are evicted; otherwise defer to the dialect's health check.
    validate: (connection) => {
      if (connection.__knex__disposed) {
        this.logger.warn(`Connection Error: ${connection.__knex__disposed}`);
        return false;
      }
      return this.validateConnection(connection);
    },
  });
},
// Creates the tarn.js pool (no-op with a warning if already created).
initializePool(config = this.config) {
  if (this.pool) {
    this.logger.warn('The pool has already been initialized');
    return;
  }
  const tarnPoolConfig = {
    ...this.getPoolSettings(config.pool),
  };
  // afterCreate is an internal knex param, tarn.js does not support it
  if (tarnPoolConfig.afterCreate) {
    delete tarnPoolConfig.afterCreate;
  }
  this.pool = new Pool(tarnPoolConfig);
},
// Base health check: always valid. Dialects override with real checks.
validateConnection(connection) {
  return true;
},
// Acquire a connection from the pool.
acquireConnection() {
  if (!this.pool) {
    return Bluebird.reject(new Error('Unable to acquire a connection'));
  }
  try {
    return Bluebird.try(() => this.pool.acquire().promise)
      .then((connection) => {
        debug('acquired connection from pool: %s', connection.__knexUid);
        return connection;
      })
      // Bluebird filtered catch: only tarn's TimeoutError is translated
      // into the friendlier knex timeout error below.
      .catch(TimeoutError, () => {
        throw new Bluebird.TimeoutError(
          'Knex: Timeout acquiring a connection. The pool is probably full. ' +
            'Are you missing a .transacting(trx) call?'
        );
      });
  } catch (e) {
    return Bluebird.reject(e);
  }
},
// Releases a connection back to the connection pool,
// returning a promise resolved when the connection is released.
releaseConnection(connection) {
  debug('releasing connection to pool: %s', connection.__knexUid);
  const didRelease = this.pool.release(connection);
  if (!didRelease) {
    debug('pool refused connection: %s', connection.__knexUid);
  }
  return Bluebird.resolve();
},
// Destroy the current connection pool for the client.
// Supports both promise and legacy node-style callback usage.
destroy(callback) {
  const maybeDestroy = this.pool && this.pool.destroy();
  return Bluebird.resolve(maybeDestroy)
    .then(() => {
      this.pool = void 0;
      if (typeof callback === 'function') {
        callback();
      }
    })
    .catch((err) => {
      if (typeof callback === 'function') {
        callback(err);
      }
      return Bluebird.reject(err);
    });
},
// Return the database being used by this client.
database() {
  return this.connectionSettings.database;
},
toString() {
  return '[object KnexClient]';
},
// Whether this dialect can cancel an in-flight query; overridden
// per-dialect where supported.
canCancelQuery: false,
assertCanCancelQuery() {
  if (!this.canCancelQuery) {
    throw new Error('Query cancelling not supported for this dialect');
  }
},
cancelQuery() {
  throw new Error('Query cancelling not supported for this dialect');
},
});
module.exports = Client;

38
node_modules/knex/lib/constants.js generated vendored Normal file
View File

@@ -0,0 +1,38 @@
const { keys } = require('lodash');
// The client names we'll allow in the `{name: lib}` pairing.
const CLIENT_ALIASES = Object.freeze({
pg: 'postgres',
postgresql: 'postgres',
sqlite: 'sqlite3',
});
const SUPPORTED_CLIENTS = Object.freeze(
[
'mssql',
'mysql',
'mysql2',
'oracledb',
'postgres',
'redshift',
'sqlite3',
].concat(keys(CLIENT_ALIASES))
);
const POOL_CONFIG_OPTIONS = Object.freeze([
'maxWaitingClients',
'testOnBorrow',
'fifo',
'priorityRange',
'autostart',
'evictionRunIntervalMillis',
'numTestsPerRun',
'softIdleTimeoutMillis',
'Promise',
]);
module.exports = {
CLIENT_ALIASES,
SUPPORTED_CLIENTS,
POOL_CONFIG_OPTIONS,
};

384
node_modules/knex/lib/dialects/mssql/index.js generated vendored Normal file
View File

@@ -0,0 +1,384 @@
// MSSQL Client
// -------
const { map, flatten, values } = require('lodash');
const inherits = require('inherits');
const Client = require('../../client');
const Bluebird = require('bluebird');
const Formatter = require('../../formatter');
const Transaction = require('./transaction');
const QueryCompiler = require('./query/compiler');
const SchemaCompiler = require('./schema/compiler');
const TableCompiler = require('./schema/tablecompiler');
const ColumnCompiler = require('./schema/columncompiler');
const SQL_INT4 = { MIN: -2147483648, MAX: 2147483647 };
const SQL_BIGINT_SAFE = { MIN: -9007199254740991, MAX: 9007199254740991 };
// Always initialize with the "QueryBuilder" and "QueryCompiler" objects, which
// extend the base 'lib/query/builder' and 'lib/query/compiler', respectively.
// MSSQL client constructor. Normalizes the connection config and forces
// the underlying mssql driver's internal pool down to a single
// connection, since knex manages pooling itself.
function Client_MSSQL(config = {}) {
  // #1235 mssql module wants 'server', not 'host'. This is to enforce the same
  // options object across all dialects.
  if (config && config.connection && config.connection.host) {
    config.connection.server = config.connection.host;
  }
  // mssql always creates pool :( lets try to unpool it as much as possible
  this.mssqlPoolSettings = {
    min: 1,
    max: 1,
    idleTimeoutMillis: Number.MAX_SAFE_INTEGER,
    evictionRunIntervalMillis: 0,
  };
  Client.call(this, config);
}
inherits(Client_MSSQL, Client);
Object.assign(Client_MSSQL.prototype, {
dialect: 'mssql',
driverName: 'mssql',
// Loads (and, for mssql@4.1.0, monkey-patches) the `mssql` driver.
// Only the tedious transport is supported by this dialect.
_driver() {
  const tds = require('tedious');
  const mssqlTedious = require('mssql');
  const base = require('mssql/lib/base');
  // Monkey patch mssql's tedious driver _poolCreate method to fix problem with hanging acquire
  // connection, this should be removed when https://github.com/tediousjs/node-mssql/pull/614 is
  // merged and released.
  // Also since this dialect actually always uses tedious driver (msnodesqlv8 driver should be
  // required in different way), it might be better to use tedious directly, because mssql
  // driver uses always internally extra generic-pool and just adds one unnecessary layer of
  // indirection between database and knex and mssql driver has been lately without maintainer
  // (changing implementation to use tedious will be breaking change though).
  // TODO: remove mssql implementation all together and use tedious directly
  /* istanbul ignore next */
  const mssqlVersion = require('mssql/package.json').version;
  /* istanbul ignore next */
  if (mssqlVersion === '4.1.0') {
    // Exactly 4.1.0 receives the patches below; 5.x already contains the
    // fix upstream, anything older is rejected.
    mssqlTedious.ConnectionPool.prototype.release = release;
    mssqlTedious.ConnectionPool.prototype._poolCreate = _poolCreate;
  } else {
    const [major] = mssqlVersion.split('.');
    // if version is not ^5.0.0
    if (major < 5) {
      throw new Error(
        'This knex version only supports mssql driver versions 4.1.0 and 5.0.0+'
      );
    }
  }
  /* istanbul ignore next */
  // in some rare situations release is called when stream is interrupted, but
  // after pool is already destroyed
  function release(connection) {
    if (this.pool) {
      this.pool.release(connection);
    }
  }
  /* istanbul ignore next */
  function _poolCreate() {
    // implementation is copy-pasted from https://github.com/tediousjs/node-mssql/pull/614
    return new base.Promise((resolve, reject) => {
      // Translate the mssql config object into tedious connection options.
      const cfg = {
        userName: this.config.user,
        password: this.config.password,
        server: this.config.server,
        options: Object.assign({}, this.config.options),
        domain: this.config.domain,
      };
      cfg.options.database = this.config.database;
      cfg.options.port = this.config.port;
      cfg.options.connectTimeout =
        this.config.connectionTimeout || this.config.timeout || 15000;
      cfg.options.requestTimeout =
        this.config.requestTimeout != null
          ? this.config.requestTimeout
          : 15000;
      cfg.options.tdsVersion = cfg.options.tdsVersion || '7_4';
      cfg.options.rowCollectionOnDone = false;
      cfg.options.rowCollectionOnRequestCompletion = false;
      cfg.options.useColumnNames = false;
      cfg.options.appName = cfg.options.appName || 'node-mssql';
      // tedious always connect via tcp when port is specified
      if (cfg.options.instanceName) delete cfg.options.port;
      if (isNaN(cfg.options.requestTimeout))
        cfg.options.requestTimeout = 15000;
      if (cfg.options.requestTimeout === Infinity)
        cfg.options.requestTimeout = 0;
      if (cfg.options.requestTimeout < 0) cfg.options.requestTimeout = 0;
      if (this.config.debug) {
        cfg.options.debug = {
          packet: true,
          token: true,
          data: true,
          payload: true,
        };
      }
      const tedious = new tds.Connection(cfg);
      // prevent calling resolve again on end event
      let alreadyResolved = false;
      function safeResolve(err) {
        if (!alreadyResolved) {
          alreadyResolved = true;
          resolve(err);
        }
      }
      function safeReject(err) {
        if (!alreadyResolved) {
          alreadyResolved = true;
          reject(err);
        }
      }
      // An 'end' firing before 'connect' means the handshake never
      // completed.
      tedious.once('end', (evt) => {
        safeReject(
          new base.ConnectionError(
            'Connection ended unexpectedly during connecting'
          )
        );
      });
      tedious.once('connect', (err) => {
        if (err) {
          err = new base.ConnectionError(err);
          return safeReject(err);
        }
        safeResolve(tedious);
      });
      tedious.on('error', (err) => {
        if (err.code === 'ESOCKET') {
          // Socket errors only flag the connection; they surface later
          // via the pool's validate step.
          tedious.hasError = true;
          return;
        }
        this.emit('error', err);
      });
      if (this.config.debug) {
        tedious.on('debug', this.emit.bind(this, 'debug', tedious));
      }
    });
  }
  return mssqlTedious;
},
// Dialect-specific factory overrides; arguments are forwarded verbatim to
// the MSSQL-flavored implementations.
formatter() {
  return new MSSQL_Formatter(this, ...arguments);
},
transaction() {
  return new Transaction(this, ...arguments);
},
queryCompiler() {
  return new QueryCompiler(this, ...arguments);
},
schemaCompiler() {
  return new SchemaCompiler(this, ...arguments);
},
tableCompiler() {
  return new TableCompiler(this, ...arguments);
},
columnCompiler() {
  return new ColumnCompiler(this, ...arguments);
},
// T-SQL bracket quoting; brackets and quotes inside the identifier are
// stripped (not escaped), and `*` passes through.
wrapIdentifierImpl(value) {
  if (value === '*') {
    return '*';
  }
  return `[${value.replace(/[[\]']+/g, '')}]`;
},
// Get a raw connection, called by the `pool` whenever a new
// connection needs to be added to the pool.
acquireRawConnection() {
  return new Bluebird((resolver, rejecter) => {
    const settings = Object.assign({}, this.connectionSettings);
    // Force the driver's internal pool to the single-connection settings
    // configured in the constructor; knex does the real pooling.
    settings.pool = this.mssqlPoolSettings;
    const connection = new this.driver.ConnectionPool(settings);
    connection.connect((err) => {
      if (err) {
        return rejecter(err);
      }
      // Flag the connection so the knex pool's validate() evicts it
      // after a driver-level error.
      connection.on('error', (err) => {
        connection.__knex__disposed = err;
      });
      resolver(connection);
    });
  });
},
validateConnection(connection) {
if (connection.connected === true) {
return true;
}
return false;
},
// Used to explicitly close a connection, called internally by the pool
// when a connection times out or the pool is shutdown.
destroyRawConnection(connection) {
  return connection.close().catch((err) => {
    // sometimes close will reject just because the pool has already been
    // destroyed internally by the driver; there is nothing we can do here
  });
},
// Position the bindings for the query.
positionBindings(sql) {
let questionCount = -1;
return sql.replace(/\?/g, function() {
questionCount += 1;
return `@p${questionCount}`;
});
},
// Grab a connection, run the query via the MSSQL streaming interface,
// and pass that through to the stream we've sent back to the client.
_stream(connection, obj, stream) {
  if (!obj || typeof obj === 'string') obj = { sql: obj };
  return new Bluebird((resolver, rejecter) => {
    stream.on('error', (err) => {
      rejecter(err);
    });
    stream.on('end', resolver);
    const { sql } = obj;
    if (!sql) return resolver();
    // Run on the active transaction's request when one exists.
    const req = (connection.tx_ || connection).request();
    //req.verbose = true;
    req.multiple = true;
    req.stream = true;
    if (obj.bindings) {
      for (let i = 0; i < obj.bindings.length; i++) {
        this._setReqInput(req, i, obj.bindings[i]);
      }
    }
    req.pipe(stream);
    req.query(sql);
  });
},
// Runs the query on the specified connection, providing the bindings
// and any other necessary prep work.
_query(connection, obj) {
  const client = this;
  if (!obj || typeof obj === 'string') obj = { sql: obj };
  return new Bluebird((resolver, rejecter) => {
    const { sql } = obj;
    if (!sql) return resolver();
    // Run on the active transaction's request when one exists.
    const req = (connection.tx_ || connection).request();
    // req.verbose = true;
    req.multiple = true;
    if (obj.bindings) {
      for (let i = 0; i < obj.bindings.length; i++) {
        client._setReqInput(req, i, obj.bindings[i]);
      }
    }
    req.query(sql, (err, recordset) => {
      if (err) {
        return rejecter(err);
      }
      // Only the first recordset is meaningful for compiled queries.
      obj.response = recordset.recordsets[0];
      resolver(obj);
    });
  });
},
// sets a request input parameter. Detects bigints and decimals and sets type appropriately.
_setReqInput(req, i, binding) {
if (typeof binding == 'number') {
if (binding % 1 !== 0) {
req.input(`p${i}`, this.driver.Decimal(38, 10), binding);
} else if (binding < SQL_INT4.MIN || binding > SQL_INT4.MAX) {
if (binding < SQL_BIGINT_SAFE.MIN || binding > SQL_BIGINT_SAFE.MAX) {
throw new Error(
`Bigint must be safe integer or must be passed as string, saw ${binding}`
);
}
req.input(`p${i}`, this.driver.BigInt, binding);
} else {
req.input(`p${i}`, this.driver.Int, binding);
}
} else {
req.input(`p${i}`, binding);
}
},
// Process the response as returned from the query.
processResponse(obj, runner) {
  if (obj == null) return;
  const { response, method } = obj;
  // Compiled queries may attach a custom output() post-processor.
  if (obj.output) return obj.output.call(runner, response);
  switch (method) {
    case 'select':
    case 'pluck':
    case 'first':
      if (method === 'pluck') return map(response, obj.pluck);
      return method === 'first' ? response[0] : response;
    case 'insert':
    case 'del':
    case 'update':
    case 'counter':
      if (obj.returning) {
        // '@@rowcount' is the synthetic returning value the compiler adds
        // when no explicit `returning` was requested; the rowcount comes
        // back as an unnamed column.
        if (obj.returning === '@@rowcount') {
          return response[0][''];
        }
        if (
          (Array.isArray(obj.returning) && obj.returning.length > 1) ||
          obj.returning[0] === '*'
        ) {
          return response;
        }
        // return an array with values if only one returning value was specified
        return flatten(map(response, values));
      }
      return response;
    default:
      return response;
  }
},
});
// Formatter subclass adding MSSQL-specific helpers.
class MSSQL_Formatter extends Formatter {
  // Accepts a string or array of columns to wrap as appropriate, prepending
  // `prefix` (e.g. 'inserted.' / 'deleted.') to each wrapped column.
  columnizeWithPrefix(prefix, target) {
    const columns = typeof target === 'string' ? [target] : target;
    return columns.map((column) => prefix + this.wrap(column)).join(', ');
  }
}
module.exports = Client_MSSQL;

264
node_modules/knex/lib/dialects/mssql/query/compiler.js generated vendored Normal file
View File

@@ -0,0 +1,264 @@
// MSSQL Query Compiler
// ------
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const { isEmpty, compact, identity } = require('lodash');
// Compiler for MSSQL-flavored SQL; delegates shared behavior to the base
// QueryCompiler.
function QueryCompiler_MSSQL(client, builder) {
  QueryCompiler.call(this, client, builder);
}
inherits(QueryCompiler_MSSQL, QueryCompiler);
// Clause ordering for compiled MSSQL select statements. Note: `limit`
// compiles to TOP (inside columns()) and `offset` to OFFSET..FETCH, so
// both components are still listed even though limit() itself emits ''.
const components = [
  'columns',
  'join',
  'lock',
  'where',
  'union',
  'group',
  'having',
  'order',
  'limit',
  'offset',
];
Object.assign(QueryCompiler_MSSQL.prototype, {
_emptyInsertValue: 'default values',
select() {
const sql = this.with();
const statements = components.map((component) => this[component](this));
return sql + compact(statements).join(' ');
},
// Compiles an "insert" query, allowing for multiple
// inserts using a single query statement.
insert() {
  const insertValues = this.single.insert || [];
  let sql = this.with() + `insert into ${this.tableName} `;
  const { returning } = this.single;
  // MSSQL places the OUTPUT clause between the column list and VALUES.
  const returningSql = returning
    ? this._returning('insert', returning) + ' '
    : '';
  if (Array.isArray(insertValues)) {
    if (insertValues.length === 0) {
      return '';
    }
  } else if (typeof insertValues === 'object' && isEmpty(insertValues)) {
    // Insert of an empty object compiles to `insert ... default values`.
    return {
      sql: sql + returningSql + this._emptyInsertValue,
      returning,
    };
  }
  const insertData = this._prepInsert(insertValues);
  if (typeof insertData === 'string') {
    // Raw insert data is appended verbatim.
    sql += insertData;
  } else {
    if (insertData.columns.length) {
      sql += `(${this.formatter.columnize(insertData.columns)}`;
      sql += `) ${returningSql}values (`;
      // One parenthesized tuple per row.
      let i = -1;
      while (++i < insertData.values.length) {
        if (i !== 0) sql += '), (';
        sql += this.formatter.parameterize(
          insertData.values[i],
          this.client.valueForUndefined
        );
      }
      sql += ')';
    } else if (insertValues.length === 1 && insertValues[0]) {
      sql += returningSql + this._emptyInsertValue;
    } else {
      // Nothing usable to insert.
      sql = '';
    }
  }
  return {
    sql,
    returning,
  };
},
// Compiles an `update` query, allowing for a return value. When nothing is
// returned explicitly, `;select @@rowcount` is appended so callers still
// receive the affected-row count.
update() {
  const top = this.top();
  const withSQL = this.with();
  const updates = this._prepUpdate(this.single.update);
  const join = this.join();
  const where = this.where();
  const order = this.order();
  const { returning } = this.single;
  return {
    sql:
      withSQL +
      `update ${top ? top + ' ' : ''}${this.tableName}` +
      ' set ' +
      updates.join(', ') +
      (returning ? ` ${this._returning('update', returning)}` : '') +
      (join ? ` from ${this.tableName} ${join}` : '') +
      (where ? ` ${where}` : '') +
      (order ? ` ${order}` : '') +
      (!returning ? this._returning('rowcount', '@@rowcount') : ''),
    returning: returning || '@@rowcount',
  };
},
// Compiles a `delete` query. As with update(), a trailing
// `;select @@rowcount` supplies the affected-row count when no explicit
// `returning` was requested.
del() {
  // Make sure tableName is processed by the formatter first.
  const withSQL = this.with();
  const { tableName } = this;
  const wheres = this.where();
  const { returning } = this.single;
  return {
    sql:
      withSQL +
      `delete from ${tableName}` +
      (returning ? ` ${this._returning('del', returning)}` : '') +
      (wheres ? ` ${wheres}` : '') +
      (!returning ? this._returning('rowcount', '@@rowcount') : ''),
    returning: returning || '@@rowcount',
  };
},
// Compiles the columns in the query, specifying if an item was distinct.
// Also emits the TOP clause, since MSSQL puts the limit in the select list.
columns() {
  let distinctClause = '';
  if (this.onlyUnions()) return '';
  const top = this.top();
  const columns = this.grouped.columns || [];
  let i = -1,
    sql = [];
  if (columns) {
    while (++i < columns.length) {
      const stmt = columns[i];
      if (stmt.distinct) distinctClause = 'distinct ';
      if (stmt.distinctOn) {
        // distinctOn produces its own clause and consumes the statement.
        distinctClause = this.distinctOn(stmt.value);
        continue;
      }
      if (stmt.type === 'aggregate') {
        sql.push(...this.aggregate(stmt));
      } else if (stmt.type === 'aggregateRaw') {
        sql.push(this.aggregateRaw(stmt));
      } else if (stmt.value && stmt.value.length > 0) {
        sql.push(this.formatter.columnize(stmt.value));
      }
    }
  }
  // No explicit columns: select everything.
  if (sql.length === 0) sql = ['*'];
  return (
    `select ${distinctClause}` +
    (top ? top + ' ' : '') +
    sql.join(', ') +
    (this.tableName ? ` from ${this.tableName}` : '')
  );
},
// Builds the OUTPUT clause (or the trailing @@rowcount select) for the
// given statement kind; returns '' / undefined when no value is requested.
_returning(method, value) {
  switch (method) {
    case 'update':
    case 'insert':
      // New row values are exposed via the `inserted.` pseudo-table.
      return value
        ? `output ${this.formatter.columnizeWithPrefix('inserted.', value)}`
        : '';
    case 'del':
      // Deleted row values come from the `deleted.` pseudo-table.
      return value
        ? `output ${this.formatter.columnizeWithPrefix('deleted.', value)}`
        : '';
    case 'rowcount':
      return value ? ';select @@rowcount' : '';
  }
},
// Compiles a `truncate` query.
truncate() {
  return `truncate table ${this.tableName}`;
},
// Table hint used for `forUpdate` row locking.
forUpdate() {
  // this doesn't work exactly as it should, one should also mention index while locking
  // https://stackoverflow.com/a/9818448/360060
  return 'with (UPDLOCK)';
},
// Table hint used for `forShare` locking.
forShare() {
  // http://www.sqlteam.com/article/introduction-to-locking-in-sql-server
  return 'with (HOLDLOCK)';
},
// Compiles a `columnInfo` query against information_schema.columns,
// defaulting to the 'dbo' schema when none was specified.
columnInfo() {
  const column = this.single.columnInfo;
  let schema = this.single.schema;
  // The user may have specified a custom wrapIdentifier function in the config. We
  // need to run the identifiers through that function, but not format them as
  // identifiers otherwise.
  const table = this.client.customWrapIdentifier(this.single.table, identity);
  if (schema) {
    schema = this.client.customWrapIdentifier(schema, identity);
  }
  let sql = `select * from information_schema.columns where table_name = ? and table_catalog = ?`;
  const bindings = [table, this.client.database()];
  if (schema) {
    sql += ' and table_schema = ?';
    bindings.push(schema);
  } else {
    sql += ` and table_schema = 'dbo'`;
  }
  return {
    sql,
    bindings: bindings,
    // Reshapes the information_schema rows into a {column: meta} map, or
    // a single column's meta when one specific column was requested.
    output(resp) {
      const out = resp.reduce(function(columns, val) {
        columns[val.COLUMN_NAME] = {
          defaultValue: val.COLUMN_DEFAULT,
          type: val.DATA_TYPE,
          maxLength: val.CHARACTER_MAXIMUM_LENGTH,
          nullable: val.IS_NULLABLE === 'YES',
        };
        return columns;
      }, {});
      return (column && out[column]) || out;
    },
  };
},
// TOP (n) clause — used only when a limit is set and no offset is present.
top() {
  const noLimit = !this.single.limit && this.single.limit !== 0;
  const noOffset = !this.single.offset;
  if (noLimit || !noOffset) return '';
  return `top (${this.formatter.parameter(this.single.limit)})`;
},
// `limit` is realized via top() / offset(); nothing to emit here.
limit() {
  return '';
},
// OFFSET..FETCH clause; also carries the limit when an offset is present
// (MSSQL requires OFFSET before FETCH NEXT).
offset() {
  const noLimit = !this.single.limit && this.single.limit !== 0;
  const noOffset = !this.single.offset;
  if (noOffset) return '';
  let offset = `offset ${
    noOffset ? '0' : this.formatter.parameter(this.single.offset)
  } rows`;
  if (!noLimit) {
    offset += ` fetch next ${this.formatter.parameter(
      this.single.limit
    )} rows only`;
  }
  return offset;
},
});
// Set the QueryBuilder & QueryCompiler on the client object,
// in case anyone wants to modify things to suit their own purposes.
module.exports = QueryCompiler_MSSQL;

View File

@@ -0,0 +1,103 @@
// MSSQL Column Compiler
// -------
const inherits = require('inherits');
const ColumnCompiler = require('../../../schema/columncompiler');
// MSSQL column compiler; restricts the set of supported column modifiers
// (several base modifiers are MySQL-only and warn below).
function ColumnCompiler_MSSQL() {
  ColumnCompiler.apply(this, arguments);
  this.modifiers = ['nullable', 'defaultTo', 'first', 'after', 'comment'];
}
inherits(ColumnCompiler_MSSQL, ColumnCompiler);
// Types
// ------
// Knex column type names mapped to their T-SQL equivalents; entries are
// either literal SQL strings or functions of the builder's arguments.
Object.assign(ColumnCompiler_MSSQL.prototype, {
  increments: 'int identity(1,1) not null primary key',
  bigincrements: 'bigint identity(1,1) not null primary key',
  bigint: 'bigint',
  // MSSQL's `float` is already double-precision; precision/scale ignored.
  double(precision, scale) {
    return 'float';
  },
  floating(precision, scale) {
    // ignore precision / scale which is mysql specific stuff
    return `float`;
  },
  integer() {
    // mssql does not support length
    return 'int';
  },
  mediumint: 'int',
  smallint: 'smallint',
  tinyint() {
    // mssql does not support length
    return 'tinyint';
  },
  // nvarchar for full Unicode support; default length 255.
  varchar(length) {
    return `nvarchar(${this._num(length, 255)})`;
  },
  text: 'nvarchar(max)',
  mediumtext: 'nvarchar(max)',
  longtext: 'nvarchar(max)',
  // TODO: mssql supports check constraints as of SQL Server 2008
  // so make enu here more like postgres
  enu: 'nvarchar(100)',
  uuid: 'uniqueidentifier',
  datetime: 'datetime2',
  // datetimeoffset preserves the time zone; datetime2 otherwise.
  timestamp({ useTz = false } = {}) {
    return useTz ? 'datetimeoffset' : 'datetime2';
  },
  bit(length) {
    if (length > 1) {
      this.client.logger.warn('Bit field is exactly 1 bit length for MSSQL');
    }
    return 'bit';
  },
  binary(length) {
    return length ? `varbinary(${this._num(length)})` : 'varbinary(max)';
  },
  bool: 'bit',
  // Modifiers
  // ------
  // Column-ordering modifiers are MySQL-only; warn and emit nothing.
  first() {
    this.client.logger.warn('Column first modifier not available for MSSQL');
    return '';
  },
  after(column) {
    this.client.logger.warn('Column after modifier not available for MSSQL');
    return '';
  },
  // Column comments are not emitted; only the length is validated.
  comment(comment) {
    if (comment && comment.length > 255) {
      this.client.logger.warn(
        'Your comment is longer than the max comment length for MSSQL'
      );
    }
    return '';
  },
});
module.exports = ColumnCompiler_MSSQL;

View File

@@ -0,0 +1,59 @@
// MSSQL Schema Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler = require('../../../schema/compiler');
// MSSQL schema compiler; delegates shared behavior to the base compiler.
function SchemaCompiler_MSSQL(client, builder) {
  SchemaCompiler.call(this, client, builder);
}
inherits(SchemaCompiler_MSSQL, SchemaCompiler);
Object.assign(SchemaCompiler_MSSQL.prototype, {
  dropTablePrefix: 'DROP TABLE ',
  // Guarded drop: this MSSQL target lacks DROP TABLE IF EXISTS, so check
  // object_id first.
  dropTableIfExists(tableName) {
    const name = this.formatter.wrap(prefixedTableName(this.schema, tableName));
    this.pushQuery(
      `if object_id('${name}', 'U') is not null DROP TABLE ${name}`
    );
  },
  // Rename a table on the schema.
  renameTable(tableName, to) {
    this.pushQuery(
      `exec sp_rename ${this.formatter.parameter(
        prefixedTableName(this.schema, tableName)
      )}, ${this.formatter.parameter(to)}`
    );
  },
  // Check whether a table exists on the query.
  hasTable(tableName) {
    const formattedTable = this.formatter.parameter(
      this.formatter.wrap(prefixedTableName(this.schema, tableName))
    );
    const sql =
      `select object_id from sys.tables ` +
      `where object_id = object_id(${formattedTable})`;
    this.pushQuery({ sql, output: (resp) => resp.length > 0 });
  },
  // Check whether a column exists on the schema.
  hasColumn(tableName, column) {
    const formattedColumn = this.formatter.parameter(column);
    const formattedTable = this.formatter.parameter(
      this.formatter.wrap(prefixedTableName(this.schema, tableName))
    );
    const sql =
      `select object_id from sys.columns ` +
      `where name = ${formattedColumn} ` +
      `and object_id = object_id(${formattedTable})`;
    this.pushQuery({ sql, output: (resp) => resp.length > 0 });
  },
});
// Joins an optional schema prefix onto a table name ("schema.table");
// falsy prefixes yield the bare table name.
function prefixedTableName(prefix, table) {
  if (!prefix) {
    return table;
  }
  return `${prefix}.${table}`;
}
module.exports = SchemaCompiler_MSSQL;

View File

@@ -0,0 +1,228 @@
/* eslint max-len:0 */
// MSSQL Table Builder & Compiler
// -------
const inherits = require('inherits');
const TableCompiler = require('../../../schema/tablecompiler');
const helpers = require('../../../helpers');
// Table Compiler
// ------
// MSSQL table compiler; inherits shared behavior from the base
// TableCompiler.
function TableCompiler_MSSQL() {
  TableCompiler.apply(this, arguments);
}
inherits(TableCompiler_MSSQL, TableCompiler);
Object.assign(TableCompiler_MSSQL.prototype, {
// These clauses are emitted as separate ALTER TABLE statements after the
// CREATE TABLE itself.
createAlterTableMethods: ['foreign', 'primary'],
// Builds the CREATE TABLE statement; `ifNot` wraps it in an object_id
// existence guard since this target lacks CREATE TABLE IF NOT EXISTS.
createQuery(columns, ifNot) {
  const createStatement = ifNot
    ? `if object_id('${this.tableName()}', 'U') is null CREATE TABLE `
    : 'CREATE TABLE ';
  const sql =
    createStatement +
    this.tableName() +
    (this._formatting ? ' (\n ' : ' (') +
    columns.sql.join(this._formatting ? ',\n ' : ', ') +
    ')';
  // Table comments are not emitted for MSSQL; only validate the length.
  if (this.single.comment) {
    const { comment } = this.single;
    if (comment.length > 60)
      this.client.logger.warn(
        'The max length for a table comment is 60 characters'
      );
  }
  this.pushQuery(sql);
},
lowerCase: false,
addColumnsPrefix: 'ADD ',
dropColumnPrefix: 'DROP COLUMN ',
alterColumnPrefix: 'ALTER COLUMN ',
// Compiles column add. Multiple columns need only one ADD clause (not one ADD per column) so core addColumns doesn't work. #1348
addColumns(columns, prefix) {
prefix = prefix || this.addColumnsPrefix;
if (columns.sql.length > 0) {
this.pushQuery({
sql:
(this.lowerCase ? 'alter table ' : 'ALTER TABLE ') +
this.tableName() +
' ' +
prefix +
columns.sql.join(', '),
bindings: columns.bindings,
});
}
},
// Compiles column drop. Multiple columns need only one DROP clause (not one DROP per column) so core dropColumn doesn't work. #1348
dropColumn() {
const _this2 = this;
const columns = helpers.normalizeArr.apply(null, arguments);
const drops = (Array.isArray(columns) ? columns : [columns]).map((column) =>
_this2.formatter.wrap(column)
);
this.pushQuery(
(this.lowerCase ? 'alter table ' : 'ALTER TABLE ') +
this.tableName() +
' ' +
this.dropColumnPrefix +
drops.join(', ')
);
},
// Compiles the comment on the table.
comment() {},
changeType() {},
// Renames a column on the table.
renameColumn(from, to) {
this.pushQuery(
`exec sp_rename ${this.formatter.parameter(
this.tableName() + '.' + from
)}, ${this.formatter.parameter(to)}, 'COLUMN'`
);
},
dropFKRefs(runner, refs) {
const formatter = this.client.formatter(this.tableBuilder);
return Promise.all(
refs.map(function(ref) {
const constraintName = formatter.wrap(ref.CONSTRAINT_NAME);
const tableName = formatter.wrap(ref.TABLE_NAME);
return runner.query({
sql: `ALTER TABLE ${tableName} DROP CONSTRAINT ${constraintName}`,
});
})
);
},
createFKRefs(runner, refs) {
const formatter = this.client.formatter(this.tableBuilder);
return Promise.all(
refs.map(function(ref) {
const tableName = formatter.wrap(ref.TABLE_NAME);
const keyName = formatter.wrap(ref.CONSTRAINT_NAME);
const column = formatter.columnize(ref.COLUMN_NAME);
const references = formatter.columnize(ref.REFERENCED_COLUMN_NAME);
const inTable = formatter.wrap(ref.REFERENCED_TABLE_NAME);
const onUpdate = ` ON UPDATE ${ref.UPDATE_RULE}`;
const onDelete = ` ON DELETE ${ref.DELETE_RULE}`;
return runner.query({
sql:
`ALTER TABLE ${tableName} ADD CONSTRAINT ${keyName}` +
' FOREIGN KEY (' +
column +
') REFERENCES ' +
inTable +
' (' +
references +
')' +
onUpdate +
onDelete,
});
})
);
},
index(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
this.pushQuery(
`CREATE INDEX ${indexName} ON ${this.tableName()} (${this.formatter.columnize(
columns
)})`
);
},
primary(columns, constraintName) {
constraintName = constraintName
? this.formatter.wrap(constraintName)
: this.formatter.wrap(`${this.tableNameRaw}_pkey`);
if (!this.forCreate) {
this.pushQuery(
`ALTER TABLE ${this.tableName()} ADD CONSTRAINT ${constraintName} PRIMARY KEY (${this.formatter.columnize(
columns
)})`
);
} else {
this.pushQuery(
`CONSTRAINT ${constraintName} PRIMARY KEY (${this.formatter.columnize(
columns
)})`
);
}
},
unique(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, columns);
if (!Array.isArray(columns)) {
columns = [columns];
}
const whereAllTheColumnsAreNotNull = columns
.map((column) => this.formatter.columnize(column) + ' IS NOT NULL')
.join(' AND ');
// make unique constraint that allows null https://stackoverflow.com/a/767702/360060
// to be more or less compatible with other DBs (if any of the columns is NULL then "duplicates" are allowed)
this.pushQuery(
`CREATE UNIQUE INDEX ${indexName} ON ${this.tableName()} (${this.formatter.columnize(
columns
)}) WHERE ${whereAllTheColumnsAreNotNull}`
);
},
// Compile a drop index command.
dropIndex(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
this.pushQuery(`DROP INDEX ${indexName} ON ${this.tableName()}`);
},
// Compile a drop foreign key command.
dropForeign(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('foreign', this.tableNameRaw, columns);
this.pushQuery(
`ALTER TABLE ${this.tableName()} DROP CONSTRAINT ${indexName}`
);
},
// Compile a drop primary key command.
dropPrimary(constraintName) {
constraintName = constraintName
? this.formatter.wrap(constraintName)
: this.formatter.wrap(`${this.tableNameRaw}_pkey`);
this.pushQuery(
`ALTER TABLE ${this.tableName()} DROP CONSTRAINT ${constraintName}`
);
},
// Compile a drop unique key command.
dropUnique(column, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, column);
this.pushQuery(`DROP INDEX ${indexName} ON ${this.tableName()}`);
},
});
module.exports = TableCompiler_MSSQL;

107
node_modules/knex/lib/dialects/mssql/transaction.js generated vendored Normal file
View File

@@ -0,0 +1,107 @@
const Bluebird = require('bluebird');
const Transaction = require('../../transaction');
const { isUndefined } = require('lodash');
const debug = require('debug')('knex:tx');
// MSSQL transaction: drives the mssql driver's transaction object, which is
// stored on the connection as `conn.tx_` (set up in acquireConnection below).
module.exports = class Transaction_MSSQL extends Transaction {
  // Begin the driver-level transaction and settle this trx promise on result.
  begin(conn) {
    debug('%s: begin', this.txid);
    return conn.tx_.begin().then(this._resolver, this._rejecter);
  }
  // Create a savepoint named after the transaction id (for nested trx).
  savepoint(conn) {
    debug('%s: savepoint at', this.txid);
    return Bluebird.resolve().then(() =>
      this.query(conn, `SAVE TRANSACTION ${this.txid}`)
    );
  }
  commit(conn, value) {
    this._completed = true;
    debug('%s: commit', this.txid);
    return conn.tx_.commit().then(() => this._resolver(value), this._rejecter);
  }
  release(conn, value) {
    return this._resolver(value);
  }
  rollback(conn, error) {
    this._completed = true;
    debug('%s: rolling back', this.txid);
    return conn.tx_.rollback().then(
      () => {
        let err = error;
        // No rollback reason given: either resolve quietly (opt-in flag)
        // or reject with a synthesized Error.
        if (isUndefined(error)) {
          if (this.doNotRejectOnRollback) {
            this._resolver();
            return;
          }
          err = new Error(`Transaction rejected with non-error: ${error}`);
        }
        this._rejecter(err);
      },
      (err) => {
        // Rollback itself failed; keep the original cause attached.
        if (error) err.originalError = error;
        return this._rejecter(err);
      }
    );
  }
  // Roll back to the savepoint created by `savepoint`, then reject.
  rollbackTo(conn, error) {
    debug('%s: rolling backTo', this.txid);
    return Bluebird.resolve()
      .then(() =>
        this.query(conn, `ROLLBACK TRANSACTION ${this.txid}`, 2, error)
      )
      .then(() => this._rejecter(error));
  }
  // Acquire a connection and create a disposer - either using the one passed
  // via config or getting one off the client. The disposer will be called once
  // the original promise is marked completed.
  acquireConnection(config, cb) {
    const configConnection = config && config.connection;
    return new Bluebird((resolve, reject) => {
      try {
        resolve(
          (this.outerTx ? this.outerTx.conn : null) ||
            configConnection ||
            this.client.acquireConnection()
        );
      } catch (e) {
        reject(e);
      }
    })
      .then((conn) => {
        conn.__knexTxId = this.txid;
        // Only the outermost transaction owns the driver tx object.
        if (!this.outerTx) {
          this.conn = conn;
          conn.tx_ = conn.transaction();
        }
        return conn;
      })
      .then(async (conn) => {
        try {
          return await cb(conn);
        } finally {
          if (!this.outerTx) {
            if (conn.tx_) {
              if (!this._completed) {
                // Callback finished without commit/rollback; force rollback.
                debug('%s: unreleased transaction', this.txid);
                conn.tx_.rollback();
              }
              conn.tx_ = null;
            }
            this.conn = null;
            if (!configConnection) {
              debug('%s: releasing connection', this.txid);
              this.client.releaseConnection(conn);
            } else {
              // Externally supplied connections are owned by the caller.
              debug('%s: not releasing external connection', this.txid);
            }
          }
        }
      });
  }
};

198
node_modules/knex/lib/dialects/mysql/index.js generated vendored Normal file
View File

@@ -0,0 +1,198 @@
// MySQL Client
// -------
const inherits = require('inherits');
const { map, defer } = require('lodash');
const { promisify } = require('util');
const Client = require('../../client');
const Bluebird = require('bluebird');
const Transaction = require('./transaction');
const QueryCompiler = require('./query/compiler');
const SchemaCompiler = require('./schema/compiler');
const TableCompiler = require('./schema/tablecompiler');
const ColumnCompiler = require('./schema/columncompiler');
const { makeEscape } = require('../../query/string');
// Always initialize with the "QueryBuilder" and "QueryCompiler"
// objects, which extend the base 'lib/query/builder' and
// 'lib/query/compiler', respectively.
// MySQL client constructor; all configuration handling is in the base Client.
function Client_MySQL(config) {
  Client.call(this, config);
}
inherits(Client_MySQL, Client);
// MySQL dialect overrides on the base Client.
Object.assign(Client_MySQL.prototype, {
  dialect: 'mysql',
  driverName: 'mysql',
  _driver() {
    return require('mysql');
  },
  queryCompiler() {
    return new QueryCompiler(this, ...arguments);
  },
  schemaCompiler() {
    return new SchemaCompiler(this, ...arguments);
  },
  tableCompiler() {
    return new TableCompiler(this, ...arguments);
  },
  columnCompiler() {
    return new ColumnCompiler(this, ...arguments);
  },
  transaction() {
    return new Transaction(this, ...arguments);
  },
  _escapeBinding: makeEscape(),
  // Quote identifiers with backticks, doubling any embedded backticks;
  // the bare '*' is passed through unquoted.
  wrapIdentifierImpl(value) {
    return value !== '*' ? `\`${value.replace(/`/g, '``')}\`` : '*';
  },
  // Get a raw connection, called by the `pool` whenever a new
  // connection needs to be added to the pool.
  acquireRawConnection() {
    return new Bluebird((resolver, rejecter) => {
      const connection = this.driver.createConnection(this.connectionSettings);
      // Mark the connection as disposed on any async error so the pool
      // validation can discard it.
      connection.on('error', (err) => {
        connection.__knex__disposed = err;
      });
      connection.connect((err) => {
        if (err) {
          // if connection is rejected, remove listener that was registered above...
          connection.removeAllListeners();
          return rejecter(err);
        }
        resolver(connection);
      });
    });
  },
  // Used to explicitly close a connection, called internally by the pool
  // when a connection times out or the pool is shutdown.
  async destroyRawConnection(connection) {
    try {
      const end = promisify((cb) => connection.end(cb));
      return await end();
    } catch (err) {
      connection.__knex__disposed = err;
    } finally {
      // see discussion https://github.com/knex/knex/pull/3483
      defer(() => connection.removeAllListeners());
    }
  },
  // A pooled connection is reusable only while the driver reports it as
  // connected/authenticated.
  validateConnection(connection) {
    if (
      connection.state === 'connected' ||
      connection.state === 'authenticated'
    ) {
      return true;
    }
    return false;
  },
  // Grab a connection, run the query via the MySQL streaming interface,
  // and pass that through to the stream we've sent back to the client.
  _stream(connection, obj, stream, options) {
    options = options || {};
    const queryOptions = Object.assign({ sql: obj.sql }, obj.options);
    return new Bluebird((resolver, rejecter) => {
      stream.on('error', rejecter);
      stream.on('end', resolver);
      const queryStream = connection
        .query(queryOptions, obj.bindings)
        .stream(options);
      queryStream.on('error', (err) => {
        rejecter(err);
        stream.emit('error', err);
      });
      queryStream.pipe(stream);
    });
  },
  // Runs the query on the specified connection, providing the bindings
  // and any other necessary prep work.
  _query(connection, obj) {
    if (!obj || typeof obj === 'string') obj = { sql: obj };
    return new Bluebird(function(resolver, rejecter) {
      if (!obj.sql) {
        // Empty SQL resolves to nothing rather than erroring.
        resolver();
        return;
      }
      const queryOptions = Object.assign({ sql: obj.sql }, obj.options);
      connection.query(queryOptions, obj.bindings, function(err, rows, fields) {
        if (err) return rejecter(err);
        obj.response = [rows, fields];
        resolver(obj);
      });
    });
  },
  // Process the response as returned from the query.
  processResponse(obj, runner) {
    if (obj == null) return;
    const { response } = obj;
    const { method } = obj;
    const rows = response[0];
    const fields = response[1];
    // A custom `output` hook (e.g. hasTable) takes precedence.
    if (obj.output) return obj.output.call(runner, rows, fields);
    switch (method) {
      case 'select':
      case 'pluck':
      case 'first': {
        if (method === 'pluck') {
          return map(rows, obj.pluck);
        }
        return method === 'first' ? rows[0] : rows;
      }
      case 'insert':
        return [rows.insertId];
      case 'del':
      case 'update':
      case 'counter':
        return rows.affectedRows;
      default:
        return response;
    }
  },
  canCancelQuery: true,
  // Kills a running query on `connectionToKill` by issuing KILL QUERY from a
  // second, freshly acquired connection.
  cancelQuery(connectionToKill) {
    const acquiringConn = this.acquireConnection();
    // Error out if we can't acquire connection in time.
    // Purposely not putting timeout on `KILL QUERY` execution because erroring
    // early there would release the `connectionToKill` back to the pool with
    // a `KILL QUERY` command yet to finish.
    return acquiringConn
      .timeout(100)
      .then((conn) =>
        this.query(conn, {
          method: 'raw',
          sql: 'KILL QUERY ?',
          bindings: [connectionToKill.threadId],
          options: {},
        })
      )
      .finally(() => {
        // NOT returning this promise because we want to release the connection
        // in a non-blocking fashion
        acquiringConn.then((conn) => this.releaseConnection(conn));
      });
  },
});
module.exports = Client_MySQL;

105
node_modules/knex/lib/dialects/mysql/query/compiler.js generated vendored Normal file
View File

@@ -0,0 +1,105 @@
// MySQL Query Compiler
// ------
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const { identity } = require('lodash');
// MySQL query compiler constructor. Warns when `.returning()` was requested,
// since MySQL cannot return rows from insert/update statements.
function QueryCompiler_MySQL(client, builder) {
  QueryCompiler.call(this, client, builder);
  const { returning } = this.single;
  if (returning) {
    this.client.logger.warn(
      '.returning() is not supported by mysql and will not have any effect.'
    );
  }
}
inherits(QueryCompiler_MySQL, QueryCompiler);
// MySQL-specific SQL fragments and compilation overrides.
Object.assign(QueryCompiler_MySQL.prototype, {
  _emptyInsertValue: '() values ()',
  // Update method, including joins, wheres, order & limits.
  update() {
    const join = this.join();
    const updates = this._prepUpdate(this.single.update);
    const where = this.where();
    const order = this.order();
    const limit = this.limit();
    return (
      `update ${this.tableName}` +
      (join ? ` ${join}` : '') +
      ' set ' +
      updates.join(', ') +
      (where ? ` ${where}` : '') +
      (order ? ` ${order}` : '') +
      (limit ? ` ${limit}` : '')
    );
  },
  forUpdate() {
    return 'for update';
  },
  forShare() {
    return 'lock in share mode';
  },
  // Only supported on MySQL 8.0+
  skipLocked() {
    return 'skip locked';
  },
  // Supported on MySQL 8.0+ and MariaDB 10.3.0+
  noWait() {
    return 'nowait';
  },
  // Compiles a `columnInfo` query.
  columnInfo() {
    const column = this.single.columnInfo;
    // The user may have specified a custom wrapIdentifier function in the config. We
    // need to run the identifiers through that function, but not format them as
    // identifiers otherwise.
    const table = this.client.customWrapIdentifier(this.single.table, identity);
    return {
      sql:
        'select * from information_schema.columns where table_name = ? and table_schema = ?',
      bindings: [table, this.client.database()],
      // Reduce the raw rows to a { columnName: info } map; if a single
      // column was requested, return just its entry.
      output(resp) {
        const out = resp.reduce(function(columns, val) {
          columns[val.COLUMN_NAME] = {
            defaultValue: val.COLUMN_DEFAULT,
            type: val.DATA_TYPE,
            maxLength: val.CHARACTER_MAXIMUM_LENGTH,
            nullable: val.IS_NULLABLE === 'YES',
          };
          return columns;
        }, {});
        return (column && out[column]) || out;
      },
    };
  },
  limit() {
    const noLimit = !this.single.limit && this.single.limit !== 0;
    if (noLimit && !this.single.offset) return '';
    // Workaround for offset only.
    // see: http://stackoverflow.com/questions/255517/mysql-offset-infinite-rows
    const limit =
      this.single.offset && noLimit
        ? '18446744073709551615'
        : this.formatter.parameter(this.single.limit);
    return `limit ${limit}`;
  },
});
// Set the QueryBuilder & QueryCompiler on the client object,
// in case anyone wants to modify things to suit their own purposes.
module.exports = QueryCompiler_MySQL;

View File

@@ -0,0 +1,170 @@
// MySQL Column Compiler
// -------
const inherits = require('inherits');
const ColumnCompiler = require('../../../schema/columncompiler');
const { isObject } = require('lodash');
// MySQL column compiler constructor; declares the set of column modifiers
// this dialect supports (applied in this order when compiling a column).
function ColumnCompiler_MySQL() {
  ColumnCompiler.apply(this, arguments);
  this.modifiers = [
    'unsigned',
    'nullable',
    'defaultTo',
    'comment',
    'collate',
    'first',
    'after',
  ];
}
inherits(ColumnCompiler_MySQL, ColumnCompiler);
// Types
// ------
// Column type mappings and modifier compilers for MySQL. String properties
// are used verbatim as column types; functions build parameterized types.
Object.assign(ColumnCompiler_MySQL.prototype, {
  increments: 'int unsigned not null auto_increment primary key',
  bigincrements: 'bigint unsigned not null auto_increment primary key',
  bigint: 'bigint',
  double(precision, scale) {
    if (!precision) return 'double';
    return `double(${this._num(precision, 8)}, ${this._num(scale, 2)})`;
  },
  integer(length) {
    length = length ? `(${this._num(length, 11)})` : '';
    return `int${length}`;
  },
  mediumint: 'mediumint',
  smallint: 'smallint',
  tinyint(length) {
    length = length ? `(${this._num(length, 1)})` : '';
    return `tinyint${length}`;
  },
  // `text` accepts a size hint ('medium'/'long') selecting the variant.
  text(column) {
    switch (column) {
      case 'medium':
      case 'mediumtext':
        return 'mediumtext';
      case 'long':
      case 'longtext':
        return 'longtext';
      default:
        return 'text';
    }
  },
  mediumtext() {
    return this.text('medium');
  },
  longtext() {
    return this.text('long');
  },
  enu(allowed) {
    return `enum('${allowed.join("', '")}')`;
  },
  datetime(precision) {
    if (isObject(precision)) {
      ({ precision } = precision);
    }
    return typeof precision === 'number'
      ? `datetime(${precision})`
      : 'datetime';
  },
  timestamp(precision) {
    if (isObject(precision)) {
      ({ precision } = precision);
    }
    return typeof precision === 'number'
      ? `timestamp(${precision})`
      : 'timestamp';
  },
  time(precision) {
    if (isObject(precision)) {
      ({ precision } = precision);
    }
    return typeof precision === 'number' ? `time(${precision})` : 'time';
  },
  bit(length) {
    return length ? `bit(${this._num(length)})` : 'bit';
  },
  binary(length) {
    return length ? `varbinary(${this._num(length)})` : 'blob';
  },
  json() {
    return 'json';
  },
  jsonb() {
    // MySQL has no distinct jsonb type; both map to `json`.
    return 'json';
  },
  // Modifiers
  // ------
  defaultTo(value) {
    // MySQL defaults to null by default, but breaks down if you pass it explicitly
    // Note that in MySQL versions up to 5.7, logic related to updating
    // timestamps when no explicit value is passed is quite insane - https://dev.mysql.com/doc/refman/5.7/en/server-system-variables.html#sysvar_explicit_defaults_for_timestamp
    if (value === null || value === undefined) {
      return;
    }
    if ((this.type === 'json' || this.type === 'jsonb') && isObject(value)) {
      // Default value for json will work only it is an expression
      return `default ('${JSON.stringify(value)}')`;
    }
    const defaultVal = ColumnCompiler_MySQL.super_.prototype.defaultTo.apply(
      this,
      arguments
    );
    // blob/text columns cannot carry a DEFAULT clause in MySQL.
    if (this.type !== 'blob' && this.type.indexOf('text') === -1) {
      return defaultVal;
    }
    return '';
  },
  unsigned() {
    return 'unsigned';
  },
  comment(comment) {
    if (comment && comment.length > 255) {
      this.client.logger.warn(
        'Your comment is longer than the max comment length for MySQL'
      );
    }
    return comment && `comment '${comment}'`;
  },
  first() {
    return 'first';
  },
  after(column) {
    return `after ${this.formatter.wrap(column)}`;
  },
  collate(collation) {
    return collation && `collate '${collation}'`;
  },
});
module.exports = ColumnCompiler_MySQL;

View File

@@ -0,0 +1,60 @@
// MySQL Schema Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler = require('../../../schema/compiler');
const { some } = require('lodash');
// MySQL schema compiler constructor; setup is delegated to the base class.
function SchemaCompiler_MySQL(client, builder) {
  SchemaCompiler.call(this, client, builder);
}
inherits(SchemaCompiler_MySQL, SchemaCompiler);
// MySQL-specific schema statements.
Object.assign(SchemaCompiler_MySQL.prototype, {
  // Rename a table on the schema.
  renameTable(tableName, to) {
    this.pushQuery(
      `rename table ${this.formatter.wrap(tableName)} to ${this.formatter.wrap(
        to
      )}`
    );
  },
  // Check whether a table exists on the query.
  // Resolves to a boolean via the `output` hook; scoped to the configured
  // schema, or to the current database when none was set.
  hasTable(tableName) {
    let sql = 'select * from information_schema.tables where table_name = ?';
    const bindings = [tableName];
    if (this.schema) {
      sql += ' and table_schema = ?';
      bindings.push(this.schema);
    } else {
      sql += ' and table_schema = database()';
    }
    this.pushQuery({
      sql,
      bindings,
      output: function output(resp) {
        return resp.length > 0;
      },
    });
  },
  // Check whether a column exists on the schema.
  // Compares wrapped identifiers so custom wrapIdentifier configs apply.
  hasColumn(tableName, column) {
    this.pushQuery({
      sql: `show columns from ${this.formatter.wrap(tableName)}`,
      output(resp) {
        return some(resp, (row) => {
          return (
            this.client.wrapIdentifier(row.Field) ===
            this.client.wrapIdentifier(column)
          );
        });
      },
    });
  },
});
module.exports = SchemaCompiler_MySQL;

View File

@@ -0,0 +1,262 @@
/* eslint max-len:0 no-console:0*/
// MySQL Table Builder & Compiler
// -------
const inherits = require('inherits');
const TableCompiler = require('../../../schema/tablecompiler');
// Table Compiler
// ------
// MySQL table compiler constructor; setup is delegated to the base class.
function TableCompiler_MySQL() {
  TableCompiler.apply(this, arguments);
}
inherits(TableCompiler_MySQL, TableCompiler);
// MySQL-specific table DDL compilation.
Object.assign(TableCompiler_MySQL.prototype, {
  createQuery(columns, ifNot) {
    const createStatement = ifNot
      ? 'create table if not exists '
      : 'create table ';
    const { client } = this;
    let conn = {};
    let sql =
      createStatement + this.tableName() + ' (' + columns.sql.join(', ') + ')';
    // Check if the connection settings are set.
    if (client.connectionSettings) {
      conn = client.connectionSettings;
    }
    // Table-level charset/collation fall back to the connection settings.
    const charset = this.single.charset || conn.charset || '';
    const collation = this.single.collate || conn.collate || '';
    const engine = this.single.engine || '';
    // var conn = builder.client.connectionSettings;
    if (charset) sql += ` default character set ${charset}`;
    if (collation) sql += ` collate ${collation}`;
    if (engine) sql += ` engine = ${engine}`;
    if (this.single.comment) {
      const comment = this.single.comment || '';
      if (comment.length > 60)
        this.client.logger.warn(
          'The max length for a table comment is 60 characters'
        );
      sql += ` comment = '${comment}'`;
    }
    this.pushQuery(sql);
  },
  addColumnsPrefix: 'add ',
  alterColumnsPrefix: 'modify ',
  dropColumnPrefix: 'drop ',
  // Compiles the comment on the table.
  comment(comment) {
    this.pushQuery(`alter table ${this.tableName()} comment = '${comment}'`);
  },
  changeType() {
    // alter table + table + ' modify ' + wrapped + '// type';
  },
  // Renames a column on the table.
renameColumn(from, to) {
const compiler = this;
const table = this.tableName();
const wrapped = this.formatter.wrap(from) + ' ' + this.formatter.wrap(to);
this.pushQuery({
sql:
`show fields from ${table} where field = ` +
this.formatter.parameter(from),
output(resp) {
const column = resp[0];
const runner = this;
return compiler.getFKRefs(runner).then(([refs]) =>
new Promise((resolve, reject) => {
try {
if (!refs.length) {
resolve();
}
resolve(compiler.dropFKRefs(runner, refs));
} catch (e) {
reject(e);
}
})
.then(function() {
let sql = `alter table ${table} change ${wrapped} ${column.Type}`;
if (String(column.Null).toUpperCase() !== 'YES') {
sql += ` NOT NULL`;
} else {
// This doesn't matter for most cases except Timestamp, where this is important
sql += ` NULL`;
}
if (column.Default !== void 0 && column.Default !== null) {
sql += ` DEFAULT '${column.Default}'`;
}
return runner.query({
sql,
});
})
.then(function() {
if (!refs.length) {
return;
}
return compiler.createFKRefs(
runner,
refs.map(function(ref) {
if (ref.REFERENCED_COLUMN_NAME === from) {
ref.REFERENCED_COLUMN_NAME = to;
}
if (ref.COLUMN_NAME === from) {
ref.COLUMN_NAME = to;
}
return ref;
})
);
})
);
},
});
},
  // Looks up all foreign keys that reference this table via
  // INFORMATION_SCHEMA, returning constraint/column/rule metadata rows.
  getFKRefs(runner) {
    const formatter = this.client.formatter(this.tableBuilder);
    const sql =
      'SELECT KCU.CONSTRAINT_NAME, KCU.TABLE_NAME, KCU.COLUMN_NAME, ' +
      ' KCU.REFERENCED_TABLE_NAME, KCU.REFERENCED_COLUMN_NAME, ' +
      ' RC.UPDATE_RULE, RC.DELETE_RULE ' +
      'FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KCU ' +
      'JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC ' +
      ' USING(CONSTRAINT_NAME)' +
      'WHERE KCU.REFERENCED_TABLE_NAME = ' +
      formatter.parameter(this.tableNameRaw) +
      ' ' +
      ' AND KCU.CONSTRAINT_SCHEMA = ' +
      formatter.parameter(this.client.database()) +
      ' ' +
      ' AND RC.CONSTRAINT_SCHEMA = ' +
      formatter.parameter(this.client.database());
    return runner.query({
      sql,
      bindings: formatter.bindings,
    });
  },
  // Drops the given foreign-key constraints (one statement per ref).
  dropFKRefs(runner, refs) {
    const formatter = this.client.formatter(this.tableBuilder);
    return Promise.all(
      refs.map(function(ref) {
        const constraintName = formatter.wrap(ref.CONSTRAINT_NAME);
        const tableName = formatter.wrap(ref.TABLE_NAME);
        return runner.query({
          sql: `alter table ${tableName} drop foreign key ${constraintName}`,
        });
      })
    );
  },
  // Re-creates foreign-key constraints from INFORMATION_SCHEMA-shaped rows.
  createFKRefs(runner, refs) {
    const formatter = this.client.formatter(this.tableBuilder);
    return Promise.all(
      refs.map(function(ref) {
        const tableName = formatter.wrap(ref.TABLE_NAME);
        const keyName = formatter.wrap(ref.CONSTRAINT_NAME);
        const column = formatter.columnize(ref.COLUMN_NAME);
        const references = formatter.columnize(ref.REFERENCED_COLUMN_NAME);
        const inTable = formatter.wrap(ref.REFERENCED_TABLE_NAME);
        const onUpdate = ` ON UPDATE ${ref.UPDATE_RULE}`;
        const onDelete = ` ON DELETE ${ref.DELETE_RULE}`;
        return runner.query({
          sql:
            `alter table ${tableName} add constraint ${keyName} ` +
            'foreign key (' +
            column +
            ') references ' +
            inTable +
            ' (' +
            references +
            ')' +
            onUpdate +
            onDelete,
        });
      })
    );
  },
  index(columns, indexName, indexType) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('index', this.tableNameRaw, columns);
    this.pushQuery(
      `alter table ${this.tableName()} add${
        indexType ? ` ${indexType}` : ''
      } index ${indexName}(${this.formatter.columnize(columns)})`
    );
  },
  primary(columns, constraintName) {
    constraintName = constraintName
      ? this.formatter.wrap(constraintName)
      : this.formatter.wrap(`${this.tableNameRaw}_pkey`);
    this.pushQuery(
      `alter table ${this.tableName()} add primary key ${constraintName}(${this.formatter.columnize(
        columns
      )})`
    );
  },
  unique(columns, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('unique', this.tableNameRaw, columns);
    this.pushQuery(
      `alter table ${this.tableName()} add unique ${indexName}(${this.formatter.columnize(
        columns
      )})`
    );
  },
  // Compile a drop index command.
  dropIndex(columns, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('index', this.tableNameRaw, columns);
    this.pushQuery(`alter table ${this.tableName()} drop index ${indexName}`);
  },
  // Compile a drop foreign key command.
  dropForeign(columns, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('foreign', this.tableNameRaw, columns);
    this.pushQuery(
      `alter table ${this.tableName()} drop foreign key ${indexName}`
    );
  },
  // Compile a drop primary key command.
  dropPrimary() {
    this.pushQuery(`alter table ${this.tableName()} drop primary key`);
  },
  // Compile a drop unique key command.
  dropUnique(column, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('unique', this.tableNameRaw, column);
    this.pushQuery(`alter table ${this.tableName()} drop index ${indexName}`);
  },
});
module.exports = TableCompiler_MySQL;

50
node_modules/knex/lib/dialects/mysql/transaction.js generated vendored Normal file
View File

@@ -0,0 +1,50 @@
const Transaction = require('../../transaction');
const Debug = require('debug');
const { isUndefined } = require('lodash');
const debug = Debug('knex:tx');
class Transaction_MySQL extends Transaction {}
Object.assign(Transaction_MySQL.prototype, {
  // Runs a transaction control statement and settles the transaction promise
  // according to `status`: 1 = resolve with `value`, 2 = reject with `value`.
  query(conn, sql, status, value) {
    const t = this;
    const q = this.trxClient
      .query(conn, sql)
      .catch(
        // NOTE(review): two-argument catch with a predicate is Bluebird's
        // filtered-catch form — this presumably relies on trxClient.query
        // returning a Bluebird promise; a native Promise would treat the
        // predicate as the handler. Errno 1305 = ER_SP_DOES_NOT_EXIST.
        (err) => err.errno === 1305,
        () => {
          this.trxClient.logger.warn(
            'Transaction was implicitly committed, do not mix transactions and ' +
              'DDL with MySQL (#805)'
          );
        }
      )
      .catch(function(err) {
        // Any other failure flips the status to "reject" with the error.
        status = 2;
        value = err;
        t._completed = true;
        debug('%s error running transaction query', t.txid);
      })
      .then(function(res) {
        if (status === 1) t._resolver(value);
        if (status === 2) {
          if (isUndefined(value)) {
            if (t.doNotRejectOnRollback && /^ROLLBACK\b/i.test(sql)) {
              t._resolver();
              return;
            }
            value = new Error(`Transaction rejected with non-error: ${value}`);
          }
          t._rejecter(value);
        }
        return res;
      });
    if (status === 1 || status === 2) {
      t._completed = true;
    }
    return q;
  },
});
module.exports = Transaction_MySQL;

35
node_modules/knex/lib/dialects/mysql2/index.js generated vendored Normal file
View File

@@ -0,0 +1,35 @@
// MySQL2 Client
// -------
const inherits = require('inherits');
const Client_MySQL = require('../mysql');
const Transaction = require('./transaction');
// Always initialize with the "QueryBuilder" and "QueryCompiler"
// objects, which extend the base 'lib/query/builder' and
// 'lib/query/compiler', respectively.
// MySQL2 client constructor; inherits all behavior from the mysql client.
function Client_MySQL2(config) {
  Client_MySQL.call(this, config);
}
inherits(Client_MySQL2, Client_MySQL);
// Overrides that differentiate mysql2 from the base mysql dialect: the
// driver module, the transaction implementation, and connection validation.
Object.assign(Client_MySQL2.prototype, {
  // The "dialect", for reference elsewhere.
  driverName: 'mysql2',
  _driver() {
    return require('mysql2');
  },
  transaction() {
    return new Transaction(this, ...arguments);
  },
  validateConnection(connection) {
    // A connection that has seen a fatal error must not be reused.
    const broken = connection._fatalError;
    return broken ? false : true;
  },
});
module.exports = Client_MySQL2;
49
node_modules/knex/lib/dialects/mysql2/transaction.js generated vendored Normal file
View File

@@ -0,0 +1,49 @@
const Transaction = require('../../transaction');
const debug = require('debug')('knex:tx');
const { isUndefined } = require('lodash');
class Transaction_MySQL2 extends Transaction {}
Object.assign(Transaction_MySQL2.prototype, {
  // Runs a transaction control statement and settles the transaction promise
  // according to `status`: 1 = resolve with `value`, 2 = reject with `value`.
  query(conn, sql, status, value) {
    const t = this;
    const q = this.trxClient
      .query(conn, sql)
      .catch(
        // NOTE(review): two-argument catch with a predicate is Bluebird's
        // filtered-catch form — this presumably relies on trxClient.query
        // returning a Bluebird promise; verify before swapping to native.
        (err) => err.code === 'ER_SP_DOES_NOT_EXIST',
        () => {
          this.trxClient.logger.warn(
            'Transaction was implicitly committed, do not mix transactions and ' +
              'DDL with MySQL (#805)'
          );
        }
      )
      .catch(function(err) {
        // Any other failure flips the status to "reject" with the error.
        status = 2;
        value = err;
        t._completed = true;
        debug('%s error running transaction query', t.txid);
      })
      .then(function(res) {
        if (status === 1) t._resolver(value);
        if (status === 2) {
          if (isUndefined(value)) {
            if (t.doNotRejectOnRollback && /^ROLLBACK\b/i.test(sql)) {
              t._resolver();
              return;
            }
            value = new Error(`Transaction rejected with non-error: ${value}`);
          }
          t._rejecter(value);
        }
        // Propagate the query result for every status, matching the mysql
        // dialect's Transaction implementation (previously only returned
        // when status === 2, silently dropping the result otherwise).
        return res;
      });
    if (status === 1 || status === 2) {
      t._completed = true;
    }
    return q;
  },
});
module.exports = Transaction_MySQL2;

20
node_modules/knex/lib/dialects/oracle/formatter.js generated vendored Normal file
View File

@@ -0,0 +1,20 @@
const Formatter = require('../../formatter');
const { ReturningHelper } = require('./utils');
// Oracle-specific SQL formatting tweaks on top of the base Formatter.
class Oracle_Formatter extends Formatter {
  // Oracle aliases with a bare space rather than the "as" keyword.
  alias(first, second) {
    return `${first} ${second}`;
  }
  // Normalizes a binding value before delegating to the base formatter.
  parameter(value, notSetValue) {
    let prepared = value;
    if (prepared instanceof ReturningHelper && this.client.driver) {
      // Returning helper uses always ROWID as string
      const { driver } = this.client;
      prepared = new driver.OutParam(driver.OCCISTRING);
    } else if (typeof prepared === 'boolean') {
      prepared = prepared ? 1 : 0;
    }
    return super.parameter(prepared, notSetValue);
  }
}
module.exports = Oracle_Formatter;

196
node_modules/knex/lib/dialects/oracle/index.js generated vendored Normal file
View File

@@ -0,0 +1,196 @@
// Oracle Client
// -------
const { map, flatten, values } = require('lodash');
const { promisify } = require('util');
const inherits = require('inherits');
const Client = require('../../client');
const Bluebird = require('bluebird');
const { bufferToString } = require('../../query/string');
const Formatter = require('./formatter');
const Transaction = require('./transaction');
const QueryCompiler = require('./query/compiler');
const SchemaCompiler = require('./schema/compiler');
const ColumnBuilder = require('./schema/columnbuilder');
const ColumnCompiler = require('./schema/columncompiler');
const TableCompiler = require('./schema/tablecompiler');
const { ReturningHelper, isConnectionError } = require('./utils');
// Always initialize with the "QueryBuilder" and "QueryCompiler"
// objects, which extend the base 'lib/query/builder' and
// 'lib/query/compiler', respectively.
// Oracle client constructor; configuration handling is in the base Client.
function Client_Oracle(config) {
  Client.call(this, config);
}
inherits(Client_Oracle, Client);
Object.assign(Client_Oracle.prototype, {
  dialect: 'oracle',

  driverName: 'oracle',

  // Lazily require the driver so knex itself can load without it installed.
  _driver() {
    return require('oracle');
  },

  // Factories for the Oracle-specific flavours of each component; all of
  // them forward whatever arguments the base client passes along.
  transaction() {
    return new Transaction(this, ...arguments);
  },

  formatter() {
    return new Formatter(this, ...arguments);
  },

  queryCompiler() {
    return new QueryCompiler(this, ...arguments);
  },

  schemaCompiler() {
    return new SchemaCompiler(this, ...arguments);
  },

  columnBuilder() {
    return new ColumnBuilder(this, ...arguments);
  },

  columnCompiler() {
    return new ColumnCompiler(this, ...arguments);
  },

  tableCompiler() {
    return new TableCompiler(this, ...arguments);
  },

  // Normalize bindings into driver-compatible values:
  //   * ReturningHelper -> driver OutParam (ROWID comes back as a string),
  //   * boolean         -> 0/1 (Oracle has no boolean bind type),
  //   * Buffer          -> binary string (see query/string bufferToString).
  prepBindings(bindings) {
    return map(bindings, (value) => {
      // returning helper uses always ROWID as string
      if (value instanceof ReturningHelper && this.driver) {
        return new this.driver.OutParam(this.driver.OCCISTRING);
      } else if (typeof value === 'boolean') {
        return value ? 1 : 0;
      } else if (Buffer.isBuffer(value)) {
        return bufferToString(value);
      }
      return value;
    });
  },

  // Get a raw connection, called by the `pool` whenever a new
  // connection needs to be added to the pool.
  acquireRawConnection() {
    return new Bluebird((resolver, rejecter) => {
      this.driver.connect(this.connectionSettings, (err, connection) => {
        if (err) return rejecter(err);
        // Adds promise-returning `*Async` variants of the callback-style
        // driver methods (executeAsync is used by _query below).
        Bluebird.promisifyAll(connection);
        if (this.connectionSettings.prefetchRowCount) {
          connection.setPrefetchRowCount(
            this.connectionSettings.prefetchRowCount
          );
        }
        resolver(connection);
      });
    });
  },

  // Used to explicitly close a connection, called internally by the pool
  // when a connection times out or the pool is shutdown.
  async destroyRawConnection(connection) {
    const close = promisify((cb) => connection.close(cb));
    return close();
  },

  // Return the database name for the Oracle client.
  database() {
    return this.connectionSettings.database;
  },

  // Rewrite `?` placeholders as Oracle's positional `:1`, `:2`, ... binds.
  // NOTE(review): a literal `?` inside a quoted SQL string would also be
  // rewritten; such values are expected to be bound, not inlined.
  positionBindings(sql) {
    let questionCount = 0;
    return sql.replace(/\?/g, function() {
      questionCount += 1;
      return `:${questionCount}`;
    });
  },

  // Pipe the query's rows into `stream`; resolves when the stream ends,
  // rejects on the first error. Connection-level failures also flag the
  // connection as disposed so the pool discards it.
  _stream(connection, obj, stream, options) {
    return new Bluebird(function(resolver, rejecter) {
      stream.on('error', (err) => {
        if (isConnectionError(err)) {
          connection.__knex__disposed = err;
        }
        rejecter(err);
      });
      stream.on('end', resolver);
      const queryStream = connection.queryStream(
        obj.sql,
        obj.bindings,
        options
      );
      queryStream.pipe(stream);
      queryStream.on('error', function(error) {
        rejecter(error);
        stream.emit('error', error);
      });
    });
  },

  // Runs the query on the specified connection, providing the bindings
  // and any other necessary prep work. When a `returning` clause is
  // present, a second query (obj.returningSql) fetches the affected rows
  // back via the ROWIDs collected in obj.outParams.
  _query(connection, obj) {
    if (!obj.sql) throw new Error('The query is empty');
    return connection
      .executeAsync(obj.sql, obj.bindings)
      .then(function(response) {
        if (!obj.returning) return response;
        // The driver names its out-params returnParam, returnParam1, ...
        const rowIds = obj.outParams.map(
          (v, i) => response[`returnParam${i ? i : ''}`]
        );
        return connection.executeAsync(obj.returningSql, rowIds);
      })
      .then(function(response) {
        obj.response = response;
        obj.rowsAffected = response.updateCount;
        return obj;
      })
      .catch((err) => {
        if (isConnectionError(err)) {
          connection.__knex__disposed = err;
        }
        throw err;
      });
  },

  // Process the response as returned from the query, shaped according to
  // the builder method that produced it (select/first/pluck/insert/...).
  processResponse(obj, runner) {
    let { response } = obj;
    const { method } = obj;
    if (obj.output) return obj.output.call(runner, response);
    switch (method) {
      case 'select':
      case 'pluck':
      case 'first':
        if (obj.method === 'pluck') response = map(response, obj.pluck);
        return obj.method === 'first' ? response[0] : response;
      case 'insert':
      case 'del':
      case 'update':
      case 'counter':
        if (obj.returning) {
          if (obj.returning.length > 1 || obj.returning[0] === '*') {
            return response;
          }
          // return an array with values if only one returning value was specified
          return flatten(map(response, values));
        }
        return obj.rowsAffected;
      default:
        return response;
    }
  },
});

module.exports = Client_Oracle;

325
node_modules/knex/lib/dialects/oracle/query/compiler.js generated vendored Normal file
View File

@@ -0,0 +1,325 @@
/* eslint max-len:0 */
// Oracle Query Builder & Compiler
// ------
const {
assign,
isPlainObject,
isEmpty,
isString,
map,
reduce,
compact,
identity,
} = require('lodash');
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const { ReturningHelper } = require('../utils');
// Clause compilers invoked, in this order, to assemble a select statement.
const components = [
  'columns',
  'join',
  'where',
  'union',
  'group',
  'having',
  'order',
  'lock',
];
// Query Compiler
// -------
// Set the "Formatter" to use for the queries,
// ensuring that all parameterized values (even across sub-queries)
// are properly built into the same query.
// Oracle query compiler constructor; inherits the generic compiler and
// overrides the pieces where Oracle's SQL dialect differs.
function QueryCompiler_Oracle(client, builder) {
  QueryCompiler.call(this, client, builder);
}
inherits(QueryCompiler_Oracle, QueryCompiler);
assign(QueryCompiler_Oracle.prototype, {
  // Compiles an "insert" query, allowing for multiple
  // inserts using a single query statement. Multi-row inserts are emitted
  // as an anonymous PL/SQL block of `execute immediate` statements, since
  // Oracle has no multi-row VALUES syntax; `returning` is emulated by
  // collecting ROWIDs into out-params and re-selecting the rows afterwards.
  insert() {
    let insertValues = this.single.insert || [];
    let { returning } = this.single;
    if (!Array.isArray(insertValues) && isPlainObject(this.single.insert)) {
      insertValues = [this.single.insert];
    }
    // always wrap returning argument in array
    if (returning && !Array.isArray(returning)) {
      returning = [returning];
    }
    // Single empty row -> insert all-defaults.
    if (
      Array.isArray(insertValues) &&
      insertValues.length === 1 &&
      isEmpty(insertValues[0])
    ) {
      return this._addReturningToSqlAndConvert(
        `insert into ${this.tableName} (${this.formatter.wrap(
          this.single.returning
        )}) values (default)`,
        returning,
        this.tableName
      );
    }
    if (
      isEmpty(this.single.insert) &&
      typeof this.single.insert !== 'function'
    ) {
      return '';
    }
    const insertData = this._prepInsert(insertValues);
    const sql = {};
    // _prepInsert may return a raw SQL fragment (e.g. insert ... select).
    if (isString(insertData)) {
      return this._addReturningToSqlAndConvert(
        `insert into ${this.tableName} ${insertData}`,
        returning
      );
    }
    // Single-row insert: a plain parameterized statement is enough.
    if (insertData.values.length === 1) {
      return this._addReturningToSqlAndConvert(
        `insert into ${this.tableName} (${this.formatter.columnize(
          insertData.columns
        )}) values (${this.formatter.parameterize(insertData.values[0])})`,
        returning,
        this.tableName
      );
    }
    const insertDefaultsOnly = insertData.columns.length === 0;
    // Multi-row path: build one `execute immediate` per row inside a
    // begin ... end; block.
    sql.sql =
      'begin ' +
      map(insertData.values, (value) => {
        let returningHelper;
        const parameterizedValues = !insertDefaultsOnly
          ? this.formatter.parameterize(value, this.client.valueForUndefined)
          : '';
        const returningValues = Array.isArray(returning)
          ? returning
          : [returning];
        let subSql = `insert into ${this.tableName} `;
        if (returning) {
          // One out-param (receiving the row's ROWID) per inserted row.
          returningHelper = new ReturningHelper(returningValues.join(':'));
          sql.outParams = (sql.outParams || []).concat(returningHelper);
        }
        if (insertDefaultsOnly) {
          // no columns given so only the default value
          subSql += `(${this.formatter.wrap(
            this.single.returning
          )}) values (default)`;
        } else {
          subSql += `(${this.formatter.columnize(
            insertData.columns
          )}) values (${parameterizedValues})`;
        }
        subSql += returning
          ? ` returning ROWID into ${this.formatter.parameter(returningHelper)}`
          : '';
        // pre bind position because subSql is an execute immediate parameter
        // later position binding will only convert the ? params
        subSql = this.formatter.client.positionBindings(subSql);
        // DEFAULT placeholders are inlined in the SQL, not bound, so they
        // must be removed from the `using` list.
        const parameterizedValuesWithoutDefault = parameterizedValues
          .replace('DEFAULT, ', '')
          .replace(', DEFAULT', '');
        return (
          `execute immediate '${subSql.replace(/'/g, "''")}` +
          (parameterizedValuesWithoutDefault || returning ? "' using " : '') +
          parameterizedValuesWithoutDefault +
          (parameterizedValuesWithoutDefault && returning ? ', ' : '') +
          (returning ? 'out ?' : '') +
          ';'
        );
      }).join(' ') +
      'end;';
    if (returning) {
      sql.returning = returning;
      // generate select statement with special order by to keep the order because 'in (..)' may change the order
      sql.returningSql =
        `select ${this.formatter.columnize(returning)}` +
        ' from ' +
        this.tableName +
        ' where ROWID in (' +
        sql.outParams.map((v, i) => `:${i + 1}`).join(', ') +
        ')' +
        ' order by case ROWID ' +
        sql.outParams
          .map((v, i) => `when CHARTOROWID(:${i + 1}) then ${i}`)
          .join(' ') +
        ' end';
    }
    return sql;
  },

  // Update method, including joins, wheres, order & limits.
  update() {
    const updates = this._prepUpdate(this.single.update);
    const where = this.where();
    let { returning } = this.single;
    const sql =
      `update ${this.tableName}` +
      ' set ' +
      updates.join(', ') +
      (where ? ` ${where}` : '');
    if (!returning) {
      return sql;
    }
    // always wrap returning argument in array
    if (!Array.isArray(returning)) {
      returning = [returning];
    }
    return this._addReturningToSqlAndConvert(sql, returning, this.tableName);
  },

  // Compiles a `truncate` query.
  truncate() {
    return `truncate table ${this.tableName}`;
  },

  forUpdate() {
    return 'for update';
  },

  forShare() {
    // lock for share is not directly supported by oracle
    // use LOCK TABLE .. IN SHARE MODE; instead
    this.client.logger.warn(
      'lock for share is not supported by oracle dialect'
    );
    return '';
  },

  // Compiles a `columnInfo` query. Returns a map of column name ->
  // { type, defaultValue, maxLength, nullable } (or a single entry when a
  // specific column was requested).
  columnInfo() {
    const column = this.single.columnInfo;
    // The user may have specified a custom wrapIdentifier function in the config. We
    // need to run the identifiers through that function, but not format them as
    // identifiers otherwise.
    const table = this.client.customWrapIdentifier(this.single.table, identity);
    // Node oracle drivers doesn't support LONG type (which is data_default type)
    // hence the xmltable detour to read it back as a clob.
    const sql = `select * from xmltable( '/ROWSET/ROW'
passing dbms_xmlgen.getXMLType('
select char_col_decl_length, column_name, data_type, data_default, nullable
from user_tab_columns where table_name = ''${table}'' ')
columns
CHAR_COL_DECL_LENGTH number, COLUMN_NAME varchar2(200), DATA_TYPE varchar2(106),
DATA_DEFAULT clob, NULLABLE varchar2(1))`;
    return {
      sql: sql,
      output(resp) {
        const out = reduce(
          resp,
          function(columns, val) {
            columns[val.COLUMN_NAME] = {
              type: val.DATA_TYPE,
              defaultValue: val.DATA_DEFAULT,
              maxLength: val.CHAR_COL_DECL_LENGTH,
              nullable: val.NULLABLE === 'Y',
            };
            return columns;
          },
          {}
        );
        return (column && out[column]) || out;
      },
    };
  },

  // Compiles the `select` statement, or nested sub-selects, by calling each
  // of the component compilers, trimming out the empties, and wrapping the
  // result with Oracle's rownum-based limit/offset emulation.
  select() {
    let query = this.with();
    const statements = map(components, (component) => {
      return this[component]();
    });
    query += compact(statements).join(' ');
    return this._surroundQueryWithLimitAndOffset(query);
  },

  aggregate(stmt) {
    return this._aggregate(stmt, { aliasSeparator: ' ' });
  },

  // for single commands only: append `returning ROWID into :out` to `sql`
  // and produce the follow-up select (returningSql) that reads the row back.
  _addReturningToSqlAndConvert(sql, returning, tableName) {
    const res = {
      sql,
    };
    if (!returning) {
      return res;
    }
    const returningValues = Array.isArray(returning) ? returning : [returning];
    const returningHelper = new ReturningHelper(returningValues.join(':'));
    res.sql =
      sql +
      ' returning ROWID into ' +
      this.formatter.parameter(returningHelper);
    res.returningSql = `select ${this.formatter.columnize(
      returning
    )} from ${tableName} where ROWID = :1`;
    res.outParams = [returningHelper];
    res.returning = returning;
    return res;
  },

  // Emulate limit/offset: Oracle (pre-12c) has no LIMIT clause, so the
  // query is wrapped in rownum-filtered subqueries.
  _surroundQueryWithLimitAndOffset(query) {
    let { limit } = this.single;
    const { offset } = this.single;
    const hasLimit = limit || limit === 0 || limit === '0';
    limit = +limit;
    if (!hasLimit && !offset) return query;
    query = query || '';
    if (hasLimit && !offset) {
      return `select * from (${query}) where rownum <= ${this.formatter.parameter(
        limit
      )}`;
    }
    // 10^13 acts as a pseudo-infinite upper bound when only offset is given.
    const endRow = +offset + (hasLimit ? limit : 10000000000000);
    return (
      'select * from ' +
      '(select row_.*, ROWNUM rownum_ from (' +
      query +
      ') row_ ' +
      'where rownum <= ' +
      this.formatter.parameter(endRow) +
      ') ' +
      'where rownum_ > ' +
      this.formatter.parameter(offset)
    );
  },
});

// `first` compiles exactly like `select`; the single-row slicing happens in
// the client's processResponse.
QueryCompiler_Oracle.prototype.first = QueryCompiler_Oracle.prototype.select;

module.exports = QueryCompiler_Oracle;

View File

@@ -0,0 +1,18 @@
const inherits = require('inherits');
const ColumnBuilder = require('../../../schema/columnbuilder');
const { toArray } = require('lodash');
// Oracle flavour of the column builder.
function ColumnBuilder_Oracle() {
  ColumnBuilder.apply(this, arguments);
}
inherits(ColumnBuilder_Oracle, ColumnBuilder);

// "checkIn" is registered on the builder so the column compiler can control
// modifier ordering: the CHECK constraint must be emitted after DEFAULT.
ColumnBuilder_Oracle.prototype.checkIn = function(...values) {
  this._modifiers.checkIn = values;
  return this;
};

module.exports = ColumnBuilder_Oracle;

View File

@@ -0,0 +1,139 @@
const { uniq, map } = require('lodash');
const inherits = require('inherits');
const Raw = require('../../../raw');
const ColumnCompiler = require('../../../schema/columncompiler');
const Trigger = require('./trigger');
// Column Compiler
// -------
// Oracle column compiler. The modifier order is fixed here so that the
// CHECK constraint produced by checkIn always follows defaultTo, as Oracle
// DDL requires.
function ColumnCompiler_Oracle() {
  ColumnCompiler.apply(this, arguments);
  this.modifiers = ['defaultTo', 'checkIn', 'nullable', 'comment'];
}
inherits(ColumnCompiler_Oracle, ColumnCompiler);
Object.assign(ColumnCompiler_Oracle.prototype, {
  // helper function for pushAdditional in increments() and bigincrements():
  // emits the PL/SQL block that creates the backing sequence + trigger.
  _createAutoIncrementTriggerAndSequence() {
    // TODO Add warning that sequence etc is created
    this.pushAdditional(function() {
      const tableName = this.tableCompiler.tableNameRaw;
      const createTriggerSQL = Trigger.createAutoIncrementTrigger(
        this.client.logger,
        tableName
      );
      this.pushQuery(createTriggerSQL);
    });
  },

  // Auto-incrementing integer primary key, emulated via sequence + trigger.
  increments() {
    this._createAutoIncrementTriggerAndSequence();
    return 'integer not null primary key';
  },

  bigincrements() {
    this._createAutoIncrementTriggerAndSequence();
    return 'number(20, 0) not null primary key';
  },

  floating(precision) {
    const parsedPrecision = this._num(precision, 0);
    return `float${parsedPrecision ? `(${parsedPrecision})` : ''}`;
  },

  double(precision, scale) {
    // if (!precision) return 'number'; // TODO: Check If default is ok
    return `number(${this._num(precision, 8)}, ${this._num(scale, 2)})`;
  },

  // An explicit null precision yields an unconstrained decimal.
  decimal(precision, scale) {
    if (precision === null) return 'decimal';
    return `decimal(${this._num(precision, 8)}, ${this._num(scale, 2)})`;
  },

  integer(length) {
    return length ? `number(${this._num(length, 11)})` : 'integer';
  },

  tinyint: 'smallint',

  smallint: 'smallint',

  mediumint: 'integer',

  biginteger: 'number(20, 0)',

  text: 'clob',

  // Emulated enum: a varchar2 sized to the longest allowed value,
  // constrained via the checkIn modifier.
  enu(allowed) {
    allowed = uniq(allowed);
    const maxLength = (allowed || []).reduce(
      (maxLength, name) => Math.max(maxLength, String(name).length),
      1
    );
    // implicitly add the enum values as checked values
    // (wrapped in an array: checkIn receives the list as a single argument)
    this.columnBuilder._modifiers.checkIn = [allowed];
    return `varchar2(${maxLength})`;
  },

  time: 'timestamp with time zone',

  datetime(without) {
    return without ? 'timestamp' : 'timestamp with time zone';
  },

  timestamp(without) {
    return without ? 'timestamp' : 'timestamp with time zone';
  },

  bit: 'clob',

  json: 'clob',

  // Boolean emulated as number(1,0) restricted to 0/1 via CHECK.
  bool() {
    // implicitly add the check for 0 and 1
    this.columnBuilder._modifiers.checkIn = [[0, 1]];
    return 'number(1, 0)';
  },

  varchar(length) {
    return `varchar2(${this._num(length, 255)})`;
  },

  // Modifiers
  // ------

  // Attach a COMMENT ON COLUMN statement after the column's DDL.
  comment(comment) {
    const columnName = this.args[0] || this.defaults('columnName');
    this.pushAdditional(function() {
      this.pushQuery(
        `comment on column ${this.tableCompiler.tableName()}.` +
          this.formatter.wrap(columnName) +
          " is '" +
          (comment || '') +
          "'"
      );
    }, comment);
  },

  // Render a CHECK ... IN (...) constraint for the current column.
  checkIn(value) {
    // TODO: Maybe accept arguments also as array
    // TODO: value(s) should be escaped properly
    if (value === undefined) {
      return '';
    } else if (value instanceof Raw) {
      value = value.toQuery();
    } else if (Array.isArray(value)) {
      value = map(value, (v) => `'${v}'`).join(', ');
    } else {
      value = `'${value}'`;
    }
    return `check (${this.formatter.wrap(this.args[0])} in (${value}))`;
  },
});

module.exports = ColumnCompiler_Oracle;

View File

@@ -0,0 +1,81 @@
// Oracle Schema Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler = require('../../../schema/compiler');
const utils = require('../utils');
const Trigger = require('./trigger');
// Oracle schema compiler: delegates construction to the generic
// SchemaCompiler and overrides only the Oracle-specific DDL below.
function SchemaCompiler_Oracle() {
  SchemaCompiler.apply(this, arguments);
}
inherits(SchemaCompiler_Oracle, SchemaCompiler);
// Rename a table on the schema. Oracle needs a PL/SQL block here so that a
// companion auto-increment trigger/sequence pair (created by increments())
// is renamed together with the table.
SchemaCompiler_Oracle.prototype.renameTable = function(tableName, to) {
  this.pushQuery(
    Trigger.renameTableAndAutoIncrementTrigger(this.client.logger, tableName, to)
  );
};
// Check whether a table exists by probing USER_TABLES.
SchemaCompiler_Oracle.prototype.hasTable = function(tableName) {
  const sql = `select TABLE_NAME from USER_TABLES where TABLE_NAME = ${this.formatter.parameter(
    tableName
  )}`;
  this.pushQuery({
    sql,
    output: (resp) => resp.length > 0,
  });
};
// Check whether a column exists by probing USER_TAB_COLUMNS.
SchemaCompiler_Oracle.prototype.hasColumn = function(tableName, column) {
  const tableParam = this.formatter.parameter(tableName);
  const columnParam = this.formatter.parameter(column);
  const sql =
    'select COLUMN_NAME from USER_TAB_COLUMNS ' +
    `where TABLE_NAME = ${tableParam} ` +
    `and COLUMN_NAME = ${columnParam}`;
  this.pushQuery({ sql, output: (resp) => resp.length > 0 });
};
// Drop a sequence, swallowing ORA-02289 ("sequence does not exist") so the
// operation is idempotent.
SchemaCompiler_Oracle.prototype.dropSequenceIfExists = function(sequenceName) {
  const ddl = `drop sequence ${this.formatter.wrap(sequenceName)}`;
  this.pushQuery(utils.wrapSqlWithCatch(ddl, -2289));
};
// Drop the "<table>_seq" sequence that increments()/bigincrements() may
// have generated alongside the given table.
SchemaCompiler_Oracle.prototype._dropRelatedSequenceIfExists = function(
  tableName
) {
  this.dropSequenceIfExists(
    utils.generateCombinedName(this.client.logger, 'seq', tableName)
  );
};
// Drop a table, then any auto-increment sequence generated for it.
SchemaCompiler_Oracle.prototype.dropTable = function(tableName) {
  this.pushQuery('drop table ' + this.formatter.wrap(tableName));
  this._dropRelatedSequenceIfExists(tableName);
};
// Drop a table if present (ORA-00942 "table or view does not exist" is
// swallowed for idempotency), then any auto-increment sequence for it.
SchemaCompiler_Oracle.prototype.dropTableIfExists = function(tableName) {
  const ddl = `drop table ${this.formatter.wrap(tableName)}`;
  this.pushQuery(utils.wrapSqlWithCatch(ddl, -942));
  this._dropRelatedSequenceIfExists(tableName);
};

module.exports = SchemaCompiler_Oracle;

View File

@@ -0,0 +1,167 @@
/* eslint max-len:0 */
const inherits = require('inherits');
const utils = require('../utils');
const TableCompiler = require('../../../schema/tablecompiler');
const helpers = require('../../../helpers');
const Trigger = require('./trigger');
const { map } = require('lodash');
// Table Compiler
// ------
// Oracle table compiler: inherits the generic TableCompiler and overrides
// the DDL generation below.
function TableCompiler_Oracle() {
  TableCompiler.apply(this, arguments);
}
inherits(TableCompiler_Oracle, TableCompiler);
Object.assign(TableCompiler_Oracle.prototype, {
  // Emit ALTER TABLE ... ADD/MODIFY for the compiled column definitions;
  // multiple columns are grouped into one parenthesized clause.
  addColumns(columns, prefix) {
    if (columns.sql.length > 0) {
      prefix = prefix || this.addColumnsPrefix;
      // NOTE(review): this map is an identity copy of columns.sql.
      const columnSql = map(columns.sql, (column) => column);
      const alter = this.lowerCase ? 'alter table ' : 'ALTER TABLE ';
      let sql = `${alter}${this.tableName()} ${prefix}`;
      if (columns.sql.length > 1) {
        sql += `(${columnSql.join(', ')})`;
      } else {
        sql += columnSql.join(', ');
      }
      this.pushQuery({
        sql,
        bindings: columns.bindings,
      });
    }
  },

  // Compile a rename column command. Delegated to a PL/SQL block that also
  // rewires the auto-increment trigger when the renamed column is the PK.
  renameColumn(from, to) {
    // Remove quotes around tableName
    const tableName = this.tableName().slice(1, -1);
    return this.pushQuery(
      Trigger.renameColumnTrigger(this.client.logger, tableName, from, to)
    );
  },

  compileAdd(builder) {
    const table = this.formatter.wrap(builder);
    const columns = this.prefixArray('add column', this.getColumns(builder));
    return this.pushQuery({
      sql: `alter table ${table} ${columns.join(', ')}`,
    });
  },

  // Adds the "create" query to the query sequence.
  createQuery(columns, ifNot) {
    const sql = `create table ${this.tableName()} (${columns.sql.join(', ')})`;
    this.pushQuery({
      // catch "name is already used by an existing object" for workaround for "if not exists"
      sql: ifNot ? utils.wrapSqlWithCatch(sql, -955) : sql,
      bindings: columns.bindings,
    });
    if (this.single.comment) this.comment(this.single.comment);
  },

  // Compiles the comment on the table.
  comment(comment) {
    this.pushQuery(`comment on table ${this.tableName()} is '${comment}'`);
  },

  addColumnsPrefix: 'add ',

  alterColumnsPrefix: 'modify ',

  dropColumn() {
    const columns = helpers.normalizeArr.apply(null, arguments);
    this.pushQuery(
      `alter table ${this.tableName()} drop (${this.formatter.columnize(
        columns
      )})`
    );
  },

  // Intentionally a no-op placeholder (column type changes unsupported).
  changeType() {
    // alter table + table + ' modify ' + wrapped + '// type';
  },

  // Generate a wrapped, length-safe identifier for an index/constraint.
  _indexCommand(type, tableName, columns) {
    return this.formatter.wrap(
      utils.generateCombinedName(this.client.logger, type, tableName, columns)
    );
  },

  primary(columns, constraintName) {
    constraintName = constraintName
      ? this.formatter.wrap(constraintName)
      : this.formatter.wrap(`${this.tableNameRaw}_pkey`);
    this.pushQuery(
      `alter table ${this.tableName()} add constraint ${constraintName} primary key (${this.formatter.columnize(
        columns
      )})`
    );
  },

  dropPrimary(constraintName) {
    constraintName = constraintName
      ? this.formatter.wrap(constraintName)
      : this.formatter.wrap(this.tableNameRaw + '_pkey');
    this.pushQuery(
      `alter table ${this.tableName()} drop constraint ${constraintName}`
    );
  },

  index(columns, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('index', this.tableNameRaw, columns);
    this.pushQuery(
      `create index ${indexName} on ${this.tableName()}` +
        ' (' +
        this.formatter.columnize(columns) +
        ')'
    );
  },

  dropIndex(columns, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('index', this.tableNameRaw, columns);
    this.pushQuery(`drop index ${indexName}`);
  },

  unique(columns, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('unique', this.tableNameRaw, columns);
    this.pushQuery(
      `alter table ${this.tableName()} add constraint ${indexName}` +
        ' unique (' +
        this.formatter.columnize(columns) +
        ')'
    );
  },

  dropUnique(columns, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('unique', this.tableNameRaw, columns);
    this.pushQuery(
      `alter table ${this.tableName()} drop constraint ${indexName}`
    );
  },

  dropForeign(columns, indexName) {
    indexName = indexName
      ? this.formatter.wrap(indexName)
      : this._indexCommand('foreign', this.tableNameRaw, columns);
    this.pushQuery(
      `alter table ${this.tableName()} drop constraint ${indexName}`
    );
  },
});

module.exports = TableCompiler_Oracle;

126
node_modules/knex/lib/dialects/oracle/schema/trigger.js generated vendored Normal file
View File

@@ -0,0 +1,126 @@
const utils = require('../utils');
// PL/SQL generators used to emulate auto-increment columns: Oracle (pre-12c)
// has no IDENTITY, so knex pairs each increments() column with a sequence
// and a BEFORE INSERT trigger, and these helpers keep that pair in sync
// when tables/columns are renamed.
const trigger = {
  // Rename a column; when the renamed column is the table's primary key and
  // an auto-increment trigger exists, the trigger is recreated against the
  // new column name.
  renameColumnTrigger: function(logger, tableName, columnName, to) {
    const triggerName = utils.generateCombinedName(
      logger,
      'autoinc_trg',
      tableName
    );
    const sequenceName = utils.generateCombinedName(logger, 'seq', tableName);
    return (
      `DECLARE ` +
      `PK_NAME VARCHAR(200); ` +
      `IS_AUTOINC NUMBER := 0; ` +
      `BEGIN` +
      `  EXECUTE IMMEDIATE ('ALTER TABLE "${tableName}" RENAME COLUMN "${columnName}" TO "${to}"');` +
      `  SELECT COUNT(*) INTO IS_AUTOINC from "USER_TRIGGERS" where trigger_name = '${triggerName}';` +
      `  IF (IS_AUTOINC > 0) THEN` +
      `    SELECT cols.column_name INTO PK_NAME` +
      `    FROM all_constraints cons, all_cons_columns cols` +
      `    WHERE cons.constraint_type = 'P'` +
      `    AND cons.constraint_name = cols.constraint_name` +
      `    AND cons.owner = cols.owner` +
      `    AND cols.table_name = '${tableName}';` +
      `    IF ('${to}' = PK_NAME) THEN` +
      `      EXECUTE IMMEDIATE ('DROP TRIGGER "${triggerName}"');` +
      `      EXECUTE IMMEDIATE ('create or replace trigger "${triggerName}"` +
      `      BEFORE INSERT on "${tableName}" for each row` +
      `        declare` +
      `        checking number := 1;` +
      `        begin` +
      `          if (:new."${to}" is null) then` +
      `            while checking >= 1 loop` +
      `              select "${sequenceName}".nextval into :new."${to}" from dual;` +
      `              select count("${to}") into checking from "${tableName}"` +
      `              where "${to}" = :new."${to}";` +
      `            end loop;` +
      `          end if;` +
      `        end;');` +
      `    end if;` +
      `  end if;` +
      `END;`
    );
  },

  // Create the sequence + BEFORE INSERT trigger backing an increments()
  // column. The trigger looks up the table's PK column at creation time and
  // loops on nextval until it finds an unused value.
  createAutoIncrementTrigger: function(logger, tableName) {
    const triggerName = utils.generateCombinedName(
      logger,
      'autoinc_trg',
      tableName
    );
    const sequenceName = utils.generateCombinedName(logger, 'seq', tableName);
    return (
      `DECLARE ` +
      `PK_NAME VARCHAR(200); ` +
      `BEGIN` +
      `  EXECUTE IMMEDIATE ('CREATE SEQUENCE "${sequenceName}"');` +
      `  SELECT cols.column_name INTO PK_NAME` +
      `  FROM all_constraints cons, all_cons_columns cols` +
      `  WHERE cons.constraint_type = 'P'` +
      `  AND cons.constraint_name = cols.constraint_name` +
      `  AND cons.owner = cols.owner` +
      `  AND cols.table_name = '${tableName}';` +
      `  execute immediate ('create or replace trigger "${triggerName}"` +
      `  BEFORE INSERT on "${tableName}"` +
      `  for each row` +
      `  declare` +
      `  checking number := 1;` +
      `  begin` +
      `    if (:new."' || PK_NAME || '" is null) then` +
      `      while checking >= 1 loop` +
      `        select "${sequenceName}".nextval into :new."' || PK_NAME || '" from dual;` +
      `        select count("' || PK_NAME || '") into checking from "${tableName}"` +
      `        where "' || PK_NAME || '" = :new."' || PK_NAME || '";` +
      `      end loop;` +
      `    end if;` +
      `  end;'); ` +
      `END;`
    );
  },

  // Rename a table and, when an auto-increment trigger exists, rename its
  // sequence and recreate the trigger against the new table name.
  renameTableAndAutoIncrementTrigger: function(logger, tableName, to) {
    const triggerName = utils.generateCombinedName(
      logger,
      'autoinc_trg',
      tableName
    );
    const sequenceName = utils.generateCombinedName(logger, 'seq', tableName);
    const toTriggerName = utils.generateCombinedName(logger, 'autoinc_trg', to);
    const toSequenceName = utils.generateCombinedName(logger, 'seq', to);
    return (
      `DECLARE ` +
      `PK_NAME VARCHAR(200); ` +
      `IS_AUTOINC NUMBER := 0; ` +
      `BEGIN` +
      `  EXECUTE IMMEDIATE ('RENAME "${tableName}" TO "${to}"');` +
      `  SELECT COUNT(*) INTO IS_AUTOINC from "USER_TRIGGERS" where trigger_name = '${triggerName}';` +
      `  IF (IS_AUTOINC > 0) THEN` +
      `    EXECUTE IMMEDIATE ('DROP TRIGGER "${triggerName}"');` +
      `    EXECUTE IMMEDIATE ('RENAME "${sequenceName}" TO "${toSequenceName}"');` +
      `    SELECT cols.column_name INTO PK_NAME` +
      `    FROM all_constraints cons, all_cons_columns cols` +
      `    WHERE cons.constraint_type = 'P'` +
      `    AND cons.constraint_name = cols.constraint_name` +
      `    AND cons.owner = cols.owner` +
      `    AND cols.table_name = '${to}';` +
      `    EXECUTE IMMEDIATE ('create or replace trigger "${toTriggerName}"` +
      `    BEFORE INSERT on "${to}" for each row` +
      `      declare` +
      `      checking number := 1;` +
      `      begin` +
      `        if (:new."' || PK_NAME || '" is null) then` +
      `          while checking >= 1 loop` +
      `            select "${toSequenceName}".nextval into :new."' || PK_NAME || '" from dual;` +
      `            select count("' || PK_NAME || '") into checking from "${to}"` +
      `            where "' || PK_NAME || '" = :new."' || PK_NAME || '";` +
      `          end loop;` +
      `        end if;` +
      `      end;');` +
      `  end if;` +
      `END;`
    );
  },
};

module.exports = trigger;

77
node_modules/knex/lib/dialects/oracle/transaction.js generated vendored Normal file
View File

@@ -0,0 +1,77 @@
const Bluebird = require('bluebird');
const Transaction = require('../../transaction');
const { isUndefined } = require('lodash');
const debugTx = require('debug')('knex:tx');
// Oracle transaction wrapper. Oracle has no explicit BEGIN statement:
// transactional behavior comes from disabling autocommit while the
// connection is checked out (see acquireConnection).
module.exports = class Oracle_Transaction extends Transaction {
  // disable autocommit to allow correct behavior (default is true)
  begin() {
    // No-op: work becomes transactional once autocommit is off.
    return Bluebird.resolve();
  }

  // Commit the driver transaction, then settle the outer promise with
  // `value` (or the commit error).
  commit(conn, value) {
    this._completed = true;
    return conn
      .commitAsync()
      .then(() => value)
      .then(this._resolver, this._rejecter);
  }

  release(conn, value) {
    return this._resolver(value);
  }

  // Roll back, then reject the outer promise with the caller's error.
  // Bluebird's .throw(err) turns a successful rollback into a rejection
  // carrying `err`, so the catch below receives it.
  rollback(conn, err) {
    this._completed = true;
    debugTx('%s: rolling back', this.txid);
    return conn
      .rollbackAsync()
      .throw(err)
      .catch((error) => {
        // `error` is undefined only when rollback was invoked without an
        // error argument (e.g. a user-initiated rollback).
        if (isUndefined(error)) {
          if (this.doNotRejectOnRollback) {
            this._resolver();
            return;
          }
          error = new Error(`Transaction rejected with non-error: ${error}`);
        }
        return this._rejecter(error);
      });
  }

  // Check out a connection (or reuse the one supplied via config), tag it
  // with the transaction id, disable autocommit for top-level transactions,
  // run `cb`, and restore/release the connection afterwards.
  acquireConnection(config, cb) {
    const configConnection = config && config.connection;
    return new Bluebird((resolve, reject) => {
      try {
        resolve(configConnection || this.client.acquireConnection());
      } catch (e) {
        reject(e);
      }
    })
      .then((connection) => {
        connection.__knexTxId = this.txid;
        return connection;
      })
      .then((connection) => {
        if (!this.outerTx) {
          connection.setAutoCommit(false);
        }
        return connection;
      })
      .then(async (connection) => {
        try {
          return await cb(connection);
        } finally {
          debugTx('%s: releasing connection', this.txid);
          connection.setAutoCommit(true);
          if (!configConnection) {
            this.client.releaseConnection(connection);
          } else {
            debugTx('%s: not releasing external connection', this.txid);
          }
        }
      });
  }
};

86
node_modules/knex/lib/dialects/oracle/utils.js generated vendored Normal file
View File

@@ -0,0 +1,86 @@
// Build "<table>[_<subNames>]_<postfix>" in lower case, kept under Oracle's
// 30-character identifier limit. Dots and dashes in the table name become
// underscores. When the generated name is too long it is replaced by the
// base64-encoded sha1 of itself (padding stripped) and a warning is logged.
//
// @param {Object} logger - knex logger; only `warn` is used.
// @param {string} postfix - identifier suffix, e.g. 'seq', 'index'.
// @param {string} name - table name.
// @param {string|string[]} [subNames] - optional column name(s).
// @returns {string} a valid Oracle identifier.
function generateCombinedName(logger, postfix, name, subNames) {
  const limit = 30;
  if (!Array.isArray(subNames)) subNames = subNames ? [subNames] : [];
  const table = name.replace(/[.-]/g, '_');
  const subNamesPart = subNames.join('_');
  let result = `${table}_${
    subNamesPart.length ? subNamesPart + '_' : ''
  }${postfix}`.toLowerCase();
  if (result.length > limit) {
    logger.warn(
      `Automatically generated name "${result}" exceeds ${limit} character ` +
        `limit for Oracle. Using base64 encoded sha1 of that name instead.`
    );
    // Load crypto lazily: the common (short-name) path never needs it.
    const crypto = require('crypto');
    // sha1 is 20 bytes -> 28 base64 chars with exactly one '=' pad char.
    result = crypto
      .createHash('sha1')
      .update(result)
      .digest('base64')
      .replace('=', '');
  }
  return result;
}
// Wrap a DDL statement in a PL/SQL block that swallows one specific Oracle
// error code (e.g. -942 "table does not exist"), making the statement
// behave like "... IF EXISTS". Any other error is re-raised.
function wrapSqlWithCatch(sql, errorNumberToCatch) {
  const escapedSql = sql.replace(/'/g, "''");
  return (
    `begin execute immediate '${escapedSql}'; exception when others then ` +
    `if sqlcode != ${errorNumberToCatch} then raise; end if; end;`
  );
}
// Placeholder binding used to collect "returning ROWID into ..." out-params.
// Its string form is what ends up embedded in generated SQL / debug output.
function ReturningHelper(columnName) {
  this.columnName = columnName;
}

Object.assign(ReturningHelper.prototype, {
  toString() {
    return `[object ReturningHelper:${this.columnName}]`;
  },
});
// If the error message starts with one of these codes, the underlying
// session is gone (or never came up), so the pool must mark the connection
// as failed and discard it instead of reusing it.
function isConnectionError(err) {
  const FATAL_ERROR_PREFIXES = [
    'ORA-03114', // not connected to ORACLE
    'ORA-03113', // end-of-file on communication channel
    'ORA-03135', // connection lost contact
    'ORA-12514', // listener does not currently know of service requested in connect descriptor
    'ORA-00022', // invalid session ID; access denied
    'ORA-00028', // your session has been killed
    'ORA-00031', // your session has been marked for kill
    'ORA-00045', // your session has been terminated with no replay
    'ORA-00378', // buffer pools cannot be created as specified
    'ORA-00602', // internal programming exception
    'ORA-00603', // ORACLE server session terminated by fatal error
    'ORA-00609', // could not attach to incoming connection
    'ORA-01012', // not logged on
    'ORA-01041', // internal error. hostdef extension doesn't exist
    'ORA-01043', // user side memory corruption
    'ORA-01089', // immediate shutdown or close in progress
    'ORA-01092', // ORACLE instance terminated. Disconnection forced
    'ORA-02396', // exceeded maximum idle time, please connect again
    'ORA-03122', // attempt to close ORACLE-side window on user side
    'ORA-12153', // TNS'not connected
    'ORA-12537', // TNS'connection closed
    'ORA-12547', // TNS'lost contact
    'ORA-12570', // TNS'packet reader failure
    'ORA-12583', // TNS'no reader
    'ORA-27146', // post/wait initialization failed
    'ORA-28511', // lost RPC connection
    'ORA-56600', // an illegal OCI function call was issued
    'NJS-040',
    'NJS-024',
    'NJS-003',
  ];
  return FATAL_ERROR_PREFIXES.some((prefix) => err.message.startsWith(prefix));
}
// Public API of the Oracle dialect utility helpers.
module.exports = {
  generateCombinedName,
  isConnectionError,
  wrapSqlWithCatch,
  ReturningHelper,
};

454
node_modules/knex/lib/dialects/oracledb/index.js generated vendored Normal file
View File

@@ -0,0 +1,454 @@
// Oracledb Client
// -------
const _ = require('lodash');
const inherits = require('inherits');
const QueryCompiler = require('./query/compiler');
const ColumnCompiler = require('./schema/columncompiler');
const { BlobHelper, ReturningHelper, isConnectionError } = require('./utils');
const Bluebird = require('bluebird');
const stream = require('stream');
const { promisify } = require('util');
const Transaction = require('./transaction');
const Client_Oracle = require('../oracle');
const Oracle_Formatter = require('../oracle/formatter');
// node-oracledb client constructor; extends the generic Oracle client.
function Client_Oracledb() {
  Client_Oracle.apply(this, arguments);
  // Node.js only have 4 background threads by default, oracledb needs one by connection
  // so the libuv thread pool is grown by the configured pool size.
  // NOTE(review): the fallback of 1 looks suspicious given Node's documented
  // default of 4, but it is preserved to avoid a behavior change — confirm
  // against upstream before altering it.
  if (this.driver) {
    process.env.UV_THREADPOOL_SIZE = process.env.UV_THREADPOOL_SIZE || 1;
    // Fix: always pass an explicit radix to parseInt.
    process.env.UV_THREADPOOL_SIZE =
      parseInt(process.env.UV_THREADPOOL_SIZE, 10) + this.driver.poolMax;
  }
}
inherits(Client_Oracledb, Client_Oracle);

Client_Oracledb.prototype.driverName = 'oracledb';
// Load node-oracledb and translate the `fetchAsString` config (an array of
// type names) into the driver's type constants, cached on the client.
Client_Oracledb.prototype._driver = function() {
  const client = this;
  const oracledb = require('oracledb');
  client.fetchAsString = [];
  if (this.config.fetchAsString && _.isArray(this.config.fetchAsString)) {
    this.config.fetchAsString.forEach(function(type) {
      if (!_.isString(type)) return;
      type = type.toUpperCase();
      if (oracledb[type]) {
        if (type !== 'NUMBER' && type !== 'DATE' && type !== 'CLOB') {
          // BUG FIX: `this` inside this plain-function callback is not the
          // client, so `this.logger` was undefined and the warning crashed;
          // use the captured `client` reference instead.
          client.logger.warn(
            'Only "date", "number" and "clob" are supported for fetchAsString'
          );
        }
        client.fetchAsString.push(oracledb[type]);
      }
    });
  }
  return oracledb;
};
// Dialect-specific component factories, forwarding all arguments.
Client_Oracledb.prototype.queryCompiler = function(...args) {
  return new QueryCompiler(this, ...args);
};
Client_Oracledb.prototype.columnCompiler = function(...args) {
  return new ColumnCompiler(this, ...args);
};
Client_Oracledb.prototype.formatter = function(...args) {
  return new Oracledb_Formatter(this, ...args);
};
Client_Oracledb.prototype.transaction = function(...args) {
  return new Transaction(this, ...args);
};
// Convert knex bindings into the shapes node-oracledb expects:
// Blob/Returning helpers become driver OUT-binds (BLOB / ROWID-as-STRING),
// booleans become 0/1; everything else passes through untouched.
Client_Oracledb.prototype.prepBindings = function(bindings) {
  return _.map(bindings, (binding) => {
    if (this.driver) {
      if (binding instanceof BlobHelper) {
        return { type: this.driver.BLOB, dir: this.driver.BIND_OUT };
      }
      if (binding instanceof ReturningHelper) {
        return { type: this.driver.STRING, dir: this.driver.BIND_OUT };
      }
    }
    if (typeof binding === 'boolean') {
      return binding ? 1 : 0;
    }
    return binding;
  });
};
// Get a raw connection, called by the `pool` whenever a new
// connection needs to be added to the pool.
Client_Oracledb.prototype.acquireRawConnection = function() {
  const client = this;
  const asyncConnection = new Bluebird(function(resolver, rejecter) {
    // If external authentication don't have to worry about username/password and
    // if not need to set the username and password
    const oracleDbConfig = client.connectionSettings.externalAuth
      ? { externalAuth: client.connectionSettings.externalAuth }
      : {
          user: client.connectionSettings.user,
          password: client.connectionSettings.password,
        };
    // In the case of external authentication connection string will be given
    oracleDbConfig.connectString =
      client.connectionSettings.connectString ||
      client.connectionSettings.host + '/' + client.connectionSettings.database;
    if (client.connectionSettings.prefetchRowCount) {
      oracleDbConfig.prefetchRows = client.connectionSettings.prefetchRowCount;
    }
    if (!_.isUndefined(client.connectionSettings.stmtCacheSize)) {
      oracleDbConfig.stmtCacheSize = client.connectionSettings.stmtCacheSize;
    }
    client.driver.fetchAsString = client.fetchAsString;
    client.driver.getConnection(oracleDbConfig, function(err, connection) {
      if (err) {
        return rejecter(err);
      }
      // Commit helper: a no-op while the connection is part of an explicit
      // transaction (the transaction object commits), otherwise a promise
      // wrapper over the driver's commit.
      connection.commitAsync = function() {
        return new Bluebird((commitResolve, commitReject) => {
          if (connection.isTransaction) {
            return commitResolve();
          }
          this.commit(function(err) {
            if (err) {
              return commitReject(err);
            }
            commitResolve();
          });
        });
      };
      // Rollback helper: promise wrapper over the driver's rollback.
      connection.rollbackAsync = function() {
        return new Bluebird((rollbackResolve, rollbackReject) => {
          this.rollback(function(err) {
            if (err) {
              return rollbackReject(err);
            }
            rollbackResolve();
          });
        });
      };
      // Low-level execute wrapper; when `options.resultSet` is set, rows are
      // drained from the result set in batches of `numRows`.
      const fetchAsync = promisify(function(sql, bindParams, options, cb) {
        options = options || {};
        // OUT_FORMAT_OBJECT is the modern constant name; OBJECT covers
        // older driver versions.
        options.outFormat =
          client.driver.OUT_FORMAT_OBJECT || client.driver.OBJECT;
        if (!options.outFormat) {
          throw new Error('not found oracledb.outFormat constants');
        }
        if (options.resultSet) {
          connection.execute(sql, bindParams || [], options, function(
            err,
            result
          ) {
            if (err) {
              if (isConnectionError(err)) {
                connection.close().catch(function(err) {});
                connection.__knex__disposed = err;
              }
              return cb(err);
            }
            const fetchResult = { rows: [], resultSet: result.resultSet };
            const numRows = 100;
            const fetchRowsFromRS = function(connection, resultSet, numRows) {
              resultSet.getRows(numRows, function(err, rows) {
                if (err) {
                  if (isConnectionError(err)) {
                    connection.close().catch(function(err) {});
                    connection.__knex__disposed = err;
                  }
                  resultSet.close(function() {
                    return cb(err);
                  });
                } else if (rows.length === 0) {
                  return cb(null, fetchResult);
                } else if (rows.length > 0) {
                  if (rows.length === numRows) {
                    // A full batch may mean more rows remain: recurse.
                    fetchResult.rows = fetchResult.rows.concat(rows);
                    fetchRowsFromRS(connection, resultSet, numRows);
                  } else {
                    fetchResult.rows = fetchResult.rows.concat(rows);
                    return cb(null, fetchResult);
                  }
                }
              });
            };
            fetchRowsFromRS(connection, result.resultSet, numRows);
          });
        } else {
          connection.execute(sql, bindParams || [], options, function(
            err,
            result
          ) {
            if (err) {
              // dispose the connection on connection error
              if (isConnectionError(err)) {
                connection.close().catch(function(err) {});
                connection.__knex__disposed = err;
              }
              return cb(err);
            }
            return cb(null, result);
          });
        }
      });
      connection.executeAsync = function(sql, bindParams, options) {
        // Read all lob
        return fetchAsync(sql, bindParams, options).then(async (results) => {
          const closeResultSet = () => {
            return results.resultSet
              ? promisify(results.resultSet.close).call(results.resultSet)
              : Promise.resolve();
          };
          // Collect LOBs to read
          const lobs = [];
          if (results.rows) {
            if (Array.isArray(results.rows)) {
              for (let i = 0; i < results.rows.length; i++) {
                // Iterate through the rows
                const row = results.rows[i];
                for (const column in row) {
                  if (row[column] instanceof stream.Readable) {
                    lobs.push({ index: i, key: column, stream: row[column] });
                  }
                }
              }
            }
          }
          try {
            for (const lob of lobs) {
              // todo should be fetchAsString/fetchAsBuffer polyfill only
              results.rows[lob.index][lob.key] = await lobProcessing(
                lob.stream
              );
            }
          } catch (e) {
            // Close the result set even when LOB reading fails, then rethrow.
            await closeResultSet().catch(() => {});
            throw e;
          }
          await closeResultSet();
          return results;
        });
      };
      resolver(connection);
    });
  });
  return asyncConnection;
};
// Used to explicitly close a connection, called internally by the pool
// when a connection times out or the pool is shutdown.
Client_Oracledb.prototype.destroyRawConnection = function(connection) {
  // `release()` hands the connection back to the driver, which closes it.
  return connection.release();
};
// Runs the query on the specified connection, providing the bindings
// and any other necessary prep work.
Client_Oracledb.prototype._query = function(connection, obj) {
  if (!obj.sql) throw new Error('The query is empty');
  // autoCommit stays off so the explicit commitAsync() calls below control
  // when changes become visible.
  const options = { autoCommit: false };
  if (obj.method === 'select') {
    options.resultSet = true;
  }
  return Bluebird.resolve(
    connection.executeAsync(obj.sql, obj.bindings, options)
  ).then(async function(response) {
    // Flatten outBinds
    let outBinds = _.flatten(response.outBinds);
    obj.response = response.rows || [];
    obj.rowsAffected = response.rows
      ? response.rows.rowsAffected
      : response.rowsAffected;
    //added for outBind parameter
    if (obj.method === 'raw' && outBinds.length > 0) {
      return {
        response: outBinds,
      };
    }
    if (obj.method === 'update') {
      // Multi-row updates interleave the OUT binds column-major; regroup
      // them per modified row so they line up with `obj.outBinding`.
      const modifiedRowsCount = obj.rowsAffected.length || obj.rowsAffected;
      const updatedObjOutBinding = [];
      const updatedOutBinds = [];
      const updateOutBinds = (i) =>
        function(value, index) {
          const OutBindsOffset = index * modifiedRowsCount;
          updatedOutBinds.push(outBinds[i + OutBindsOffset]);
        };
      for (let i = 0; i < modifiedRowsCount; i++) {
        updatedObjOutBinding.push(obj.outBinding[0]);
        _.each(obj.outBinding[0], updateOutBinds(i));
      }
      outBinds = updatedOutBinds;
      obj.outBinding = updatedObjOutBinding;
    }
    if (!obj.returning && outBinds.length === 0) {
      await connection.commitAsync();
      return obj;
    }
    const rowIds = [];
    let offset = 0;
    // Walk the OUT binds row by row: stream BLOB values into their LOB
    // locators, collect ROWIDs for a follow-up `returning *` select, and
    // copy plain returned values into the response.
    for (let line = 0; line < obj.outBinding.length; line++) {
      const ret = obj.outBinding[line];
      offset =
        offset +
        (obj.outBinding[line - 1] ? obj.outBinding[line - 1].length : 0);
      for (let index = 0; index < ret.length; index++) {
        const out = ret[index];
        await new Promise(function(bindResolver, bindRejecter) {
          if (out instanceof BlobHelper) {
            const blob = outBinds[index + offset];
            if (out.returning) {
              obj.response[line] = obj.response[line] || {};
              obj.response[line][out.columnName] = out.value;
            }
            blob.on('error', function(err) {
              bindRejecter(err);
            });
            blob.on('finish', function() {
              bindResolver();
            });
            blob.write(out.value);
            blob.end();
          } else if (obj.outBinding[line][index] === 'ROWID') {
            rowIds.push(outBinds[index + offset]);
            bindResolver();
          } else {
            obj.response[line] = obj.response[line] || {};
            obj.response[line][out] = outBinds[index + offset];
            bindResolver();
          }
        });
      }
    }
    return connection.commitAsync().then(function() {
      if (obj.returningSql) {
        // `returning *`: fetch the full rows by the ROWIDs gathered above.
        return connection
          .executeAsync(obj.returningSql(), rowIds, { resultSet: true })
          .then(function(response) {
            obj.response = response.rows;
            return obj;
          });
      }
      return obj;
    });
  });
};
/**
 * Drain a readable stream into a single value.
 * @param stream readable stream to consume
 * @param {'string' | 'buffer'} type accumulate as text or as a Buffer
 * @returns {Promise<string | Buffer>} the concatenated stream contents
 */
function readStream(stream, type) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    stream.on('error', reject);
    stream.on('data', (chunk) => {
      chunks.push(chunk);
    });
    stream.on('end', () => {
      resolve(type === 'string' ? chunks.join('') : Buffer.concat(chunks));
    });
  });
}
// Process the response as returned from the query: apply custom output
// handlers, shape select/pluck/first results, and surface either returned
// values or the affected-row count for writes.
Client_Oracledb.prototype.processResponse = function(obj, runner) {
  const method = obj.method;
  let response = obj.response;
  // A caller-supplied output handler takes precedence over all shaping.
  if (obj.output) {
    return obj.output.call(runner, response);
  }
  if (method === 'select' || method === 'pluck' || method === 'first') {
    if (method === 'pluck') {
      response = _.map(response, obj.pluck);
    }
    return method === 'first' ? response[0] : response;
  }
  if (
    method === 'insert' ||
    method === 'del' ||
    method === 'update' ||
    method === 'counter'
  ) {
    if (obj.returning && !_.isEmpty(obj.returning)) {
      // Single non-* returning column: unwrap to a flat list of values.
      if (obj.returning.length === 1 && obj.returning[0] !== '*') {
        return _.flatten(_.map(response, _.values));
      }
      return response;
    }
    if (!_.isUndefined(obj.rowsAffected)) {
      return obj.rowsAffected;
    }
    return 1;
  }
  return response;
};
// Convert an oracledb LOB stream into a string (CLOB) or Buffer (BLOB),
// handling both the modern (`stream.type`, driver v1.2-v4) and the legacy
// (`stream.iLob`, driver v1) shapes.
const lobProcessing = function(stream) {
  const oracledb = require('oracledb');
  /**
   * @type 'string' | 'buffer'
   */
  let type;
  if (stream.type) {
    // v1.2-v4
    type =
      stream.type === oracledb.BLOB
        ? 'buffer'
        : stream.type === oracledb.CLOB
        ? 'string'
        : undefined;
  } else if (stream.iLob) {
    // v1
    type =
      stream.iLob.type === oracledb.CLOB
        ? 'string'
        : stream.iLob.type === oracledb.BLOB
        ? 'buffer'
        : undefined;
  } else {
    throw new Error('Unrecognized oracledb lob stream type');
  }
  if (type === 'string') {
    stream.setEncoding('utf-8');
  }
  return readStream(stream, type);
};
// Formatter specialization: BLOB helpers are rendered as EMPTY_BLOB()
// placeholders (their data is written back afterwards via OUT binds).
class Oracledb_Formatter extends Oracle_Formatter {
  // Checks whether a value is a function... if it is, we compile it
  // otherwise we check whether it's a raw
  parameter(value) {
    if (typeof value === 'function') {
      return this.outputQuery(this.compileCallback(value), true);
    }
    if (value instanceof BlobHelper) {
      return 'EMPTY_BLOB()';
    }
    return this.unwrapRaw(value, true) || '?';
  }
}
module.exports = Client_Oracledb;

View File

@@ -0,0 +1,360 @@
const _ = require('lodash');
const inherits = require('inherits');
const Oracle_Compiler = require('../../oracle/query/compiler');
const ReturningHelper = require('../utils').ReturningHelper;
const BlobHelper = require('../utils').BlobHelper;
// Oracledb query compiler: extends the generic Oracle compiler with
// OUT-bind-aware insert/update compilation (see the methods assigned below).
function Oracledb_Compiler(client, builder) {
  Oracle_Compiler.call(this, client, builder);
}
inherits(Oracledb_Compiler, Oracle_Compiler);
_.assign(Oracledb_Compiler.prototype, {
  // Compiles an "insert" query, allowing for multiple
  // inserts using a single query statement.
  insert: function() {
    const self = this;
    // Prepare OUT-bind metadata (returning columns, BLOB helpers) alongside
    // the normalized insert values.
    const outBindPrep = this._prepOutbindings(
      this.single.insert,
      this.single.returning
    );
    const outBinding = outBindPrep.outBinding;
    const returning = outBindPrep.returning;
    const insertValues = outBindPrep.values;
    // A single empty row: insert defaults only.
    if (
      Array.isArray(insertValues) &&
      insertValues.length === 1 &&
      _.isEmpty(insertValues[0])
    ) {
      return this._addReturningToSqlAndConvert(
        'insert into ' +
          this.tableName +
          ' (' +
          this.formatter.wrap(this.single.returning) +
          ') values (default)',
        outBinding[0],
        this.tableName,
        returning
      );
    }
    if (
      _.isEmpty(this.single.insert) &&
      typeof this.single.insert !== 'function'
    ) {
      return '';
    }
    const insertData = this._prepInsert(insertValues);
    const sql = {};
    if (_.isString(insertData)) {
      return this._addReturningToSqlAndConvert(
        'insert into ' + this.tableName + ' ' + insertData,
        outBinding[0],
        this.tableName,
        returning
      );
    }
    // A single row: plain parameterized insert with a returning clause.
    if (insertData.values.length === 1) {
      return this._addReturningToSqlAndConvert(
        'insert into ' +
          this.tableName +
          ' (' +
          this.formatter.columnize(insertData.columns) +
          ') values (' +
          this.formatter.parameterize(insertData.values[0]) +
          ')',
        outBinding[0],
        this.tableName,
        returning
      );
    }
    // Multiple rows: emit one `execute immediate` per row inside an
    // anonymous PL/SQL block so each row can carry its own OUT binds.
    const insertDefaultsOnly = insertData.columns.length === 0;
    sql.returning = returning;
    sql.sql =
      'begin ' +
      _.map(insertData.values, function(value, index) {
        const parameterizedValues = !insertDefaultsOnly
          ? self.formatter.parameterize(value, self.client.valueForUndefined)
          : '';
        let subSql = 'insert into ' + self.tableName;
        if (insertDefaultsOnly) {
          // No columns given so only the default value
          subSql +=
            ' (' +
            self.formatter.wrap(self.single.returning) +
            ') values (default)';
        } else {
          subSql +=
            ' (' +
            self.formatter.columnize(insertData.columns) +
            ') values (' +
            parameterizedValues +
            ')';
        }
        let returningClause = '';
        let intoClause = '';
        // ToDo review if this code is still needed or could be dropped
        // eslint-disable-next-line no-unused-vars
        let usingClause = '';
        let outClause = '';
        _.each(value, function(val) {
          if (!(val instanceof BlobHelper)) {
            usingClause += ' ?,';
          }
        });
        usingClause = usingClause.slice(0, -1);
        // Build returning and into clauses
        _.each(outBinding[index], function(ret) {
          const columnName = ret.columnName || ret;
          returningClause += self.formatter.wrap(columnName) + ',';
          intoClause += ' ?,';
          outClause += ' out ?,';
          // Add Helpers to bindings
          if (ret instanceof BlobHelper) {
            return self.formatter.bindings.push(ret);
          }
          self.formatter.bindings.push(new ReturningHelper(columnName));
        });
        // Strip last comma
        returningClause = returningClause.slice(0, -1);
        intoClause = intoClause.slice(0, -1);
        outClause = outClause.slice(0, -1);
        if (returningClause && intoClause) {
          subSql += ' returning ' + returningClause + ' into' + intoClause;
        }
        // Pre bind position because subSql is an execute immediate parameter
        // later position binding will only convert the ? params
        subSql = self.formatter.client.positionBindings(subSql);
        const parameterizedValuesWithoutDefaultAndBlob = parameterizedValues
          .replace('DEFAULT, ', '')
          .replace(', DEFAULT', '')
          .replace('EMPTY_BLOB(), ', '')
          .replace(', EMPTY_BLOB()', '');
        return (
          "execute immediate '" +
          subSql.replace(/'/g, "''") +
          (parameterizedValuesWithoutDefaultAndBlob || value
            ? "' using "
            : '') +
          parameterizedValuesWithoutDefaultAndBlob +
          (parameterizedValuesWithoutDefaultAndBlob && outClause ? ',' : '') +
          outClause +
          ';'
        );
      }).join(' ') +
      'end;';
    sql.outBinding = outBinding;
    if (returning[0] === '*') {
      // Generate select statement with special order by
      // to keep the order because 'in (..)' may change the order
      sql.returningSql = function() {
        return (
          'select * from ' +
          self.tableName +
          ' where ROWID in (' +
          this.outBinding
            .map(function(v, i) {
              return ':' + (i + 1);
            })
            .join(', ') +
          ')' +
          ' order by case ROWID ' +
          this.outBinding
            .map(function(v, i) {
              return 'when CHARTOROWID(:' + (i + 1) + ') then ' + i;
            })
            .join(' ') +
          ' end'
        );
      };
    }
    return sql;
  },
  // Append a `returning ... into ...` clause to a single-row statement and
  // wrap it in the { sql, outBinding, returning[, returningSql] } shape the
  // runner expects. Returns `{ sql }` unchanged when there is nothing to bind.
  _addReturningToSqlAndConvert: function(
    sql,
    outBinding,
    tableName,
    returning
  ) {
    const self = this;
    const res = {
      sql: sql,
    };
    if (!outBinding) {
      return res;
    }
    const returningValues = Array.isArray(outBinding)
      ? outBinding
      : [outBinding];
    let returningClause = '';
    let intoClause = '';
    // Build returning and into clauses
    _.each(returningValues, function(ret) {
      const columnName = ret.columnName || ret;
      returningClause += self.formatter.wrap(columnName) + ',';
      intoClause += '?,';
      // Add Helpers to bindings
      if (ret instanceof BlobHelper) {
        return self.formatter.bindings.push(ret);
      }
      self.formatter.bindings.push(new ReturningHelper(columnName));
    });
    res.sql = sql;
    // Strip last comma
    returningClause = returningClause.slice(0, -1);
    intoClause = intoClause.slice(0, -1);
    if (returningClause && intoClause) {
      res.sql += ' returning ' + returningClause + ' into ' + intoClause;
    }
    res.outBinding = [outBinding];
    if (returning[0] === '*') {
      // `returning *` is resolved by a follow-up select keyed on ROWID.
      res.returningSql = function() {
        return 'select * from ' + self.tableName + ' where ROWID = :1';
      };
    }
    res.returning = returning;
    return res;
  },
  // Normalize insert/update values and the `returning` list, and build the
  // per-row OUT-bind descriptors. Buffer values are wrapped in BlobHelper
  // (and removed from `returning` into the OUT binds) and undefined values
  // are dropped. NOTE: mutates the row objects in `paramValues` in place.
  _prepOutbindings: function(paramValues, paramReturning) {
    const result = {};
    let params = paramValues || [];
    let returning = paramReturning || [];
    if (!Array.isArray(params) && _.isPlainObject(paramValues)) {
      params = [params];
    }
    // Always wrap returning argument in array
    if (returning && !Array.isArray(returning)) {
      returning = [returning];
    }
    const outBinding = [];
    // Handle Buffer value as Blob
    _.each(params, function(values, index) {
      if (returning[0] === '*') {
        outBinding[index] = ['ROWID'];
      } else {
        outBinding[index] = _.clone(returning);
      }
      _.each(values, function(value, key) {
        if (value instanceof Buffer) {
          values[key] = new BlobHelper(key, value);
          // Delete blob duplicate in returning
          const blobIndex = outBinding[index].indexOf(key);
          if (blobIndex >= 0) {
            outBinding[index].splice(blobIndex, 1);
            values[key].returning = true;
          }
          outBinding[index].push(values[key]);
        }
        if (_.isUndefined(value)) {
          delete params[index][key];
        }
      });
    });
    result.returning = returning;
    result.outBinding = outBinding;
    result.values = params;
    return result;
  },
  // Compiles an "update" query with an Oracle-style `returning ... into`
  // clause driven by the prepared OUT binds.
  update: function() {
    const self = this;
    const sql = {};
    const outBindPrep = this._prepOutbindings(
      this.single.update || this.single.counter,
      this.single.returning
    );
    const outBinding = outBindPrep.outBinding;
    const returning = outBindPrep.returning;
    const updates = this._prepUpdate(this.single.update);
    const where = this.where();
    let returningClause = '';
    let intoClause = '';
    if (_.isEmpty(updates) && typeof this.single.update !== 'function') {
      return '';
    }
    // Build returning and into clauses
    _.each(outBinding, function(out) {
      _.each(out, function(ret) {
        const columnName = ret.columnName || ret;
        returningClause += self.formatter.wrap(columnName) + ',';
        intoClause += ' ?,';
        // Add Helpers to bindings
        if (ret instanceof BlobHelper) {
          return self.formatter.bindings.push(ret);
        }
        self.formatter.bindings.push(new ReturningHelper(columnName));
      });
    });
    // Strip last comma
    returningClause = returningClause.slice(0, -1);
    intoClause = intoClause.slice(0, -1);
    sql.outBinding = outBinding;
    sql.returning = returning;
    sql.sql =
      'update ' +
      this.tableName +
      ' set ' +
      updates.join(', ') +
      (where ? ' ' + where : '');
    if (outBinding.length && !_.isEmpty(outBinding[0])) {
      sql.sql += ' returning ' + returningClause + ' into' + intoClause;
    }
    if (returning[0] === '*') {
      // `returning *`: select the touched rows back by ROWID, ordered to
      // match the update order (a bare `in (...)` would not preserve it).
      sql.returningSql = function() {
        let sql = 'select * from ' + self.tableName;
        const modifiedRowsCount = this.rowsAffected.length || this.rowsAffected;
        let returningSqlIn = ' where ROWID in (';
        let returningSqlOrderBy = ') order by case ROWID ';
        // Needs special order by because in(...) change result order
        for (let i = 0; i < modifiedRowsCount; i++) {
          if (this.returning[0] === '*') {
            returningSqlIn += ':' + (i + 1) + ', ';
            returningSqlOrderBy +=
              'when CHARTOROWID(:' + (i + 1) + ') then ' + i + ' ';
          }
        }
        if (this.returning[0] === '*') {
          this.returning = this.returning.slice(0, -1);
          returningSqlIn = returningSqlIn.slice(0, -2);
          returningSqlOrderBy = returningSqlOrderBy.slice(0, -1);
        }
        return (sql += returningSqlIn + returningSqlOrderBy + ' end');
      };
    }
    return sql;
  },
});
module.exports = Oracledb_Compiler;

View File

@@ -0,0 +1,36 @@
const inherits = require('inherits');
const ColumnCompiler_Oracle = require('../../oracle/schema/columncompiler');
const { isObject } = require('lodash');
// Oracledb column compiler: overrides the date/time column types to use
// `timestamp with local time zone` when timezone awareness is requested.
function ColumnCompiler_Oracledb() {
  ColumnCompiler_Oracle.apply(this, arguments);
}
inherits(ColumnCompiler_Oracledb, ColumnCompiler_Oracle);
Object.assign(ColumnCompiler_Oracledb.prototype, {
  time: 'timestamp with local time zone',
  // `withoutTz` may be a boolean or an options object ({ useTz }); a truthy
  // resolved flag selects the timezone-aware type.
  datetime: function(withoutTz) {
    const useTz = isObject(withoutTz) ? withoutTz.useTz : !withoutTz;
    return useTz ? 'timestamp with local time zone' : 'timestamp';
  },
  timestamp: function(withoutTz) {
    const useTz = isObject(withoutTz) ? withoutTz.useTz : !withoutTz;
    return useTz ? 'timestamp with local time zone' : 'timestamp';
  },
});
module.exports = ColumnCompiler_Oracledb;

87
node_modules/knex/lib/dialects/oracledb/transaction.js generated vendored Normal file
View File

@@ -0,0 +1,87 @@
const { isUndefined } = require('lodash');
const Bluebird = require('bluebird');
const Transaction = require('../../transaction');
const debugTx = require('debug')('knex:tx');
// Oracle transactions are not driven by SQL BEGIN/COMMIT statements;
// instead the oracledb connection itself is committed / rolled back via the
// commitAsync / rollbackAsync helpers attached in acquireRawConnection.
module.exports = class Oracle_Transaction extends Transaction {
  // disable autocommit to allow correct behavior (default is true)
  begin() {
    return Bluebird.resolve();
  }
  commit(conn, value) {
    this._completed = true;
    return conn
      .commitAsync()
      .then(() => value)
      .then(this._resolver, this._rejecter);
  }
  release(conn, value) {
    return this._resolver(value);
  }
  rollback(conn, err) {
    const self = this;
    this._completed = true;
    debugTx('%s: rolling back', this.txid);
    return conn
      .rollbackAsync()
      .timeout(5000)
      .catch(Bluebird.TimeoutError, function(e) {
        // Rollback took too long: surface the timeout instead of hanging.
        self._rejecter(e);
      })
      .then(function() {
        if (isUndefined(err)) {
          if (self.doNotRejectOnRollback) {
            self._resolver();
            return;
          }
          err = new Error(`Transaction rejected with non-error: ${err}`);
        }
        self._rejecter(err);
      });
  }
  savepoint(conn) {
    return this.query(conn, `SAVEPOINT ${this.txid}`);
  }
  acquireConnection(config, cb) {
    const configConnection = config && config.connection;
    const t = this;
    return new Bluebird((resolve, reject) => {
      try {
        this.client
          .acquireConnection()
          .then((cnx) => {
            // Mark the connection so commitAsync() no-ops until the
            // transaction completes.
            cnx.__knexTxId = this.txid;
            cnx.isTransaction = true;
            resolve(cnx);
          })
          .catch(reject);
      } catch (e) {
        reject(e);
      }
    }).then(async (connection) => {
      try {
        return await cb(connection);
      } finally {
        debugTx('%s: releasing connection', this.txid);
        // Clear the flag first so the final commitAsync() actually commits.
        connection.isTransaction = false;
        try {
          await connection.commitAsync();
        } catch (err) {
          t._rejecter(err);
        } finally {
          if (!configConnection) {
            await t.client.releaseConnection(connection);
          } else {
            debugTx('%s: not releasing external connection', t.txid);
          }
        }
      }
    });
  }
};

14
node_modules/knex/lib/dialects/oracledb/utils.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
const Utils = require('../oracle/utils');
// Marker wrapping a Buffer column value so the compiler/runner can emit
// EMPTY_BLOB() placeholders and stream the data back through OUT binds.
function BlobHelper(columnName, value) {
  this.columnName = columnName;
  this.value = value;
  // Flipped to true when the column must also be echoed in the response.
  this.returning = false;
}
BlobHelper.prototype.toString = function() {
  return `[object BlobHelper:${this.columnName}]`;
};
Utils.BlobHelper = BlobHelper;
module.exports = Utils;

321
node_modules/knex/lib/dialects/postgres/index.js generated vendored Normal file
View File

@@ -0,0 +1,321 @@
// PostgreSQL
// -------
const { map, extend, isString } = require('lodash');
const { promisify } = require('util');
const inherits = require('inherits');
const Client = require('../../client');
const Bluebird = require('bluebird');
const QueryCompiler = require('./query/compiler');
const ColumnCompiler = require('./schema/columncompiler');
const TableCompiler = require('./schema/tablecompiler');
const SchemaCompiler = require('./schema/compiler');
const { makeEscape } = require('../../query/string');
// PostgreSQL client. Honors the optional `returning` default and
// `searchPath` settings from the connection config.
function Client_PG(config) {
  Client.apply(this, arguments);
  if (config.returning) {
    this.defaultReturning = config.returning;
  }
  if (config.searchPath) {
    this.searchPath = config.searchPath;
  }
}
inherits(Client_PG, Client);
Object.assign(Client_PG.prototype, {
  // Dialect-specific compiler factories; builder arguments pass through.
  queryCompiler() {
    return new QueryCompiler(this, ...arguments);
  },
  columnCompiler() {
    return new ColumnCompiler(this, ...arguments);
  },
  schemaCompiler() {
    return new SchemaCompiler(this, ...arguments);
  },
  tableCompiler() {
    return new TableCompiler(this, ...arguments);
  },
  dialect: 'postgresql',
  driverName: 'pg',
  // Lazily require the `pg` driver so it is only loaded when used.
  _driver() {
    return require('pg');
  },
  // Literal-escaping rules used when bindings are inlined into SQL text.
  _escapeBinding: makeEscape({
    escapeArray(val, esc) {
      return esc(arrayString(val, esc));
    },
    // Quote a string literal, doubling embedded quotes/backslashes; the E''
    // prefix is required whenever the literal contains backslash escapes.
    escapeString(str) {
      let hasBackslash = false;
      let escaped = "'";
      for (let i = 0; i < str.length; i++) {
        const c = str[i];
        if (c === "'") {
          escaped += c + c;
        } else if (c === '\\') {
          escaped += c + c;
          hasBackslash = true;
        } else {
          escaped += c;
        }
      }
      escaped += "'";
      if (hasBackslash === true) {
        escaped = 'E' + escaped;
      }
      return escaped;
    },
    // Objects exposing toPostgres() are honored (with cycle detection);
    // everything else is JSON-serialized.
    escapeObject(val, prepareValue, timezone, seen = []) {
      if (val && typeof val.toPostgres === 'function') {
        seen = seen || [];
        if (seen.indexOf(val) !== -1) {
          throw new Error(
            `circular reference detected while preparing "${val}" for query`
          );
        }
        seen.push(val);
        return prepareValue(val.toPostgres(prepareValue), seen);
      }
      return JSON.stringify(val);
    },
  }),
wrapIdentifierImpl(value) {
if (value === '*') return value;
let arrayAccessor = '';
const arrayAccessorMatch = value.match(/(.*?)(\[[0-9]+\])/);
if (arrayAccessorMatch) {
value = arrayAccessorMatch[1];
arrayAccessor = arrayAccessorMatch[2];
}
return `"${value.replace(/"/g, '""')}"${arrayAccessor}`;
},
  // Get a raw connection, called by the `pool` whenever a new
  // connection needs to be added to the pool.
  acquireRawConnection() {
    const client = this;
    return new Bluebird(function(resolver, rejecter) {
      const connection = new client.driver.Client(client.connectionSettings);
      connection.connect(function(err, connection) {
        if (err) {
          return rejecter(err);
        }
        // Flag fatal errors so the pool knows to evict this connection.
        connection.on('error', (err) => {
          connection.__knex__disposed = err;
        });
        connection.on('end', (err) => {
          connection.__knex__disposed = err || 'Connection ended unexpectedly';
        });
        // First connection: capture the server version for feature checks.
        if (!client.version) {
          return client.checkVersion(connection).then(function(version) {
            client.version = version;
            resolver(connection);
          });
        }
        resolver(connection);
      });
    }).then(function setSearchPath(connection) {
      client.setSchemaSearchPath(connection);
      return connection;
    });
  },
// Used to explicitly close a connection, called internally by the pool
// when a connection times out or the pool is shutdown.
async destroyRawConnection(connection) {
const end = promisify((cb) => connection.end(cb));
return end();
},
  // In PostgreSQL, we need to do a version check to do some feature
  // checking on the database.
  checkVersion(connection) {
    return new Bluebird(function(resolver, rejecter) {
      connection.query('select version();', function(err, resp) {
        if (err) return rejecter(err);
        // e.g. "PostgreSQL 12.4 on x86_64-..." resolves to "12.4"
        resolver(/^PostgreSQL (.*?)( |$)/.exec(resp.rows[0].version)[1]);
      });
    });
  },
// Position the bindings for the query. The escape sequence for question mark
// is \? (e.g. knex.raw("\\?") since javascript requires '\' to be escaped too...)
positionBindings(sql) {
let questionCount = 0;
return sql.replace(/(\\*)(\?)/g, function(match, escapes) {
if (escapes.length % 2) {
return '?';
} else {
questionCount++;
return `$${questionCount}`;
}
});
},
setSchemaSearchPath(connection, searchPath) {
let path = searchPath || this.searchPath;
if (!path) return Bluebird.resolve(true);
if (!Array.isArray(path) && !isString(path)) {
throw new TypeError(
`knex: Expected searchPath to be Array/String, got: ${typeof path}`
);
}
if (isString(path)) {
if (path.includes(',')) {
const parts = path.split(',');
const arraySyntax = `[${parts
.map((searchPath) => `'${searchPath}'`)
.join(', ')}]`;
this.logger.warn(
`Detected comma in searchPath "${path}".` +
`If you are trying to specify multiple schemas, use Array syntax: ${arraySyntax}`
);
}
path = [path];
}
path = path.map((schemaName) => `"${schemaName}"`).join(',');
return new Bluebird(function(resolver, rejecter) {
connection.query(`set search_path to ${path}`, function(err) {
if (err) return rejecter(err);
resolver(true);
});
});
},
  // Stream query results through `pg-query-stream`, piping them into the
  // caller-supplied destination stream.
  _stream(connection, obj, stream, options) {
    const PGQueryStream = process.browser
      ? undefined
      : require('pg-query-stream');
    const sql = obj.sql;
    return new Bluebird(function(resolver, rejecter) {
      const queryStream = connection.query(
        new PGQueryStream(sql, obj.bindings, options)
      );
      queryStream.on('error', function(error) {
        rejecter(error);
        stream.emit('error', error);
      });
      // 'end' IS propagated by .pipe, by default
      stream.on('end', resolver);
      queryStream.pipe(stream);
    });
  },
  // Runs the query on the specified connection, providing the bindings
  // and any other necessary prep work.
  _query(connection, obj) {
    let queryConfig = {
      text: obj.sql,
      values: obj.bindings || [],
    };
    // Extra pg query options (e.g. rowMode) are merged over the defaults.
    if (obj.options) {
      queryConfig = extend(queryConfig, obj.options);
    }
    return new Bluebird(function(resolver, rejecter) {
      connection.query(queryConfig, function(err, response) {
        if (err) return rejecter(err);
        obj.response = response;
        resolver(obj);
      });
    });
  },
  // Ensures the response is returned in the same format as other clients.
  processResponse(obj, runner) {
    const resp = obj.response;
    if (obj.output) return obj.output.call(runner, resp);
    if (obj.method === 'raw') return resp;
    const { returning } = obj;
    if (resp.command === 'SELECT') {
      if (obj.method === 'first') return resp.rows[0];
      if (obj.method === 'pluck') return map(resp.rows, obj.pluck);
      return resp.rows;
    }
    if (returning) {
      const returns = [];
      for (let i = 0, l = resp.rows.length; i < l; i++) {
        const row = resp.rows[i];
        if (returning === '*' || Array.isArray(returning)) {
          returns[i] = row;
        } else {
          // Pluck the only column in the row.
          returns[i] = row[Object.keys(row)[0]];
        }
      }
      return returns;
    }
    if (resp.command === 'UPDATE' || resp.command === 'DELETE') {
      // Writes with no returning clause report the affected-row count.
      return resp.rowCount;
    }
    return resp;
  },
  canCancelQuery: true,
  // Cancel the query running on `connectionToKill` by issuing
  // pg_cancel_backend() from a second, freshly-acquired pooled connection.
  cancelQuery(connectionToKill) {
    const acquiringConn = this.acquireConnection();
    // Error out if we can't acquire connection in time.
    // Purposely not putting timeout on `pg_cancel_backend` execution because erroring
    // early there would release the `connectionToKill` back to the pool with
    // a `KILL QUERY` command yet to finish.
    return acquiringConn.then((conn) => {
      return this._wrappedCancelQueryCall(conn, connectionToKill).finally(
        () => {
          // NOT returning this promise because we want to release the connection
          // in a non-blocking fashion
          this.releaseConnection(conn);
        }
      );
    });
  },
  _wrappedCancelQueryCall(conn, connectionToKill) {
    return this.query(conn, {
      method: 'raw',
      sql: 'SELECT pg_cancel_backend(?);',
      bindings: [connectionToKill.processID],
      options: {},
    });
  },
});
// Render a JS array as a PostgreSQL array literal (`{...}`): null/undefined
// become NULL, nested arrays recurse, numbers are inlined, and every other
// value is escaped via `esc` and JSON-quoted.
function arrayString(arr, esc) {
  const parts = arr.map((val) => {
    if (val === null || typeof val === 'undefined') {
      return 'NULL';
    }
    if (Array.isArray(val)) {
      return arrayString(val, esc);
    }
    if (typeof val === 'number') {
      return String(val);
    }
    return JSON.stringify(typeof val === 'string' ? val : esc(val));
  });
  return `{${parts.join(',')}}`;
}
module.exports = Client_PG;

View File

@@ -0,0 +1,166 @@
// PostgreSQL Query Builder & Compiler
// ------
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const { reduce, identity } = require('lodash');
// PostgreSQL-specific query compiler, layered on the generic compiler.
function QueryCompiler_PG(client, builder) {
  QueryCompiler.call(this, client, builder);
}
inherits(QueryCompiler_PG, QueryCompiler);
Object.assign(QueryCompiler_PG.prototype, {
  // Compiles a truncate query.
  truncate() {
    return `truncate ${this.tableName} restart identity`;
  },
  // Used when an array with multiple empty value objects is supplied.
  _defaultInsertValue: 'default',
  // Compiles an `insert` query, allowing for multiple
  // inserts using a single query statement.
  insert() {
    const sql = QueryCompiler.prototype.insert.call(this);
    if (sql === '') return sql;
    const { returning } = this.single;
    return {
      sql: sql + this._returning(returning),
      returning,
    };
  },
  // Compiles an `update` query, allowing for a return value.
  update() {
    const withSQL = this.with();
    const updateData = this._prepUpdate(this.single.update);
    const wheres = this.where();
    const { returning } = this.single;
    return {
      sql:
        withSQL +
        `update ${this.single.only ? 'only ' : ''}${this.tableName} ` +
        `set ${updateData.join(', ')}` +
        (wheres ? ` ${wheres}` : '') +
        this._returning(returning),
      returning,
    };
  },
  // Compiles a `delete` query, allowing for a return value.
  del() {
    const sql = QueryCompiler.prototype.del.apply(this, arguments);
    const { returning } = this.single;
    return {
      sql: sql + this._returning(returning),
      returning,
    };
  },
  aggregate(stmt) {
    return this._aggregate(stmt, { distinctParentheses: true });
  },
  // Render a `returning` clause (empty string when not requested).
  _returning(value) {
    return value ? ` returning ${this.formatter.columnize(value)}` : '';
  },
  // Join array of table names and apply default schema.
  _tableNames(tables) {
    const schemaName = this.single.schema;
    const sql = [];
    for (let i = 0; i < tables.length; i++) {
      let tableName = tables[i];
      if (tableName) {
        if (schemaName) {
          tableName = `${schemaName}.${tableName}`;
        }
        sql.push(this.formatter.wrap(tableName));
      }
    }
    return sql.join(', ');
  },
  // Row-locking clauses, optionally scoped to specific tables.
  forUpdate() {
    const tables = this.single.lockTables || [];
    return (
      'for update' + (tables.length ? ' of ' + this._tableNames(tables) : '')
    );
  },
  forShare() {
    const tables = this.single.lockTables || [];
    return (
      'for share' + (tables.length ? ' of ' + this._tableNames(tables) : '')
    );
  },
  skipLocked() {
    return 'skip locked';
  },
  noWait() {
    return 'nowait';
  },
// Compiles a columnInfo query
columnInfo() {
const column = this.single.columnInfo;
let schema = this.single.schema;
// The user may have specified a custom wrapIdentifier function in the config. We
// need to run the identifiers through that function, but not format them as
// identifiers otherwise.
const table = this.client.customWrapIdentifier(this.single.table, identity);
if (schema) {
schema = this.client.customWrapIdentifier(schema, identity);
}
let sql =
'select * from information_schema.columns where table_name = ? and table_catalog = ?';
const bindings = [table, this.client.database()];
if (schema) {
sql += ' and table_schema = ?';
bindings.push(schema);
} else {
sql += ' and table_schema = current_schema()';
}
return {
sql,
bindings,
output(resp) {
const out = reduce(
resp.rows,
function(columns, val) {
columns[val.column_name] = {
type: val.data_type,
maxLength: val.character_maximum_length,
nullable: val.is_nullable === 'YES',
defaultValue: val.column_default,
};
return columns;
},
{}
);
return (column && out[column]) || out;
},
};
},
  // Compiles PG's `distinct on (...)` select prefix (trailing space included
  // so it concatenates cleanly with the column list).
  distinctOn(value) {
    return 'distinct on (' + this.formatter.columnize(value) + ') ';
  },
});
module.exports = QueryCompiler_PG;

View File

@@ -0,0 +1,122 @@
// PostgreSQL Column Compiler
// -------
const inherits = require('inherits');
const ColumnCompiler = require('../../../schema/columncompiler');
const { isObject } = require('lodash');
// Column compiler for PostgreSQL: maps knex column types to PG DDL types and
// restricts the supported column modifiers to nullable/defaultTo/comment.
function ColumnCompiler_PG() {
  ColumnCompiler.apply(this, arguments);
  this.modifiers = ['nullable', 'defaultTo', 'comment'];
}
inherits(ColumnCompiler_PG, ColumnCompiler);
Object.assign(ColumnCompiler_PG.prototype, {
  // Types
  // ------
  bigincrements: 'bigserial primary key',
  bigint: 'bigint',
  binary: 'bytea',
  // Fixed-length bit string; `length === false` means "no explicit length".
  bit(column) {
    return column.length !== false ? `bit(${column.length})` : 'bit';
  },
  bool: 'boolean',
  // Create the column definition for an enum type.
  // Using method "2" here: http://stackoverflow.com/a/10984951/525714
  enu(allowed, options) {
    options = options || {};
    // With a native, pre-existing enum type there is nothing to interpolate.
    const values =
      options.useNative && options.existingType
        ? undefined
        : allowed.join("', '");
    if (options.useNative) {
      let enumName = '';
      const schemaName = options.schemaName || this.tableCompiler.schemaNameRaw;
      if (schemaName) {
        enumName += `"${schemaName}".`;
      }
      enumName += `"${options.enumName}"`;
      // Emit CREATE TYPE ahead of the table DDL unless the type exists already.
      if (!options.existingType) {
        this.tableCompiler.unshiftQuery(
          `create type ${enumName} as enum ('${values}')`
        );
      }
      return enumName;
    }
    // Non-native fallback: a text column constrained by CHECK (... in (...)).
    return `text check (${this.formatter.wrap(this.args[0])} in ('${values}'))`;
  },
  double: 'double precision',
  decimal(precision, scale) {
    if (precision === null) return 'decimal';
    // Defaults (via ColumnCompiler._num): precision 8, scale 2.
    return `decimal(${this._num(precision, 8)}, ${this._num(scale, 2)})`;
  },
  floating: 'real',
  increments: 'serial primary key',
  json(jsonb) {
    if (jsonb) this.client.logger.deprecate('json(true)', 'jsonb()');
    return jsonColumn(this.client, jsonb);
  },
  jsonb() {
    return jsonColumn(this.client, true);
  },
  smallint: 'smallint',
  tinyint: 'smallint',
  // datetime/timestamp honor `useTz` (default true -> timestamptz) and an
  // optional fractional-seconds precision; the first argument may also be
  // an options object ({ useTz, precision }).
  datetime(withoutTz = false, precision) {
    let useTz;
    if (isObject(withoutTz)) {
      ({ useTz, precision } = withoutTz);
    } else {
      useTz = !withoutTz;
    }
    return `${useTz ? 'timestamptz' : 'timestamp'}${
      precision ? '(' + precision + ')' : ''
    }`;
  },
  timestamp(withoutTz = false, precision) {
    let useTz;
    if (isObject(withoutTz)) {
      ({ useTz, precision } = withoutTz);
    } else {
      useTz = !withoutTz;
    }
    return `${useTz ? 'timestamptz' : 'timestamp'}${
      precision ? '(' + precision + ')' : ''
    }`;
  },
  uuid: 'uuid',
  // Modifiers:
  // ------
  // Queue a COMMENT ON COLUMN statement after the main DDL.
  // NOTE(review): the comment text is interpolated without escaping embedded
  // single quotes — verify callers sanitize it.
  comment(comment) {
    const columnName = this.args[0] || this.defaults('columnName');
    this.pushAdditional(function() {
      this.pushQuery(
        `comment on column ${this.tableCompiler.tableName()}.` +
          this.formatter.wrap(columnName) +
          ' is ' +
          (comment ? `'${comment}'` : 'NULL')
      );
    }, comment);
  },
});
// Resolve the DDL type used by json()/jsonb(): native json/jsonb exists from
// PostgreSQL 9.2 onward (or when the server version is unknown); older (or
// unparseable) versions fall back to a plain text column.
function jsonColumn(client, jsonb) {
  const versionKnown = Boolean(client.version);
  if (versionKnown && !(parseFloat(client.version) >= 9.2)) {
    return 'text';
  }
  return jsonb ? 'jsonb' : 'json';
}
module.exports = ColumnCompiler_PG;

View File

@@ -0,0 +1,109 @@
// PostgreSQL Schema Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler = require('../../../schema/compiler');
// Schema compiler for PostgreSQL: existence checks via information_schema
// plus schema/extension DDL helpers.
function SchemaCompiler_PG() {
  SchemaCompiler.apply(this, arguments);
}
inherits(SchemaCompiler_PG, SchemaCompiler);
// Compile the query to determine if a table exists, scoped to the configured
// schema or to the connection's current schema.
SchemaCompiler_PG.prototype.hasTable = function(tableName) {
  let sql = 'select * from information_schema.tables where table_name = ?';
  const bindings = [tableName];
  if (this.schema) {
    sql += ' and table_schema = ?';
    bindings.push(this.schema);
  } else {
    sql += ' and table_schema = current_schema()';
  }
  this.pushQuery({
    sql,
    bindings,
    // Any matching row means the table exists.
    output(resp) {
      return resp.rows.length > 0;
    },
  });
};
// Compile the query to determine if a column exists in a table.
SchemaCompiler_PG.prototype.hasColumn = function(tableName, columnName) {
  let sql =
    'select * from information_schema.columns where table_name = ? and column_name = ?';
  const bindings = [tableName, columnName];
  if (this.schema) {
    sql += ' and table_schema = ?';
    bindings.push(this.schema);
  } else {
    sql += ' and table_schema = current_schema()';
  }
  this.pushQuery({
    sql,
    bindings,
    output(resp) {
      return resp.rows.length > 0;
    },
  });
};
// Prefix the table name with the builder's schema (when one is configured)
// before wrapping the whole thing as an identifier.
SchemaCompiler_PG.prototype.qualifiedTableName = function(tableName) {
  let name = tableName;
  if (this.schema) {
    name = `${this.schema}.${tableName}`;
  }
  return this.formatter.wrap(name);
};
// Compile a rename table command.
SchemaCompiler_PG.prototype.renameTable = function(from, to) {
  this.pushQuery(
    `alter table ${this.qualifiedTableName(
      from
    )} rename to ${this.formatter.wrap(to)}`
  );
};
// Schema DDL helpers: create/drop a PostgreSQL schema.
SchemaCompiler_PG.prototype.createSchema = function(schemaName) {
  this.pushQuery(`create schema ${this.formatter.wrap(schemaName)}`);
};
SchemaCompiler_PG.prototype.createSchemaIfNotExists = function(schemaName) {
  this.pushQuery(
    `create schema if not exists ${this.formatter.wrap(schemaName)}`
  );
};
SchemaCompiler_PG.prototype.dropSchema = function(schemaName) {
  this.pushQuery(`drop schema ${this.formatter.wrap(schemaName)}`);
};
SchemaCompiler_PG.prototype.dropSchemaIfExists = function(schemaName) {
  this.pushQuery(`drop schema if exists ${this.formatter.wrap(schemaName)}`);
};
// Extension DDL helpers: create/drop a PostgreSQL extension.
SchemaCompiler_PG.prototype.dropExtension = function(extensionName) {
  this.pushQuery(`drop extension ${this.formatter.wrap(extensionName)}`);
};
SchemaCompiler_PG.prototype.dropExtensionIfExists = function(extensionName) {
  this.pushQuery(
    `drop extension if exists ${this.formatter.wrap(extensionName)}`
  );
};
SchemaCompiler_PG.prototype.createExtension = function(extensionName) {
  this.pushQuery(`create extension ${this.formatter.wrap(extensionName)}`);
};
SchemaCompiler_PG.prototype.createExtensionIfNotExists = function(
  extensionName
) {
  this.pushQuery(
    `create extension if not exists ${this.formatter.wrap(extensionName)}`
  );
};
module.exports = SchemaCompiler_PG;

View File

@@ -0,0 +1,183 @@
/* eslint max-len: 0 */
// PostgreSQL Table Builder & Compiler
// -------
const inherits = require('inherits');
const TableCompiler = require('../../../schema/tablecompiler');
const { has } = require('lodash');
// Table compiler for PostgreSQL: turns table-builder state into
// CREATE/ALTER TABLE statements.
function TableCompiler_PG() {
  TableCompiler.apply(this, arguments);
}
inherits(TableCompiler_PG, TableCompiler);
// Compile a rename column command.
TableCompiler_PG.prototype.renameColumn = function(from, to) {
  return this.pushQuery({
    sql: `alter table ${this.tableName()} rename ${this.formatter.wrap(
      from
    )} to ${this.formatter.wrap(to)}`,
  });
};
// Compile an "add columns" ALTER TABLE for the given builder.
TableCompiler_PG.prototype.compileAdd = function(builder) {
  const table = this.formatter.wrap(builder);
  const columns = this.prefixArray('add column', this.getColumns(builder));
  return this.pushQuery({
    sql: `alter table ${table} ${columns.join(', ')}`,
  });
};
// Adds the "create" query to the query sequence.
TableCompiler_PG.prototype.createQuery = function(columns, ifNot) {
  const createStatement = ifNot
    ? 'create table if not exists '
    : 'create table ';
  let sql =
    createStatement + this.tableName() + ' (' + columns.sql.join(', ') + ')';
  // PG table inheritance: CREATE TABLE ... INHERITS (parent).
  if (this.single.inherits)
    sql += ` inherits (${this.formatter.wrap(this.single.inherits)})`;
  this.pushQuery({
    sql,
    bindings: columns.bindings,
  });
  // Queue a COMMENT ON TABLE statement when a table comment was supplied.
  const hasComment = has(this.single, 'comment');
  if (hasComment) this.comment(this.single.comment);
};
// Compile column additions/alterations. With the alter prefix, each column
// is altered in place (drop default, drop not null, change type, then
// re-apply defaultTo/nullable); otherwise defer to the base "add columns".
TableCompiler_PG.prototype.addColumns = function(
  columns,
  prefix,
  colCompilers
) {
  if (prefix === this.alterColumnsPrefix) {
    // alter columns
    for (const col of colCompilers) {
      const quotedTableName = this.tableName();
      const type = col.getColumnType();
      // We'd prefer to call this.formatter.wrapAsIdentifier here instead, however the context passed to
      // `this` instance is not that of the column, but of the table. Thus, we unfortunately have to call
      // `wrapIdentifier` here as well (it is already called once on the initial column operation) to give
      // our `alter` operation the correct `queryContext`. Refer to issue #2606 and PR #2612.
      const colName = this.client.wrapIdentifier(
        col.getColumnName(),
        col.columnBuilder.queryContext()
      );
      // Reset the column before changing its type so stale constraints
      // cannot conflict with the new definition.
      this.pushQuery({
        sql: `alter table ${quotedTableName} alter column ${colName} drop default`,
        bindings: [],
      });
      this.pushQuery({
        sql: `alter table ${quotedTableName} alter column ${colName} drop not null`,
        bindings: [],
      });
      this.pushQuery({
        sql: `alter table ${quotedTableName} alter column ${colName} type ${type} using (${colName}::${type})`,
        bindings: [],
      });
      // Re-apply a default value if one was requested on the builder.
      const defaultTo = col.modified['defaultTo'];
      if (defaultTo) {
        const modifier = col.defaultTo.apply(col, defaultTo);
        this.pushQuery({
          sql: `alter table ${quotedTableName} alter column ${colName} set ${modifier}`,
          bindings: [],
        });
      }
      // Re-apply NOT NULL when the builder explicitly disallowed nulls.
      const nullable = col.modified['nullable'];
      if (nullable && nullable[0] === false) {
        this.pushQuery({
          sql: `alter table ${quotedTableName} alter column ${colName} set not null`,
          bindings: [],
        });
      }
    }
  } else {
    // base class implementation for normal add
    TableCompiler.prototype.addColumns.call(this, columns, prefix);
  }
};
// Compiles the comment on the table.
// The `comment` parameter is accepted for interface parity, but the text is
// read from `this.single.comment` — matching how createQuery invokes this.
TableCompiler_PG.prototype.comment = function(comment) {
  // Escape embedded single quotes ('' is the SQL-standard escape) so a
  // comment such as "it's" does not produce an invalid statement.
  const escaped = String(this.single.comment).replace(/'/g, "''");
  this.pushQuery(`comment on table ${this.tableName()} is '${escaped}'`);
};
// Indexes:
// -------
// Add a primary key constraint, named `<table>_pkey` unless a custom
// constraint name is given.
TableCompiler_PG.prototype.primary = function(columns, constraintName) {
  constraintName = constraintName
    ? this.formatter.wrap(constraintName)
    : this.formatter.wrap(`${this.tableNameRaw}_pkey`);
  this.pushQuery(
    `alter table ${this.tableName()} add constraint ${constraintName} primary key (${this.formatter.columnize(
      columns
    )})`
  );
};
// Add a unique constraint, defaulting the name via _indexCommand.
TableCompiler_PG.prototype.unique = function(columns, indexName) {
  indexName = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('unique', this.tableNameRaw, columns);
  this.pushQuery(
    `alter table ${this.tableName()} add constraint ${indexName}` +
      ' unique (' +
      this.formatter.columnize(columns) +
      ')'
  );
};
// Create an index, optionally with a PG index method (`using gin`, ...).
TableCompiler_PG.prototype.index = function(columns, indexName, indexType) {
  indexName = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('index', this.tableNameRaw, columns);
  this.pushQuery(
    `create index ${indexName} on ${this.tableName()}${(indexType &&
      ` using ${indexType}`) ||
      ''}` +
      ' (' +
      this.formatter.columnize(columns) +
      ')'
  );
};
// Drop the primary key constraint (default name `<table>_pkey`).
TableCompiler_PG.prototype.dropPrimary = function(constraintName) {
  constraintName = constraintName
    ? this.formatter.wrap(constraintName)
    : this.formatter.wrap(this.tableNameRaw + '_pkey');
  this.pushQuery(
    `alter table ${this.tableName()} drop constraint ${constraintName}`
  );
};
// Drop an index; schema-qualify the index name when a schema is in use.
TableCompiler_PG.prototype.dropIndex = function(columns, indexName) {
  indexName = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('index', this.tableNameRaw, columns);
  indexName = this.schemaNameRaw
    ? `${this.formatter.wrap(this.schemaNameRaw)}.${indexName}`
    : indexName;
  this.pushQuery(`drop index ${indexName}`);
};
// Drop a unique constraint.
TableCompiler_PG.prototype.dropUnique = function(columns, indexName) {
  indexName = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('unique', this.tableNameRaw, columns);
  this.pushQuery(
    `alter table ${this.tableName()} drop constraint ${indexName}`
  );
};
// Drop a foreign key constraint.
TableCompiler_PG.prototype.dropForeign = function(columns, indexName) {
  indexName = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('foreign', this.tableNameRaw, columns);
  this.pushQuery(
    `alter table ${this.tableName()} drop constraint ${indexName}`
  );
};
module.exports = TableCompiler_PG;

73
node_modules/knex/lib/dialects/redshift/index.js generated vendored Normal file
View File

@@ -0,0 +1,73 @@
// Redshift
// -------
const inherits = require('inherits');
const Client_PG = require('../postgres');
const { map } = require('lodash');
const Transaction = require('./transaction');
const QueryCompiler = require('./query/compiler');
const ColumnBuilder = require('./schema/columnbuilder');
const ColumnCompiler = require('./schema/columncompiler');
const TableCompiler = require('./schema/tablecompiler');
const SchemaCompiler = require('./schema/compiler');
// Redshift client: extends the PostgreSQL client, swapping in the
// Redshift-specific compilers/builders and response handling.
function Client_Redshift(config) {
  Client_PG.apply(this, arguments);
}
inherits(Client_Redshift, Client_PG);
Object.assign(Client_Redshift.prototype, {
  // Factory methods returning the Redshift-specific implementations.
  transaction() {
    return new Transaction(this, ...arguments);
  },
  queryCompiler() {
    return new QueryCompiler(this, ...arguments);
  },
  columnBuilder() {
    return new ColumnBuilder(this, ...arguments);
  },
  columnCompiler() {
    return new ColumnCompiler(this, ...arguments);
  },
  tableCompiler() {
    return new TableCompiler(this, ...arguments);
  },
  schemaCompiler() {
    return new SchemaCompiler(this, ...arguments);
  },
  dialect: 'redshift',
  driverName: 'pg-redshift',
  // Redshift speaks the PostgreSQL wire protocol, so the `pg` driver is used.
  _driver() {
    return require('pg');
  },
  // Ensures the response is returned in the same format as other clients.
  processResponse(obj, runner) {
    const resp = obj.response;
    // A custom `output` handler (if present) takes precedence.
    if (obj.output) return obj.output.call(runner, resp);
    if (obj.method === 'raw') return resp;
    if (resp.command === 'SELECT') {
      if (obj.method === 'first') return resp.rows[0];
      if (obj.method === 'pluck') return map(resp.rows, obj.pluck);
      return resp.rows;
    }
    // Write commands resolve to the number of affected rows.
    if (
      resp.command === 'INSERT' ||
      resp.command === 'UPDATE' ||
      resp.command === 'DELETE'
    ) {
      return resp.rowCount;
    }
    return resp;
  },
});
module.exports = Client_Redshift;

View File

@@ -0,0 +1,122 @@
// Redshift Query Builder & Compiler
// ------
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const QueryCompiler_PG = require('../../postgres/query/compiler');
const { reduce, identity } = require('lodash');
// Query compiler for Redshift: reuses the PostgreSQL compiler but strips
// features Redshift does not support (returning clauses, row locks).
function QueryCompiler_Redshift(client, builder) {
  QueryCompiler_PG.call(this, client, builder);
}
inherits(QueryCompiler_Redshift, QueryCompiler_PG);
Object.assign(QueryCompiler_Redshift.prototype, {
  // Redshift folds identifiers to lower case; emit the table name lowered.
  truncate() {
    return `truncate ${this.tableName.toLowerCase()}`;
  },
  // Compiles an `insert` query, allowing for multiple
  // inserts using a single query statement.
  insert() {
    // Delegate to the base compiler (not the PG one) so no `returning`
    // clause is emitted; warn if one was requested.
    const sql = QueryCompiler.prototype.insert.apply(this, arguments);
    if (sql === '') return sql;
    this._slightReturn();
    return {
      sql,
    };
  },
  // Compiles an `update` query, warning on unsupported returning
  update() {
    const sql = QueryCompiler.prototype.update.apply(this, arguments);
    this._slightReturn();
    return {
      sql,
    };
  },
  // Compiles an `delete` query, warning on unsupported returning
  del() {
    const sql = QueryCompiler.prototype.del.apply(this, arguments);
    this._slightReturn();
    return {
      sql,
    };
  },
  // simple: if trying to return, warn
  _slightReturn() {
    if (this.single.isReturning) {
      this.client.logger.warn(
        'insert/update/delete returning is not supported by redshift dialect'
      );
    }
  },
  // Row locks are unsupported; warn and emit nothing.
  forUpdate() {
    this.client.logger.warn('table lock is not supported by redshift dialect');
    return '';
  },
  forShare() {
    this.client.logger.warn(
      'lock for share is not supported by redshift dialect'
    );
    return '';
  },
  // Compiles a columnInfo query
  columnInfo() {
    const column = this.single.columnInfo;
    let schema = this.single.schema;
    // The user may have specified a custom wrapIdentifier function in the config. We
    // need to run the identifiers through that function, but not format them as
    // identifiers otherwise.
    const table = this.client.customWrapIdentifier(this.single.table, identity);
    if (schema) {
      schema = this.client.customWrapIdentifier(schema, identity);
    }
    let sql =
      'select * from information_schema.columns where table_name = ? and table_catalog = ?';
    // Redshift stores identifiers lower-cased, so compare lower-cased values.
    const bindings = [
      table.toLowerCase(),
      this.client.database().toLowerCase(),
    ];
    if (schema) {
      sql += ' and table_schema = ?';
      bindings.push(schema);
    } else {
      sql += ' and table_schema = current_schema()';
    }
    return {
      sql,
      bindings,
      // Map result rows into a column-name keyed object (mirrors the PG
      // compiler's columnInfo output shape).
      output(resp) {
        const out = reduce(
          resp.rows,
          function(columns, val) {
            columns[val.column_name] = {
              type: val.data_type,
              maxLength: val.character_maximum_length,
              nullable: val.is_nullable === 'YES',
              defaultValue: val.column_default,
            };
            return columns;
          },
          {}
        );
        return (column && out[column]) || out;
      },
    };
  },
});
module.exports = QueryCompiler_Redshift;

View File

@@ -0,0 +1,20 @@
const inherits = require('inherits');
const ColumnBuilder = require('../../../schema/columnbuilder');
// Column builder for Redshift: adjusts primary-key behavior and warns on
// unsupported index creation.
function ColumnBuilder_Redshift() {
  ColumnBuilder.apply(this, arguments);
}
inherits(ColumnBuilder_Redshift, ColumnBuilder);
// primary needs to set not null on non-preexisting columns, or fail
ColumnBuilder_Redshift.prototype.primary = function() {
  this.notNullable();
  return ColumnBuilder.prototype.primary.apply(this, arguments);
};
// Redshift has no secondary indexes; warn and no-op (stays chainable).
ColumnBuilder_Redshift.prototype.index = function() {
  this.client.logger.warn('Redshift does not support the creation of indexes.');
  return this;
};
module.exports = ColumnBuilder_Redshift;

View File

@@ -0,0 +1,60 @@
// Redshift Column Compiler
// -------
const inherits = require('inherits');
const ColumnCompiler_PG = require('../../postgres/schema/columncompiler');
// Column compiler for Redshift: overrides PG type mappings for types
// Redshift lacks (bytea, json/jsonb, serial, text -> varchar variants).
function ColumnCompiler_Redshift() {
  ColumnCompiler_PG.apply(this, arguments);
}
inherits(ColumnCompiler_Redshift, ColumnCompiler_PG);
Object.assign(ColumnCompiler_Redshift.prototype, {
  // Types:
  // ------
  // IDENTITY(1,1) replaces PG's serial/bigserial auto-increment columns.
  bigincrements: 'bigint identity(1,1) primary key not null',
  binary: 'varchar(max)',
  bit(column) {
    return column.length !== false ? `char(${column.length})` : 'char(1)';
  },
  blob: 'varchar(max)',
  // Native enums are unsupported; fall back to a plain varchar.
  enu: 'varchar(255)',
  enum: 'varchar(255)',
  increments: 'integer identity(1,1) primary key not null',
  json: 'varchar(max)',
  jsonb: 'varchar(max)',
  longblob: 'varchar(max)',
  mediumblob: 'varchar(16777218)',
  set: 'text',
  text: 'varchar(max)',
  datetime(without) {
    return without ? 'timestamp' : 'timestamptz';
  },
  timestamp(without) {
    return without ? 'timestamp' : 'timestamptz';
  },
  tinyblob: 'varchar(256)',
  uuid: 'char(36)',
  varbinary: 'varchar(max)',
  bigint: 'bigint',
  bool: 'boolean',
  double: 'double precision',
  floating: 'real',
  smallint: 'smallint',
  tinyint: 'smallint',
  // Modifiers:
  // ------
  // Queue a COMMENT ON COLUMN statement after the main DDL.
  comment(comment) {
    this.pushAdditional(function() {
      this.pushQuery(
        `comment on column ${this.tableCompiler.tableName()}.` +
          this.formatter.wrap(this.args[0]) +
          ' is ' +
          (comment ? `'${comment}'` : 'NULL')
      );
    }, comment);
  },
});
module.exports = ColumnCompiler_Redshift;

View File

@@ -0,0 +1,14 @@
/* eslint max-len: 0 */
// Redshift Table Builder & Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler_PG = require('../../postgres/schema/compiler');
// Schema compiler for Redshift: currently identical to the PostgreSQL
// schema compiler; exists as an extension point for Redshift overrides.
function SchemaCompiler_Redshift() {
  SchemaCompiler_PG.apply(this, arguments);
}
inherits(SchemaCompiler_Redshift, SchemaCompiler_PG);
module.exports = SchemaCompiler_Redshift;

View File

@@ -0,0 +1,123 @@
/* eslint max-len: 0 */
// Redshift Table Builder & Compiler
// -------
const inherits = require('inherits');
const { has } = require('lodash');
const TableCompiler_PG = require('../../postgres/schema/tablecompiler');
// Table compiler for Redshift: overrides PG behavior for features Redshift
// lacks (indexes, table inheritance, nullable primary-key columns) and its
// one-column-per-ALTER-TABLE restriction.
function TableCompiler_Redshift() {
  TableCompiler_PG.apply(this, arguments);
}
inherits(TableCompiler_Redshift, TableCompiler_PG);
// Secondary indexes are unsupported; warn and emit nothing.
TableCompiler_Redshift.prototype.index = function(
  columns,
  indexName,
  indexType
) {
  this.client.logger.warn('Redshift does not support the creation of indexes.');
};
TableCompiler_Redshift.prototype.dropIndex = function(columns, indexName) {
  this.client.logger.warn('Redshift does not support the deletion of indexes.');
};
// TODO: have to disable setting not null on columns that already exist...
// Adds the "create" query to the query sequence.
TableCompiler_Redshift.prototype.createQuery = function(columns, ifNot) {
  const createStatement = ifNot
    ? 'create table if not exists '
    : 'create table ';
  let sql =
    createStatement + this.tableName() + ' (' + columns.sql.join(', ') + ')';
  // Redshift has no INHERITS; LIKE copies the parent table's structure instead.
  if (this.single.inherits)
    sql += ` like (${this.formatter.wrap(this.single.inherits)})`;
  this.pushQuery({
    sql,
    bindings: columns.bindings,
  });
  const hasComment = has(this.single, 'comment');
  if (hasComment) this.comment(this.single.comment);
};
// Add a primary key, but only when every referenced column is a newly added
// column explicitly marked not-nullable; otherwise warn and skip, since
// Redshift rejects nullable primary-key columns.
TableCompiler_Redshift.prototype.primary = function(columns, constraintName) {
  const self = this;
  constraintName = constraintName
    ? self.formatter.wrap(constraintName)
    : self.formatter.wrap(`${this.tableNameRaw}_pkey`);
  if (columns.constructor !== Array) {
    columns = [columns];
  }
  const thiscolumns = self.grouped.columns;
  if (thiscolumns) {
    for (let i = 0; i < columns.length; i++) {
      // Look up the builder that adds this column in the current statement.
      let exists = thiscolumns.find(
        (tcb) =>
          tcb.grouping === 'columns' &&
          tcb.builder &&
          tcb.builder._method === 'add' &&
          tcb.builder._args &&
          tcb.builder._args.indexOf(columns[i]) > -1
      );
      if (exists) {
        exists = exists.builder;
      }
      // The column counts as nullable unless explicitly marked
      // `.notNullable()` (nullable modifier recorded as false).
      const nullable = !(
        exists &&
        exists._modifiers &&
        exists._modifiers['nullable'] &&
        exists._modifiers['nullable'][0] === false
      );
      if (nullable) {
        if (exists) {
          return this.client.logger.warn(
            'Redshift does not allow primary keys to contain nullable columns.'
          );
        } else {
          return this.client.logger.warn(
            'Redshift does not allow primary keys to contain nonexistent columns.'
          );
        }
      }
    }
  }
  return self.pushQuery(
    `alter table ${self.tableName()} add constraint ${constraintName} primary key (${self.formatter.columnize(
      columns
    )})`
  );
};
// Compiles column add. Redshift can only add one column per ALTER TABLE, so core addColumns doesn't work. #2545
TableCompiler_Redshift.prototype.addColumns = function(
  columns,
  prefix,
  colCompilers
) {
  if (prefix === this.alterColumnsPrefix) {
    // Alterations reuse the PG implementation (one statement per change).
    TableCompiler_PG.prototype.addColumns.call(
      this,
      columns,
      prefix,
      colCompilers
    );
  } else {
    // Emit one ALTER TABLE ... ADD COLUMN statement per column.
    prefix = prefix || this.addColumnsPrefix;
    colCompilers = colCompilers || this.getColumns();
    for (const col of colCompilers) {
      const quotedTableName = this.tableName();
      const colCompiled = col.compileColumn();
      this.pushQuery({
        sql: `alter table ${quotedTableName} ${prefix}${colCompiled}`,
        bindings: [],
      });
    }
  }
};
module.exports = TableCompiler_Redshift;

18
node_modules/knex/lib/dialects/redshift/transaction.js generated vendored Normal file
View File

@@ -0,0 +1,18 @@
const Transaction = require('../../transaction');
module.exports = class Redshift_Transaction extends Transaction {
savepoint(conn) {
this.trxClient.logger('Redshift does not support savepoints.');
return Promise.resolve();
}
release(conn, value) {
this.trxClient.logger('Redshift does not support savepoints.');
return Promise.resolve();
}
rollbackTo(conn, error) {
this.trxClient.logger('Redshift does not support savepoints.');
return Promise.resolve();
}
};

21
node_modules/knex/lib/dialects/sqlite3/formatter.js generated vendored Normal file
View File

@@ -0,0 +1,21 @@
const Formatter = require('../../formatter');
const Raw = require('../../raw');
module.exports = class SQlite3_Formatter extends Formatter {
values(values) {
if (Array.isArray(values)) {
if (Array.isArray(values[0])) {
return `( values ${values
.map((value) => `(${this.parameterize(value)})`)
.join(', ')})`;
}
return `(${this.parameterize(values)})`;
}
if (values instanceof Raw) {
return `(${this.parameter(values)})`;
}
return this.parameter(values);
}
};

171
node_modules/knex/lib/dialects/sqlite3/index.js generated vendored Normal file
View File

@@ -0,0 +1,171 @@
// SQLite3
// -------
const Bluebird = require('bluebird');
const inherits = require('inherits');
const { isUndefined, map, defaults } = require('lodash');
const { promisify } = require('util');
const Client = require('../../client');
const QueryCompiler = require('./query/compiler');
const SchemaCompiler = require('./schema/compiler');
const ColumnCompiler = require('./schema/columncompiler');
const TableCompiler = require('./schema/tablecompiler');
const SQLite3_DDL = require('./schema/ddl');
const SQLite3_Formatter = require('./formatter');
// SQLite3 client: wires the sqlite3 driver into knex, warning when
// `useNullAsDefault` is not configured (sqlite cannot insert DEFAULT values).
function Client_SQLite3(config) {
  Client.call(this, config);
  if (isUndefined(config.useNullAsDefault)) {
    this.logger.warn(
      'sqlite does not support inserting default values. Set the ' +
        '`useNullAsDefault` flag to hide this warning. ' +
        '(see docs http://knexjs.org/#Builder-insert).'
    );
  }
}
inherits(Client_SQLite3, Client);
Object.assign(Client_SQLite3.prototype, {
  dialect: 'sqlite3',
  driverName: 'sqlite3',
  _driver() {
    return require('sqlite3');
  },
  // Factory methods returning the SQLite3-specific implementations.
  schemaCompiler() {
    return new SchemaCompiler(this, ...arguments);
  },
  queryCompiler() {
    return new QueryCompiler(this, ...arguments);
  },
  columnCompiler() {
    return new ColumnCompiler(this, ...arguments);
  },
  tableCompiler() {
    return new TableCompiler(this, ...arguments);
  },
  ddl(compiler, pragma, connection) {
    return new SQLite3_DDL(this, compiler, pragma, connection);
  },
  // Quote identifiers with backticks, doubling embedded backticks;
  // `*` is passed through unquoted.
  wrapIdentifierImpl(value) {
    return value !== '*' ? `\`${value.replace(/`/g, '``')}\`` : '*';
  },
  // Get a raw connection from the database, returning a promise with the connection object.
  acquireRawConnection() {
    return new Bluebird((resolve, reject) => {
      const db = new this.driver.Database(
        this.connectionSettings.filename,
        (err) => {
          if (err) {
            return reject(err);
          }
          resolve(db);
        }
      );
    });
  },
  // Used to explicitly close a connection, called internally by the pool when
  // a connection times out or the pool is shutdown.
  async destroyRawConnection(connection) {
    const close = promisify((cb) => connection.close(cb));
    return close();
  },
  // Runs the query on the specified connection, providing the bindings and any
  // other necessary prep work.
  _query(connection, obj) {
    const { method } = obj;
    let callMethod;
    // Write statements go through `run` (which exposes lastID/changes on
    // its callback context); reads go through `all`.
    switch (method) {
      case 'insert':
      case 'update':
      case 'counter':
      case 'del':
        callMethod = 'run';
        break;
      default:
        callMethod = 'all';
    }
    return new Bluebird(function(resolver, rejecter) {
      if (!connection || !connection[callMethod]) {
        return rejecter(
          new Error(`Error calling ${callMethod} on connection.`)
        );
      }
      connection[callMethod](obj.sql, obj.bindings, function(err, response) {
        if (err) return rejecter(err);
        obj.response = response;
        // We need the context here, as it contains
        // the "this.lastID" or "this.changes"
        obj.context = this;
        return resolver(obj);
      });
    });
  },
  // Stream query results by running the query fully and writing each row to
  // the provided stream (no native cursor streaming is used here).
  _stream(connection, sql, stream) {
    const client = this;
    return new Bluebird(function(resolver, rejecter) {
      stream.on('error', rejecter);
      stream.on('end', resolver);
      return client
        ._query(connection, sql)
        .then((obj) => obj.response)
        .then((rows) => rows.forEach((row) => stream.write(row)))
        .catch(function(err) {
          stream.emit('error', err);
        })
        .then(function() {
          stream.end();
        });
    });
  },
  // Ensures the response is returned in the same format as other clients.
  processResponse(obj, runner) {
    const ctx = obj.context;
    let { response } = obj;
    if (obj.output) return obj.output.call(runner, response);
    switch (obj.method) {
      case 'select':
      case 'pluck':
      case 'first':
        if (obj.method === 'pluck') response = map(response, obj.pluck);
        return obj.method === 'first' ? response[0] : response;
      case 'insert':
        // The sqlite3 driver exposes the inserted row id on the statement
        // context captured in _query.
        return [ctx.lastID];
      case 'del':
      case 'update':
      case 'counter':
        return ctx.changes;
      default:
        return response;
    }
  },
  // Pin the pool to a single connection (min/max 1) for the sqlite file.
  poolDefaults() {
    return defaults(
      { min: 1, max: 1 },
      Client.prototype.poolDefaults.call(this)
    );
  },
  formatter() {
    return new SQLite3_Formatter(this, ...arguments);
  },
});
module.exports = Client_SQLite3;

View File

@@ -0,0 +1,176 @@
// SQLite3 Query Builder & Compiler
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const {
assign,
each,
isEmpty,
isString,
noop,
reduce,
identity,
} = require('lodash');
// Query compiler for SQLite3: emulates multi-row inserts via UNION ALL
// selects and warns about unsupported features (returning, row locks).
function QueryCompiler_SQLite3(client, builder) {
  QueryCompiler.call(this, client, builder);
  const { returning } = this.single;
  if (returning) {
    this.client.logger.warn(
      '.returning() is not supported by sqlite3 and will not have any effect.'
    );
  }
}
inherits(QueryCompiler_SQLite3, QueryCompiler);
assign(QueryCompiler_SQLite3.prototype, {
  // The locks are not applicable in SQLite3
  forShare: emptyStr,
  forUpdate: emptyStr,
  // SQLite requires us to build the multi-row insert as a listing of select with
  // unions joining them together. So we'll build out this list of columns and
  // then join them all together with select unions to complete the queries.
  insert() {
    const insertValues = this.single.insert || [];
    let sql = this.with() + `insert into ${this.tableName} `;
    // Empty arrays produce no statement; a single empty object produces the
    // dialect's "empty insert" form (_emptyInsertValue).
    if (Array.isArray(insertValues)) {
      if (insertValues.length === 0) {
        return '';
      } else if (
        insertValues.length === 1 &&
        insertValues[0] &&
        isEmpty(insertValues[0])
      ) {
        return sql + this._emptyInsertValue;
      }
    } else if (typeof insertValues === 'object' && isEmpty(insertValues)) {
      return sql + this._emptyInsertValue;
    }
    const insertData = this._prepInsert(insertValues);
    if (isString(insertData)) {
      // Pre-compiled payload (e.g. raw/insert-from-select) — append as-is.
      return sql + insertData;
    }
    if (insertData.columns.length === 0) {
      return '';
    }
    sql += `(${this.formatter.columnize(insertData.columns)})`;
    // backwards compatible error
    if (this.client.valueForUndefined !== null) {
      each(insertData.values, (bindings) => {
        each(bindings, (binding) => {
          if (binding === undefined)
            throw new TypeError(
              '`sqlite` does not support inserting default values. Specify ' +
                'values explicitly or use the `useNullAsDefault` config flag. ' +
                '(see docs http://knexjs.org/#Builder-insert).'
            );
        });
      });
    }
    if (insertData.values.length === 1) {
      // Single row: a plain VALUES clause suffices.
      const parameters = this.formatter.parameterize(
        insertData.values[0],
        this.client.valueForUndefined
      );
      return sql + ` values (${parameters})`;
    }
    // Multiple rows: build "select ?, ? union all select ?, ? ..." with the
    // column names attached as aliases on the parameters.
    const blocks = [];
    let i = -1;
    while (++i < insertData.values.length) {
      let i2 = -1;
      const block = (blocks[i] = []);
      let current = insertData.values[i];
      current = current === undefined ? this.client.valueForUndefined : current;
      while (++i2 < insertData.columns.length) {
        block.push(
          this.formatter.alias(
            this.formatter.parameter(current[i2]),
            this.formatter.wrap(insertData.columns[i2])
          )
        );
      }
      blocks[i] = block.join(', ');
    }
    return sql + ' select ' + blocks.join(' union all select ');
  },
  // Compile a truncate table statement into SQL.
  truncate() {
    const { table } = this.single;
    return {
      // SQLite has no TRUNCATE; delete all rows, then reset the
      // autoincrement counter (errors ignored when none exists).
      sql: `delete from ${this.tableName}`,
      output() {
        return this.query({
          sql: `delete from sqlite_sequence where name = '${table}'`,
        }).catch(noop);
      },
    };
  },
  // Compiles a `columnInfo` query
  columnInfo() {
    const column = this.single.columnInfo;
    // The user may have specified a custom wrapIdentifier function in the config. We
    // need to run the identifiers through that function, but not format them as
    // identifiers otherwise.
    const table = this.client.customWrapIdentifier(this.single.table, identity);
    return {
      sql: `PRAGMA table_info(\`${table}\`)`,
      // Convert the PRAGMA rows into a column-name keyed object, extracting
      // a max length from type declarations such as `varchar(255)`.
      output(resp) {
        const maxLengthRegex = /.*\((\d+)\)/;
        const out = reduce(
          resp,
          function(columns, val) {
            let { type } = val;
            let maxLength = type.match(maxLengthRegex);
            if (maxLength) {
              maxLength = maxLength[1];
            }
            type = maxLength ? type.split('(')[0] : type;
            columns[val.name] = {
              type: type.toLowerCase(),
              maxLength,
              nullable: !val.notnull,
              defaultValue: val.dflt_value,
            };
            return columns;
          },
          {}
        );
        return (column && out[column]) || out;
      },
    };
  },
  // Compile the limit clause; `limit -1` is emitted when only an offset was
  // requested, since SQLite requires LIMIT to use OFFSET.
  limit() {
    const noLimit = !this.single.limit && this.single.limit !== 0;
    if (noLimit && !this.single.offset) return '';
    // Workaround for offset only,
    // see http://stackoverflow.com/questions/10491492/sqllite-with-skip-offset-only-not-limit
    return `limit ${this.formatter.parameter(
      noLimit ? -1 : this.single.limit
    )}`;
  },
});
// SQLite3 has no row-lock syntax; forShare/forUpdate resolve to this helper,
// which always yields an empty clause. Kept as a hoisted function declaration
// because it is referenced above its definition.
function emptyStr() {
  const nothing = '';
  return nothing;
}
module.exports = QueryCompiler_SQLite3;

View File

@@ -0,0 +1,27 @@
const inherits = require('inherits');
const ColumnCompiler = require('../../../schema/columncompiler');
// Column Compiler
// -------
// Column compiler for SQLite3: maps knex column types onto SQLite's loose
// type system; only the nullable/defaultTo modifiers are supported.
function ColumnCompiler_SQLite3() {
  ColumnCompiler.apply(this, arguments);
  this.modifiers = ['nullable', 'defaultTo'];
}
inherits(ColumnCompiler_SQLite3, ColumnCompiler);
// Types
// -------
// All floating-point/decimal types collapse to sqlite's `float`.
ColumnCompiler_SQLite3.prototype.double = ColumnCompiler_SQLite3.prototype.decimal = ColumnCompiler_SQLite3.prototype.floating =
  'float';
ColumnCompiler_SQLite3.prototype.timestamp = 'datetime';
// Enums become a text column constrained with CHECK (... in (...)).
ColumnCompiler_SQLite3.prototype.enu = function(allowed) {
  return `text check (${this.formatter.wrap(this.args[0])} in ('${allowed.join(
    "', '"
  )}'))`;
};
ColumnCompiler_SQLite3.prototype.json = 'json';
module.exports = ColumnCompiler_SQLite3;

View File

@@ -0,0 +1,49 @@
// SQLite3: Column Builder & Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler = require('../../../schema/compiler');
const { some } = require('lodash');
// Schema Compiler
// -------
// Schema compiler for SQLite3: construction is delegated to the shared
// SchemaCompiler base; only SQLite-specific statements are overridden below.
function SchemaCompiler_SQLite3() {
  SchemaCompiler.apply(this, arguments);
}
inherits(SchemaCompiler_SQLite3, SchemaCompiler);
// Compile the query to determine if a table exists.
// Queue a query that checks sqlite_master for a table with the given name;
// the output handler maps the resulting row set to a boolean.
SchemaCompiler_SQLite3.prototype.hasTable = function(tableName) {
  const binding = this.formatter.parameter(tableName);
  this.pushQuery({
    sql: `select * from sqlite_master where type = 'table' and name = ${binding}`,
    output(resp) {
      return resp.length > 0;
    },
  });
};
// Compile the query to determine if a column exists.
// Queue a PRAGMA table_info query and report whether `column` exists on
// `tableName`. Names are compared through the client's wrapIdentifier (so
// custom identifier mappings are respected) and case-insensitively, since
// SQLite identifiers are not case sensitive.
SchemaCompiler_SQLite3.prototype.hasColumn = function(tableName, column) {
  this.pushQuery({
    sql: `PRAGMA table_info(${this.formatter.wrap(tableName)})`,
    // NOTE(review): `output` appears to be invoked with `this` bound by the
    // runner (which exposes `this.client`), not the compiler — confirm
    // against the runner before converting this to an arrow function.
    output(resp) {
      return some(resp, (col) => {
        return (
          this.client.wrapIdentifier(col.name.toLowerCase()) ===
          this.client.wrapIdentifier(column.toLowerCase())
        );
      });
    },
  });
};
// Compile a rename table command.
// Queue an `alter table ... rename to ...` statement.
SchemaCompiler_SQLite3.prototype.renameTable = function(from, to) {
  const source = this.formatter.wrap(from);
  const target = this.formatter.wrap(to);
  this.pushQuery(`alter table ${source} rename to ${target}`);
};
module.exports = SchemaCompiler_SQLite3;

330
node_modules/knex/lib/dialects/sqlite3/schema/ddl.js generated vendored Normal file
View File

@@ -0,0 +1,330 @@
// SQLite3_DDL
//
// All of the SQLite3 specific DDL helpers for renaming/dropping
// columns and changing datatypes.
// -------
const {
assign,
uniqueId,
find,
identity,
map,
omit,
invert,
fromPairs,
some,
negate,
isEmpty,
chunk,
} = require('lodash');
// So altering the schema in SQLite3 is a major pain.
// We have our own object to deal with the renaming and altering the types
// for sqlite3 things.
// Constructor: captures everything needed to rebuild a table, which is how
// this helper emulates column renames/drops on SQLite (create a temp copy,
// recreate the table from rewritten DDL, reinsert the data).
function SQLite3_DDL(client, tableCompiler, pragma, connection) {
  this.client = client;
  this.tableCompiler = tableCompiler;
  // Rows from `PRAGMA table_info` describing the table's current columns.
  this.pragma = pragma;
  this.tableNameRaw = this.tableCompiler.tableNameRaw;
  // Unique name for the temporary table used during rebuilds.
  this.alteredName = uniqueId('_knex_temp_alter');
  this.connection = connection;
  // User-supplied identifier mapper, or identity when none is configured.
  this.formatter =
    client && client.config && client.config.wrapIdentifier
      ? client.config.wrapIdentifier
      : (value) => value;
}
assign(SQLite3_DDL.prototype, {
  // Returns the (possibly remapped) table name; the second argument is the
  // fallback formatter passed to user wrapIdentifier hooks.
  tableName() {
    return this.formatter(this.tableNameRaw, (value) => value);
  },
  // Resolve `column` against the cached PRAGMA rows, comparing through the
  // client's wrapIdentifier and case-insensitively. Throws when absent.
  getColumn: async function(column) {
    const currentCol = find(this.pragma, (col) => {
      return (
        this.client.wrapIdentifier(col.name).toLowerCase() ===
        this.client.wrapIdentifier(column).toLowerCase()
      );
    });
    if (!currentCol)
      throw new Error(
        `The column ${column} is not in the ${this.tableName()} table`
      );
    return currentCol;
  },
getTableSql() {
this.trx.disableProcessing();
return this.trx
.raw(
`SELECT name, sql FROM sqlite_master WHERE type="table" AND name="${this.tableName()}"`
)
.then((result) => {
this.trx.enableProcessing();
return result;
});
},
  // Rename the live table to the temporary name (used while rebuilding).
  renameTable: async function() {
    return this.trx.raw(
      `ALTER TABLE "${this.tableName()}" RENAME TO "${this.alteredName}"`
    );
  },
  // Drop the original table (after its data has been copied out).
  dropOriginal() {
    return this.trx.raw(`DROP TABLE "${this.tableName()}"`);
  },
  // Drop the temporary working table once data has been reinserted.
  dropTempTable() {
    return this.trx.raw(`DROP TABLE "${this.alteredName}"`);
  },
  // Copy all rows from the original table into the temp table, unchanged.
  copyData() {
    return this.trx
      .raw(`SELECT * FROM "${this.tableName()}"`)
      .then((result) =>
        this.insertChunked(20, this.alteredName, identity, result)
      );
  },
  // Copy rows back from the temp table into the rebuilt table, passing each
  // row through `iterator` (e.g. to rename or drop keys).
  reinsertData(iterator) {
    return this.trx
      .raw(`SELECT * FROM "${this.alteredName}"`)
      .then((result) =>
        this.insertChunked(20, this.tableName(), iterator, result)
      );
  },
  // Insert `result` rows into `target` in batches of `chunkSize`, mapping
  // each row through `iterator` (defaults to identity).
  async insertChunked(chunkSize, target, iterator, result) {
    iterator = iterator || identity;
    const chunked = chunk(result, chunkSize);
    for (const batch of chunked) {
      await this.trx
        .queryBuilder()
        .table(target)
        .insert(map(batch, iterator));
    }
  },
  // Create the temp table by rewriting the original CREATE TABLE statement
  // with the temporary name. NOTE(review): plain string replace — assumes the
  // table name does not also occur earlier in the DDL text; confirm.
  createTempTable(createTable) {
    return this.trx.raw(
      createTable.sql.replace(this.tableName(), this.alteredName)
    );
  },
_doReplace(sql, from, to) {
const oneLineSql = sql.replace(/\s+/g, ' ');
const matched = oneLineSql.match(/^CREATE TABLE\s+(\S+)\s*\((.*)\)/);
const tableName = matched[1];
const defs = matched[2];
if (!defs) {
throw new Error('No column definitions in this statement!');
}
let parens = 0,
args = [],
ptr = 0;
let i = 0;
const x = defs.length;
for (i = 0; i < x; i++) {
switch (defs[i]) {
case '(':
parens++;
break;
case ')':
parens--;
break;
case ',':
if (parens === 0) {
args.push(defs.slice(ptr, i));
ptr = i + 1;
}
break;
case ' ':
if (ptr === i) {
ptr = i + 1;
}
break;
}
}
args.push(defs.slice(ptr, i));
const fromIdentifier = from.replace(/[`"'[\]]/g, '');
args = args.map((item) => {
let split = item.trim().split(' ');
// SQLite supports all quoting mechanisms prevalent in all major dialects of SQL
// and preserves the original quoting in sqlite_master.
//
// Also, identifiers are never case sensitive, not even when quoted.
//
// Ref: https://www.sqlite.org/lang_keywords.html
const fromMatchCandidates = [
new RegExp(`\`${fromIdentifier}\``, 'i'),
new RegExp(`"${fromIdentifier}"`, 'i'),
new RegExp(`'${fromIdentifier}'`, 'i'),
new RegExp(`\\[${fromIdentifier}\\]`, 'i'),
];
if (fromIdentifier.match(/^\S+$/)) {
fromMatchCandidates.push(new RegExp(`\\b${fromIdentifier}\\b`, 'i'));
}
const doesMatchFromIdentifier = (target) =>
some(fromMatchCandidates, (c) => target.match(c));
const replaceFromIdentifier = (target) =>
fromMatchCandidates.reduce(
(result, candidate) => result.replace(candidate, to),
target
);
if (doesMatchFromIdentifier(split[0])) {
// column definition
if (to) {
split[0] = to;
return split.join(' ');
}
return ''; // for deletions
}
// skip constraint name
const idx = /constraint/i.test(split[0]) ? 2 : 0;
// primary key and unique constraints have one or more
// columns from this table listed between (); replace
// one if it matches
if (/primary|unique/i.test(split[idx])) {
const ret = item.replace(/\(.*\)/, replaceFromIdentifier);
// If any member columns are dropped then uniqueness/pk constraint
// can not be retained
if (ret !== item && isEmpty(to)) return '';
return ret;
}
// foreign keys have one or more columns from this table
// listed between (); replace one if it matches
// foreign keys also have a 'references' clause
// which may reference THIS table; if it does, replace
// column references in that too!
if (/foreign/.test(split[idx])) {
split = item.split(/ references /i);
// the quoted column names save us from having to do anything
// other than a straight replace here
const replacedKeySpec = replaceFromIdentifier(split[0]);
if (split[0] !== replacedKeySpec) {
// If we are removing one or more columns of a foreign
// key, then we should not retain the key at all
if (isEmpty(to)) return '';
else split[0] = replacedKeySpec;
}
if (split[1].slice(0, tableName.length) === tableName) {
// self-referential foreign key
const replacedKeyTargetSpec = split[1].replace(
/\(.*\)/,
replaceFromIdentifier
);
if (split[1] !== replacedKeyTargetSpec) {
// If we are removing one or more columns of a foreign
// key, then we should not retain the key at all
if (isEmpty(to)) return '';
else split[1] = replacedKeyTargetSpec;
}
}
return split.join(' references ');
}
return item;
});
args = args.filter(negate(isEmpty));
if (args.length === 0) {
throw new Error('Unable to drop last column from table');
}
return oneLineSql
.replace(/\(.*\)/, () => `(${args.join(', ')})`)
.replace(/,\s*([,)])/, '$1');
},
// Boy, this is quite a method.
renameColumn: async function(from, to) {
return this.client.transaction(
async (trx) => {
this.trx = trx;
const column = await this.getColumn(from);
const sql = await this.getTableSql(column);
const a = this.client.wrapIdentifier(from);
const b = this.client.wrapIdentifier(to);
const createTable = sql[0];
const newSql = this._doReplace(createTable.sql, a, b);
if (sql === newSql) {
throw new Error('Unable to find the column to change');
}
const { from: mappedFrom, to: mappedTo } = invert(
this.client.postProcessResponse(
invert({
from,
to,
})
)
);
return this.reinsertMapped(createTable, newSql, (row) => {
row[mappedTo] = row[mappedFrom];
return omit(row, mappedFrom);
});
},
{ connection: this.connection }
);
},
dropColumn: async function(columns) {
return this.client.transaction(
(trx) => {
this.trx = trx;
return Promise.all(columns.map((column) => this.getColumn(column)))
.then(() => this.getTableSql())
.then((sql) => {
const createTable = sql[0];
let newSql = createTable.sql;
columns.forEach((column) => {
const a = this.client.wrapIdentifier(column);
newSql = this._doReplace(newSql, a, '');
});
if (sql === newSql) {
throw new Error('Unable to find the column to change');
}
const mappedColumns = Object.keys(
this.client.postProcessResponse(
fromPairs(columns.map((column) => [column, column]))
)
);
return this.reinsertMapped(createTable, newSql, (row) =>
omit(row, ...mappedColumns)
);
});
},
{ connection: this.connection }
);
},
reinsertMapped(createTable, newSql, mapRow) {
return Promise.resolve()
.then(() => this.createTempTable(createTable))
.then(() => this.copyData())
.then(() => this.dropOriginal())
.then(() => this.trx.raw(newSql))
.then(() => this.reinsertData(mapRow))
.then(() => this.dropTempTable());
},
});
module.exports = SQLite3_DDL;

View File

@@ -0,0 +1,156 @@
const inherits = require('inherits');
const TableCompiler = require('../../../schema/tablecompiler');
const { filter, values } = require('lodash');
// Table Compiler
// -------
// Table compiler for SQLite3.
function TableCompiler_SQLite3() {
  TableCompiler.apply(this, arguments);
  // SQLite primary keys must be declared at CREATE TABLE time; placeholder.
  this.primaryKey = void 0;
}
inherits(TableCompiler_SQLite3, TableCompiler);
// Create a new table.
TableCompiler_SQLite3.prototype.createQuery = function(columns, ifNot) {
  const createStatement = ifNot
    ? 'create table if not exists '
    : 'create table ';
  let sql = createStatement + this.tableName() + ' (' + columns.sql.join(', ');
  // SQLite forces primary keys to be added when the table is initially created
  // so we will need to check for a primary key commands and add the columns
  // to the table's declaration here so they can be created on the tables.
  sql += this.foreignKeys() || '';
  sql += this.primaryKeys() || '';
  sql += ')';
  this.pushQuery(sql);
};
// Add columns, one `alter table ... add column` statement per column. SQLite
// cannot alter existing columns, so a truthy `prefix` (used by other
// dialects for `alter column`) is rejected.
TableCompiler_SQLite3.prototype.addColumns = function(columns, prefix) {
  if (prefix) {
    throw new Error('Sqlite does not support alter column.');
  }
  for (let i = 0, l = columns.sql.length; i < l; i++) {
    this.pushQuery({
      sql: `alter table ${this.tableName()} add column ${columns.sql[i]}`,
      bindings: columns.bindings[i],
    });
  }
};
// Compile a drop unique key command.
// Compile a drop unique key command (SQLite drops the backing index).
TableCompiler_SQLite3.prototype.dropUnique = function(columns, indexName) {
  indexName = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('unique', this.tableNameRaw, columns);
  this.pushQuery(`drop index ${indexName}`);
};
// Compile a drop index command (explicit name, or derived from the columns).
TableCompiler_SQLite3.prototype.dropIndex = function(columns, indexName) {
  indexName = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('index', this.tableNameRaw, columns);
  this.pushQuery(`drop index ${indexName}`);
};
// Compile a unique key command (implemented as a unique index).
TableCompiler_SQLite3.prototype.unique = function(columns, indexName) {
  indexName = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('unique', this.tableNameRaw, columns);
  columns = this.formatter.columnize(columns);
  this.pushQuery(
    `create unique index ${indexName} on ${this.tableName()} (${columns})`
  );
};
// Compile a plain index key command.
TableCompiler_SQLite3.prototype.index = function(columns, indexName) {
  indexName = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('index', this.tableNameRaw, columns);
  columns = this.formatter.columnize(columns);
  this.pushQuery(
    `create index ${indexName} on ${this.tableName()} (${columns})`
  );
};
// Primary/foreign key declarations are folded into createQuery (via
// primaryKeys()/foreignKeys()); outside of a create they are unsupported on
// SQLite, so only warn here rather than emit SQL.
TableCompiler_SQLite3.prototype.primary = TableCompiler_SQLite3.prototype.foreign = function() {
  if (this.method !== 'create' && this.method !== 'createIfNot') {
    this.client.logger.warn(
      'SQLite3 Foreign & Primary keys may only be added on create'
    );
  }
};
// Build the `, [constraint X] primary key (...)` fragment for createQuery
// from any queued `primary` command. Implicitly returns undefined when no
// primary key was requested (callers use `|| ''`).
TableCompiler_SQLite3.prototype.primaryKeys = function() {
  const pks = filter(this.grouped.alterTable || [], { method: 'primary' });
  if (pks.length > 0 && pks[0].args.length > 0) {
    const columns = pks[0].args[0];
    let constraintName = pks[0].args[1] || '';
    if (constraintName) {
      constraintName = ' constraint ' + this.formatter.wrap(constraintName);
    }
    return `,${constraintName} primary key (${this.formatter.columnize(
      columns
    )})`;
  }
};
// Build the `, [constraint X] foreign key(...) references ...` fragments for
// createQuery from any queued `foreign` commands. Returns '' when none.
TableCompiler_SQLite3.prototype.foreignKeys = function() {
  const fkCommands = filter(this.grouped.alterTable || [], {
    method: 'foreign',
  });
  let sql = '';
  for (const command of fkCommands) {
    const foreign = command.args[0];
    const column = this.formatter.columnize(foreign.column);
    const references = this.formatter.columnize(foreign.references);
    const foreignTable = this.formatter.wrap(foreign.inTable);
    const constraint = foreign.keyName
      ? ` constraint ${this.formatter.wrap(foreign.keyName)}`
      : '';
    sql += `,${constraint} foreign key(${column}) references ${foreignTable}(${references})`;
    if (foreign.onDelete) sql += ` on delete ${foreign.onDelete}`;
    if (foreign.onUpdate) sql += ` on update ${foreign.onUpdate}`;
  }
  return sql;
};
// Join the compiled column definitions for the CREATE TABLE body. The
// previous implementation called `.concat()` with no arguments, which only
// produced a useless shallow copy before joining.
TableCompiler_SQLite3.prototype.createTableBlock = function() {
  return this.getColumns().join(',');
};
// Compile a rename column command... very complex in sqlite
// Compile a rename column command... very complex in sqlite: read the
// table's PRAGMA column info, then delegate to the DDL helper, which
// rebuilds the table.
TableCompiler_SQLite3.prototype.renameColumn = function(from, to) {
  const compiler = this;
  this.pushQuery({
    sql: `PRAGMA table_info(${this.tableName()})`,
    // NOTE(review): inside `output`, `this` is bound by the runner (which
    // supplies `this.connection`); it is not the compiler — confirm before
    // converting to an arrow function.
    output(pragma) {
      return compiler.client
        .ddl(compiler, pragma, this.connection)
        .renameColumn(from, to);
    },
  });
};
// Compile a drop column command: like renameColumn, delegates to the DDL
// helper with the current PRAGMA column listing. Accepts column names as
// variadic arguments.
TableCompiler_SQLite3.prototype.dropColumn = function() {
  const compiler = this;
  const columns = values(arguments);
  this.pushQuery({
    sql: `PRAGMA table_info(${this.tableName()})`,
    output(pragma) {
      return compiler.client
        .ddl(compiler, pragma, this.connection)
        .dropColumn(columns);
    },
  });
};
module.exports = TableCompiler_SQLite3;

295
node_modules/knex/lib/formatter.js generated vendored Normal file
View File

@@ -0,0 +1,295 @@
const QueryBuilder = require('./query/builder');
const Raw = require('./raw');
const { transform } = require('lodash');
// Valid values for the `order by` clause generation.
const orderBys = ['asc', 'desc'];
// Turn this into a lookup map
const operators = transform(
[
'=',
'<',
'>',
'<=',
'>=',
'<>',
'!=',
'like',
'not like',
'between',
'not between',
'ilike',
'not ilike',
'exists',
'not exist',
'rlike',
'not rlike',
'regexp',
'not regexp',
'&',
'|',
'^',
'<<',
'>>',
'~',
'~*',
'!~',
'!~*',
'#',
'&&',
'@>',
'<@',
'||',
'&<',
'&>',
'-|-',
'@@',
'!!',
['?', '\\?'],
['?|', '\\?|'],
['?&', '\\?&'],
],
(result, key) => {
if (Array.isArray(key)) {
result[key[0]] = key[1];
} else {
result[key] = key;
}
},
{}
);
// Formatter: per-statement helper that wraps identifiers, formats values and
// parameters, and accumulates the bindings collected while a single query is
// compiled. One instance is created per compiled statement.
class Formatter {
  constructor(client, builder) {
    this.client = client;
    this.builder = builder;
    // Parameter bindings gathered as SQL text is produced, in order.
    this.bindings = [];
  }
  // Accepts a string or array of columns to wrap as appropriate.
  columnize(target) {
    const columns = Array.isArray(target) ? target : [target];
    let str = '',
      i = -1;
    while (++i < columns.length) {
      if (i > 0) str += ', ';
      str += this.wrap(columns[i]);
    }
    return str;
  }
  // Turns a list of values into a list of ?'s, joining them with commas unless
  // a "joining" value is specified (e.g. ' and ')
  parameterize(values, notSetValue) {
    if (typeof values === 'function') return this.parameter(values);
    values = Array.isArray(values) ? values : [values];
    let str = '',
      i = -1;
    while (++i < values.length) {
      if (i > 0) str += ', ';
      str += this.parameter(values[i] === undefined ? notSetValue : values[i]);
    }
    return str;
  }
  // Formats `values` into a parenthesized list of parameters for a `VALUES`
  // clause.
  //
  // [1, 2] -> '(?, ?)'
  // [[1, 2], [3, 4]] -> '((?, ?), (?, ?))'
  // knex('table') -> '(select * from "table")'
  // knex.raw('select ?', 1) -> '(select ?)'
  //
  values(values) {
    if (Array.isArray(values)) {
      if (Array.isArray(values[0])) {
        return `(${values
          .map((value) => `(${this.parameterize(value)})`)
          .join(', ')})`;
      }
      return `(${this.parameterize(values)})`;
    }
    if (values instanceof Raw) {
      return `(${this.parameter(values)})`;
    }
    return this.parameter(values);
  }
  // Checks whether a value is a function... if it is, we compile it
  // otherwise we check whether it's a raw
  parameter(value) {
    if (typeof value === 'function') {
      return this.outputQuery(this.compileCallback(value), true);
    }
    return this.unwrapRaw(value, true) || '?';
  }
  // If `value` is a QueryBuilder or Raw, compile it to SQL text (absorbing
  // its bindings) and return that text; otherwise push the plain value onto
  // `bindings` when `isParameter` is set and implicitly return undefined so
  // callers can fall back to '?'.
  unwrapRaw(value, isParameter) {
    let query;
    if (value instanceof QueryBuilder) {
      query = this.client.queryCompiler(value).toSQL();
      if (query.bindings) {
        this.bindings = this.bindings.concat(query.bindings);
      }
      return this.outputQuery(query, isParameter);
    }
    if (value instanceof Raw) {
      value.client = this.client;
      // Propagate the builder's query context into the raw expression.
      if (this.builder._queryContext) {
        value.queryContext = () => {
          return this.builder._queryContext;
        };
      }
      query = value.toSQL();
      if (query.bindings) {
        this.bindings = this.bindings.concat(query.bindings);
      }
      return query.sql;
    }
    if (isParameter) {
      this.bindings.push(value);
    }
  }
  /**
   * Creates SQL for a parameter, which might be passed to where() or .with() or
   * pretty much anywhere in API.
   *
   * @param value Callback (for where or complete builder), Raw or QueryBuilder
   * @param method Optional at least 'select' or 'update' are valid
   */
  rawOrFn(value, method) {
    if (typeof value === 'function') {
      return this.outputQuery(this.compileCallback(value, method));
    }
    return this.unwrapRaw(value) || '';
  }
  // Puts the appropriate wrapper around a value depending on the database
  // engine, unless it's a knex.raw value, in which case it's left alone.
  wrap(value, isParameter) {
    const raw = this.unwrapRaw(value, isParameter);
    if (raw) return raw;
    switch (typeof value) {
      case 'function':
        return this.outputQuery(this.compileCallback(value), true);
      case 'object':
        return this.parseObject(value);
      case 'number':
        return value;
      default:
        return this.wrapString(value + '');
    }
  }
  // Wrap a bare identifier through the client's wrapIdentifier hook, passing
  // the builder's query context along.
  wrapAsIdentifier(value) {
    const queryContext = this.builder.queryContext();
    return this.client.wrapIdentifier((value || '').trim(), queryContext);
  }
  // Produce an `x as y` aliasing expression.
  alias(first, second) {
    return first + ' as ' + second;
  }
  // Validate an SQL operator against the whitelist (or accept a Raw); throws
  // on anything unknown to prevent operator injection.
  operator(value) {
    const raw = this.unwrapRaw(value);
    if (raw) return raw;
    const operator = operators[(value || '').toLowerCase()];
    if (!operator) {
      throw new TypeError(`The operator "${value}" is not permitted`);
    }
    return operator;
  }
  // Specify the direction of the ordering.
  direction(value) {
    const raw = this.unwrapRaw(value);
    if (raw) return raw;
    return orderBys.indexOf((value || '').toLowerCase()) !== -1 ? value : 'asc';
  }
  // Compiles a callback using the query builder.
  compileCallback(callback, method) {
    const { client } = this;
    // Build the callback
    const builder = client.queryBuilder();
    callback.call(builder, builder);
    // Compile the callback, using the current formatter (to track all bindings).
    const compiler = client.queryCompiler(builder);
    compiler.formatter = this;
    // Return the compiled & parameterized sql.
    return compiler.toSQL(method || builder._method || 'select');
  }
  // Ensures the query is aliased if necessary.
  outputQuery(compiled, isParameter) {
    let sql = compiled.sql || '';
    if (sql) {
      if (
        (compiled.method === 'select' || compiled.method === 'first') &&
        (isParameter || compiled.as)
      ) {
        sql = `(${sql})`;
        if (compiled.as) return this.alias(sql, this.wrap(compiled.as));
      }
    }
    return sql;
  }
  // Key-value notation for alias
  parseObject(obj) {
    const ret = [];
    for (const alias in obj) {
      const queryOrIdentifier = obj[alias];
      // Avoids double aliasing for subqueries
      if (typeof queryOrIdentifier === 'function') {
        const compiled = this.compileCallback(queryOrIdentifier);
        compiled.as = alias; // enforces the object's alias
        ret.push(this.outputQuery(compiled, true));
      } else if (queryOrIdentifier instanceof QueryBuilder) {
        ret.push(
          this.alias(
            `(${this.wrap(queryOrIdentifier)})`,
            this.wrapAsIdentifier(alias)
          )
        );
      } else {
        ret.push(
          this.alias(this.wrap(queryOrIdentifier), this.wrapAsIdentifier(alias))
        );
      }
    }
    return ret.join(', ');
  }
  // Coerce to string to prevent strange errors when it's not a string.
  wrapString(value) {
    const asIndex = value.toLowerCase().indexOf(' as ');
    if (asIndex !== -1) {
      const first = value.slice(0, asIndex);
      const second = value.slice(asIndex + 4);
      return this.alias(this.wrap(first), this.wrapAsIdentifier(second));
    }
    const wrapped = [];
    let i = -1;
    const segments = value.split('.');
    while (++i < segments.length) {
      value = segments[i];
      // First segment of a dotted name (schema/table) may itself be a Raw or
      // subquery, so it goes through the full wrap() path.
      if (i === 0 && segments.length > 1) {
        wrapped.push(this.wrap((value || '').trim()));
      } else {
        wrapped.push(this.wrapAsIdentifier(value));
      }
    }
    return wrapped.join('.');
  }
}
module.exports = Formatter;

14
node_modules/knex/lib/functionhelper.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
// FunctionHelper
// -------
// Exposes SQL function shortcuts (knex.fn.*) bound to a client.
function FunctionHelper(client) {
  this.client = client;
}
// Returns a raw CURRENT_TIMESTAMP expression, with fractional-second
// precision when a numeric precision is supplied.
FunctionHelper.prototype.now = function(precision) {
  const hasPrecision = typeof precision === 'number';
  const expression = hasPrecision
    ? `CURRENT_TIMESTAMP(${precision})`
    : 'CURRENT_TIMESTAMP';
  return this.client.raw(expression);
};
module.exports = FunctionHelper;

98
node_modules/knex/lib/helpers.js generated vendored Normal file
View File

@@ -0,0 +1,98 @@
/* eslint no-console:0 */
const {
isFunction,
isUndefined,
isPlainObject,
isArray,
isTypedArray,
} = require('lodash');
const { CLIENT_ALIASES } = require('./constants');
// Check if the first argument is an array, otherwise uses all arguments as an
// array.
// Check if the first argument is an array, otherwise uses all arguments as an
// array. Uses rest parameters instead of the old manual `arguments` copy
// loop; the variadic call signature is unchanged.
function normalizeArr(...args) {
  if (Array.isArray(args[0])) {
    return args[0];
  }
  return args;
}
// Recursively determine whether `mixed` contains an `undefined` value.
// Typed arrays cannot hold undefined, and anything with a toSQL method
// (QueryBuilder/Raw) is skipped — those are validated during compilation.
function containsUndefined(mixed) {
  let argContainsUndefined = false;
  if (isTypedArray(mixed)) return false;
  if (mixed && isFunction(mixed.toSQL)) {
    //Any QueryBuilder or Raw will automatically be validated during compile.
    return argContainsUndefined;
  }
  if (isArray(mixed)) {
    // Stop scanning as soon as one undefined is found.
    for (let i = 0; i < mixed.length; i++) {
      if (argContainsUndefined) break;
      argContainsUndefined = containsUndefined(mixed[i]);
    }
  } else if (isPlainObject(mixed)) {
    Object.keys(mixed).forEach((key) => {
      if (!argContainsUndefined) {
        argContainsUndefined = containsUndefined(mixed[key]);
      }
    });
  } else {
    // Scalar (or non-plain object): only undefined itself counts.
    argContainsUndefined = isUndefined(mixed);
  }
  return argContainsUndefined;
}
// Collect the indices (array positions) or keys (object properties) whose
// values contain `undefined`; any other kind of input reports index 0.
function getUndefinedIndices(mixed) {
  if (Array.isArray(mixed)) {
    return mixed.reduce((indices, item, index) => {
      if (containsUndefined(item)) {
        indices.push(index);
      }
      return indices;
    }, []);
  }
  if (isPlainObject(mixed)) {
    return Object.keys(mixed).filter((key) => containsUndefined(mixed[key]));
  }
  return [0];
}
// Adds a `queryContext` accessor to Target's prototype: with an argument it
// stores the context (returning `this` for chaining); with no argument it
// returns the stored context. The context is later passed to the
// wrapIdentifier and postProcessResponse hooks.
function addQueryContext(Target) {
  Target.prototype.queryContext = function(context) {
    if (!isUndefined(context)) {
      this._queryContext = context;
      return this;
    }
    return this._queryContext;
  };
}
// Map a user-supplied client name through the alias table, falling back to
// the name itself when no alias is registered.
function resolveClientNameWithAliases(clientName) {
  const alias = CLIENT_ALIASES[clientName];
  return alias || clientName;
}
module.exports = {
addQueryContext,
containsUndefined,
normalizeArr,
resolveClientNameWithAliases,
getUndefinedIndices,
};

3
node_modules/knex/lib/index.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
const Knex = require('./knex');
module.exports = Knex;

111
node_modules/knex/lib/interface.js generated vendored Normal file
View File

@@ -0,0 +1,111 @@
const { isEmpty, map, clone, each } = require('lodash');
const Bluebird = require('bluebird');
// Mixes the common query-execution interface (toQuery, then, stream, ...)
// into a builder constructor's prototype.
module.exports = function(Target) {
  // Render the builder to a fully-bound SQL string (bindings inlined),
  // joining multiple statements with ';\n'.
  Target.prototype.toQuery = function(tz) {
    let data = this.toSQL(this._method, tz);
    if (!Array.isArray(data)) data = [data];
    return map(data, (statement) => {
      return this.client._formatQuery(statement.sql, statement.bindings, tz);
    }).join(';\n');
  };
  // Create a new instance of the `Runner`, passing in the current object.
  Target.prototype.then = function(/* onFulfilled, onRejected */) {
    let result = this.client.runner(this).run();
    if (this.client.config.asyncStackTraces) {
      result = result.catch((err) => {
        err.originalStack = err.stack;
        const firstLine = err.stack.split('\n')[0];
        this._asyncStack.unshift(firstLine);
        // put the fake more helpful "async" stack on the thrown error
        err.stack = this._asyncStack.join('\n');
        throw err;
      });
    }
    return Bluebird.resolve(result.then.apply(result, arguments));
  };
  // Add additional "options" to the builder. Typically used for client specific
  // items, like the `mysql` and `sqlite3` drivers.
  Target.prototype.options = function(opts) {
    this._options = this._options || [];
    this._options.push(clone(opts) || {});
    return this;
  };
  // Sets an explicit "connection" we wish to use for this query.
  Target.prototype.connection = function(connection) {
    this._connection = connection;
    return this;
  };
  // Set a debug flag for the current schema query stack.
  Target.prototype.debug = function(enabled) {
    this._debug = arguments.length ? enabled : true;
    return this;
  };
  // Set the transaction object for this query.
  // NOTE(review): the isEmpty(t) guard runs after t.client has already been
  // inspected, so an invalid non-empty value is only warned about — confirm
  // this ordering is intentional before changing.
  Target.prototype.transacting = function(t) {
    if (t && t.client) {
      if (!t.client.transacting) {
        t.client.logger.warn(`Invalid transaction value: ${t.client}`);
      } else {
        this.client = t.client;
      }
    }
    if (isEmpty(t)) {
      this.client.logger.error(
        'Invalid value on transacting call, potential bug'
      );
      throw Error(
        'Invalid transacting value (null, undefined or empty object)'
      );
    }
    return this;
  };
  // Initializes a stream.
  Target.prototype.stream = function(options) {
    return this.client.runner(this).stream(options);
  };
  // Initialize a stream & pipe automatically.
  Target.prototype.pipe = function(writable, options) {
    return this.client.runner(this).pipe(
      writable,
      options
    );
  };
  // Creates a method which "coerces" to a promise, by calling a
  // "then" method on the current `Target`
  each(
    [
      'bind',
      'catch',
      'finally',
      'asCallback',
      'spread',
      'map',
      'reduce',
      'thenReturn',
      'return',
      'yield',
      'ensure',
      'reflect',
      'get',
      'mapSeries',
      'delay',
    ],
    function(method) {
      Target.prototype[method] = function() {
        const promise = this.then();
        return promise[method].apply(promise, arguments);
      };
    }
  );
};

79
node_modules/knex/lib/knex.js generated vendored Normal file
View File

@@ -0,0 +1,79 @@
const Raw = require('./raw');
const Client = require('./client');
const QueryBuilder = require('./query/builder');
const QueryInterface = require('./query/methods');
const makeKnex = require('./util/make-knex');
const parseConnection = require('./util/parse-connection');
const fakeClient = require('./util/fake-client');
const { SUPPORTED_CLIENTS } = require('./constants');
const { resolveClientNameWithAliases } = require('./helpers');
// Entry point: build a knex instance from a config object (or a connection
// string, which is parsed first), resolving the dialect-specific Client.
function Knex(config) {
  // If config is a string, try to parse it
  if (typeof config === 'string') {
    // NOTE(review): `arguments[2]` looks like an undocumented extra-options
    // override for the connection-string form — confirm callers before
    // changing.
    const parsedConfig = Object.assign(parseConnection(config), arguments[2]);
    return new Knex(parsedConfig);
  }
  let Dialect;
  // If user provided no relevant parameters, use generic client
  if (arguments.length === 0 || (!config.client && !config.dialect)) {
    Dialect = Client;
  }
  // If user provided Client constructor as a parameter, use it
  else if (
    typeof config.client === 'function' &&
    config.client.prototype instanceof Client
  ) {
    Dialect = config.client;
  }
  // If neither applies, let's assume user specified name of a client or dialect as a string
  else {
    const clientName = config.client || config.dialect;
    if (!SUPPORTED_CLIENTS.includes(clientName)) {
      throw new Error(
        `knex: Unknown configuration option 'client' value ${clientName}. Note that it is case-sensitive, check documentation for supported values.`
      );
    }
    const resolvedClientName = resolveClientNameWithAliases(clientName);
    Dialect = require(`./dialects/${resolvedClientName}/index.js`);
  }
  // If config connection parameter is passed as string, try to parse it
  if (typeof config.connection === 'string') {
    config = Object.assign({}, config, {
      connection: parseConnection(config.connection).connection,
    });
  }
  const newKnex = makeKnex(new Dialect(config));
  if (config.userParams) {
    newKnex.userParams = config.userParams;
  }
  return newKnex;
}
// Expose Client on the main Knex namespace.
Knex.Client = Client;
Knex.QueryBuilder = {
extend: function(methodName, fn) {
QueryBuilder.extend(methodName, fn);
QueryInterface.push(methodName);
},
};
/* eslint no-console:0 */
// Run a "raw" query, though we can't do anything with it other than put
// it in a query statement.
Knex.raw = (sql, bindings) => {
console.warn(
'global Knex.raw is deprecated, use knex.raw (chain off an initialized knex object)'
);
return new Raw(fakeClient).set(sql, bindings);
};
module.exports = Knex;

78
node_modules/knex/lib/logger.js generated vendored Normal file
View File

@@ -0,0 +1,78 @@
/* eslint no-console:0 */
const color = require('colorette');
const { inspect } = require('util');
const { isFunction, isNil, isString } = require('lodash');
// Logger: wraps console output, allowing each level (debug/warn/error/
// deprecate) to be overridden by a user-supplied function via config.log.
class Logger {
  constructor(config) {
    const {
      log: {
        debug,
        warn,
        error,
        deprecate,
        inspectionDepth,
        enableColors,
      } = {},
    } = config;
    // Depth used when inspect()-ing non-string messages.
    this._inspectionDepth = inspectionDepth || 5;
    this._enableColors = resolveIsEnabledColors(enableColors);
    this._debug = debug;
    this._warn = warn;
    this._error = error;
    this._deprecate = deprecate;
  }
  // Core dispatcher: routes to the user function when one is provided,
  // otherwise pretty-prints to console (colored when enabled).
  _log(message, userFn, colorFn) {
    if (!isNil(userFn) && !isFunction(userFn)) {
      throw new TypeError('Extensions to knex logger must be functions!');
    }
    if (isFunction(userFn)) {
      userFn(message);
      return;
    }
    if (!isString(message)) {
      message = inspect(message, {
        depth: this._inspectionDepth,
        colors: this._enableColors,
      });
    }
    console.log(colorFn ? colorFn(message) : message);
  }
  debug(message) {
    this._log(message, this._debug);
  }
  warn(message) {
    this._log(message, this._warn, color.yellow);
  }
  error(message) {
    this._log(message, this._error, color.red);
  }
  // Standardized "X is deprecated, please use Y" warning.
  deprecate(method, alternative) {
    const message = `${method} is deprecated, please use ${alternative}`;
    this._log(message, this._deprecate, color.yellow);
  }
}
// Decide whether log colors are enabled: honour an explicit setting,
// otherwise enable them only when stdout is a TTY.
function resolveIsEnabledColors(enableColorsParam) {
  if (!isNil(enableColorsParam)) {
    return enableColorsParam;
  }
  return process && process.stdout ? process.stdout.isTTY : false;
}
module.exports = Logger;

106
node_modules/knex/lib/migrate/MigrationGenerator.js generated vendored Normal file
View File

@@ -0,0 +1,106 @@
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const mkdirp = require('mkdirp');
const { writeJsFileUsingTemplate } = require('../util/template');
const { getMergedConfig } = require('./configuration-merger');
// MigrationGenerator: creates timestamped migration files from stub
// templates, according to the merged migration configuration.
class MigrationGenerator {
  constructor(migrationConfig) {
    this.config = getMergedConfig(migrationConfig);
  }
  // Creates a new migration, with a given name.
  async make(name, config) {
    this.config = getMergedConfig(config, this.config);
    if (!name) {
      return Promise.reject(
        new Error('A name must be specified for the generated migration')
      );
    }
    await this._ensureFolder();
    const createdMigrationFilePath = await this._writeNewMigration(name);
    return createdMigrationFilePath;
  }
  // Ensures a folder for the migrations exist, dependent on the migration
  // config settings.
  _ensureFolder() {
    const dirs = this._absoluteConfigDirs();
    const promises = dirs.map((dir) => {
      // stat first; create the directory only when it is missing.
      return promisify(fs.stat)(dir).catch(() => promisify(mkdirp)(dir));
    });
    return Promise.all(promises);
  }
  // Path to the stub template: explicit config.stub, or the bundled stub
  // matching the configured file extension.
  _getStubPath() {
    return (
      this.config.stub ||
      path.join(__dirname, 'stub', this.config.extension + '.stub')
    );
  }
  // Build the timestamped filename; a leading '-' on the name is stripped.
  _getNewMigrationName(name) {
    if (name[0] === '-') name = name.slice(1);
    return yyyymmddhhmmss() + '_' + name + '.' + this.config.extension;
  }
  _getNewMigrationPath(name) {
    const fileName = this._getNewMigrationName(name);
    const dirs = this._absoluteConfigDirs();
    const dir = dirs.slice(-1)[0]; // Get last specified directory
    return path.join(dir, fileName);
  }
  // Write a new migration to disk, using the config and generated filename,
  // passing any `variables` given in the config to the template.
  async _writeNewMigration(name) {
    const migrationPath = this._getNewMigrationPath(name);
    await writeJsFileUsingTemplate(
      migrationPath,
      this._getStubPath(),
      { variable: 'd' },
      this.config.variables || {}
    );
    return migrationPath;
  }
  // Resolve the configured directory (or directories) against the cwd.
  _absoluteConfigDirs() {
    const directories = Array.isArray(this.config.directory)
      ? this.config.directory
      : [this.config.directory];
    return directories.map((directory) => {
      if (!directory) {
        // eslint-disable-next-line no-console
        console.warn(
          'Failed to resolve config file, knex cannot determine where to generate migrations'
        );
      }
      return path.resolve(process.cwd(), directory);
    });
  }
}
// Ensure that we have 2 places for each of the date segments.
function padDate(segment) {
  return String(segment).padStart(2, '0');
}
// Get a date object in the correct format, without requiring a full out library
// like "moment.js". Produces a 14-digit local timestamp: YYYYMMDDHHMMSS.
function yyyymmddhhmmss() {
  const now = new Date();
  const parts = [
    now.getFullYear().toString(),
    padDate(now.getMonth() + 1),
    padDate(now.getDate()),
    padDate(now.getHours()),
    padDate(now.getMinutes()),
    padDate(now.getSeconds()),
  ];
  return parts.join('');
}
module.exports = MigrationGenerator;

600
node_modules/knex/lib/migrate/Migrator.js generated vendored Normal file
View File

@@ -0,0 +1,600 @@
// Migrator
// -------
const {
differenceWith,
each,
filter,
get,
isFunction,
isBoolean,
isEmpty,
isUndefined,
max,
} = require('lodash');
const inherits = require('inherits');
const {
getLockTableName,
getLockTableNameWithSchema,
getTable,
getTableName,
} = require('./table-resolver');
const { getSchemaBuilder } = require('./table-creator');
const migrationListResolver = require('./migration-list-resolver');
const MigrationGenerator = require('./MigrationGenerator');
const { getMergedConfig } = require('./configuration-merger');
// Error raised when the migrations lock table is already held by another
// migrator. _runBatch treats this error specially: it warns and does NOT
// free the lock, since some other process presumably owns it.
function LockError(msg) {
  this.name = 'MigrationLocked';
  this.message = msg;
}
inherits(LockError, Error);
// The new migration we're performing, typically called from the `knex.migrate`
// interface on the main `knex` object. Passes the `knex` instance performing
// the migration.
class Migrator {
  constructor(knex) {
    // Clone knex instance and remove post-processing that is unnecessary for internal queries from a cloned config
    if (isFunction(knex)) {
      if (!knex.isTransaction) {
        this.knex = knex.withUserParams({
          ...knex.userParams,
        });
      } else {
        this.knex = knex;
      }
    } else {
      this.knex = Object.assign({}, knex);
      this.knex.userParams = this.knex.userParams || {};
    }
    this.config = getMergedConfig(this.knex.client.config.migrations);
    this.generator = new MigrationGenerator(this.knex.client.config.migrations);
    // Name of the migration currently executing, used in failure warnings.
    this._activeMigration = {
      fileName: null,
    };
  }
  // Migrators to the latest configuration.
  latest(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);
    return migrationListResolver
      .listAllAndCompleted(this.config, this.knex)
      .then((value) => {
        if (!this.config.disableMigrationsListValidation) {
          validateMigrationList(this.config.migrationSource, value);
        }
        return value;
      })
      .then(([all, completed]) => {
        const migrations = getNewMigrations(
          this.config.migrationSource,
          all,
          completed
        );
        // Wrap the whole batch in one transaction only when transactions are
        // enabled globally and no individual migration opts out via its own
        // `config.transaction` setting.
        const transactionForAll =
          !this.config.disableTransactions &&
          isEmpty(
            filter(migrations, (migration) => {
              const migrationContents = this.config.migrationSource.getMigration(
                migration
              );
              return !this._useTransaction(migrationContents);
            })
          );
        if (transactionForAll) {
          return this.knex.transaction((trx) => {
            return this._runBatch(migrations, 'up', trx);
          });
        } else {
          return this._runBatch(migrations, 'up');
        }
      });
  }
  // Runs the next migration that has not yet been run
  up(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);
    return migrationListResolver
      .listAllAndCompleted(this.config, this.knex)
      .then((value) => {
        if (!this.config.disableMigrationsListValidation) {
          validateMigrationList(this.config.migrationSource, value);
        }
        return value;
      })
      .then(([all, completed]) => {
        const newMigrations = getNewMigrations(
          this.config.migrationSource,
          all,
          completed
        );
        let migrationToRun;
        const name = this.config.name;
        if (name) {
          // A specific migration was requested by name; it must exist and
          // not already be completed.
          if (!completed.includes(name)) {
            migrationToRun = newMigrations.find((migration) => {
              return (
                this.config.migrationSource.getMigrationName(migration) === name
              );
            });
            if (!migrationToRun) {
              throw new Error(`Migration "${name}" not found.`);
            }
          }
        } else {
          migrationToRun = newMigrations[0];
        }
        const migrationsToRun = [];
        if (migrationToRun) {
          migrationsToRun.push(migrationToRun);
        }
        const transactionForAll =
          !this.config.disableTransactions &&
          isEmpty(
            filter(migrationsToRun, (migration) => {
              const migrationContents = this.config.migrationSource.getMigration(
                migration
              );
              return !this._useTransaction(migrationContents);
            })
          );
        if (transactionForAll) {
          return this.knex.transaction((trx) => {
            return this._runBatch(migrationsToRun, 'up', trx);
          });
        } else {
          return this._runBatch(migrationsToRun, 'up');
        }
      });
  }
  // Rollback the last "batch", or all, of migrations that were run.
  rollback(config, all = false) {
    this._disableProcessing();
    return new Promise((resolve, reject) => {
      try {
        this.config = getMergedConfig(config, this.config);
      } catch (e) {
        reject(e);
        // FIX: the promise is already rejected; bail out instead of
        // continuing to list/run migrations with a stale config.
        return;
      }
      migrationListResolver
        .listAllAndCompleted(this.config, this.knex)
        .then((value) => {
          if (!this.config.disableMigrationsListValidation) {
            validateMigrationList(this.config.migrationSource, value);
          }
          return value;
        })
        .then((val) => {
          const [allMigrations, completedMigrations] = val;
          // `all === true` rolls back every completed migration (newest
          // first); otherwise only the most recent batch is rolled back.
          return all
            ? allMigrations
                .filter((migration) => {
                  return completedMigrations.includes(migration.file);
                })
                .reverse()
            : this._getLastBatch(val);
        })
        .then((migrations) => {
          return this._runBatch(migrations, 'down');
        })
        .then(resolve, reject);
    });
  }
  // Rolls back a single completed migration: the named one, or the most
  // recently completed one when no name is configured.
  down(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);
    return migrationListResolver
      .listAllAndCompleted(this.config, this.knex)
      .then((value) => {
        if (!this.config.disableMigrationsListValidation) {
          validateMigrationList(this.config.migrationSource, value);
        }
        return value;
      })
      .then(([all, completed]) => {
        const completedMigrations = all.filter((migration) => {
          return completed.includes(
            this.config.migrationSource.getMigrationName(migration)
          );
        });
        let migrationToRun;
        const name = this.config.name;
        if (name) {
          migrationToRun = completedMigrations.find((migration) => {
            return (
              this.config.migrationSource.getMigrationName(migration) === name
            );
          });
          if (!migrationToRun) {
            throw new Error(`Migration "${name}" was not run.`);
          }
        } else {
          migrationToRun = completedMigrations[completedMigrations.length - 1];
        }
        const migrationsToRun = [];
        if (migrationToRun) {
          migrationsToRun.push(migrationToRun);
        }
        return this._runBatch(migrationsToRun, 'down');
      });
  }
  // Difference between completed rows in the migrations table and available
  // migration files: 0 when up to date, negative when migrations are pending.
  status(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);
    return Promise.all([
      getTable(this.knex, this.config.tableName, this.config.schemaName).select(
        '*'
      ),
      migrationListResolver.listAll(this.config.migrationSource),
    ]).then(([db, code]) => db.length - code.length);
  }
  // Retrieves and returns the current migration version we're on, as a promise.
  // If no migrations have been run yet, return "none".
  currentVersion(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);
    return migrationListResolver
      .listCompleted(this.config.tableName, this.config.schemaName, this.knex)
      .then((completed) => {
        // Version is the timestamp prefix before the first underscore.
        const val = max(completed.map((value) => value.split('_')[0]));
        return isUndefined(val) ? 'none' : val;
      });
  }
  // list all migrations
  async list(config) {
    this._disableProcessing();
    this.config = getMergedConfig(config, this.config);
    const [all, completed] = await migrationListResolver.listAllAndCompleted(
      this.config,
      this.knex
    );
    if (!this.config.disableMigrationsListValidation) {
      validateMigrationList(this.config.migrationSource, [all, completed]);
    }
    const newMigrations = getNewMigrations(
      this.config.migrationSource,
      all,
      completed
    );
    return [completed, newMigrations];
  }
  // Clears the migrations lock row, if the lock table exists at all.
  forceFreeMigrationsLock(config) {
    this.config = getMergedConfig(config, this.config);
    const lockTable = getLockTableName(this.config.tableName);
    return getSchemaBuilder(this.knex, this.config.schemaName)
      .hasTable(lockTable)
      .then((exist) => exist && this._freeLock());
  }
  // Creates a new migration, with a given name.
  make(name, config) {
    this.config = getMergedConfig(config, this.config);
    return this.generator.make(name, this.config);
  }
  _disableProcessing() {
    if (this.knex.disableProcessing) {
      this.knex.disableProcessing();
    }
  }
  // Atomically flips is_locked 0 -> 1; throws when the row was already locked
  // (the update then affects no rows).
  _lockMigrations(trx) {
    const tableName = getLockTableName(this.config.tableName);
    return getTable(this.knex, tableName, this.config.schemaName)
      .transacting(trx)
      .where('is_locked', '=', 0)
      .update({ is_locked: 1 })
      .then((rowCount) => {
        if (rowCount !== 1) {
          throw new Error('Migration table is already locked');
        }
      });
  }
  // Acquires the migration lock, reusing `trx` when given; any failure is
  // re-thrown as a LockError so callers know not to free the lock.
  _getLock(trx) {
    const transact = trx ? (fn) => fn(trx) : (fn) => this.knex.transaction(fn);
    return transact((trx) => {
      return this._lockMigrations(trx);
    }).catch((err) => {
      throw new LockError(err.message);
    });
  }
  _freeLock(trx = this.knex) {
    const tableName = getLockTableName(this.config.tableName);
    return getTable(trx, tableName, this.config.schemaName).update({
      is_locked: 0,
    });
  }
  // Run a batch of current migrations, in sequence.
  _runBatch(migrations, direction, trx) {
    return (
      this._getLock(trx)
        // When there is a wrapping transaction, some migrations
        // could have been done while waiting for the lock:
        .then(() =>
          trx
            ? migrationListResolver.listCompleted(
                this.config.tableName,
                this.config.schemaName,
                trx
              )
            : []
        )
        .then(
          (completed) =>
            (migrations = getNewMigrations(
              this.config.migrationSource,
              migrations,
              completed
            ))
        )
        .then(() =>
          Promise.all(
            migrations.map(this._validateMigrationStructure.bind(this))
          )
        )
        .then(() => this._latestBatchNumber(trx))
        .then((batchNo) => {
          if (direction === 'up') batchNo++;
          return batchNo;
        })
        .then((batchNo) => {
          return this._waterfallBatch(batchNo, migrations, direction, trx);
        })
        .then(async (res) => {
          await this._freeLock(trx);
          return res;
        })
        .catch(async (error) => {
          let cleanupReady = Promise.resolve();
          if (error instanceof LockError) {
            // If locking error do not free the lock.
            this.knex.client.logger.warn(
              `Can't take lock to run migrations: ${error.message}`
            );
            this.knex.client.logger.warn(
              // FIX: this message was corrupted by an automated
              // require-transform ("rows = require(migrations lock");
              // restore the intended wording.
              'If you are sure migrations are not running you can release the ' +
                'lock manually by deleting all the rows from migrations lock ' +
                'table: ' +
                getLockTableNameWithSchema(
                  this.config.tableName,
                  this.config.schemaName
                )
            );
          } else {
            if (this._activeMigration.fileName) {
              this.knex.client.logger.warn(
                `migration file "${this._activeMigration.fileName}" failed`
              );
            }
            this.knex.client.logger.warn(
              `migration failed with error: ${error.message}`
            );
            // If the error was not due to a locking issue, then remove the lock.
            cleanupReady = this._freeLock(trx);
          }
          try {
            await cleanupReady;
            // eslint-disable-next-line no-empty
          } catch (e) {}
          throw error;
        })
    );
  }
  // Validates some migrations by requiring and checking for an `up` and `down`
  // function.
  _validateMigrationStructure(migration) {
    const migrationName = this.config.migrationSource.getMigrationName(
      migration
    );
    const migrationContent = this.config.migrationSource.getMigration(
      migration
    );
    if (
      typeof migrationContent.up !== 'function' ||
      typeof migrationContent.down !== 'function'
    ) {
      throw new Error(
        `Invalid migration: ${migrationName} must have both an up and down function`
      );
    }
    return migration;
  }
  // Get the last batch of migrations, by name, ordered by insert id in reverse
  // order.
  _getLastBatch([allMigrations]) {
    const { tableName, schemaName } = this.config;
    return getTable(this.knex, tableName, schemaName)
      .where('batch', function(qb) {
        qb.max('batch').from(getTableName(tableName, schemaName));
      })
      .orderBy('id', 'desc')
      .then((migrations) =>
        Promise.all(
          migrations.map((migration) => {
            return allMigrations.find((entry) => {
              return (
                this.config.migrationSource.getMigrationName(entry) ===
                migration.name
              );
            });
          })
        )
      );
  }
  // Returns the latest batch number.
  _latestBatchNumber(trx = this.knex) {
    return trx
      .from(getTableName(this.config.tableName, this.config.schemaName))
      .max('batch as max_batch')
      .then((obj) => obj[0].max_batch || 0);
  }
  // If transaction config for a single migration is defined, use that.
  // Otherwise, rely on the common config. This allows enabling/disabling
  // transaction for a single migration at will, regardless of the common
  // config.
  _useTransaction(migrationContent, allTransactionsDisabled) {
    const singleTransactionValue = get(migrationContent, 'config.transaction');
    return isBoolean(singleTransactionValue)
      ? singleTransactionValue
      : !allTransactionsDisabled;
  }
  // Runs a batch of `migrations` in a specified `direction`, saving the
  // appropriate database information as the migrations are run.
  _waterfallBatch(batchNo, migrations, direction, trx) {
    const trxOrKnex = trx || this.knex;
    const { tableName, schemaName, disableTransactions } = this.config;
    let current = Promise.resolve();
    const log = [];
    each(migrations, (migration) => {
      const name = this.config.migrationSource.getMigrationName(migration);
      this._activeMigration.fileName = name;
      const migrationContent = this.config.migrationSource.getMigration(
        migration
      );
      // We're going to run each of the migrations in the current "up".
      current = current
        .then(() => {
          this._activeMigration.fileName = name;
          if (
            !trx &&
            this._useTransaction(migrationContent, disableTransactions)
          ) {
            this.knex.enableProcessing();
            return this._transaction(
              this.knex,
              migrationContent,
              direction,
              name
            );
          }
          trxOrKnex.enableProcessing();
          return checkPromise(
            this.knex.client.logger,
            migrationContent[direction](trxOrKnex),
            name
          );
        })
        .then(() => {
          trxOrKnex.disableProcessing();
          this.knex.disableProcessing();
          log.push(name);
          // Record (or remove) the bookkeeping row for this migration.
          if (direction === 'up') {
            return trxOrKnex.into(getTableName(tableName, schemaName)).insert({
              name,
              batch: batchNo,
              migration_time: new Date(),
            });
          }
          if (direction === 'down') {
            return trxOrKnex
              .from(getTableName(tableName, schemaName))
              .where({ name })
              .del();
          }
        });
    });
    return current.then(() => [batchNo, log]);
  }
  // Runs a single migration inside its own transaction.
  _transaction(knex, migrationContent, direction, name) {
    return knex.transaction((trx) => {
      return checkPromise(
        knex.client.logger,
        migrationContent[direction](trx),
        name,
        () => {
          trx.commit();
        }
      );
    });
  }
}
// Validates that migrations are present in the appropriate directories.
// Throws when a completed migration (recorded in the DB) no longer has a
// corresponding file on disk.
function validateMigrationList(migrationSource, migrations) {
  const [all, completed] = migrations;
  const missing = getMissingMigrations(migrationSource, completed, all);
  if (missing.length > 0) {
    throw new Error(
      `The migration directory is corrupt, the following files are missing: ${missing.join(
        ', '
      )}`
    );
  }
}
// Completed migration names (from the DB) that no longer match any migration
// known to the source: these files have been deleted or renamed.
function getMissingMigrations(migrationSource, completed, all) {
  return completed.filter(
    (completedMigration) =>
      !all.some(
        (migration) =>
          completedMigration === migrationSource.getMigrationName(migration)
      )
  );
}
// Migrations known to the source that are not yet recorded as completed,
// i.e. the ones that still need to run.
function getNewMigrations(migrationSource, all, completed) {
  return all.filter(
    (migration) =>
      !completed.some(
        (completedMigration) =>
          completedMigration === migrationSource.getMigrationName(migration)
      )
  );
}
// Warns when a migration's up/down did not return a thenable; in that case an
// optional commit callback is invoked so a wrapping transaction still commits.
// Always passes the original return value straight through.
function checkPromise(logger, migrationPromise, name, commitFn) {
  const isThenable =
    Boolean(migrationPromise) && typeof migrationPromise.then === 'function';
  if (!isThenable) {
    logger.warn(`migration ${name} did not return a promise`);
    if (commitFn) {
      commitFn();
    }
  }
  return migrationPromise;
}
module.exports = {
Migrator,
};

53
node_modules/knex/lib/migrate/configuration-merger.js generated vendored Normal file
View File

@@ -0,0 +1,53 @@
const {
FsMigrations,
DEFAULT_LOAD_EXTENSIONS,
} = require('./sources/fs-migrations');
// Built-in migrator defaults. getMergedConfig() layers any previously merged
// config and then the user-supplied config on top of these; frozen so callers
// cannot mutate the shared baseline.
const CONFIG_DEFAULT = Object.freeze({
  extension: 'js',
  loadExtensions: DEFAULT_LOAD_EXTENSIONS,
  tableName: 'knex_migrations',
  schemaName: null,
  directory: './migrations',
  disableTransactions: false,
  disableMigrationsListValidation: false,
  sortDirsSeparately: false,
});
// Merges defaults, the previously merged config, and the user-specified
// config (in increasing precedence) into a fresh config object.
function getMergedConfig(config, currentConfig) {
  // config is the user specified config, mergedConfig has defaults and current config
  // applied to it.
  const mergedConfig = {
    ...CONFIG_DEFAULT,
    ...(currentConfig || {}),
    ...config,
  };
  // If user specifies any FS related config,
  // clear existing FsMigrations migrationSource
  const fsConfigSpecified =
    config &&
    (config.directory ||
      config.sortDirsSeparately !== undefined ||
      config.loadExtensions);
  if (fsConfigSpecified) {
    mergedConfig.migrationSource = null;
  }
  // If the user has not specified any configs, we need to
  // default to fs migrations to maintain compatibility
  if (!mergedConfig.migrationSource) {
    mergedConfig.migrationSource = new FsMigrations(
      mergedConfig.directory,
      mergedConfig.sortDirsSeparately,
      mergedConfig.loadExtensions
    );
  }
  return mergedConfig;
}
module.exports = {
getMergedConfig,
};

17
node_modules/knex/lib/migrate/migrate-stub.js generated vendored Normal file
View File

@@ -0,0 +1,17 @@
// Stub Migrate:
// Used for now in browser builds, where filesystem access isn't
// available. Every operation rejects with the same error.
function StubMigrate() {}
module.exports = StubMigrate;

const unsupported = async () => {
  throw new Error('Migrations are not supported');
};

StubMigrate.prototype = {
  make: unsupported,
  latest: unsupported,
  rollback: unsupported,
  currentVersion: unsupported,
  up: unsupported,
  down: unsupported,
};

View File

@@ -0,0 +1,40 @@
const Bluebird = require('bluebird');
const { getTableName } = require('./table-resolver');
const { ensureTable } = require('./table-creator');
// Lists all available migration versions, as a sorted array.
// Thin delegation to the configured migration source.
const listAll = (migrationSource, loadExtensions) =>
  migrationSource.getMigrations(loadExtensions);
// Lists all migrations that have been completed for the current db, as an
// array of names, ordered by insertion id. Creates the bookkeeping tables
// first if they do not exist yet.
async function listCompleted(tableName, schemaName, trxOrKnex) {
  await ensureTable(tableName, schemaName, trxOrKnex);
  const rows = await trxOrKnex
    .from(getTableName(tableName, schemaName))
    .orderBy('id')
    .select('name');
  return rows.map((row) => row.name);
}
// Gets the migration list from the migration directory specified in config, as well as
// the list of completed migrations to check what should be run.
// Resolves to a [allMigrations, completedNames] pair.
function listAllAndCompleted(config, trxOrKnex) {
  const allTask = listAll(config.migrationSource, config.loadExtensions);
  const completedTask = listCompleted(
    config.tableName,
    config.schemaName,
    trxOrKnex
  );
  return Bluebird.all([allTask, completedTask]);
}
module.exports = {
listAll,
listAllAndCompleted,
listCompleted,
};

98
node_modules/knex/lib/migrate/sources/fs-migrations.js generated vendored Normal file
View File

@@ -0,0 +1,98 @@
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const { sortBy, filter } = require('lodash');
const readDirAsync = promisify(fs.readdir);
// File extensions considered migration files when the user does not supply
// `loadExtensions` in the migrator config; frozen to keep the default shared
// list immutable.
const DEFAULT_LOAD_EXTENSIONS = Object.freeze([
  '.co',
  '.coffee',
  '.eg',
  '.iced',
  '.js',
  '.litcoffee',
  '.ls',
  '.ts',
]);
// Migration source backed by one or more filesystem directories.
class FsMigrations {
  /**
   * @param {string|string[]} migrationDirectories - directory (or list of
   *   directories) to scan, relative to the process cwd.
   * @param {boolean} sortDirsSeparately - when true, sort files within each
   *   directory instead of globally across all directories.
   * @param {string[]} [loadExtensions] - file extensions to load; defaults
   *   to DEFAULT_LOAD_EXTENSIONS.
   */
  constructor(migrationDirectories, sortDirsSeparately, loadExtensions) {
    this.sortDirsSeparately = sortDirsSeparately;
    this.migrationsPaths = Array.isArray(migrationDirectories)
      ? migrationDirectories
      : [migrationDirectories];
    this.loadExtensions = loadExtensions || DEFAULT_LOAD_EXTENSIONS;
  }
  /**
   * Gets the migration names
   * @returns Promise<string[]>
   */
  getMigrations(loadExtensions) {
    // Read every configured migration directory in parallel.
    const dirReads = this.migrationsPaths.map((configDir) => {
      const absoluteDir = path.resolve(process.cwd(), configDir);
      return readDirAsync(absoluteDir).then((files) => ({
        files,
        configDir,
        absoluteDir,
      }));
    });
    return Promise.all(dirReads).then((directories) => {
      const migrations = [];
      for (const migrationDirectory of directories) {
        // When true, files inside the folder should be sorted
        const files = this.sortDirsSeparately
          ? migrationDirectory.files.sort()
          : migrationDirectory.files;
        for (const file of files) {
          migrations.push({ file, directory: migrationDirectory.configDir });
        }
      }
      // If sorted per-directory above, keep that order; otherwise sort all
      // migrations globally by file name.
      const ordered = this.sortDirsSeparately
        ? migrations
        : sortBy(migrations, 'file');
      return filterMigrations(this, ordered, loadExtensions || this.loadExtensions);
    });
  }
  getMigrationName(migration) {
    return migration.file;
  }
  getMigration(migration) {
    const absoluteDir = path.resolve(process.cwd(), migration.directory);
    return require(path.join(absoluteDir, migration.file));
  }
}
// Keeps only the migrations whose file extension appears in `loadExtensions`.
function filterMigrations(migrationSource, migrations, loadExtensions) {
  return migrations.filter((migration) => {
    const migrationName = migrationSource.getMigrationName(migration);
    return loadExtensions.includes(path.extname(migrationName));
  });
}
module.exports = {
DEFAULT_LOAD_EXTENSIONS,
FsMigrations,
};

13
node_modules/knex/lib/migrate/stub/coffee.stub generated vendored Normal file
View File

@@ -0,0 +1,13 @@
exports.up = (knex) ->
<% if (d.tableName) { %>
knex.schema.createTable "<%= d.tableName %>", (t) ->
t.increments()
t.timestamp()
<% } %>
exports.down = (knex) ->
<% if (d.tableName) { %>
knex.schema.dropTable "<%= d.tableName %>"
<% } %>

14
node_modules/knex/lib/migrate/stub/eg.stub generated vendored Normal file
View File

@@ -0,0 +1,14 @@
provide: up, down
up = (knex) ->
<% if (d.tableName) { %>
knex.schema.createTable "<%= d.tableName %>": t ->
t.increments()
t.timestamp()
<% } %>
down = (knex) ->
<% if (d.tableName) { %>
knex.schema.dropTable("<%= d.tableName %>")
<% } %>

15
node_modules/knex/lib/migrate/stub/js.stub generated vendored Normal file
View File

@@ -0,0 +1,15 @@
exports.up = function(knex) {
<% if (d.tableName) { %>
return knex.schema.createTable("<%= d.tableName %>", function(t) {
t.increments();
t.timestamp();
});
<% } %>
};
exports.down = function(knex) {
<% if (d.tableName) { %>
return knex.schema.dropTable("<%= d.tableName %>");
<% } %>
};

View File

@@ -0,0 +1,34 @@
# Update with your config settings.
module.exports =
development:
client: 'sqlite3'
connection:
filename: './dev.sqlite3'
migrations:
tableName: 'knex_migrations'
staging:
client: 'postgresql'
connection:
database: 'my_db'
user: 'username'
password: 'password'
pool:
min: 2
max: 10
migrations:
tableName: 'knex_migrations'
production:
client: 'postgresql'
connection:
database: 'my_db'
user: 'username'
password: 'password'
pool:
min: 2
max: 10
migrations:
tableName: 'knex_migrations'

43
node_modules/knex/lib/migrate/stub/knexfile-eg.stub generated vendored Normal file
View File

@@ -0,0 +1,43 @@
;; Update with your config settings.
module.exports = {
development = {
client = 'sqlite3'
connection = {
filename = './dev.sqlite3'
}
migrations = {
tableName = 'knex_migrations'
}
}
staging = {
client = 'postgresql'
connection = {
database = 'my_db'
user = 'username'
password = 'password'
}
pool = {
min = 2
max = 10
}
migrations = {
tableName = 'knex_migrations'
}
}
production = {
client = 'postgresql'
connection = {
database = 'my_db'
user = 'username'
password = 'password'
}
pool = {
min = 2
max = 10
}
migrations = {
tableName = 'knex_migrations'
}
}
}

44
node_modules/knex/lib/migrate/stub/knexfile-js.stub generated vendored Normal file
View File

@@ -0,0 +1,44 @@
// Update with your config settings.
module.exports = {
development: {
client: 'sqlite3',
connection: {
filename: './dev.sqlite3'
}
},
staging: {
client: 'postgresql',
connection: {
database: 'my_db',
user: 'username',
password: 'password'
},
pool: {
min: 2,
max: 10
},
migrations: {
tableName: 'knex_migrations'
}
},
production: {
client: 'postgresql',
connection: {
database: 'my_db',
user: 'username',
password: 'password'
},
pool: {
min: 2,
max: 10
},
migrations: {
tableName: 'knex_migrations'
}
}
};

35
node_modules/knex/lib/migrate/stub/knexfile-ls.stub generated vendored Normal file
View File

@@ -0,0 +1,35 @@
# Update with your config settings.
module.exports =
development:
client: 'sqlite3'
connection:
filename: './dev.sqlite3'
migrations:
tableName: 'knex_migrations'
staging:
client: 'postgresql'
connection:
database: 'my_db'
user: 'username'
password: 'password'
pool:
min: 2
max: 10
migrations:
tableName: 'knex_migrations'
production:
client: 'postgresql'
connection:
database: 'my_db'
user: 'username'
password: 'password'
pool:
min: 2
max: 10
migrations:
tableName: 'knex_migrations'

44
node_modules/knex/lib/migrate/stub/knexfile-ts.stub generated vendored Normal file
View File

@@ -0,0 +1,44 @@
// Update with your config settings.
module.exports = {
development: {
client: "sqlite3",
connection: {
filename: "./dev.sqlite3"
}
},
staging: {
client: "postgresql",
connection: {
database: "my_db",
user: "username",
password: "password"
},
pool: {
min: 2,
max: 10
},
migrations: {
tableName: "knex_migrations"
}
},
production: {
client: "postgresql",
connection: {
database: "my_db",
user: "username",
password: "password"
},
pool: {
min: 2,
max: 10
},
migrations: {
tableName: "knex_migrations"
}
}
};

14
node_modules/knex/lib/migrate/stub/ls.stub generated vendored Normal file
View File

@@ -0,0 +1,14 @@
exports.up = (knex, Promise) ->
<% if (d.tableName) { %>
knex.schema.create-table "<%= d.tableName %>", (t) ->
t.increments!
t.timestamp!
<% } %>
exports.down = (knex, Promise) ->
<% if (d.tableName) { %>
knex.schema.drop-table "<%= d.tableName %>"
<% } %>

21
node_modules/knex/lib/migrate/stub/ts.stub generated vendored Normal file
View File

@@ -0,0 +1,21 @@
import * as Knex from "knex";
<% if (d.tableName) { %>
export async function up(knex: Knex): Promise<Knex.SchemaBuilder> {
return knex.schema.createTable("<%= d.tableName %>", (t: Knex.AlterTableBuilder) => {
t.increments();
t.timestamps();
});
}
<% } else { %>
export async function up(knex: Knex): Promise<any> {
}
<% } %>
<% if (d.tableName) { %>
export async function down(knex: Knex): Promise<Knex.SchemaBuilder> {
return knex.schema.dropTable("<%= d.tableName %>");
}
<% } else { %>
export async function down(knex: Knex): Promise<any> {
}
<% } %>

67
node_modules/knex/lib/migrate/table-creator.js generated vendored Normal file
View File

@@ -0,0 +1,67 @@
const {
getTable,
getLockTableName,
getLockTableNameWithSchema,
getTableName,
} = require('./table-resolver');
// Ensures the migrations table, its companion lock table, and the single
// lock row (is_locked = 0) all exist, creating whichever are missing.
async function ensureTable(tableName, schemaName, trxOrKnex) {
  const lockTable = getLockTableName(tableName);
  const lockTableWithSchema = getLockTableNameWithSchema(tableName, schemaName);
  const tableExists = await getSchemaBuilder(trxOrKnex, schemaName).hasTable(
    tableName
  );
  if (!tableExists) {
    await _createMigrationTable(tableName, schemaName, trxOrKnex);
  }
  const lockTableExists = await getSchemaBuilder(
    trxOrKnex,
    schemaName
  ).hasTable(lockTable);
  if (!lockTableExists) {
    await _createMigrationLockTable(lockTable, schemaName, trxOrKnex);
  }
  const lockRows = await getTable(trxOrKnex, lockTable, schemaName).select('*');
  // Seed the lock row only when the table is empty (resolves falsy otherwise,
  // matching the original promise chain's final value).
  return (
    lockRows.length === 0 &&
    (await trxOrKnex.into(lockTableWithSchema).insert({ is_locked: 0 }))
  );
}
// Creates the bookkeeping table that records each applied migration
// (name, batch number, and timestamp).
function _createMigrationTable(tableName, schemaName, trxOrKnex) {
  const builder = getSchemaBuilder(trxOrKnex, schemaName);
  return builder.createTable(getTableName(tableName), (t) => {
    t.increments();
    t.string('name');
    t.integer('batch');
    t.timestamp('migration_time');
  });
}
// Creates the single-row table used as a mutex while migrations run.
function _createMigrationLockTable(tableName, schemaName, trxOrKnex) {
  const builder = getSchemaBuilder(trxOrKnex, schemaName);
  return builder.createTable(tableName, (t) => {
    t.increments('index').primary();
    t.integer('is_locked');
  });
}
//Get schema-aware schema builder for a given schema name
function getSchemaBuilder(trxOrKnex, schemaName) {
  if (schemaName) {
    return trxOrKnex.schema.withSchema(schemaName);
  }
  return trxOrKnex.schema;
}
module.exports = {
ensureTable,
getSchemaBuilder,
};

27
node_modules/knex/lib/migrate/table-resolver.js generated vendored Normal file
View File

@@ -0,0 +1,27 @@
//Get schema-aware table name
function getTableName(tableName, schemaName) {
  if (!schemaName) {
    return tableName;
  }
  return `${schemaName}.${tableName}`;
}
//Get schema-aware query builder for a given table and schema name
function getTable(trxOrKnex, tableName, schemaName) {
  const builder = trxOrKnex(tableName);
  return schemaName ? builder.withSchema(schemaName) : builder;
}
// Name of the companion lock table for a given migrations table.
function getLockTableName(tableName) {
  return `${tableName}_lock`;
}
// Lock table name, schema-qualified when a schema is configured.
function getLockTableNameWithSchema(tableName, schemaName) {
  const lockTable = getLockTableName(tableName);
  return schemaName ? `${schemaName}.${lockTable}` : lockTable;
}
module.exports = {
getLockTableName,
getLockTableNameWithSchema,
getTable,
getTableName,
};

1308
node_modules/knex/lib/query/builder.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

888
node_modules/knex/lib/query/compiler.js generated vendored Normal file
View File

@@ -0,0 +1,888 @@
// Query Compiler
// -------
const helpers = require('../helpers');
const Raw = require('../raw');
const QueryBuilder = require('./builder');
const JoinClause = require('./joinclause');
const debug = require('debug');
const {
assign,
bind,
compact,
groupBy,
isEmpty,
isString,
isUndefined,
map,
omitBy,
reduce,
has,
} = require('lodash');
const uuid = require('uuid');
const debugBindings = debug('knex:bindings');
// The "QueryCompiler" takes all of the query statements which
// have been gathered in the "QueryBuilder" and turns them into a
// properly formatted / bound query string.
// The "QueryCompiler" constructor snapshots the builder state needed to turn
// a QueryBuilder's gathered statements into SQL.
function QueryCompiler(client, builder) {
  this.client = client;
  // Which compile entry point toSQL() will invoke by default.
  this.method = builder._method || 'select';
  this.options = builder._options;
  this.single = builder._single;
  this.timeout = builder._timeout || false;
  this.cancelOnTimeout = builder._cancelOnTimeout || false;
  // Statements bucketed by their 'grouping' key ('columns', 'where', ...),
  // consumed by the per-component compiler methods.
  this.grouped = groupBy(builder._statements, 'grouping');
  this.formatter = client.formatter(builder);
}
// Ordered clause components of a select statement; select() calls the
// identically-named compiler method for each and joins the results.
const components = [
  'columns',
  'join',
  'where',
  'union',
  'group',
  'having',
  'order',
  'limit',
  'offset',
  'lock',
  'waitMode',
];
assign(QueryCompiler.prototype, {
// Used when the insert call is empty.
_emptyInsertValue: 'default values',
// Collapse the builder into a single object
toSQL(method, tz) {
this._undefinedInWhereClause = false;
this.undefinedBindingsInfo = [];
method = method || this.method;
const val = this[method]() || '';
const query = {
method,
options: reduce(this.options, assign, {}),
timeout: this.timeout,
cancelOnTimeout: this.cancelOnTimeout,
bindings: this.formatter.bindings || [],
__knexQueryUid: uuid.v1(),
};
Object.defineProperties(query, {
toNative: {
value: () => {
return {
sql: this.client.positionBindings(query.sql),
bindings: this.client.prepBindings(query.bindings),
};
},
enumerable: false,
},
});
if (isString(val)) {
query.sql = val;
} else {
assign(query, val);
}
if (method === 'select' || method === 'first') {
if (this.single.as) {
query.as = this.single.as;
}
}
if (this._undefinedInWhereClause) {
debugBindings(query.bindings);
throw new Error(
`Undefined binding(s) detected when compiling ` +
`${method.toUpperCase()}. Undefined column(s): [${this.undefinedBindingsInfo.join(
', '
)}] query: ${query.sql}`
);
}
return query;
},
  // Compiles the `select` statement, or nested sub-selects by calling each of
  // the component compilers, trimming out the empties, and returning a
  // generated query string.
  select() {
    // `with` clauses must come first; `components` is the module-level list
    // of compiler method names (columns, join, where, ...) run in order.
    let sql = this.with();
    const statements = components.map((component) => this[component](this));
    sql += compact(statements).join(' ');
    return sql;
  },
  // Compiles a `pluck` query: an ordinary select plus the name of the single
  // column the runner should extract from each row. For dotted identifiers
  // (e.g. "t.col") only the final segment is used as the pluck key.
  pluck() {
    let toPluck = this.single.pluck;
    if (toPluck.indexOf('.') !== -1) {
      toPluck = toPluck.split('.').slice(-1)[0];
    }
    return {
      sql: this.select(),
      pluck: toPluck,
    };
  },
  // Compiles an "insert" query, allowing for multiple
  // inserts using a single query statement.
  insert() {
    const insertValues = this.single.insert || [];
    let sql = this.with() + `insert into ${this.tableName} `;
    // An empty array of rows compiles to an empty statement; an empty
    // object falls back to the dialect's "default values" clause.
    if (Array.isArray(insertValues)) {
      if (insertValues.length === 0) {
        return '';
      }
    } else if (typeof insertValues === 'object' && isEmpty(insertValues)) {
      return sql + this._emptyInsertValue;
    }
    // _prepInsert returns a raw SQL string for Raw/function inserts,
    // otherwise a { columns, values } pair with key-aligned rows.
    const insertData = this._prepInsert(insertValues);
    if (typeof insertData === 'string') {
      sql += insertData;
    } else {
      if (insertData.columns.length) {
        sql += `(${this.formatter.columnize(insertData.columns)}`;
        sql += ') values (';
        let i = -1;
        // Each row becomes its own parenthesized tuple: (..), (..), ...
        while (++i < insertData.values.length) {
          if (i !== 0) sql += '), (';
          sql += this.formatter.parameterize(
            insertData.values[i],
            this.client.valueForUndefined
          );
        }
        sql += ')';
      } else if (insertValues.length === 1 && insertValues[0]) {
        // A single row with no usable columns: insert default values.
        sql += this._emptyInsertValue;
      } else {
        sql = '';
      }
    }
    return sql;
  },
  // Compiles the "update" query.
  update() {
    // Make sure tableName is processed by the formatter first.
    const withSQL = this.with();
    const { tableName } = this;
    const updateData = this._prepUpdate(this.single.update);
    const wheres = this.where();
    return (
      withSQL +
      `update ${this.single.only ? 'only ' : ''}${tableName}` +
      ' set ' +
      updateData.join(', ') +
      (wheres ? ` ${wheres}` : '')
    );
  },
  // Compiles the columns in the query, specifying if an item was distinct.
  columns() {
    let distinctClause = '';
    // A pure-union query has no select list of its own.
    if (this.onlyUnions()) return '';
    const columns = this.grouped.columns || [];
    let i = -1,
      sql = [];
    if (columns) {
      while (++i < columns.length) {
        const stmt = columns[i];
        if (stmt.distinct) distinctClause = 'distinct ';
        // `distinct on (...)` replaces the plain distinct clause and the
        // statement contributes no select expression of its own.
        if (stmt.distinctOn) {
          distinctClause = this.distinctOn(stmt.value);
          continue;
        }
        if (stmt.type === 'aggregate') {
          sql.push(...this.aggregate(stmt));
        } else if (stmt.type === 'aggregateRaw') {
          sql.push(this.aggregateRaw(stmt));
        } else if (stmt.value && stmt.value.length > 0) {
          sql.push(this.formatter.columnize(stmt.value));
        }
      }
    }
    // No explicit columns means select-all.
    if (sql.length === 0) sql = ['*'];
    return (
      `select ${distinctClause}` +
      sql.join(', ') +
      (this.tableName
        ? ` from ${this.single.only ? 'only ' : ''}${this.tableName}`
        : '')
    );
  },
  // Builds one or more aggregate expressions (count, sum, ...) for `stmt`.
  // Handles the input shapes of stmt.value: a single column name (with an
  // optional inline " as alias"), an array of columns, and an object of
  // { alias: column } or { alias: [columns] } pairs.
  _aggregate(stmt, { aliasSeparator = ' as ', distinctParentheses } = {}) {
    const value = stmt.value;
    const method = stmt.method;
    const distinct = stmt.aggregateDistinct ? 'distinct ' : '';
    const wrap = (identifier) => this.formatter.wrap(identifier);
    const addAlias = (value, alias) => {
      if (alias) {
        return value + aliasSeparator + wrap(alias);
      }
      return value;
    };
    const aggregateArray = (value, alias) => {
      let columns = value.map(wrap).join(', ');
      if (distinct) {
        // Some dialects want `distinct(a, b)`, others `distinct a, b`.
        const openParen = distinctParentheses ? '(' : ' ';
        const closeParen = distinctParentheses ? ')' : '';
        columns = distinct.trim() + openParen + columns + closeParen;
      }
      const aggregated = `${method}(${columns})`;
      return addAlias(aggregated, alias);
    };
    const aggregateString = (value, alias) => {
      const aggregated = `${method}(${distinct + wrap(value)})`;
      return addAlias(aggregated, alias);
    };
    if (Array.isArray(value)) {
      return [aggregateArray(value)];
    }
    if (typeof value === 'object') {
      if (stmt.alias) {
        throw new Error('When using an object explicit alias can not be used');
      }
      // Each { alias: column(s) } entry yields its own aggregate expression.
      return Object.entries(value).map(([alias, column]) => {
        if (Array.isArray(column)) {
          return aggregateArray(column, alias);
        }
        return aggregateString(column, alias);
      });
    }
    // Allows us to specify an alias for the aggregate types.
    const splitOn = value.toLowerCase().indexOf(' as ');
    let column = value;
    let { alias } = stmt;
    if (splitOn !== -1) {
      column = value.slice(0, splitOn);
      if (alias) {
        throw new Error(`Found multiple aliases for same column: ${column}`);
      }
      alias = value.slice(splitOn + 4);
    }
    return [aggregateString(column, alias)];
  },
  // Default aggregate compilation; dialect compilers override the options
  // they pass to _aggregate.
  aggregate(stmt) {
    return this._aggregate(stmt);
  },
  // Compiles an aggregate over a raw expression, e.g. count(distinct <raw>).
  aggregateRaw(stmt) {
    const distinct = stmt.aggregateDistinct ? 'distinct ' : '';
    return `${stmt.method}(${distinct + this.formatter.unwrapRaw(stmt.value)})`;
  },
  // Compiles all each of the `join` clauses on the query,
  // including any nested join queries.
  join() {
    let sql = '';
    let i = -1;
    const joins = this.grouped.join;
    if (!joins) return '';
    while (++i < joins.length) {
      const join = joins[i];
      const table = join.schema ? `${join.schema}.${join.table}` : join.table;
      if (i > 0) sql += ' ';
      if (join.joinType === 'raw') {
        sql += this.formatter.unwrapRaw(join.table);
      } else {
        sql += join.joinType + ' join ' + this.formatter.wrap(table);
        let ii = -1;
        // The first clause takes `on`/`using`; later ones are joined with
        // their boolean connective ('and'/'or').
        while (++ii < join.clauses.length) {
          const clause = join.clauses[ii];
          if (ii > 0) {
            sql += ` ${clause.bool} `;
          } else {
            sql += ` ${clause.type === 'onUsing' ? 'using' : 'on'} `;
          }
          const val = this[clause.type].call(this, clause);
          if (val) {
            sql += val;
          }
        }
      }
    }
    return sql;
  },
  // Compiles `<col> [not] between ? and ?` for a join's on-clause.
  onBetween(statement) {
    return (
      this.formatter.wrap(statement.column) +
      ' ' +
      this._not(statement, 'between') +
      ' ' +
      map(statement.value, bind(this.formatter.parameter, this.formatter)).join(
        ' and '
      )
    );
  },
  // Compiles `<col> is [not] null` for a join's on-clause.
  onNull(statement) {
    return (
      this.formatter.wrap(statement.column) +
      ' is ' +
      this._not(statement, 'null')
    );
  },
  // Compiles `[not] exists (<subquery>)` for a join's on-clause.
  onExists(statement) {
    return (
      this._not(statement, 'exists') +
      ' (' +
      this.formatter.rawOrFn(statement.value) +
      ')'
    );
  },
  // Compiles `<col> [not] in (...)`; array columns use the tuple form.
  onIn(statement) {
    if (Array.isArray(statement.column)) return this.multiOnIn(statement);
    return (
      this.formatter.wrap(statement.column) +
      ' ' +
      this._not(statement, 'in ') +
      this.wrap(this.formatter.parameterize(statement.value))
    );
  },
  // Compiles `(col1, col2) [not] in ((..),(..))` for a multi-column on-in.
  multiOnIn(statement) {
    let i = -1,
      sql = `(${this.formatter.columnize(statement.column)}) `;
    sql += this._not(statement, 'in ') + '((';
    while (++i < statement.value.length) {
      if (i !== 0) sql += '),(';
      sql += this.formatter.parameterize(statement.value[i]);
    }
    return sql + '))';
  },
  // Compiles all `where` statements on the query.
  where() {
    const wheres = this.grouped.where;
    if (!wheres) return;
    const sql = [];
    let i = -1;
    while (++i < wheres.length) {
      const stmt = wheres[i];
      // Track undefined bindings so toSQL (upstream) can raise a precise
      // error naming the offending columns instead of a cryptic one.
      if (
        Object.prototype.hasOwnProperty.call(stmt, 'value') &&
        helpers.containsUndefined(stmt.value)
      ) {
        this.undefinedBindingsInfo.push(stmt.column);
        this._undefinedInWhereClause = true;
      }
      const val = this[stmt.type](stmt);
      if (val) {
        // The first fragment is prefixed with `where`; later fragments are
        // joined by their boolean connective ('and'/'or').
        if (sql.length === 0) {
          sql[0] = 'where';
        } else {
          sql.push(stmt.bool);
        }
        sql.push(val);
      }
    }
    return sql.length > 1 ? sql.join(' ') : '';
  },
  // Compiles the `group by` clause.
  group() {
    return this._groupsOrders('group');
  },
  // Compiles the `order by` clause.
  order() {
    return this._groupsOrders('order');
  },
// Compiles the `having` statements.
having() {
const havings = this.grouped.having;
if (!havings) return '';
const sql = ['having'];
for (let i = 0, l = havings.length; i < l; i++) {
const s = havings[i];
const val = this[s.type](s);
if (val) {
if (sql.length === 0) {
sql[0] = 'where';
}
if (sql.length > 1 || (sql.length === 1 && sql[0] !== 'having')) {
sql.push(s.bool);
}
sql.push(val);
}
}
return sql.length > 1 ? sql.join(' ') : '';
},
  // Compiles a raw `having` fragment, with an optional `not ` prefix.
  havingRaw(statement) {
    return this._not(statement, '') + this.formatter.unwrapRaw(statement.value);
  },
  // Compiles a grouped (callback) having. rawOrFn compiles the callback as
  // a where-grouping whose SQL starts with "where "; slice(6) strips that
  // prefix so only the parenthesized conditions remain.
  havingWrapped(statement) {
    const val = this.formatter.rawOrFn(statement.value, 'where');
    return (val && this._not(statement, '') + '(' + val.slice(6) + ')') || '';
  },
havingBasic(statement) {
return (
this._not(statement, '') +
this.formatter.wrap(statement.column) +
' ' +
this.formatter.operator(statement.operator) +
' ' +
this.formatter.parameter(statement.value)
);
},
  // Compiles `<col> is [not] null` in a having clause.
  havingNull(statement) {
    return (
      this.formatter.wrap(statement.column) +
      ' is ' +
      this._not(statement, 'null')
    );
  },
  // Compiles `[not] exists (<subquery>)` in a having clause.
  havingExists(statement) {
    return (
      this._not(statement, 'exists') +
      ' (' +
      this.formatter.rawOrFn(statement.value) +
      ')'
    );
  },
  // Compiles `<col> [not] between ? and ?` in a having clause.
  havingBetween(statement) {
    return (
      this.formatter.wrap(statement.column) +
      ' ' +
      this._not(statement, 'between') +
      ' ' +
      map(statement.value, bind(this.formatter.parameter, this.formatter)).join(
        ' and '
      )
    );
  },
  // Compiles `<col> [not] in (...)`; array columns use the tuple form.
  havingIn(statement) {
    if (Array.isArray(statement.column)) return this.multiHavingIn(statement);
    return (
      this.formatter.wrap(statement.column) +
      ' ' +
      this._not(statement, 'in ') +
      this.wrap(this.formatter.parameterize(statement.value))
    );
  },
multiHavingIn(statement) {
let i = -1,
sql = `(${this.formatter.columnize(statement.column)}) `;
sql += this._not(statement, 'in ') + '((';
while (++i < statement.value.length) {
if (i !== 0) sql += '),(';
sql += this.formatter.parameterize(statement.value[i]);
}
return sql + '))';
},
  // Compile the "union" queries attached to the main query.
  union() {
    const onlyUnions = this.onlyUnions();
    const unions = this.grouped.union;
    if (!unions) return '';
    let sql = '';
    for (let i = 0, l = unions.length; i < l; i++) {
      const union = unions[i];
      if (i > 0) sql += ' ';
      // A pure-union query omits the leading clause ('union'/'union all'/
      // 'intersect') on its first member — there is no base select.
      if (i > 0 || !onlyUnions) sql += union.clause + ' ';
      const statement = this.formatter.rawOrFn(union.value);
      if (statement) {
        if (union.wrap) sql += '(';
        sql += statement;
        if (union.wrap) sql += ')';
      }
    }
    return sql;
  },
  // If we haven't specified any columns or a `tableName`, we're assuming this
  // is only being used for unions. (Result is truthy/falsy, not strictly
  // boolean when there are no unions.)
  onlyUnions() {
    return !this.grouped.columns && this.grouped.union && !this.tableName;
  },
  // Compiles the `limit` clause; a limit of 0 is a valid, explicit limit.
  limit() {
    const noLimit = !this.single.limit && this.single.limit !== 0;
    if (noLimit) return '';
    return `limit ${this.formatter.parameter(this.single.limit)}`;
  },
  // Compiles the `offset` clause (an offset of 0 compiles to nothing).
  offset() {
    if (!this.single.offset) return '';
    return `offset ${this.formatter.parameter(this.single.offset)}`;
  },
// Compiles a `delete` query.
del() {
// Make sure tableName is processed by the formatter first.
const { tableName } = this;
const withSQL = this.with();
const wheres = this.where();
return (
withSQL +
`delete from ${this.single.only ? 'only ' : ''}${tableName}` +
(wheres ? ` ${wheres}` : '')
);
},
  // Compiles a `truncate` query.
  truncate() {
    return `truncate ${this.tableName}`;
  },
  // Compiles the "locks": dispatches to the dialect method named by
  // this.single.lock (forShare / forUpdate).
  lock() {
    if (this.single.lock) {
      return this[this.single.lock]();
    }
  },
  // Compiles the wait mode on the locks (skipLocked / noWait).
  waitMode() {
    if (this.single.waitMode) {
      return this[this.single.waitMode]();
    }
  },
  // Fail on unsupported databases
  skipLocked() {
    throw new Error(
      '.skipLocked() is currently only supported on MySQL 8.0+ and PostgreSQL 9.5+'
    );
  },
  // Fail on unsupported databases
  noWait() {
    throw new Error(
      '.noWait() is currently only supported on MySQL 8.0+, MariaDB 10.3.0+ and PostgreSQL 9.5+'
    );
  },
  // Fail on unsupported databases; supporting dialects override this.
  distinctOn(value) {
    throw new Error('.distinctOn() is currently only supported on PostgreSQL');
  },
  // On Clause
  // ------
  // Compiles a grouped on-clause: the user callback is replayed against a
  // fresh JoinClause and the resulting conditions are parenthesized.
  onWrapped(clause) {
    const self = this;
    const wrapJoin = new JoinClause();
    clause.value.call(wrapJoin, wrapJoin);
    let sql = '';
    wrapJoin.clauses.forEach(function(wrapClause, ii) {
      if (ii > 0) {
        sql += ` ${wrapClause.bool} `;
      }
      const val = self[wrapClause.type](wrapClause);
      if (val) {
        sql += val;
      }
    });
    if (sql.length) {
      return `(${sql})`;
    }
    return '';
  },
  // Compiles `<col> <op> <col>`; both sides are identifiers.
  onBasic(clause) {
    return (
      this.formatter.wrap(clause.column) +
      ' ' +
      this.formatter.operator(clause.operator) +
      ' ' +
      this.formatter.wrap(clause.value)
    );
  },
  // Compiles `<col> <op> ?`; the right side is a bound value.
  onVal(clause) {
    return (
      this.formatter.wrap(clause.column) +
      ' ' +
      this.formatter.operator(clause.operator) +
      ' ' +
      this.formatter.parameter(clause.value)
    );
  },
  // Compiles a raw on-clause fragment.
  onRaw(clause) {
    return this.formatter.unwrapRaw(clause.value);
  },
  // Compiles the column list of a `using (...)` join.
  onUsing(clause) {
    return '(' + this.formatter.columnize(clause.column) + ')';
  },
// Where Clause
// ------
whereIn(statement) {
let columns = null;
if (Array.isArray(statement.column)) {
columns = `(${this.formatter.columnize(statement.column)})`;
} else {
columns = this.formatter.wrap(statement.column);
}
const values = this.formatter.values(statement.value);
return `${columns} ${this._not(statement, 'in ')}${values}`;
},
  // Compiles `<col> is [not] null` in a where clause.
  whereNull(statement) {
    return (
      this.formatter.wrap(statement.column) +
      ' is ' +
      this._not(statement, 'null')
    );
  },
  // Compiles a basic "where" clause.
  whereBasic(statement) {
    return (
      this._not(statement, '') +
      this.formatter.wrap(statement.column) +
      ' ' +
      this.formatter.operator(statement.operator) +
      ' ' +
      // asColumn: compare against another column rather than a binding.
      (statement.asColumn
        ? this.formatter.wrap(statement.value)
        : this.formatter.parameter(statement.value))
    );
  },
  // Compiles `[not] exists (<subquery>)` in a where clause.
  whereExists(statement) {
    return (
      this._not(statement, 'exists') +
      ' (' +
      this.formatter.rawOrFn(statement.value) +
      ')'
    );
  },
  // Compiles a grouped (callback) where. rawOrFn compiles the callback with
  // a leading "where "; slice(6) strips that prefix before parenthesizing.
  whereWrapped(statement) {
    const val = this.formatter.rawOrFn(statement.value, 'where');
    return (val && this._not(statement, '') + '(' + val.slice(6) + ')') || '';
  },
  // Compiles `<col> [not] between ? and ?` in a where clause.
  whereBetween(statement) {
    return (
      this.formatter.wrap(statement.column) +
      ' ' +
      this._not(statement, 'between') +
      ' ' +
      map(statement.value, bind(this.formatter.parameter, this.formatter)).join(
        ' and '
      )
    );
  },
  // Compiles a "whereRaw" query.
  whereRaw(statement) {
    return this._not(statement, '') + this.formatter.unwrapRaw(statement.value);
  },
wrap(str) {
if (str.charAt(0) !== '(') return `(${str})`;
return str;
},
  // Compiles all `with` statements on the query.
  with() {
    if (!this.grouped.with || !this.grouped.with.length) {
      return '';
    }
    const withs = this.grouped.with;
    if (!withs) return;
    const sql = [];
    let i = -1;
    // A single recursive CTE upgrades the whole clause to `with recursive`.
    let isRecursive = false;
    while (++i < withs.length) {
      const stmt = withs[i];
      if (stmt.recursive) {
        isRecursive = true;
      }
      const val = this[stmt.type](stmt);
      sql.push(val);
    }
    return `with ${isRecursive ? 'recursive ' : ''}${sql.join(', ')} `;
  },
  // Compiles a single `alias as (<subquery>)` CTE member.
  withWrapped(statement) {
    const val = this.formatter.rawOrFn(statement.value);
    return (
      (val &&
        this.formatter.columnize(statement.alias) + ' as (' + val + ')') ||
      ''
    );
  },
  // Determines whether to add a "not" prefix to the where clause.
  _not(statement, str) {
    if (statement.not) return `not ${str}`;
    return str;
  },
  // Normalizes insert data into { columns, values } with every row aligned
  // to the union of all row keys (missing cells become undefined). Raw or
  // function inserts are returned as their compiled SQL string instead.
  _prepInsert(data) {
    const isRaw = this.formatter.rawOrFn(data);
    if (isRaw) return isRaw;
    let columns = [];
    const values = [];
    if (!Array.isArray(data)) data = data ? [data] : [];
    let i = -1;
    while (++i < data.length) {
      if (data[i] == null) break;
      if (i === 0) columns = Object.keys(data[i]).sort();
      const row = new Array(columns.length);
      const keys = Object.keys(data[i]);
      let j = -1;
      while (++j < keys.length) {
        const key = keys[j];
        let idx = columns.indexOf(key);
        // A key unseen so far: insert it at its sorted position and
        // backfill every previous row (and this one) with undefined there.
        if (idx === -1) {
          columns = columns.concat(key).sort();
          idx = columns.indexOf(key);
          let k = -1;
          while (++k < values.length) {
            values[k].splice(idx, 0, undefined);
          }
          row.splice(idx, 0, undefined);
        }
        row[idx] = data[i][key];
      }
      values.push(row);
    }
    return {
      columns,
      values,
    };
  },
  // "Preps" the update: merges pending increment/decrement counters into
  // the update data (as `col = col +/- ?` raw expressions) and compiles
  // each column assignment. Throws on an effectively empty update.
  _prepUpdate(data = {}) {
    const { counter = {} } = this.single;
    for (const column of Object.keys(counter)) {
      // An explicit .update() value for the same column wins over the
      // increment/decrement, with a warning.
      if (has(data, column)) {
        this.client.logger.warn(
          `increment/decrement called for a column that has already been specified in main .update() call. Ignoring increment/decrement and using value from .update() call.`
        );
        continue;
      }
      let value = counter[column];
      const symbol = value < 0 ? '-' : '+';
      if (symbol === '-') {
        value = -value;
      }
      data[column] = this.client.raw(`?? ${symbol} ?`, [column, value]);
    }
    // Drop keys explicitly set to undefined so they don't become bindings.
    data = omitBy(data, isUndefined);
    const vals = [];
    const columns = Object.keys(data);
    let i = -1;
    while (++i < columns.length) {
      vals.push(
        this.formatter.wrap(columns[i]) +
          ' = ' +
          this.formatter.parameter(data[columns[i]])
      );
    }
    if (isEmpty(vals)) {
      throw new Error(
        [
          'Empty .update() call detected!',
          'Update data does not contain any values to update.',
          'This will result in a faulty query.',
          this.single.table ? `Table: ${this.single.table}.` : '',
          this.single.update
            ? `Columns: ${Object.keys(this.single.update)}.`
            : '',
        ].join(' ')
      );
    }
    return vals;
  },
  // Formats one group/order item: raw fragments are unwrapped, subqueries
  // are parenthesized, plain identifiers are columnized.
  _formatGroupsItemValue(value) {
    const { formatter } = this;
    if (value instanceof Raw) {
      return formatter.unwrapRaw(value);
    } else if (value instanceof QueryBuilder) {
      return '(' + formatter.columnize(value) + ')';
    } else {
      return formatter.columnize(value);
    }
  },
  // Compiles the `order by` statements.
  _groupsOrders(type) {
    const items = this.grouped[type];
    if (!items) return '';
    const { formatter } = this;
    const sql = items.map((item) => {
      const column = this._formatGroupsItemValue(item.value);
      // Only ordering (and not orderByRaw) carries a direction.
      const direction =
        type === 'order' && item.type !== 'orderByRaw'
          ? ` ${formatter.direction(item.direction)}`
          : '';
      return column + direction;
    });
    return sql.length ? type + ' by ' + sql.join(', ') : '';
  },
});
// `first` compiles identically to `select`; the single-row behavior is
// handled at runtime by the runner, not by the compiler.
QueryCompiler.prototype.first = QueryCompiler.prototype.select;
// Get the table name, wrapping it if necessary.
// Implemented as a property to prevent ordering issues as described in #704.
Object.defineProperty(QueryCompiler.prototype, 'tableName', {
  get() {
    if (!this._tableName) {
      // Only call this.formatter.wrap() the first time this property is accessed.
      let tableName = this.single.table;
      const schemaName = this.single.schema;
      if (tableName && schemaName) tableName = `${schemaName}.${tableName}`;
      this._tableName = tableName
        ? // Wrap subQuery with parenthesis, #3485
          this.formatter.wrap(tableName, tableName instanceof QueryBuilder)
        : '';
    }
    return this._tableName;
  },
});
module.exports = QueryCompiler;

13
node_modules/knex/lib/query/constants.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
/**
 * internal constants, do not use in application code
 */
module.exports = {
  // Row-lock modes dispatched by the query compiler's lock().
  lockMode: {
    forShare: 'forShare',
    forUpdate: 'forUpdate',
  },
  // Lock wait behaviours dispatched by the query compiler's waitMode().
  waitMode: {
    skipLocked: 'skipLocked',
    noWait: 'noWait',
  },
};

271
node_modules/knex/lib/query/joinclause.js generated vendored Normal file
View File

@@ -0,0 +1,271 @@
const assert = require('assert');
// JoinClause
// -------
// The "JoinClause" is an object holding any necessary info about a join,
// including the type, and any associated tables & columns being joined.
function JoinClause(table, type, schema) {
  this.schema = schema;
  this.table = table;
  this.joinType = type;
  // `.and` is a fluent no-op alias back to this clause, for readability.
  this.and = this;
  // Accumulated on/using conditions, compiled later by the query compiler.
  this.clauses = [];
}
// Normalizes the variadic arguments of on()/onVal() into a clause record.
// A function `first` always produces a grouped 'onWrapped' clause; three
// total arguments mean a raw fragment; four mean an implicit '=' operator;
// five (or more) carry an explicit operator.
function getClauseFromArguments(...args) {
  const [compilerType, bool, first, operator, second] = args;
  if (typeof first === 'function') {
    return { type: 'onWrapped', value: first, bool };
  }
  switch (args.length) {
    case 3:
      return { type: 'onRaw', value: first, bool };
    case 4:
      return {
        type: compilerType,
        column: first,
        operator: '=',
        value: operator,
        bool,
      };
    default:
      return {
        type: compilerType,
        column: first,
        operator,
        value: second,
        bool,
      };
  }
}
Object.assign(JoinClause.prototype, {
  grouping: 'join',
  // Adds an "on" clause to the current join object.
  on(first) {
    // An object of { col: col } pairs expands into one on/orOn per key
    // (unless it is a knex object exposing toSQL, treated as a raw value).
    if (typeof first === 'object' && typeof first.toSQL !== 'function') {
      const keys = Object.keys(first);
      let i = -1;
      const method = this._bool() === 'or' ? 'orOn' : 'on';
      while (++i < keys.length) {
        this[method](keys[i], first[keys[i]]);
      }
      return this;
    }
    const data = getClauseFromArguments('onBasic', this._bool(), ...arguments);
    if (data) {
      this.clauses.push(data);
    }
    return this;
  },
  // Adds a "using" clause to the current join.
  // NOTE(review): this returns the result of Array#push (the new length),
  // not `this`, so `using(...)` is not chainable — confirm before relying
  // on chaining here.
  using(column) {
    return this.clauses.push({ type: 'onUsing', column, bool: this._bool() });
  },
  /*// Adds an "and on" clause to the current join object.
  andOn() {
    return this.on.apply(this, arguments);
  },*/
  // Adds an "or on" clause to the current join object.
  orOn(first, operator, second) {
    return this._bool('or').on.apply(this, arguments);
  },
  // Like on(), but the right-hand side is a bound value, not a column.
  onVal(first) {
    if (typeof first === 'object' && typeof first.toSQL !== 'function') {
      const keys = Object.keys(first);
      let i = -1;
      const method = this._bool() === 'or' ? 'orOnVal' : 'onVal';
      while (++i < keys.length) {
        this[method](keys[i], first[keys[i]]);
      }
      return this;
    }
    const data = getClauseFromArguments('onVal', this._bool(), ...arguments);
    if (data) {
      this.clauses.push(data);
    }
    return this;
  },
  andOnVal() {
    return this.onVal(...arguments);
  },
  orOnVal() {
    return this._bool('or').onVal(...arguments);
  },
  // Adds `on <col> [not] between <a> and <b>`; `values` must be a 2-tuple.
  onBetween(column, values) {
    assert(
      Array.isArray(values),
      'The second argument to onBetween must be an array.'
    );
    assert(
      values.length === 2,
      'You must specify 2 values for the onBetween clause'
    );
    this.clauses.push({
      type: 'onBetween',
      column,
      value: values,
      bool: this._bool(),
      not: this._not(),
    });
    return this;
  },
  onNotBetween(column, values) {
    return this._not(true).onBetween(column, values);
  },
  orOnBetween(column, values) {
    return this._bool('or').onBetween(column, values);
  },
  orOnNotBetween(column, values) {
    return this._bool('or')
      ._not(true)
      .onBetween(column, values);
  },
  // Adds `on <col> [not] in (...)`; an empty values array degenerates to
  // the always-false condition `on 1 = 0`.
  onIn(column, values) {
    if (Array.isArray(values) && values.length === 0) return this.on(1, '=', 0);
    this.clauses.push({
      type: 'onIn',
      column,
      value: values,
      not: this._not(),
      bool: this._bool(),
    });
    return this;
  },
  onNotIn(column, values) {
    return this._not(true).onIn(column, values);
  },
  orOnIn(column, values) {
    return this._bool('or').onIn(column, values);
  },
  orOnNotIn(column, values) {
    return this._bool('or')
      ._not(true)
      .onIn(column, values);
  },
  // Adds `on <col> is [not] null`.
  onNull(column) {
    this.clauses.push({
      type: 'onNull',
      column,
      not: this._not(),
      bool: this._bool(),
    });
    return this;
  },
  orOnNull(callback) {
    return this._bool('or').onNull(callback);
  },
  onNotNull(callback) {
    return this._not(true).onNull(callback);
  },
  orOnNotNull(callback) {
    return this._not(true)
      ._bool('or')
      .onNull(callback);
  },
  // Adds `on [not] exists (<subquery built by callback>)`.
  onExists(callback) {
    this.clauses.push({
      type: 'onExists',
      value: callback,
      not: this._not(),
      bool: this._bool(),
    });
    return this;
  },
  orOnExists(callback) {
    return this._bool('or').onExists(callback);
  },
  onNotExists(callback) {
    return this._not(true).onExists(callback);
  },
  orOnNotExists(callback) {
    return this._not(true)
      ._bool('or')
      .onExists(callback);
  },
  // Explicitly set the type of join, useful within a function when creating a grouped join.
  type(type) {
    this.joinType = type;
    return this;
  },
  // One-shot boolean flag: _bool('or') arms the next clause; the no-arg
  // read returns the armed value and resets it back to 'and'.
  _bool(bool) {
    if (arguments.length === 1) {
      this._boolFlag = bool;
      return this;
    }
    const ret = this._boolFlag || 'and';
    this._boolFlag = 'and';
    return ret;
  },
  // One-shot negation flag; same read-and-reset pattern as _bool.
  _not(val) {
    if (arguments.length === 1) {
      this._notFlag = val;
      return this;
    }
    const ret = this._notFlag;
    this._notFlag = false;
    return ret;
  },
});
// `join.or.on(...)` reads like prose: the getter arms the 'or' flag for
// the next clause and returns the join itself.
Object.defineProperty(JoinClause.prototype, 'or', {
  get() {
    return this._bool('or');
  },
});
// The `andX` spellings are pure aliases of their base methods ('and' is
// the default boolean connective).
JoinClause.prototype.andOn = JoinClause.prototype.on;
JoinClause.prototype.andOnIn = JoinClause.prototype.onIn;
JoinClause.prototype.andOnNotIn = JoinClause.prototype.onNotIn;
JoinClause.prototype.andOnNull = JoinClause.prototype.onNull;
JoinClause.prototype.andOnNotNull = JoinClause.prototype.onNotNull;
JoinClause.prototype.andOnExists = JoinClause.prototype.onExists;
JoinClause.prototype.andOnNotExists = JoinClause.prototype.onNotExists;
JoinClause.prototype.andOnBetween = JoinClause.prototype.onBetween;
JoinClause.prototype.andOnNotBetween = JoinClause.prototype.onNotBetween;
module.exports = JoinClause;

91
node_modules/knex/lib/query/methods.js generated vendored Normal file
View File

@@ -0,0 +1,91 @@
// All properties we can use to start a query chain
// from the `knex` object, e.g. `knex.select('*').from(...`
// Grouped below by the part of the query they build; order is preserved.
module.exports = [
  // Common table expressions
  'with',
  'withRecursive',
  // Select list and source table
  'select',
  'as',
  'columns',
  'column',
  'from',
  'fromJS',
  'into',
  'withSchema',
  'table',
  'distinct',
  // Joins
  'join',
  'joinRaw',
  'innerJoin',
  'leftJoin',
  'leftOuterJoin',
  'rightJoin',
  'rightOuterJoin',
  'outerJoin',
  'fullOuterJoin',
  'crossJoin',
  // Where clauses
  'where',
  'andWhere',
  'orWhere',
  'whereNot',
  'orWhereNot',
  'whereRaw',
  'whereWrapped',
  'havingWrapped',
  'orWhereRaw',
  'whereExists',
  'orWhereExists',
  'whereNotExists',
  'orWhereNotExists',
  'whereIn',
  'orWhereIn',
  'whereNotIn',
  'orWhereNotIn',
  'whereNull',
  'orWhereNull',
  'whereNotNull',
  'orWhereNotNull',
  'whereBetween',
  'whereNotBetween',
  'andWhereBetween',
  'andWhereNotBetween',
  'orWhereBetween',
  'orWhereNotBetween',
  // Grouping, ordering and set operations
  'groupBy',
  'groupByRaw',
  'orderBy',
  'orderByRaw',
  'union',
  'unionAll',
  'intersect',
  // Having
  'having',
  'havingRaw',
  'orHaving',
  'orHavingRaw',
  // Paging
  'offset',
  'limit',
  // Aggregates and result shaping
  'count',
  'countDistinct',
  'min',
  'max',
  'sum',
  'sumDistinct',
  'avg',
  'avgDistinct',
  'increment',
  'decrement',
  'first',
  'debug',
  'pluck',
  // Clearing previously-set clauses
  'clearSelect',
  'clearWhere',
  'clearOrder',
  'clearHaving',
  // Mutations
  'insert',
  'update',
  'returning',
  'del',
  'delete',
  'truncate',
  // Execution context
  'transacting',
  'connection',
];

190
node_modules/knex/lib/query/string.js generated vendored Normal file
View File

@@ -0,0 +1,190 @@
/*eslint max-len: 0, no-var:0 */
// Characters that must be backslash-escaped inside SQL string literals.
const charsRegex = /[\0\b\t\n\r\x1a"'\\]/g; // eslint-disable-line no-control-regex
// Replacement text for each escapable character (MySQL-style escapes).
const charsMap = {
  '\0': '\\0',
  '\b': '\\b',
  '\t': '\\t',
  '\n': '\\n',
  '\r': '\\r',
  '\x1a': '\\Z',
  '"': '\\"',
  "'": "\\'",
  '\\': '\\\\',
};
// Ties the knot for the escape function: the returned `finalEscape`
// passes itself back into `escapeFn`, so nested values (arrays, objects)
// recurse through the same entry point. `ctx` defaults to {}.
function wrapEscape(escapeFn) {
  const finalEscape = (val, ctx = {}) => escapeFn(val, finalEscape, ctx);
  return finalEscape;
}
// Builds the dialect escape function. Each value category (date, array,
// buffer, string, object) can be overridden via `config`; the result is
// wrapped by `config.wrap` (default wrapEscape) so nested values recurse
// through the same entry point.
function makeEscape(config = {}) {
  const finalEscapeDate = config.escapeDate || dateToString;
  const finalEscapeArray = config.escapeArray || arrayToList;
  const finalEscapeBuffer = config.escapeBuffer || bufferToString;
  const finalEscapeString = config.escapeString || escapeString;
  const finalEscapeObject = config.escapeObject || escapeObject;
  const finalWrap = config.wrap || wrapEscape;
  function escapeFn(val, finalEscape, ctx) {
    // null and undefined always serialize as SQL NULL.
    if (val === undefined || val === null) {
      return 'NULL';
    }
    const kind = typeof val;
    if (kind === 'boolean') {
      return val ? 'true' : 'false';
    }
    if (kind === 'number') {
      return `${val}`;
    }
    if (kind === 'object') {
      if (val instanceof Date) {
        // Dates become a formatted string, then fall through to the
        // string escaper for quoting.
        val = finalEscapeDate(val, finalEscape, ctx);
      } else if (Array.isArray(val)) {
        return finalEscapeArray(val, finalEscape, ctx);
      } else if (Buffer.isBuffer(val)) {
        return finalEscapeBuffer(val, finalEscape, ctx);
      } else {
        return finalEscapeObject(val, finalEscape, ctx);
      }
    }
    return finalEscapeString(val, finalEscape, ctx);
  }
  return finalWrap ? finalWrap(escapeFn) : escapeFn;
}
// Escapes a plain object: knex-aware objects (anything exposing a toSQL
// method) delegate to toSQL(ctx); everything else is serialized as JSON.
function escapeObject(val, finalEscape, ctx) {
  const hasToSQL = Boolean(val) && typeof val.toSQL === 'function';
  return hasToSQL ? val.toSQL(ctx) : JSON.stringify(val);
}
// Renders an array as a comma-separated SQL list; nested arrays become
// parenthesized sub-lists (e.g. multi-row value tuples), scalars are
// escaped through `finalEscape(val, ctx)`.
function arrayToList(array, finalEscape, ctx) {
  const parts = array.map((item) =>
    Array.isArray(item)
      ? `(${arrayToList(item, finalEscape, ctx)})`
      : finalEscape(item, ctx)
  );
  return parts.join(', ');
}
// Renders a Buffer as a quoted hex blob literal, e.g. X'deadbeef'.
function bufferToString(buffer) {
  return 'X' + escapeString(buffer.toString('hex'));
}
// Quotes `val` as a single-quoted SQL string literal, backslash-escaping
// the characters in charsMap (control chars, quotes, backslash).
function escapeString(val, finalEscape, ctx) {
  // String#replace with a /g regex always scans from the start, matching
  // the original manual exec/lastIndex loop.
  const escaped = val.replace(charsRegex, (ch) => charsMap[ch]);
  return `'${escaped}'`;
}
// Formats a Date as `YYYY-MM-DD HH:mm:ss.mmm`. ctx.timeZone selects the
// frame of reference: 'local' (the default) reads local time components;
// anything else is parsed by convertTimezone into a minute offset applied
// on top of UTC.
function dateToString(date, finalEscape, ctx) {
  const timeZone = ctx.timeZone || 'local';
  const dt = new Date(date);
  let year;
  let month;
  let day;
  let hour;
  let minute;
  let second;
  let millisecond;
  if (timeZone === 'local') {
    year = dt.getFullYear();
    month = dt.getMonth() + 1;
    day = dt.getDate();
    hour = dt.getHours();
    minute = dt.getMinutes();
    second = dt.getSeconds();
    millisecond = dt.getMilliseconds();
  } else {
    const tz = convertTimezone(timeZone);
    // Shift the timestamp by the offset, then read UTC components.
    if (tz !== false && tz !== 0) {
      dt.setTime(dt.getTime() + tz * 60000);
    }
    year = dt.getUTCFullYear();
    month = dt.getUTCMonth() + 1;
    day = dt.getUTCDate();
    hour = dt.getUTCHours();
    minute = dt.getUTCMinutes();
    second = dt.getUTCSeconds();
    millisecond = dt.getUTCMilliseconds();
  }
  // YYYY-MM-DD HH:mm:ss.mmm
  return (
    zeroPad(year, 4) +
    '-' +
    zeroPad(month, 2) +
    '-' +
    zeroPad(day, 2) +
    ' ' +
    zeroPad(hour, 2) +
    ':' +
    zeroPad(minute, 2) +
    ':' +
    zeroPad(second, 2) +
    '.' +
    zeroPad(millisecond, 3)
  );
}
// Left-pads `number` with zeros to at least `length` digits.
function zeroPad(number, length) {
  return number.toString().padStart(length, '0');
}
// Converts a timezone designator to a minute offset: 'Z' is 0,
// '+HH[:]MM' / '-HH[:]MM' become signed minutes, anything unparsable
// yields false.
function convertTimezone(tz) {
  if (tz === 'Z') {
    return 0;
  }
  const match = tz.match(/([+\-\s])(\d\d):?(\d\d)?/);
  if (!match) {
    return false;
  }
  const sign = match[1] === '-' ? -1 : 1;
  const hours = parseInt(match[2], 10);
  const minutes = match[3] ? parseInt(match[3], 10) : 0;
  return sign * (hours + minutes / 60) * 60;
}
// Public escaping helpers; dialects compose these via makeEscape.
module.exports = {
  arrayToList,
  bufferToString,
  dateToString,
  escapeString,
  charsRegex,
  charsMap,
  escapeObject,
  makeEscape,
};

192
node_modules/knex/lib/raw.js generated vendored Normal file
View File

@@ -0,0 +1,192 @@
// Raw
// -------
const inherits = require('inherits');
const helpers = require('./helpers');
const { EventEmitter } = require('events');
const debug = require('debug');
const {
assign,
reduce,
isPlainObject,
isObject,
isUndefined,
isNumber,
} = require('lodash');
const saveAsyncStack = require('./util/save-async-stack');
const uuid = require('uuid');
const debugBindings = debug('knex:bindings');
// Raw SQL fragment holder: carries the sql string, its bindings and
// optional before/after wrapping, and inherits EventEmitter for query
// lifecycle events.
function Raw(client) {
  this.client = client;
  this.sql = '';
  this.bindings = [];
  // Todo: Deprecate
  this._wrappedBefore = undefined;
  this._wrappedAfter = undefined;
  if (client && client.config) {
    this._debug = client.config.debug;
    // Capture the caller's stack so async errors can point at user code.
    saveAsyncStack(this, 4);
  }
}
inherits(Raw, EventEmitter);
assign(Raw.prototype, {
  // Sets the sql and normalizes bindings: plain objects without toSQL and
  // undefined are stored as-is, anything else is wrapped in an array.
  set(sql, bindings) {
    this.sql = sql;
    this.bindings =
      (isObject(bindings) && !bindings.toSQL) || isUndefined(bindings)
        ? bindings
        : [bindings];
    return this;
  },
  // Arms a per-query timeout (ms > 0); with { cancel: true } the client
  // must support cancelling an in-flight query.
  timeout(ms, { cancel } = {}) {
    if (isNumber(ms) && ms > 0) {
      this._timeout = ms;
      if (cancel) {
        this.client.assertCanCancelQuery();
        this._cancelOnTimeout = true;
      }
    }
    return this;
  },
  // Wraps the current sql with `before` and `after`.
  wrap(before, after) {
    this._wrappedBefore = before;
    this._wrappedAfter = after;
    return this;
  },
  // Calls `toString` on the Knex object.
  toString() {
    return this.toQuery();
  },
  // Returns the raw sql for the query.
  toSQL(method, tz) {
    let obj;
    const formatter = this.client.formatter(this);
    // Positional (array) and named (plain object) bindings interpolate
    // differently; anything else is treated as a single binding.
    if (Array.isArray(this.bindings)) {
      obj = replaceRawArrBindings(this, formatter);
    } else if (this.bindings && isPlainObject(this.bindings)) {
      obj = replaceKeyBindings(this, formatter);
    } else {
      obj = {
        method: 'raw',
        sql: this.sql,
        bindings: isUndefined(this.bindings) ? [] : [this.bindings],
      };
    }
    if (this._wrappedBefore) {
      obj.sql = this._wrappedBefore + obj.sql;
    }
    if (this._wrappedAfter) {
      obj.sql = obj.sql + this._wrappedAfter;
    }
    obj.options = reduce(this._options, assign, {});
    if (this._timeout) {
      obj.timeout = this._timeout;
      if (this._cancelOnTimeout) {
        obj.cancelOnTimeout = this._cancelOnTimeout;
      }
    }
    obj.bindings = obj.bindings || [];
    // Undefined bindings are programmer error: fail loudly with the
    // offending indices rather than sending a broken query.
    if (helpers.containsUndefined(obj.bindings)) {
      const undefinedBindingIndices = helpers.getUndefinedIndices(
        this.bindings
      );
      debugBindings(obj.bindings);
      throw new Error(
        `Undefined binding(s) detected for keys [${undefinedBindingIndices}] when compiling RAW query: ${obj.sql}`
      );
    }
    // Unique id lets the client correlate query/response debug events.
    obj.__knexQueryUid = uuid.v1();
    return obj;
  },
});
// Substitutes positional `?` / `??` placeholders in a raw sql string.
// `??` interpolates an identifier via formatter.columnize, `?` becomes a
// bound parameter, and `\?` is passed through untouched. Throws when the
// number of placeholders consumed differs from raw.bindings.length.
function replaceRawArrBindings(raw, formatter) {
  const values = raw.bindings;
  const expectedBindings = values.length;
  let index = 0;
  const sql = raw.sql.replace(/\\?\?\??/g, (match) => {
    // A backslash-escaped `?` is emitted literally.
    if (match === '\\?') {
      return match;
    }
    const value = values[index++];
    return match === '??'
      ? formatter.columnize(value)
      : formatter.parameter(value);
  });
  if (expectedBindings !== index) {
    throw new Error(`Expected ${expectedBindings} bindings, saw ${index}`);
  }
  return {
    method: 'raw',
    sql,
    bindings: formatter.bindings,
  };
}
// Substitutes named `:key` / `:key:` placeholders using raw.bindings as a
// key-to-value map. `:key:` interpolates an identifier (columnize), `:key`
// a bound parameter, and a leading backslash escapes the placeholder.
// Unknown keys are left untouched, but own-property undefined values are
// still pushed onto formatter.bindings (mirroring the positional variant).
function replaceKeyBindings(raw, formatter) {
  const values = raw.bindings;
  const regex = /\\?(:(\w+):(?=::)|:(\w+):(?!:)|:(\w+))/g;
  const sql = raw.sql.replace(regex, (match, p1, p2, p3, p4) => {
    // A backslash prefix means "emit the placeholder literally".
    if (match !== p1) {
      return p1;
    }
    const part = p2 || p3 || p4;
    const key = match.trim();
    const isIdentifier = key.endsWith(':');
    const value = values[part];
    if (value === undefined) {
      if (Object.prototype.hasOwnProperty.call(values, part)) {
        formatter.bindings.push(value);
      }
      return match;
    }
    const replacement = isIdentifier
      ? formatter.columnize(value)
      : formatter.parameter(value);
    return match.replace(p1, replacement);
  });
  return {
    method: 'raw',
    sql,
    bindings: formatter.bindings,
  };
}
// Allow the `Raw` object to be utilized with full access to the relevant
// promise API.
require('./interface')(Raw);
// Adds the queryContext() plumbing shared with the query builder.
helpers.addQueryContext(Raw);
module.exports = Raw;

39
node_modules/knex/lib/ref.js generated vendored Normal file
View File

@@ -0,0 +1,39 @@
const Raw = require('./raw');
// A Ref wraps a column reference (optionally schema-qualified and aliased)
// as a Raw, so it can be used anywhere a raw identifier is accepted.
class Ref extends Raw {
  constructor(client, ref) {
    super(client);
    this.ref = ref;
    this._schema = null;
    this._alias = null;
  }
  // Qualifies the reference with a schema name; chainable.
  withSchema(schema) {
    this._schema = schema;
    return this;
  }
  // Adds an `as <alias>` to the compiled reference; chainable.
  as(alias) {
    this._alias = alias;
    return this;
  }
  toSQL() {
    // Build `[schema.]ref[ as alias]` via the formatter, install it as
    // this Raw's sql (with no bindings), then delegate to Raw#toSQL.
    const string = this._schema ? `${this._schema}.${this.ref}` : this.ref;
    const formatter = this.client.formatter(this);
    const ref = formatter.columnize(string);
    const sql = this._alias ? `${ref} as ${formatter.wrap(this._alias)}` : ref;
    this.set(sql, []);
    return super.toSQL(...arguments);
  }
}
module.exports = Ref;

261
node_modules/knex/lib/runner.js generated vendored Normal file
View File

@@ -0,0 +1,261 @@
const Bluebird = require('bluebird');
let PassThrough;
// The "Runner" constructor takes a "builder" (query, schema, or raw)
// and runs through each of the query statements, calling any additional
// "output" method provided alongside the query and bindings.
function Runner(client, builder) {
  this.client = client;
  this.builder = builder;
  this.queries = [];
  // The "connection" object is set on the runner when
  // "run" is called.
  this.connection = void 0;
}
Object.assign(Runner.prototype, {
  // "Run" the target, calling "toSQL" on the builder, returning
  // an object or array of queries to run, each of which are run on
  // a single connection.
  run() {
    const runner = this;
    return (
      this.ensureConnection(function(connection) {
        runner.connection = connection;
        // Emit lifecycle 'start' on both the client and the builder.
        runner.client.emit('start', runner.builder);
        runner.builder.emit('start', runner.builder);
        const sql = runner.builder.toSQL();
        if (runner.builder._debug) {
          runner.client.logger.debug(sql);
        }
        // Schema builders may compile to an array of statements.
        if (Array.isArray(sql)) {
          return runner.queryArray(sql);
        }
        return runner.query(sql);
      })
        // If there are any "error" listeners, we fire an error event
        // and then re-throw the error to be eventually handled by
        // the promise chain. Useful if you're wrapping in a custom `Promise`.
        .catch(function(err) {
          if (runner.builder._events && runner.builder._events.error) {
            runner.builder.emit('error', err);
          }
          throw err;
        })
        // Fire a single "end" event on the builder when
        // all queries have successfully completed.
        .then(function(res) {
          runner.builder.emit('end');
          return res;
        })
    );
  },
// Stream the result set, by passing through to the dialect's streaming
// capabilities. If the options are
stream(options, handler) {
// If we specify stream(handler).then(...
if (arguments.length === 1) {
if (typeof options === 'function') {
handler = options;
options = {};
}
}
// Determines whether we emit an error or throw here.
const hasHandler = typeof handler === 'function';
// Lazy-load the "PassThrough" dependency.
PassThrough = PassThrough || require('stream').PassThrough;
const runner = this;
const stream = new PassThrough({ objectMode: true });
let hasConnection = false;
const promise = this.ensureConnection(function(connection) {
hasConnection = true;
runner.connection = connection;
try {
const sql = runner.builder.toSQL();
if (Array.isArray(sql) && hasHandler) {
throw new Error(
'The stream may only be used with a single query statement.'
);
}
return runner.client.stream(runner.connection, sql, stream, options);
} catch (e) {
stream.emit('error', e);
throw e;
}
});
// If a function is passed to handle the stream, send the stream
// there and return the promise, otherwise just return the stream
// and the promise will take care of itself.
if (hasHandler) {
handler(stream);
return Bluebird.resolve(promise);
}
// Emit errors on the stream if the error occurred before a connection
// could be acquired.
// If the connection was acquired, assume the error occurred in the client
// code and has already been emitted on the stream. Don't emit it twice.
promise.catch(function(err) {
if (!hasConnection) stream.emit('error', err);
});
return stream;
},
// Allow you to pipe the stream to a writable stream.
pipe(writable, options) {
return this.stream(options).pipe(writable);
},
// "Runs" a query, returning a promise. All queries specified by the builder are guaranteed
// to run in sequence, and on the same connection, especially helpful when schema building
// and dealing with foreign key constraints, etc.
query: async function(obj) {
const { __knexUid, __knexTxId } = this.connection;
this.builder.emit('query', Object.assign({ __knexUid, __knexTxId }, obj));
const runner = this;
let queryPromise = this.client.query(this.connection, obj);
if (obj.timeout) {
queryPromise = queryPromise.timeout(obj.timeout);
}
// Await the return value of client.processResponse; in the case of sqlite3's
// dropColumn()/renameColumn(), it will be a Promise for the transaction
// containing the complete rename procedure.
return queryPromise
.then((resp) => this.client.processResponse(resp, runner))
.then((processedResponse) => {
const queryContext = this.builder.queryContext();
const postProcessedResponse = this.client.postProcessResponse(
processedResponse,
queryContext
);
this.builder.emit(
'query-response',
postProcessedResponse,
Object.assign({ __knexUid: this.connection.__knexUid }, obj),
this.builder
);
this.client.emit(
'query-response',
postProcessedResponse,
Object.assign({ __knexUid: this.connection.__knexUid }, obj),
this.builder
);
return postProcessedResponse;
})
.catch(Bluebird.TimeoutError, (error) => {
const { timeout, sql, bindings } = obj;
let cancelQuery;
if (obj.cancelOnTimeout) {
cancelQuery = this.client.cancelQuery(this.connection);
} else {
// If we don't cancel the query, we need to mark the connection as disposed so that
// it gets destroyed by the pool and is never used again. If we don't do this and
// return the connection to the pool, it will be useless until the current operation
// that timed out, finally finishes.
this.connection.__knex__disposed = error;
cancelQuery = Bluebird.resolve();
}
return cancelQuery
.catch((cancelError) => {
// If the cancellation failed, we need to mark the connection as disposed so that
// it gets destroyed by the pool and is never used again. If we don't do this and
// return the connection to the pool, it will be useless until the current operation
// that timed out, finally finishes.
this.connection.__knex__disposed = error;
// cancellation failed
throw Object.assign(cancelError, {
message: `After query timeout of ${timeout}ms exceeded, cancelling of query failed.`,
sql,
bindings,
timeout,
});
})
.then(() => {
// cancellation succeeded, rethrow timeout error
throw Object.assign(error, {
message: `Defined query timeout of ${timeout}ms exceeded when running query.`,
sql,
bindings,
timeout,
});
});
})
.catch((error) => {
this.builder.emit(
'query-error',
error,
Object.assign({ __knexUid: this.connection.__knexUid }, obj)
);
throw error;
});
},
// In the case of the "schema builder" we call `queryArray`, which runs each
// of the queries in sequence.
async queryArray(queries) {
if (queries.length === 1) {
return this.query(queries[0]);
}
const results = [];
for (const query of queries) {
results.push(await this.query(query));
}
return results;
},
// Check whether there's a transaction flag, and that it has a connection.
async ensureConnection(cb) {
// Use override from a builder if passed
if (this.builder._connection) {
return cb(this.builder._connection);
}
if (this.connection) {
return cb(this.connection);
}
return this.client
.acquireConnection()
.catch(Bluebird.TimeoutError, (error) => {
if (this.builder) {
error.sql = this.builder.sql;
error.bindings = this.builder.bindings;
}
throw error;
})
.then(async (connection) => {
try {
return await cb(connection);
} finally {
await this.client.releaseConnection(this.connection);
}
});
},
});
module.exports = Runner;

85
node_modules/knex/lib/schema/builder.js generated vendored Normal file
View File

@@ -0,0 +1,85 @@
const inherits = require('inherits');
const { EventEmitter } = require('events');
const { each, toArray } = require('lodash');
const { addQueryContext } = require('../helpers');
const saveAsyncStack = require('../util/save-async-stack');
// Constructor for the schema builder instance, typically reached via
// `knex.schema`. Each schema call is accumulated into `_sequence` and
// compiled later by the dialect's schema compiler.
function SchemaBuilder(client) {
  this.client = client;
  this._sequence = [];
  const { config } = client;
  if (config) {
    this._debug = config.debug;
    // Capture the async stack so errors can point at the user's call site.
    saveAsyncStack(this, 4);
  }
}
inherits(SchemaBuilder, EventEmitter);

// Every schema method is recorded uniformly: the call (method name plus its
// arguments) is appended to "_sequence" and replayed by the schema compiler.
[
  'createTable',
  'createTableIfNotExists',
  'createSchema',
  'createSchemaIfNotExists',
  'dropSchema',
  'dropSchemaIfExists',
  'createExtension',
  'createExtensionIfNotExists',
  'dropExtension',
  'dropExtensionIfExists',
  'table',
  'alterTable',
  'hasTable',
  'hasColumn',
  'dropTable',
  'renameTable',
  'dropTableIfExists',
  'raw',
].forEach(function(method) {
  SchemaBuilder.prototype[method] = function() {
    if (method === 'createTableIfNotExists') {
      this.client.logger.warn(
        [
          'Use async .hasTable to check if table exists and then use plain .createTable. Since ',
          '.createTableIfNotExists actually just generates plain "CREATE TABLE IF NOT EXIST..." ',
          'query it will not work correctly if there are any alter table queries generated for ',
          'columns afterwards. To not break old migrations this function is left untouched for now',
          ', but it should not be used when writing new code and it is removed from documentation.',
        ].join('')
      );
    }
    // ".table" is recorded as an ".alterTable" call.
    if (method === 'table') method = 'alterTable';
    this._sequence.push({
      method,
      args: Array.prototype.slice.call(arguments),
    });
    return this;
  };
});
// Mix in the shared promise-like interface and queryContext support.
require('../interface')(SchemaBuilder);
addQueryContext(SchemaBuilder);

Object.assign(SchemaBuilder.prototype, {
  // Scope all subsequent schema operations to the given schema name.
  withSchema(schemaName) {
    this._schema = schemaName;
    return this;
  },
  toString() {
    return this.toQuery();
  },
  // Compile the accumulated call sequence into query objects.
  toSQL() {
    return this.client.schemaCompiler(this).toSQL();
  },
});

module.exports = SchemaBuilder;

116
node_modules/knex/lib/schema/columnbuilder.js generated vendored Normal file
View File

@@ -0,0 +1,116 @@
const { extend, each, toArray } = require('lodash');
const { addQueryContext } = require('../helpers');
// The chainable interface off the original "column" method. Records the
// column's type, arguments and modifiers for the column compiler.
function ColumnBuilder(client, tableBuilder, type, args) {
  Object.assign(this, {
    client,
    // "add" by default; flips to "alter" via AlterMethods.alter().
    _method: 'add',
    _single: {},
    _modifiers: {},
    _statements: [],
    // Normalize convenience aliases (e.g. "boolean" -> "bool").
    _type: columnAlias[type] || type,
    _args: args,
    _tableBuilder: tableBuilder,
  });
  // When the table itself is being altered, expose the alter-only methods.
  if (tableBuilder._method === 'alter') {
    extend(this, AlterMethods);
  }
}
// All of the modifier methods that can be used to modify the current query.
const modifiers = [
  'default',
  'defaultsTo',
  'defaultTo',
  'unsigned',
  'nullable',
  'first',
  'after',
  'comment',
  'collate',
];

// Aliases for convenience: both alternate spellings funnel into "defaultTo".
const aliasMethod = {
  default: 'defaultTo',
  defaultsTo: 'defaultTo',
};

// Calling a modifier records its arguments under the canonical key; the
// column compiler reads them back when rendering the column definition.
modifiers.forEach(function(method) {
  const key = aliasMethod[method] || method;
  ColumnBuilder.prototype[method] = function(...args) {
    this._modifiers[key] = args;
    return this;
  };
});
addQueryContext(ColumnBuilder);

// "notNull"/"notNullable" are sugar for nullable(false).
ColumnBuilder.prototype.notNull = ColumnBuilder.prototype.notNullable = function notNullable() {
  return this.nullable(false);
};

// Index helpers on a column behave as though `table.index(column)` (etc.) had
// been called directly. They are skipped for increments types, which already
// carry a primary key.
['index', 'primary', 'unique'].forEach(function(method) {
  ColumnBuilder.prototype[method] = function(...args) {
    if (this._type.toLowerCase().indexOf('increments') === -1) {
      this._tableBuilder[method].apply(
        this._tableBuilder,
        [this._args[0]].concat(args)
      );
    }
    return this;
  };
});

// Specify that the current column "references" a column,
// which may be tableName.column or just "column"
ColumnBuilder.prototype.references = function(value) {
  const tableBuilder = this._tableBuilder;
  return tableBuilder
    .foreign(this._args[0], undefined, this)
    ._columnBuilder(this)
    .references(value);
};
// Methods mixed into a ColumnBuilder only when the parent table is being
// altered (see the ColumnBuilder constructor).
const AlterMethods = {
  // Specify that the column is to be dropped. This takes precedence
  // over all other rules for the column.
  drop() {
    this._single.drop = true;
    return this;
  },

  // Specify the "type" that we're looking to set the
  // Knex takes no responsibility for any data-loss that may
  // occur when changing data types.
  alterType(type) {
    this._statements.push({
      grouping: 'alterType',
      value: type,
    });
    return this;
  },

  // Set column method to alter (default is add).
  alter() {
    this._method = 'alter';
    return this;
  },
};
// Alias a few methods for clarity when processing.
// Maps the public type name used on the table builder to the canonical
// ColumnCompiler method name.
const columnAlias = {
  float: 'floating',
  enum: 'enu',
  boolean: 'bool',
  string: 'varchar',
  bigint: 'bigInteger',
};
module.exports = ColumnBuilder;

173
node_modules/knex/lib/schema/columncompiler.js generated vendored Normal file
View File

@@ -0,0 +1,173 @@
// Column Compiler
// Used for designating column definitions
// during the table "create" / "alter" statements.
// -------
const Raw = require('../raw');
const helpers = require('./helpers');
const { groupBy, first, tail, has, isObject } = require('lodash');
// Builds the SQL fragment for a single column within a create/alter table
// statement, driven by the statements and modifiers recorded on the
// column builder.
function ColumnCompiler(client, tableCompiler, columnBuilder) {
  const type = columnBuilder._type.toLowerCase();
  Object.assign(this, {
    client,
    tableCompiler,
    columnBuilder,
    _commonBuilder: columnBuilder,
    args: columnBuilder._args,
    type,
    grouped: groupBy(columnBuilder._statements, 'grouping'),
    modified: columnBuilder._modifiers,
    isIncrements: type.includes('increments'),
    formatter: client.formatter(columnBuilder),
    sequence: [],
    modifiers: [],
  });
}

ColumnCompiler.prototype.pushQuery = helpers.pushQuery;
ColumnCompiler.prototype.pushAdditional = helpers.pushAdditional;
ColumnCompiler.prototype.unshiftQuery = helpers.unshiftQuery;

// Fallback values used when a piece of the column definition was omitted.
ColumnCompiler.prototype._defaultMap = {
  columnName: function() {
    // Only increments columns may omit a name; they default to "id".
    if (this.isIncrements) {
      return 'id';
    }
    throw new Error(
      `You did not specify a column name for the ${this.type} column.`
    );
  },
};

// Resolve the default for `label`, throwing when none is registered.
ColumnCompiler.prototype.defaults = function(label) {
  if (!Object.prototype.hasOwnProperty.call(this._defaultMap, label)) {
    throw new Error(
      `There is no default for the specified identifier ${label}`
    );
  }
  return this._defaultMap[label].bind(this)();
};
// To convert to sql, we first go through and build the
// column as it would be in the insert statement
ColumnCompiler.prototype.toSQL = function() {
  this.pushQuery(this.compileColumn());
  // pushAdditional stores extra statements on `sequence.additional`;
  // flatten them onto the end of the sequence.
  const { additional } = this.sequence;
  if (additional) {
    this.sequence = this.sequence.concat(additional);
  }
  return this.sequence;
};

// Compiles a column definition: "<name> <type><modifiers>".
ColumnCompiler.prototype.compileColumn = function() {
  const name = this.formatter.wrap(this.getColumnName());
  return `${name} ${this.getColumnType()}${this.getModifiers()}`;
};

// Assumes the autoincrementing key is named `id` if not otherwise specified.
ColumnCompiler.prototype.getColumnName = function() {
  const [value] = this.args;
  return value || this.defaults('columnName');
};

// A type entry may be a plain string or a function taking the builder's
// extra arguments (e.g. varchar length).
ColumnCompiler.prototype.getColumnType = function() {
  const type = this[this.type];
  if (typeof type === 'function') {
    return type.apply(this, this.args.slice(1));
  }
  return type;
};

// Render the modifier clauses (default, nullability, ...) in declaration
// order. Increments columns only accept the "comment" modifier.
ColumnCompiler.prototype.getModifiers = function() {
  const parts = [];
  for (const modifier of this.modifiers) {
    if (this.isIncrements && modifier !== 'comment') continue;
    if (!has(this.modified, modifier)) continue;
    const val = this[modifier].apply(this, this.modified[modifier]);
    if (val) parts.push(val);
  }
  return parts.length > 0 ? ` ${parts.join(' ')}` : '';
};
// Types
// ------
// Entries are either literal SQL type strings or functions of the extra
// builder arguments (see getColumnType).
Object.assign(ColumnCompiler.prototype, {
  increments: 'integer not null primary key autoincrement',
  bigincrements: 'integer not null primary key autoincrement',
  integer: 'integer',
  smallint: 'integer',
  mediumint: 'integer',
  biginteger: 'bigint',
  varchar(length) {
    return `varchar(${this._num(length, 255)})`;
  },
  text: 'text',
  tinyint: 'tinyint',
  floating(precision, scale) {
    return `float(${this._num(precision, 8)}, ${this._num(scale, 2)})`;
  },
  decimal(precision, scale) {
    // An explicit null means "unbounded precision", which this base dialect
    // cannot express; undefined still falls back to the default of 8.
    if (precision === null) {
      throw new Error(
        'Specifying no precision on decimal columns is not supported for that SQL dialect.'
      );
    }
    return `decimal(${this._num(precision, 8)}, ${this._num(scale, 2)})`;
  },
  binary: 'blob',
  bool: 'boolean',
  date: 'date',
  datetime: 'datetime',
  time: 'time',
  timestamp: 'timestamp',
  enu: 'varchar',
  bit: 'text',
  json: 'text',
  uuid: 'char(36)',
  specifictype(type) {
    return type;
  },

  // Modifiers
  // -------
  nullable(nullable) {
    return nullable === false ? 'not null' : 'null';
  },
  notNullable() {
    return this.nullable(false);
  },
  // Render the "default ..." clause for the given value, quoting/encoding it
  // according to the column type.
  defaultTo(value) {
    if (value === undefined) {
      return '';
    }
    if (value === null) {
      value = 'null';
    } else if (value instanceof Raw) {
      value = value.toQuery();
    } else if (this.type === 'bool') {
      if (value === 'false') value = 0;
      value = `'${value ? 1 : 0}'`;
    } else if (
      (this.type === 'json' || this.type === 'jsonb') &&
      isObject(value)
    ) {
      value = `'${JSON.stringify(value)}'`;
    } else {
      value = `'${value}'`;
    }
    return `default ${value}`;
  },
  // Coerce `val` to an integer, using `fallback` when absent or non-numeric.
  _num(val, fallback) {
    if (val === undefined || val === null) return fallback;
    const number = parseInt(val, 10);
    return Number.isNaN(number) ? fallback : number;
  },
});
module.exports = ColumnCompiler;

103
node_modules/knex/lib/schema/compiler.js generated vendored Normal file
View File

@@ -0,0 +1,103 @@
const { pushQuery, pushAdditional, unshiftQuery } = require('./helpers');
const { isUndefined } = require('lodash');
// The "SchemaCompiler" takes all of the query statements which have been
// gathered in the "SchemaBuilder" and turns them into an array of
// properly formatted / bound query strings.
function SchemaCompiler(client, builder) {
  Object.assign(this, {
    builder,
    _commonBuilder: builder,
    client,
    schema: builder._schema,
    formatter: client.formatter(builder),
    sequence: [],
  });
}
// Raise a uniform error for schema operations that only the PostgreSQL
// dialect implements.
function throwOnlyPGError(operationName) {
  const message = `${operationName} is not supported for this dialect (only PostgreSQL supports it currently).`;
  throw new Error(message);
}
Object.assign(SchemaCompiler.prototype, {
  pushQuery,
  pushAdditional,
  unshiftQuery,

  // Table-level operations share one implementation, parameterized by the
  // compile "type" (create / createIfNot / alter).
  createTable: buildTable('create'),
  createTableIfNotExists: buildTable('createIfNot'),
  alterTable: buildTable('alter'),

  // Schema-level DDL is PostgreSQL-only in this base compiler.
  createSchema() {
    throwOnlyPGError('createSchema');
  },
  createSchemaIfNotExists() {
    throwOnlyPGError('createSchemaIfNotExists');
  },
  dropSchema() {
    throwOnlyPGError('dropSchema');
  },
  dropSchemaIfExists() {
    throwOnlyPGError('dropSchemaIfExists');
  },

  dropTablePrefix: 'drop table ',
  dropTable(tableName) {
    this.pushQuery(
      this.dropTablePrefix +
        this.formatter.wrap(prefixedTableName(this.schema, tableName))
    );
  },
  dropTableIfExists(tableName) {
    this.pushQuery(
      this.dropTablePrefix +
        'if exists ' +
        this.formatter.wrap(prefixedTableName(this.schema, tableName))
    );
  },

  raw(sql, bindings) {
    this.sequence.push(this.client.raw(sql, bindings).toSQL());
  },

  // Replay the builder's recorded calls against this compiler and return the
  // accumulated queries.
  toSQL() {
    for (const { method, args } of this.builder._sequence) {
      this[method].apply(this, args);
    }
    return this.sequence;
  },
});
// Produce a SchemaCompiler method that compiles a create/alter-table call by
// delegating to the dialect's TableBuilder and appending its SQL statements.
function buildTable(type) {
  return function(tableName, fn) {
    const builder = this.client.tableBuilder(type, tableName, fn);
    // pass queryContext down to tableBuilder but do not overwrite it if already set
    const queryContext = this.builder.queryContext();
    if (queryContext !== undefined && builder.queryContext() === undefined) {
      builder.queryContext(queryContext);
    }
    builder.setSchema(this.schema);
    for (const query of builder.toSQL()) {
      this.sequence.push(query);
    }
  };
}
// Qualify `table` with `prefix` (the schema name) when one is set.
function prefixedTableName(prefix, table) {
  if (!prefix) return table;
  return `${prefix}.${table}`;
}
module.exports = SchemaCompiler;

50
node_modules/knex/lib/schema/helpers.js generated vendored Normal file
View File

@@ -0,0 +1,50 @@
const { isString, tail } = require('lodash');
// Push a new query onto the compiled "sequence" stack,
// creating a new formatter, returning the compiler.
function pushQuery(query) {
  if (!query) return;
  if (typeof query === 'string' || query instanceof String) {
    query = { sql: query };
  }
  if (!query.bindings) {
    // Adopt the bindings the current formatter accumulated while the caller
    // was building this query's SQL.
    query.bindings = this.formatter.bindings;
  }
  this.sequence.push(query);
  // Start a fresh formatter for the next query.
  this.formatter = this.client.formatter(this._commonBuilder);
}
// Used in cases where we need to push some additional column specific
// statements: runs `fn` against a fresh child compiler and appends the
// child's sequence onto `sequence.additional`.
function pushAdditional(fn) {
  const child = new this.constructor(
    this.client,
    this.tableCompiler,
    this.columnBuilder
  );
  // NOTE(review): the extra arguments are handed to `fn` as a single array
  // (not spread); existing callers appear to pass none — confirm before
  // changing.
  fn.call(child, Array.prototype.slice.call(arguments, 1));
  this.sequence.additional = (this.sequence.additional || []).concat(
    child.sequence
  );
}
// Unshift a new query onto the compiled "sequence" stack,
// creating a new formatter, returning the compiler.
function unshiftQuery(query) {
if (!query) return;
if (isString(query)) {
query = { sql: query };
}
if (!query.bindings) {
query.bindings = this.formatter.bindings;
}
this.sequence.unshift(query);
this.formatter = this.client.formatter(this._commonBuilder);
}
module.exports = {
pushAdditional,
pushQuery,
unshiftQuery,
};

285
node_modules/knex/lib/schema/tablebuilder.js generated vendored Normal file
View File

@@ -0,0 +1,285 @@
// TableBuilder
// Takes the function passed to the "createTable" or "table/editTable"
// functions and calls it with the "TableBuilder" as both the context and
// the first argument. Inside this function we can specify what happens to the
// method, pushing everything we want to do onto the "allStatements" array,
// which is then compiled into sql.
// ------
const { extend, each, toArray, isString, isFunction } = require('lodash');
const helpers = require('../helpers');
// Collects the calls made inside the createTable/alterTable callback as
// "statements", which the dialect's table compiler later turns into SQL.
// Throws immediately when no callback is supplied.
function TableBuilder(client, method, tableName, fn) {
  this.client = client;
  this._fn = fn;
  this._method = method;
  this._schemaName = undefined;
  this._tableName = tableName;
  this._statements = [];
  this._single = {};
  if (typeof this._fn !== 'function') {
    throw new TypeError(
      'A callback function must be supplied to calls against `.createTable` ' +
        'and `.table`'
    );
  }
}
Object.assign(TableBuilder.prototype, {
  // Record the schema the table lives in (set by the schema compiler).
  setSchema(schemaName) {
    this._schemaName = schemaName;
  },

  // Convert the current tableBuilder object "toSQL"
  // giving us additional methods if we're altering
  // rather than creating the table.
  toSQL() {
    if (this._method === 'alter') {
      extend(this, AlterMethods);
    }
    // Run the user's callback with the builder as both `this` and argument.
    this._fn.call(this, this);
    return this.client.tableCompiler(this).toSQL();
  },
});
// Each of the index methods can be called individually, with the column name
// to be used, e.g. table.unique('column'); the drop* variants remove keys.
// All of them simply record an "alterTable" statement and chain.
[
  'index',
  'primary',
  'unique',
  // Key specific
  'dropPrimary',
  'dropUnique',
  'dropIndex',
  'dropForeign',
].forEach(function(method) {
  TableBuilder.prototype[method] = function(...args) {
    this._statements.push({
      grouping: 'alterTable',
      method,
      args,
    });
    return this;
  };
});
// Warn for dialect-specific table methods, since that's the
// only time these are supported.
const specialMethods = {
  mysql: ['engine', 'charset', 'collate'],
  postgresql: ['inherits'],
};
for (const [dialect, methods] of Object.entries(specialMethods)) {
  for (const method of methods) {
    TableBuilder.prototype[method] = function(value) {
      if (this.client.dialect !== dialect) {
        throw new Error(
          `Knex only supports ${method} statement with ${dialect}.`
        );
      }
      if (this._method === 'alter') {
        throw new Error(
          `Knex does not support altering the ${method} outside of create ` +
            `table, please use knex.raw statement.`
        );
      }
      // Table-level settings are stored once on "_single".
      this._single[method] = value;
    };
  }
}
helpers.addQueryContext(TableBuilder);

// Each of the column types that we can add, we create a new ColumnBuilder
// instance and push it onto the statements array.
const columnTypes = [
  // Numeric
  'tinyint',
  'smallint',
  'mediumint',
  'int',
  'bigint',
  'decimal',
  'float',
  'double',
  'real',
  'bit',
  'boolean',
  'serial',
  // Date / Time
  'date',
  'datetime',
  'timestamp',
  'time',
  'year',
  // String
  'char',
  'varchar',
  'tinytext',
  'tinyText',
  'text',
  'mediumtext',
  'mediumText',
  'longtext',
  'longText',
  'binary',
  'varbinary',
  'tinyblob',
  'tinyBlob',
  'mediumblob',
  'mediumBlob',
  'blob',
  'longblob',
  'longBlob',
  'enum',
  'set',
  // Increments, Aliases, and Additional
  'bool',
  'dateTime',
  'increments',
  'bigincrements',
  'bigIncrements',
  'integer',
  'biginteger',
  'bigInteger',
  'string',
  'json',
  'jsonb',
  'uuid',
  'enu',
  'specificType',
];

// For each type, attach a method that creates a ColumnBuilder, records it
// under the "columns" grouping, and returns it so indexes/modifiers can be
// chained onto the column.
columnTypes.forEach(function(type) {
  TableBuilder.prototype[type] = function(...args) {
    const builder = this.client.columnBuilder(this, type, args);
    this._statements.push({
      grouping: 'columns',
      builder,
    });
    return builder;
  };
});
Object.assign(TableBuilder.prototype, {
  // The "timestamps" call is really just sets the `created_at` and
  // `updated_at` columns. timestamps(true, ...) uses the sql "timestamp"
  // type instead of "datetime"; timestamps(..., true) additionally makes
  // both columns not-null with a CURRENT_TIMESTAMP default.
  timestamps() {
    const useTimestampType = arguments[0] === true;
    const defaultToNow = arguments[1] === true;
    const method = useTimestampType ? 'timestamp' : 'datetime';
    const createdAt = this[method]('created_at');
    const updatedAt = this[method]('updated_at');
    if (defaultToNow) {
      const now = this.client.raw('CURRENT_TIMESTAMP');
      createdAt.notNullable().defaultTo(now);
      updatedAt.notNullable().defaultTo(now);
    }
    return;
  },

  // Set the comment value for a table, they're only allowed to be called
  // once per table.
  comment(value) {
    if (typeof value !== 'string') {
      throw new TypeError('Table comment must be string');
    }
    this._single.comment = value;
  },
});
// Set a foreign key on the table, calling
// `table.foreign('column_name').references('column').on('table').onDelete()...
// Also called from the ColumnBuilder context when chaining.
TableBuilder.prototype.foreign = function(column, keyName) {
  const foreignData = { column: column, keyName: keyName };
  // The statement is pushed immediately; the chainable methods below keep
  // mutating this same `foreignData` record afterwards.
  this._statements.push({
    grouping: 'alterTable',
    method: 'foreign',
    args: [foreignData],
  });
  let returnObj = {
    references(tableColumn) {
      let pieces;
      if (isString(tableColumn)) {
        pieces = tableColumn.split('.');
      }
      // Bare column name: the target table must follow via .on()/.inTable().
      if (!pieces || pieces.length === 1) {
        foreignData.references = pieces ? pieces[0] : tableColumn;
        return {
          on(tableName) {
            if (typeof tableName !== 'string') {
              throw new TypeError(
                `Expected tableName to be a string, got: ${typeof tableName}`
              );
            }
            foreignData.inTable = tableName;
            return returnObj;
          },
          inTable() {
            return this.on.apply(this, arguments);
          },
        };
      }
      // "table.column" form: both parts are known immediately.
      foreignData.inTable = pieces[0];
      foreignData.references = pieces[1];
      return returnObj;
    },
    withKeyName(keyName) {
      foreignData.keyName = keyName;
      return returnObj;
    },
    onUpdate(statement) {
      foreignData.onUpdate = statement;
      return returnObj;
    },
    onDelete(statement) {
      foreignData.onDelete = statement;
      return returnObj;
    },
    // Used by ColumnBuilder.references: grafts the chain methods onto the
    // column builder so the column itself stays chainable afterwards.
    _columnBuilder(builder) {
      extend(builder, returnObj);
      returnObj = builder;
      return builder;
    },
  };
  return returnObj;
};
// Methods mixed into the table builder only when altering an existing table
// (see toSQL above).
const AlterMethods = {
  // Renames the current column `from` the current
  // TODO: this.column(from).rename(to)
  renameColumn(from, to) {
    this._statements.push({
      grouping: 'alterTable',
      method: 'renameColumn',
      args: [from, to],
    });
    return this;
  },

  dropTimestamps() {
    return this.dropColumns(['created_at', 'updated_at']);
  },

  // TODO: changeType
};

// Drop a column from the current table.
// TODO: Enable this.column(columnName).drop();
AlterMethods.dropColumn = AlterMethods.dropColumns = function(...columns) {
  this._statements.push({
    grouping: 'alterTable',
    method: 'dropColumn',
    args: columns,
  });
  return this;
};
module.exports = TableBuilder;

306
node_modules/knex/lib/schema/tablecompiler.js generated vendored Normal file
View File

@@ -0,0 +1,306 @@
/* eslint max-len:0 */
// Table Compiler
// -------
const { pushAdditional, pushQuery, unshiftQuery } = require('./helpers');
const helpers = require('../helpers');
const {
groupBy,
reduce,
map,
first,
tail,
isEmpty,
indexOf,
isArray,
isUndefined,
} = require('lodash');
// Turns the statements recorded on a TableBuilder into the create/alter
// table query sequence for this dialect.
function TableCompiler(client, tableBuilder) {
  const { config } = client;
  Object.assign(this, {
    client,
    tableBuilder,
    _commonBuilder: tableBuilder,
    method: tableBuilder._method,
    schemaNameRaw: tableBuilder._schemaName,
    tableNameRaw: tableBuilder._tableName,
    single: tableBuilder._single,
    grouped: groupBy(tableBuilder._statements, 'grouping'),
    formatter: client.formatter(tableBuilder),
    sequence: [],
    _formatting: config && config.formatting,
  });
}

TableCompiler.prototype.pushQuery = pushQuery;
TableCompiler.prototype.pushAdditional = pushAdditional;
TableCompiler.prototype.unshiftQuery = unshiftQuery;

// Convert the tableCompiler toSQL: dispatch on the recorded method
// ("create", "createIfNot", or "alter") and return the query sequence.
TableCompiler.prototype.toSQL = function() {
  const compile = this[this.method];
  compile.call(this);
  return this.sequence;
};

// The base compiler emits lowercase SQL keywords; dialects may override.
TableCompiler.prototype.lowerCase = true;
// Column Compilation
// -------
// If this is a table "creation", we need to first run through all
// of the columns to build them into a single string,
// and then run through anything else and push it to the query sequence.
TableCompiler.prototype.createAlterTableMethods = null;
TableCompiler.prototype.create = function(ifNot) {
  const columnBuilders = this.getColumns();
  const columns = columnBuilders.map((col) => col.toSQL());
  const columnTypes = this.getColumnTypes(columns);
  // Dialects listing createAlterTableMethods fold those alter-table
  // statements (e.g. foreign keys) directly into the create statement.
  if (this.createAlterTableMethods) {
    this.alterTableForCreate(columnTypes);
  }
  this.createQuery(columnTypes, ifNot);
  this.columnQueries(columns);
  // NOTE(review): the comment is removed so alterTable() below does not emit
  // it — presumably the dialect's createQuery already rendered it; confirm.
  delete this.single.comment;
  this.alterTable();
};
// Only create the table if it doesn't exist.
TableCompiler.prototype.createIfNot = function() {
  this.create(true);
};
// If we're altering the table, we need to one-by-one
// go through and handle each of the queries associated
// with altering the table's schema.
TableCompiler.prototype.alter = function() {
  const addColBuilders = this.getColumns();
  const addColumns = addColBuilders.map((col) => col.toSQL());
  const alterColBuilders = this.getColumns('alter');
  const alterColumns = alterColBuilders.map((col) => col.toSQL());
  const addColumnTypes = this.getColumnTypes(addColumns);
  const alterColumnTypes = this.getColumnTypes(alterColumns);
  this.addColumns(addColumnTypes);
  this.alterColumns(alterColumnTypes, alterColBuilders);
  this.columnQueries(addColumns);
  this.columnQueries(alterColumns);
  this.alterTable();
};
// Emit the "foreign key" constraint clause for `foreignData`, either embedded
// in a create-table statement (this.forCreate) or as a standalone
// "alter table ... add constraint". A clause is only produced once both the
// target table and the referenced column(s) are known.
TableCompiler.prototype.foreign = function(foreignData) {
  if (!foreignData.inTable || !foreignData.references) return;
  const keyName = foreignData.keyName
    ? this.formatter.wrap(foreignData.keyName)
    : this._indexCommand('foreign', this.tableNameRaw, foreignData.column);
  const column = this.formatter.columnize(foreignData.column);
  const references = this.formatter.columnize(foreignData.references);
  const inTable = this.formatter.wrap(foreignData.inTable);
  const lc = this.lowerCase;
  const onUpdate = foreignData.onUpdate
    ? (lc ? ' on update ' : ' ON UPDATE ') + foreignData.onUpdate
    : '';
  const onDelete = foreignData.onDelete
    ? (lc ? ' on delete ' : ' ON DELETE ') + foreignData.onDelete
    : '';
  // Inside a create-table statement the clause is embedded, so the
  // "alter table ... add " prefix is omitted.
  const prefix = this.forCreate
    ? ''
    : lc
    ? `alter table ${this.tableName()} add `
    : `ALTER TABLE ${this.tableName()} ADD `;
  const clause = lc
    ? `constraint ${keyName} foreign key (${column}) references ${inTable} (${references})`
    : `CONSTRAINT ${keyName} FOREIGN KEY (${column}) REFERENCES ${inTable} (${references})`;
  this.pushQuery(prefix + clause + onUpdate + onDelete);
};
// Get all of the column sql & bindings individually for building the table
// queries: collects the first (definition) entry of each compiled column into
// a single { sql: [...], bindings: [...] } pair.
//
// Fix: the previous code called `memo.bindings.concat(column.bindings)` and
// discarded the result — Array.prototype.concat does not mutate its receiver,
// so every column's bindings were silently dropped. The result is now
// assigned back so bindings accumulate as intended.
TableCompiler.prototype.getColumnTypes = (columns) =>
  columns.reduce(
    (memo, columnSQL) => {
      // Each compiled column is a sequence; entry 0 is the definition itself.
      const column = columnSQL[0];
      memo.sql.push(column.sql);
      memo.bindings = memo.bindings.concat(column.bindings);
      return memo;
    },
    { sql: [], bindings: [] }
  );
// Adds all of the additional queries from the "column": everything after the
// first entry of a compiled column sequence (comments, indexes, ...) is
// pushed as its own statement.
TableCompiler.prototype.columnQueries = function(columns) {
  for (const columnSQL of columns) {
    for (const extra of columnSQL.slice(1)) {
      this.pushQuery(extra);
    }
  }
};

// Add a new column.
TableCompiler.prototype.addColumnsPrefix = 'add column ';

// All of the columns to "add" for the query: one "alter table" statement with
// each column definition prefixed (default "add column ").
TableCompiler.prototype.addColumns = function(columns, prefix) {
  if (columns.sql.length === 0) return;
  const columnPrefix = prefix || this.addColumnsPrefix;
  const statements = columns.sql.map((column) => columnPrefix + column);
  const alterKeyword = this.lowerCase ? 'alter table ' : 'ALTER TABLE ';
  this.pushQuery({
    sql: alterKeyword + this.tableName() + ' ' + statements.join(', '),
    bindings: columns.bindings,
  });
};
// Alter column
TableCompiler.prototype.alterColumnsPrefix = 'alter column ';
TableCompiler.prototype.alterColumns = function(columns, colBuilders) {
if (columns.sql.length > 0) {
this.addColumns(columns, this.alterColumnsPrefix, colBuilders);
}
};
// Compile the columns as needed for the current create or alter table,
// returning one column compiler per column whose builder matches `method`
// ('add' by default).
TableCompiler.prototype.getColumns = function(method) {
  const grouped = this.grouped.columns || [];
  method = method || 'add';
  const queryContext = this.tableBuilder.queryContext();
  const compilers = [];
  for (const column of grouped) {
    if (column.builder._method !== method) {
      continue;
    }
    // pass queryContext down to columnBuilder but do not overwrite it if already set
    if (
      !isUndefined(queryContext) &&
      isUndefined(column.builder.queryContext())
    ) {
      column.builder.queryContext(queryContext);
    }
    compilers.push(this.client.columnCompiler(this, column.builder));
  }
  return compilers;
};
// Fully-qualified, wrapped table name ("schema.table" when a schema is set).
TableCompiler.prototype.tableName = function() {
  let name = this.tableNameRaw;
  if (this.schemaNameRaw) {
    name = `${this.schemaNameRaw}.${this.tableNameRaw}`;
  }
  return this.formatter.wrap(name);
};
// Generate all of the alter column statements necessary for the query:
// first every queued alter-table statement, then every `single` entry that
// maps onto a compiler method.
TableCompiler.prototype.alterTable = function() {
  const statements = this.grouped.alterTable || [];
  for (const statement of statements) {
    const handler = this[statement.method];
    if (handler) {
      handler.apply(this, statement.args);
    } else {
      this.client.logger.error(`Debug: ${statement.method} does not exist`);
    }
  }
  for (const key in this.single) {
    if (typeof this[key] === 'function') this[key](this.single[key]);
  }
};
// Folds alter-table statements that the dialect supports inline (listed in
// `createAlterTableMethods`) into the CREATE TABLE column list, leaving the
// rest queued on `grouped.alterTable` for the normal alterTable pass.
// Temporarily swaps out `this.sequence` so each method's single generated
// statement can be captured into `columnTypes.sql` instead of being pushed
// as a standalone query.
TableCompiler.prototype.alterTableForCreate = function(columnTypes) {
  this.forCreate = true;
  const savedSequence = this.sequence;
  const alterTable = this.grouped.alterTable || [];
  this.grouped.alterTable = [];
  for (let i = 0, l = alterTable.length; i < l; i++) {
    const statement = alterTable[i];
    if (indexOf(this.createAlterTableMethods, statement.method) < 0) {
      // Not supported inline in CREATE TABLE; defer to the alter pass.
      this.grouped.alterTable.push(statement);
      continue;
    }
    if (this[statement.method]) {
      this.sequence = [];
      this[statement.method].apply(this, statement.args);
      // NOTE(review): assumes the method pushed exactly one statement;
      // anything past sequence[0] would be dropped here.
      columnTypes.sql.push(this.sequence[0].sql);
    } else {
      this.client.logger.error(`Debug: ${statement.method} does not exist`);
    }
  }
  this.sequence = savedSequence;
  this.forCreate = false;
};
// Drop the index on the current table. (Dialects typically override this;
// note `value` must carry its own leading separator/wrapping.)
TableCompiler.prototype.dropIndex = function(value) {
  this.pushQuery('drop index' + value);
};
// Drop the unique
// Both base implementations are dialect responsibilities; share one thrower.
TableCompiler.prototype.dropUnique = function() {
  throw new Error('Method implemented in the dialect driver');
};
TableCompiler.prototype.dropForeign = TableCompiler.prototype.dropUnique;
TableCompiler.prototype.dropColumnPrefix = 'drop column ';
// Drops one or more columns in a single ALTER TABLE; accepts either a list
// of names or a single array argument (normalized via helpers.normalizeArr).
TableCompiler.prototype.dropColumn = function(...args) {
  const columns = helpers.normalizeArr(...args);
  const list = isArray(columns) ? columns : [columns];
  const drops = list.map(
    (column) => this.dropColumnPrefix + this.formatter.wrap(column)
  );
  const keyword = this.lowerCase ? 'alter table ' : 'ALTER TABLE ';
  this.pushQuery(keyword + this.tableName() + ' ' + drops.join(', '));
};
// If no name was specified for this index, we will create one using a basic
// convention of the table name, followed by the columns, followed by an
// index type, such as primary or index, which makes the index unique.
TableCompiler.prototype._indexCommand = function(type, tableName, columns) {
  if (!isArray(columns)) columns = columns ? [columns] : [];
  // Dots and dashes in the (possibly schema-qualified) name become underscores.
  const table = tableName.replace(/\.|-/g, '_');
  const indexName = `${table}_${columns.join('_')}_${type}`.toLowerCase();
  return this.formatter.wrap(indexName);
};
module.exports = TableCompiler;

163
node_modules/knex/lib/seed/Seeder.js generated vendored Normal file
View File

@@ -0,0 +1,163 @@
// Seeder
// -------
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const mkdirp = require('mkdirp');
const { filter, includes, extend } = require('lodash');
const { writeJsFileUsingTemplate } = require('../util/template');
// The new seeds we're performing, typically called from the `knex.seed`
// interface on the main `knex` object. Passes the `knex` instance performing
// the seeds.
class Seeder {
  // `knex` is the instance the seeds run against; initial config is read
  // from `knex.client.config.seeds` merged over the defaults in setConfig.
  constructor(knex) {
    this.knex = knex;
    this.config = this.setConfig(knex.client.config.seeds);
  }
  // Runs seed files for the given environment.
  // When `config.specific` is set, only the file with that exact name runs.
  // Resolves to `[log]` — an array wrapping the list of executed seed paths.
  async run(config) {
    this.config = this.setConfig(config);
    const all = await this._listAll();
    const files =
      config && config.specific
        ? all.filter((file) => file === config.specific)
        : all;
    return this._runSeeds(files);
  }
  // Creates a new seed file, with a given name.
  // Throws when `name` is falsy; resolves to the path of the written file.
  async make(name, config) {
    this.config = this.setConfig(config);
    if (!name)
      throw new Error('A name must be specified for the generated seed');
    await this._ensureFolder(config);
    const seedPath = await this._writeNewSeed(name);
    return seedPath;
  }
  // Lists all available seed files as a sorted array.
  // Only files whose extension appears in `config.loadExtensions` are kept.
  async _listAll(config) {
    this.config = this.setConfig(config);
    const loadExtensions = this.config.loadExtensions;
    return promisify(fs.readdir)(this._absoluteConfigDir()).then((seeds) =>
      filter(seeds, (value) => {
        const extension = path.extname(value);
        return includes(loadExtensions, extension);
      }).sort()
    );
  }
  // Ensures a folder for the seeds exist, dependent on the
  // seed config settings. Creates it (mkdirp) when the stat call fails.
  async _ensureFolder() {
    const dir = this._absoluteConfigDir();
    try {
      await promisify(fs.stat)(dir);
    } catch (e) {
      await promisify(mkdirp)(dir);
    }
  }
  // Run seed files, in sequence. Validates every file up front so a
  // malformed seed fails fast before any seed executes.
  _runSeeds(seeds) {
    seeds.forEach((seed) => this._validateSeedStructure(seed));
    return this._waterfallBatch(seeds);
  }
  // Validates seed files by requiring and checking for a `seed` function.
  _validateSeedStructure(name) {
    const seed = require(path.join(this._absoluteConfigDir(), name));
    if (typeof seed.seed !== 'function') {
      throw new Error(`Invalid seed file: ${name} must have a seed function`);
    }
    return name;
  }
  // Stub template path: an explicit `config.stub` wins, otherwise the
  // bundled stub matching the configured extension is used.
  _getStubPath() {
    return (
      this.config.stub ||
      path.join(__dirname, 'stub', this.config.extension + '.stub')
    );
  }
  // Builds the new seed's filename; a single leading '-' is stripped.
  _getNewStubFileName(name) {
    if (name[0] === '-') name = name.slice(1);
    return name + '.' + this.config.extension;
  }
  // Absolute destination path for a newly generated seed file.
  _getNewStubFilePath(name) {
    return path.join(this._absoluteConfigDir(), this._getNewStubFileName(name));
  }
  // Write a new seed to disk, using the config and generated filename,
  // passing any `variables` given in the config to the template.
  async _writeNewSeed(name) {
    const seedPath = this._getNewStubFilePath(name);
    await writeJsFileUsingTemplate(
      seedPath,
      this._getStubPath(),
      { variable: 'd' },
      this.config.variables || {}
    );
    return seedPath;
  }
  // Runs a batch of seed files sequentially (never in parallel).
  // On failure, rethrows a wrapper error with the offending seed path in the
  // message, the original stack appended, and `.original` set to the cause.
  async _waterfallBatch(seeds) {
    const { knex } = this;
    const seedDirectory = this._absoluteConfigDir();
    const log = [];
    for (const seedName of seeds) {
      const seedPath = path.join(seedDirectory, seedName);
      const seed = require(seedPath);
      try {
        await seed.seed(knex);
        log.push(seedPath);
      } catch (originalError) {
        const error = new Error(
          `Error while executing "${seedPath}" seed: ${originalError.message}`
        );
        error.original = originalError;
        // Keep only the wrapper's first two stack lines (message + frame),
        // then splice in the original error's stack for real context.
        error.stack =
          error.stack
            .split('\n')
            .slice(0, 2)
            .join('\n') +
          '\n' +
          originalError.stack;
        throw error;
      }
    }
    return [log];
  }
  // Seed directory resolved against the current working directory.
  _absoluteConfigDir() {
    return path.resolve(process.cwd(), this.config.directory);
  }
  // Merges `config` over any previously-set config and the defaults below.
  setConfig(config) {
    return extend(
      {
        extension: 'js',
        directory: './seeds',
        loadExtensions: [
          '.co',
          '.coffee',
          '.eg',
          '.iced',
          '.js',
          '.litcoffee',
          '.ls',
          '.ts',
        ],
      },
      this.config || {},
      config
    );
  }
}
module.exports = Seeder;

13
node_modules/knex/lib/seed/seed-stub.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
// Stub Seed:
// Used for now in browser builds, where filesystem access isn't
// available.
const noSuchMethod = async () => {
  throw new Error('Seeds are not supported');
};
const StubSeed = (module.exports = function() {});
StubSeed.prototype = {
  make: noSuchMethod,
  run: noSuchMethod,
};

9
node_modules/knex/lib/seed/stub/coffee.stub generated vendored Normal file
View File

@@ -0,0 +1,9 @@
exports.seed = (knex) ->
  # Deletes ALL existing entries
  knex('table_name').del()
    .then () ->
      # Inserts seed entries
      knex('table_name').insert([
        {id: 1, colName: 'rowValue'}
        {id: 2, colName: 'rowValue2'}
        {id: 3, colName: 'rowValue3'}
      ])

11
node_modules/knex/lib/seed/stub/eg.stub generated vendored Normal file
View File

@@ -0,0 +1,11 @@
;; Seed stub (Earl Grey): clears table_name, then inserts sample rows.
provide: seed
seed = (knex) ->
   ;; Deletes ALL existing entries
   knex(.table_name).del()
   .then(() ->
      ;; Inserts seed entries
      knex(.table_name).insert with [
         { id = 1, col-name = .row-value-1 }
         { id = 2, col-name = .row-value-2 }
         { id = 3, col-name = .row-value-3 }
      ]

13
node_modules/knex/lib/seed/stub/js.stub generated vendored Normal file
View File

@@ -0,0 +1,13 @@
exports.seed = function(knex) {
// Deletes ALL existing entries
return knex('table_name').del()
.then(function () {
// Inserts seed entries
return knex('table_name').insert([
{id: 1, colName: 'rowValue1'},
{id: 2, colName: 'rowValue2'},
{id: 3, colName: 'rowValue3'}
]);
});
};

11
node_modules/knex/lib/seed/stub/ls.stub generated vendored Normal file
View File

@@ -0,0 +1,11 @@
# Seed stub (LiveScript): clears table_name, then inserts sample rows.
exports.seed = (knex) ->
  # Deletes ALL existing entries
  knex('table_name').del()
    .then(() ->
      # Inserts seed entries
      knex('table_name').insert([
        {id: 1, colName: 'rowValue1'},
        {id: 2, colName: 'rowValue2'},
        {id: 3, colName: 'rowValue3'}
      ])
    )

14
node_modules/knex/lib/seed/stub/ts.stub generated vendored Normal file
View File

@@ -0,0 +1,14 @@
import * as Knex from "knex";

// Fix: removed the stray `;` after the function declaration body — it was an
// empty statement (ESLint no-extra-semi) copied into every generated TS seed.
export async function seed(knex: Knex): Promise<any> {
    // Deletes ALL existing entries
    return knex("table_name").del()
        .then(() => {
            // Inserts seed entries
            return knex("table_name").insert([
                { id: 1, colName: "rowValue1" },
                { id: 2, colName: "rowValue2" },
                { id: 3, colName: "rowValue3" }
            ]);
        });
}

397
node_modules/knex/lib/transaction.js generated vendored Normal file
View File

@@ -0,0 +1,397 @@
// Transaction
// -------
const Bluebird = require('bluebird');
const { EventEmitter } = require('events');
const Debug = require('debug');
const makeKnex = require('./util/make-knex');
const debug = Debug('knex:tx');
const { uniqueId, isUndefined } = require('lodash');
// Acts as a facade for a Promise, keeping the internal state
// and managing any child transactions.
class Transaction extends EventEmitter {
  // client    - dialect client used to acquire connections and run queries
  // container - optional callback that receives the transactor knex object;
  //             when omitted, the transactor is delivered via `initPromise`
  // config    - may supply `connection` and `doNotRejectOnRollback`
  // outerTx   - parent Transaction when this is a nested transaction
  constructor(client, container, config, outerTx) {
    super();
    const txid = (this.txid = uniqueId('trx'));
    // If there is no container provided, assume user wants to get instance of transaction and use it directly
    if (!container) {
      // Default behaviour for new style of transactions is not to reject on rollback
      if (!config || isUndefined(config.doNotRejectOnRollback)) {
        this.doNotRejectOnRollback = true;
      } else {
        this.doNotRejectOnRollback = config.doNotRejectOnRollback;
      }
      this.initPromise = new Promise((resolve, reject) => {
        this.initRejectFn = reject;
        container = (transactor) => {
          resolve(transactor);
        };
      });
    } else {
      // Default behaviour for old style of transactions is to reject on rollback
      if (!config || isUndefined(config.doNotRejectOnRollback)) {
        this.doNotRejectOnRollback = false;
      } else {
        this.doNotRejectOnRollback = config.doNotRejectOnRollback;
      }
    }
    this.client = client;
    this.logger = client.logger;
    this.outerTx = outerTx;
    this.trxClient = undefined;
    this._debug = client.config && client.config.debug;
    debug(
      '%s: Starting %s transaction',
      txid,
      outerTx ? 'nested' : 'top level'
    );
    // Acquire a connection, issue BEGIN (or SAVEPOINT when already inside a
    // transaction), then run the container. `executionPromise` settles when
    // the transaction commits or rolls back via _resolver/_rejecter.
    this._promise = this.acquireConnection(config, (connection) => {
      const trxClient = (this.trxClient = makeTxClient(
        this,
        client,
        connection
      ));
      const init = client.transacting
        ? this.savepoint(connection)
        : this.begin(connection);
      const executionPromise = new Bluebird((resolver, rejecter) => {
        this._resolver = resolver;
        this._rejecter = rejecter;
      });
      init
        .then(() => {
          return makeTransactor(this, connection, trxClient);
        })
        .then((transactor) => {
          // In container-less mode, re-wrap so rejection propagates to the
          // promise handed to the user via `executionPromise`.
          if (this.initPromise) {
            transactor.executionPromise = executionPromise.catch((err) => {
              throw err;
            });
          } else {
            transactor.executionPromise = executionPromise;
          }
          // If we've returned a "thenable" from the transaction container, assume
          // the rollback and commit are chained to this object's success / failure.
          // Directly thrown errors are treated as automatic rollbacks.
          let result;
          try {
            result = container(transactor);
          } catch (err) {
            result = Bluebird.reject(err);
          }
          if (result && result.then && typeof result.then === 'function') {
            result
              .then((val) => {
                return transactor.commit(val);
              })
              .catch((err) => {
                return transactor.rollback(err);
              });
          }
          return null;
        })
        .catch((e) => {
          return this._rejecter(e);
        });
      return executionPromise;
    }).catch((err) => {
      if (this.initRejectFn) {
        this.initRejectFn(err);
      } else {
        throw err;
      }
    });
    this._completed = false;
    // If there's a wrapping transaction, we need to wait for any older sibling
    // transactions to settle (commit or rollback) before we can start, and we
    // need to register ourselves with the parent transaction so any younger
    // siblings can wait for us to complete before they can start.
    this._previousSibling = Bluebird.resolve(true);
    if (outerTx) {
      if (outerTx._lastChild) this._previousSibling = outerTx._lastChild;
      outerTx._lastChild = this._promise;
    }
  }
  // A transaction counts as completed once it (or any enclosing
  // transaction) has committed or rolled back.
  isCompleted() {
    return (
      this._completed || (this.outerTx && this.outerTx.isCompleted()) || false
    );
  }
  begin(conn) {
    return this.query(conn, 'BEGIN;');
  }
  // Nested transactions use savepoints named after the transaction id.
  savepoint(conn) {
    return this.query(conn, `SAVEPOINT ${this.txid};`);
  }
  // status 1 => resolve the transaction with `value` (see query()).
  commit(conn, value) {
    return this.query(conn, 'COMMIT;', 1, value);
  }
  release(conn, value) {
    return this.query(conn, `RELEASE SAVEPOINT ${this.txid};`, 1, value);
  }
  // status 2 => reject with `error`. If the ROLLBACK statement does not
  // finish within 5s, reject with the original error anyway.
  rollback(conn, error) {
    return this.query(conn, 'ROLLBACK', 2, error)
      .timeout(5000)
      .catch(Bluebird.TimeoutError, () => {
        this._rejecter(error);
      });
  }
  rollbackTo(conn, error) {
    return this.query(conn, `ROLLBACK TO SAVEPOINT ${this.txid}`, 2, error)
      .timeout(5000)
      .catch(Bluebird.TimeoutError, () => {
        this._rejecter(error);
      });
  }
  // Runs `sql` on the transaction's connection. `status` marks terminal
  // queries: 1 resolves the transaction with `value`, 2 rejects it. A query
  // error forces status 2 and marks the transaction completed.
  query(conn, sql, status, value) {
    const q = this.trxClient
      .query(conn, sql)
      .catch((err) => {
        status = 2;
        value = err;
        this._completed = true;
        debug('%s error running transaction query', this.txid);
      })
      .then((res) => {
        if (status === 1) {
          this._resolver(value);
        }
        if (status === 2) {
          if (isUndefined(value)) {
            // Rollback without an explicit error: either resolve quietly
            // (new-style behavior) or reject with a synthetic error.
            if (this.doNotRejectOnRollback && /^ROLLBACK\b/i.test(sql)) {
              this._resolver();
              return;
            }
            value = new Error(`Transaction rejected with non-error: ${value}`);
          }
          this._rejecter(value);
        }
        return res;
      });
    if (status === 1 || status === 2) {
      this._completed = true;
    }
    return q;
  }
  // Toggle per-transaction query debugging (defaults to on when called
  // without arguments).
  debug(enabled) {
    this._debug = arguments.length ? enabled : true;
    return this;
  }
  // Acquire a connection and create a disposer - either using the one passed
  // via config or getting one off the client. The disposer will be called once
  // the original promise is marked completed.
  acquireConnection(config, cb) {
    const configConnection = config && config.connection;
    return new Bluebird((resolve, reject) => {
      try {
        resolve(configConnection || this.client.acquireConnection());
      } catch (e) {
        reject(e);
      }
    })
      .then((connection) => {
        connection.__knexTxId = this.txid;
        // Wait for the previous sibling (if any) to settle first; a sibling
        // failure must not block this transaction from starting.
        return (this._previousSibling
          ? this._previousSibling.catch(() => {})
          : Promise.resolve()
        ).then(function() {
          return connection;
        });
      })
      .then(async (connection) => {
        try {
          return await cb(connection);
        } finally {
          // Only release connections acquired here; an externally supplied
          // connection stays under the caller's control.
          if (!configConnection) {
            debug('%s: releasing connection', this.txid);
            this.client.releaseConnection(connection);
          } else {
            debug('%s: not releasing external connection', this.txid);
          }
        }
      });
  }
}
// The transactor is a full featured knex object, with a "commit", a "rollback"
// and a "savepoint" function. The "savepoint" is just sugar for creating a new
// transaction. If the rollback is run inside a savepoint, it rolls back to the
// last savepoint - otherwise it rolls back the transaction.
function makeTransactor(trx, connection, trxClient) {
  const transactor = makeKnex(trxClient);
  // User params can only be inherited from the parent knex instance.
  transactor.withUserParams = () => {
    throw new Error(
      'Cannot set user params on a transaction - it can only inherit params from main knex instance'
    );
  };
  transactor.isTransaction = true;
  transactor.userParams = trx.userParams || {};
  // Nested transactions default to NOT rejecting on rollback; when no
  // container is given, the nested transactor is returned via a promise.
  transactor.transaction = function(container, options) {
    if (!options) {
      options = { doNotRejectOnRollback: true };
    } else if (isUndefined(options.doNotRejectOnRollback)) {
      options.doNotRejectOnRollback = true;
    }
    if (container) {
      return trxClient.transaction(container, options, trx);
    } else {
      return new Promise((resolve, _reject) => {
        trxClient.transaction(
          (nestedTrx) => {
            resolve(nestedTrx);
          },
          options,
          trx
        );
      });
    }
  };
  transactor.savepoint = function(container, options) {
    return transactor.transaction(container, options);
  };
  // Inside a savepoint, commit/rollback map to RELEASE / ROLLBACK TO
  // SAVEPOINT; at the top level they map to COMMIT / ROLLBACK.
  if (trx.client.transacting) {
    transactor.commit = (value) => trx.release(connection, value);
    transactor.rollback = (error) => trx.rollbackTo(connection, error);
  } else {
    transactor.commit = (value) => trx.commit(connection, value);
    transactor.rollback = (error) => trx.rollback(connection, error);
  }
  transactor.isCompleted = () => trx.isCompleted();
  return transactor;
}
// We need to make a client object which always acquires the same
// connection and does not release back into the pool.
function makeTxClient(trx, client, connection) {
  const trxClient = Object.create(client.constructor.prototype);
  trxClient.version = client.version;
  trxClient.config = client.config;
  trxClient.driver = client.driver;
  trxClient.connectionSettings = client.connectionSettings;
  trxClient.transacting = true;
  trxClient.valueForUndefined = client.valueForUndefined;
  trxClient.logger = client.logger;
  // Re-emit query lifecycle events on both the transaction and the original
  // client so listeners registered on either keep working.
  trxClient.on('query', function(arg) {
    trx.emit('query', arg);
    client.emit('query', arg);
  });
  trxClient.on('query-error', function(err, obj) {
    trx.emit('query-error', err, obj);
    client.emit('query-error', err, obj);
  });
  trxClient.on('query-response', function(response, obj, builder) {
    trx.emit('query-response', response, obj, builder);
    client.emit('query-response', response, obj, builder);
  });
  // Wrap query/stream to enforce that only the transaction's own connection
  // is used and that nothing runs after the transaction has completed.
  const _query = trxClient.query;
  trxClient.query = function(conn, obj) {
    const completed = trx.isCompleted();
    return new Bluebird(function(resolve, reject) {
      try {
        if (conn !== connection)
          throw new Error('Invalid connection for transaction query.');
        if (completed) completedError(trx, obj);
        resolve(_query.call(trxClient, conn, obj));
      } catch (e) {
        reject(e);
      }
    });
  };
  const _stream = trxClient.stream;
  trxClient.stream = function(conn, obj, stream, options) {
    const completed = trx.isCompleted();
    return new Bluebird(function(resolve, reject) {
      try {
        if (conn !== connection)
          throw new Error('Invalid connection for transaction query.');
        if (completed) completedError(trx, obj);
        resolve(_stream.call(trxClient, conn, obj, stream, options));
      } catch (e) {
        reject(e);
      }
    });
  };
  // Pin the pool interface to the single transaction connection: acquiring
  // always yields it, releasing is a no-op.
  trxClient.acquireConnection = function() {
    return Bluebird.resolve(connection);
  };
  trxClient.releaseConnection = function() {
    return Bluebird.resolve();
  };
  return trxClient;
}
// Log the offending SQL, then raise: the transaction has already settled, so
// no further queries may run on it.
function completedError(trx, obj) {
  let sql = obj;
  if (typeof obj !== 'string') {
    sql = obj && obj.sql;
  }
  debug('%s: Transaction completed: %s', trx.txid, sql);
  throw new Error(
    'Transaction query already complete, run with DEBUG=knex:tx for more info'
  );
}
// Bluebird/promise-interface methods exposed on Transaction itself.
const promiseInterface = [
  'then',
  'bind',
  'catch',
  'finally',
  'asCallback',
  'spread',
  'map',
  'reduce',
  'thenReturn',
  'return',
  'yield',
  'ensure',
  'exec',
  'reflect',
  'get',
  'mapSeries',
  'delay',
];
// Creates methods which proxy promise interface methods to
// internal transaction resolution promise
for (const method of promiseInterface) {
  Transaction.prototype[method] = function() {
    return this._promise[method].apply(this._promise, arguments);
  };
}
module.exports = Transaction;

57
node_modules/knex/lib/util/batchInsert.js generated vendored Normal file
View File

@@ -0,0 +1,57 @@
const { isNumber, chunk, flatten } = require('lodash');
const delay = require('./delay');
// Inserts `batch` rows into `tableName` in chunks of `chunkSize`, all inside
// a single transaction (a new one, or one supplied via `.transacting(...)`).
// Returns a promise augmented with chainable `returning`/`transacting`
// helpers; resolves to the flattened per-chunk insert results.
module.exports = function batchInsert(
  client,
  tableName,
  batch,
  chunkSize = 1000
) {
  // State set by the chainable helpers below. They only take effect if
  // called synchronously, because execution starts after `delay(1)`.
  let returning = void 0;
  let transaction = null;
  const runInTransaction = (cb) => {
    if (transaction) {
      return cb(transaction);
    }
    return client.transaction(cb);
  };
  return Object.assign(
    Promise.resolve().then(async () => {
      if (!isNumber(chunkSize) || chunkSize < 1) {
        throw new TypeError(`Invalid chunkSize: ${chunkSize}`);
      }
      if (!Array.isArray(batch)) {
        throw new TypeError(
          `Invalid batch: Expected array, got ${typeof batch}`
        );
      }
      const chunks = chunk(batch, chunkSize);
      //Next tick to ensure wrapper functions are called if needed
      await delay(1);
      return runInTransaction(async (tr) => {
        // Insert chunk-by-chunk sequentially; sharing one transaction means
        // any failure rolls back every chunk.
        const chunksResults = [];
        for (const items of chunks) {
          chunksResults.push(await tr(tableName).insert(items, returning));
        }
        return flatten(chunksResults);
      });
    }),
    {
      // Chainable: columns to return from each insert statement.
      returning(columns) {
        returning = columns;
        return this;
      },
      // Chainable: run inside an existing transaction instead of a new one.
      transacting(tr) {
        transaction = tr;
        return this;
      },
    }
  );
};

Some files were not shown because too many files have changed in this diff Show More