Add initial version of dijkstra backend cloudron image

commit 4f5db9ab26, 2020-10-12 11:27:15 +02:00
4209 changed files with 448228 additions and 0 deletions

node_modules/knex/lib/dialects/mssql/index.js generated vendored Normal file

@@ -0,0 +1,384 @@
// MSSQL Client
// -------
const { map, flatten, values } = require('lodash');
const inherits = require('inherits');
const Client = require('../../client');
const Bluebird = require('bluebird');
const Formatter = require('../../formatter');
const Transaction = require('./transaction');
const QueryCompiler = require('./query/compiler');
const SchemaCompiler = require('./schema/compiler');
const TableCompiler = require('./schema/tablecompiler');
const ColumnCompiler = require('./schema/columncompiler');
const SQL_INT4 = { MIN: -2147483648, MAX: 2147483647 };
const SQL_BIGINT_SAFE = { MIN: -9007199254740991, MAX: 9007199254740991 };
// Always initialize with the "QueryBuilder" and "QueryCompiler" objects, which
// extend the base 'lib/query/builder' and 'lib/query/compiler', respectively.
function Client_MSSQL(config = {}) {
// #1235 mssql module wants 'server', not 'host'. This is to enforce the same
// options object across all dialects.
if (config && config.connection && config.connection.host) {
config.connection.server = config.connection.host;
}
// mssql always creates a pool :( let's try to unpool it as much as possible
this.mssqlPoolSettings = {
min: 1,
max: 1,
idleTimeoutMillis: Number.MAX_SAFE_INTEGER,
evictionRunIntervalMillis: 0,
};
Client.call(this, config);
}
inherits(Client_MSSQL, Client);
Object.assign(Client_MSSQL.prototype, {
dialect: 'mssql',
driverName: 'mssql',
_driver() {
const tds = require('tedious');
const mssqlTedious = require('mssql');
const base = require('mssql/lib/base');
// Monkey-patch the mssql driver's tedious _poolCreate method to fix a problem with
// connection acquisition hanging; this should be removed when https://github.com/tediousjs/node-mssql/pull/614 is
// merged and released.
// Also, since this dialect always uses the tedious driver anyway (the msnodesqlv8 driver would
// have to be required differently), it might be better to use tedious directly: the mssql
// driver always wraps connections in an extra generic-pool internally, which only adds an
// unnecessary layer of indirection between the database and knex, and the mssql driver has
// lately been without a maintainer (switching the implementation to tedious would be a
// breaking change, though).
// TODO: remove the mssql implementation altogether and use tedious directly
/* istanbul ignore next */
const mssqlVersion = require('mssql/package.json').version;
/* istanbul ignore next */
if (mssqlVersion === '4.1.0') {
mssqlTedious.ConnectionPool.prototype.release = release;
mssqlTedious.ConnectionPool.prototype._poolCreate = _poolCreate;
} else {
const [major] = mssqlVersion.split('.');
// if version is not ^5.0.0
if (major < 5) {
throw new Error(
'This knex version only supports mssql driver versions 4.1.0 and 5.0.0+'
);
}
}
/* istanbul ignore next */
// in some rare situations release is called when the stream is interrupted, but
// after the pool has already been destroyed
function release(connection) {
if (this.pool) {
this.pool.release(connection);
}
}
/* istanbul ignore next */
function _poolCreate() {
// implementation is copy-pasted from https://github.com/tediousjs/node-mssql/pull/614
return new base.Promise((resolve, reject) => {
const cfg = {
userName: this.config.user,
password: this.config.password,
server: this.config.server,
options: Object.assign({}, this.config.options),
domain: this.config.domain,
};
cfg.options.database = this.config.database;
cfg.options.port = this.config.port;
cfg.options.connectTimeout =
this.config.connectionTimeout || this.config.timeout || 15000;
cfg.options.requestTimeout =
this.config.requestTimeout != null
? this.config.requestTimeout
: 15000;
cfg.options.tdsVersion = cfg.options.tdsVersion || '7_4';
cfg.options.rowCollectionOnDone = false;
cfg.options.rowCollectionOnRequestCompletion = false;
cfg.options.useColumnNames = false;
cfg.options.appName = cfg.options.appName || 'node-mssql';
// tedious always connects via TCP when a port is specified
if (cfg.options.instanceName) delete cfg.options.port;
if (isNaN(cfg.options.requestTimeout))
cfg.options.requestTimeout = 15000;
if (cfg.options.requestTimeout === Infinity)
cfg.options.requestTimeout = 0;
if (cfg.options.requestTimeout < 0) cfg.options.requestTimeout = 0;
if (this.config.debug) {
cfg.options.debug = {
packet: true,
token: true,
data: true,
payload: true,
};
}
const tedious = new tds.Connection(cfg);
// prevent calling resolve again on end event
let alreadyResolved = false;
function safeResolve(err) {
if (!alreadyResolved) {
alreadyResolved = true;
resolve(err);
}
}
function safeReject(err) {
if (!alreadyResolved) {
alreadyResolved = true;
reject(err);
}
}
tedious.once('end', (evt) => {
safeReject(
new base.ConnectionError(
'Connection ended unexpectedly during connecting'
)
);
});
tedious.once('connect', (err) => {
if (err) {
err = new base.ConnectionError(err);
return safeReject(err);
}
safeResolve(tedious);
});
tedious.on('error', (err) => {
if (err.code === 'ESOCKET') {
tedious.hasError = true;
return;
}
this.emit('error', err);
});
if (this.config.debug) {
tedious.on('debug', this.emit.bind(this, 'debug', tedious));
}
});
}
return mssqlTedious;
},
formatter() {
return new MSSQL_Formatter(this, ...arguments);
},
transaction() {
return new Transaction(this, ...arguments);
},
queryCompiler() {
return new QueryCompiler(this, ...arguments);
},
schemaCompiler() {
return new SchemaCompiler(this, ...arguments);
},
tableCompiler() {
return new TableCompiler(this, ...arguments);
},
columnCompiler() {
return new ColumnCompiler(this, ...arguments);
},
wrapIdentifierImpl(value) {
if (value === '*') {
return '*';
}
return `[${value.replace(/[[\]']+/g, '')}]`;
},
// Get a raw connection, called by the `pool` whenever a new
// connection needs to be added to the pool.
acquireRawConnection() {
return new Bluebird((resolver, rejecter) => {
const settings = Object.assign({}, this.connectionSettings);
settings.pool = this.mssqlPoolSettings;
const connection = new this.driver.ConnectionPool(settings);
connection.connect((err) => {
if (err) {
return rejecter(err);
}
connection.on('error', (err) => {
connection.__knex__disposed = err;
});
resolver(connection);
});
});
},
validateConnection(connection) {
if (connection.connected === true) {
return true;
}
return false;
},
// Used to explicitly close a connection, called internally by the pool
// when a connection times out or the pool is shutdown.
destroyRawConnection(connection) {
return connection.close().catch((err) => {
// sometimes close will reject just because the pool has already been destroyed
// internally by the driver; there is nothing we can do in this case
});
},
// Position the bindings for the query.
positionBindings(sql) {
let questionCount = -1;
return sql.replace(/\?/g, function() {
questionCount += 1;
return `@p${questionCount}`;
});
},
// Grab a connection, run the query via the MSSQL streaming interface,
// and pass that through to the stream we've sent back to the client.
_stream(connection, obj, stream) {
if (!obj || typeof obj === 'string') obj = { sql: obj };
return new Bluebird((resolver, rejecter) => {
stream.on('error', (err) => {
rejecter(err);
});
stream.on('end', resolver);
const { sql } = obj;
if (!sql) return resolver();
const req = (connection.tx_ || connection).request();
//req.verbose = true;
req.multiple = true;
req.stream = true;
if (obj.bindings) {
for (let i = 0; i < obj.bindings.length; i++) {
this._setReqInput(req, i, obj.bindings[i]);
}
}
req.pipe(stream);
req.query(sql);
});
},
// Runs the query on the specified connection, providing the bindings
// and any other necessary prep work.
_query(connection, obj) {
const client = this;
if (!obj || typeof obj === 'string') obj = { sql: obj };
return new Bluebird((resolver, rejecter) => {
const { sql } = obj;
if (!sql) return resolver();
const req = (connection.tx_ || connection).request();
// req.verbose = true;
req.multiple = true;
if (obj.bindings) {
for (let i = 0; i < obj.bindings.length; i++) {
client._setReqInput(req, i, obj.bindings[i]);
}
}
req.query(sql, (err, recordset) => {
if (err) {
return rejecter(err);
}
obj.response = recordset.recordsets[0];
resolver(obj);
});
});
},
// sets a request input parameter. Detects bigints and decimals and sets type appropriately.
_setReqInput(req, i, binding) {
if (typeof binding == 'number') {
if (binding % 1 !== 0) {
req.input(`p${i}`, this.driver.Decimal(38, 10), binding);
} else if (binding < SQL_INT4.MIN || binding > SQL_INT4.MAX) {
if (binding < SQL_BIGINT_SAFE.MIN || binding > SQL_BIGINT_SAFE.MAX) {
throw new Error(
`Bigint must be safe integer or must be passed as string, saw ${binding}`
);
}
req.input(`p${i}`, this.driver.BigInt, binding);
} else {
req.input(`p${i}`, this.driver.Int, binding);
}
} else {
req.input(`p${i}`, binding);
}
},
// Process the response as returned from the query.
processResponse(obj, runner) {
if (obj == null) return;
const { response, method } = obj;
if (obj.output) return obj.output.call(runner, response);
switch (method) {
case 'select':
case 'pluck':
case 'first':
if (method === 'pluck') return map(response, obj.pluck);
return method === 'first' ? response[0] : response;
case 'insert':
case 'del':
case 'update':
case 'counter':
if (obj.returning) {
if (obj.returning === '@@rowcount') {
return response[0][''];
}
if (
(Array.isArray(obj.returning) && obj.returning.length > 1) ||
obj.returning[0] === '*'
) {
return response;
}
// return an array with values if only one returning value was specified
return flatten(map(response, values));
}
return response;
default:
return response;
}
},
});
class MSSQL_Formatter extends Formatter {
// Accepts a string or array of columns to wrap as appropriate.
columnizeWithPrefix(prefix, target) {
const columns = typeof target === 'string' ? [target] : target;
let str = '',
i = -1;
while (++i < columns.length) {
if (i > 0) str += ', ';
str += prefix + this.wrap(columns[i]);
}
return str;
}
}
module.exports = Client_MSSQL;
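
Illustrative usage sketch (not from the diff above): how this client is typically selected, assuming knex and the mssql driver are installed; connection values are placeholders.

// Hypothetical config: Client_MSSQL copies connection.host to connection.server
// for the driver (#1235), so either key works.
const knex = require('knex')({
  client: 'mssql',
  connection: {
    host: 'localhost',
    user: 'sa',
    password: 'secret',
    database: 'example',
  },
});

knex('users')
  .where({ id: 1 })
  .first()
  .then((row) => console.log(row))
  .finally(() => knex.destroy());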

node_modules/knex/lib/dialects/mssql/query/compiler.js generated vendored Normal file

@@ -0,0 +1,264 @@
// MSSQL Query Compiler
// ------
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const { isEmpty, compact, identity } = require('lodash');
function QueryCompiler_MSSQL(client, builder) {
QueryCompiler.call(this, client, builder);
}
inherits(QueryCompiler_MSSQL, QueryCompiler);
const components = [
'columns',
'join',
'lock',
'where',
'union',
'group',
'having',
'order',
'limit',
'offset',
];
Object.assign(QueryCompiler_MSSQL.prototype, {
_emptyInsertValue: 'default values',
select() {
const sql = this.with();
const statements = components.map((component) => this[component](this));
return sql + compact(statements).join(' ');
},
// Compiles an "insert" query, allowing for multiple
// inserts using a single query statement.
insert() {
const insertValues = this.single.insert || [];
let sql = this.with() + `insert into ${this.tableName} `;
const { returning } = this.single;
const returningSql = returning
? this._returning('insert', returning) + ' '
: '';
if (Array.isArray(insertValues)) {
if (insertValues.length === 0) {
return '';
}
} else if (typeof insertValues === 'object' && isEmpty(insertValues)) {
return {
sql: sql + returningSql + this._emptyInsertValue,
returning,
};
}
const insertData = this._prepInsert(insertValues);
if (typeof insertData === 'string') {
sql += insertData;
} else {
if (insertData.columns.length) {
sql += `(${this.formatter.columnize(insertData.columns)}`;
sql += `) ${returningSql}values (`;
let i = -1;
while (++i < insertData.values.length) {
if (i !== 0) sql += '), (';
sql += this.formatter.parameterize(
insertData.values[i],
this.client.valueForUndefined
);
}
sql += ')';
} else if (insertValues.length === 1 && insertValues[0]) {
sql += returningSql + this._emptyInsertValue;
} else {
sql = '';
}
}
return {
sql,
returning,
};
},
// Compiles an `update` query, allowing for a return value.
update() {
const top = this.top();
const withSQL = this.with();
const updates = this._prepUpdate(this.single.update);
const join = this.join();
const where = this.where();
const order = this.order();
const { returning } = this.single;
return {
sql:
withSQL +
`update ${top ? top + ' ' : ''}${this.tableName}` +
' set ' +
updates.join(', ') +
(returning ? ` ${this._returning('update', returning)}` : '') +
(join ? ` from ${this.tableName} ${join}` : '') +
(where ? ` ${where}` : '') +
(order ? ` ${order}` : '') +
(!returning ? this._returning('rowcount', '@@rowcount') : ''),
returning: returning || '@@rowcount',
};
},
// Compiles a `delete` query.
del() {
// Make sure tableName is processed by the formatter first.
const withSQL = this.with();
const { tableName } = this;
const wheres = this.where();
const { returning } = this.single;
return {
sql:
withSQL +
`delete from ${tableName}` +
(returning ? ` ${this._returning('del', returning)}` : '') +
(wheres ? ` ${wheres}` : '') +
(!returning ? this._returning('rowcount', '@@rowcount') : ''),
returning: returning || '@@rowcount',
};
},
// Compiles the columns in the query, specifying if an item was distinct.
columns() {
let distinctClause = '';
if (this.onlyUnions()) return '';
const top = this.top();
const columns = this.grouped.columns || [];
let i = -1,
sql = [];
if (columns) {
while (++i < columns.length) {
const stmt = columns[i];
if (stmt.distinct) distinctClause = 'distinct ';
if (stmt.distinctOn) {
distinctClause = this.distinctOn(stmt.value);
continue;
}
if (stmt.type === 'aggregate') {
sql.push(...this.aggregate(stmt));
} else if (stmt.type === 'aggregateRaw') {
sql.push(this.aggregateRaw(stmt));
} else if (stmt.value && stmt.value.length > 0) {
sql.push(this.formatter.columnize(stmt.value));
}
}
}
if (sql.length === 0) sql = ['*'];
return (
`select ${distinctClause}` +
(top ? top + ' ' : '') +
sql.join(', ') +
(this.tableName ? ` from ${this.tableName}` : '')
);
},
_returning(method, value) {
switch (method) {
case 'update':
case 'insert':
return value
? `output ${this.formatter.columnizeWithPrefix('inserted.', value)}`
: '';
case 'del':
return value
? `output ${this.formatter.columnizeWithPrefix('deleted.', value)}`
: '';
case 'rowcount':
return value ? ';select @@rowcount' : '';
}
},
// Compiles a `truncate` query.
truncate() {
return `truncate table ${this.tableName}`;
},
forUpdate() {
// this doesn't work exactly as it should; one should also mention the index while locking
// https://stackoverflow.com/a/9818448/360060
return 'with (UPDLOCK)';
},
forShare() {
// http://www.sqlteam.com/article/introduction-to-locking-in-sql-server
return 'with (HOLDLOCK)';
},
// Compiles a `columnInfo` query.
columnInfo() {
const column = this.single.columnInfo;
let schema = this.single.schema;
// The user may have specified a custom wrapIdentifier function in the config. We
// need to run the identifiers through that function, but not format them as
// identifiers otherwise.
const table = this.client.customWrapIdentifier(this.single.table, identity);
if (schema) {
schema = this.client.customWrapIdentifier(schema, identity);
}
let sql = `select * from information_schema.columns where table_name = ? and table_catalog = ?`;
const bindings = [table, this.client.database()];
if (schema) {
sql += ' and table_schema = ?';
bindings.push(schema);
} else {
sql += ` and table_schema = 'dbo'`;
}
return {
sql,
bindings: bindings,
output(resp) {
const out = resp.reduce(function(columns, val) {
columns[val.COLUMN_NAME] = {
defaultValue: val.COLUMN_DEFAULT,
type: val.DATA_TYPE,
maxLength: val.CHARACTER_MAXIMUM_LENGTH,
nullable: val.IS_NULLABLE === 'YES',
};
return columns;
}, {});
return (column && out[column]) || out;
},
};
},
top() {
const noLimit = !this.single.limit && this.single.limit !== 0;
const noOffset = !this.single.offset;
if (noLimit || !noOffset) return '';
return `top (${this.formatter.parameter(this.single.limit)})`;
},
limit() {
return '';
},
offset() {
const noLimit = !this.single.limit && this.single.limit !== 0;
const noOffset = !this.single.offset;
if (noOffset) return '';
let offset = `offset ${
noOffset ? '0' : this.formatter.parameter(this.single.offset)
} rows`;
if (!noLimit) {
offset += ` fetch next ${this.formatter.parameter(
this.single.limit
)} rows only`;
}
return offset;
},
});
// Set the QueryBuilder & QueryCompiler on the client object,
// in case anyone wants to modify things to suit their own purposes.
module.exports = QueryCompiler_MSSQL;
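
Illustrative sketch (not from the diff above) of what this compiler emits; no database connection is needed to compile, and exact output may vary between knex versions.

const knex = require('knex')({ client: 'mssql' });

// limit/offset compile to OFFSET ... FETCH NEXT, and returning compiles to an OUTPUT clause.
console.log(
  knex('users').select('id', 'name').orderBy('id').offset(20).limit(10).toString()
);
// roughly: select [id], [name] from [users] order by [id] asc
//          offset 20 rows fetch next 10 rows only

console.log(knex('users').insert({ name: 'a' }, ['id']).toString());
// roughly: insert into [users] ([name]) output inserted.[id] values ('a')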


@@ -0,0 +1,103 @@
// MSSQL Column Compiler
// -------
const inherits = require('inherits');
const ColumnCompiler = require('../../../schema/columncompiler');
function ColumnCompiler_MSSQL() {
ColumnCompiler.apply(this, arguments);
this.modifiers = ['nullable', 'defaultTo', 'first', 'after', 'comment'];
}
inherits(ColumnCompiler_MSSQL, ColumnCompiler);
// Types
// ------
Object.assign(ColumnCompiler_MSSQL.prototype, {
increments: 'int identity(1,1) not null primary key',
bigincrements: 'bigint identity(1,1) not null primary key',
bigint: 'bigint',
double(precision, scale) {
return 'float';
},
floating(precision, scale) {
// ignore precision / scale, which is MySQL-specific
return `float`;
},
integer() {
// mssql does not support length
return 'int';
},
mediumint: 'int',
smallint: 'smallint',
tinyint() {
// mssql does not support length
return 'tinyint';
},
varchar(length) {
return `nvarchar(${this._num(length, 255)})`;
},
text: 'nvarchar(max)',
mediumtext: 'nvarchar(max)',
longtext: 'nvarchar(max)',
// TODO: mssql supports check constraints as of SQL Server 2008
// so make enu here more like postgres
enu: 'nvarchar(100)',
uuid: 'uniqueidentifier',
datetime: 'datetime2',
timestamp({ useTz = false } = {}) {
return useTz ? 'datetimeoffset' : 'datetime2';
},
bit(length) {
if (length > 1) {
this.client.logger.warn('Bit field is exactly 1 bit length for MSSQL');
}
return 'bit';
},
binary(length) {
return length ? `varbinary(${this._num(length)})` : 'varbinary(max)';
},
bool: 'bit',
// Modifiers
// ------
first() {
this.client.logger.warn('Column first modifier not available for MSSQL');
return '';
},
after(column) {
this.client.logger.warn('Column after modifier not available for MSSQL');
return '';
},
comment(comment) {
if (comment && comment.length > 255) {
this.client.logger.warn(
'Your comment is longer than the max comment length for MSSQL'
);
}
return '';
},
});
module.exports = ColumnCompiler_MSSQL;
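
Illustrative sketch (not from the diff above) of how these type mappings surface through the schema builder; exact DDL may vary between knex versions.

const knex = require('knex')({ client: 'mssql' });

const [ddl] = knex.schema
  .createTable('posts', (table) => {
    table.increments('id');                          // int identity(1,1) not null primary key
    table.string('title');                           // nvarchar(255)
    table.text('body');                              // nvarchar(max)
    table.timestamp('created_at', { useTz: true });  // datetimeoffset
  })
  .toSQL();

console.log(ddl.sql);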


@@ -0,0 +1,59 @@
// MSSQL Schema Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler = require('../../../schema/compiler');
function SchemaCompiler_MSSQL(client, builder) {
SchemaCompiler.call(this, client, builder);
}
inherits(SchemaCompiler_MSSQL, SchemaCompiler);
Object.assign(SchemaCompiler_MSSQL.prototype, {
dropTablePrefix: 'DROP TABLE ',
dropTableIfExists(tableName) {
const name = this.formatter.wrap(prefixedTableName(this.schema, tableName));
this.pushQuery(
`if object_id('${name}', 'U') is not null DROP TABLE ${name}`
);
},
// Rename a table on the schema.
renameTable(tableName, to) {
this.pushQuery(
`exec sp_rename ${this.formatter.parameter(
prefixedTableName(this.schema, tableName)
)}, ${this.formatter.parameter(to)}`
);
},
// Check whether a table exists on the query.
hasTable(tableName) {
const formattedTable = this.formatter.parameter(
this.formatter.wrap(prefixedTableName(this.schema, tableName))
);
const sql =
`select object_id from sys.tables ` +
`where object_id = object_id(${formattedTable})`;
this.pushQuery({ sql, output: (resp) => resp.length > 0 });
},
// Check whether a column exists on the schema.
hasColumn(tableName, column) {
const formattedColumn = this.formatter.parameter(column);
const formattedTable = this.formatter.parameter(
this.formatter.wrap(prefixedTableName(this.schema, tableName))
);
const sql =
`select object_id from sys.columns ` +
`where name = ${formattedColumn} ` +
`and object_id = object_id(${formattedTable})`;
this.pushQuery({ sql, output: (resp) => resp.length > 0 });
},
});
function prefixedTableName(prefix, table) {
return prefix ? `${prefix}.${table}` : table;
}
module.exports = SchemaCompiler_MSSQL;
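
Illustrative sketch (not from the diff above) of the existence checks above; assumes a configured knex instance with a reachable MSSQL database, and the table/column names are placeholders.

async function ensureUsersTable(knex) {
  // hasTable/hasColumn resolve to booleans via the `output` callbacks above.
  const exists = await knex.schema.hasTable('users');
  if (!exists) {
    await knex.schema.createTable('users', (table) => {
      table.increments('id');
      table.string('email');
    });
  }
}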


@@ -0,0 +1,228 @@
/* eslint max-len:0 */
// MSSQL Table Builder & Compiler
// -------
const inherits = require('inherits');
const TableCompiler = require('../../../schema/tablecompiler');
const helpers = require('../../../helpers');
// Table Compiler
// ------
function TableCompiler_MSSQL() {
TableCompiler.apply(this, arguments);
}
inherits(TableCompiler_MSSQL, TableCompiler);
Object.assign(TableCompiler_MSSQL.prototype, {
createAlterTableMethods: ['foreign', 'primary'],
createQuery(columns, ifNot) {
const createStatement = ifNot
? `if object_id('${this.tableName()}', 'U') is null CREATE TABLE `
: 'CREATE TABLE ';
const sql =
createStatement +
this.tableName() +
(this._formatting ? ' (\n ' : ' (') +
columns.sql.join(this._formatting ? ',\n ' : ', ') +
')';
if (this.single.comment) {
const { comment } = this.single;
if (comment.length > 60)
this.client.logger.warn(
'The max length for a table comment is 60 characters'
);
}
this.pushQuery(sql);
},
lowerCase: false,
addColumnsPrefix: 'ADD ',
dropColumnPrefix: 'DROP COLUMN ',
alterColumnPrefix: 'ALTER COLUMN ',
// Compiles column add. Multiple columns need only one ADD clause (not one ADD per column) so core addColumns doesn't work. #1348
addColumns(columns, prefix) {
prefix = prefix || this.addColumnsPrefix;
if (columns.sql.length > 0) {
this.pushQuery({
sql:
(this.lowerCase ? 'alter table ' : 'ALTER TABLE ') +
this.tableName() +
' ' +
prefix +
columns.sql.join(', '),
bindings: columns.bindings,
});
}
},
// Compiles column drop. Multiple columns need only one DROP clause (not one DROP per column) so core dropColumn doesn't work. #1348
dropColumn() {
const _this2 = this;
const columns = helpers.normalizeArr.apply(null, arguments);
const drops = (Array.isArray(columns) ? columns : [columns]).map((column) =>
_this2.formatter.wrap(column)
);
this.pushQuery(
(this.lowerCase ? 'alter table ' : 'ALTER TABLE ') +
this.tableName() +
' ' +
this.dropColumnPrefix +
drops.join(', ')
);
},
// Compiles the comment on the table.
comment() {},
changeType() {},
// Renames a column on the table.
renameColumn(from, to) {
this.pushQuery(
`exec sp_rename ${this.formatter.parameter(
this.tableName() + '.' + from
)}, ${this.formatter.parameter(to)}, 'COLUMN'`
);
},
dropFKRefs(runner, refs) {
const formatter = this.client.formatter(this.tableBuilder);
return Promise.all(
refs.map(function(ref) {
const constraintName = formatter.wrap(ref.CONSTRAINT_NAME);
const tableName = formatter.wrap(ref.TABLE_NAME);
return runner.query({
sql: `ALTER TABLE ${tableName} DROP CONSTRAINT ${constraintName}`,
});
})
);
},
createFKRefs(runner, refs) {
const formatter = this.client.formatter(this.tableBuilder);
return Promise.all(
refs.map(function(ref) {
const tableName = formatter.wrap(ref.TABLE_NAME);
const keyName = formatter.wrap(ref.CONSTRAINT_NAME);
const column = formatter.columnize(ref.COLUMN_NAME);
const references = formatter.columnize(ref.REFERENCED_COLUMN_NAME);
const inTable = formatter.wrap(ref.REFERENCED_TABLE_NAME);
const onUpdate = ` ON UPDATE ${ref.UPDATE_RULE}`;
const onDelete = ` ON DELETE ${ref.DELETE_RULE}`;
return runner.query({
sql:
`ALTER TABLE ${tableName} ADD CONSTRAINT ${keyName}` +
' FOREIGN KEY (' +
column +
') REFERENCES ' +
inTable +
' (' +
references +
')' +
onUpdate +
onDelete,
});
})
);
},
index(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
this.pushQuery(
`CREATE INDEX ${indexName} ON ${this.tableName()} (${this.formatter.columnize(
columns
)})`
);
},
primary(columns, constraintName) {
constraintName = constraintName
? this.formatter.wrap(constraintName)
: this.formatter.wrap(`${this.tableNameRaw}_pkey`);
if (!this.forCreate) {
this.pushQuery(
`ALTER TABLE ${this.tableName()} ADD CONSTRAINT ${constraintName} PRIMARY KEY (${this.formatter.columnize(
columns
)})`
);
} else {
this.pushQuery(
`CONSTRAINT ${constraintName} PRIMARY KEY (${this.formatter.columnize(
columns
)})`
);
}
},
unique(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, columns);
if (!Array.isArray(columns)) {
columns = [columns];
}
const whereAllTheColumnsAreNotNull = columns
.map((column) => this.formatter.columnize(column) + ' IS NOT NULL')
.join(' AND ');
// make a unique constraint that allows nulls (https://stackoverflow.com/a/767702/360060)
// to be more or less compatible with other DBs (if any of the columns is NULL, "duplicates" are allowed)
this.pushQuery(
`CREATE UNIQUE INDEX ${indexName} ON ${this.tableName()} (${this.formatter.columnize(
columns
)}) WHERE ${whereAllTheColumnsAreNotNull}`
);
},
// Compile a drop index command.
dropIndex(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
this.pushQuery(`DROP INDEX ${indexName} ON ${this.tableName()}`);
},
// Compile a drop foreign key command.
dropForeign(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('foreign', this.tableNameRaw, columns);
this.pushQuery(
`ALTER TABLE ${this.tableName()} DROP CONSTRAINT ${indexName}`
);
},
// Compile a drop primary key command.
dropPrimary(constraintName) {
constraintName = constraintName
? this.formatter.wrap(constraintName)
: this.formatter.wrap(`${this.tableNameRaw}_pkey`);
this.pushQuery(
`ALTER TABLE ${this.tableName()} DROP CONSTRAINT ${constraintName}`
);
},
// Compile a drop unique key command.
dropUnique(column, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, column);
this.pushQuery(`DROP INDEX ${indexName} ON ${this.tableName()}`);
},
});
module.exports = TableCompiler_MSSQL;
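
Illustrative sketch (not from the diff above) of the filtered unique index produced by unique(); exact output may vary between knex versions.

const knex = require('knex')({ client: 'mssql' });

const [stmt] = knex.schema
  .alterTable('users', (table) => {
    table.unique(['email']);
  })
  .toSQL();

console.log(stmt.sql);
// roughly: CREATE UNIQUE INDEX [users_email_unique] ON [users] ([email])
//          WHERE [email] IS NOT NULL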

node_modules/knex/lib/dialects/mssql/transaction.js generated vendored Normal file

@@ -0,0 +1,107 @@
const Bluebird = require('bluebird');
const Transaction = require('../../transaction');
const { isUndefined } = require('lodash');
const debug = require('debug')('knex:tx');
module.exports = class Transaction_MSSQL extends Transaction {
begin(conn) {
debug('%s: begin', this.txid);
return conn.tx_.begin().then(this._resolver, this._rejecter);
}
savepoint(conn) {
debug('%s: savepoint at', this.txid);
return Bluebird.resolve().then(() =>
this.query(conn, `SAVE TRANSACTION ${this.txid}`)
);
}
commit(conn, value) {
this._completed = true;
debug('%s: commit', this.txid);
return conn.tx_.commit().then(() => this._resolver(value), this._rejecter);
}
release(conn, value) {
return this._resolver(value);
}
rollback(conn, error) {
this._completed = true;
debug('%s: rolling back', this.txid);
return conn.tx_.rollback().then(
() => {
let err = error;
if (isUndefined(error)) {
if (this.doNotRejectOnRollback) {
this._resolver();
return;
}
err = new Error(`Transaction rejected with non-error: ${error}`);
}
this._rejecter(err);
},
(err) => {
if (error) err.originalError = error;
return this._rejecter(err);
}
);
}
rollbackTo(conn, error) {
debug('%s: rolling backTo', this.txid);
return Bluebird.resolve()
.then(() =>
this.query(conn, `ROLLBACK TRANSACTION ${this.txid}`, 2, error)
)
.then(() => this._rejecter(error));
}
// Acquire a connection and create a disposer - either using the one passed
// via config or getting one off the client. The disposer will be called once
// the original promise is marked completed.
acquireConnection(config, cb) {
const configConnection = config && config.connection;
return new Bluebird((resolve, reject) => {
try {
resolve(
(this.outerTx ? this.outerTx.conn : null) ||
configConnection ||
this.client.acquireConnection()
);
} catch (e) {
reject(e);
}
})
.then((conn) => {
conn.__knexTxId = this.txid;
if (!this.outerTx) {
this.conn = conn;
conn.tx_ = conn.transaction();
}
return conn;
})
.then(async (conn) => {
try {
return await cb(conn);
} finally {
if (!this.outerTx) {
if (conn.tx_) {
if (!this._completed) {
debug('%s: unreleased transaction', this.txid);
conn.tx_.rollback();
}
conn.tx_ = null;
}
this.conn = null;
if (!configConnection) {
debug('%s: releasing connection', this.txid);
this.client.releaseConnection(conn);
} else {
debug('%s: not releasing external connection', this.txid);
}
}
}
});
}
};
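
Illustrative sketch (not from the diff above) of transaction usage against this dialect; assumes a configured knex instance, and the table names are placeholders.

// Commits when the callback resolves, rolls back when it throws.
async function createWithAudit(knex) {
  await knex.transaction(async (trx) => {
    const [accountId] = await trx('accounts').insert({ name: 'new account' }, ['id']);
    await trx('audit_log').insert({ account_id: accountId, action: 'created' });
  });
}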

node_modules/knex/lib/dialects/mysql/index.js generated vendored Normal file

@@ -0,0 +1,198 @@
// MySQL Client
// -------
const inherits = require('inherits');
const { map, defer } = require('lodash');
const { promisify } = require('util');
const Client = require('../../client');
const Bluebird = require('bluebird');
const Transaction = require('./transaction');
const QueryCompiler = require('./query/compiler');
const SchemaCompiler = require('./schema/compiler');
const TableCompiler = require('./schema/tablecompiler');
const ColumnCompiler = require('./schema/columncompiler');
const { makeEscape } = require('../../query/string');
// Always initialize with the "QueryBuilder" and "QueryCompiler"
// objects, which extend the base 'lib/query/builder' and
// 'lib/query/compiler', respectively.
function Client_MySQL(config) {
Client.call(this, config);
}
inherits(Client_MySQL, Client);
Object.assign(Client_MySQL.prototype, {
dialect: 'mysql',
driverName: 'mysql',
_driver() {
return require('mysql');
},
queryCompiler() {
return new QueryCompiler(this, ...arguments);
},
schemaCompiler() {
return new SchemaCompiler(this, ...arguments);
},
tableCompiler() {
return new TableCompiler(this, ...arguments);
},
columnCompiler() {
return new ColumnCompiler(this, ...arguments);
},
transaction() {
return new Transaction(this, ...arguments);
},
_escapeBinding: makeEscape(),
wrapIdentifierImpl(value) {
return value !== '*' ? `\`${value.replace(/`/g, '``')}\`` : '*';
},
// Get a raw connection, called by the `pool` whenever a new
// connection needs to be added to the pool.
acquireRawConnection() {
return new Bluebird((resolver, rejecter) => {
const connection = this.driver.createConnection(this.connectionSettings);
connection.on('error', (err) => {
connection.__knex__disposed = err;
});
connection.connect((err) => {
if (err) {
// if connection is rejected, remove listener that was registered above...
connection.removeAllListeners();
return rejecter(err);
}
resolver(connection);
});
});
},
// Used to explicitly close a connection, called internally by the pool
// when a connection times out or the pool is shutdown.
async destroyRawConnection(connection) {
try {
const end = promisify((cb) => connection.end(cb));
return await end();
} catch (err) {
connection.__knex__disposed = err;
} finally {
// see discussion https://github.com/knex/knex/pull/3483
defer(() => connection.removeAllListeners());
}
},
validateConnection(connection) {
if (
connection.state === 'connected' ||
connection.state === 'authenticated'
) {
return true;
}
return false;
},
// Grab a connection, run the query via the MySQL streaming interface,
// and pass that through to the stream we've sent back to the client.
_stream(connection, obj, stream, options) {
options = options || {};
const queryOptions = Object.assign({ sql: obj.sql }, obj.options);
return new Bluebird((resolver, rejecter) => {
stream.on('error', rejecter);
stream.on('end', resolver);
const queryStream = connection
.query(queryOptions, obj.bindings)
.stream(options);
queryStream.on('error', (err) => {
rejecter(err);
stream.emit('error', err);
});
queryStream.pipe(stream);
});
},
// Runs the query on the specified connection, providing the bindings
// and any other necessary prep work.
_query(connection, obj) {
if (!obj || typeof obj === 'string') obj = { sql: obj };
return new Bluebird(function(resolver, rejecter) {
if (!obj.sql) {
resolver();
return;
}
const queryOptions = Object.assign({ sql: obj.sql }, obj.options);
connection.query(queryOptions, obj.bindings, function(err, rows, fields) {
if (err) return rejecter(err);
obj.response = [rows, fields];
resolver(obj);
});
});
},
// Process the response as returned from the query.
processResponse(obj, runner) {
if (obj == null) return;
const { response } = obj;
const { method } = obj;
const rows = response[0];
const fields = response[1];
if (obj.output) return obj.output.call(runner, rows, fields);
switch (method) {
case 'select':
case 'pluck':
case 'first': {
if (method === 'pluck') {
return map(rows, obj.pluck);
}
return method === 'first' ? rows[0] : rows;
}
case 'insert':
return [rows.insertId];
case 'del':
case 'update':
case 'counter':
return rows.affectedRows;
default:
return response;
}
},
canCancelQuery: true,
cancelQuery(connectionToKill) {
const acquiringConn = this.acquireConnection();
// Error out if we can't acquire connection in time.
// Purposely not putting timeout on `KILL QUERY` execution because erroring
// early there would release the `connectionToKill` back to the pool with
// a `KILL QUERY` command yet to finish.
return acquiringConn
.timeout(100)
.then((conn) =>
this.query(conn, {
method: 'raw',
sql: 'KILL QUERY ?',
bindings: [connectionToKill.threadId],
options: {},
})
)
.finally(() => {
// NOT returning this promise because we want to release the connection
// in a non-blocking fashion
acquiringConn.then((conn) => this.releaseConnection(conn));
});
},
});
module.exports = Client_MySQL;
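
Illustrative sketch (not from the diff above): cancelQuery() above is what backs knex's cancellable query timeouts; assumes a configured MySQL knex instance, and the table name is a placeholder.

// With { cancel: true }, a query exceeding the timeout is interrupted by issuing
// KILL QUERY <threadId> on a second connection, as implemented above.
async function slowQueryExample(knex) {
  try {
    return await knex('big_table').select('*').timeout(1000, { cancel: true });
  } catch (err) {
    console.error('query timed out:', err.message);
    return [];
  }
}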

node_modules/knex/lib/dialects/mysql/query/compiler.js generated vendored Normal file

@@ -0,0 +1,105 @@
// MySQL Query Compiler
// ------
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const { identity } = require('lodash');
function QueryCompiler_MySQL(client, builder) {
QueryCompiler.call(this, client, builder);
const { returning } = this.single;
if (returning) {
this.client.logger.warn(
'.returning() is not supported by mysql and will not have any effect.'
);
}
}
inherits(QueryCompiler_MySQL, QueryCompiler);
Object.assign(QueryCompiler_MySQL.prototype, {
_emptyInsertValue: '() values ()',
// Update method, including joins, wheres, order & limits.
update() {
const join = this.join();
const updates = this._prepUpdate(this.single.update);
const where = this.where();
const order = this.order();
const limit = this.limit();
return (
`update ${this.tableName}` +
(join ? ` ${join}` : '') +
' set ' +
updates.join(', ') +
(where ? ` ${where}` : '') +
(order ? ` ${order}` : '') +
(limit ? ` ${limit}` : '')
);
},
forUpdate() {
return 'for update';
},
forShare() {
return 'lock in share mode';
},
// Only supported on MySQL 8.0+
skipLocked() {
return 'skip locked';
},
// Supported on MySQL 8.0+ and MariaDB 10.3.0+
noWait() {
return 'nowait';
},
// Compiles a `columnInfo` query.
columnInfo() {
const column = this.single.columnInfo;
// The user may have specified a custom wrapIdentifier function in the config. We
// need to run the identifiers through that function, but not format them as
// identifiers otherwise.
const table = this.client.customWrapIdentifier(this.single.table, identity);
return {
sql:
'select * from information_schema.columns where table_name = ? and table_schema = ?',
bindings: [table, this.client.database()],
output(resp) {
const out = resp.reduce(function(columns, val) {
columns[val.COLUMN_NAME] = {
defaultValue: val.COLUMN_DEFAULT,
type: val.DATA_TYPE,
maxLength: val.CHARACTER_MAXIMUM_LENGTH,
nullable: val.IS_NULLABLE === 'YES',
};
return columns;
}, {});
return (column && out[column]) || out;
},
};
},
limit() {
const noLimit = !this.single.limit && this.single.limit !== 0;
if (noLimit && !this.single.offset) return '';
// Workaround for offset only.
// see: http://stackoverflow.com/questions/255517/mysql-offset-infinite-rows
const limit =
this.single.offset && noLimit
? '18446744073709551615'
: this.formatter.parameter(this.single.limit);
return `limit ${limit}`;
},
});
// Set the QueryBuilder & QueryCompiler on the client object,
// in case anyone wants to modify things to suit their own purposes.
module.exports = QueryCompiler_MySQL;
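
Illustrative sketch (not from the diff above) of the offset-only workaround in limit(); no database connection is needed to compile.

const knex = require('knex')({ client: 'mysql' });

console.log(knex('logs').offset(100).toString());
// roughly: select * from `logs` limit 18446744073709551615 offset 100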


@@ -0,0 +1,170 @@
// MySQL Column Compiler
// -------
const inherits = require('inherits');
const ColumnCompiler = require('../../../schema/columncompiler');
const { isObject } = require('lodash');
function ColumnCompiler_MySQL() {
ColumnCompiler.apply(this, arguments);
this.modifiers = [
'unsigned',
'nullable',
'defaultTo',
'comment',
'collate',
'first',
'after',
];
}
inherits(ColumnCompiler_MySQL, ColumnCompiler);
// Types
// ------
Object.assign(ColumnCompiler_MySQL.prototype, {
increments: 'int unsigned not null auto_increment primary key',
bigincrements: 'bigint unsigned not null auto_increment primary key',
bigint: 'bigint',
double(precision, scale) {
if (!precision) return 'double';
return `double(${this._num(precision, 8)}, ${this._num(scale, 2)})`;
},
integer(length) {
length = length ? `(${this._num(length, 11)})` : '';
return `int${length}`;
},
mediumint: 'mediumint',
smallint: 'smallint',
tinyint(length) {
length = length ? `(${this._num(length, 1)})` : '';
return `tinyint${length}`;
},
text(column) {
switch (column) {
case 'medium':
case 'mediumtext':
return 'mediumtext';
case 'long':
case 'longtext':
return 'longtext';
default:
return 'text';
}
},
mediumtext() {
return this.text('medium');
},
longtext() {
return this.text('long');
},
enu(allowed) {
return `enum('${allowed.join("', '")}')`;
},
datetime(precision) {
if (isObject(precision)) {
({ precision } = precision);
}
return typeof precision === 'number'
? `datetime(${precision})`
: 'datetime';
},
timestamp(precision) {
if (isObject(precision)) {
({ precision } = precision);
}
return typeof precision === 'number'
? `timestamp(${precision})`
: 'timestamp';
},
time(precision) {
if (isObject(precision)) {
({ precision } = precision);
}
return typeof precision === 'number' ? `time(${precision})` : 'time';
},
bit(length) {
return length ? `bit(${this._num(length)})` : 'bit';
},
binary(length) {
return length ? `varbinary(${this._num(length)})` : 'blob';
},
json() {
return 'json';
},
jsonb() {
return 'json';
},
// Modifiers
// ------
defaultTo(value) {
// MySQL defaults to null by default, but breaks down if you pass it explicitly
// Note that in MySQL versions up to 5.7, logic related to updating
// timestamps when no explicit value is passed is quite insane - https://dev.mysql.com/doc/refman/5.7/en/server-system-variables.html#sysvar_explicit_defaults_for_timestamp
if (value === null || value === undefined) {
return;
}
if ((this.type === 'json' || this.type === 'jsonb') && isObject(value)) {
// A default value for json will only work if it is an expression
return `default ('${JSON.stringify(value)}')`;
}
const defaultVal = ColumnCompiler_MySQL.super_.prototype.defaultTo.apply(
this,
arguments
);
if (this.type !== 'blob' && this.type.indexOf('text') === -1) {
return defaultVal;
}
return '';
},
unsigned() {
return 'unsigned';
},
comment(comment) {
if (comment && comment.length > 255) {
this.client.logger.warn(
'Your comment is longer than the max comment length for MySQL'
);
}
return comment && `comment '${comment}'`;
},
first() {
return 'first';
},
after(column) {
return `after ${this.formatter.wrap(column)}`;
},
collate(collation) {
return collation && `collate '${collation}'`;
},
});
module.exports = ColumnCompiler_MySQL;
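
Illustrative sketch (not from the diff above) of the json defaultTo handling; exact DDL may vary between knex versions.

const knex = require('knex')({ client: 'mysql' });

const [ddl] = knex.schema
  .createTable('settings', (table) => {
    table.json('prefs').defaultTo({ theme: 'dark' });
  })
  .toSQL();

console.log(ddl.sql);
// roughly: create table `settings` (`prefs` json default ('{"theme":"dark"}'))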


@@ -0,0 +1,60 @@
// MySQL Schema Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler = require('../../../schema/compiler');
const { some } = require('lodash');
function SchemaCompiler_MySQL(client, builder) {
SchemaCompiler.call(this, client, builder);
}
inherits(SchemaCompiler_MySQL, SchemaCompiler);
Object.assign(SchemaCompiler_MySQL.prototype, {
// Rename a table on the schema.
renameTable(tableName, to) {
this.pushQuery(
`rename table ${this.formatter.wrap(tableName)} to ${this.formatter.wrap(
to
)}`
);
},
// Check whether a table exists on the query.
hasTable(tableName) {
let sql = 'select * from information_schema.tables where table_name = ?';
const bindings = [tableName];
if (this.schema) {
sql += ' and table_schema = ?';
bindings.push(this.schema);
} else {
sql += ' and table_schema = database()';
}
this.pushQuery({
sql,
bindings,
output: function output(resp) {
return resp.length > 0;
},
});
},
// Check whether a column exists on the schema.
hasColumn(tableName, column) {
this.pushQuery({
sql: `show columns from ${this.formatter.wrap(tableName)}`,
output(resp) {
return some(resp, (row) => {
return (
this.client.wrapIdentifier(row.Field) ===
this.client.wrapIdentifier(column)
);
});
},
});
},
});
module.exports = SchemaCompiler_MySQL;


@@ -0,0 +1,262 @@
/* eslint max-len:0 no-console:0*/
// MySQL Table Builder & Compiler
// -------
const inherits = require('inherits');
const TableCompiler = require('../../../schema/tablecompiler');
// Table Compiler
// ------
function TableCompiler_MySQL() {
TableCompiler.apply(this, arguments);
}
inherits(TableCompiler_MySQL, TableCompiler);
Object.assign(TableCompiler_MySQL.prototype, {
createQuery(columns, ifNot) {
const createStatement = ifNot
? 'create table if not exists '
: 'create table ';
const { client } = this;
let conn = {};
let sql =
createStatement + this.tableName() + ' (' + columns.sql.join(', ') + ')';
// Check if the connection settings are set.
if (client.connectionSettings) {
conn = client.connectionSettings;
}
const charset = this.single.charset || conn.charset || '';
const collation = this.single.collate || conn.collate || '';
const engine = this.single.engine || '';
// var conn = builder.client.connectionSettings;
if (charset) sql += ` default character set ${charset}`;
if (collation) sql += ` collate ${collation}`;
if (engine) sql += ` engine = ${engine}`;
if (this.single.comment) {
const comment = this.single.comment || '';
if (comment.length > 60)
this.client.logger.warn(
'The max length for a table comment is 60 characters'
);
sql += ` comment = '${comment}'`;
}
this.pushQuery(sql);
},
addColumnsPrefix: 'add ',
alterColumnsPrefix: 'modify ',
dropColumnPrefix: 'drop ',
// Compiles the comment on the table.
comment(comment) {
this.pushQuery(`alter table ${this.tableName()} comment = '${comment}'`);
},
changeType() {
// alter table + table + ' modify ' + wrapped + '// type';
},
// Renames a column on the table.
renameColumn(from, to) {
const compiler = this;
const table = this.tableName();
const wrapped = this.formatter.wrap(from) + ' ' + this.formatter.wrap(to);
this.pushQuery({
sql:
`show fields from ${table} where field = ` +
this.formatter.parameter(from),
output(resp) {
const column = resp[0];
const runner = this;
return compiler.getFKRefs(runner).then(([refs]) =>
new Promise((resolve, reject) => {
try {
if (!refs.length) {
resolve();
}
resolve(compiler.dropFKRefs(runner, refs));
} catch (e) {
reject(e);
}
})
.then(function() {
let sql = `alter table ${table} change ${wrapped} ${column.Type}`;
if (String(column.Null).toUpperCase() !== 'YES') {
sql += ` NOT NULL`;
} else {
// This doesn't matter for most cases except Timestamp, where this is important
sql += ` NULL`;
}
if (column.Default !== void 0 && column.Default !== null) {
sql += ` DEFAULT '${column.Default}'`;
}
return runner.query({
sql,
});
})
.then(function() {
if (!refs.length) {
return;
}
return compiler.createFKRefs(
runner,
refs.map(function(ref) {
if (ref.REFERENCED_COLUMN_NAME === from) {
ref.REFERENCED_COLUMN_NAME = to;
}
if (ref.COLUMN_NAME === from) {
ref.COLUMN_NAME = to;
}
return ref;
})
);
})
);
},
});
},
getFKRefs(runner) {
const formatter = this.client.formatter(this.tableBuilder);
const sql =
'SELECT KCU.CONSTRAINT_NAME, KCU.TABLE_NAME, KCU.COLUMN_NAME, ' +
' KCU.REFERENCED_TABLE_NAME, KCU.REFERENCED_COLUMN_NAME, ' +
' RC.UPDATE_RULE, RC.DELETE_RULE ' +
'FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KCU ' +
'JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC ' +
' USING(CONSTRAINT_NAME)' +
'WHERE KCU.REFERENCED_TABLE_NAME = ' +
formatter.parameter(this.tableNameRaw) +
' ' +
' AND KCU.CONSTRAINT_SCHEMA = ' +
formatter.parameter(this.client.database()) +
' ' +
' AND RC.CONSTRAINT_SCHEMA = ' +
formatter.parameter(this.client.database());
return runner.query({
sql,
bindings: formatter.bindings,
});
},
dropFKRefs(runner, refs) {
const formatter = this.client.formatter(this.tableBuilder);
return Promise.all(
refs.map(function(ref) {
const constraintName = formatter.wrap(ref.CONSTRAINT_NAME);
const tableName = formatter.wrap(ref.TABLE_NAME);
return runner.query({
sql: `alter table ${tableName} drop foreign key ${constraintName}`,
});
})
);
},
createFKRefs(runner, refs) {
const formatter = this.client.formatter(this.tableBuilder);
return Promise.all(
refs.map(function(ref) {
const tableName = formatter.wrap(ref.TABLE_NAME);
const keyName = formatter.wrap(ref.CONSTRAINT_NAME);
const column = formatter.columnize(ref.COLUMN_NAME);
const references = formatter.columnize(ref.REFERENCED_COLUMN_NAME);
const inTable = formatter.wrap(ref.REFERENCED_TABLE_NAME);
const onUpdate = ` ON UPDATE ${ref.UPDATE_RULE}`;
const onDelete = ` ON DELETE ${ref.DELETE_RULE}`;
return runner.query({
sql:
`alter table ${tableName} add constraint ${keyName} ` +
'foreign key (' +
column +
') references ' +
inTable +
' (' +
references +
')' +
onUpdate +
onDelete,
});
})
);
},
index(columns, indexName, indexType) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
this.pushQuery(
`alter table ${this.tableName()} add${
indexType ? ` ${indexType}` : ''
} index ${indexName}(${this.formatter.columnize(columns)})`
);
},
primary(columns, constraintName) {
constraintName = constraintName
? this.formatter.wrap(constraintName)
: this.formatter.wrap(`${this.tableNameRaw}_pkey`);
this.pushQuery(
`alter table ${this.tableName()} add primary key ${constraintName}(${this.formatter.columnize(
columns
)})`
);
},
unique(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, columns);
this.pushQuery(
`alter table ${this.tableName()} add unique ${indexName}(${this.formatter.columnize(
columns
)})`
);
},
// Compile a drop index command.
dropIndex(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
this.pushQuery(`alter table ${this.tableName()} drop index ${indexName}`);
},
// Compile a drop foreign key command.
dropForeign(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('foreign', this.tableNameRaw, columns);
this.pushQuery(
`alter table ${this.tableName()} drop foreign key ${indexName}`
);
},
// Compile a drop primary key command.
dropPrimary() {
this.pushQuery(`alter table ${this.tableName()} drop primary key`);
},
// Compile a drop unique key command.
dropUnique(column, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, column);
this.pushQuery(`alter table ${this.tableName()} drop index ${indexName}`);
},
});
module.exports = TableCompiler_MySQL;
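
Illustrative sketch (not from the diff above) of the optional index type accepted by index(); exact output may vary between knex versions.

const knex = require('knex')({ client: 'mysql' });

const [stmt] = knex.schema
  .alterTable('articles', (table) => {
    table.index(['title'], 'articles_title_fulltext', 'FULLTEXT');
  })
  .toSQL();

console.log(stmt.sql);
// roughly: alter table `articles` add FULLTEXT index `articles_title_fulltext`(`title`)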

node_modules/knex/lib/dialects/mysql/transaction.js generated vendored Normal file

@@ -0,0 +1,50 @@
const Transaction = require('../../transaction');
const Debug = require('debug');
const { isUndefined } = require('lodash');
const debug = Debug('knex:tx');
class Transaction_MySQL extends Transaction {}
Object.assign(Transaction_MySQL.prototype, {
query(conn, sql, status, value) {
const t = this;
const q = this.trxClient
.query(conn, sql)
.catch(
(err) => err.errno === 1305,
() => {
this.trxClient.logger.warn(
'Transaction was implicitly committed, do not mix transactions and ' +
'DDL with MySQL (#805)'
);
}
)
.catch(function(err) {
status = 2;
value = err;
t._completed = true;
debug('%s error running transaction query', t.txid);
})
.then(function(res) {
if (status === 1) t._resolver(value);
if (status === 2) {
if (isUndefined(value)) {
if (t.doNotRejectOnRollback && /^ROLLBACK\b/i.test(sql)) {
t._resolver();
return;
}
value = new Error(`Transaction rejected with non-error: ${value}`);
}
t._rejecter(value);
}
return res;
});
if (status === 1 || status === 2) {
t._completed = true;
}
return q;
},
});
module.exports = Transaction_MySQL;

node_modules/knex/lib/dialects/mysql2/index.js generated vendored Normal file

@@ -0,0 +1,35 @@
// MySQL2 Client
// -------
const inherits = require('inherits');
const Client_MySQL = require('../mysql');
const Transaction = require('./transaction');
// Always initialize with the "QueryBuilder" and "QueryCompiler"
// objects, which extend the base 'lib/query/builder' and
// 'lib/query/compiler', respectively.
function Client_MySQL2(config) {
Client_MySQL.call(this, config);
}
inherits(Client_MySQL2, Client_MySQL);
Object.assign(Client_MySQL2.prototype, {
// The "dialect", for reference elsewhere.
driverName: 'mysql2',
transaction() {
return new Transaction(this, ...arguments);
},
_driver() {
return require('mysql2');
},
validateConnection(connection) {
if (connection._fatalError) {
return false;
}
return true;
},
});
module.exports = Client_MySQL2;

node_modules/knex/lib/dialects/mysql2/transaction.js generated vendored Normal file

@@ -0,0 +1,49 @@
const Transaction = require('../../transaction');
const debug = require('debug')('knex:tx');
const { isUndefined } = require('lodash');
class Transaction_MySQL2 extends Transaction {}
Object.assign(Transaction_MySQL2.prototype, {
query(conn, sql, status, value) {
const t = this;
const q = this.trxClient
.query(conn, sql)
.catch(
(err) => err.code === 'ER_SP_DOES_NOT_EXIST',
() => {
this.trxClient.logger.warn(
'Transaction was implicitly committed, do not mix transactions and ' +
'DDL with MySQL (#805)'
);
}
)
.catch(function(err) {
status = 2;
value = err;
t._completed = true;
debug('%s error running transaction query', t.txid);
})
.then(function(res) {
if (status === 1) t._resolver(value);
if (status === 2) {
if (isUndefined(value)) {
if (t.doNotRejectOnRollback && /^ROLLBACK\b/i.test(sql)) {
t._resolver();
return;
}
value = new Error(`Transaction rejected with non-error: ${value}`);
}
t._rejecter(value);
return res;
}
});
if (status === 1 || status === 2) {
t._completed = true;
}
return q;
},
});
module.exports = Transaction_MySQL2;

node_modules/knex/lib/dialects/oracle/formatter.js generated vendored Normal file

@@ -0,0 +1,20 @@
const Formatter = require('../../formatter');
const { ReturningHelper } = require('./utils');
class Oracle_Formatter extends Formatter {
alias(first, second) {
return first + ' ' + second;
}
parameter(value, notSetValue) {
// The returning helper always uses ROWID as a string
if (value instanceof ReturningHelper && this.client.driver) {
value = new this.client.driver.OutParam(this.client.driver.OCCISTRING);
} else if (typeof value === 'boolean') {
value = value ? 1 : 0;
}
return super.parameter(value, notSetValue);
}
}
module.exports = Oracle_Formatter;
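
Illustrative sketch (not from the diff above) of the boolean conversion in parameter(); no database connection is needed to compile.

const knex = require('knex')({ client: 'oracle' });

const { bindings } = knex('flags').where('active', true).toSQL();
console.log(bindings); // [ 1 ]  (booleans are bound as 1/0)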

node_modules/knex/lib/dialects/oracle/index.js generated vendored Normal file

@@ -0,0 +1,196 @@
// Oracle Client
// -------
const { map, flatten, values } = require('lodash');
const { promisify } = require('util');
const inherits = require('inherits');
const Client = require('../../client');
const Bluebird = require('bluebird');
const { bufferToString } = require('../../query/string');
const Formatter = require('./formatter');
const Transaction = require('./transaction');
const QueryCompiler = require('./query/compiler');
const SchemaCompiler = require('./schema/compiler');
const ColumnBuilder = require('./schema/columnbuilder');
const ColumnCompiler = require('./schema/columncompiler');
const TableCompiler = require('./schema/tablecompiler');
const { ReturningHelper, isConnectionError } = require('./utils');
// Always initialize with the "QueryBuilder" and "QueryCompiler"
// objects, which extend the base 'lib/query/builder' and
// 'lib/query/compiler', respectively.
function Client_Oracle(config) {
Client.call(this, config);
}
inherits(Client_Oracle, Client);
Object.assign(Client_Oracle.prototype, {
dialect: 'oracle',
driverName: 'oracle',
_driver() {
return require('oracle');
},
transaction() {
return new Transaction(this, ...arguments);
},
formatter() {
return new Formatter(this, ...arguments);
},
queryCompiler() {
return new QueryCompiler(this, ...arguments);
},
schemaCompiler() {
return new SchemaCompiler(this, ...arguments);
},
columnBuilder() {
return new ColumnBuilder(this, ...arguments);
},
columnCompiler() {
return new ColumnCompiler(this, ...arguments);
},
tableCompiler() {
return new TableCompiler(this, ...arguments);
},
prepBindings(bindings) {
return map(bindings, (value) => {
// the returning helper always uses ROWID as a string
if (value instanceof ReturningHelper && this.driver) {
return new this.driver.OutParam(this.driver.OCCISTRING);
} else if (typeof value === 'boolean') {
return value ? 1 : 0;
} else if (Buffer.isBuffer(value)) {
return bufferToString(value);
}
return value;
});
},
// Get a raw connection, called by the `pool` whenever a new
// connection needs to be added to the pool.
acquireRawConnection() {
return new Bluebird((resolver, rejecter) => {
this.driver.connect(this.connectionSettings, (err, connection) => {
if (err) return rejecter(err);
Bluebird.promisifyAll(connection);
if (this.connectionSettings.prefetchRowCount) {
connection.setPrefetchRowCount(
this.connectionSettings.prefetchRowCount
);
}
resolver(connection);
});
});
},
// Used to explicitly close a connection, called internally by the pool
// when a connection times out or the pool is shutdown.
async destroyRawConnection(connection) {
const close = promisify((cb) => connection.close(cb));
return close();
},
// Return the database for the Oracle client.
database() {
return this.connectionSettings.database;
},
// Position the bindings for the query.
positionBindings(sql) {
let questionCount = 0;
return sql.replace(/\?/g, function() {
questionCount += 1;
return `:${questionCount}`;
});
},
_stream(connection, obj, stream, options) {
return new Bluebird(function(resolver, rejecter) {
stream.on('error', (err) => {
if (isConnectionError(err)) {
connection.__knex__disposed = err;
}
rejecter(err);
});
stream.on('end', resolver);
const queryStream = connection.queryStream(
obj.sql,
obj.bindings,
options
);
queryStream.pipe(stream);
queryStream.on('error', function(error) {
rejecter(error);
stream.emit('error', error);
});
});
},
// Runs the query on the specified connection, providing the bindings
// and any other necessary prep work.
_query(connection, obj) {
if (!obj.sql) throw new Error('The query is empty');
return connection
.executeAsync(obj.sql, obj.bindings)
.then(function(response) {
if (!obj.returning) return response;
const rowIds = obj.outParams.map(
(v, i) => response[`returnParam${i ? i : ''}`]
);
return connection.executeAsync(obj.returningSql, rowIds);
})
.then(function(response) {
obj.response = response;
obj.rowsAffected = response.updateCount;
return obj;
})
.catch((err) => {
if (isConnectionError(err)) {
connection.__knex__disposed = err;
}
throw err;
});
},
// Process the response as returned from the query.
processResponse(obj, runner) {
let { response } = obj;
const { method } = obj;
if (obj.output) return obj.output.call(runner, response);
switch (method) {
case 'select':
case 'pluck':
case 'first':
if (obj.method === 'pluck') response = map(response, obj.pluck);
return obj.method === 'first' ? response[0] : response;
case 'insert':
case 'del':
case 'update':
case 'counter':
if (obj.returning) {
if (obj.returning.length > 1 || obj.returning[0] === '*') {
return response;
}
// return an array with values if only one returning value was specified
return flatten(map(response, values));
}
return obj.rowsAffected;
default:
return response;
}
},
});
module.exports = Client_Oracle;
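
Illustrative sketch (not from the diff above) of positionBindings(): toNative() rewrites the generic '?' placeholders into Oracle's :1, :2, ... form; no database connection is needed to compile.

const knex = require('knex')({ client: 'oracle' });

const { sql, bindings } = knex('users').where('id', 1).toSQL().toNative();
console.log(sql);      // roughly: select * from "users" where "id" = :1
console.log(bindings); // [ 1 ]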

node_modules/knex/lib/dialects/oracle/query/compiler.js generated vendored Normal file

@@ -0,0 +1,325 @@
/* eslint max-len:0 */
// Oracle Query Builder & Compiler
// ------
const {
assign,
isPlainObject,
isEmpty,
isString,
map,
reduce,
compact,
identity,
} = require('lodash');
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const { ReturningHelper } = require('../utils');
const components = [
'columns',
'join',
'where',
'union',
'group',
'having',
'order',
'lock',
];
// Query Compiler
// -------
// Set the "Formatter" to use for the queries,
// ensuring that all parameterized values (even across sub-queries)
// are properly built into the same query.
function QueryCompiler_Oracle(client, builder) {
QueryCompiler.call(this, client, builder);
}
inherits(QueryCompiler_Oracle, QueryCompiler);
assign(QueryCompiler_Oracle.prototype, {
// Compiles an "insert" query, allowing for multiple
// inserts using a single query statement.
insert() {
let insertValues = this.single.insert || [];
let { returning } = this.single;
if (!Array.isArray(insertValues) && isPlainObject(this.single.insert)) {
insertValues = [this.single.insert];
}
// always wrap returning argument in array
if (returning && !Array.isArray(returning)) {
returning = [returning];
}
if (
Array.isArray(insertValues) &&
insertValues.length === 1 &&
isEmpty(insertValues[0])
) {
return this._addReturningToSqlAndConvert(
`insert into ${this.tableName} (${this.formatter.wrap(
this.single.returning
)}) values (default)`,
returning,
this.tableName
);
}
if (
isEmpty(this.single.insert) &&
typeof this.single.insert !== 'function'
) {
return '';
}
const insertData = this._prepInsert(insertValues);
const sql = {};
if (isString(insertData)) {
return this._addReturningToSqlAndConvert(
`insert into ${this.tableName} ${insertData}`,
returning
);
}
if (insertData.values.length === 1) {
return this._addReturningToSqlAndConvert(
`insert into ${this.tableName} (${this.formatter.columnize(
insertData.columns
)}) values (${this.formatter.parameterize(insertData.values[0])})`,
returning,
this.tableName
);
}
const insertDefaultsOnly = insertData.columns.length === 0;
sql.sql =
'begin ' +
map(insertData.values, (value) => {
let returningHelper;
const parameterizedValues = !insertDefaultsOnly
? this.formatter.parameterize(value, this.client.valueForUndefined)
: '';
const returningValues = Array.isArray(returning)
? returning
: [returning];
let subSql = `insert into ${this.tableName} `;
if (returning) {
returningHelper = new ReturningHelper(returningValues.join(':'));
sql.outParams = (sql.outParams || []).concat(returningHelper);
}
if (insertDefaultsOnly) {
// no columns given so only the default value
subSql += `(${this.formatter.wrap(
this.single.returning
)}) values (default)`;
} else {
subSql += `(${this.formatter.columnize(
insertData.columns
)}) values (${parameterizedValues})`;
}
subSql += returning
? ` returning ROWID into ${this.formatter.parameter(returningHelper)}`
: '';
// pre bind position because subSql is an execute immediate parameter
// later position binding will only convert the ? params
subSql = this.formatter.client.positionBindings(subSql);
const parameterizedValuesWithoutDefault = parameterizedValues
.replace('DEFAULT, ', '')
.replace(', DEFAULT', '');
return (
`execute immediate '${subSql.replace(/'/g, "''")}` +
(parameterizedValuesWithoutDefault || returning ? "' using " : '') +
parameterizedValuesWithoutDefault +
(parameterizedValuesWithoutDefault && returning ? ', ' : '') +
(returning ? 'out ?' : '') +
';'
);
}).join(' ') +
'end;';
if (returning) {
sql.returning = returning;
// generate select statement with special order by to keep the order because 'in (..)' may change the order
sql.returningSql =
`select ${this.formatter.columnize(returning)}` +
' from ' +
this.tableName +
' where ROWID in (' +
sql.outParams.map((v, i) => `:${i + 1}`).join(', ') +
')' +
' order by case ROWID ' +
sql.outParams
.map((v, i) => `when CHARTOROWID(:${i + 1}) then ${i}`)
.join(' ') +
' end';
}
return sql;
},
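// Example (illustrative, with a hypothetical table): a two-row insert with returning: ['id']
// produces a returningSql along the lines of
//   select "id" from <table> where ROWID in (:1, :2)
//   order by case ROWID when CHARTOROWID(:1) then 0 when CHARTOROWID(:2) then 1 end
// so the rows selected back come out in insert order.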
// Update method, including joins, wheres, order & limits.
update() {
const updates = this._prepUpdate(this.single.update);
const where = this.where();
let { returning } = this.single;
const sql =
`update ${this.tableName}` +
' set ' +
updates.join(', ') +
(where ? ` ${where}` : '');
if (!returning) {
return sql;
}
// always wrap returning argument in array
if (!Array.isArray(returning)) {
returning = [returning];
}
return this._addReturningToSqlAndConvert(sql, returning, this.tableName);
},
// Compiles a `truncate` query.
truncate() {
return `truncate table ${this.tableName}`;
},
forUpdate() {
return 'for update';
},
forShare() {
// lock for share is not directly supported by oracle
// use LOCK TABLE .. IN SHARE MODE; instead
this.client.logger.warn(
'lock for share is not supported by oracle dialect'
);
return '';
},
// Compiles a `columnInfo` query.
columnInfo() {
const column = this.single.columnInfo;
// The user may have specified a custom wrapIdentifier function in the config. We
// need to run the identifiers through that function, but not format them as
// identifiers otherwise.
const table = this.client.customWrapIdentifier(this.single.table, identity);
// The Node Oracle drivers don't support the LONG type (which is the type of data_default)
const sql = `select * from xmltable( '/ROWSET/ROW'
passing dbms_xmlgen.getXMLType('
select char_col_decl_length, column_name, data_type, data_default, nullable
from user_tab_columns where table_name = ''${table}'' ')
columns
CHAR_COL_DECL_LENGTH number, COLUMN_NAME varchar2(200), DATA_TYPE varchar2(106),
DATA_DEFAULT clob, NULLABLE varchar2(1))`;
return {
sql: sql,
output(resp) {
const out = reduce(
resp,
function(columns, val) {
columns[val.COLUMN_NAME] = {
type: val.DATA_TYPE,
defaultValue: val.DATA_DEFAULT,
maxLength: val.CHAR_COL_DECL_LENGTH,
nullable: val.NULLABLE === 'Y',
};
return columns;
},
{}
);
return (column && out[column]) || out;
},
};
},
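// Example (illustrative, hypothetical values): the output callback turns the xmltable rows
// into an object keyed by column name, e.g.
//   { NAME: { type: 'VARCHAR2', defaultValue: null, maxLength: 255, nullable: true } }
// and returns only that column's entry when a single column was requested.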
select() {
let query = this.with();
const statements = map(components, (component) => {
return this[component]();
});
query += compact(statements).join(' ');
return this._surroundQueryWithLimitAndOffset(query);
},
aggregate(stmt) {
return this._aggregate(stmt, { aliasSeparator: ' ' });
},
// for single commands only
_addReturningToSqlAndConvert(sql, returning, tableName) {
const res = {
sql,
};
if (!returning) {
return res;
}
const returningValues = Array.isArray(returning) ? returning : [returning];
const returningHelper = new ReturningHelper(returningValues.join(':'));
res.sql =
sql +
' returning ROWID into ' +
this.formatter.parameter(returningHelper);
res.returningSql = `select ${this.formatter.columnize(
returning
)} from ${tableName} where ROWID = :1`;
res.outParams = [returningHelper];
res.returning = returning;
return res;
},
_surroundQueryWithLimitAndOffset(query) {
let { limit } = this.single;
const { offset } = this.single;
const hasLimit = limit || limit === 0 || limit === '0';
limit = +limit;
if (!hasLimit && !offset) return query;
query = query || '';
if (hasLimit && !offset) {
return `select * from (${query}) where rownum <= ${this.formatter.parameter(
limit
)}`;
}
const endRow = +offset + (hasLimit ? limit : 10000000000000);
return (
'select * from ' +
'(select row_.*, ROWNUM rownum_ from (' +
query +
') row_ ' +
'where rownum <= ' +
this.formatter.parameter(endRow) +
') ' +
'where rownum_ > ' +
this.formatter.parameter(offset)
);
},
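// Example (illustrative): with limit 10 and offset 5 the compiled query is wrapped as
//   select * from (select row_.*, ROWNUM rownum_ from (<query>) row_ where rownum <= ?) where rownum_ > ?
// with bindings [15, 5] (offset + limit, then offset); with a limit but no offset only the simpler
//   select * from (<query>) where rownum <= ?
// wrapper is used.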
});
// Compiles the `select` statement, or nested sub-selects
// by calling each of the component compilers, trimming out
// the empties, and returning a generated query string.
QueryCompiler_Oracle.prototype.first = QueryCompiler_Oracle.prototype.select;
module.exports = QueryCompiler_Oracle;

View File

@@ -0,0 +1,18 @@
const inherits = require('inherits');
const ColumnBuilder = require('../../../schema/columnbuilder');
const { toArray } = require('lodash');
function ColumnBuilder_Oracle() {
ColumnBuilder.apply(this, arguments);
}
inherits(ColumnBuilder_Oracle, ColumnBuilder);
// checkIn added to the builder to allow the column compiler to change the
// order via the modifiers ("check" must be after "default")
ColumnBuilder_Oracle.prototype.checkIn = function() {
this._modifiers.checkIn = toArray(arguments);
return this;
};
module.exports = ColumnBuilder_Oracle;

View File

@@ -0,0 +1,139 @@
const { uniq, map } = require('lodash');
const inherits = require('inherits');
const Raw = require('../../../raw');
const ColumnCompiler = require('../../../schema/columncompiler');
const Trigger = require('./trigger');
// Column Compiler
// -------
function ColumnCompiler_Oracle() {
ColumnCompiler.apply(this, arguments);
this.modifiers = ['defaultTo', 'checkIn', 'nullable', 'comment'];
}
inherits(ColumnCompiler_Oracle, ColumnCompiler);
Object.assign(ColumnCompiler_Oracle.prototype, {
// helper function for pushAdditional in increments() and bigincrements()
_createAutoIncrementTriggerAndSequence() {
// TODO Add warning that sequence etc is created
this.pushAdditional(function() {
const tableName = this.tableCompiler.tableNameRaw;
const createTriggerSQL = Trigger.createAutoIncrementTrigger(
this.client.logger,
tableName
);
this.pushQuery(createTriggerSQL);
});
},
increments() {
this._createAutoIncrementTriggerAndSequence();
return 'integer not null primary key';
},
bigincrements() {
this._createAutoIncrementTriggerAndSequence();
return 'number(20, 0) not null primary key';
},
floating(precision) {
const parsedPrecision = this._num(precision, 0);
return `float${parsedPrecision ? `(${parsedPrecision})` : ''}`;
},
double(precision, scale) {
// if (!precision) return 'number'; // TODO: Check If default is ok
return `number(${this._num(precision, 8)}, ${this._num(scale, 2)})`;
},
decimal(precision, scale) {
if (precision === null) return 'decimal';
return `decimal(${this._num(precision, 8)}, ${this._num(scale, 2)})`;
},
integer(length) {
return length ? `number(${this._num(length, 11)})` : 'integer';
},
tinyint: 'smallint',
smallint: 'smallint',
mediumint: 'integer',
biginteger: 'number(20, 0)',
text: 'clob',
enu(allowed) {
allowed = uniq(allowed);
const maxLength = (allowed || []).reduce(
(maxLength, name) => Math.max(maxLength, String(name).length),
1
);
// implicitly add the enum values as checked values
this.columnBuilder._modifiers.checkIn = [allowed];
return `varchar2(${maxLength})`;
},
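// Example (illustrative, hypothetical "size" column): enu(['small', 'medium', 'large'])
// yields 'varchar2(6)' (the longest value is 6 characters) and registers a checkIn
// modifier, which the checkIn() modifier below turns into
//   check ("size" in ('small', 'medium', 'large'))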
time: 'timestamp with time zone',
datetime(without) {
return without ? 'timestamp' : 'timestamp with time zone';
},
timestamp(without) {
return without ? 'timestamp' : 'timestamp with time zone';
},
bit: 'clob',
json: 'clob',
bool() {
// implicitly add the check for 0 and 1
this.columnBuilder._modifiers.checkIn = [[0, 1]];
return 'number(1, 0)';
},
varchar(length) {
return `varchar2(${this._num(length, 255)})`;
},
// Modifiers
// ------
comment(comment) {
const columnName = this.args[0] || this.defaults('columnName');
this.pushAdditional(function() {
this.pushQuery(
`comment on column ${this.tableCompiler.tableName()}.` +
this.formatter.wrap(columnName) +
" is '" +
(comment || '') +
"'"
);
}, comment);
},
checkIn(value) {
// TODO: Maybe accept arguments also as array
// TODO: value(s) should be escaped properly
if (value === undefined) {
return '';
} else if (value instanceof Raw) {
value = value.toQuery();
} else if (Array.isArray(value)) {
value = map(value, (v) => `'${v}'`).join(', ');
} else {
value = `'${value}'`;
}
return `check (${this.formatter.wrap(this.args[0])} in (${value}))`;
},
});
module.exports = ColumnCompiler_Oracle;

View File

@@ -0,0 +1,81 @@
// Oracle Schema Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler = require('../../../schema/compiler');
const utils = require('../utils');
const Trigger = require('./trigger');
function SchemaCompiler_Oracle() {
SchemaCompiler.apply(this, arguments);
}
inherits(SchemaCompiler_Oracle, SchemaCompiler);
// Rename a table on the schema.
SchemaCompiler_Oracle.prototype.renameTable = function(tableName, to) {
const renameTable = Trigger.renameTableAndAutoIncrementTrigger(
this.client.logger,
tableName,
to
);
this.pushQuery(renameTable);
};
// Check whether a table exists on the query.
SchemaCompiler_Oracle.prototype.hasTable = function(tableName) {
this.pushQuery({
sql:
'select TABLE_NAME from USER_TABLES where TABLE_NAME = ' +
this.formatter.parameter(tableName),
output(resp) {
return resp.length > 0;
},
});
};
// Check whether a column exists on the schema.
SchemaCompiler_Oracle.prototype.hasColumn = function(tableName, column) {
const sql =
`select COLUMN_NAME from USER_TAB_COLUMNS ` +
`where TABLE_NAME = ${this.formatter.parameter(tableName)} ` +
`and COLUMN_NAME = ${this.formatter.parameter(column)}`;
this.pushQuery({ sql, output: (resp) => resp.length > 0 });
};
SchemaCompiler_Oracle.prototype.dropSequenceIfExists = function(sequenceName) {
this.pushQuery(
utils.wrapSqlWithCatch(
`drop sequence ${this.formatter.wrap(sequenceName)}`,
-2289
)
);
};
SchemaCompiler_Oracle.prototype._dropRelatedSequenceIfExists = function(
tableName
) {
// removing the sequence that was possibly generated by increments() column
const sequenceName = utils.generateCombinedName(
this.client.logger,
'seq',
tableName
);
this.dropSequenceIfExists(sequenceName);
};
SchemaCompiler_Oracle.prototype.dropTable = function(tableName) {
this.pushQuery(`drop table ${this.formatter.wrap(tableName)}`);
// removing the sequence that was possibly generated by increments() column
this._dropRelatedSequenceIfExists(tableName);
};
SchemaCompiler_Oracle.prototype.dropTableIfExists = function(tableName) {
this.pushQuery(
utils.wrapSqlWithCatch(`drop table ${this.formatter.wrap(tableName)}`, -942)
);
// removing the sequence that was possibly generated by increments() column
this._dropRelatedSequenceIfExists(tableName);
};
module.exports = SchemaCompiler_Oracle;

View File

@@ -0,0 +1,167 @@
/* eslint max-len:0 */
const inherits = require('inherits');
const utils = require('../utils');
const TableCompiler = require('../../../schema/tablecompiler');
const helpers = require('../../../helpers');
const Trigger = require('./trigger');
const { map } = require('lodash');
// Table Compiler
// ------
function TableCompiler_Oracle() {
TableCompiler.apply(this, arguments);
}
inherits(TableCompiler_Oracle, TableCompiler);
Object.assign(TableCompiler_Oracle.prototype, {
addColumns(columns, prefix) {
if (columns.sql.length > 0) {
prefix = prefix || this.addColumnsPrefix;
const columnSql = map(columns.sql, (column) => column);
const alter = this.lowerCase ? 'alter table ' : 'ALTER TABLE ';
let sql = `${alter}${this.tableName()} ${prefix}`;
if (columns.sql.length > 1) {
sql += `(${columnSql.join(', ')})`;
} else {
sql += columnSql.join(', ');
}
this.pushQuery({
sql,
bindings: columns.bindings,
});
}
},
// Compile a rename column command.
renameColumn(from, to) {
// Remove quotes around tableName
const tableName = this.tableName().slice(1, -1);
return this.pushQuery(
Trigger.renameColumnTrigger(this.client.logger, tableName, from, to)
);
},
compileAdd(builder) {
const table = this.formatter.wrap(builder);
const columns = this.prefixArray('add column', this.getColumns(builder));
return this.pushQuery({
sql: `alter table ${table} ${columns.join(', ')}`,
});
},
// Adds the "create" query to the query sequence.
createQuery(columns, ifNot) {
const sql = `create table ${this.tableName()} (${columns.sql.join(', ')})`;
this.pushQuery({
// catch "name is already used by an existing object" for workaround for "if not exists"
sql: ifNot ? utils.wrapSqlWithCatch(sql, -955) : sql,
bindings: columns.bindings,
});
if (this.single.comment) this.comment(this.single.comment);
},
// Compiles the comment on the table.
comment(comment) {
this.pushQuery(`comment on table ${this.tableName()} is '${comment}'`);
},
addColumnsPrefix: 'add ',
alterColumnsPrefix: 'modify ',
dropColumn() {
const columns = helpers.normalizeArr.apply(null, arguments);
this.pushQuery(
`alter table ${this.tableName()} drop (${this.formatter.columnize(
columns
)})`
);
},
changeType() {
// alter table + table + ' modify ' + wrapped + '// type';
},
_indexCommand(type, tableName, columns) {
return this.formatter.wrap(
utils.generateCombinedName(this.client.logger, type, tableName, columns)
);
},
primary(columns, constraintName) {
constraintName = constraintName
? this.formatter.wrap(constraintName)
: this.formatter.wrap(`${this.tableNameRaw}_pkey`);
this.pushQuery(
`alter table ${this.tableName()} add constraint ${constraintName} primary key (${this.formatter.columnize(
columns
)})`
);
},
dropPrimary(constraintName) {
constraintName = constraintName
? this.formatter.wrap(constraintName)
: this.formatter.wrap(this.tableNameRaw + '_pkey');
this.pushQuery(
`alter table ${this.tableName()} drop constraint ${constraintName}`
);
},
index(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
this.pushQuery(
`create index ${indexName} on ${this.tableName()}` +
' (' +
this.formatter.columnize(columns) +
')'
);
},
dropIndex(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
this.pushQuery(`drop index ${indexName}`);
},
unique(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, columns);
this.pushQuery(
`alter table ${this.tableName()} add constraint ${indexName}` +
' unique (' +
this.formatter.columnize(columns) +
')'
);
},
dropUnique(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, columns);
this.pushQuery(
`alter table ${this.tableName()} drop constraint ${indexName}`
);
},
dropForeign(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('foreign', this.tableNameRaw, columns);
this.pushQuery(
`alter table ${this.tableName()} drop constraint ${indexName}`
);
},
});
module.exports = TableCompiler_Oracle;

126
node_modules/knex/lib/dialects/oracle/schema/trigger.js generated vendored Normal file
View File

@@ -0,0 +1,126 @@
const utils = require('../utils');
const trigger = {
renameColumnTrigger: function(logger, tableName, columnName, to) {
const triggerName = utils.generateCombinedName(
logger,
'autoinc_trg',
tableName
);
const sequenceName = utils.generateCombinedName(logger, 'seq', tableName);
return (
`DECLARE ` +
`PK_NAME VARCHAR(200); ` +
`IS_AUTOINC NUMBER := 0; ` +
`BEGIN` +
` EXECUTE IMMEDIATE ('ALTER TABLE "${tableName}" RENAME COLUMN "${columnName}" TO "${to}"');` +
` SELECT COUNT(*) INTO IS_AUTOINC from "USER_TRIGGERS" where trigger_name = '${triggerName}';` +
` IF (IS_AUTOINC > 0) THEN` +
` SELECT cols.column_name INTO PK_NAME` +
` FROM all_constraints cons, all_cons_columns cols` +
` WHERE cons.constraint_type = 'P'` +
` AND cons.constraint_name = cols.constraint_name` +
` AND cons.owner = cols.owner` +
` AND cols.table_name = '${tableName}';` +
` IF ('${to}' = PK_NAME) THEN` +
` EXECUTE IMMEDIATE ('DROP TRIGGER "${triggerName}"');` +
` EXECUTE IMMEDIATE ('create or replace trigger "${triggerName}"` +
` BEFORE INSERT on "${tableName}" for each row` +
` declare` +
` checking number := 1;` +
` begin` +
` if (:new."${to}" is null) then` +
` while checking >= 1 loop` +
` select "${sequenceName}".nextval into :new."${to}" from dual;` +
` select count("${to}") into checking from "${tableName}"` +
` where "${to}" = :new."${to}";` +
` end loop;` +
` end if;` +
` end;');` +
` end if;` +
` end if;` +
`END;`
);
},
createAutoIncrementTrigger: function(logger, tableName) {
const triggerName = utils.generateCombinedName(
logger,
'autoinc_trg',
tableName
);
const sequenceName = utils.generateCombinedName(logger, 'seq', tableName);
return (
`DECLARE ` +
`PK_NAME VARCHAR(200); ` +
`BEGIN` +
` EXECUTE IMMEDIATE ('CREATE SEQUENCE "${sequenceName}"');` +
` SELECT cols.column_name INTO PK_NAME` +
` FROM all_constraints cons, all_cons_columns cols` +
` WHERE cons.constraint_type = 'P'` +
` AND cons.constraint_name = cols.constraint_name` +
` AND cons.owner = cols.owner` +
` AND cols.table_name = '${tableName}';` +
` execute immediate ('create or replace trigger "${triggerName}"` +
` BEFORE INSERT on "${tableName}"` +
` for each row` +
` declare` +
` checking number := 1;` +
` begin` +
` if (:new."' || PK_NAME || '" is null) then` +
` while checking >= 1 loop` +
` select "${sequenceName}".nextval into :new."' || PK_NAME || '" from dual;` +
` select count("' || PK_NAME || '") into checking from "${tableName}"` +
` where "' || PK_NAME || '" = :new."' || PK_NAME || '";` +
` end loop;` +
` end if;` +
` end;'); ` +
`END;`
);
},
renameTableAndAutoIncrementTrigger: function(logger, tableName, to) {
const triggerName = utils.generateCombinedName(
logger,
'autoinc_trg',
tableName
);
const sequenceName = utils.generateCombinedName(logger, 'seq', tableName);
const toTriggerName = utils.generateCombinedName(logger, 'autoinc_trg', to);
const toSequenceName = utils.generateCombinedName(logger, 'seq', to);
return (
`DECLARE ` +
`PK_NAME VARCHAR(200); ` +
`IS_AUTOINC NUMBER := 0; ` +
`BEGIN` +
` EXECUTE IMMEDIATE ('RENAME "${tableName}" TO "${to}"');` +
` SELECT COUNT(*) INTO IS_AUTOINC from "USER_TRIGGERS" where trigger_name = '${triggerName}';` +
` IF (IS_AUTOINC > 0) THEN` +
` EXECUTE IMMEDIATE ('DROP TRIGGER "${triggerName}"');` +
` EXECUTE IMMEDIATE ('RENAME "${sequenceName}" TO "${toSequenceName}"');` +
` SELECT cols.column_name INTO PK_NAME` +
` FROM all_constraints cons, all_cons_columns cols` +
` WHERE cons.constraint_type = 'P'` +
` AND cons.constraint_name = cols.constraint_name` +
` AND cons.owner = cols.owner` +
` AND cols.table_name = '${to}';` +
` EXECUTE IMMEDIATE ('create or replace trigger "${toTriggerName}"` +
` BEFORE INSERT on "${to}" for each row` +
` declare` +
` checking number := 1;` +
` begin` +
` if (:new."' || PK_NAME || '" is null) then` +
` while checking >= 1 loop` +
` select "${toSequenceName}".nextval into :new."' || PK_NAME || '" from dual;` +
` select count("' || PK_NAME || '") into checking from "${to}"` +
` where "' || PK_NAME || '" = :new."' || PK_NAME || '";` +
` end loop;` +
` end if;` +
` end;');` +
` end if;` +
`END;`
);
},
};
module.exports = trigger;

77
node_modules/knex/lib/dialects/oracle/transaction.js generated vendored Normal file
View File

@@ -0,0 +1,77 @@
const Bluebird = require('bluebird');
const Transaction = require('../../transaction');
const { isUndefined } = require('lodash');
const debugTx = require('debug')('knex:tx');
module.exports = class Oracle_Transaction extends Transaction {
// disable autocommit to allow correct behavior (default is true)
begin() {
return Bluebird.resolve();
}
commit(conn, value) {
this._completed = true;
return conn
.commitAsync()
.then(() => value)
.then(this._resolver, this._rejecter);
}
release(conn, value) {
return this._resolver(value);
}
rollback(conn, err) {
this._completed = true;
debugTx('%s: rolling back', this.txid);
return conn
.rollbackAsync()
.throw(err)
.catch((error) => {
if (isUndefined(error)) {
if (this.doNotRejectOnRollback) {
this._resolver();
return;
}
error = new Error(`Transaction rejected with non-error: ${error}`);
}
return this._rejecter(error);
});
}
acquireConnection(config, cb) {
const configConnection = config && config.connection;
return new Bluebird((resolve, reject) => {
try {
resolve(configConnection || this.client.acquireConnection());
} catch (e) {
reject(e);
}
})
.then((connection) => {
connection.__knexTxId = this.txid;
return connection;
})
.then((connection) => {
if (!this.outerTx) {
connection.setAutoCommit(false);
}
return connection;
})
.then(async (connection) => {
try {
return await cb(connection);
} finally {
debugTx('%s: releasing connection', this.txid);
connection.setAutoCommit(true);
if (!configConnection) {
this.client.releaseConnection(connection);
} else {
debugTx('%s: not releasing external connection', this.txid);
}
}
});
}
};

86
node_modules/knex/lib/dialects/oracle/utils.js generated vendored Normal file
View File

@@ -0,0 +1,86 @@
function generateCombinedName(logger, postfix, name, subNames) {
const crypto = require('crypto');
const limit = 30;
if (!Array.isArray(subNames)) subNames = subNames ? [subNames] : [];
const table = name.replace(/\.|-/g, '_');
const subNamesPart = subNames.join('_');
let result = `${table}_${
subNamesPart.length ? subNamesPart + '_' : ''
}${postfix}`.toLowerCase();
if (result.length > limit) {
logger.warn(
`Automatically generated name "${result}" exceeds ${limit} character ` +
`limit for Oracle. Using base64 encoded sha1 of that name instead.`
);
// generates the sha1 of the name and encode it with base64
result = crypto
.createHash('sha1')
.update(result)
.digest('base64')
.replace('=', '');
}
return result;
}
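// Example (illustrative): generateCombinedName(logger, 'index', 'users', ['first_name'])
// returns 'users_first_name_index'; a generated name longer than 30 characters is replaced
// by the base64-encoded sha1 of that name (with the trailing '=' removed) and a warning is logged.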
function wrapSqlWithCatch(sql, errorNumberToCatch) {
return (
`begin execute immediate '${sql.replace(/'/g, "''")}'; ` +
`exception when others then if sqlcode != ${errorNumberToCatch} then raise; ` +
`end if; ` +
`end;`
);
}
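// Example (illustrative): wrapSqlWithCatch('drop table "users"', -942) produces
//   begin execute immediate 'drop table "users"'; exception when others then
//   if sqlcode != -942 then raise; end if; end;
// i.e. the statement runs but ORA-00942 ("table or view does not exist") is swallowed.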
function ReturningHelper(columnName) {
this.columnName = columnName;
}
ReturningHelper.prototype.toString = function() {
return `[object ReturningHelper:${this.columnName}]`;
};
// If the error is any of these, we'll assume we need to
// mark the connection as failed
function isConnectionError(err) {
return [
'ORA-03114', // not connected to ORACLE
'ORA-03113', // end-of-file on communication channel
'ORA-03135', // connection lost contact
'ORA-12514', // listener does not currently know of service requested in connect descriptor
'ORA-00022', // invalid session ID; access denied
'ORA-00028', // your session has been killed
'ORA-00031', // your session has been marked for kill
'ORA-00045', // your session has been terminated with no replay
'ORA-00378', // buffer pools cannot be created as specified
'ORA-00602', // internal programming exception
'ORA-00603', // ORACLE server session terminated by fatal error
'ORA-00609', // could not attach to incoming connection
'ORA-01012', // not logged on
'ORA-01041', // internal error. hostdef extension doesn't exist
'ORA-01043', // user side memory corruption
'ORA-01089', // immediate shutdown or close in progress
'ORA-01092', // ORACLE instance terminated. Disconnection forced
'ORA-02396', // exceeded maximum idle time, please connect again
'ORA-03122', // attempt to close ORACLE-side window on user side
'ORA-12153', // TNS:not connected
'ORA-12537', // TNS:connection closed
'ORA-12547', // TNS:lost contact
'ORA-12570', // TNS:packet reader failure
'ORA-12583', // TNS:no reader
'ORA-27146', // post/wait initialization failed
'ORA-28511', // lost RPC connection
'ORA-56600', // an illegal OCI function call was issued
'NJS-040',
'NJS-024',
'NJS-003',
].some(function(prefix) {
return err.message.indexOf(prefix) === 0;
});
}
module.exports = {
generateCombinedName,
isConnectionError,
wrapSqlWithCatch,
ReturningHelper,
};

454
node_modules/knex/lib/dialects/oracledb/index.js generated vendored Normal file
View File

@@ -0,0 +1,454 @@
// Oracledb Client
// -------
const _ = require('lodash');
const inherits = require('inherits');
const QueryCompiler = require('./query/compiler');
const ColumnCompiler = require('./schema/columncompiler');
const { BlobHelper, ReturningHelper, isConnectionError } = require('./utils');
const Bluebird = require('bluebird');
const stream = require('stream');
const { promisify } = require('util');
const Transaction = require('./transaction');
const Client_Oracle = require('../oracle');
const Oracle_Formatter = require('../oracle/formatter');
function Client_Oracledb() {
Client_Oracle.apply(this, arguments);
// Node.js only has 4 background threads by default; oracledb needs one per connection
if (this.driver) {
process.env.UV_THREADPOOL_SIZE = process.env.UV_THREADPOOL_SIZE || 1;
process.env.UV_THREADPOOL_SIZE =
parseInt(process.env.UV_THREADPOOL_SIZE) + this.driver.poolMax;
}
}
inherits(Client_Oracledb, Client_Oracle);
Client_Oracledb.prototype.driverName = 'oracledb';
Client_Oracledb.prototype._driver = function() {
const client = this;
const oracledb = require('oracledb');
client.fetchAsString = [];
if (this.config.fetchAsString && _.isArray(this.config.fetchAsString)) {
this.config.fetchAsString.forEach(function(type) {
if (!_.isString(type)) return;
type = type.toUpperCase();
if (oracledb[type]) {
if (type !== 'NUMBER' && type !== 'DATE' && type !== 'CLOB') {
this.logger.warn(
'Only "date", "number" and "clob" are supported for fetchAsString'
);
}
client.fetchAsString.push(oracledb[type]);
}
});
}
return oracledb;
};
Client_Oracledb.prototype.queryCompiler = function() {
return new QueryCompiler(this, ...arguments);
};
Client_Oracledb.prototype.columnCompiler = function() {
return new ColumnCompiler(this, ...arguments);
};
Client_Oracledb.prototype.formatter = function() {
return new Oracledb_Formatter(this, ...arguments);
};
Client_Oracledb.prototype.transaction = function() {
return new Transaction(this, ...arguments);
};
Client_Oracledb.prototype.prepBindings = function(bindings) {
return _.map(bindings, (value) => {
if (value instanceof BlobHelper && this.driver) {
return { type: this.driver.BLOB, dir: this.driver.BIND_OUT };
// The returning helper always uses ROWID as a string
} else if (value instanceof ReturningHelper && this.driver) {
return { type: this.driver.STRING, dir: this.driver.BIND_OUT };
} else if (typeof value === 'boolean') {
return value ? 1 : 0;
}
return value;
});
};
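// Example (illustrative, someBlobHelper is hypothetical): prepBindings([true, 'x', someBlobHelper])
// maps the boolean to 1, passes 'x' through unchanged, and replaces the BlobHelper with an
// out-bind descriptor such as { type: oracledb.BLOB, dir: oracledb.BIND_OUT }.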
// Get a raw connection, called by the `pool` whenever a new
// connection needs to be added to the pool.
Client_Oracledb.prototype.acquireRawConnection = function() {
const client = this;
const asyncConnection = new Bluebird(function(resolver, rejecter) {
// With external authentication we don't have to worry about username/password;
// otherwise the username and password need to be set
const oracleDbConfig = client.connectionSettings.externalAuth
? { externalAuth: client.connectionSettings.externalAuth }
: {
user: client.connectionSettings.user,
password: client.connectionSettings.password,
};
// In the case of external authentication a connect string will be given
oracleDbConfig.connectString =
client.connectionSettings.connectString ||
client.connectionSettings.host + '/' + client.connectionSettings.database;
if (client.connectionSettings.prefetchRowCount) {
oracleDbConfig.prefetchRows = client.connectionSettings.prefetchRowCount;
}
if (!_.isUndefined(client.connectionSettings.stmtCacheSize)) {
oracleDbConfig.stmtCacheSize = client.connectionSettings.stmtCacheSize;
}
client.driver.fetchAsString = client.fetchAsString;
client.driver.getConnection(oracleDbConfig, function(err, connection) {
if (err) {
return rejecter(err);
}
connection.commitAsync = function() {
return new Bluebird((commitResolve, commitReject) => {
if (connection.isTransaction) {
return commitResolve();
}
this.commit(function(err) {
if (err) {
return commitReject(err);
}
commitResolve();
});
});
};
connection.rollbackAsync = function() {
return new Bluebird((rollbackResolve, rollbackReject) => {
this.rollback(function(err) {
if (err) {
return rollbackReject(err);
}
rollbackResolve();
});
});
};
const fetchAsync = promisify(function(sql, bindParams, options, cb) {
options = options || {};
options.outFormat =
client.driver.OUT_FORMAT_OBJECT || client.driver.OBJECT;
if (!options.outFormat) {
throw new Error('not found oracledb.outFormat constants');
}
if (options.resultSet) {
connection.execute(sql, bindParams || [], options, function(
err,
result
) {
if (err) {
if (isConnectionError(err)) {
connection.close().catch(function(err) {});
connection.__knex__disposed = err;
}
return cb(err);
}
const fetchResult = { rows: [], resultSet: result.resultSet };
const numRows = 100;
const fetchRowsFromRS = function(connection, resultSet, numRows) {
resultSet.getRows(numRows, function(err, rows) {
if (err) {
if (isConnectionError(err)) {
connection.close().catch(function(err) {});
connection.__knex__disposed = err;
}
resultSet.close(function() {
return cb(err);
});
} else if (rows.length === 0) {
return cb(null, fetchResult);
} else if (rows.length > 0) {
if (rows.length === numRows) {
fetchResult.rows = fetchResult.rows.concat(rows);
fetchRowsFromRS(connection, resultSet, numRows);
} else {
fetchResult.rows = fetchResult.rows.concat(rows);
return cb(null, fetchResult);
}
}
});
};
fetchRowsFromRS(connection, result.resultSet, numRows);
});
} else {
connection.execute(sql, bindParams || [], options, function(
err,
result
) {
if (err) {
// dispose the connection on connection error
if (isConnectionError(err)) {
connection.close().catch(function(err) {});
connection.__knex__disposed = err;
}
return cb(err);
}
return cb(null, result);
});
}
});
connection.executeAsync = function(sql, bindParams, options) {
// Read all LOBs
return fetchAsync(sql, bindParams, options).then(async (results) => {
const closeResultSet = () => {
return results.resultSet
? promisify(results.resultSet.close).call(results.resultSet)
: Promise.resolve();
};
// Collect LOBs to read
const lobs = [];
if (results.rows) {
if (Array.isArray(results.rows)) {
for (let i = 0; i < results.rows.length; i++) {
// Iterate through the rows
const row = results.rows[i];
for (const column in row) {
if (row[column] instanceof stream.Readable) {
lobs.push({ index: i, key: column, stream: row[column] });
}
}
}
}
}
try {
for (const lob of lobs) {
// todo should be fetchAsString/fetchAsBuffer polyfill only
results.rows[lob.index][lob.key] = await lobProcessing(
lob.stream
);
}
} catch (e) {
await closeResultSet().catch(() => {});
throw e;
}
await closeResultSet();
return results;
});
};
resolver(connection);
});
});
return asyncConnection;
};
// Used to explicitly close a connection, called internally by the pool
// when a connection times out or the pool is shutdown.
Client_Oracledb.prototype.destroyRawConnection = function(connection) {
return connection.release();
};
// Runs the query on the specified connection, providing the bindings
// and any other necessary prep work.
Client_Oracledb.prototype._query = function(connection, obj) {
if (!obj.sql) throw new Error('The query is empty');
const options = { autoCommit: false };
if (obj.method === 'select') {
options.resultSet = true;
}
return Bluebird.resolve(
connection.executeAsync(obj.sql, obj.bindings, options)
).then(async function(response) {
// Flatten outBinds
let outBinds = _.flatten(response.outBinds);
obj.response = response.rows || [];
obj.rowsAffected = response.rows
? response.rows.rowsAffected
: response.rowsAffected;
// added for outBind parameter
if (obj.method === 'raw' && outBinds.length > 0) {
return {
response: outBinds,
};
}
if (obj.method === 'update') {
const modifiedRowsCount = obj.rowsAffected.length || obj.rowsAffected;
const updatedObjOutBinding = [];
const updatedOutBinds = [];
const updateOutBinds = (i) =>
function(value, index) {
const OutBindsOffset = index * modifiedRowsCount;
updatedOutBinds.push(outBinds[i + OutBindsOffset]);
};
for (let i = 0; i < modifiedRowsCount; i++) {
updatedObjOutBinding.push(obj.outBinding[0]);
_.each(obj.outBinding[0], updateOutBinds(i));
}
outBinds = updatedOutBinds;
obj.outBinding = updatedObjOutBinding;
}
if (!obj.returning && outBinds.length === 0) {
await connection.commitAsync();
return obj;
}
const rowIds = [];
let offset = 0;
for (let line = 0; line < obj.outBinding.length; line++) {
const ret = obj.outBinding[line];
offset =
offset +
(obj.outBinding[line - 1] ? obj.outBinding[line - 1].length : 0);
for (let index = 0; index < ret.length; index++) {
const out = ret[index];
await new Promise(function(bindResolver, bindRejecter) {
if (out instanceof BlobHelper) {
const blob = outBinds[index + offset];
if (out.returning) {
obj.response[line] = obj.response[line] || {};
obj.response[line][out.columnName] = out.value;
}
blob.on('error', function(err) {
bindRejecter(err);
});
blob.on('finish', function() {
bindResolver();
});
blob.write(out.value);
blob.end();
} else if (obj.outBinding[line][index] === 'ROWID') {
rowIds.push(outBinds[index + offset]);
bindResolver();
} else {
obj.response[line] = obj.response[line] || {};
obj.response[line][out] = outBinds[index + offset];
bindResolver();
}
});
}
}
return connection.commitAsync().then(function() {
if (obj.returningSql) {
return connection
.executeAsync(obj.returningSql(), rowIds, { resultSet: true })
.then(function(response) {
obj.response = response.rows;
return obj;
});
}
return obj;
});
});
};
/**
* @param stream
* @param {'string' | 'buffer'} type
*/
function readStream(stream, type) {
return new Promise((resolve, reject) => {
let data = type === 'string' ? '' : Buffer.alloc(0);
stream.on('error', function(err) {
reject(err);
});
stream.on('data', function(chunk) {
if (type === 'string') {
data += chunk;
} else {
data = Buffer.concat([data, chunk]);
}
});
stream.on('end', function() {
resolve(data);
});
});
}
// Process the response as returned from the query.
Client_Oracledb.prototype.processResponse = function(obj, runner) {
let response = obj.response;
const method = obj.method;
if (obj.output) {
return obj.output.call(runner, response);
}
switch (method) {
case 'select':
case 'pluck':
case 'first':
if (obj.method === 'pluck') {
response = _.map(response, obj.pluck);
}
return obj.method === 'first' ? response[0] : response;
case 'insert':
case 'del':
case 'update':
case 'counter':
if (obj.returning && !_.isEmpty(obj.returning)) {
if (obj.returning.length === 1 && obj.returning[0] !== '*') {
return _.flatten(_.map(response, _.values));
}
return response;
} else if (!_.isUndefined(obj.rowsAffected)) {
return obj.rowsAffected;
} else {
return 1;
}
default:
return response;
}
};
const lobProcessing = function(stream) {
const oracledb = require('oracledb');
/**
* @type 'string' | 'buffer'
*/
let type;
if (stream.type) {
// v1.2-v4
if (stream.type === oracledb.BLOB) {
type = 'buffer';
} else if (stream.type === oracledb.CLOB) {
type = 'string';
}
} else if (stream.iLob) {
// v1
if (stream.iLob.type === oracledb.CLOB) {
type = 'string';
} else if (stream.iLob.type === oracledb.BLOB) {
type = 'buffer';
}
} else {
throw new Error('Unrecognized oracledb lob stream type');
}
if (type === 'string') {
stream.setEncoding('utf-8');
}
return readStream(stream, type);
};
class Oracledb_Formatter extends Oracle_Formatter {
// Checks whether a value is a function... if it is, we compile it
// otherwise we check whether it's a raw
parameter(value) {
if (typeof value === 'function') {
return this.outputQuery(this.compileCallback(value), true);
} else if (value instanceof BlobHelper) {
return 'EMPTY_BLOB()';
}
return this.unwrapRaw(value, true) || '?';
}
}
module.exports = Client_Oracledb;

View File

@@ -0,0 +1,360 @@
const _ = require('lodash');
const inherits = require('inherits');
const Oracle_Compiler = require('../../oracle/query/compiler');
const ReturningHelper = require('../utils').ReturningHelper;
const BlobHelper = require('../utils').BlobHelper;
function Oracledb_Compiler(client, builder) {
Oracle_Compiler.call(this, client, builder);
}
inherits(Oracledb_Compiler, Oracle_Compiler);
_.assign(Oracledb_Compiler.prototype, {
// Compiles an "insert" query, allowing for multiple
// inserts using a single query statement.
insert: function() {
const self = this;
const outBindPrep = this._prepOutbindings(
this.single.insert,
this.single.returning
);
const outBinding = outBindPrep.outBinding;
const returning = outBindPrep.returning;
const insertValues = outBindPrep.values;
if (
Array.isArray(insertValues) &&
insertValues.length === 1 &&
_.isEmpty(insertValues[0])
) {
return this._addReturningToSqlAndConvert(
'insert into ' +
this.tableName +
' (' +
this.formatter.wrap(this.single.returning) +
') values (default)',
outBinding[0],
this.tableName,
returning
);
}
if (
_.isEmpty(this.single.insert) &&
typeof this.single.insert !== 'function'
) {
return '';
}
const insertData = this._prepInsert(insertValues);
const sql = {};
if (_.isString(insertData)) {
return this._addReturningToSqlAndConvert(
'insert into ' + this.tableName + ' ' + insertData,
outBinding[0],
this.tableName,
returning
);
}
if (insertData.values.length === 1) {
return this._addReturningToSqlAndConvert(
'insert into ' +
this.tableName +
' (' +
this.formatter.columnize(insertData.columns) +
') values (' +
this.formatter.parameterize(insertData.values[0]) +
')',
outBinding[0],
this.tableName,
returning
);
}
const insertDefaultsOnly = insertData.columns.length === 0;
sql.returning = returning;
sql.sql =
'begin ' +
_.map(insertData.values, function(value, index) {
const parameterizedValues = !insertDefaultsOnly
? self.formatter.parameterize(value, self.client.valueForUndefined)
: '';
let subSql = 'insert into ' + self.tableName;
if (insertDefaultsOnly) {
// No columns given so only the default value
subSql +=
' (' +
self.formatter.wrap(self.single.returning) +
') values (default)';
} else {
subSql +=
' (' +
self.formatter.columnize(insertData.columns) +
') values (' +
parameterizedValues +
')';
}
let returningClause = '';
let intoClause = '';
// ToDo review if this code is still needed or could be dropped
// eslint-disable-next-line no-unused-vars
let usingClause = '';
let outClause = '';
_.each(value, function(val) {
if (!(val instanceof BlobHelper)) {
usingClause += ' ?,';
}
});
usingClause = usingClause.slice(0, -1);
// Build returning and into clauses
_.each(outBinding[index], function(ret) {
const columnName = ret.columnName || ret;
returningClause += self.formatter.wrap(columnName) + ',';
intoClause += ' ?,';
outClause += ' out ?,';
// Add Helpers to bindings
if (ret instanceof BlobHelper) {
return self.formatter.bindings.push(ret);
}
self.formatter.bindings.push(new ReturningHelper(columnName));
});
// Strip last comma
returningClause = returningClause.slice(0, -1);
intoClause = intoClause.slice(0, -1);
outClause = outClause.slice(0, -1);
if (returningClause && intoClause) {
subSql += ' returning ' + returningClause + ' into' + intoClause;
}
// Pre bind position because subSql is an execute immediate parameter
// later position binding will only convert the ? params
subSql = self.formatter.client.positionBindings(subSql);
const parameterizedValuesWithoutDefaultAndBlob = parameterizedValues
.replace('DEFAULT, ', '')
.replace(', DEFAULT', '')
.replace('EMPTY_BLOB(), ', '')
.replace(', EMPTY_BLOB()', '');
return (
"execute immediate '" +
subSql.replace(/'/g, "''") +
(parameterizedValuesWithoutDefaultAndBlob || value
? "' using "
: '') +
parameterizedValuesWithoutDefaultAndBlob +
(parameterizedValuesWithoutDefaultAndBlob && outClause ? ',' : '') +
outClause +
';'
);
}).join(' ') +
'end;';
sql.outBinding = outBinding;
if (returning[0] === '*') {
// Generate select statement with special order by
// to keep the order because 'in (..)' may change the order
sql.returningSql = function() {
return (
'select * from ' +
self.tableName +
' where ROWID in (' +
this.outBinding
.map(function(v, i) {
return ':' + (i + 1);
})
.join(', ') +
')' +
' order by case ROWID ' +
this.outBinding
.map(function(v, i) {
return 'when CHARTOROWID(:' + (i + 1) + ') then ' + i;
})
.join(' ') +
' end'
);
};
}
return sql;
},
_addReturningToSqlAndConvert: function(
sql,
outBinding,
tableName,
returning
) {
const self = this;
const res = {
sql: sql,
};
if (!outBinding) {
return res;
}
const returningValues = Array.isArray(outBinding)
? outBinding
: [outBinding];
let returningClause = '';
let intoClause = '';
// Build returning and into clauses
_.each(returningValues, function(ret) {
const columnName = ret.columnName || ret;
returningClause += self.formatter.wrap(columnName) + ',';
intoClause += '?,';
// Add Helpers to bindings
if (ret instanceof BlobHelper) {
return self.formatter.bindings.push(ret);
}
self.formatter.bindings.push(new ReturningHelper(columnName));
});
res.sql = sql;
// Strip last comma
returningClause = returningClause.slice(0, -1);
intoClause = intoClause.slice(0, -1);
if (returningClause && intoClause) {
res.sql += ' returning ' + returningClause + ' into ' + intoClause;
}
res.outBinding = [outBinding];
if (returning[0] === '*') {
res.returningSql = function() {
return 'select * from ' + self.tableName + ' where ROWID = :1';
};
}
res.returning = returning;
return res;
},
_prepOutbindings: function(paramValues, paramReturning) {
const result = {};
let params = paramValues || [];
let returning = paramReturning || [];
if (!Array.isArray(params) && _.isPlainObject(paramValues)) {
params = [params];
}
// Always wrap returning argument in array
if (returning && !Array.isArray(returning)) {
returning = [returning];
}
const outBinding = [];
// Handle Buffer value as Blob
_.each(params, function(values, index) {
if (returning[0] === '*') {
outBinding[index] = ['ROWID'];
} else {
outBinding[index] = _.clone(returning);
}
_.each(values, function(value, key) {
if (value instanceof Buffer) {
values[key] = new BlobHelper(key, value);
// Delete blob duplicate in returning
const blobIndex = outBinding[index].indexOf(key);
if (blobIndex >= 0) {
outBinding[index].splice(blobIndex, 1);
values[key].returning = true;
}
outBinding[index].push(values[key]);
}
if (_.isUndefined(value)) {
delete params[index][key];
}
});
});
result.returning = returning;
result.outBinding = outBinding;
result.values = params;
return result;
},
update: function() {
const self = this;
const sql = {};
const outBindPrep = this._prepOutbindings(
this.single.update || this.single.counter,
this.single.returning
);
const outBinding = outBindPrep.outBinding;
const returning = outBindPrep.returning;
const updates = this._prepUpdate(this.single.update);
const where = this.where();
let returningClause = '';
let intoClause = '';
if (_.isEmpty(updates) && typeof this.single.update !== 'function') {
return '';
}
// Build returning and into clauses
_.each(outBinding, function(out) {
_.each(out, function(ret) {
const columnName = ret.columnName || ret;
returningClause += self.formatter.wrap(columnName) + ',';
intoClause += ' ?,';
// Add Helpers to bindings
if (ret instanceof BlobHelper) {
return self.formatter.bindings.push(ret);
}
self.formatter.bindings.push(new ReturningHelper(columnName));
});
});
// Strip last comma
returningClause = returningClause.slice(0, -1);
intoClause = intoClause.slice(0, -1);
sql.outBinding = outBinding;
sql.returning = returning;
sql.sql =
'update ' +
this.tableName +
' set ' +
updates.join(', ') +
(where ? ' ' + where : '');
if (outBinding.length && !_.isEmpty(outBinding[0])) {
sql.sql += ' returning ' + returningClause + ' into' + intoClause;
}
if (returning[0] === '*') {
sql.returningSql = function() {
let sql = 'select * from ' + self.tableName;
const modifiedRowsCount = this.rowsAffected.length || this.rowsAffected;
let returningSqlIn = ' where ROWID in (';
let returningSqlOrderBy = ') order by case ROWID ';
// Needs a special order by because in(...) changes the result order
for (let i = 0; i < modifiedRowsCount; i++) {
if (this.returning[0] === '*') {
returningSqlIn += ':' + (i + 1) + ', ';
returningSqlOrderBy +=
'when CHARTOROWID(:' + (i + 1) + ') then ' + i + ' ';
}
}
if (this.returning[0] === '*') {
this.returning = this.returning.slice(0, -1);
returningSqlIn = returningSqlIn.slice(0, -2);
returningSqlOrderBy = returningSqlOrderBy.slice(0, -1);
}
return (sql += returningSqlIn + returningSqlOrderBy + ' end');
};
}
return sql;
},
});
module.exports = Oracledb_Compiler;

View File

@@ -0,0 +1,36 @@
const inherits = require('inherits');
const ColumnCompiler_Oracle = require('../../oracle/schema/columncompiler');
const { isObject } = require('lodash');
function ColumnCompiler_Oracledb() {
ColumnCompiler_Oracle.apply(this, arguments);
}
inherits(ColumnCompiler_Oracledb, ColumnCompiler_Oracle);
Object.assign(ColumnCompiler_Oracledb.prototype, {
time: 'timestamp with local time zone',
datetime: function(withoutTz) {
let useTz;
if (isObject(withoutTz)) {
({ useTz } = withoutTz);
} else {
useTz = !withoutTz;
}
return useTz ? 'timestamp with local time zone' : 'timestamp';
},
timestamp: function(withoutTz) {
let useTz;
if (isObject(withoutTz)) {
({ useTz } = withoutTz);
} else {
useTz = !withoutTz;
}
return useTz ? 'timestamp with local time zone' : 'timestamp';
},
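// Example (illustrative): timestamp() and timestamp({ useTz: true }) compile to
// 'timestamp with local time zone', while timestamp(true) and timestamp({ useTz: false })
// compile to plain 'timestamp'.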
});
module.exports = ColumnCompiler_Oracledb;

87
node_modules/knex/lib/dialects/oracledb/transaction.js generated vendored Normal file
View File

@@ -0,0 +1,87 @@
const { isUndefined } = require('lodash');
const Bluebird = require('bluebird');
const Transaction = require('../../transaction');
const debugTx = require('debug')('knex:tx');
module.exports = class Oracle_Transaction extends Transaction {
// disable autocommit to allow correct behavior (default is true)
begin() {
return Bluebird.resolve();
}
commit(conn, value) {
this._completed = true;
return conn
.commitAsync()
.then(() => value)
.then(this._resolver, this._rejecter);
}
release(conn, value) {
return this._resolver(value);
}
rollback(conn, err) {
const self = this;
this._completed = true;
debugTx('%s: rolling back', this.txid);
return conn
.rollbackAsync()
.timeout(5000)
.catch(Bluebird.TimeoutError, function(e) {
self._rejecter(e);
})
.then(function() {
if (isUndefined(err)) {
if (self.doNotRejectOnRollback) {
self._resolver();
return;
}
err = new Error(`Transaction rejected with non-error: ${err}`);
}
self._rejecter(err);
});
}
savepoint(conn) {
return this.query(conn, `SAVEPOINT ${this.txid}`);
}
acquireConnection(config, cb) {
const configConnection = config && config.connection;
const t = this;
return new Bluebird((resolve, reject) => {
try {
this.client
.acquireConnection()
.then((cnx) => {
cnx.__knexTxId = this.txid;
cnx.isTransaction = true;
resolve(cnx);
})
.catch(reject);
} catch (e) {
reject(e);
}
}).then(async (connection) => {
try {
return await cb(connection);
} finally {
debugTx('%s: releasing connection', this.txid);
connection.isTransaction = false;
try {
await connection.commitAsync();
} catch (err) {
t._rejecter(err);
} finally {
if (!configConnection) {
await t.client.releaseConnection(connection);
} else {
debugTx('%s: not releasing external connection', t.txid);
}
}
}
});
}
};

14
node_modules/knex/lib/dialects/oracledb/utils.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
const Utils = require('../oracle/utils');
function BlobHelper(columnName, value) {
this.columnName = columnName;
this.value = value;
this.returning = false;
}
BlobHelper.prototype.toString = function() {
return '[object BlobHelper:' + this.columnName + ']';
};
Utils.BlobHelper = BlobHelper;
module.exports = Utils;

321
node_modules/knex/lib/dialects/postgres/index.js generated vendored Normal file
View File

@@ -0,0 +1,321 @@
// PostgreSQL
// -------
const { map, extend, isString } = require('lodash');
const { promisify } = require('util');
const inherits = require('inherits');
const Client = require('../../client');
const Bluebird = require('bluebird');
const QueryCompiler = require('./query/compiler');
const ColumnCompiler = require('./schema/columncompiler');
const TableCompiler = require('./schema/tablecompiler');
const SchemaCompiler = require('./schema/compiler');
const { makeEscape } = require('../../query/string');
function Client_PG(config) {
Client.apply(this, arguments);
if (config.returning) {
this.defaultReturning = config.returning;
}
if (config.searchPath) {
this.searchPath = config.searchPath;
}
}
inherits(Client_PG, Client);
Object.assign(Client_PG.prototype, {
queryCompiler() {
return new QueryCompiler(this, ...arguments);
},
columnCompiler() {
return new ColumnCompiler(this, ...arguments);
},
schemaCompiler() {
return new SchemaCompiler(this, ...arguments);
},
tableCompiler() {
return new TableCompiler(this, ...arguments);
},
dialect: 'postgresql',
driverName: 'pg',
_driver() {
return require('pg');
},
_escapeBinding: makeEscape({
escapeArray(val, esc) {
return esc(arrayString(val, esc));
},
escapeString(str) {
let hasBackslash = false;
let escaped = "'";
for (let i = 0; i < str.length; i++) {
const c = str[i];
if (c === "'") {
escaped += c + c;
} else if (c === '\\') {
escaped += c + c;
hasBackslash = true;
} else {
escaped += c;
}
}
escaped += "'";
if (hasBackslash === true) {
escaped = 'E' + escaped;
}
return escaped;
},
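// Example (illustrative): escapeString("O'Reilly") returns the literal 'O''Reilly' (quotes
// included); a value containing a backslash has each backslash doubled and gains an E
// prefix, so a\b becomes E'a\\b'.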
escapeObject(val, prepareValue, timezone, seen = []) {
if (val && typeof val.toPostgres === 'function') {
seen = seen || [];
if (seen.indexOf(val) !== -1) {
throw new Error(
`circular reference detected while preparing "${val}" for query`
);
}
seen.push(val);
return prepareValue(val.toPostgres(prepareValue), seen);
}
return JSON.stringify(val);
},
}),
wrapIdentifierImpl(value) {
if (value === '*') return value;
let arrayAccessor = '';
const arrayAccessorMatch = value.match(/(.*?)(\[[0-9]+\])/);
if (arrayAccessorMatch) {
value = arrayAccessorMatch[1];
arrayAccessor = arrayAccessorMatch[2];
}
return `"${value.replace(/"/g, '""')}"${arrayAccessor}`;
},
// Get a raw connection, called by the `pool` whenever a new
// connection needs to be added to the pool.
acquireRawConnection() {
const client = this;
return new Bluebird(function(resolver, rejecter) {
const connection = new client.driver.Client(client.connectionSettings);
connection.connect(function(err, connection) {
if (err) {
return rejecter(err);
}
connection.on('error', (err) => {
connection.__knex__disposed = err;
});
connection.on('end', (err) => {
connection.__knex__disposed = err || 'Connection ended unexpectedly';
});
if (!client.version) {
return client.checkVersion(connection).then(function(version) {
client.version = version;
resolver(connection);
});
}
resolver(connection);
});
}).then(function setSearchPath(connection) {
client.setSchemaSearchPath(connection);
return connection;
});
},
// Used to explicitly close a connection, called internally by the pool
// when a connection times out or the pool is shutdown.
async destroyRawConnection(connection) {
const end = promisify((cb) => connection.end(cb));
return end();
},
// In PostgreSQL, we need to do a version check to do some feature
// checking on the database.
checkVersion(connection) {
return new Bluebird(function(resolver, rejecter) {
connection.query('select version();', function(err, resp) {
if (err) return rejecter(err);
resolver(/^PostgreSQL (.*?)( |$)/.exec(resp.rows[0].version)[1]);
});
});
},
// Position the bindings for the query. The escape sequence for question mark
// is \? (e.g. knex.raw("\\?") since javascript requires '\' to be escaped too...)
positionBindings(sql) {
let questionCount = 0;
return sql.replace(/(\\*)(\?)/g, function(match, escapes) {
if (escapes.length % 2) {
return '?';
} else {
questionCount++;
return `$${questionCount}`;
}
});
},
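// Example (illustrative): for the SQL text `select * from "users" where "id" = ? and "tag" = \?`
// this returns `select * from "users" where "id" = $1 and "tag" = ?`: unescaped markers are
// numbered $1, $2, ... while an escaped \? comes through as a literal question mark.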
setSchemaSearchPath(connection, searchPath) {
let path = searchPath || this.searchPath;
if (!path) return Bluebird.resolve(true);
if (!Array.isArray(path) && !isString(path)) {
throw new TypeError(
`knex: Expected searchPath to be Array/String, got: ${typeof path}`
);
}
if (isString(path)) {
if (path.includes(',')) {
const parts = path.split(',');
const arraySyntax = `[${parts
.map((searchPath) => `'${searchPath}'`)
.join(', ')}]`;
this.logger.warn(
`Detected comma in searchPath "${path}". ` +
`If you are trying to specify multiple schemas, use Array syntax: ${arraySyntax}`
);
}
path = [path];
}
path = path.map((schemaName) => `"${schemaName}"`).join(',');
return new Bluebird(function(resolver, rejecter) {
connection.query(`set search_path to ${path}`, function(err) {
if (err) return rejecter(err);
resolver(true);
});
});
},
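// Example (illustrative, hypothetical schema names): setSchemaSearchPath(connection, ['public', 'tenant_a'])
// issues
//   set search_path to "public","tenant_a"
// and a single string such as 'public' is wrapped in an array first; a comma-separated
// string only triggers the warning above, it is not split into separate schemas.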
_stream(connection, obj, stream, options) {
const PGQueryStream = process.browser
? undefined
: require('pg-query-stream');
const sql = obj.sql;
return new Bluebird(function(resolver, rejecter) {
const queryStream = connection.query(
new PGQueryStream(sql, obj.bindings, options)
);
queryStream.on('error', function(error) {
rejecter(error);
stream.emit('error', error);
});
// 'end' IS propagated by .pipe, by default
stream.on('end', resolver);
queryStream.pipe(stream);
});
},
// Runs the query on the specified connection, providing the bindings
// and any other necessary prep work.
_query(connection, obj) {
let queryConfig = {
text: obj.sql,
values: obj.bindings || [],
};
if (obj.options) {
queryConfig = extend(queryConfig, obj.options);
}
return new Bluebird(function(resolver, rejecter) {
connection.query(queryConfig, function(err, response) {
if (err) return rejecter(err);
obj.response = response;
resolver(obj);
});
});
},
// Ensures the response is returned in the same format as other clients.
processResponse(obj, runner) {
const resp = obj.response;
if (obj.output) return obj.output.call(runner, resp);
if (obj.method === 'raw') return resp;
const { returning } = obj;
if (resp.command === 'SELECT') {
if (obj.method === 'first') return resp.rows[0];
if (obj.method === 'pluck') return map(resp.rows, obj.pluck);
return resp.rows;
}
if (returning) {
const returns = [];
for (let i = 0, l = resp.rows.length; i < l; i++) {
const row = resp.rows[i];
if (returning === '*' || Array.isArray(returning)) {
returns[i] = row;
} else {
// Pluck the only column in the row.
returns[i] = row[Object.keys(row)[0]];
}
}
return returns;
}
if (resp.command === 'UPDATE' || resp.command === 'DELETE') {
return resp.rowCount;
}
return resp;
},
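// Example (illustrative): an insert with returning 'id' and resp.rows of
// [{ id: 7 }, { id: 8 }] yields [7, 8] (the single column is plucked), while
// returning '*' or an array of columns yields the full row objects; a plain
// update or delete without returning yields resp.rowCount.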
canCancelQuery: true,
cancelQuery(connectionToKill) {
const acquiringConn = this.acquireConnection();
// Error out if we can't acquire connection in time.
// Purposely not putting timeout on `pg_cancel_backend` execution because erroring
// early there would release the `connectionToKill` back to the pool with
// a `KILL QUERY` command yet to finish.
return acquiringConn.then((conn) => {
return this._wrappedCancelQueryCall(conn, connectionToKill).finally(
() => {
// NOT returning this promise because we want to release the connection
// in a non-blocking fashion
this.releaseConnection(conn);
}
);
});
},
_wrappedCancelQueryCall(conn, connectionToKill) {
return this.query(conn, {
method: 'raw',
sql: 'SELECT pg_cancel_backend(?);',
bindings: [connectionToKill.processID],
options: {},
});
},
});
function arrayString(arr, esc) {
let result = '{';
for (let i = 0; i < arr.length; i++) {
if (i > 0) result += ',';
const val = arr[i];
if (val === null || typeof val === 'undefined') {
result += 'NULL';
} else if (Array.isArray(val)) {
result += arrayString(val, esc);
} else if (typeof val === 'number') {
result += val;
} else {
result += JSON.stringify(typeof val === 'string' ? val : esc(val));
}
}
return result + '}';
}
module.exports = Client_PG;
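A minimal sketch, not part of the vendored file, of how the binding helpers above behave; it assumes the module can be required straight from this vendored path, and the SQL string is illustrative only.

const Client_PG = require('knex/lib/dialects/postgres');
// positionBindings rewrites knex-style `?` placeholders into pg-style $1, $2, ...
// an escaped `\?` (written "\\?" in JS source) is emitted as a literal `?`.
const toPositional = Client_PG.prototype.positionBindings;
console.log(toPositional('select * from users where id = ? and note \\? ?'));
// -> select * from users where id = $1 and note ? $2
// The module-private arrayString() above would serialise [1, null, ['a', 'b']]
// into the Postgres array literal '{1,NULL,{"a","b"}}'.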

View File

@@ -0,0 +1,166 @@
// PostgreSQL Query Builder & Compiler
// ------
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const { reduce, identity } = require('lodash');
function QueryCompiler_PG(client, builder) {
QueryCompiler.call(this, client, builder);
}
inherits(QueryCompiler_PG, QueryCompiler);
Object.assign(QueryCompiler_PG.prototype, {
// Compiles a truncate query.
truncate() {
return `truncate ${this.tableName} restart identity`;
},
// used if an array with multiple empty values is supplied
_defaultInsertValue: 'default',
// Compiles an `insert` query, allowing for multiple
// inserts using a single query statement.
insert() {
const sql = QueryCompiler.prototype.insert.call(this);
if (sql === '') return sql;
const { returning } = this.single;
return {
sql: sql + this._returning(returning),
returning,
};
},
// Compiles an `update` query, allowing for a return value.
update() {
const withSQL = this.with();
const updateData = this._prepUpdate(this.single.update);
const wheres = this.where();
const { returning } = this.single;
return {
sql:
withSQL +
`update ${this.single.only ? 'only ' : ''}${this.tableName} ` +
`set ${updateData.join(', ')}` +
(wheres ? ` ${wheres}` : '') +
this._returning(returning),
returning,
};
},
// Compiles a `delete` query, allowing for a return value.
del() {
const sql = QueryCompiler.prototype.del.apply(this, arguments);
const { returning } = this.single;
return {
sql: sql + this._returning(returning),
returning,
};
},
aggregate(stmt) {
return this._aggregate(stmt, { distinctParentheses: true });
},
_returning(value) {
return value ? ` returning ${this.formatter.columnize(value)}` : '';
},
// Join array of table names and apply default schema.
_tableNames(tables) {
const schemaName = this.single.schema;
const sql = [];
for (let i = 0; i < tables.length; i++) {
let tableName = tables[i];
if (tableName) {
if (schemaName) {
tableName = `${schemaName}.${tableName}`;
}
sql.push(this.formatter.wrap(tableName));
}
}
return sql.join(', ');
},
forUpdate() {
const tables = this.single.lockTables || [];
return (
'for update' + (tables.length ? ' of ' + this._tableNames(tables) : '')
);
},
forShare() {
const tables = this.single.lockTables || [];
return (
'for share' + (tables.length ? ' of ' + this._tableNames(tables) : '')
);
},
skipLocked() {
return 'skip locked';
},
noWait() {
return 'nowait';
},
// Compiles a columnInfo query
columnInfo() {
const column = this.single.columnInfo;
let schema = this.single.schema;
// The user may have specified a custom wrapIdentifier function in the config. We
// need to run the identifiers through that function, but not format them as
// identifiers otherwise.
const table = this.client.customWrapIdentifier(this.single.table, identity);
if (schema) {
schema = this.client.customWrapIdentifier(schema, identity);
}
let sql =
'select * from information_schema.columns where table_name = ? and table_catalog = ?';
const bindings = [table, this.client.database()];
if (schema) {
sql += ' and table_schema = ?';
bindings.push(schema);
} else {
sql += ' and table_schema = current_schema()';
}
return {
sql,
bindings,
output(resp) {
const out = reduce(
resp.rows,
function(columns, val) {
columns[val.column_name] = {
type: val.data_type,
maxLength: val.character_maximum_length,
nullable: val.is_nullable === 'YES',
defaultValue: val.column_default,
};
return columns;
},
{}
);
return (column && out[column]) || out;
},
};
},
distinctOn(value) {
return 'distinct on (' + this.formatter.columnize(value) + ') ';
},
});
module.exports = QueryCompiler_PG;
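A short sketch of the returning handling above, assuming a knex instance configured for the pg client (no live connection is needed just to compile); table and column names are placeholders.

const knex = require('knex')({ client: 'pg' });
console.log(
  knex('users')
    .insert({ name: 'alice' })
    .returning('id')
    .toString()
);
// -> insert into "users" ("name") values ('alice') returning "id"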

View File

@@ -0,0 +1,122 @@
// PostgreSQL Column Compiler
// -------
const inherits = require('inherits');
const ColumnCompiler = require('../../../schema/columncompiler');
const { isObject } = require('lodash');
function ColumnCompiler_PG() {
ColumnCompiler.apply(this, arguments);
this.modifiers = ['nullable', 'defaultTo', 'comment'];
}
inherits(ColumnCompiler_PG, ColumnCompiler);
Object.assign(ColumnCompiler_PG.prototype, {
// Types
// ------
bigincrements: 'bigserial primary key',
bigint: 'bigint',
binary: 'bytea',
bit(column) {
return column.length !== false ? `bit(${column.length})` : 'bit';
},
bool: 'boolean',
// Create the column definition for an enum type.
// Using method "2" here: http://stackoverflow.com/a/10984951/525714
enu(allowed, options) {
options = options || {};
const values =
options.useNative && options.existingType
? undefined
: allowed.join("', '");
if (options.useNative) {
let enumName = '';
const schemaName = options.schemaName || this.tableCompiler.schemaNameRaw;
if (schemaName) {
enumName += `"${schemaName}".`;
}
enumName += `"${options.enumName}"`;
if (!options.existingType) {
this.tableCompiler.unshiftQuery(
`create type ${enumName} as enum ('${values}')`
);
}
return enumName;
}
return `text check (${this.formatter.wrap(this.args[0])} in ('${values}'))`;
},
double: 'double precision',
decimal(precision, scale) {
if (precision === null) return 'decimal';
return `decimal(${this._num(precision, 8)}, ${this._num(scale, 2)})`;
},
floating: 'real',
increments: 'serial primary key',
json(jsonb) {
if (jsonb) this.client.logger.deprecate('json(true)', 'jsonb()');
return jsonColumn(this.client, jsonb);
},
jsonb() {
return jsonColumn(this.client, true);
},
smallint: 'smallint',
tinyint: 'smallint',
datetime(withoutTz = false, precision) {
let useTz;
if (isObject(withoutTz)) {
({ useTz, precision } = withoutTz);
} else {
useTz = !withoutTz;
}
return `${useTz ? 'timestamptz' : 'timestamp'}${
precision ? '(' + precision + ')' : ''
}`;
},
timestamp(withoutTz = false, precision) {
let useTz;
if (isObject(withoutTz)) {
({ useTz, precision } = withoutTz);
} else {
useTz = !withoutTz;
}
return `${useTz ? 'timestamptz' : 'timestamp'}${
precision ? '(' + precision + ')' : ''
}`;
},
uuid: 'uuid',
// Modifiers:
// ------
comment(comment) {
const columnName = this.args[0] || this.defaults('columnName');
this.pushAdditional(function() {
this.pushQuery(
`comment on column ${this.tableCompiler.tableName()}.` +
this.formatter.wrap(columnName) +
' is ' +
(comment ? `'${comment}'` : 'NULL')
);
}, comment);
},
});
function jsonColumn(client, jsonb) {
if (!client.version || parseFloat(client.version) >= 9.2)
return jsonb ? 'jsonb' : 'json';
return 'text';
}
module.exports = ColumnCompiler_PG;
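A sketch of the native-enum path in enu() above, assuming a pg-configured knex instance; the type and table names are illustrative and the compiled SQL shown is approximate.

const knex = require('knex')({ client: 'pg' });
knex.schema
  .createTable('posts', (t) => {
    t.enu('status', ['draft', 'published'], {
      useNative: true,
      enumName: 'post_status',
    });
  })
  .toString();
// (roughly)
// create type "post_status" as enum ('draft', 'published');
// create table "posts" ("status" "post_status")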

View File

@@ -0,0 +1,109 @@
// PostgreSQL Schema Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler = require('../../../schema/compiler');
function SchemaCompiler_PG() {
SchemaCompiler.apply(this, arguments);
}
inherits(SchemaCompiler_PG, SchemaCompiler);
// Compile the query to determine if a table exists.
SchemaCompiler_PG.prototype.hasTable = function(tableName) {
let sql = 'select * from information_schema.tables where table_name = ?';
const bindings = [tableName];
if (this.schema) {
sql += ' and table_schema = ?';
bindings.push(this.schema);
} else {
sql += ' and table_schema = current_schema()';
}
this.pushQuery({
sql,
bindings,
output(resp) {
return resp.rows.length > 0;
},
});
};
// Compile the query to determine if a column exists in a table.
SchemaCompiler_PG.prototype.hasColumn = function(tableName, columnName) {
let sql =
'select * from information_schema.columns where table_name = ? and column_name = ?';
const bindings = [tableName, columnName];
if (this.schema) {
sql += ' and table_schema = ?';
bindings.push(this.schema);
} else {
sql += ' and table_schema = current_schema()';
}
this.pushQuery({
sql,
bindings,
output(resp) {
return resp.rows.length > 0;
},
});
};
SchemaCompiler_PG.prototype.qualifiedTableName = function(tableName) {
const name = this.schema ? `${this.schema}.${tableName}` : tableName;
return this.formatter.wrap(name);
};
// Compile a rename table command.
SchemaCompiler_PG.prototype.renameTable = function(from, to) {
this.pushQuery(
`alter table ${this.qualifiedTableName(
from
)} rename to ${this.formatter.wrap(to)}`
);
};
SchemaCompiler_PG.prototype.createSchema = function(schemaName) {
this.pushQuery(`create schema ${this.formatter.wrap(schemaName)}`);
};
SchemaCompiler_PG.prototype.createSchemaIfNotExists = function(schemaName) {
this.pushQuery(
`create schema if not exists ${this.formatter.wrap(schemaName)}`
);
};
SchemaCompiler_PG.prototype.dropSchema = function(schemaName) {
this.pushQuery(`drop schema ${this.formatter.wrap(schemaName)}`);
};
SchemaCompiler_PG.prototype.dropSchemaIfExists = function(schemaName) {
this.pushQuery(`drop schema if exists ${this.formatter.wrap(schemaName)}`);
};
SchemaCompiler_PG.prototype.dropExtension = function(extensionName) {
this.pushQuery(`drop extension ${this.formatter.wrap(extensionName)}`);
};
SchemaCompiler_PG.prototype.dropExtensionIfExists = function(extensionName) {
this.pushQuery(
`drop extension if exists ${this.formatter.wrap(extensionName)}`
);
};
SchemaCompiler_PG.prototype.createExtension = function(extensionName) {
this.pushQuery(`create extension ${this.formatter.wrap(extensionName)}`);
};
SchemaCompiler_PG.prototype.createExtensionIfNotExists = function(
extensionName
) {
this.pushQuery(
`create extension if not exists ${this.formatter.wrap(extensionName)}`
);
};
module.exports = SchemaCompiler_PG;
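A sketch of how these helpers surface through knex.schema, assuming a pg-configured instance; the compiled SQL shown is approximate.

const knex = require('knex')({ client: 'pg' });
knex.schema.hasTable('users').toString();
// (roughly) select * from information_schema.tables
//           where table_name = 'users' and table_schema = current_schema()
knex.schema.createSchemaIfNotExists('reporting').toString();
// -> create schema if not exists "reporting"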

View File

@@ -0,0 +1,183 @@
/* eslint max-len: 0 */
// PostgreSQL Table Builder & Compiler
// -------
const inherits = require('inherits');
const TableCompiler = require('../../../schema/tablecompiler');
const { has } = require('lodash');
function TableCompiler_PG() {
TableCompiler.apply(this, arguments);
}
inherits(TableCompiler_PG, TableCompiler);
// Compile a rename column command.
TableCompiler_PG.prototype.renameColumn = function(from, to) {
return this.pushQuery({
sql: `alter table ${this.tableName()} rename ${this.formatter.wrap(
from
)} to ${this.formatter.wrap(to)}`,
});
};
TableCompiler_PG.prototype.compileAdd = function(builder) {
const table = this.formatter.wrap(builder);
const columns = this.prefixArray('add column', this.getColumns(builder));
return this.pushQuery({
sql: `alter table ${table} ${columns.join(', ')}`,
});
};
// Adds the "create" query to the query sequence.
TableCompiler_PG.prototype.createQuery = function(columns, ifNot) {
const createStatement = ifNot
? 'create table if not exists '
: 'create table ';
let sql =
createStatement + this.tableName() + ' (' + columns.sql.join(', ') + ')';
if (this.single.inherits)
sql += ` inherits (${this.formatter.wrap(this.single.inherits)})`;
this.pushQuery({
sql,
bindings: columns.bindings,
});
const hasComment = has(this.single, 'comment');
if (hasComment) this.comment(this.single.comment);
};
TableCompiler_PG.prototype.addColumns = function(
columns,
prefix,
colCompilers
) {
if (prefix === this.alterColumnsPrefix) {
// alter columns
for (const col of colCompilers) {
const quotedTableName = this.tableName();
const type = col.getColumnType();
// We'd prefer to call this.formatter.wrapAsIdentifier here instead, however the context passed to
// `this` instance is not that of the column, but of the table. Thus, we unfortunately have to call
// `wrapIdentifier` here as well (it is already called once on the initial column operation) to give
// our `alter` operation the correct `queryContext`. Refer to issue #2606 and PR #2612.
const colName = this.client.wrapIdentifier(
col.getColumnName(),
col.columnBuilder.queryContext()
);
this.pushQuery({
sql: `alter table ${quotedTableName} alter column ${colName} drop default`,
bindings: [],
});
this.pushQuery({
sql: `alter table ${quotedTableName} alter column ${colName} drop not null`,
bindings: [],
});
this.pushQuery({
sql: `alter table ${quotedTableName} alter column ${colName} type ${type} using (${colName}::${type})`,
bindings: [],
});
const defaultTo = col.modified['defaultTo'];
if (defaultTo) {
const modifier = col.defaultTo.apply(col, defaultTo);
this.pushQuery({
sql: `alter table ${quotedTableName} alter column ${colName} set ${modifier}`,
bindings: [],
});
}
const nullable = col.modified['nullable'];
if (nullable && nullable[0] === false) {
this.pushQuery({
sql: `alter table ${quotedTableName} alter column ${colName} set not null`,
bindings: [],
});
}
}
} else {
// base class implementation for normal add
TableCompiler.prototype.addColumns.call(this, columns, prefix);
}
};
// Compiles the comment on the table.
TableCompiler_PG.prototype.comment = function(comment) {
this.pushQuery(
`comment on table ${this.tableName()} is '${this.single.comment}'`
);
};
// Indexes:
// -------
TableCompiler_PG.prototype.primary = function(columns, constraintName) {
constraintName = constraintName
? this.formatter.wrap(constraintName)
: this.formatter.wrap(`${this.tableNameRaw}_pkey`);
this.pushQuery(
`alter table ${this.tableName()} add constraint ${constraintName} primary key (${this.formatter.columnize(
columns
)})`
);
};
TableCompiler_PG.prototype.unique = function(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, columns);
this.pushQuery(
`alter table ${this.tableName()} add constraint ${indexName}` +
' unique (' +
this.formatter.columnize(columns) +
')'
);
};
TableCompiler_PG.prototype.index = function(columns, indexName, indexType) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
this.pushQuery(
`create index ${indexName} on ${this.tableName()}${(indexType &&
` using ${indexType}`) ||
''}` +
' (' +
this.formatter.columnize(columns) +
')'
);
};
TableCompiler_PG.prototype.dropPrimary = function(constraintName) {
constraintName = constraintName
? this.formatter.wrap(constraintName)
: this.formatter.wrap(this.tableNameRaw + '_pkey');
this.pushQuery(
`alter table ${this.tableName()} drop constraint ${constraintName}`
);
};
TableCompiler_PG.prototype.dropIndex = function(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
indexName = this.schemaNameRaw
? `${this.formatter.wrap(this.schemaNameRaw)}.${indexName}`
: indexName;
this.pushQuery(`drop index ${indexName}`);
};
TableCompiler_PG.prototype.dropUnique = function(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, columns);
this.pushQuery(
`alter table ${this.tableName()} drop constraint ${indexName}`
);
};
TableCompiler_PG.prototype.dropForeign = function(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('foreign', this.tableNameRaw, columns);
this.pushQuery(
`alter table ${this.tableName()} drop constraint ${indexName}`
);
};
module.exports = TableCompiler_PG;
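A sketch of the alter-column branch of addColumns() above (drop default, drop not null, retype with a USING cast, then re-apply modifiers), assuming a pg-configured knex instance; output is approximate.

const knex = require('knex')({ client: 'pg' });
knex.schema
  .alterTable('users', (t) => {
    t.string('email', 100).notNullable().alter();
  })
  .toString();
// (roughly)
// alter table "users" alter column "email" drop default;
// alter table "users" alter column "email" drop not null;
// alter table "users" alter column "email" type varchar(100) using ("email"::varchar(100));
// alter table "users" alter column "email" set not null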

73
node_modules/knex/lib/dialects/redshift/index.js generated vendored Normal file
View File

@@ -0,0 +1,73 @@
// Redshift
// -------
const inherits = require('inherits');
const Client_PG = require('../postgres');
const { map } = require('lodash');
const Transaction = require('./transaction');
const QueryCompiler = require('./query/compiler');
const ColumnBuilder = require('./schema/columnbuilder');
const ColumnCompiler = require('./schema/columncompiler');
const TableCompiler = require('./schema/tablecompiler');
const SchemaCompiler = require('./schema/compiler');
function Client_Redshift(config) {
Client_PG.apply(this, arguments);
}
inherits(Client_Redshift, Client_PG);
Object.assign(Client_Redshift.prototype, {
transaction() {
return new Transaction(this, ...arguments);
},
queryCompiler() {
return new QueryCompiler(this, ...arguments);
},
columnBuilder() {
return new ColumnBuilder(this, ...arguments);
},
columnCompiler() {
return new ColumnCompiler(this, ...arguments);
},
tableCompiler() {
return new TableCompiler(this, ...arguments);
},
schemaCompiler() {
return new SchemaCompiler(this, ...arguments);
},
dialect: 'redshift',
driverName: 'pg-redshift',
_driver() {
return require('pg');
},
// Ensures the response is returned in the same format as other clients.
processResponse(obj, runner) {
const resp = obj.response;
if (obj.output) return obj.output.call(runner, resp);
if (obj.method === 'raw') return resp;
if (resp.command === 'SELECT') {
if (obj.method === 'first') return resp.rows[0];
if (obj.method === 'pluck') return map(resp.rows, obj.pluck);
return resp.rows;
}
if (
resp.command === 'INSERT' ||
resp.command === 'UPDATE' ||
resp.command === 'DELETE'
) {
return resp.rowCount;
}
return resp;
},
});
module.exports = Client_Redshift;
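A configuration sketch for selecting this dialect; the connection values are placeholders. As processResponse above shows, INSERT (like UPDATE and DELETE) resolves to the affected row count rather than to rows.

const rs = require('knex')({
  client: 'redshift',
  connection: {
    host: 'example.redshift.amazonaws.com',
    user: 'admin',
    password: 'secret',
    database: 'analytics',
  },
});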

View File

@@ -0,0 +1,122 @@
// Redshift Query Builder & Compiler
// ------
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const QueryCompiler_PG = require('../../postgres/query/compiler');
const { reduce, identity } = require('lodash');
function QueryCompiler_Redshift(client, builder) {
QueryCompiler_PG.call(this, client, builder);
}
inherits(QueryCompiler_Redshift, QueryCompiler_PG);
Object.assign(QueryCompiler_Redshift.prototype, {
truncate() {
return `truncate ${this.tableName.toLowerCase()}`;
},
// Compiles an `insert` query, allowing for multiple
// inserts using a single query statement.
insert() {
const sql = QueryCompiler.prototype.insert.apply(this, arguments);
if (sql === '') return sql;
this._slightReturn();
return {
sql,
};
},
// Compiles an `update` query, warning on unsupported returning
update() {
const sql = QueryCompiler.prototype.update.apply(this, arguments);
this._slightReturn();
return {
sql,
};
},
// Compiles a `delete` query, warning on unsupported returning
del() {
const sql = QueryCompiler.prototype.del.apply(this, arguments);
this._slightReturn();
return {
sql,
};
},
// if a returning clause was requested, warn that it is not supported
_slightReturn() {
if (this.single.isReturning) {
this.client.logger.warn(
'insert/update/delete returning is not supported by redshift dialect'
);
}
},
forUpdate() {
this.client.logger.warn('table lock is not supported by redshift dialect');
return '';
},
forShare() {
this.client.logger.warn(
'lock for share is not supported by redshift dialect'
);
return '';
},
// Compiles a columnInfo query
columnInfo() {
const column = this.single.columnInfo;
let schema = this.single.schema;
// The user may have specified a custom wrapIdentifier function in the config. We
// need to run the identifiers through that function, but not format them as
// identifiers otherwise.
const table = this.client.customWrapIdentifier(this.single.table, identity);
if (schema) {
schema = this.client.customWrapIdentifier(schema, identity);
}
let sql =
'select * from information_schema.columns where table_name = ? and table_catalog = ?';
const bindings = [
table.toLowerCase(),
this.client.database().toLowerCase(),
];
if (schema) {
sql += ' and table_schema = ?';
bindings.push(schema);
} else {
sql += ' and table_schema = current_schema()';
}
return {
sql,
bindings,
output(resp) {
const out = reduce(
resp.rows,
function(columns, val) {
columns[val.column_name] = {
type: val.data_type,
maxLength: val.character_maximum_length,
nullable: val.is_nullable === 'YES',
defaultValue: val.column_default,
};
return columns;
},
{}
);
return (column && out[column]) || out;
},
};
},
});
module.exports = QueryCompiler_Redshift;
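A sketch of the effect of _slightReturn() above: a .returning() call still compiles, but the clause is dropped (and a warning may be logged); names are placeholders.

const rs = require('knex')({ client: 'redshift' });
rs('users').insert({ name: 'alice' }).returning('id').toString();
// -> insert into "users" ("name") values ('alice')
//    (no returning clause is emitted for the redshift dialect)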

View File

@@ -0,0 +1,20 @@
const inherits = require('inherits');
const ColumnBuilder = require('../../../schema/columnbuilder');
function ColumnBuilder_Redshift() {
ColumnBuilder.apply(this, arguments);
}
inherits(ColumnBuilder_Redshift, ColumnBuilder);
// primary needs to set not null on newly added (non-preexisting) columns, or the constraint would fail
ColumnBuilder_Redshift.prototype.primary = function() {
this.notNullable();
return ColumnBuilder.prototype.primary.apply(this, arguments);
};
ColumnBuilder_Redshift.prototype.index = function() {
this.client.logger.warn('Redshift does not support the creation of indexes.');
return this;
};
module.exports = ColumnBuilder_Redshift;

View File

@@ -0,0 +1,60 @@
// Redshift Column Compiler
// -------
const inherits = require('inherits');
const ColumnCompiler_PG = require('../../postgres/schema/columncompiler');
function ColumnCompiler_Redshift() {
ColumnCompiler_PG.apply(this, arguments);
}
inherits(ColumnCompiler_Redshift, ColumnCompiler_PG);
Object.assign(ColumnCompiler_Redshift.prototype, {
// Types:
// ------
bigincrements: 'bigint identity(1,1) primary key not null',
binary: 'varchar(max)',
bit(column) {
return column.length !== false ? `char(${column.length})` : 'char(1)';
},
blob: 'varchar(max)',
enu: 'varchar(255)',
enum: 'varchar(255)',
increments: 'integer identity(1,1) primary key not null',
json: 'varchar(max)',
jsonb: 'varchar(max)',
longblob: 'varchar(max)',
mediumblob: 'varchar(16777218)',
set: 'text',
text: 'varchar(max)',
datetime(without) {
return without ? 'timestamp' : 'timestamptz';
},
timestamp(without) {
return without ? 'timestamp' : 'timestamptz';
},
tinyblob: 'varchar(256)',
uuid: 'char(36)',
varbinary: 'varchar(max)',
bigint: 'bigint',
bool: 'boolean',
double: 'double precision',
floating: 'real',
smallint: 'smallint',
tinyint: 'smallint',
// Modifiers:
// ------
comment(comment) {
this.pushAdditional(function() {
this.pushQuery(
`comment on column ${this.tableCompiler.tableName()}.` +
this.formatter.wrap(this.args[0]) +
' is ' +
(comment ? `'${comment}'` : 'NULL')
);
}, comment);
},
});
module.exports = ColumnCompiler_Redshift;
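A sketch of the type remapping above, assuming a redshift-configured knex instance; the compiled SQL is approximate.

const rs = require('knex')({ client: 'redshift' });
rs.schema
  .createTable('docs', (t) => {
    t.text('body');  // text  -> varchar(max)
    t.jsonb('meta'); // jsonb -> varchar(max)
    t.uuid('id');    // uuid  -> char(36)
  })
  .toString();
// (roughly) create table "docs" ("body" varchar(max), "meta" varchar(max), "id" char(36))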

View File

@@ -0,0 +1,14 @@
/* eslint max-len: 0 */
// Redshift Schema Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler_PG = require('../../postgres/schema/compiler');
function SchemaCompiler_Redshift() {
SchemaCompiler_PG.apply(this, arguments);
}
inherits(SchemaCompiler_Redshift, SchemaCompiler_PG);
module.exports = SchemaCompiler_Redshift;

View File

@@ -0,0 +1,123 @@
/* eslint max-len: 0 */
// Redshift Table Builder & Compiler
// -------
const inherits = require('inherits');
const { has } = require('lodash');
const TableCompiler_PG = require('../../postgres/schema/tablecompiler');
function TableCompiler_Redshift() {
TableCompiler_PG.apply(this, arguments);
}
inherits(TableCompiler_Redshift, TableCompiler_PG);
TableCompiler_Redshift.prototype.index = function(
columns,
indexName,
indexType
) {
this.client.logger.warn('Redshift does not support the creation of indexes.');
};
TableCompiler_Redshift.prototype.dropIndex = function(columns, indexName) {
this.client.logger.warn('Redshift does not support the deletion of indexes.');
};
// TODO: have to disable setting not null on columns that already exist...
// Adds the "create" query to the query sequence.
TableCompiler_Redshift.prototype.createQuery = function(columns, ifNot) {
const createStatement = ifNot
? 'create table if not exists '
: 'create table ';
let sql =
createStatement + this.tableName() + ' (' + columns.sql.join(', ') + ')';
if (this.single.inherits)
sql += ` like (${this.formatter.wrap(this.single.inherits)})`;
this.pushQuery({
sql,
bindings: columns.bindings,
});
const hasComment = has(this.single, 'comment');
if (hasComment) this.comment(this.single.comment);
};
TableCompiler_Redshift.prototype.primary = function(columns, constraintName) {
const self = this;
constraintName = constraintName
? self.formatter.wrap(constraintName)
: self.formatter.wrap(`${this.tableNameRaw}_pkey`);
if (columns.constructor !== Array) {
columns = [columns];
}
const thiscolumns = self.grouped.columns;
if (thiscolumns) {
for (let i = 0; i < columns.length; i++) {
let exists = thiscolumns.find(
(tcb) =>
tcb.grouping === 'columns' &&
tcb.builder &&
tcb.builder._method === 'add' &&
tcb.builder._args &&
tcb.builder._args.indexOf(columns[i]) > -1
);
if (exists) {
exists = exists.builder;
}
const nullable = !(
exists &&
exists._modifiers &&
exists._modifiers['nullable'] &&
exists._modifiers['nullable'][0] === false
);
if (nullable) {
if (exists) {
return this.client.logger.warn(
'Redshift does not allow primary keys to contain nullable columns.'
);
} else {
return this.client.logger.warn(
'Redshift does not allow primary keys to contain nonexistent columns.'
);
}
}
}
}
return self.pushQuery(
`alter table ${self.tableName()} add constraint ${constraintName} primary key (${self.formatter.columnize(
columns
)})`
);
};
// Compiles column add. Redshift can only add one column per ALTER TABLE, so core addColumns doesn't work. #2545
TableCompiler_Redshift.prototype.addColumns = function(
columns,
prefix,
colCompilers
) {
if (prefix === this.alterColumnsPrefix) {
TableCompiler_PG.prototype.addColumns.call(
this,
columns,
prefix,
colCompilers
);
} else {
prefix = prefix || this.addColumnsPrefix;
colCompilers = colCompilers || this.getColumns();
for (const col of colCompilers) {
const quotedTableName = this.tableName();
const colCompiled = col.compileColumn();
this.pushQuery({
sql: `alter table ${quotedTableName} ${prefix}${colCompiled}`,
bindings: [],
});
}
}
};
module.exports = TableCompiler_Redshift;
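A sketch of the one-column-per-ALTER behaviour of addColumns() above (see #2545), assuming a redshift-configured knex instance; output is approximate.

const rs = require('knex')({ client: 'redshift' });
rs.schema
  .alterTable('users', (t) => {
    t.string('nickname');
    t.integer('age');
  })
  .toString();
// (roughly) two separate statements, one column per ALTER TABLE:
// alter table "users" add column "nickname" varchar(255);
// alter table "users" add column "age" integer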

18
node_modules/knex/lib/dialects/redshift/transaction.js generated vendored Normal file
View File

@@ -0,0 +1,18 @@
const Transaction = require('../../transaction');
module.exports = class Redshift_Transaction extends Transaction {
savepoint(conn) {
this.trxClient.logger.warn('Redshift does not support savepoints.');
return Promise.resolve();
}
release(conn, value) {
this.trxClient.logger.warn('Redshift does not support savepoints.');
return Promise.resolve();
}
rollbackTo(conn, error) {
this.trxClient.logger.warn('Redshift does not support savepoints.');
return Promise.resolve();
}
};

21
node_modules/knex/lib/dialects/sqlite3/formatter.js generated vendored Normal file
View File

@@ -0,0 +1,21 @@
const Formatter = require('../../formatter');
const Raw = require('../../raw');
module.exports = class SQlite3_Formatter extends Formatter {
values(values) {
if (Array.isArray(values)) {
if (Array.isArray(values[0])) {
return `( values ${values
.map((value) => `(${this.parameterize(value)})`)
.join(', ')})`;
}
return `(${this.parameterize(values)})`;
}
if (values instanceof Raw) {
return `(${this.parameter(values)})`;
}
return this.parameter(values);
}
};

171
node_modules/knex/lib/dialects/sqlite3/index.js generated vendored Normal file
View File

@@ -0,0 +1,171 @@
// SQLite3
// -------
const Bluebird = require('bluebird');
const inherits = require('inherits');
const { isUndefined, map, defaults } = require('lodash');
const { promisify } = require('util');
const Client = require('../../client');
const QueryCompiler = require('./query/compiler');
const SchemaCompiler = require('./schema/compiler');
const ColumnCompiler = require('./schema/columncompiler');
const TableCompiler = require('./schema/tablecompiler');
const SQLite3_DDL = require('./schema/ddl');
const SQLite3_Formatter = require('./formatter');
function Client_SQLite3(config) {
Client.call(this, config);
if (isUndefined(config.useNullAsDefault)) {
this.logger.warn(
'sqlite does not support inserting default values. Set the ' +
'`useNullAsDefault` flag to hide this warning. ' +
'(see docs http://knexjs.org/#Builder-insert).'
);
}
}
inherits(Client_SQLite3, Client);
Object.assign(Client_SQLite3.prototype, {
dialect: 'sqlite3',
driverName: 'sqlite3',
_driver() {
return require('sqlite3');
},
schemaCompiler() {
return new SchemaCompiler(this, ...arguments);
},
queryCompiler() {
return new QueryCompiler(this, ...arguments);
},
columnCompiler() {
return new ColumnCompiler(this, ...arguments);
},
tableCompiler() {
return new TableCompiler(this, ...arguments);
},
ddl(compiler, pragma, connection) {
return new SQLite3_DDL(this, compiler, pragma, connection);
},
wrapIdentifierImpl(value) {
return value !== '*' ? `\`${value.replace(/`/g, '``')}\`` : '*';
},
// Get a raw connection from the database, returning a promise with the connection object.
acquireRawConnection() {
return new Bluebird((resolve, reject) => {
const db = new this.driver.Database(
this.connectionSettings.filename,
(err) => {
if (err) {
return reject(err);
}
resolve(db);
}
);
});
},
// Used to explicitly close a connection, called internally by the pool when
// a connection times out or the pool is shutdown.
async destroyRawConnection(connection) {
const close = promisify((cb) => connection.close(cb));
return close();
},
// Runs the query on the specified connection, providing the bindings and any
// other necessary prep work.
_query(connection, obj) {
const { method } = obj;
let callMethod;
switch (method) {
case 'insert':
case 'update':
case 'counter':
case 'del':
callMethod = 'run';
break;
default:
callMethod = 'all';
}
return new Bluebird(function(resolver, rejecter) {
if (!connection || !connection[callMethod]) {
return rejecter(
new Error(`Error calling ${callMethod} on connection.`)
);
}
connection[callMethod](obj.sql, obj.bindings, function(err, response) {
if (err) return rejecter(err);
obj.response = response;
// We need the context here, as it contains
// the "this.lastID" or "this.changes"
obj.context = this;
return resolver(obj);
});
});
},
_stream(connection, sql, stream) {
const client = this;
return new Bluebird(function(resolver, rejecter) {
stream.on('error', rejecter);
stream.on('end', resolver);
return client
._query(connection, sql)
.then((obj) => obj.response)
.then((rows) => rows.forEach((row) => stream.write(row)))
.catch(function(err) {
stream.emit('error', err);
})
.then(function() {
stream.end();
});
});
},
// Ensures the response is returned in the same format as other clients.
processResponse(obj, runner) {
const ctx = obj.context;
let { response } = obj;
if (obj.output) return obj.output.call(runner, response);
switch (obj.method) {
case 'select':
case 'pluck':
case 'first':
if (obj.method === 'pluck') response = map(response, obj.pluck);
return obj.method === 'first' ? response[0] : response;
case 'insert':
return [ctx.lastID];
case 'del':
case 'update':
case 'counter':
return ctx.changes;
default:
return response;
}
},
poolDefaults() {
return defaults(
{ min: 1, max: 1 },
Client.prototype.poolDefaults.call(this)
);
},
formatter() {
return new SQLite3_Formatter(this, ...arguments);
},
});
module.exports = Client_SQLite3;
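A configuration sketch that avoids the useNullAsDefault warning emitted by the constructor above; the filename is a placeholder.

const db = require('knex')({
  client: 'sqlite3',
  connection: { filename: './dev.sqlite3' },
  useNullAsDefault: true,
});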

View File

@@ -0,0 +1,176 @@
// SQLite3 Query Builder & Compiler
const inherits = require('inherits');
const QueryCompiler = require('../../../query/compiler');
const {
assign,
each,
isEmpty,
isString,
noop,
reduce,
identity,
} = require('lodash');
function QueryCompiler_SQLite3(client, builder) {
QueryCompiler.call(this, client, builder);
const { returning } = this.single;
if (returning) {
this.client.logger.warn(
'.returning() is not supported by sqlite3 and will not have any effect.'
);
}
}
inherits(QueryCompiler_SQLite3, QueryCompiler);
assign(QueryCompiler_SQLite3.prototype, {
// The locks are not applicable in SQLite3
forShare: emptyStr,
forUpdate: emptyStr,
// SQLite requires us to build the multi-row insert as a listing of select with
// unions joining them together. So we'll build out this list of columns and
// then join them all together with select unions to complete the queries.
insert() {
const insertValues = this.single.insert || [];
let sql = this.with() + `insert into ${this.tableName} `;
if (Array.isArray(insertValues)) {
if (insertValues.length === 0) {
return '';
} else if (
insertValues.length === 1 &&
insertValues[0] &&
isEmpty(insertValues[0])
) {
return sql + this._emptyInsertValue;
}
} else if (typeof insertValues === 'object' && isEmpty(insertValues)) {
return sql + this._emptyInsertValue;
}
const insertData = this._prepInsert(insertValues);
if (isString(insertData)) {
return sql + insertData;
}
if (insertData.columns.length === 0) {
return '';
}
sql += `(${this.formatter.columnize(insertData.columns)})`;
// backwards compatible error
if (this.client.valueForUndefined !== null) {
each(insertData.values, (bindings) => {
each(bindings, (binding) => {
if (binding === undefined)
throw new TypeError(
'`sqlite` does not support inserting default values. Specify ' +
'values explicitly or use the `useNullAsDefault` config flag. ' +
'(see docs http://knexjs.org/#Builder-insert).'
);
});
});
}
if (insertData.values.length === 1) {
const parameters = this.formatter.parameterize(
insertData.values[0],
this.client.valueForUndefined
);
return sql + ` values (${parameters})`;
}
const blocks = [];
let i = -1;
while (++i < insertData.values.length) {
let i2 = -1;
const block = (blocks[i] = []);
let current = insertData.values[i];
current = current === undefined ? this.client.valueForUndefined : current;
while (++i2 < insertData.columns.length) {
block.push(
this.formatter.alias(
this.formatter.parameter(current[i2]),
this.formatter.wrap(insertData.columns[i2])
)
);
}
blocks[i] = block.join(', ');
}
return sql + ' select ' + blocks.join(' union all select ');
},
// Compile a truncate table statement into SQL.
truncate() {
const { table } = this.single;
return {
sql: `delete from ${this.tableName}`,
output() {
return this.query({
sql: `delete from sqlite_sequence where name = '${table}'`,
}).catch(noop);
},
};
},
// Compiles a `columnInfo` query
columnInfo() {
const column = this.single.columnInfo;
// The user may have specified a custom wrapIdentifier function in the config. We
// need to run the identifiers through that function, but not format them as
// identifiers otherwise.
const table = this.client.customWrapIdentifier(this.single.table, identity);
return {
sql: `PRAGMA table_info(\`${table}\`)`,
output(resp) {
const maxLengthRegex = /.*\((\d+)\)/;
const out = reduce(
resp,
function(columns, val) {
let { type } = val;
let maxLength = type.match(maxLengthRegex);
if (maxLength) {
maxLength = maxLength[1];
}
type = maxLength ? type.split('(')[0] : type;
columns[val.name] = {
type: type.toLowerCase(),
maxLength,
nullable: !val.notnull,
defaultValue: val.dflt_value,
};
return columns;
},
{}
);
return (column && out[column]) || out;
},
};
},
limit() {
const noLimit = !this.single.limit && this.single.limit !== 0;
if (noLimit && !this.single.offset) return '';
// Workaround for offset only,
// see http://stackoverflow.com/questions/10491492/sqllite-with-skip-offset-only-not-limit
return `limit ${this.formatter.parameter(
noLimit ? -1 : this.single.limit
)}`;
},
});
function emptyStr() {
return '';
}
module.exports = QueryCompiler_SQLite3;
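A sketch of the union-select multi-row insert and of the offset-only workaround above, assuming a sqlite3-configured knex instance; output is approximate.

const db = require('knex')({
  client: 'sqlite3',
  connection: { filename: ':memory:' },
  useNullAsDefault: true,
});
db('users').insert([{ name: 'a' }, { name: 'b' }]).toString();
// -> insert into `users` (`name`) select 'a' as `name` union all select 'b' as `name`
db('users').offset(10).toString();
// -> select * from `users` limit -1 offset 10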

View File

@@ -0,0 +1,27 @@
const inherits = require('inherits');
const ColumnCompiler = require('../../../schema/columncompiler');
// Column Compiler
// -------
function ColumnCompiler_SQLite3() {
ColumnCompiler.apply(this, arguments);
this.modifiers = ['nullable', 'defaultTo'];
}
inherits(ColumnCompiler_SQLite3, ColumnCompiler);
// Types
// -------
ColumnCompiler_SQLite3.prototype.double = ColumnCompiler_SQLite3.prototype.decimal = ColumnCompiler_SQLite3.prototype.floating =
'float';
ColumnCompiler_SQLite3.prototype.timestamp = 'datetime';
ColumnCompiler_SQLite3.prototype.enu = function(allowed) {
return `text check (${this.formatter.wrap(this.args[0])} in ('${allowed.join(
"', '"
)}'))`;
};
ColumnCompiler_SQLite3.prototype.json = 'json';
module.exports = ColumnCompiler_SQLite3;

View File

@@ -0,0 +1,49 @@
// SQLite3: Schema Builder & Compiler
// -------
const inherits = require('inherits');
const SchemaCompiler = require('../../../schema/compiler');
const { some } = require('lodash');
// Schema Compiler
// -------
function SchemaCompiler_SQLite3() {
SchemaCompiler.apply(this, arguments);
}
inherits(SchemaCompiler_SQLite3, SchemaCompiler);
// Compile the query to determine if a table exists.
SchemaCompiler_SQLite3.prototype.hasTable = function(tableName) {
const sql =
`select * from sqlite_master ` +
`where type = 'table' and name = ${this.formatter.parameter(tableName)}`;
this.pushQuery({ sql, output: (resp) => resp.length > 0 });
};
// Compile the query to determine if a column exists.
SchemaCompiler_SQLite3.prototype.hasColumn = function(tableName, column) {
this.pushQuery({
sql: `PRAGMA table_info(${this.formatter.wrap(tableName)})`,
output(resp) {
return some(resp, (col) => {
return (
this.client.wrapIdentifier(col.name.toLowerCase()) ===
this.client.wrapIdentifier(column.toLowerCase())
);
});
},
});
};
// Compile a rename table command.
SchemaCompiler_SQLite3.prototype.renameTable = function(from, to) {
this.pushQuery(
`alter table ${this.formatter.wrap(from)} rename to ${this.formatter.wrap(
to
)}`
);
};
module.exports = SchemaCompiler_SQLite3;
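A sketch of hasColumn() above, which answers from PRAGMA table_info output; the table and column names are placeholders.

const db = require('knex')({
  client: 'sqlite3',
  connection: { filename: './dev.sqlite3' },
  useNullAsDefault: true,
});
db.schema.hasColumn('users', 'email').then((exists) => {
  console.log(exists); // true or false
});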

330
node_modules/knex/lib/dialects/sqlite3/schema/ddl.js generated vendored Normal file
View File

@@ -0,0 +1,330 @@
// SQLite3_DDL
//
// All of the SQLite3 specific DDL helpers for renaming/dropping
// columns and changing datatypes.
// -------
const {
assign,
uniqueId,
find,
identity,
map,
omit,
invert,
fromPairs,
some,
negate,
isEmpty,
chunk,
} = require('lodash');
// So altering the schema in SQLite3 is a major pain.
// We have our own object to deal with the renaming and altering the types
// for sqlite3 things.
function SQLite3_DDL(client, tableCompiler, pragma, connection) {
this.client = client;
this.tableCompiler = tableCompiler;
this.pragma = pragma;
this.tableNameRaw = this.tableCompiler.tableNameRaw;
this.alteredName = uniqueId('_knex_temp_alter');
this.connection = connection;
this.formatter =
client && client.config && client.config.wrapIdentifier
? client.config.wrapIdentifier
: (value) => value;
}
assign(SQLite3_DDL.prototype, {
tableName() {
return this.formatter(this.tableNameRaw, (value) => value);
},
getColumn: async function(column) {
const currentCol = find(this.pragma, (col) => {
return (
this.client.wrapIdentifier(col.name).toLowerCase() ===
this.client.wrapIdentifier(column).toLowerCase()
);
});
if (!currentCol)
throw new Error(
`The column ${column} is not in the ${this.tableName()} table`
);
return currentCol;
},
getTableSql() {
this.trx.disableProcessing();
return this.trx
.raw(
`SELECT name, sql FROM sqlite_master WHERE type="table" AND name="${this.tableName()}"`
)
.then((result) => {
this.trx.enableProcessing();
return result;
});
},
renameTable: async function() {
return this.trx.raw(
`ALTER TABLE "${this.tableName()}" RENAME TO "${this.alteredName}"`
);
},
dropOriginal() {
return this.trx.raw(`DROP TABLE "${this.tableName()}"`);
},
dropTempTable() {
return this.trx.raw(`DROP TABLE "${this.alteredName}"`);
},
copyData() {
return this.trx
.raw(`SELECT * FROM "${this.tableName()}"`)
.then((result) =>
this.insertChunked(20, this.alteredName, identity, result)
);
},
reinsertData(iterator) {
return this.trx
.raw(`SELECT * FROM "${this.alteredName}"`)
.then((result) =>
this.insertChunked(20, this.tableName(), iterator, result)
);
},
async insertChunked(chunkSize, target, iterator, result) {
iterator = iterator || identity;
const chunked = chunk(result, chunkSize);
for (const batch of chunked) {
await this.trx
.queryBuilder()
.table(target)
.insert(map(batch, iterator));
}
},
createTempTable(createTable) {
return this.trx.raw(
createTable.sql.replace(this.tableName(), this.alteredName)
);
},
_doReplace(sql, from, to) {
const oneLineSql = sql.replace(/\s+/g, ' ');
const matched = oneLineSql.match(/^CREATE TABLE\s+(\S+)\s*\((.*)\)/);
const tableName = matched[1];
const defs = matched[2];
if (!defs) {
throw new Error('No column definitions in this statement!');
}
let parens = 0,
args = [],
ptr = 0;
let i = 0;
const x = defs.length;
for (i = 0; i < x; i++) {
switch (defs[i]) {
case '(':
parens++;
break;
case ')':
parens--;
break;
case ',':
if (parens === 0) {
args.push(defs.slice(ptr, i));
ptr = i + 1;
}
break;
case ' ':
if (ptr === i) {
ptr = i + 1;
}
break;
}
}
args.push(defs.slice(ptr, i));
const fromIdentifier = from.replace(/[`"'[\]]/g, '');
args = args.map((item) => {
let split = item.trim().split(' ');
// SQLite supports all quoting mechanisms prevalent in all major dialects of SQL
// and preserves the original quoting in sqlite_master.
//
// Also, identifiers are never case sensitive, not even when quoted.
//
// Ref: https://www.sqlite.org/lang_keywords.html
const fromMatchCandidates = [
new RegExp(`\`${fromIdentifier}\``, 'i'),
new RegExp(`"${fromIdentifier}"`, 'i'),
new RegExp(`'${fromIdentifier}'`, 'i'),
new RegExp(`\\[${fromIdentifier}\\]`, 'i'),
];
if (fromIdentifier.match(/^\S+$/)) {
fromMatchCandidates.push(new RegExp(`\\b${fromIdentifier}\\b`, 'i'));
}
const doesMatchFromIdentifier = (target) =>
some(fromMatchCandidates, (c) => target.match(c));
const replaceFromIdentifier = (target) =>
fromMatchCandidates.reduce(
(result, candidate) => result.replace(candidate, to),
target
);
if (doesMatchFromIdentifier(split[0])) {
// column definition
if (to) {
split[0] = to;
return split.join(' ');
}
return ''; // for deletions
}
// skip constraint name
const idx = /constraint/i.test(split[0]) ? 2 : 0;
// primary key and unique constraints have one or more
// columns from this table listed between (); replace
// one if it matches
if (/primary|unique/i.test(split[idx])) {
const ret = item.replace(/\(.*\)/, replaceFromIdentifier);
// If any member columns are dropped then uniqueness/pk constraint
// can not be retained
if (ret !== item && isEmpty(to)) return '';
return ret;
}
// foreign keys have one or more columns from this table
// listed between (); replace one if it matches
// foreign keys also have a 'references' clause
// which may reference THIS table; if it does, replace
// column references in that too!
if (/foreign/.test(split[idx])) {
split = item.split(/ references /i);
// the quoted column names save us from having to do anything
// other than a straight replace here
const replacedKeySpec = replaceFromIdentifier(split[0]);
if (split[0] !== replacedKeySpec) {
// If we are removing one or more columns of a foreign
// key, then we should not retain the key at all
if (isEmpty(to)) return '';
else split[0] = replacedKeySpec;
}
if (split[1].slice(0, tableName.length) === tableName) {
// self-referential foreign key
const replacedKeyTargetSpec = split[1].replace(
/\(.*\)/,
replaceFromIdentifier
);
if (split[1] !== replacedKeyTargetSpec) {
// If we are removing one or more columns of a foreign
// key, then we should not retain the key at all
if (isEmpty(to)) return '';
else split[1] = replacedKeyTargetSpec;
}
}
return split.join(' references ');
}
return item;
});
args = args.filter(negate(isEmpty));
if (args.length === 0) {
throw new Error('Unable to drop last column from table');
}
return oneLineSql
.replace(/\(.*\)/, () => `(${args.join(', ')})`)
.replace(/,\s*([,)])/, '$1');
},
// Boy, this is quite a method.
renameColumn: async function(from, to) {
return this.client.transaction(
async (trx) => {
this.trx = trx;
const column = await this.getColumn(from);
const sql = await this.getTableSql(column);
const a = this.client.wrapIdentifier(from);
const b = this.client.wrapIdentifier(to);
const createTable = sql[0];
const newSql = this._doReplace(createTable.sql, a, b);
if (sql === newSql) {
throw new Error('Unable to find the column to change');
}
const { from: mappedFrom, to: mappedTo } = invert(
this.client.postProcessResponse(
invert({
from,
to,
})
)
);
return this.reinsertMapped(createTable, newSql, (row) => {
row[mappedTo] = row[mappedFrom];
return omit(row, mappedFrom);
});
},
{ connection: this.connection }
);
},
dropColumn: async function(columns) {
return this.client.transaction(
(trx) => {
this.trx = trx;
return Promise.all(columns.map((column) => this.getColumn(column)))
.then(() => this.getTableSql())
.then((sql) => {
const createTable = sql[0];
let newSql = createTable.sql;
columns.forEach((column) => {
const a = this.client.wrapIdentifier(column);
newSql = this._doReplace(newSql, a, '');
});
if (sql === newSql) {
throw new Error('Unable to find the column to change');
}
const mappedColumns = Object.keys(
this.client.postProcessResponse(
fromPairs(columns.map((column) => [column, column]))
)
);
return this.reinsertMapped(createTable, newSql, (row) =>
omit(row, ...mappedColumns)
);
});
},
{ connection: this.connection }
);
},
reinsertMapped(createTable, newSql, mapRow) {
return Promise.resolve()
.then(() => this.createTempTable(createTable))
.then(() => this.copyData())
.then(() => this.dropOriginal())
.then(() => this.trx.raw(newSql))
.then(() => this.reinsertData(mapRow))
.then(() => this.dropTempTable());
},
});
module.exports = SQLite3_DDL;
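A sketch of what drives the helper above: renaming (or dropping) a column on sqlite3 routes through this temp-table copy/recreate/reinsert sequence via the schema builder; names are placeholders.

const db = require('knex')({
  client: 'sqlite3',
  connection: { filename: './dev.sqlite3' },
  useNullAsDefault: true,
});
db.schema
  .alterTable('users', (t) => {
    t.renameColumn('name', 'full_name');
  })
  .then(() => console.log('column renamed'));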

View File

@@ -0,0 +1,156 @@
const inherits = require('inherits');
const TableCompiler = require('../../../schema/tablecompiler');
const { filter, values } = require('lodash');
// Table Compiler
// -------
function TableCompiler_SQLite3() {
TableCompiler.apply(this, arguments);
this.primaryKey = void 0;
}
inherits(TableCompiler_SQLite3, TableCompiler);
// Create a new table.
TableCompiler_SQLite3.prototype.createQuery = function(columns, ifNot) {
const createStatement = ifNot
? 'create table if not exists '
: 'create table ';
let sql = createStatement + this.tableName() + ' (' + columns.sql.join(', ');
// SQLite forces primary keys to be added when the table is initially created
// so we will need to check for primary key commands and add those columns
// to the table's declaration here so they can be created with the table.
sql += this.foreignKeys() || '';
sql += this.primaryKeys() || '';
sql += ')';
this.pushQuery(sql);
};
TableCompiler_SQLite3.prototype.addColumns = function(columns, prefix) {
if (prefix) {
throw new Error('Sqlite does not support alter column.');
}
for (let i = 0, l = columns.sql.length; i < l; i++) {
this.pushQuery({
sql: `alter table ${this.tableName()} add column ${columns.sql[i]}`,
bindings: columns.bindings[i],
});
}
};
// Compile a drop unique key command.
TableCompiler_SQLite3.prototype.dropUnique = function(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, columns);
this.pushQuery(`drop index ${indexName}`);
};
TableCompiler_SQLite3.prototype.dropIndex = function(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
this.pushQuery(`drop index ${indexName}`);
};
// Compile a unique key command.
TableCompiler_SQLite3.prototype.unique = function(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('unique', this.tableNameRaw, columns);
columns = this.formatter.columnize(columns);
this.pushQuery(
`create unique index ${indexName} on ${this.tableName()} (${columns})`
);
};
// Compile a plain index key command.
TableCompiler_SQLite3.prototype.index = function(columns, indexName) {
indexName = indexName
? this.formatter.wrap(indexName)
: this._indexCommand('index', this.tableNameRaw, columns);
columns = this.formatter.columnize(columns);
this.pushQuery(
`create index ${indexName} on ${this.tableName()} (${columns})`
);
};
TableCompiler_SQLite3.prototype.primary = TableCompiler_SQLite3.prototype.foreign = function() {
if (this.method !== 'create' && this.method !== 'createIfNot') {
this.client.logger.warn(
'SQLite3 Foreign & Primary keys may only be added on create'
);
}
};
TableCompiler_SQLite3.prototype.primaryKeys = function() {
const pks = filter(this.grouped.alterTable || [], { method: 'primary' });
if (pks.length > 0 && pks[0].args.length > 0) {
const columns = pks[0].args[0];
let constraintName = pks[0].args[1] || '';
if (constraintName) {
constraintName = ' constraint ' + this.formatter.wrap(constraintName);
}
return `,${constraintName} primary key (${this.formatter.columnize(
columns
)})`;
}
};
TableCompiler_SQLite3.prototype.foreignKeys = function() {
let sql = '';
const foreignKeys = filter(this.grouped.alterTable || [], {
method: 'foreign',
});
for (let i = 0, l = foreignKeys.length; i < l; i++) {
const foreign = foreignKeys[i].args[0];
const column = this.formatter.columnize(foreign.column);
const references = this.formatter.columnize(foreign.references);
const foreignTable = this.formatter.wrap(foreign.inTable);
let constraintName = foreign.keyName || '';
if (constraintName) {
constraintName = ' constraint ' + this.formatter.wrap(constraintName);
}
sql += `,${constraintName} foreign key(${column}) references ${foreignTable}(${references})`;
if (foreign.onDelete) sql += ` on delete ${foreign.onDelete}`;
if (foreign.onUpdate) sql += ` on update ${foreign.onUpdate}`;
}
return sql;
};
TableCompiler_SQLite3.prototype.createTableBlock = function() {
return this.getColumns()
.concat()
.join(',');
};
// Compile a rename column command... very complex in sqlite
TableCompiler_SQLite3.prototype.renameColumn = function(from, to) {
const compiler = this;
this.pushQuery({
sql: `PRAGMA table_info(${this.tableName()})`,
output(pragma) {
return compiler.client
.ddl(compiler, pragma, this.connection)
.renameColumn(from, to);
},
});
};
TableCompiler_SQLite3.prototype.dropColumn = function() {
const compiler = this;
const columns = values(arguments);
this.pushQuery({
sql: `PRAGMA table_info(${this.tableName()})`,
output(pragma) {
return compiler.client
.ddl(compiler, pragma, this.connection)
.dropColumn(columns);
},
});
};
module.exports = TableCompiler_SQLite3;
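A sketch of the create-time-only key handling above: primary and foreign keys are folded into the CREATE TABLE statement by primaryKeys()/foreignKeys(); the compiled SQL is approximate.

const db = require('knex')({
  client: 'sqlite3',
  connection: { filename: ':memory:' },
  useNullAsDefault: true,
});
db.schema
  .createTable('orders', (t) => {
    t.integer('user_id').notNullable();
    t.foreign('user_id').references('users.id');
    t.primary(['user_id']);
  })
  .toString();
// (roughly) create table `orders` (`user_id` integer not null,
//   foreign key(`user_id`) references `users`(`id`), primary key (`user_id`))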