Add initial version of dijkstra backend cloudron image
This commit is contained in:
321
node_modules/knex/lib/dialects/postgres/index.js
generated
vendored
Normal file
321
node_modules/knex/lib/dialects/postgres/index.js
generated
vendored
Normal file
@@ -0,0 +1,321 @@
|
||||
// PostgreSQL
|
||||
// -------
|
||||
const { map, extend, isString } = require('lodash');
|
||||
const { promisify } = require('util');
|
||||
const inherits = require('inherits');
|
||||
const Client = require('../../client');
|
||||
const Bluebird = require('bluebird');
|
||||
|
||||
const QueryCompiler = require('./query/compiler');
|
||||
const ColumnCompiler = require('./schema/columncompiler');
|
||||
const TableCompiler = require('./schema/tablecompiler');
|
||||
const SchemaCompiler = require('./schema/compiler');
|
||||
const { makeEscape } = require('../../query/string');
|
||||
|
||||
// Client_PG wires the PostgreSQL dialect into knex's generic Client.
// Recognised config extras:
//   - returning:  default `returning` column list for writes
//   - searchPath: schema search path applied to each new connection
function Client_PG(config) {
  Client.apply(this, arguments);

  const { returning, searchPath } = config;
  if (returning) {
    this.defaultReturning = returning;
  }
  if (searchPath) {
    this.searchPath = searchPath;
  }
}
inherits(Client_PG, Client);
|
||||
|
||||
// Dialect members shared by every Client_PG instance: compiler
// factories, value escaping, connection acquisition/teardown, binding
// placement, streaming and response normalisation.
Object.assign(Client_PG.prototype, {
  queryCompiler() {
    return new QueryCompiler(this, ...arguments);
  },

  columnCompiler() {
    return new ColumnCompiler(this, ...arguments);
  },

  schemaCompiler() {
    return new SchemaCompiler(this, ...arguments);
  },

  tableCompiler() {
    return new TableCompiler(this, ...arguments);
  },

  dialect: 'postgresql',

  driverName: 'pg',

  _driver() {
    return require('pg');
  },

  // Escape hooks used when bindings are interpolated directly into SQL.
  _escapeBinding: makeEscape({
    escapeArray(val, esc) {
      return esc(arrayString(val, esc));
    },
    escapeString(str) {
      // Double embedded quotes/backslashes; a backslash anywhere forces
      // the E'...' (escape string) literal form.
      let sawBackslash = false;
      let out = "'";
      for (const ch of str) {
        if (ch === "'") {
          out += "''";
        } else if (ch === '\\') {
          out += '\\\\';
          sawBackslash = true;
        } else {
          out += ch;
        }
      }
      out += "'";
      return sawBackslash ? 'E' + out : out;
    },
    escapeObject(val, prepareValue, timezone, seen = []) {
      // Objects exposing toPostgres() serialise themselves; `seen`
      // guards against toPostgres() chains that loop back on a value.
      if (val && typeof val.toPostgres === 'function') {
        seen = seen || [];
        if (seen.includes(val)) {
          throw new Error(
            `circular reference detected while preparing "${val}" for query`
          );
        }
        seen.push(val);
        return prepareValue(val.toPostgres(prepareValue), seen);
      }
      return JSON.stringify(val);
    },
  }),

  // Double-quote an identifier, keeping a trailing [n] array accessor
  // outside the quotes. '*' is passed through untouched.
  wrapIdentifierImpl(value) {
    if (value === '*') return value;

    let arrayAccessor = '';
    const accessorMatch = /(.*?)(\[[0-9]+\])/.exec(value);
    if (accessorMatch) {
      value = accessorMatch[1];
      arrayAccessor = accessorMatch[2];
    }

    return `"${value.replace(/"/g, '""')}"${arrayAccessor}`;
  },

  // Get a raw connection, called by the `pool` whenever a new
  // connection needs to be added to the pool.
  acquireRawConnection() {
    const client = this;
    return new Bluebird((resolver, rejecter) => {
      const connection = new client.driver.Client(client.connectionSettings);
      connection.connect((err, connection) => {
        if (err) {
          return rejecter(err);
        }
        // Flag the connection so the pool evicts it after a driver
        // error or an unexpected end.
        connection.on('error', (err) => {
          connection.__knex__disposed = err;
        });
        connection.on('end', (err) => {
          connection.__knex__disposed = err || 'Connection ended unexpectedly';
        });
        if (!client.version) {
          // First connection: cache the server version for feature
          // detection before handing the connection back.
          return client.checkVersion(connection).then((version) => {
            client.version = version;
            resolver(connection);
          });
        }
        resolver(connection);
      });
    }).then(function setSearchPath(connection) {
      client.setSchemaSearchPath(connection);
      return connection;
    });
  },

  // Used to explicitly close a connection, called internally by the pool
  // when a connection times out or the pool is shutdown.
  async destroyRawConnection(connection) {
    return promisify((cb) => connection.end(cb))();
  },

  // In PostgreSQL, we need to do a version check to do some feature
  // checking on the database.
  checkVersion(connection) {
    return new Bluebird((resolver, rejecter) => {
      connection.query('select version();', (err, resp) => {
        if (err) return rejecter(err);
        // e.g. "PostgreSQL 9.6.2 on x86_64 ..." -> "9.6.2"
        resolver(/^PostgreSQL (.*?)( |$)/.exec(resp.rows[0].version)[1]);
      });
    });
  },

  // Position the bindings for the query. The escape sequence for question mark
  // is \? (e.g. knex.raw("\\?") since javascript requires '\' to be escaped too...)
  positionBindings(sql) {
    let bindingIndex = 0;
    return sql.replace(/(\\*)(\?)/g, (match, escapes) =>
      escapes.length % 2 ? '?' : `$${++bindingIndex}`
    );
  },

  // Apply the configured (or supplied) search path on a connection.
  // Resolves true; rejects when the SET statement fails.
  setSchemaSearchPath(connection, searchPath) {
    let path = searchPath || this.searchPath;

    if (!path) return Bluebird.resolve(true);

    if (!Array.isArray(path) && !isString(path)) {
      throw new TypeError(
        `knex: Expected searchPath to be Array/String, got: ${typeof path}`
      );
    }

    if (isString(path)) {
      if (path.includes(',')) {
        const arraySyntax = `[${path
          .split(',')
          .map((searchPath) => `'${searchPath}'`)
          .join(', ')}]`;
        this.logger.warn(
          `Detected comma in searchPath "${path}".` +
            `If you are trying to specify multiple schemas, use Array syntax: ${arraySyntax}`
        );
      }
      path = [path];
    }

    path = path.map((schemaName) => `"${schemaName}"`).join(',');

    return new Bluebird((resolver, rejecter) => {
      connection.query(`set search_path to ${path}`, (err) => {
        if (err) return rejecter(err);
        resolver(true);
      });
    });
  },

  // Stream query results into `stream` via pg-query-stream.
  _stream(connection, obj, stream, options) {
    const PGQueryStream = process.browser
      ? undefined
      : require('pg-query-stream');
    const sql = obj.sql;

    return new Bluebird((resolver, rejecter) => {
      const queryStream = connection.query(
        new PGQueryStream(sql, obj.bindings, options)
      );

      queryStream.on('error', (error) => {
        rejecter(error);
        stream.emit('error', error);
      });

      // 'end' IS propagated by .pipe, by default
      stream.on('end', resolver);
      queryStream.pipe(stream);
    });
  },

  // Runs the query on the specified connection, providing the bindings
  // and any other necessary prep work.
  _query(connection, obj) {
    let queryConfig = {
      text: obj.sql,
      values: obj.bindings || [],
    };

    if (obj.options) {
      queryConfig = extend(queryConfig, obj.options);
    }

    return new Bluebird((resolver, rejecter) => {
      connection.query(queryConfig, (err, response) => {
        if (err) return rejecter(err);
        obj.response = response;
        resolver(obj);
      });
    });
  },

  // Ensures the response is returned in the same format as other clients.
  processResponse(obj, runner) {
    const resp = obj.response;
    if (obj.output) return obj.output.call(runner, resp);
    if (obj.method === 'raw') return resp;
    const { returning } = obj;
    if (resp.command === 'SELECT') {
      if (obj.method === 'first') return resp.rows[0];
      // lodash map with a string iteratee plucks that property per row.
      if (obj.method === 'pluck') return map(resp.rows, obj.pluck);
      return resp.rows;
    }
    if (returning) {
      // '*' or a column list yields whole rows; a single column name
      // yields only that column's value per row.
      return resp.rows.map((row) =>
        returning === '*' || Array.isArray(returning)
          ? row
          : row[Object.keys(row)[0]]
      );
    }
    if (resp.command === 'UPDATE' || resp.command === 'DELETE') {
      return resp.rowCount;
    }
    return resp;
  },

  canCancelQuery: true,
  cancelQuery(connectionToKill) {
    // Error out if we can't acquire connection in time.
    // Purposely not putting timeout on `pg_cancel_backend` execution because erroring
    // early there would release the `connectionToKill` back to the pool with
    // a `KILL QUERY` command yet to finish.
    return this.acquireConnection().then((conn) =>
      this._wrappedCancelQueryCall(conn, connectionToKill).finally(() => {
        // NOT returning this promise because we want to release the connection
        // in a non-blocking fashion
        this.releaseConnection(conn);
      })
    );
  },
  _wrappedCancelQueryCall(conn, connectionToKill) {
    return this.query(conn, {
      method: 'raw',
      sql: 'SELECT pg_cancel_backend(?);',
      bindings: [connectionToKill.processID],
      options: {},
    });
  },
});
|
||||
|
||||
// Render a JS array as a PostgreSQL array literal, e.g. {1,NULL,"a"}.
// Strings are JSON-quoted, numbers pass through, null/undefined become
// NULL, nested arrays recurse, and anything else goes through `esc`
// before being JSON-quoted.
function arrayString(arr, esc) {
  const parts = arr.map((val) => {
    if (val === null || typeof val === 'undefined') return 'NULL';
    if (Array.isArray(val)) return arrayString(val, esc);
    if (typeof val === 'number') return val;
    return JSON.stringify(typeof val === 'string' ? val : esc(val));
  });
  return `{${parts.join(',')}}`;
}
|
||||
|
||||
module.exports = Client_PG;
|
||||
166
node_modules/knex/lib/dialects/postgres/query/compiler.js
generated
vendored
Normal file
166
node_modules/knex/lib/dialects/postgres/query/compiler.js
generated
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
// PostgreSQL Query Builder & Compiler
|
||||
// ------
|
||||
const inherits = require('inherits');
|
||||
|
||||
const QueryCompiler = require('../../../query/compiler');
|
||||
|
||||
const { reduce, identity } = require('lodash');
|
||||
|
||||
// PostgreSQL-specific query compiler; construction is deferred to the
// base QueryCompiler, with dialect overrides added on the prototype.
function QueryCompiler_PG(client, builder) {
  QueryCompiler.apply(this, arguments);
}

inherits(QueryCompiler_PG, QueryCompiler);
|
||||
|
||||
// Dialect overrides: truncate with identity restart, `returning`
// support on insert/update/del, row-lock clauses and columnInfo.
Object.assign(QueryCompiler_PG.prototype, {
  // Compiles a truncate query.
  truncate() {
    return `truncate ${this.tableName} restart identity`;
  },

  // Used when an array with multiple empty values is supplied.
  _defaultInsertValue: 'default',

  // Compiles an `insert` query, allowing for multiple
  // inserts using a single query statement.
  insert() {
    const sql = QueryCompiler.prototype.insert.call(this);
    if (sql === '') return sql;
    const { returning } = this.single;
    return {
      sql: sql + this._returning(returning),
      returning,
    };
  },

  // Compiles an `update` query, allowing for a return value.
  update() {
    // Keep the with/prepUpdate/where call order: each appends bindings
    // to the formatter in sequence.
    const withSQL = this.with();
    const updateData = this._prepUpdate(this.single.update);
    const wheres = this.where();
    const { returning } = this.single;
    const sql =
      withSQL +
      `update ${this.single.only ? 'only ' : ''}${this.tableName} ` +
      `set ${updateData.join(', ')}` +
      (wheres ? ` ${wheres}` : '') +
      this._returning(returning);
    return { sql, returning };
  },

  // Compiles a `del` query, allowing for a return value.
  del() {
    const sql = QueryCompiler.prototype.del.apply(this, arguments);
    const { returning } = this.single;
    return {
      sql: sql + this._returning(returning),
      returning,
    };
  },

  aggregate(stmt) {
    return this._aggregate(stmt, { distinctParentheses: true });
  },

  // ` returning <cols>` fragment, or '' when nothing was requested.
  _returning(value) {
    return value ? ` returning ${this.formatter.columnize(value)}` : '';
  },

  // Join array of table names and apply default schema.
  _tableNames(tables) {
    const schemaName = this.single.schema;
    const wrapped = [];

    for (const table of tables) {
      if (!table) continue;
      const qualified = schemaName ? `${schemaName}.${table}` : table;
      wrapped.push(this.formatter.wrap(qualified));
    }

    return wrapped.join(', ');
  },

  forUpdate() {
    const tables = this.single.lockTables || [];
    return (
      'for update' + (tables.length ? ' of ' + this._tableNames(tables) : '')
    );
  },

  forShare() {
    const tables = this.single.lockTables || [];
    return (
      'for share' + (tables.length ? ' of ' + this._tableNames(tables) : '')
    );
  },

  skipLocked() {
    return 'skip locked';
  },

  noWait() {
    return 'nowait';
  },

  // Compiles a columnInfo query
  columnInfo() {
    const column = this.single.columnInfo;
    let schema = this.single.schema;

    // The user may have specified a custom wrapIdentifier function in the config. We
    // need to run the identifiers through that function, but not format them as
    // identifiers otherwise.
    const table = this.client.customWrapIdentifier(this.single.table, identity);
    if (schema) {
      schema = this.client.customWrapIdentifier(schema, identity);
    }

    let sql =
      'select * from information_schema.columns where table_name = ? and table_catalog = ?';
    const bindings = [table, this.client.database()];

    if (schema) {
      sql += ' and table_schema = ?';
      bindings.push(schema);
    } else {
      sql += ' and table_schema = current_schema()';
    }

    return {
      sql,
      bindings,
      output(resp) {
        const columns = {};
        for (const row of resp.rows) {
          columns[row.column_name] = {
            type: row.data_type,
            maxLength: row.character_maximum_length,
            nullable: row.is_nullable === 'YES',
            defaultValue: row.column_default,
          };
        }
        // A specific column was requested -> its info; otherwise all.
        return (column && columns[column]) || columns;
      },
    };
  },

  distinctOn(value) {
    return `distinct on (${this.formatter.columnize(value)}) `;
  },
});
|
||||
|
||||
module.exports = QueryCompiler_PG;
|
||||
122
node_modules/knex/lib/dialects/postgres/schema/columncompiler.js
generated
vendored
Normal file
122
node_modules/knex/lib/dialects/postgres/schema/columncompiler.js
generated
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
// PostgreSQL Column Compiler
|
||||
// -------
|
||||
|
||||
const inherits = require('inherits');
|
||||
const ColumnCompiler = require('../../../schema/columncompiler');
|
||||
const { isObject } = require('lodash');
|
||||
|
||||
// PostgreSQL column compiler: declares the column modifiers this
// dialect supports on top of the base ColumnCompiler.
function ColumnCompiler_PG() {
  ColumnCompiler.apply(this, arguments);
  this.modifiers = ['nullable', 'defaultTo', 'comment'];
}
inherits(ColumnCompiler_PG, ColumnCompiler);
|
||||
|
||||
// Type mappings and modifiers for the PostgreSQL dialect.
Object.assign(ColumnCompiler_PG.prototype, {
  // Types
  // ------
  bigincrements: 'bigserial primary key',
  bigint: 'bigint',
  binary: 'bytea',

  bit(column) {
    return column.length !== false ? `bit(${column.length})` : 'bit';
  },

  bool: 'boolean',

  // Create the column definition for an enum type.
  // Using method "2" here: http://stackoverflow.com/a/10984951/525714
  enu(allowed, options) {
    options = options || {};

    const values =
      options.useNative && options.existingType
        ? undefined
        : allowed.join("', '");

    if (!options.useNative) {
      // Plain text column constrained by a CHECK on the allowed values.
      return `text check (${this.formatter.wrap(this.args[0])} in ('${values}'))`;
    }

    const schemaName = options.schemaName || this.tableCompiler.schemaNameRaw;
    const enumName =
      (schemaName ? `"${schemaName}".` : '') + `"${options.enumName}"`;

    if (!options.existingType) {
      // Create the native enum type ahead of the table statement.
      this.tableCompiler.unshiftQuery(
        `create type ${enumName} as enum ('${values}')`
      );
    }

    return enumName;
  },

  double: 'double precision',
  decimal(precision, scale) {
    if (precision === null) return 'decimal';
    return `decimal(${this._num(precision, 8)}, ${this._num(scale, 2)})`;
  },
  floating: 'real',
  increments: 'serial primary key',
  json(jsonb) {
    if (jsonb) this.client.logger.deprecate('json(true)', 'jsonb()');
    return jsonColumn(this.client, jsonb);
  },
  jsonb() {
    return jsonColumn(this.client, true);
  },
  smallint: 'smallint',
  tinyint: 'smallint',
  // Accepts either (withoutTz, precision) or a single options object
  // ({ useTz, precision }).
  datetime(withoutTz = false, precision) {
    let useTz;
    if (isObject(withoutTz)) {
      ({ useTz, precision } = withoutTz);
    } else {
      useTz = !withoutTz;
    }
    const base = useTz ? 'timestamptz' : 'timestamp';
    return precision ? `${base}(${precision})` : base;
  },
  timestamp(withoutTz = false, precision) {
    let useTz;
    if (isObject(withoutTz)) {
      ({ useTz, precision } = withoutTz);
    } else {
      useTz = !withoutTz;
    }
    const base = useTz ? 'timestamptz' : 'timestamp';
    return precision ? `${base}(${precision})` : base;
  },
  uuid: 'uuid',

  // Modifiers:
  // ------
  comment(comment) {
    const columnName = this.args[0] || this.defaults('columnName');

    // Keep a function expression here: pushAdditional supplies `this`.
    this.pushAdditional(function() {
      this.pushQuery(
        `comment on column ${this.tableCompiler.tableName()}.${this.formatter.wrap(
          columnName
        )} is ${comment ? `'${comment}'` : 'NULL'}`
      );
    }, comment);
  },
});
|
||||
|
||||
// Use the native json/jsonb column types on servers reporting 9.2 or
// newer; fall back to plain text otherwise. An unknown version (no
// connection made yet) optimistically assumes native support.
function jsonColumn(client, jsonb) {
  const nativeJson = !client.version || parseFloat(client.version) >= 9.2;
  if (!nativeJson) return 'text';
  return jsonb ? 'jsonb' : 'json';
}
|
||||
|
||||
module.exports = ColumnCompiler_PG;
|
||||
109
node_modules/knex/lib/dialects/postgres/schema/compiler.js
generated
vendored
Normal file
109
node_modules/knex/lib/dialects/postgres/schema/compiler.js
generated
vendored
Normal file
@@ -0,0 +1,109 @@
|
||||
// PostgreSQL Schema Compiler
|
||||
// -------
|
||||
|
||||
const inherits = require('inherits');
|
||||
const SchemaCompiler = require('../../../schema/compiler');
|
||||
|
||||
// PostgreSQL schema compiler; all construction happens in the base
// SchemaCompiler, with dialect methods added on the prototype below.
function SchemaCompiler_PG() {
  SchemaCompiler.apply(this, arguments);
}
inherits(SchemaCompiler_PG, SchemaCompiler);
|
||||
|
||||
// Check whether the current table
|
||||
// Check whether a table exists in the target schema (or the current
// schema when none was specified).
SchemaCompiler_PG.prototype.hasTable = function(tableName) {
  const bindings = [tableName];
  let sql = 'select * from information_schema.tables where table_name = ?';

  if (this.schema) {
    sql += ' and table_schema = ?';
    bindings.push(this.schema);
  } else {
    sql += ' and table_schema = current_schema()';
  }

  this.pushQuery({
    sql,
    bindings,
    output(resp) {
      return resp.rows.length > 0;
    },
  });
};

// Compile the query to determine if a column exists in a table.
SchemaCompiler_PG.prototype.hasColumn = function(tableName, columnName) {
  const bindings = [tableName, columnName];
  let sql =
    'select * from information_schema.columns where table_name = ? and column_name = ?';

  if (this.schema) {
    sql += ' and table_schema = ?';
    bindings.push(this.schema);
  } else {
    sql += ' and table_schema = current_schema()';
  }

  this.pushQuery({
    sql,
    bindings,
    output(resp) {
      return resp.rows.length > 0;
    },
  });
};

// Wrap a table name, prefixing the compiler's schema when one is set.
SchemaCompiler_PG.prototype.qualifiedTableName = function(tableName) {
  const name = this.schema ? `${this.schema}.${tableName}` : tableName;
  return this.formatter.wrap(name);
};

// Compile a rename table command.
SchemaCompiler_PG.prototype.renameTable = function(from, to) {
  const fromName = this.qualifiedTableName(from);
  const toName = this.formatter.wrap(to);
  this.pushQuery(`alter table ${fromName} rename to ${toName}`);
};

SchemaCompiler_PG.prototype.createSchema = function(schemaName) {
  this.pushQuery(`create schema ${this.formatter.wrap(schemaName)}`);
};

SchemaCompiler_PG.prototype.createSchemaIfNotExists = function(schemaName) {
  this.pushQuery(
    `create schema if not exists ${this.formatter.wrap(schemaName)}`
  );
};

SchemaCompiler_PG.prototype.dropSchema = function(schemaName) {
  this.pushQuery(`drop schema ${this.formatter.wrap(schemaName)}`);
};

SchemaCompiler_PG.prototype.dropSchemaIfExists = function(schemaName) {
  this.pushQuery(`drop schema if exists ${this.formatter.wrap(schemaName)}`);
};

SchemaCompiler_PG.prototype.dropExtension = function(extensionName) {
  this.pushQuery(`drop extension ${this.formatter.wrap(extensionName)}`);
};

SchemaCompiler_PG.prototype.dropExtensionIfExists = function(extensionName) {
  this.pushQuery(
    `drop extension if exists ${this.formatter.wrap(extensionName)}`
  );
};

SchemaCompiler_PG.prototype.createExtension = function(extensionName) {
  this.pushQuery(`create extension ${this.formatter.wrap(extensionName)}`);
};

SchemaCompiler_PG.prototype.createExtensionIfNotExists = function(extensionName) {
  this.pushQuery(
    `create extension if not exists ${this.formatter.wrap(extensionName)}`
  );
};
|
||||
|
||||
module.exports = SchemaCompiler_PG;
|
||||
183
node_modules/knex/lib/dialects/postgres/schema/tablecompiler.js
generated
vendored
Normal file
183
node_modules/knex/lib/dialects/postgres/schema/tablecompiler.js
generated
vendored
Normal file
@@ -0,0 +1,183 @@
|
||||
/* eslint max-len: 0 */
|
||||
|
||||
// PostgreSQL Table Builder & Compiler
|
||||
// -------
|
||||
|
||||
const inherits = require('inherits');
|
||||
const TableCompiler = require('../../../schema/tablecompiler');
|
||||
|
||||
const { has } = require('lodash');
|
||||
|
||||
// PostgreSQL table compiler; all construction happens in the base
// TableCompiler, with dialect methods added on the prototype below.
function TableCompiler_PG() {
  TableCompiler.apply(this, arguments);
}
inherits(TableCompiler_PG, TableCompiler);
|
||||
|
||||
// Compile a rename column command.
|
||||
// Compile a rename column command.
TableCompiler_PG.prototype.renameColumn = function(from, to) {
  const fromName = this.formatter.wrap(from);
  const toName = this.formatter.wrap(to);
  return this.pushQuery({
    sql: `alter table ${this.tableName()} rename ${fromName} to ${toName}`,
  });
};

// Compile an add-columns statement for an existing table.
TableCompiler_PG.prototype.compileAdd = function(builder) {
  const table = this.formatter.wrap(builder);
  const columns = this.prefixArray('add column', this.getColumns(builder));
  return this.pushQuery({
    sql: `alter table ${table} ${columns.join(', ')}`,
  });
};

// Adds the "create" query to the query sequence.
TableCompiler_PG.prototype.createQuery = function(columns, ifNot) {
  const createStatement = ifNot
    ? 'create table if not exists '
    : 'create table ';
  let sql = `${createStatement}${this.tableName()} (${columns.sql.join(', ')})`;

  // PostgreSQL table inheritance (`inherits (...)`) when requested.
  if (this.single.inherits) {
    sql += ` inherits (${this.formatter.wrap(this.single.inherits)})`;
  }

  this.pushQuery({
    sql,
    bindings: columns.bindings,
  });

  if (has(this.single, 'comment')) {
    this.comment(this.single.comment);
  }
};

// Add or alter columns. The alter path rewrites each column in steps:
// drop default, drop not null, retype (with a cast), then re-apply
// default / not null when the column builder specified them.
TableCompiler_PG.prototype.addColumns = function(
  columns,
  prefix,
  colCompilers
) {
  if (prefix !== this.alterColumnsPrefix) {
    // base class implementation for normal add
    TableCompiler.prototype.addColumns.call(this, columns, prefix);
    return;
  }

  // alter columns
  for (const col of colCompilers) {
    const quotedTableName = this.tableName();
    const type = col.getColumnType();
    // We'd prefer to call this.formatter.wrapAsIdentifier here instead, however the context passed to
    // `this` instance is not that of the column, but of the table. Thus, we unfortunately have to call
    // `wrapIdentifier` here as well (it is already called once on the initial column operation) to give
    // our `alter` operation the correct `queryContext`. Refer to issue #2606 and PR #2612.
    const colName = this.client.wrapIdentifier(
      col.getColumnName(),
      col.columnBuilder.queryContext()
    );

    this.pushQuery({
      sql: `alter table ${quotedTableName} alter column ${colName} drop default`,
      bindings: [],
    });
    this.pushQuery({
      sql: `alter table ${quotedTableName} alter column ${colName} drop not null`,
      bindings: [],
    });
    this.pushQuery({
      sql: `alter table ${quotedTableName} alter column ${colName} type ${type} using (${colName}::${type})`,
      bindings: [],
    });

    const defaultTo = col.modified['defaultTo'];
    if (defaultTo) {
      const modifier = col.defaultTo.apply(col, defaultTo);
      this.pushQuery({
        sql: `alter table ${quotedTableName} alter column ${colName} set ${modifier}`,
        bindings: [],
      });
    }

    const nullable = col.modified['nullable'];
    if (nullable && nullable[0] === false) {
      this.pushQuery({
        sql: `alter table ${quotedTableName} alter column ${colName} set not null`,
        bindings: [],
      });
    }
  }
};

// Compiles the comment on the table.
// NOTE(review): the `comment` argument is ignored; the text is read
// from this.single.comment (preserved original behaviour).
TableCompiler_PG.prototype.comment = function(comment) {
  this.pushQuery(
    `comment on table ${this.tableName()} is '${this.single.comment}'`
  );
};

// Indexes:
// -------

TableCompiler_PG.prototype.primary = function(columns, constraintName) {
  const name = constraintName
    ? this.formatter.wrap(constraintName)
    : this.formatter.wrap(`${this.tableNameRaw}_pkey`);
  const cols = this.formatter.columnize(columns);
  this.pushQuery(
    `alter table ${this.tableName()} add constraint ${name} primary key (${cols})`
  );
};

TableCompiler_PG.prototype.unique = function(columns, indexName) {
  const name = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('unique', this.tableNameRaw, columns);
  const cols = this.formatter.columnize(columns);
  this.pushQuery(
    `alter table ${this.tableName()} add constraint ${name} unique (${cols})`
  );
};

TableCompiler_PG.prototype.index = function(columns, indexName, indexType) {
  const name = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('index', this.tableNameRaw, columns);
  const using = indexType ? ` using ${indexType}` : '';
  const cols = this.formatter.columnize(columns);
  this.pushQuery(
    `create index ${name} on ${this.tableName()}${using} (${cols})`
  );
};

TableCompiler_PG.prototype.dropPrimary = function(constraintName) {
  const name = constraintName
    ? this.formatter.wrap(constraintName)
    : this.formatter.wrap(this.tableNameRaw + '_pkey');
  this.pushQuery(`alter table ${this.tableName()} drop constraint ${name}`);
};

TableCompiler_PG.prototype.dropIndex = function(columns, indexName) {
  let name = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('index', this.tableNameRaw, columns);
  // Indexes live in the schema's namespace, so qualify when one is set.
  if (this.schemaNameRaw) {
    name = `${this.formatter.wrap(this.schemaNameRaw)}.${name}`;
  }
  this.pushQuery(`drop index ${name}`);
};

TableCompiler_PG.prototype.dropUnique = function(columns, indexName) {
  const name = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('unique', this.tableNameRaw, columns);
  this.pushQuery(`alter table ${this.tableName()} drop constraint ${name}`);
};

TableCompiler_PG.prototype.dropForeign = function(columns, indexName) {
  const name = indexName
    ? this.formatter.wrap(indexName)
    : this._indexCommand('foreign', this.tableNameRaw, columns);
  this.pushQuery(`alter table ${this.tableName()} drop constraint ${name}`);
};
|
||||
|
||||
module.exports = TableCompiler_PG;
|
||||
Reference in New Issue
Block a user