primer cambio
This commit is contained in:
241
node_modules/sequelize/lib/dialects/abstract/connection-manager.js
generated
vendored
Normal file
241
node_modules/sequelize/lib/dialects/abstract/connection-manager.js
generated
vendored
Normal file
@@ -0,0 +1,241 @@
|
||||
"use strict";
|
||||
const { Pool, TimeoutError } = require("sequelize-pool");
|
||||
const _ = require("lodash");
|
||||
const semver = require("semver");
|
||||
const errors = require("../../errors");
|
||||
const { logger } = require("../../utils/logger");
|
||||
const deprecations = require("../../utils/deprecations");
|
||||
const debug = logger.debugContext("pool");
|
||||
/**
 * Abstract connection manager.
 *
 * Owns the connection pool(s) for a Sequelize instance: a single pool in
 * the simple case, or separate read/write pools when replication is
 * configured. The actual connect/disconnect/validate work is delegated to
 * the concrete dialect's own connectionManager implementation.
 */
class ConnectionManager {
  /**
   * @param {object} dialect   Concrete dialect (supplies connectionManager and defaultVersion).
   * @param {object} sequelize Sequelize instance whose config/options drive pooling.
   */
  constructor(dialect, sequelize) {
    // Deep clone so the pool/replication normalization below cannot mutate
    // the caller-owned sequelize.config.
    const config = _.cloneDeep(sequelize.config);
    this.sequelize = sequelize;
    this.config = config;
    this.dialect = dialect;
    // Promise used by getConnection() to serialize the one-time database
    // version detection across concurrent callers.
    this.versionPromise = null;
    this.dialectName = this.sequelize.options.dialect;
    // pool:false (running without a pool) is no longer supported.
    if (config.pool === false) {
      throw new Error("Support for pool:false was removed in v4.0");
    }
    // Apply pooling defaults; idle/acquire/evict are in milliseconds.
    config.pool = _.defaults(config.pool || {}, {
      max: 5,
      min: 0,
      idle: 1e4,
      acquire: 6e4,
      evict: 1e3,
      validate: this._validate.bind(this)
    });
    this.initPools();
  }
  /**
   * Hooks up custom result parsers for the given data types.
   * Only types that declare a `parse` member are considered, and each such
   * type must list an entry for the current dialect in `dataType.types`.
   *
   * @param {object} dataTypes Map of data type definitions.
   * @throws {Error} When a type declares `parse` but has no entry for this dialect.
   */
  refreshTypeParser(dataTypes) {
    _.each(dataTypes, (dataType) => {
      if (Object.prototype.hasOwnProperty.call(dataType, "parse")) {
        if (dataType.types[this.dialectName]) {
          this._refreshTypeParser(dataType);
        } else {
          throw new Error(`Parse function not supported for type ${dataType.key} in dialect ${this.dialectName}`);
        }
      }
    });
  }
  /**
   * Loads the dialect-specific driver module, preferring an explicitly
   * configured module path, then a preloaded module object, then a plain
   * require of `moduleName`.
   *
   * @param {string} moduleName Package name to require by default.
   * @returns {object} The loaded driver module.
   * @throws {Error} With a friendlier message when the module cannot be found.
   */
  _loadDialectModule(moduleName) {
    try {
      if (this.sequelize.config.dialectModulePath) {
        return require(this.sequelize.config.dialectModulePath);
      }
      if (this.sequelize.config.dialectModule) {
        return this.sequelize.config.dialectModule;
      }
      return require(moduleName);
    } catch (err) {
      // Only translate "module not found"; any other error is rethrown as-is.
      if (err.code === "MODULE_NOT_FOUND") {
        if (this.sequelize.config.dialectModulePath) {
          throw new Error(`Unable to find dialect at ${this.sequelize.config.dialectModulePath}`);
        }
        throw new Error(`Please install ${moduleName} package manually`);
      }
      throw err;
    }
  }
  /**
   * Drains and then destroys the pool, if one exists. Shared by close()
   * and the process-exit path.
   */
  async _onProcessExit() {
    if (!this.pool) {
      return;
    }
    await this.pool.drain();
    debug("connection drain due to process exit");
    return await this.pool.destroyAllNow();
  }
  /**
   * Permanently closes the manager: any later getConnection() call throws,
   * and all pooled connections are drained and destroyed.
   */
  async close() {
    // Replace getConnection so late callers get a clear error instead of a
    // connection from a dead pool.
    this.getConnection = async function getConnection() {
      throw new Error("ConnectionManager.getConnection was called after the connection manager was closed!");
    };
    return await this._onProcessExit();
  }
  /**
   * Initializes this.pool. Without replication this is a single
   * sequelize-pool Pool; with replication it is an object exposing the
   * same acquire/release/destroy/destroyAllNow/drain surface over separate
   * read and write pools.
   */
  initPools() {
    const config = this.config;
    if (!config.replication) {
      this.pool = new Pool({
        name: "sequelize",
        create: () => this._connect(config),
        destroy: async (connection) => {
          const result = await this._disconnect(connection);
          debug("connection destroy");
          return result;
        },
        validate: config.pool.validate,
        max: config.pool.max,
        min: config.pool.min,
        acquireTimeoutMillis: config.pool.acquire,
        idleTimeoutMillis: config.pool.idle,
        reapIntervalMillis: config.pool.evict,
        maxUses: config.pool.maxUses
      });
      debug(`pool created with max/min: ${config.pool.max}/${config.pool.min}, no replication`);
      return;
    }
    // Normalize the replication config: `read` may be given as a single
    // entry, and both read and write entries inherit the base connection
    // settings (everything except `replication` itself).
    if (!Array.isArray(config.replication.read)) {
      config.replication.read = [config.replication.read];
    }
    config.replication.write = _.defaults(config.replication.write, _.omit(config, "replication"));
    config.replication.read = config.replication.read.map((readConfig) => _.defaults(readConfig, _.omit(this.config, "replication")));
    // Round-robin counter over the read replicas.
    let reads = 0;
    this.pool = {
      // Route a connection back to the pool it was created by, using the
      // queryType tag stamped on it at create time.
      release: (client) => {
        if (client.queryType === "read") {
          this.pool.read.release(client);
        } else {
          this.pool.write.release(client);
        }
      },
      acquire: (queryType, useMaster) => {
        useMaster = useMaster === void 0 ? false : useMaster;
        // Only plain SELECTs may go to a replica, and only when the caller
        // did not force the master.
        if (queryType === "SELECT" && !useMaster) {
          return this.pool.read.acquire();
        }
        return this.pool.write.acquire();
      },
      destroy: (connection) => {
        this.pool[connection.queryType].destroy(connection);
        debug("connection destroy");
      },
      destroyAllNow: async () => {
        await Promise.all([
          this.pool.read.destroyAllNow(),
          this.pool.write.destroyAllNow()
        ]);
        debug("all connections destroyed");
      },
      drain: async () => Promise.all([
        this.pool.write.drain(),
        this.pool.read.drain()
      ]),
      read: new Pool({
        name: "sequelize:read",
        create: async () => {
          // Pick the next replica round-robin and tag the connection so
          // release/destroy can route it back to the read pool.
          const nextRead = reads++ % config.replication.read.length;
          const connection = await this._connect(config.replication.read[nextRead]);
          connection.queryType = "read";
          return connection;
        },
        destroy: (connection) => this._disconnect(connection),
        validate: config.pool.validate,
        max: config.pool.max,
        min: config.pool.min,
        acquireTimeoutMillis: config.pool.acquire,
        idleTimeoutMillis: config.pool.idle,
        reapIntervalMillis: config.pool.evict,
        maxUses: config.pool.maxUses
      }),
      write: new Pool({
        name: "sequelize:write",
        create: async () => {
          const connection = await this._connect(config.replication.write);
          connection.queryType = "write";
          return connection;
        },
        destroy: (connection) => this._disconnect(connection),
        validate: config.pool.validate,
        max: config.pool.max,
        min: config.pool.min,
        acquireTimeoutMillis: config.pool.acquire,
        idleTimeoutMillis: config.pool.idle,
        reapIntervalMillis: config.pool.evict,
        maxUses: config.pool.maxUses
      })
    };
    debug(`pool created with max/min: ${config.pool.max}/${config.pool.min}, with replication`);
  }
  /**
   * Acquires a connection from the pool, running the
   * beforePoolAcquire/afterPoolAcquire hooks around the acquisition.
   *
   * On first use (options.databaseVersion === 0) a throwaway connection is
   * opened to detect the server version; concurrent callers share that
   * work through this.versionPromise.
   *
   * @param {object} [options] May carry `type` (query type) and `useMaster`.
   * @returns {Promise<object>} The acquired connection.
   * @throws {errors.ConnectionAcquireTimeoutError} When the pool acquire times out.
   */
  async getConnection(options) {
    options = options || {};
    if (this.sequelize.options.databaseVersion === 0) {
      if (!this.versionPromise) {
        this.versionPromise = (async () => {
          try {
            // Use the replication write config when present, otherwise the
            // base config.
            const connection = await this._connect(this.config.replication.write || this.config);
            const _options = {};
            _options.transaction = { connection };
            // Silence logging for the internal version query; the marker
            // property lets tests recognize this stub logger.
            _options.logging = () => {
            };
            _options.logging.__testLoggingFn = true;
            // Re-check: another caller may have filled this in meanwhile.
            if (this.sequelize.options.databaseVersion === 0) {
              const version = await this.sequelize.databaseVersion(_options);
              // Coerce loose version strings into semver; fall back to the
              // dialect default when the result still is not valid semver.
              const parsedVersion = _.get(semver.coerce(version), "version") || version;
              this.sequelize.options.databaseVersion = semver.valid(parsedVersion) ? parsedVersion : this.dialect.defaultVersion;
            }
            if (semver.lt(this.sequelize.options.databaseVersion, this.dialect.defaultVersion)) {
              deprecations.unsupportedEngine();
              debug(`Unsupported database engine version ${this.sequelize.options.databaseVersion}`);
            }
            this.versionPromise = null;
            return await this._disconnect(connection);
          } catch (err) {
            // Reset so a later getConnection() can retry version detection.
            this.versionPromise = null;
            throw err;
          }
        })();
      }
      await this.versionPromise;
    }
    let result;
    try {
      await this.sequelize.runHooks("beforePoolAcquire", options);
      result = await this.pool.acquire(options.type, options.useMaster);
      await this.sequelize.runHooks("afterPoolAcquire", result, options);
    } catch (error) {
      // Surface pool timeouts as the dedicated Sequelize error type.
      if (error instanceof TimeoutError)
        throw new errors.ConnectionAcquireTimeoutError(error);
      throw error;
    }
    debug("connection acquired");
    return result;
  }
  /**
   * Returns a connection to the pool.
   *
   * @param {object} connection A previously acquired connection.
   */
  releaseConnection(connection) {
    this.pool.release(connection);
    debug("connection released");
  }
  /**
   * Forcibly destroys a single pooled connection.
   *
   * @param {object} connection A previously acquired connection.
   */
  async destroyConnection(connection) {
    await this.pool.destroy(connection);
    debug(`connection ${connection.uuid} destroyed`);
  }
  /**
   * Opens a raw connection through the dialect, wrapped in the
   * beforeConnect/afterConnect hooks.
   *
   * @param {object} config Connection config for this connection.
   * @returns {Promise<object>} The new connection.
   */
  async _connect(config) {
    await this.sequelize.runHooks("beforeConnect", config);
    const connection = await this.dialect.connectionManager.connect(config);
    await this.sequelize.runHooks("afterConnect", connection, config);
    return connection;
  }
  /**
   * Closes a raw connection through the dialect, wrapped in the
   * beforeDisconnect/afterDisconnect hooks.
   *
   * @param {object} connection The connection to close.
   */
  async _disconnect(connection) {
    await this.sequelize.runHooks("beforeDisconnect", connection);
    await this.dialect.connectionManager.disconnect(connection);
    return this.sequelize.runHooks("afterDisconnect", connection);
  }
  /**
   * Validates a pooled connection via the dialect; a dialect without a
   * validate implementation is treated as always-valid.
   *
   * @param {object} connection The connection to check.
   * @returns {boolean}
   */
  _validate(connection) {
    if (!this.dialect.connectionManager.validate) {
      return true;
    }
    return this.dialect.connectionManager.validate(connection);
  }
}
|
||||
// CommonJS export, plus named and default aliases for ESM interop.
module.exports = ConnectionManager;
module.exports.ConnectionManager = ConnectionManager;
module.exports.default = ConnectionManager;
//# sourceMappingURL=connection-manager.js.map
|
||||
7
node_modules/sequelize/lib/dialects/abstract/connection-manager.js.map
generated
vendored
Normal file
7
node_modules/sequelize/lib/dialects/abstract/connection-manager.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
68
node_modules/sequelize/lib/dialects/abstract/index.js
generated
vendored
Normal file
68
node_modules/sequelize/lib/dialects/abstract/index.js
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
"use strict";
|
||||
/**
 * Base class for dialect definitions; concrete dialects override its
 * members to describe their capabilities.
 */
class AbstractDialect {
  /**
   * Whether this dialect can use \ in strings to escape string delimiters.
   *
   * @returns {boolean}
   */
  canBackslashEscape() {
    return false;
  }
}
|
||||
// Feature matrix describing what the (abstract) dialect supports; concrete
// dialects override individual entries. Quoted keys are SQL fragments.
AbstractDialect.prototype.supports = {
  "DEFAULT": true,
  "DEFAULT VALUES": false,
  "VALUES ()": false,
  "LIMIT ON UPDATE": false,
  "ON DUPLICATE KEY": true,
  "ORDER NULLS": false,
  "UNION": true,
  "UNION ALL": true,
  "RIGHT JOIN": true,
  /* does the dialect support returning values for inserted/updated fields */
  returnValues: false,
  /* features specific to autoIncrement values */
  autoIncrement: {
    /* does the dialect require modification of insert queries when inserting auto increment fields */
    identityInsert: false,
    /* does the dialect support inserting default/null values for autoincrement fields */
    defaultValue: true,
    /* does the dialect support updating autoincrement fields */
    update: true
  },
  /* Do we need to say DEFAULT for bulk insert */
  bulkDefault: false,
  schemas: false,
  transactions: true,
  settingIsolationLevelDuringTransaction: true,
  transactionOptions: {
    type: false
  },
  migrations: true,
  upserts: true,
  inserts: {
    /* dialect specific words for INSERT IGNORE or DO NOTHING */
    ignoreDuplicates: "",
    /* whether dialect supports ON DUPLICATE KEY UPDATE */
    updateOnDuplicate: false,
    /* dialect specific words for ON CONFLICT DO NOTHING */
    onConflictDoNothing: "",
    /* whether dialect supports ON CONFLICT WHERE */
    onConflictWhere: false,
    /* whether the dialect supports specifying conflict fields or not */
    conflictFields: false
  },
  constraints: {
    restrict: true,
    addConstraint: true,
    dropConstraint: true,
    unique: true,
    default: false,
    check: true,
    foreignKey: true,
    primaryKey: true
  },
  index: {
    collate: true,
    length: false,
    parser: false,
    concurrently: false,
    type: false,
    using: true,
    functionBased: false,
    operator: false
  },
  groupedLimit: true,
  indexViaAlter: false,
  JSON: false,
  /**
   * This dialect supports marking a column's constraints as deferrable.
   * e.g. 'DEFERRABLE' and 'INITIALLY DEFERRED'
   */
  deferrableConstraints: false,
  escapeStringConstants: false
};
|
||||
// CommonJS export, plus named and default aliases for ESM interop.
module.exports = AbstractDialect;
module.exports.AbstractDialect = AbstractDialect;
module.exports.default = AbstractDialect;
//# sourceMappingURL=index.js.map
|
||||
7
node_modules/sequelize/lib/dialects/abstract/index.js.map
generated
vendored
Normal file
7
node_modules/sequelize/lib/dialects/abstract/index.js.map
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 3,
|
||||
"sources": ["../../../src/dialects/abstract/index.js"],
|
||||
"sourcesContent": ["'use strict';\n\nclass AbstractDialect {\n /**\n * Whether this dialect can use \\ in strings to escape string delimiters.\n *\n * @returns {boolean}\n */\n canBackslashEscape() {\n return false;\n }\n}\n\nAbstractDialect.prototype.supports = {\n 'DEFAULT': true,\n 'DEFAULT VALUES': false,\n 'VALUES ()': false,\n 'LIMIT ON UPDATE': false,\n 'ON DUPLICATE KEY': true,\n 'ORDER NULLS': false,\n 'UNION': true,\n 'UNION ALL': true,\n 'RIGHT JOIN': true,\n\n /* does the dialect support returning values for inserted/updated fields */\n returnValues: false,\n\n /* features specific to autoIncrement values */\n autoIncrement: {\n /* does the dialect require modification of insert queries when inserting auto increment fields */\n identityInsert: false,\n\n /* does the dialect support inserting default/null values for autoincrement fields */\n defaultValue: true,\n\n /* does the dialect support updating autoincrement fields */\n update: true\n },\n /* Do we need to say DEFAULT for bulk insert */\n bulkDefault: false,\n schemas: false,\n transactions: true,\n settingIsolationLevelDuringTransaction: true,\n transactionOptions: {\n type: false\n },\n migrations: true,\n upserts: true,\n inserts: {\n ignoreDuplicates: '', /* dialect specific words for INSERT IGNORE or DO NOTHING */\n updateOnDuplicate: false, /* whether dialect supports ON DUPLICATE KEY UPDATE */\n onConflictDoNothing: '', /* dialect specific words for ON CONFLICT DO NOTHING */\n onConflictWhere: false, /* whether dialect supports ON CONFLICT WHERE */\n conflictFields: false /* whether the dialect supports specifying conflict fields or not */\n },\n constraints: {\n restrict: true,\n addConstraint: true,\n dropConstraint: true,\n unique: true,\n default: false,\n check: true,\n foreignKey: true,\n primaryKey: true\n },\n index: {\n collate: true,\n length: false,\n parser: false,\n concurrently: false,\n type: false,\n using: true,\n functionBased: false,\n operator: false\n },\n groupedLimit: 
true,\n indexViaAlter: false,\n JSON: false,\n /**\n * This dialect supports marking a column's constraints as deferrable.\n * e.g. 'DEFERRABLE' and 'INITIALLY DEFERRED'\n */\n deferrableConstraints: false,\n escapeStringConstants: false\n};\n\nmodule.exports = AbstractDialect;\nmodule.exports.AbstractDialect = AbstractDialect;\nmodule.exports.default = AbstractDialect;\n"],
|
||||
"mappings": ";AAEA,sBAAsB;AAAA,EAMpB,qBAAqB;AACnB,WAAO;AAAA;AAAA;AAIX,gBAAgB,UAAU,WAAW;AAAA,EACnC,WAAW;AAAA,EACX,kBAAkB;AAAA,EAClB,aAAa;AAAA,EACb,mBAAmB;AAAA,EACnB,oBAAoB;AAAA,EACpB,eAAe;AAAA,EACf,SAAS;AAAA,EACT,aAAa;AAAA,EACb,cAAc;AAAA,EAGd,cAAc;AAAA,EAGd,eAAe;AAAA,IAEb,gBAAgB;AAAA,IAGhB,cAAc;AAAA,IAGd,QAAQ;AAAA;AAAA,EAGV,aAAa;AAAA,EACb,SAAS;AAAA,EACT,cAAc;AAAA,EACd,wCAAwC;AAAA,EACxC,oBAAoB;AAAA,IAClB,MAAM;AAAA;AAAA,EAER,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,SAAS;AAAA,IACP,kBAAkB;AAAA,IAClB,mBAAmB;AAAA,IACnB,qBAAqB;AAAA,IACrB,iBAAiB;AAAA,IACjB,gBAAgB;AAAA;AAAA,EAElB,aAAa;AAAA,IACX,UAAU;AAAA,IACV,eAAe;AAAA,IACf,gBAAgB;AAAA,IAChB,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,YAAY;AAAA;AAAA,EAEd,OAAO;AAAA,IACL,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,MAAM;AAAA,IACN,OAAO;AAAA,IACP,eAAe;AAAA,IACf,UAAU;AAAA;AAAA,EAEZ,cAAc;AAAA,EACd,eAAe;AAAA,EACf,MAAM;AAAA,EAKN,uBAAuB;AAAA,EACvB,uBAAuB;AAAA;AAGzB,OAAO,UAAU;AACjB,OAAO,QAAQ,kBAAkB;AACjC,OAAO,QAAQ,UAAU;",
|
||||
"names": []
|
||||
}
|
||||
2143
node_modules/sequelize/lib/dialects/abstract/query-generator.js
generated
vendored
Normal file
2143
node_modules/sequelize/lib/dialects/abstract/query-generator.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
7
node_modules/sequelize/lib/dialects/abstract/query-generator.js.map
generated
vendored
Normal file
7
node_modules/sequelize/lib/dialects/abstract/query-generator.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
94
node_modules/sequelize/lib/dialects/abstract/query-generator/operators.js
generated
vendored
Normal file
94
node_modules/sequelize/lib/dialects/abstract/query-generator/operators.js
generated
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
"use strict";
|
||||
// Bundler-emitted helpers that down-level object spread ({ ...a, ...b }).
var __defProp = Object.defineProperty;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
// Assigns `key` on `obj` as a plain enumerable/configurable/writable
// property, going through defineProperty only when the key already exists.
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Copies b's own enumerable string- and symbol-keyed properties onto a,
// returning a (the runtime equivalent of spreading b into a).
var __spreadValues = (a, b) => {
  // Own enumerable string keys (for-in plus a hasOwnProperty filter).
  for (var prop in b || (b = {}))
    if (__hasOwnProp.call(b, prop))
      __defNormalProp(a, prop, b[prop]);
  // Own enumerable symbol keys, when the platform exposes them.
  if (__getOwnPropSymbols)
    for (var prop of __getOwnPropSymbols(b)) {
      if (__propIsEnum.call(b, prop))
        __defNormalProp(a, prop, b[prop]);
    }
  return a;
};
|
||||
const _ = require("lodash");
|
||||
const Op = require("../../../operators");
|
||||
const Utils = require("../../../utils");
|
||||
/**
 * Shared operator helpers for query generators: maps Op symbols to their
 * SQL fragments and supports user-configured operator aliases.
 */
const OperatorHelpers = {
  // Op symbol -> SQL operator text used when building WHERE clauses.
  OperatorMap: {
    [Op.eq]: "=",
    [Op.ne]: "!=",
    [Op.gte]: ">=",
    [Op.gt]: ">",
    [Op.lte]: "<=",
    [Op.lt]: "<",
    [Op.not]: "IS NOT",
    [Op.is]: "IS",
    [Op.in]: "IN",
    [Op.notIn]: "NOT IN",
    [Op.like]: "LIKE",
    [Op.notLike]: "NOT LIKE",
    [Op.iLike]: "ILIKE",
    [Op.notILike]: "NOT ILIKE",
    [Op.startsWith]: "LIKE",
    [Op.endsWith]: "LIKE",
    [Op.substring]: "LIKE",
    [Op.regexp]: "~",
    [Op.notRegexp]: "!~",
    [Op.iRegexp]: "~*",
    [Op.notIRegexp]: "!~*",
    [Op.between]: "BETWEEN",
    [Op.notBetween]: "NOT BETWEEN",
    [Op.overlap]: "&&",
    [Op.contains]: "@>",
    [Op.contained]: "<@",
    [Op.adjacent]: "-|-",
    [Op.strictLeft]: "<<",
    [Op.strictRight]: ">>",
    [Op.noExtendRight]: "&<",
    [Op.noExtendLeft]: "&>",
    [Op.any]: "ANY",
    [Op.all]: "ALL",
    [Op.and]: " AND ",
    [Op.or]: " OR ",
    [Op.col]: "COL",
    [Op.placeholder]: "$$PLACEHOLDER$$",
    [Op.match]: "@@"
  },
  // Alias key -> replacement operator, or `false` when aliasing is disabled.
  OperatorsAliasMap: {},
  /**
   * Installs (or clears) the alias map used by _replaceAliases. An empty
   * or missing `aliases` argument disables alias replacement entirely.
   *
   * @param {object} [aliases] Map of alias keys to operators.
   */
  setOperatorsAliases(aliases) {
    if (!aliases || _.isEmpty(aliases)) {
      this.OperatorsAliasMap = false;
    } else {
      // Shallow copy so later mutation of the caller's object has no effect.
      this.OperatorsAliasMap = __spreadValues({}, aliases);
    }
  },
  /**
   * Returns a copy of `orig` with aliased keys replaced via
   * OperatorsAliasMap, recursing into plain-object values. When aliasing
   * is disabled, `orig` is returned untouched.
   *
   * @param {object} orig Where-style object possibly containing alias keys.
   * @returns {object}
   */
  _replaceAliases(orig) {
    const obj = {};
    if (!this.OperatorsAliasMap) {
      return orig;
    }
    // First pass: carry over keys that are already operator symbols
    // (Utils.getOperators), recursing into plain-object values.
    Utils.getOperators(orig).forEach((op) => {
      const item = orig[op];
      if (_.isPlainObject(item)) {
        obj[op] = this._replaceAliases(item);
      } else {
        obj[op] = item;
      }
    });
    // Second pass: map own (string-keyed) properties through the alias
    // table, again recursing into plain objects.
    _.forOwn(orig, (item, prop) => {
      prop = this.OperatorsAliasMap[prop] || prop;
      if (_.isPlainObject(item)) {
        item = this._replaceAliases(item);
      }
      obj[prop] = item;
    });
    return obj;
  }
};
|
||||
// CommonJS export of the operator helper mixin.
module.exports = OperatorHelpers;
//# sourceMappingURL=operators.js.map
|
||||
7
node_modules/sequelize/lib/dialects/abstract/query-generator/operators.js.map
generated
vendored
Normal file
7
node_modules/sequelize/lib/dialects/abstract/query-generator/operators.js.map
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 3,
|
||||
"sources": ["../../../../src/dialects/abstract/query-generator/operators.js"],
|
||||
"sourcesContent": ["'use strict';\n\nconst _ = require('lodash');\nconst Op = require('../../../operators');\nconst Utils = require('../../../utils');\n\nconst OperatorHelpers = {\n OperatorMap: {\n [Op.eq]: '=',\n [Op.ne]: '!=',\n [Op.gte]: '>=',\n [Op.gt]: '>',\n [Op.lte]: '<=',\n [Op.lt]: '<',\n [Op.not]: 'IS NOT',\n [Op.is]: 'IS',\n [Op.in]: 'IN',\n [Op.notIn]: 'NOT IN',\n [Op.like]: 'LIKE',\n [Op.notLike]: 'NOT LIKE',\n [Op.iLike]: 'ILIKE',\n [Op.notILike]: 'NOT ILIKE',\n [Op.startsWith]: 'LIKE',\n [Op.endsWith]: 'LIKE',\n [Op.substring]: 'LIKE',\n [Op.regexp]: '~',\n [Op.notRegexp]: '!~',\n [Op.iRegexp]: '~*',\n [Op.notIRegexp]: '!~*',\n [Op.between]: 'BETWEEN',\n [Op.notBetween]: 'NOT BETWEEN',\n [Op.overlap]: '&&',\n [Op.contains]: '@>',\n [Op.contained]: '<@',\n [Op.adjacent]: '-|-',\n [Op.strictLeft]: '<<',\n [Op.strictRight]: '>>',\n [Op.noExtendRight]: '&<',\n [Op.noExtendLeft]: '&>',\n [Op.any]: 'ANY',\n [Op.all]: 'ALL',\n [Op.and]: ' AND ',\n [Op.or]: ' OR ',\n [Op.col]: 'COL',\n [Op.placeholder]: '$$PLACEHOLDER$$',\n [Op.match]: '@@'\n },\n\n OperatorsAliasMap: {},\n\n setOperatorsAliases(aliases) {\n if (!aliases || _.isEmpty(aliases)) {\n this.OperatorsAliasMap = false;\n } else {\n this.OperatorsAliasMap = { ...aliases };\n }\n },\n\n _replaceAliases(orig) {\n const obj = {};\n if (!this.OperatorsAliasMap) {\n return orig;\n }\n\n Utils.getOperators(orig).forEach(op => {\n const item = orig[op];\n if (_.isPlainObject(item)) {\n obj[op] = this._replaceAliases(item);\n } else {\n obj[op] = item;\n }\n });\n\n _.forOwn(orig, (item, prop) => {\n prop = this.OperatorsAliasMap[prop] || prop;\n if (_.isPlainObject(item)) {\n item = this._replaceAliases(item);\n }\n obj[prop] = item;\n });\n return obj;\n }\n};\n\nmodule.exports = OperatorHelpers;\n"],
|
||||
"mappings": ";;;;;;;;;;;;;;;;;AAEA,MAAM,IAAI,QAAQ;AAClB,MAAM,KAAK,QAAQ;AACnB,MAAM,QAAQ,QAAQ;AAEtB,MAAM,kBAAkB;AAAA,EACtB,aAAa;AAAA,KACV,GAAG,KAAK;AAAA,KACR,GAAG,KAAK;AAAA,KACR,GAAG,MAAM;AAAA,KACT,GAAG,KAAK;AAAA,KACR,GAAG,MAAM;AAAA,KACT,GAAG,KAAK;AAAA,KACR,GAAG,MAAM;AAAA,KACT,GAAG,KAAK;AAAA,KACR,GAAG,KAAK;AAAA,KACR,GAAG,QAAQ;AAAA,KACX,GAAG,OAAO;AAAA,KACV,GAAG,UAAU;AAAA,KACb,GAAG,QAAQ;AAAA,KACX,GAAG,WAAW;AAAA,KACd,GAAG,aAAa;AAAA,KAChB,GAAG,WAAW;AAAA,KACd,GAAG,YAAY;AAAA,KACf,GAAG,SAAS;AAAA,KACZ,GAAG,YAAY;AAAA,KACf,GAAG,UAAU;AAAA,KACb,GAAG,aAAa;AAAA,KAChB,GAAG,UAAU;AAAA,KACb,GAAG,aAAa;AAAA,KAChB,GAAG,UAAU;AAAA,KACb,GAAG,WAAW;AAAA,KACd,GAAG,YAAY;AAAA,KACf,GAAG,WAAW;AAAA,KACd,GAAG,aAAa;AAAA,KAChB,GAAG,cAAc;AAAA,KACjB,GAAG,gBAAgB;AAAA,KACnB,GAAG,eAAe;AAAA,KAClB,GAAG,MAAM;AAAA,KACT,GAAG,MAAM;AAAA,KACT,GAAG,MAAM;AAAA,KACT,GAAG,KAAK;AAAA,KACR,GAAG,MAAM;AAAA,KACT,GAAG,cAAc;AAAA,KACjB,GAAG,QAAQ;AAAA;AAAA,EAGd,mBAAmB;AAAA,EAEnB,oBAAoB,SAAS;AAC3B,QAAI,CAAC,WAAW,EAAE,QAAQ,UAAU;AAClC,WAAK,oBAAoB;AAAA,WACpB;AACL,WAAK,oBAAoB,mBAAK;AAAA;AAAA;AAAA,EAIlC,gBAAgB,MAAM;AACpB,UAAM,MAAM;AACZ,QAAI,CAAC,KAAK,mBAAmB;AAC3B,aAAO;AAAA;AAGT,UAAM,aAAa,MAAM,QAAQ,QAAM;AACrC,YAAM,OAAO,KAAK;AAClB,UAAI,EAAE,cAAc,OAAO;AACzB,YAAI,MAAM,KAAK,gBAAgB;AAAA,aAC1B;AACL,YAAI,MAAM;AAAA;AAAA;AAId,MAAE,OAAO,MAAM,CAAC,MAAM,SAAS;AAC7B,aAAO,KAAK,kBAAkB,SAAS;AACvC,UAAI,EAAE,cAAc,OAAO;AACzB,eAAO,KAAK,gBAAgB;AAAA;AAE9B,UAAI,QAAQ;AAAA;AAEd,WAAO;AAAA;AAAA;AAIX,OAAO,UAAU;",
|
||||
"names": []
|
||||
}
|
||||
41
node_modules/sequelize/lib/dialects/abstract/query-generator/transaction.js
generated
vendored
Normal file
41
node_modules/sequelize/lib/dialects/abstract/query-generator/transaction.js
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
"use strict";
|
||||
const uuidv4 = require("uuid").v4;
|
||||
/**
 * Mixin with SQL builders for transaction control statements. Builders
 * that return `undefined` signal that no statement is needed for that
 * situation (e.g. nested transactions).
 */
const TransactionQueries = {
  /**
   * Returns a query that sets the transaction isolation level. Nested
   * transactions inherit their parent's level, so nothing is emitted when
   * `options.parent` is set.
   *
   * @param {string} value The isolation level.
   * @param {object} options An object with options.
   * @returns {string|undefined} The generated sql query.
   * @private
   */
  setIsolationLevelQuery(value, options) {
    if (options.parent) return;
    return `SET TRANSACTION ISOLATION LEVEL ${value};`;
  },
  /**
   * Generates a fresh unique identifier for a transaction.
   *
   * @returns {string}
   * @private
   */
  generateTransactionId() {
    return uuidv4();
  },
  /**
   * Returns a query that starts a transaction; a nested transaction
   * becomes a savepoint instead.
   *
   * @param {Transaction} transaction
   * @returns {string} The generated sql query.
   * @private
   */
  startTransactionQuery(transaction) {
    // Savepoint identifiers are force-quoted (second argument `true`).
    if (transaction.parent) return `SAVEPOINT ${this.quoteIdentifier(transaction.name, true)};`;
    return "START TRANSACTION;";
  },
  // Deferred-constraint builders: intentionally no-ops in the abstract
  // generator; dialects that support them override these.
  deferConstraintsQuery() {},
  setConstraintQuery() {},
  setDeferredQuery() {},
  setImmediateQuery() {},
  /**
   * Returns a query that commits a transaction; committing a nested
   * transaction emits nothing.
   *
   * @param {Transaction} transaction An object with options.
   * @returns {string|undefined} The generated sql query.
   * @private
   */
  commitTransactionQuery(transaction) {
    if (transaction.parent) return;
    return "COMMIT;";
  },
  /**
   * Returns a query that rolls back a transaction, or rolls back to the
   * savepoint for a nested transaction.
   *
   * @param {Transaction} transaction
   * @returns {string} The generated sql query.
   * @private
   */
  rollbackTransactionQuery(transaction) {
    if (transaction.parent) return `ROLLBACK TO SAVEPOINT ${this.quoteIdentifier(transaction.name, true)};`;
    return "ROLLBACK;";
  }
};
|
||||
// CommonJS export of the transaction query mixin.
module.exports = TransactionQueries;
//# sourceMappingURL=transaction.js.map
|
||||
7
node_modules/sequelize/lib/dialects/abstract/query-generator/transaction.js.map
generated
vendored
Normal file
7
node_modules/sequelize/lib/dialects/abstract/query-generator/transaction.js.map
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 3,
|
||||
"sources": ["../../../../src/dialects/abstract/query-generator/transaction.js"],
|
||||
"sourcesContent": ["'use strict';\n\nconst uuidv4 = require('uuid').v4;\n\nconst TransactionQueries = {\n /**\n * Returns a query that sets the transaction isolation level.\n *\n * @param {string} value The isolation level.\n * @param {object} options An object with options.\n * @returns {string} The generated sql query.\n * @private\n */\n setIsolationLevelQuery(value, options) {\n if (options.parent) {\n return;\n }\n\n return `SET TRANSACTION ISOLATION LEVEL ${value};`;\n },\n\n generateTransactionId() {\n return uuidv4();\n },\n\n /**\n * Returns a query that starts a transaction.\n *\n * @param {Transaction} transaction\n * @returns {string} The generated sql query.\n * @private\n */\n startTransactionQuery(transaction) {\n if (transaction.parent) {\n // force quoting of savepoint identifiers for postgres\n return `SAVEPOINT ${this.quoteIdentifier(transaction.name, true)};`;\n }\n\n return 'START TRANSACTION;';\n },\n\n deferConstraintsQuery() {},\n\n setConstraintQuery() {},\n setDeferredQuery() {},\n setImmediateQuery() {},\n\n /**\n * Returns a query that commits a transaction.\n *\n * @param {Transaction} transaction An object with options.\n * @returns {string} The generated sql query.\n * @private\n */\n commitTransactionQuery(transaction) {\n if (transaction.parent) {\n return;\n }\n\n return 'COMMIT;';\n },\n\n /**\n * Returns a query that rollbacks a transaction.\n *\n * @param {Transaction} transaction\n * @returns {string} The generated sql query.\n * @private\n */\n rollbackTransactionQuery(transaction) {\n if (transaction.parent) {\n // force quoting of savepoint identifiers for postgres\n return `ROLLBACK TO SAVEPOINT ${this.quoteIdentifier(transaction.name, true)};`;\n }\n\n return 'ROLLBACK;';\n }\n};\n\nmodule.exports = TransactionQueries;\n"],
|
||||
"mappings": ";AAEA,MAAM,SAAS,QAAQ,QAAQ;AAE/B,MAAM,qBAAqB;AAAA,EASzB,uBAAuB,OAAO,SAAS;AACrC,QAAI,QAAQ,QAAQ;AAClB;AAAA;AAGF,WAAO,mCAAmC;AAAA;AAAA,EAG5C,wBAAwB;AACtB,WAAO;AAAA;AAAA,EAUT,sBAAsB,aAAa;AACjC,QAAI,YAAY,QAAQ;AAEtB,aAAO,aAAa,KAAK,gBAAgB,YAAY,MAAM;AAAA;AAG7D,WAAO;AAAA;AAAA,EAGT,wBAAwB;AAAA;AAAA,EAExB,qBAAqB;AAAA;AAAA,EACrB,mBAAmB;AAAA;AAAA,EACnB,oBAAoB;AAAA;AAAA,EASpB,uBAAuB,aAAa;AAClC,QAAI,YAAY,QAAQ;AACtB;AAAA;AAGF,WAAO;AAAA;AAAA,EAUT,yBAAyB,aAAa;AACpC,QAAI,YAAY,QAAQ;AAEtB,aAAO,yBAAyB,KAAK,gBAAgB,YAAY,MAAM;AAAA;AAGzE,WAAO;AAAA;AAAA;AAIX,OAAO,UAAU;",
|
||||
"names": []
|
||||
}
|
||||
569
node_modules/sequelize/lib/dialects/abstract/query-interface.js
generated
vendored
Normal file
569
node_modules/sequelize/lib/dialects/abstract/query-interface.js
generated
vendored
Normal file
@@ -0,0 +1,569 @@
|
||||
"use strict";
|
||||
// Bundler-emitted helpers that down-level object spread ({ ...a, ...b })
// and spread-with-descriptors for this transpiled module.
var __defProp = Object.defineProperty;
var __defProps = Object.defineProperties;
var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
// Assigns `key` on `obj` as a plain enumerable/configurable/writable
// property, going through defineProperty only when the key already exists.
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Copies b's own enumerable string- and symbol-keyed properties onto a,
// returning a (the runtime equivalent of spreading b into a).
var __spreadValues = (a, b) => {
  // Own enumerable string keys (for-in plus a hasOwnProperty filter).
  for (var prop in b || (b = {}))
    if (__hasOwnProp.call(b, prop))
      __defNormalProp(a, prop, b[prop]);
  // Own enumerable symbol keys, when the platform exposes them.
  if (__getOwnPropSymbols)
    for (var prop of __getOwnPropSymbols(b)) {
      if (__propIsEnum.call(b, prop))
        __defNormalProp(a, prop, b[prop]);
    }
  return a;
};
// Copies b's properties onto a preserving full property descriptors
// (used where spread must not trigger getters' plain-value copy).
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
|
||||
const _ = require("lodash");
|
||||
const Utils = require("../../utils");
|
||||
const DataTypes = require("../../data-types");
|
||||
const Transaction = require("../../transaction");
|
||||
const QueryTypes = require("../../query-types");
|
||||
class QueryInterface {
|
||||
constructor(sequelize, queryGenerator) {
|
||||
this.sequelize = sequelize;
|
||||
this.queryGenerator = queryGenerator;
|
||||
}
|
||||
async createDatabase(database, options) {
|
||||
options = options || {};
|
||||
const sql = this.queryGenerator.createDatabaseQuery(database, options);
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
async dropDatabase(database, options) {
|
||||
options = options || {};
|
||||
const sql = this.queryGenerator.dropDatabaseQuery(database);
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
async createSchema(schema, options) {
|
||||
options = options || {};
|
||||
const sql = this.queryGenerator.createSchema(schema);
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
async dropSchema(schema, options) {
|
||||
options = options || {};
|
||||
const sql = this.queryGenerator.dropSchema(schema);
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
async dropAllSchemas(options) {
|
||||
options = options || {};
|
||||
if (!this.queryGenerator._dialect.supports.schemas) {
|
||||
return this.sequelize.drop(options);
|
||||
}
|
||||
const schemas = await this.showAllSchemas(options);
|
||||
return Promise.all(schemas.map((schemaName) => this.dropSchema(schemaName, options)));
|
||||
}
|
||||
async showAllSchemas(options) {
|
||||
options = __spreadProps(__spreadValues({}, options), {
|
||||
raw: true,
|
||||
type: this.sequelize.QueryTypes.SELECT
|
||||
});
|
||||
const showSchemasSql = this.queryGenerator.showSchemasQuery(options);
|
||||
const schemaNames = await this.sequelize.query(showSchemasSql, options);
|
||||
return _.flatten(schemaNames.map((value) => value.schema_name ? value.schema_name : value));
|
||||
}
|
||||
async databaseVersion(options) {
|
||||
return await this.sequelize.query(this.queryGenerator.versionQuery(), __spreadProps(__spreadValues({}, options), { type: QueryTypes.VERSION }));
|
||||
}
|
||||
async createTable(tableName, attributes, options, model) {
|
||||
let sql = "";
|
||||
options = __spreadValues({}, options);
|
||||
if (options && options.uniqueKeys) {
|
||||
_.forOwn(options.uniqueKeys, (uniqueKey) => {
|
||||
if (uniqueKey.customIndex === void 0) {
|
||||
uniqueKey.customIndex = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
if (model) {
|
||||
options.uniqueKeys = options.uniqueKeys || model.uniqueKeys;
|
||||
}
|
||||
attributes = _.mapValues(attributes, (attribute) => this.sequelize.normalizeAttribute(attribute));
|
||||
await this.ensureEnums(tableName, attributes, options, model);
|
||||
if (!tableName.schema && (options.schema || !!model && model._schema)) {
|
||||
tableName = this.queryGenerator.addSchema({
|
||||
tableName,
|
||||
_schema: !!model && model._schema || options.schema
|
||||
});
|
||||
}
|
||||
attributes = this.queryGenerator.attributesToSQL(attributes, {
|
||||
table: tableName,
|
||||
context: "createTable",
|
||||
withoutForeignKeyConstraints: options.withoutForeignKeyConstraints
|
||||
});
|
||||
sql = this.queryGenerator.createTableQuery(tableName, attributes, options);
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
async tableExists(tableName, options) {
|
||||
const sql = this.queryGenerator.tableExistsQuery(tableName);
|
||||
const out = await this.sequelize.query(sql, __spreadProps(__spreadValues({}, options), {
|
||||
type: QueryTypes.SHOWTABLES
|
||||
}));
|
||||
return out.length === 1;
|
||||
}
|
||||
async dropTable(tableName, options) {
|
||||
options = __spreadValues({}, options);
|
||||
options.cascade = options.cascade || options.force || false;
|
||||
const sql = this.queryGenerator.dropTableQuery(tableName, options);
|
||||
await this.sequelize.query(sql, options);
|
||||
}
|
||||
async _dropAllTables(tableNames, skip, options) {
|
||||
for (const tableName of tableNames) {
|
||||
if (!skip.includes(tableName.tableName || tableName)) {
|
||||
await this.dropTable(tableName, __spreadProps(__spreadValues({}, options), { cascade: true }));
|
||||
}
|
||||
}
|
||||
}
|
||||
// Drops every table in the database except those listed in
// `options.skip`, first removing each table's foreign-key constraints
// so that drop order does not matter.
async dropAllTables(options) {
  options = options || {};
  const skip = options.skip || [];
  const tableNames = await this.showAllTables(options);
  const foreignKeys = await this.getForeignKeysForTables(tableNames, options);
  for (const tableName of tableNames) {
    let normalizedTableName = tableName;
    // getForeignKeysForTables keys its result by "schema.table" for
    // object-style names; mirror that normalization here.
    if (_.isObject(tableName)) {
      normalizedTableName = `${tableName.schema}.${tableName.tableName}`;
    }
    for (const foreignKey of foreignKeys[normalizedTableName]) {
      await this.sequelize.query(this.queryGenerator.dropForeignKeyQuery(tableName, foreignKey));
    }
  }
  await this._dropAllTables(tableNames, skip, options);
}
|
||||
async renameTable(before, after, options) {
|
||||
options = options || {};
|
||||
const sql = this.queryGenerator.renameTableQuery(before, after);
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
async showAllTables(options) {
|
||||
options = __spreadProps(__spreadValues({}, options), {
|
||||
raw: true,
|
||||
type: QueryTypes.SHOWTABLES
|
||||
});
|
||||
const showTablesSql = this.queryGenerator.showTablesQuery(this.sequelize.config.database);
|
||||
const tableNames = await this.sequelize.query(showTablesSql, options);
|
||||
return _.flatten(tableNames);
|
||||
}
|
||||
// Returns the column descriptions for `tableName`. `options` may be a
// plain schema name (string) or an object with `schema` /
// `schemaDelimiter`; a { tableName, schema } object may also be passed
// as `tableName`, in which case its schema takes precedence.
async describeTable(tableName, options) {
  let schema = null;
  let schemaDelimiter = null;
  if (typeof options === "string") {
    schema = options;
  } else if (typeof options === "object" && options !== null) {
    schema = options.schema || null;
    schemaDelimiter = options.schemaDelimiter || null;
  }
  if (typeof tableName === "object" && tableName !== null) {
    schema = tableName.schema;
    tableName = tableName.tableName;
  }
  const sql = this.queryGenerator.describeTableQuery(tableName, schema, schemaDelimiter);
  options = __spreadProps(__spreadValues({}, options), { type: QueryTypes.DESCRIBE });
  try {
    const data = await this.sequelize.query(sql, options);
    // An empty description is treated as "table does not exist".
    if (_.isEmpty(data)) {
      throw new Error(`No description found for "${tableName}" table. Check the table name and schema; remember, they _are_ case sensitive.`);
    }
    return data;
  } catch (e) {
    // ER_NO_SUCH_TABLE driver errors are normalized to the same message.
    if (e.original && e.original.code === "ER_NO_SUCH_TABLE") {
      throw new Error(`No description found for "${tableName}" table. Check the table name and schema; remember, they _are_ case sensitive.`);
    }
    throw e;
  }
}
|
||||
async addColumn(table, key, attribute, options) {
|
||||
if (!table || !key || !attribute) {
|
||||
throw new Error("addColumn takes at least 3 arguments (table, attribute name, attribute definition)");
|
||||
}
|
||||
options = options || {};
|
||||
attribute = this.sequelize.normalizeAttribute(attribute);
|
||||
return await this.sequelize.query(this.queryGenerator.addColumnQuery(table, key, attribute), options);
|
||||
}
|
||||
async removeColumn(tableName, attributeName, options) {
|
||||
return this.sequelize.query(this.queryGenerator.removeColumnQuery(tableName, attributeName), options);
|
||||
}
|
||||
normalizeAttribute(dataTypeOrOptions) {
|
||||
let attribute;
|
||||
if (Object.values(DataTypes).includes(dataTypeOrOptions)) {
|
||||
attribute = { type: dataTypeOrOptions, allowNull: true };
|
||||
} else {
|
||||
attribute = dataTypeOrOptions;
|
||||
}
|
||||
return this.sequelize.normalizeAttribute(attribute);
|
||||
}
|
||||
// Quotes a single identifier using the dialect's quoting rules; `force`
// is forwarded to the generator unchanged.
quoteIdentifier(identifier2, force) {
  return this.queryGenerator.quoteIdentifier(identifier2, force);
}
|
||||
// Quotes a (possibly dotted) identifier path using the dialect's rules.
quoteIdentifiers(identifiers) {
  return this.queryGenerator.quoteIdentifiers(identifiers);
}
|
||||
async changeColumn(tableName, attributeName, dataTypeOrOptions, options) {
|
||||
options = options || {};
|
||||
const query = this.queryGenerator.attributesToSQL({
|
||||
[attributeName]: this.normalizeAttribute(dataTypeOrOptions)
|
||||
}, {
|
||||
context: "changeColumn",
|
||||
table: tableName
|
||||
});
|
||||
const sql = this.queryGenerator.changeColumnQuery(tableName, query);
|
||||
return this.sequelize.query(sql, options);
|
||||
}
|
||||
async assertTableHasColumn(tableName, columnName, options) {
|
||||
const description = await this.describeTable(tableName, options);
|
||||
if (description[columnName]) {
|
||||
return description;
|
||||
}
|
||||
throw new Error(`Table ${tableName} doesn't have the column ${columnName}`);
|
||||
}
|
||||
// Renames a column by re-declaring it under the new name with the same
// type, nullability and default, then issuing the dialect's rename SQL.
async renameColumn(tableName, attrNameBefore, attrNameAfter, options) {
  options = options || {};
  // Throws if the source column does not exist.
  const data = (await this.assertTableHasColumn(tableName, attrNameBefore, options))[attrNameBefore];
  const _options = {};
  _options[attrNameAfter] = {
    attribute: attrNameAfter,
    type: data.type,
    allowNull: data.allowNull,
    defaultValue: data.defaultValue
  };
  // A NOT NULL column without a default must not carry DEFAULT NULL in
  // the generated SQL.
  if (data.defaultValue === null && !data.allowNull) {
    delete _options[attrNameAfter].defaultValue;
  }
  const sql = this.queryGenerator.renameColumnQuery(tableName, attrNameBefore, this.queryGenerator.attributesToSQL(_options));
  return await this.sequelize.query(sql, options);
}
|
||||
// Creates an index. Also supports the legacy call shape
// addIndex(tableName, options[, rawTablename]) where the column list
// lives in options.fields.
async addIndex(tableName, attributes, options, rawTablename) {
  if (!Array.isArray(attributes)) {
    // Shift arguments for the (tableName, options[, rawTablename]) form.
    rawTablename = options;
    options = attributes;
    attributes = options.fields;
  }
  if (!rawTablename) {
    rawTablename = tableName;
  }
  // Clone so the caller's options object is not mutated.
  options = Utils.cloneDeep(options);
  options.fields = attributes;
  const sql = this.queryGenerator.addIndexQuery(tableName, options, rawTablename);
  return await this.sequelize.query(sql, __spreadProps(__spreadValues({}, options), { supportsSearchPath: false }));
}
|
||||
async showIndex(tableName, options) {
|
||||
const sql = this.queryGenerator.showIndexesQuery(tableName, options);
|
||||
return await this.sequelize.query(sql, __spreadProps(__spreadValues({}, options), { type: QueryTypes.SHOWINDEXES }));
|
||||
}
|
||||
// Maps each table name to the list of its foreign-key constraint names.
// Object-style names are keyed as "schema.table". All tables are
// queried in parallel.
async getForeignKeysForTables(tableNames, options) {
  if (tableNames.length === 0) {
    return {};
  }
  options = __spreadProps(__spreadValues({}, options), { type: QueryTypes.FOREIGNKEYS });
  const results = await Promise.all(tableNames.map((tableName) => this.sequelize.query(this.queryGenerator.getForeignKeysQuery(tableName, this.sequelize.config.database), options)));
  const result = {};
  tableNames.forEach((tableName, i) => {
    if (_.isObject(tableName)) {
      tableName = `${tableName.schema}.${tableName.tableName}`;
    }
    // The per-table result may be an array of rows or a single row;
    // extract constraint_name from both shapes and drop falsy entries.
    result[tableName] = Array.isArray(results[i]) ? results[i].map((r) => r.constraint_name) : [results[i] && results[i].constraint_name];
    result[tableName] = result[tableName].filter(_.identity);
  });
  return result;
}
|
||||
async getForeignKeyReferencesForTable(tableName, options) {
|
||||
const queryOptions = __spreadProps(__spreadValues({}, options), {
|
||||
type: QueryTypes.FOREIGNKEYS
|
||||
});
|
||||
const query = this.queryGenerator.getForeignKeysQuery(tableName, this.sequelize.config.database);
|
||||
return this.sequelize.query(query, queryOptions);
|
||||
}
|
||||
async removeIndex(tableName, indexNameOrAttributes, options) {
|
||||
options = options || {};
|
||||
const sql = this.queryGenerator.removeIndexQuery(tableName, indexNameOrAttributes, options);
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
async addConstraint(tableName, options) {
|
||||
if (!options.fields) {
|
||||
throw new Error("Fields must be specified through options.fields");
|
||||
}
|
||||
if (!options.type) {
|
||||
throw new Error("Constraint type must be specified through options.type");
|
||||
}
|
||||
options = Utils.cloneDeep(options);
|
||||
const sql = this.queryGenerator.addConstraintQuery(tableName, options);
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
async showConstraint(tableName, constraintName, options) {
|
||||
const sql = this.queryGenerator.showConstraintsQuery(tableName, constraintName);
|
||||
return await this.sequelize.query(sql, __spreadProps(__spreadValues({}, options), { type: QueryTypes.SHOWCONSTRAINTS }));
|
||||
}
|
||||
async removeConstraint(tableName, constraintName, options) {
|
||||
return this.sequelize.query(this.queryGenerator.removeConstraintQuery(tableName, constraintName), options);
|
||||
}
|
||||
// Inserts a single row. When a model instance is given it is attached
// to the query (so returned values hydrate it) and its isNewRecord
// flag is cleared on success.
async insert(instance, tableName, values, options) {
  options = Utils.cloneDeep(options);
  // Propagate the model's hasTrigger flag to the query options.
  options.hasTrigger = instance && instance.constructor.options.hasTrigger;
  const sql = this.queryGenerator.insertQuery(tableName, values, instance && instance.constructor.rawAttributes, options);
  options.type = QueryTypes.INSERT;
  options.instance = instance;
  const results = await this.sequelize.query(sql, options);
  if (instance)
    results[0].isNewRecord = false;
  return results;
}
|
||||
// INSERT-or-UPDATE (upsert). Determines the conflict-target columns
// (upsertKeys) from options.conflictFields, else from a unique key or
// unique index covering an updated column, else the primary key.
// NOTE(review): the `where` parameter is accepted but unused here.
async upsert(tableName, insertValues, updateValues, where, options) {
  options = __spreadValues({}, options);
  const model = options.model;
  options.type = QueryTypes.UPSERT;
  options.updateOnDuplicate = Object.keys(updateValues);
  options.upsertKeys = options.conflictFields || [];
  if (options.upsertKeys.length === 0) {
    const primaryKeys = Object.values(model.primaryKeys).map((item) => item.field);
    const uniqueKeys = Object.values(model.uniqueKeys).filter((c) => c.fields.length > 0).map((c) => c.fields);
    const indexKeys = Object.values(model._indexes).filter((c) => c.unique && c.fields.length > 0).map((c) => c.fields);
    // Prefer a unique key, then a unique index, that covers one of the
    // columns being updated.
    for (const field of options.updateOnDuplicate) {
      const uniqueKey = uniqueKeys.find((fields) => fields.includes(field));
      if (uniqueKey) {
        options.upsertKeys = uniqueKey;
        break;
      }
      const indexKey = indexKeys.find((fields) => fields.includes(field));
      if (indexKey) {
        options.upsertKeys = indexKey;
        break;
      }
    }
    // Fall back to the primary key — also when the update itself
    // touches a primary-key column.
    if (options.upsertKeys.length === 0 || _.intersection(options.updateOnDuplicate, primaryKeys).length) {
      options.upsertKeys = primaryKeys;
    }
    options.upsertKeys = _.uniq(options.upsertKeys);
  }
  const sql = this.queryGenerator.insertQuery(tableName, insertValues, model.rawAttributes, options);
  return await this.sequelize.query(sql, options);
}
|
||||
async bulkInsert(tableName, records, options, attributes) {
|
||||
options = __spreadValues({}, options);
|
||||
options.type = QueryTypes.INSERT;
|
||||
const results = await this.sequelize.query(this.queryGenerator.bulkInsertQuery(tableName, records, options, attributes), options);
|
||||
return results[0];
|
||||
}
|
||||
async update(instance, tableName, values, identifier2, options) {
|
||||
options = __spreadValues({}, options);
|
||||
options.hasTrigger = instance && instance.constructor.options.hasTrigger;
|
||||
const sql = this.queryGenerator.updateQuery(tableName, values, identifier2, options, instance.constructor.rawAttributes);
|
||||
options.type = QueryTypes.UPDATE;
|
||||
options.instance = instance;
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
// UPDATE over many rows. Resolves the model (from options, else by
// table name) so results can be mapped, and clones inputs to avoid
// mutating the caller's objects.
async bulkUpdate(tableName, values, identifier2, options, attributes) {
  options = Utils.cloneDeep(options);
  if (typeof identifier2 === "object")
    identifier2 = Utils.cloneDeep(identifier2);
  const sql = this.queryGenerator.updateQuery(tableName, values, identifier2, options, attributes);
  const table = _.isObject(tableName) ? tableName : { tableName };
  const model = options.model ? options.model : _.find(this.sequelize.modelManager.models, { tableName: table.tableName });
  options.type = QueryTypes.BULKUPDATE;
  options.model = model;
  return await this.sequelize.query(sql, options);
}
|
||||
// Deletes the row backing `instance`. Associations declared with
// onDelete: "cascade" AND useHooks: true are cascaded manually here —
// each associated row is fetched and destroyed one-by-one so that its
// hooks run — before the instance's own DELETE is issued.
async delete(instance, tableName, identifier2, options) {
  const cascades = [];
  const sql = this.queryGenerator.deleteQuery(tableName, identifier2, {}, instance.constructor);
  options = __spreadValues({}, options);
  if (!!instance.constructor && !!instance.constructor.associations) {
    const keys = Object.keys(instance.constructor.associations);
    const length = keys.length;
    let association;
    for (let i = 0; i < length; i++) {
      association = instance.constructor.associations[keys[i]];
      if (association.options && association.options.onDelete && association.options.onDelete.toLowerCase() === "cascade" && association.options.useHooks === true) {
        // Remember the association getter name for the fetch below.
        cascades.push(association.accessors.get);
      }
    }
  }
  for (const cascade of cascades) {
    let instances = await instance[cascade](options);
    if (!instances)
      continue;
    if (!Array.isArray(instances))
      instances = [instances];
    for (const _instance of instances)
      await _instance.destroy(options);
  }
  options.instance = instance;
  return await this.sequelize.query(sql, options);
}
|
||||
async bulkDelete(tableName, where, options, model) {
|
||||
options = Utils.cloneDeep(options);
|
||||
options = _.defaults(options, { limit: null });
|
||||
if (options.truncate === true) {
|
||||
return this.sequelize.query(this.queryGenerator.truncateTableQuery(tableName, options), options);
|
||||
}
|
||||
if (typeof identifier === "object")
|
||||
where = Utils.cloneDeep(where);
|
||||
return await this.sequelize.query(this.queryGenerator.deleteQuery(tableName, where, options, model), options);
|
||||
}
|
||||
async select(model, tableName, optionsArg) {
|
||||
const options = __spreadProps(__spreadValues({}, optionsArg), { type: QueryTypes.SELECT, model });
|
||||
return await this.sequelize.query(this.queryGenerator.selectQuery(tableName, options, model), options);
|
||||
}
|
||||
async increment(model, tableName, where, incrementAmountsByField, extraAttributesToBeUpdated, options) {
|
||||
options = Utils.cloneDeep(options);
|
||||
const sql = this.queryGenerator.arithmeticQuery("+", tableName, where, incrementAmountsByField, extraAttributesToBeUpdated, options);
|
||||
options.type = QueryTypes.UPDATE;
|
||||
options.model = model;
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
async decrement(model, tableName, where, incrementAmountsByField, extraAttributesToBeUpdated, options) {
|
||||
options = Utils.cloneDeep(options);
|
||||
const sql = this.queryGenerator.arithmeticQuery("-", tableName, where, incrementAmountsByField, extraAttributesToBeUpdated, options);
|
||||
options.type = QueryTypes.UPDATE;
|
||||
options.model = model;
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
// Runs a plain SELECT and returns a single attribute's value from the
// (plain) result row, coerced to a JS type per options.dataType.
async rawSelect(tableName, options, attributeSelector, Model) {
  options = Utils.cloneDeep(options);
  options = _.defaults(options, {
    raw: true,
    plain: true,
    type: QueryTypes.SELECT
  });
  const sql = this.queryGenerator.selectQuery(tableName, options, Model);
  if (attributeSelector === void 0) {
    throw new Error("Please pass an attribute selector!");
  }
  const data = await this.sequelize.query(sql, options);
  // With plain: false explicitly set, the whole result is returned.
  if (!options.plain) {
    return data;
  }
  const result = data ? data[attributeSelector] : null;
  if (!options || !options.dataType) {
    return result;
  }
  // Coerce non-null values according to the declared data type.
  const dataType = options.dataType;
  if (dataType instanceof DataTypes.DECIMAL || dataType instanceof DataTypes.FLOAT) {
    if (result !== null) {
      return parseFloat(result);
    }
  }
  if (dataType instanceof DataTypes.INTEGER || dataType instanceof DataTypes.BIGINT) {
    if (result !== null) {
      return parseInt(result, 10);
    }
  }
  if (dataType instanceof DataTypes.DATE) {
    if (result !== null && !(result instanceof Date)) {
      return new Date(result);
    }
  }
  return result;
}
|
||||
async createTrigger(tableName, triggerName, timingType, fireOnArray, functionName, functionParams, optionsArray, options) {
|
||||
const sql = this.queryGenerator.createTrigger(tableName, triggerName, timingType, fireOnArray, functionName, functionParams, optionsArray);
|
||||
options = options || {};
|
||||
if (sql) {
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
}
|
||||
async dropTrigger(tableName, triggerName, options) {
|
||||
const sql = this.queryGenerator.dropTrigger(tableName, triggerName);
|
||||
options = options || {};
|
||||
if (sql) {
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
}
|
||||
async renameTrigger(tableName, oldTriggerName, newTriggerName, options) {
|
||||
const sql = this.queryGenerator.renameTrigger(tableName, oldTriggerName, newTriggerName);
|
||||
options = options || {};
|
||||
if (sql) {
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
}
|
||||
async createFunction(functionName, params, returnType, language, body, optionsArray, options) {
|
||||
const sql = this.queryGenerator.createFunction(functionName, params, returnType, language, body, optionsArray, options);
|
||||
options = options || {};
|
||||
if (sql) {
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
}
|
||||
async dropFunction(functionName, params, options) {
|
||||
const sql = this.queryGenerator.dropFunction(functionName, params);
|
||||
options = options || {};
|
||||
if (sql) {
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
}
|
||||
async renameFunction(oldFunctionName, params, newFunctionName, options) {
|
||||
const sql = this.queryGenerator.renameFunction(oldFunctionName, params, newFunctionName);
|
||||
options = options || {};
|
||||
if (sql) {
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
}
|
||||
// Intentional no-op: called from createTable before SQL generation as a
// hook point (presumably overridden by dialects that must create ENUM
// types up front — confirm against the dialect subclasses).
ensureEnums() {
}
|
||||
// Applies an isolation level to a transaction. No-op for nested
// transactions and when no level value is given.
async setIsolationLevel(transaction, value, options) {
  if (!transaction || !(transaction instanceof Transaction)) {
    throw new Error("Unable to set isolation level for a transaction without transaction object!");
  }
  if (transaction.parent || !value) {
    return;
  }
  options = __spreadProps(__spreadValues({}, options), { transaction: transaction.parent || transaction });
  const sql = this.queryGenerator.setIsolationLevelQuery(value, {
    parent: transaction.parent
  });
  // The generator may produce no SQL; nothing to execute then.
  if (!sql)
    return;
  return await this.sequelize.query(sql, options);
}
|
||||
async startTransaction(transaction, options) {
|
||||
if (!transaction || !(transaction instanceof Transaction)) {
|
||||
throw new Error("Unable to start a transaction without transaction object!");
|
||||
}
|
||||
options = __spreadProps(__spreadValues({}, options), { transaction: transaction.parent || transaction });
|
||||
options.transaction.name = transaction.parent ? transaction.name : void 0;
|
||||
const sql = this.queryGenerator.startTransactionQuery(transaction);
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
async deferConstraints(transaction, options) {
|
||||
options = __spreadProps(__spreadValues({}, options), { transaction: transaction.parent || transaction });
|
||||
const sql = this.queryGenerator.deferConstraintsQuery(options);
|
||||
if (sql) {
|
||||
return await this.sequelize.query(sql, options);
|
||||
}
|
||||
}
|
||||
// Commits a root transaction. Nested transactions are a no-op here —
// only the outermost transaction actually commits.
async commitTransaction(transaction, options) {
  if (!transaction || !(transaction instanceof Transaction)) {
    throw new Error("Unable to commit a transaction without transaction object!");
  }
  if (transaction.parent) {
    return;
  }
  options = __spreadProps(__spreadValues({}, options), {
    transaction: transaction.parent || transaction,
    supportsSearchPath: false,
    completesTransaction: true
  });
  const sql = this.queryGenerator.commitTransactionQuery(transaction);
  const promise = this.sequelize.query(sql, options);
  // The finished flag is set before the COMMIT promise resolves.
  transaction.finished = "commit";
  return await promise;
}
|
||||
// Rolls back a transaction; nested transactions roll back via their
// root transaction and forward their name.
async rollbackTransaction(transaction, options) {
  if (!transaction || !(transaction instanceof Transaction)) {
    throw new Error("Unable to rollback a transaction without transaction object!");
  }
  options = __spreadProps(__spreadValues({}, options), {
    transaction: transaction.parent || transaction,
    supportsSearchPath: false,
    completesTransaction: true
  });
  options.transaction.name = transaction.parent ? transaction.name : void 0;
  const sql = this.queryGenerator.rollbackTransactionQuery(transaction);
  const promise = this.sequelize.query(sql, options);
  // The finished flag is set before the ROLLBACK promise resolves.
  transaction.finished = "rollback";
  return await promise;
}
|
||||
}
|
||||
// CommonJS export of the abstract query interface.
exports.QueryInterface = QueryInterface;
|
||||
//# sourceMappingURL=query-interface.js.map
|
||||
7
node_modules/sequelize/lib/dialects/abstract/query-interface.js.map
generated
vendored
Normal file
7
node_modules/sequelize/lib/dialects/abstract/query-interface.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
546
node_modules/sequelize/lib/dialects/abstract/query.js
generated
vendored
Normal file
546
node_modules/sequelize/lib/dialects/abstract/query.js
generated
vendored
Normal file
@@ -0,0 +1,546 @@
|
||||
"use strict";
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __propIsEnum = Object.prototype.propertyIsEnumerable;
|
||||
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
||||
var __spreadValues = (a, b) => {
|
||||
for (var prop in b || (b = {}))
|
||||
if (__hasOwnProp.call(b, prop))
|
||||
__defNormalProp(a, prop, b[prop]);
|
||||
if (__getOwnPropSymbols)
|
||||
for (var prop of __getOwnPropSymbols(b)) {
|
||||
if (__propIsEnum.call(b, prop))
|
||||
__defNormalProp(a, prop, b[prop]);
|
||||
}
|
||||
return a;
|
||||
};
|
||||
const _ = require("lodash");
|
||||
const SqlString = require("../../sql-string");
|
||||
const QueryTypes = require("../../query-types");
|
||||
const Dot = require("dottie");
|
||||
const deprecations = require("../../utils/deprecations");
|
||||
const uuid = require("uuid").v4;
|
||||
const { safeStringifyJson } = require("../../utils.js");
|
||||
class AbstractQuery {
|
||||
// Base constructor for dialect query classes: captures the connection
// and originating instance/model, and normalizes the query options.
constructor(connection, sequelize, options) {
  // Per-query unique identifier.
  this.uuid = uuid();
  this.connection = connection;
  this.instance = options.instance;
  this.model = options.model;
  this.sequelize = sequelize;
  this.options = __spreadValues({
    plain: false,
    raw: false,
    logging: console.log
  }, options);
  this.checkLoggingOption();
  // rawErrors pins formatError to the base (pass-through) version,
  // bypassing any subclass override on the prototype.
  if (options.rawErrors) {
    this.formatError = AbstractQuery.prototype.formatError;
  }
}
|
||||
// Replaces $name / $1-style bind markers in `sql` with values from
// `values` (array for positional, object for named). `replacementFunc`
// may be omitted (its slot then holds `options`); when absent, a
// default is chosen: with options.skipValueReplace the marker is kept
// verbatim (only validated), otherwise the value is SQL-escaped inline.
// Returns [sql, []] — the bind list itself is always empty here.
static formatBindParameters(sql, values, dialect, replacementFunc, options) {
  if (!values) {
    return [sql, []];
  }
  options = options || {};
  // Argument shuffling: (sql, values, dialect, options) call shape.
  if (typeof replacementFunc !== "function") {
    options = replacementFunc || {};
    replacementFunc = void 0;
  }
  if (!replacementFunc) {
    if (options.skipValueReplace) {
      // Validate only: keep the marker when a value exists.
      replacementFunc = (match, key, values2) => {
        if (values2[key] !== void 0) {
          return match;
        }
        return void 0;
      };
    } else {
      // Default: inline the escaped value.
      replacementFunc = (match, key, values2, timeZone2, dialect2) => {
        if (values2[key] !== void 0) {
          return SqlString.escape(values2[key], timeZone2, dialect2);
        }
        return void 0;
      };
    }
  } else if (options.skipValueReplace) {
    // Wrap the custom func so the marker text is preserved whenever the
    // custom func would have produced a replacement.
    const origReplacementFunc = replacementFunc;
    replacementFunc = (match, key, values2, timeZone2, dialect2, options2) => {
      if (origReplacementFunc(match, key, values2, timeZone2, dialect2, options2) !== void 0) {
        return match;
      }
      return void 0;
    };
  }
  const timeZone = null;
  const list = Array.isArray(values);
  sql = sql.replace(/\B\$(\$|\w+)/g, (match, key) => {
    // "$$" is an escaped dollar sign.
    if (key === "$") {
      return options.skipUnescape ? match : key;
    }
    let replVal;
    if (list) {
      // Positional markers are 1-based; shift to a 0-based array index.
      if (key.match(/^[1-9]\d*$/)) {
        key = key - 1;
        replVal = replacementFunc(match, key, values, timeZone, dialect, options);
      }
    } else if (!key.match(/^\d*$/)) {
      replVal = replacementFunc(match, key, values, timeZone, dialect, options);
    }
    if (replVal === void 0) {
      throw new Error(`Named bind parameter "${match}" has no value in the given object.`);
    }
    return replVal;
  });
  return [sql, []];
}
|
||||
// Base behavior: return the driver error unchanged, only rewriting its
// stack to the caller-captured stack trace.
formatError(error, errStack) {
  error.stack = errStack;
  return error;
}
}
|
||||
// Abstract method: dialect-specific query subclasses must implement
// the actual execution.
run() {
  throw new Error("The run method wasn't overwritten!");
}
|
||||
checkLoggingOption() {
|
||||
if (this.options.logging === true) {
|
||||
deprecations.noTrueLogging();
|
||||
this.options.logging = console.log;
|
||||
}
|
||||
}
|
||||
// Name of the metadata field that carries an auto-generated insert id.
getInsertIdField() {
  return "insertId";
}
|
||||
getUniqueConstraintErrorMessage(field) {
|
||||
let message = field ? `${field} must be unique` : "Must be unique";
|
||||
if (field && this.model) {
|
||||
for (const key of Object.keys(this.model.uniqueKeys)) {
|
||||
if (this.model.uniqueKeys[key].fields.includes(field.replace(/"/g, ""))) {
|
||||
if (this.model.uniqueKeys[key].msg) {
|
||||
message = this.model.uniqueKeys[key].msg;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
isRawQuery() {
|
||||
return this.options.type === QueryTypes.RAW;
|
||||
}
|
||||
isVersionQuery() {
|
||||
return this.options.type === QueryTypes.VERSION;
|
||||
}
|
||||
isUpsertQuery() {
|
||||
return this.options.type === QueryTypes.UPSERT;
|
||||
}
|
||||
isInsertQuery(results, metaData) {
|
||||
let result = true;
|
||||
if (this.options.type === QueryTypes.INSERT) {
|
||||
return true;
|
||||
}
|
||||
result = result && this.sql.toLowerCase().startsWith("insert into");
|
||||
result = result && (!results || Object.prototype.hasOwnProperty.call(results, this.getInsertIdField()));
|
||||
result = result && (!metaData || Object.prototype.hasOwnProperty.call(metaData, this.getInsertIdField()));
|
||||
return result;
|
||||
}
|
||||
handleInsertQuery(results, metaData) {
|
||||
if (this.instance) {
|
||||
const autoIncrementAttribute = this.model.autoIncrementAttribute;
|
||||
let id = null;
|
||||
id = id || results && results[this.getInsertIdField()];
|
||||
id = id || metaData && metaData[this.getInsertIdField()];
|
||||
this.instance[autoIncrementAttribute] = id;
|
||||
}
|
||||
}
|
||||
isShowTablesQuery() {
|
||||
return this.options.type === QueryTypes.SHOWTABLES;
|
||||
}
|
||||
handleShowTablesQuery(results) {
|
||||
return _.flatten(results.map((resultSet) => Object.values(resultSet)));
|
||||
}
|
||||
isShowIndexesQuery() {
|
||||
return this.options.type === QueryTypes.SHOWINDEXES;
|
||||
}
|
||||
isShowConstraintsQuery() {
|
||||
return this.options.type === QueryTypes.SHOWCONSTRAINTS;
|
||||
}
|
||||
isDescribeQuery() {
|
||||
return this.options.type === QueryTypes.DESCRIBE;
|
||||
}
|
||||
isSelectQuery() {
|
||||
return this.options.type === QueryTypes.SELECT;
|
||||
}
|
||||
isBulkUpdateQuery() {
|
||||
return this.options.type === QueryTypes.BULKUPDATE;
|
||||
}
|
||||
isBulkDeleteQuery() {
|
||||
return this.options.type === QueryTypes.BULKDELETE;
|
||||
}
|
||||
isForeignKeysQuery() {
|
||||
return this.options.type === QueryTypes.FOREIGNKEYS;
|
||||
}
|
||||
isUpdateQuery() {
|
||||
return this.options.type === QueryTypes.UPDATE;
|
||||
}
|
||||
// Post-processes SELECT rows: applies the field->attribute name map,
// then either returns plain objects (raw), hydrates model instances
// after regrouping joined rows (hasJoin), or hydrates directly.
// With options.plain, a single row (or null) is returned.
handleSelectQuery(results) {
  let result = null;
  if (this.options.fieldMap) {
    // Rename DB field names back to model attribute names in place.
    const fieldMap = this.options.fieldMap;
    results = results.map((result2) => _.reduce(fieldMap, (result3, name, field) => {
      if (result3[field] !== void 0 && name !== field) {
        result3[name] = result3[field];
        delete result3[field];
      }
      return result3;
    }, result2));
  }
  if (this.options.raw) {
    // Raw mode: copy own properties into fresh plain objects,
    // optionally nesting dotted keys via dottie.
    result = results.map((result2) => {
      let o = {};
      for (const key in result2) {
        if (Object.prototype.hasOwnProperty.call(result2, key)) {
          o[key] = result2[key];
        }
      }
      if (this.options.nest) {
        o = Dot.transform(o);
      }
      return o;
    });
  } else if (this.options.hasJoin === true) {
    // Joined rows: regroup the flat row set by include tree before
    // building instances; checkExisting dedupes when a multi-association
    // can repeat parent rows.
    results = AbstractQuery._groupJoinData(results, {
      model: this.model,
      includeMap: this.options.includeMap,
      includeNames: this.options.includeNames
    }, {
      checkExisting: this.options.hasMultiAssociation
    });
    result = this.model.bulkBuild(results, {
      isNewRecord: false,
      include: this.options.include,
      includeNames: this.options.includeNames,
      includeMap: this.options.includeMap,
      includeValidated: true,
      attributes: this.options.originalAttributes || this.options.attributes,
      raw: true
    });
  } else {
    result = this.model.bulkBuild(results, {
      isNewRecord: false,
      raw: true,
      attributes: this.options.originalAttributes || this.options.attributes
    });
  }
  if (this.options.plain) {
    result = result.length === 0 ? null : result[0];
  }
  return result;
}
|
||||
isShowOrDescribeQuery() {
|
||||
let result = false;
|
||||
result = result || this.sql.toLowerCase().startsWith("show");
|
||||
result = result || this.sql.toLowerCase().startsWith("describe");
|
||||
return result;
|
||||
}
|
||||
isCallQuery() {
|
||||
return this.sql.toLowerCase().startsWith("call");
|
||||
}
|
||||
_logQuery(sql, debugContext, parameters) {
|
||||
const { connection, options } = this;
|
||||
const benchmark = this.sequelize.options.benchmark || options.benchmark;
|
||||
const logQueryParameters = this.sequelize.options.logQueryParameters || options.logQueryParameters;
|
||||
const startTime = Date.now();
|
||||
let logParameter = "";
|
||||
if (logQueryParameters && parameters) {
|
||||
const delimiter = sql.endsWith(";") ? "" : ";";
|
||||
let paramStr;
|
||||
if (Array.isArray(parameters)) {
|
||||
paramStr = parameters.map((p) => safeStringifyJson(p)).join(", ");
|
||||
} else {
|
||||
paramStr = safeStringifyJson(parameters);
|
||||
}
|
||||
logParameter = `${delimiter} ${paramStr}`;
|
||||
}
|
||||
const fmt = `(${connection.uuid || "default"}): ${sql}${logParameter}`;
|
||||
const msg = `Executing ${fmt}`;
|
||||
debugContext(msg);
|
||||
if (!benchmark) {
|
||||
this.sequelize.log(`Executing ${fmt}`, options);
|
||||
}
|
||||
return () => {
|
||||
const afterMsg = `Executed ${fmt}`;
|
||||
debugContext(afterMsg);
|
||||
if (benchmark) {
|
||||
this.sequelize.log(afterMsg, Date.now() - startTime, options);
|
||||
}
|
||||
};
|
||||
}
|
||||
/**
 * Converts flat, dot-notated join rows (e.g. `{ "id": 1, "tasks.id": 2 }`)
 * into nested result objects following the include tree.
 *
 * When `options.checkExisting` is true (hasMany/多-row joins), records are
 * deduplicated by chained identity hashes built from primary-key values
 * (falling back to a unique key when a model has no PK); otherwise each row
 * maps 1:1 to one result object.
 *
 * @param {Array<Object>} rows - raw rows keyed by dot-separated attribute paths
 * @param {Object} includeOptions - root of the include tree (`model`, `includeMap`, ...)
 * @param {Object} options - `{ checkExisting: boolean }`
 * @returns {Array<Object>} nested result objects
 */
static _groupJoinData(rows, includeOptions, options) {
  if (!rows.length) {
    return [];
  }
  // Hoisted loop cursors / scratch variables, reused across iterations.
  let i;
  let length;
  let $i;
  let $length;
  let rowsI;
  let row;
  const rowsLength = rows.length;
  let keys;
  let key;
  let keyI;
  let keyLength;
  let prevKey;
  let values;
  let topValues;
  let topExists;
  const checkExisting = options.checkExisting;
  // Identity-hash accumulators used for deduplication.
  let itemHash;
  let parentHash;
  let topHash;
  // With dedup the final count is unknown up front; otherwise preallocate.
  const results = checkExisting ? [] : new Array(rowsLength);
  const resultMap = {};
  const includeMap = {};
  // $-prefixed locals track current/previous key-prefix state per column.
  let $keyPrefix;
  let $keyPrefixString;
  let $prevKeyPrefixString; // NOTE(review): assigned below but never read
  let $prevKeyPrefix;
  let $lastKeyPrefix;
  let $current;
  let $parent;
  let previousPiece;
  // Walks one path segment deeper into the include tree, caching each visited
  // node in includeMap under both the full column key and the dotted prefix.
  const buildIncludeMap = (piece) => {
    if (Object.prototype.hasOwnProperty.call($current.includeMap, piece)) {
      includeMap[key] = $current = $current.includeMap[piece];
      if (previousPiece) {
        previousPiece = `${previousPiece}.${piece}`;
      } else {
        previousPiece = piece;
      }
      includeMap[previousPiece] = $current;
    }
  };
  // Memoized string helpers — column keys repeat for every row, so each
  // computation happens at most once per distinct key.
  const keyPrefixStringMemo = {};
  // "a.b.c" -> "a.b" ("" for top-level keys).
  const keyPrefixString = (key2, memo) => {
    if (!Object.prototype.hasOwnProperty.call(memo, key2)) {
      memo[key2] = key2.substr(0, key2.lastIndexOf("."));
    }
    return memo[key2];
  };
  const removeKeyPrefixMemo = {};
  // "a.b.c" -> "c" (unchanged for top-level keys).
  const removeKeyPrefix = (key2) => {
    if (!Object.prototype.hasOwnProperty.call(removeKeyPrefixMemo, key2)) {
      const index = key2.lastIndexOf(".");
      removeKeyPrefixMemo[key2] = key2.substr(index === -1 ? 0 : index + 1);
    }
    return removeKeyPrefixMemo[key2];
  };
  const keyPrefixMemo = {};
  // "a.b.c" -> ["a", "b"]. Keys sharing a prefix share ONE array instance,
  // which makes the `$prevKeyPrefix !== $keyPrefix` identity check below valid.
  const keyPrefix = (key2) => {
    if (!Object.prototype.hasOwnProperty.call(keyPrefixMemo, key2)) {
      const prefixString = keyPrefixString(key2, keyPrefixStringMemo);
      if (!Object.prototype.hasOwnProperty.call(keyPrefixMemo, prefixString)) {
        keyPrefixMemo[prefixString] = prefixString ? prefixString.split(".") : [];
      }
      keyPrefixMemo[key2] = keyPrefixMemo[prefixString];
    }
    return keyPrefixMemo[key2];
  };
  const lastKeyPrefixMemo = {};
  // "a.b.c" -> "b" (last path segment before the attribute; "" at top level).
  const lastKeyPrefix = (key2) => {
    if (!Object.prototype.hasOwnProperty.call(lastKeyPrefixMemo, key2)) {
      const prefix2 = keyPrefix(key2);
      const length2 = prefix2.length;
      lastKeyPrefixMemo[key2] = !length2 ? "" : prefix2[length2 - 1];
    }
    return lastKeyPrefixMemo[key2];
  };
  // Identity fallback for PK-less models: map the fields of the first unique
  // key back to attribute names via lodash.
  const getUniqueKeyAttributes = (model) => {
    let uniqueKeyAttributes2 = _.chain(model.uniqueKeys);
    uniqueKeyAttributes2 = uniqueKeyAttributes2.result(`${uniqueKeyAttributes2.findKey()}.fields`).map((field) => _.findKey(model.attributes, (chr) => chr.field === field)).value();
    return uniqueKeyAttributes2;
  };
  // Binary key values (Buffers) are hex-encoded so they concatenate into a
  // stable string hash.
  const stringify = (obj) => obj instanceof Buffer ? obj.toString("hex") : obj;
  let primaryKeyAttributes;
  let uniqueKeyAttributes;
  let prefix;
  for (rowsI = 0; rowsI < rowsLength; rowsI++) {
    row = rows[rowsI];
    if (rowsI === 0) {
      // Column names are identical on every row; sort them shallow-to-deep
      // once so parent records are materialized before their children.
      keys = _.sortBy(Object.keys(row), (item) => [item.split(".").length]);
      keyLength = keys.length;
    }
    if (checkExisting) {
      topExists = false;
      // Hash identifying the top-level record: concatenated PK values, or
      // unique-key values when the root model has no PK.
      $length = includeOptions.model.primaryKeyAttributes.length;
      topHash = "";
      if ($length === 1) {
        topHash = stringify(row[includeOptions.model.primaryKeyAttributes[0]]);
      } else if ($length > 1) {
        for ($i = 0; $i < $length; $i++) {
          topHash += stringify(row[includeOptions.model.primaryKeyAttributes[$i]]);
        }
      } else if (!_.isEmpty(includeOptions.model.uniqueKeys)) {
        uniqueKeyAttributes = getUniqueKeyAttributes(includeOptions.model);
        for ($i = 0; $i < uniqueKeyAttributes.length; $i++) {
          topHash += row[uniqueKeyAttributes[$i]];
        }
      }
    }
    topValues = values = {};
    $prevKeyPrefix = void 0;
    for (keyI = 0; keyI < keyLength; keyI++) {
      key = keys[keyI];
      $keyPrefixString = keyPrefixString(key, keyPrefixStringMemo);
      $keyPrefix = keyPrefix(key);
      // On the first row only, populate includeMap for each new prefix.
      if (rowsI === 0 && !Object.prototype.hasOwnProperty.call(includeMap, key)) {
        if (!$keyPrefix.length) {
          includeMap[key] = includeMap[""] = includeOptions;
        } else {
          $current = includeOptions;
          previousPiece = void 0;
          $keyPrefix.forEach(buildIncludeMap);
        }
      }
      // Prefix changed → we crossed into a different association's columns;
      // flush the record accumulated under the previous prefix. (Identity
      // comparison is safe because keyPrefix() shares array instances.)
      if ($prevKeyPrefix !== void 0 && $prevKeyPrefix !== $keyPrefix) {
        if (checkExisting) {
          // Build the parent-chained hash of the just-finished record and
          // attach it to its parent unless it was already seen.
          length = $prevKeyPrefix.length;
          $parent = null;
          parentHash = null;
          if (length) {
            for (i = 0; i < length; i++) {
              prefix = $parent ? `${$parent}.${$prevKeyPrefix[i]}` : $prevKeyPrefix[i];
              primaryKeyAttributes = includeMap[prefix].model.primaryKeyAttributes;
              $length = primaryKeyAttributes.length;
              itemHash = prefix;
              if ($length === 1) {
                itemHash += stringify(row[`${prefix}.${primaryKeyAttributes[0]}`]);
              } else if ($length > 1) {
                for ($i = 0; $i < $length; $i++) {
                  itemHash += stringify(row[`${prefix}.${primaryKeyAttributes[$i]}`]);
                }
              } else if (!_.isEmpty(includeMap[prefix].model.uniqueKeys)) {
                uniqueKeyAttributes = getUniqueKeyAttributes(includeMap[prefix].model);
                for ($i = 0; $i < uniqueKeyAttributes.length; $i++) {
                  itemHash += row[`${prefix}.${uniqueKeyAttributes[$i]}`];
                }
              }
              if (!parentHash) {
                parentHash = topHash;
              }
              // Chain parent hash in so equal child PKs under different
              // parents do not collide.
              itemHash = parentHash + itemHash;
              $parent = prefix;
              if (i < length - 1) {
                parentHash = itemHash;
              }
            }
          } else {
            itemHash = topHash;
          }
          if (itemHash === topHash) {
            if (!resultMap[itemHash]) {
              resultMap[itemHash] = values;
            } else {
              topExists = true;
            }
          } else if (!resultMap[itemHash]) {
            $parent = resultMap[parentHash];
            $lastKeyPrefix = lastKeyPrefix(prevKey);
            if (includeMap[prevKey].association.isSingleAssociation) {
              if ($parent) {
                $parent[$lastKeyPrefix] = resultMap[itemHash] = values;
              }
            } else {
              // Multi-association: collect children in an array.
              if (!$parent[$lastKeyPrefix]) {
                $parent[$lastKeyPrefix] = [];
              }
              $parent[$lastKeyPrefix].push(resultMap[itemHash] = values);
            }
          }
          // Start accumulating the next record.
          values = {};
        } else {
          // No dedup: re-anchor `values` at the correct nesting depth inside
          // the current top-level record.
          $current = topValues;
          length = $keyPrefix.length;
          if (length) {
            for (i = 0; i < length; i++) {
              if (i === length - 1) {
                values = $current[$keyPrefix[i]] = {};
              }
              $current = $current[$keyPrefix[i]] || {};
            }
          }
        }
      }
      // Store the column value under its unprefixed attribute name.
      values[removeKeyPrefix(key)] = row[key];
      prevKey = key;
      $prevKeyPrefix = $keyPrefix;
      $prevKeyPrefixString = $keyPrefixString;
    }
    if (checkExisting) {
      // Flush the final record of this row — same hash/attach logic as the
      // in-loop flush above.
      length = $prevKeyPrefix.length;
      $parent = null;
      parentHash = null;
      if (length) {
        for (i = 0; i < length; i++) {
          prefix = $parent ? `${$parent}.${$prevKeyPrefix[i]}` : $prevKeyPrefix[i];
          primaryKeyAttributes = includeMap[prefix].model.primaryKeyAttributes;
          $length = primaryKeyAttributes.length;
          itemHash = prefix;
          if ($length === 1) {
            itemHash += stringify(row[`${prefix}.${primaryKeyAttributes[0]}`]);
          } else if ($length > 0) {
            // NOTE(review): the parallel branch above uses `$length > 1`.
            // `> 0` behaves identically here because `$length === 1` was
            // already handled, but consider aligning the two for consistency.
            for ($i = 0; $i < $length; $i++) {
              itemHash += stringify(row[`${prefix}.${primaryKeyAttributes[$i]}`]);
            }
          } else if (!_.isEmpty(includeMap[prefix].model.uniqueKeys)) {
            uniqueKeyAttributes = getUniqueKeyAttributes(includeMap[prefix].model);
            for ($i = 0; $i < uniqueKeyAttributes.length; $i++) {
              itemHash += row[`${prefix}.${uniqueKeyAttributes[$i]}`];
            }
          }
          if (!parentHash) {
            parentHash = topHash;
          }
          itemHash = parentHash + itemHash;
          $parent = prefix;
          if (i < length - 1) {
            parentHash = itemHash;
          }
        }
      } else {
        itemHash = topHash;
      }
      if (itemHash === topHash) {
        if (!resultMap[itemHash]) {
          resultMap[itemHash] = values;
        } else {
          topExists = true;
        }
      } else if (!resultMap[itemHash]) {
        $parent = resultMap[parentHash];
        $lastKeyPrefix = lastKeyPrefix(prevKey);
        if (includeMap[prevKey].association.isSingleAssociation) {
          if ($parent) {
            $parent[$lastKeyPrefix] = resultMap[itemHash] = values;
          }
        } else {
          if (!$parent[$lastKeyPrefix]) {
            $parent[$lastKeyPrefix] = [];
          }
          $parent[$lastKeyPrefix].push(resultMap[itemHash] = values);
        }
      }
      // Append only when this row introduced a new top-level record.
      if (!topExists) {
        results.push(topValues);
      }
    } else {
      results[rowsI] = topValues;
    }
  }
  return results;
}
|
||||
}
|
||||
// Expose AbstractQuery as the CJS export, plus named and ESM-interop aliases
// so `require(...)`, `require(...).AbstractQuery` and transpiled
// `import AbstractQuery from ...` all resolve to the same class.
module.exports = AbstractQuery;
Object.assign(module.exports, {
  AbstractQuery,
  default: AbstractQuery
});
//# sourceMappingURL=query.js.map
|
||||
7
node_modules/sequelize/lib/dialects/abstract/query.js.map
generated
vendored
Normal file
7
node_modules/sequelize/lib/dialects/abstract/query.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
Reference in New Issue
Block a user