first change

beseira13
2026-01-19 12:12:38 -03:00
parent 5f59dba52d
commit 44990f015a
4759 changed files with 588702 additions and 0 deletions


@@ -0,0 +1,244 @@
"use strict";
const _ = require("lodash");
const AbstractConnectionManager = require("../abstract/connection-manager");
const { logger } = require("../../utils/logger");
const debug = logger.debugContext("connection:pg");
const sequelizeErrors = require("../../errors");
const semver = require("semver");
const dataTypes = require("../../data-types");
const momentTz = require("moment-timezone");
const { promisify } = require("util");
class ConnectionManager extends AbstractConnectionManager {
constructor(dialect, sequelize) {
sequelize.config.port = sequelize.config.port || 5432;
super(dialect, sequelize);
const pgLib = this._loadDialectModule("pg");
this.lib = this.sequelize.config.native ? pgLib.native : pgLib;
this._clearDynamicOIDs();
this._clearTypeParser();
this.refreshTypeParser(dataTypes.postgres);
}
_refreshTypeParser(dataType) {
const arrayParserBuilder = (parser2) => {
return (value) => this.lib.types.arrayParser.create(value, parser2).parse();
};
const rangeParserBuilder = (parser2) => {
return (value) => dataType.parse(value, { parser: parser2 });
};
if (dataType.key.toLowerCase() === "range") {
for (const name in this.nameOidMap) {
const entry = this.nameOidMap[name];
if (!entry.rangeOid)
continue;
const rangeParser = rangeParserBuilder(this.getTypeParser(entry.oid));
const arrayRangeParser = arrayParserBuilder(rangeParser);
this.oidParserMap.set(entry.rangeOid, rangeParser);
if (!entry.arrayRangeOid)
continue;
this.oidParserMap.set(entry.arrayRangeOid, arrayRangeParser);
}
return;
}
const parser = (value) => dataType.parse(value);
const arrayParser = arrayParserBuilder(parser);
if (dataType.key.toLowerCase() === "enum") {
this.enumOids.oids.forEach((oid) => {
this.oidParserMap.set(oid, parser);
});
this.enumOids.arrayOids.forEach((arrayOid) => {
this.oidParserMap.set(arrayOid, arrayParser);
});
return;
}
dataType.types.postgres.forEach((name) => {
if (!this.nameOidMap[name])
return;
this.oidParserMap.set(this.nameOidMap[name].oid, parser);
if (!this.nameOidMap[name].arrayOid)
return;
this.oidParserMap.set(this.nameOidMap[name].arrayOid, arrayParser);
});
}
_clearTypeParser() {
this.oidParserMap = /* @__PURE__ */ new Map();
}
getTypeParser(oid, ...args) {
if (this.oidParserMap.get(oid))
return this.oidParserMap.get(oid);
return this.lib.types.getTypeParser(oid, ...args);
}
async connect(config) {
config.user = config.username;
const connectionConfig = _.pick(config, [
"user",
"password",
"host",
"database",
"port"
]);
connectionConfig.types = {
getTypeParser: ConnectionManager.prototype.getTypeParser.bind(this)
};
if (config.dialectOptions) {
_.merge(connectionConfig, _.pick(config.dialectOptions, [
"application_name",
"ssl",
"client_encoding",
"binary",
"keepAlive",
"statement_timeout",
"query_timeout",
"connectionTimeoutMillis",
"idle_in_transaction_session_timeout",
"lock_timeout",
"options",
"stream"
]));
}
const connection = await new Promise((resolve, reject) => {
let responded = false;
const connection2 = new this.lib.Client(connectionConfig);
const parameterHandler = (message) => {
switch (message.parameterName) {
case "server_version":
if (this.sequelize.options.databaseVersion === 0) {
const version = semver.coerce(message.parameterValue).version;
this.sequelize.options.databaseVersion = semver.valid(version) ? version : this.dialect.defaultVersion;
}
break;
case "standard_conforming_strings":
connection2["standard_conforming_strings"] = message.parameterValue;
break;
}
};
const endHandler = () => {
debug("connection timeout");
if (!responded) {
reject(new sequelizeErrors.ConnectionTimedOutError(new Error("Connection timed out")));
}
};
connection2.once("end", endHandler);
if (!this.sequelize.config.native) {
connection2.connection.on("parameterStatus", parameterHandler);
}
connection2.connect((err) => {
responded = true;
if (!this.sequelize.config.native) {
connection2.connection.removeListener("parameterStatus", parameterHandler);
}
if (err) {
if (err.code) {
switch (err.code) {
case "ECONNREFUSED":
reject(new sequelizeErrors.ConnectionRefusedError(err));
break;
case "ENOTFOUND":
reject(new sequelizeErrors.HostNotFoundError(err));
break;
case "EHOSTUNREACH":
reject(new sequelizeErrors.HostNotReachableError(err));
break;
case "EINVAL":
reject(new sequelizeErrors.InvalidConnectionError(err));
break;
default:
reject(new sequelizeErrors.ConnectionError(err));
break;
}
} else {
reject(new sequelizeErrors.ConnectionError(err));
}
} else {
debug("connection acquired");
connection2.removeListener("end", endHandler);
resolve(connection2);
}
});
});
connection.on("error", (error) => {
connection._invalid = true;
debug(`connection error ${error.code || error.message}`);
this.pool.destroy(connection);
});
let query = "";
if (this.sequelize.options.standardConformingStrings !== false && connection["standard_conforming_strings"] !== "on") {
query += "SET standard_conforming_strings=on;";
}
if (this.sequelize.options.clientMinMessages !== void 0) {
console.warn('Usage of "options.clientMinMessages" is deprecated and will be removed in v7.');
console.warn('Please use the sequelize option "dialectOptions.clientMinMessages" instead.');
}
if (!(config.dialectOptions && config.dialectOptions.clientMinMessages && config.dialectOptions.clientMinMessages.toLowerCase() === "ignore" || this.sequelize.options.clientMinMessages === false)) {
const clientMinMessages = config.dialectOptions && config.dialectOptions.clientMinMessages || this.sequelize.options.clientMinMessages || "warning";
query += `SET client_min_messages TO ${clientMinMessages};`;
}
if (!this.sequelize.config.keepDefaultTimezone) {
const isZone = !!momentTz.tz.zone(this.sequelize.options.timezone);
if (isZone) {
query += `SET TIME ZONE '${this.sequelize.options.timezone}';`;
} else {
query += `SET TIME ZONE INTERVAL '${this.sequelize.options.timezone}' HOUR TO MINUTE;`;
}
}
if (query) {
await connection.query(query);
}
if (Object.keys(this.nameOidMap).length === 0 && this.enumOids.oids.length === 0 && this.enumOids.arrayOids.length === 0) {
await this._refreshDynamicOIDs(connection);
}
return connection;
}
async disconnect(connection) {
if (connection._ending) {
debug("connection tried to disconnect but was already at ENDING state");
return;
}
return await promisify((callback) => connection.end(callback))();
}
validate(connection) {
return !connection._invalid && !connection._ending;
}
async _refreshDynamicOIDs(connection) {
const databaseVersion = this.sequelize.options.databaseVersion;
const supportedVersion = "8.3.0";
if ((databaseVersion && semver.gte(databaseVersion, supportedVersion)) === false) {
return;
}
const results = await (connection || this.sequelize).query("WITH ranges AS ( SELECT pg_range.rngtypid, pg_type.typname AS rngtypname, pg_type.typarray AS rngtyparray, pg_range.rngsubtype FROM pg_range LEFT OUTER JOIN pg_type ON pg_type.oid = pg_range.rngtypid)SELECT pg_type.typname, pg_type.typtype, pg_type.oid, pg_type.typarray, ranges.rngtypname, ranges.rngtypid, ranges.rngtyparray FROM pg_type LEFT OUTER JOIN ranges ON pg_type.oid = ranges.rngsubtype WHERE (pg_type.typtype IN('b', 'e'));");
let result = Array.isArray(results) ? results.pop() : results;
if (Array.isArray(result)) {
if (result[0].command === "SET") {
result = result.pop();
}
}
const newNameOidMap = {};
const newEnumOids = { oids: [], arrayOids: [] };
for (const row of result.rows) {
if (row.typtype === "e") {
newEnumOids.oids.push(row.oid);
if (row.typarray)
newEnumOids.arrayOids.push(row.typarray);
continue;
}
newNameOidMap[row.typname] = { oid: row.oid };
if (row.typarray)
newNameOidMap[row.typname].arrayOid = row.typarray;
if (row.rngtypid) {
newNameOidMap[row.typname].rangeOid = row.rngtypid;
if (row.rngtyparray)
newNameOidMap[row.typname].arrayRangeOid = row.rngtyparray;
}
}
this.nameOidMap = newNameOidMap;
this.enumOids = newEnumOids;
this.refreshTypeParser(dataTypes.postgres);
}
_clearDynamicOIDs() {
this.nameOidMap = {};
this.enumOids = { oids: [], arrayOids: [] };
}
}
module.exports = ConnectionManager;
module.exports.ConnectionManager = ConnectionManager;
module.exports.default = ConnectionManager;
//# sourceMappingURL=connection-manager.js.map
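The connection manager above forwards only a whitelisted subset of options to the pg client and applies session settings (standard_conforming_strings, client_min_messages, the time zone) right after connecting. Below is a minimal usage sketch, not part of this commit: the database name, credentials and application_name are hypothetical, and it assumes sequelize and pg are installed with a reachable local Postgres.

"use strict";
const { Sequelize } = require("sequelize");

// Hypothetical credentials. Only the dialectOptions picked above
// (application_name, ssl, statement_timeout, ...) reach the pg Client.
const sequelize = new Sequelize("mydb", "myuser", "mypassword", {
  dialect: "postgres",
  host: "localhost",
  port: 5432, // the constructor above defaults to 5432 when omitted
  timezone: "-03:00", // applied via SET TIME ZONE INTERVAL ... HOUR TO MINUTE on connect
  dialectOptions: {
    application_name: "demo-app",
    statement_timeout: 10000
  }
});

sequelize.authenticate()
  .then(() => console.log("connection acquired"))
  .catch((err) => console.error("connection failed:", err.name));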

File diff suppressed because one or more lines are too long


@@ -0,0 +1,442 @@
"use strict";
const _ = require("lodash");
const wkx = require("wkx");
module.exports = (BaseTypes) => {
const warn = BaseTypes.ABSTRACT.warn.bind(void 0, "http://www.postgresql.org/docs/9.4/static/datatype.html");
function removeUnsupportedIntegerOptions(dataType) {
if (dataType._length || dataType.options.length || dataType._unsigned || dataType._zerofill) {
warn(`PostgreSQL does not support '${dataType.key}' with LENGTH, UNSIGNED or ZEROFILL. Plain '${dataType.key}' will be used instead.`);
dataType._length = void 0;
dataType.options.length = void 0;
dataType._unsigned = void 0;
dataType._zerofill = void 0;
}
}
BaseTypes.UUID.types.postgres = ["uuid"];
BaseTypes.CIDR.types.postgres = ["cidr"];
BaseTypes.INET.types.postgres = ["inet"];
BaseTypes.MACADDR.types.postgres = ["macaddr"];
BaseTypes.TSVECTOR.types.postgres = ["tsvector"];
BaseTypes.JSON.types.postgres = ["json"];
BaseTypes.JSONB.types.postgres = ["jsonb"];
BaseTypes.TIME.types.postgres = ["time"];
class DATEONLY extends BaseTypes.DATEONLY {
_stringify(value, options) {
if (value === Infinity) {
return "Infinity";
}
if (value === -Infinity) {
return "-Infinity";
}
return super._stringify(value, options);
}
_sanitize(value, options) {
if ((!options || options && !options.raw) && value !== Infinity && value !== -Infinity) {
if (typeof value === "string") {
const lower = value.toLowerCase();
if (lower === "infinity") {
return Infinity;
}
if (lower === "-infinity") {
return -Infinity;
}
}
return super._sanitize(value);
}
return value;
}
static parse(value) {
if (value === "infinity") {
return Infinity;
}
if (value === "-infinity") {
return -Infinity;
}
return value;
}
}
BaseTypes.DATEONLY.types.postgres = ["date"];
class DECIMAL extends BaseTypes.DECIMAL {
static parse(value) {
return value;
}
}
BaseTypes.DECIMAL.types.postgres = ["numeric"];
class STRING extends BaseTypes.STRING {
toSql() {
if (this._binary) {
return "BYTEA";
}
return super.toSql();
}
}
BaseTypes.STRING.types.postgres = ["varchar"];
class TEXT extends BaseTypes.TEXT {
toSql() {
if (this._length) {
warn("PostgreSQL does not support TEXT with options. Plain `TEXT` will be used instead.");
this._length = void 0;
}
return "TEXT";
}
}
BaseTypes.TEXT.types.postgres = ["text"];
class CITEXT extends BaseTypes.CITEXT {
static parse(value) {
return value;
}
}
BaseTypes.CITEXT.types.postgres = ["citext"];
class CHAR extends BaseTypes.CHAR {
toSql() {
if (this._binary) {
return "BYTEA";
}
return super.toSql();
}
}
BaseTypes.CHAR.types.postgres = ["char", "bpchar"];
class BOOLEAN extends BaseTypes.BOOLEAN {
toSql() {
return "BOOLEAN";
}
_sanitize(value) {
if (value !== null && value !== void 0) {
if (Buffer.isBuffer(value) && value.length === 1) {
value = value[0];
}
if (typeof value === "string") {
return ["true", "t"].includes(value) ? true : ["false", "f"].includes(value) ? false : value;
}
if (typeof value === "number") {
return value === 1 ? true : value === 0 ? false : value;
}
}
return value;
}
}
BOOLEAN.parse = BOOLEAN.prototype._sanitize;
BaseTypes.BOOLEAN.types.postgres = ["bool"];
class DATE extends BaseTypes.DATE {
toSql() {
return "TIMESTAMP WITH TIME ZONE";
}
validate(value) {
if (value !== Infinity && value !== -Infinity) {
return super.validate(value);
}
return true;
}
_stringify(value, options) {
if (value === Infinity) {
return "Infinity";
}
if (value === -Infinity) {
return "-Infinity";
}
return super._stringify(value, options);
}
_sanitize(value, options) {
if ((!options || options && !options.raw) && !(value instanceof Date) && !!value && value !== Infinity && value !== -Infinity) {
if (typeof value === "string") {
const lower = value.toLowerCase();
if (lower === "infinity") {
return Infinity;
}
if (lower === "-infinity") {
return -Infinity;
}
}
return new Date(value);
}
return value;
}
}
BaseTypes.DATE.types.postgres = ["timestamptz"];
class TINYINT extends BaseTypes.TINYINT {
constructor(length) {
super(length);
removeUnsupportedIntegerOptions(this);
}
}
BaseTypes.TINYINT.types.postgres = ["int2"];
class SMALLINT extends BaseTypes.SMALLINT {
constructor(length) {
super(length);
removeUnsupportedIntegerOptions(this);
}
}
BaseTypes.SMALLINT.types.postgres = ["int2"];
class INTEGER extends BaseTypes.INTEGER {
constructor(length) {
super(length);
removeUnsupportedIntegerOptions(this);
}
}
INTEGER.parse = function parse(value) {
return parseInt(value, 10);
};
BaseTypes.INTEGER.types.postgres = ["int4"];
class BIGINT extends BaseTypes.BIGINT {
constructor(length) {
super(length);
removeUnsupportedIntegerOptions(this);
}
}
BaseTypes.BIGINT.types.postgres = ["int8"];
class REAL extends BaseTypes.REAL {
constructor(length) {
super(length);
removeUnsupportedIntegerOptions(this);
}
}
BaseTypes.REAL.types.postgres = ["float4"];
class DOUBLE extends BaseTypes.DOUBLE {
constructor(length) {
super(length);
removeUnsupportedIntegerOptions(this);
}
}
BaseTypes.DOUBLE.types.postgres = ["float8"];
class FLOAT extends BaseTypes.FLOAT {
constructor(length, decimals) {
super(length, decimals);
if (this._decimals) {
warn("PostgreSQL does not support FLOAT with decimals. Plain `FLOAT` will be used instead.");
this._length = void 0;
this.options.length = void 0;
this._decimals = void 0;
}
if (this._unsigned) {
warn("PostgreSQL does not support FLOAT unsigned. `UNSIGNED` was removed.");
this._unsigned = void 0;
}
if (this._zerofill) {
warn("PostgreSQL does not support FLOAT zerofill. `ZEROFILL` was removed.");
this._zerofill = void 0;
}
}
}
delete FLOAT.parse;
class BLOB extends BaseTypes.BLOB {
toSql() {
if (this._length) {
warn("PostgreSQL does not support BLOB (BYTEA) with options. Plain `BYTEA` will be used instead.");
this._length = void 0;
}
return "BYTEA";
}
_hexify(hex) {
return `E'\\\\x${hex}'`;
}
}
BaseTypes.BLOB.types.postgres = ["bytea"];
class GEOMETRY extends BaseTypes.GEOMETRY {
toSql() {
let result = this.key;
if (this.type) {
result += `(${this.type}`;
if (this.srid) {
result += `,${this.srid}`;
}
result += ")";
}
return result;
}
static parse(value) {
const b = Buffer.from(value, "hex");
return wkx.Geometry.parse(b).toGeoJSON({ shortCrs: true });
}
_stringify(value, options) {
return `ST_GeomFromGeoJSON(${options.escape(JSON.stringify(value))})`;
}
_bindParam(value, options) {
return `ST_GeomFromGeoJSON(${options.bindParam(value)})`;
}
}
BaseTypes.GEOMETRY.types.postgres = ["geometry"];
class GEOGRAPHY extends BaseTypes.GEOGRAPHY {
toSql() {
let result = "GEOGRAPHY";
if (this.type) {
result += `(${this.type}`;
if (this.srid) {
result += `,${this.srid}`;
}
result += ")";
}
return result;
}
static parse(value) {
const b = Buffer.from(value, "hex");
return wkx.Geometry.parse(b).toGeoJSON({ shortCrs: true });
}
_stringify(value, options) {
return `ST_GeomFromGeoJSON(${options.escape(JSON.stringify(value))})`;
}
bindParam(value, options) {
return `ST_GeomFromGeoJSON(${options.bindParam(value)})`;
}
}
BaseTypes.GEOGRAPHY.types.postgres = ["geography"];
let hstore;
class HSTORE extends BaseTypes.HSTORE {
constructor() {
super();
if (!hstore) {
hstore = require("./hstore");
}
}
_value(value) {
if (!hstore) {
hstore = require("./hstore");
}
return hstore.stringify(value);
}
_stringify(value) {
return `'${this._value(value)}'`;
}
_bindParam(value, options) {
return options.bindParam(this._value(value));
}
static parse(value) {
if (!hstore) {
hstore = require("./hstore");
}
return hstore.parse(value);
}
}
HSTORE.prototype.escape = false;
BaseTypes.HSTORE.types.postgres = ["hstore"];
class RANGE extends BaseTypes.RANGE {
_value(values, options) {
if (!Array.isArray(values)) {
return this.options.subtype.stringify(values, options);
}
const valueInclusivity = [true, false];
const valuesStringified = values.map((value, index) => {
if (_.isObject(value) && Object.prototype.hasOwnProperty.call(value, "value")) {
if (Object.prototype.hasOwnProperty.call(value, "inclusive")) {
valueInclusivity[index] = value.inclusive;
}
value = value.value;
}
if (value === null || value === -Infinity || value === Infinity) {
return value;
}
if (this.options.subtype.stringify) {
return this.options.subtype.stringify(value, options);
}
return options.escape(value);
});
valuesStringified.inclusive = valueInclusivity;
return range.stringify(valuesStringified);
}
_stringify(values, options) {
const value = this._value(values, options);
if (!Array.isArray(values)) {
return `'${value}'::${this.toCastType()}`;
}
return `'${value}'`;
}
_bindParam(values, options) {
const value = this._value(values, options);
if (!Array.isArray(values)) {
return `${options.bindParam(value)}::${this.toCastType()}`;
}
return options.bindParam(value);
}
toSql() {
return BaseTypes.RANGE.types.postgres.subtypes[this._subtype.toLowerCase()];
}
toCastType() {
return BaseTypes.RANGE.types.postgres.castTypes[this._subtype.toLowerCase()];
}
static parse(value, options = { parser: (val) => val }) {
return range.parse(value, options.parser);
}
}
const range = require("./range");
RANGE.prototype.escape = false;
BaseTypes.RANGE.types.postgres = {
subtypes: {
integer: "int4range",
decimal: "numrange",
date: "tstzrange",
dateonly: "daterange",
bigint: "int8range"
},
castTypes: {
integer: "int4",
decimal: "numeric",
date: "timestamptz",
dateonly: "date",
bigint: "int8"
}
};
BaseTypes.ARRAY.prototype.escape = false;
BaseTypes.ARRAY.prototype._value = function _value(values, options) {
return values.map((value) => {
if (options && options.bindParam && this.type && this.type._value) {
return this.type._value(value, options);
}
if (this.type && this.type.stringify) {
value = this.type.stringify(value, options);
if (this.type.escape === false) {
return value;
}
}
return options.escape(value);
}, this);
};
BaseTypes.ARRAY.prototype._stringify = function _stringify(values, options) {
let str = `ARRAY[${this._value(values, options).join(",")}]`;
if (this.type) {
const Utils = require("../../utils");
let castKey = this.toSql();
if (this.type instanceof BaseTypes.ENUM) {
const table = options.field.Model.getTableName();
const useSchema = table.schema !== void 0;
const schemaWithDelimiter = useSchema ? `${Utils.addTicks(table.schema, '"')}${table.delimiter}` : "";
castKey = `${Utils.addTicks(Utils.generateEnumName(useSchema ? table.tableName : table, options.field.field), '"')}[]`;
str += `::${schemaWithDelimiter}${castKey}`;
} else {
str += `::${castKey}`;
}
}
return str;
};
BaseTypes.ARRAY.prototype._bindParam = function _bindParam(values, options) {
return options.bindParam(this._value(values, options));
};
class ENUM extends BaseTypes.ENUM {
static parse(value) {
return value;
}
}
BaseTypes.ENUM.types.postgres = [null];
return {
DECIMAL,
BLOB,
STRING,
CHAR,
TEXT,
CITEXT,
TINYINT,
SMALLINT,
INTEGER,
BIGINT,
BOOLEAN,
DATE,
DATEONLY,
REAL,
"DOUBLE PRECISION": DOUBLE,
FLOAT,
GEOMETRY,
GEOGRAPHY,
HSTORE,
RANGE,
ENUM
};
};
//# sourceMappingURL=data-types.js.map
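The RANGE stringifier above accepts either plain bound values or { value, inclusive } objects. A short model sketch follows, assuming sequelize and pg are installed and a Postgres instance is reachable; the connection string, model and attribute names are illustrative only.

"use strict";
const { Sequelize, DataTypes } = require("sequelize");

const sequelize = new Sequelize("postgres://user:pass@localhost:5432/mydb"); // hypothetical

const Booking = sequelize.define("Booking", {
  // Stored as tstzrange (see BaseTypes.RANGE.types.postgres.subtypes above).
  period: DataTypes.RANGE(DataTypes.DATE)
});

async function demo() {
  await Booking.sync();
  await Booking.create({
    period: [
      // A bound wrapped as { value, inclusive } is unwrapped by RANGE#_value above.
      { value: new Date("2024-01-01T00:00:00Z"), inclusive: true },
      new Date("2024-01-07T00:00:00Z") // upper bound, exclusive by default
    ]
  });
}

demo().catch(console.error);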

File diff suppressed because one or more lines are too long

node_modules/sequelize/lib/dialects/postgres/hstore.js (15 lines) generated vendored Normal file

@@ -0,0 +1,15 @@
"use strict";
const hstore = require("pg-hstore")({ sanitize: true });
function stringify(data) {
if (data === null)
return null;
return hstore.stringify(data);
}
exports.stringify = stringify;
function parse(value) {
if (value === null)
return null;
return hstore.parse(value);
}
exports.parse = parse;
//# sourceMappingURL=hstore.js.map
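A short round-trip sketch of the two helpers above; the keys and values are illustrative, and the exact spacing of the stringified output is up to pg-hstore.

"use strict";
const hstore = require("./hstore"); // the module above: pg-hstore with sanitize enabled

const text = hstore.stringify({ color: "red", size: null });
console.log(text); // roughly: "color"=>"red","size"=>NULL

const parsed = hstore.parse(text);
console.log(parsed); // { color: 'red', size: null }

console.log(hstore.stringify(null)); // null is passed through unchanged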


@@ -0,0 +1,7 @@
{
"version": 3,
"sources": ["../../../src/dialects/postgres/hstore.js"],
"sourcesContent": ["'use strict';\n\nconst hstore = require('pg-hstore')({ sanitize: true });\n\nfunction stringify(data) {\n if (data === null) return null;\n return hstore.stringify(data);\n}\nexports.stringify = stringify;\n\nfunction parse(value) {\n if (value === null) return null;\n return hstore.parse(value);\n}\nexports.parse = parse;\n"],
"mappings": ";AAEA,MAAM,SAAS,QAAQ,aAAa,EAAE,UAAU;AAEhD,mBAAmB,MAAM;AACvB,MAAI,SAAS;AAAM,WAAO;AAC1B,SAAO,OAAO,UAAU;AAAA;AAE1B,QAAQ,YAAY;AAEpB,eAAe,OAAO;AACpB,MAAI,UAAU;AAAM,WAAO;AAC3B,SAAO,OAAO,MAAM;AAAA;AAEtB,QAAQ,QAAQ;",
"names": []
}

node_modules/sequelize/lib/dialects/postgres/index.js (77 lines) generated vendored Normal file

@@ -0,0 +1,77 @@
"use strict";
const _ = require("lodash");
const AbstractDialect = require("../abstract");
const ConnectionManager = require("./connection-manager");
const Query = require("./query");
const QueryGenerator = require("./query-generator");
const DataTypes = require("../../data-types").postgres;
const { PostgresQueryInterface } = require("./query-interface");
class PostgresDialect extends AbstractDialect {
constructor(sequelize) {
super();
this.sequelize = sequelize;
this.connectionManager = new ConnectionManager(this, sequelize);
this.queryGenerator = new QueryGenerator({
_dialect: this,
sequelize
});
this.queryInterface = new PostgresQueryInterface(sequelize, this.queryGenerator);
}
canBackslashEscape() {
return !this.sequelize.options.standardConformingStrings;
}
}
PostgresDialect.prototype.supports = _.merge(_.cloneDeep(AbstractDialect.prototype.supports), {
"DEFAULT VALUES": true,
EXCEPTION: true,
"ON DUPLICATE KEY": false,
"ORDER NULLS": true,
returnValues: {
returning: true
},
bulkDefault: true,
schemas: true,
lock: true,
lockOf: true,
lockKey: true,
lockOuterJoinFailure: true,
skipLocked: true,
forShare: "FOR SHARE",
index: {
concurrently: true,
using: 2,
where: true,
functionBased: true,
operator: true
},
inserts: {
onConflictDoNothing: " ON CONFLICT DO NOTHING",
updateOnDuplicate: " ON CONFLICT DO UPDATE SET",
conflictFields: true,
onConflictWhere: true
},
NUMERIC: true,
ARRAY: true,
RANGE: true,
GEOMETRY: true,
REGEXP: true,
GEOGRAPHY: true,
JSON: true,
JSONB: true,
HSTORE: true,
TSVECTOR: true,
deferrableConstraints: true,
searchPath: true,
escapeStringConstants: true
});
PostgresDialect.prototype.defaultVersion = "9.5.0";
PostgresDialect.prototype.Query = Query;
PostgresDialect.prototype.DataTypes = DataTypes;
PostgresDialect.prototype.name = "postgres";
PostgresDialect.prototype.TICK_CHAR = '"';
PostgresDialect.prototype.TICK_CHAR_LEFT = PostgresDialect.prototype.TICK_CHAR;
PostgresDialect.prototype.TICK_CHAR_RIGHT = PostgresDialect.prototype.TICK_CHAR;
module.exports = PostgresDialect;
module.exports.default = PostgresDialect;
module.exports.PostgresDialect = PostgresDialect;
//# sourceMappingURL=index.js.map
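The supports map above is what feature checks elsewhere in Sequelize consult at runtime. A quick inspection sketch follows; it assumes sequelize and pg are installed, the connection string is hypothetical, and no query is issued, so no live database is needed.

"use strict";
const { Sequelize } = require("sequelize");

const sequelize = new Sequelize("postgres://user:pass@localhost:5432/mydb"); // hypothetical

const supports = sequelize.dialect.supports; // the PostgresDialect instance defined above
console.log(supports.JSONB);                // true
console.log(supports.skipLocked);           // true
console.log(supports["ON DUPLICATE KEY"]);  // false (Postgres uses ON CONFLICT instead)
console.log(sequelize.dialect.defaultVersion); // "9.5.0"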


@@ -0,0 +1,7 @@
{
"version": 3,
"sources": ["../../../src/dialects/postgres/index.js"],
"sourcesContent": ["'use strict';\n\nconst _ = require('lodash');\nconst AbstractDialect = require('../abstract');\nconst ConnectionManager = require('./connection-manager');\nconst Query = require('./query');\nconst QueryGenerator = require('./query-generator');\nconst DataTypes = require('../../data-types').postgres;\nconst { PostgresQueryInterface } = require('./query-interface');\n\nclass PostgresDialect extends AbstractDialect {\n constructor(sequelize) {\n super();\n this.sequelize = sequelize;\n this.connectionManager = new ConnectionManager(this, sequelize);\n this.queryGenerator = new QueryGenerator({\n _dialect: this,\n sequelize\n });\n this.queryInterface = new PostgresQueryInterface(\n sequelize,\n this.queryGenerator\n );\n }\n\n canBackslashEscape() {\n // postgres can use \\ to escape if one of these is true:\n // - standard_conforming_strings is off\n // - the string is prefixed with E (out of scope for this method)\n\n return !this.sequelize.options.standardConformingStrings;\n }\n}\n\nPostgresDialect.prototype.supports = _.merge(\n _.cloneDeep(AbstractDialect.prototype.supports),\n {\n 'DEFAULT VALUES': true,\n EXCEPTION: true,\n 'ON DUPLICATE KEY': false,\n 'ORDER NULLS': true,\n returnValues: {\n returning: true\n },\n bulkDefault: true,\n schemas: true,\n lock: true,\n lockOf: true,\n lockKey: true,\n lockOuterJoinFailure: true,\n skipLocked: true,\n forShare: 'FOR SHARE',\n index: {\n concurrently: true,\n using: 2,\n where: true,\n functionBased: true,\n operator: true\n },\n inserts: {\n onConflictDoNothing: ' ON CONFLICT DO NOTHING',\n updateOnDuplicate: ' ON CONFLICT DO UPDATE SET',\n conflictFields: true,\n onConflictWhere: true\n },\n NUMERIC: true,\n ARRAY: true,\n RANGE: true,\n GEOMETRY: true,\n REGEXP: true,\n GEOGRAPHY: true,\n JSON: true,\n JSONB: true,\n HSTORE: true,\n TSVECTOR: true,\n deferrableConstraints: true,\n searchPath: true,\n escapeStringConstants: true\n }\n);\n\nPostgresDialect.prototype.defaultVersion = '9.5.0'; // minimum supported version\nPostgresDialect.prototype.Query = Query;\nPostgresDialect.prototype.DataTypes = DataTypes;\nPostgresDialect.prototype.name = 'postgres';\nPostgresDialect.prototype.TICK_CHAR = '\"';\nPostgresDialect.prototype.TICK_CHAR_LEFT = PostgresDialect.prototype.TICK_CHAR;\nPostgresDialect.prototype.TICK_CHAR_RIGHT = PostgresDialect.prototype.TICK_CHAR;\n\nmodule.exports = PostgresDialect;\nmodule.exports.default = PostgresDialect;\nmodule.exports.PostgresDialect = PostgresDialect;\n"],
"mappings": ";AAEA,MAAM,IAAI,QAAQ;AAClB,MAAM,kBAAkB,QAAQ;AAChC,MAAM,oBAAoB,QAAQ;AAClC,MAAM,QAAQ,QAAQ;AACtB,MAAM,iBAAiB,QAAQ;AAC/B,MAAM,YAAY,QAAQ,oBAAoB;AAC9C,MAAM,EAAE,2BAA2B,QAAQ;AAE3C,8BAA8B,gBAAgB;AAAA,EAC5C,YAAY,WAAW;AACrB;AACA,SAAK,YAAY;AACjB,SAAK,oBAAoB,IAAI,kBAAkB,MAAM;AACrD,SAAK,iBAAiB,IAAI,eAAe;AAAA,MACvC,UAAU;AAAA,MACV;AAAA;AAEF,SAAK,iBAAiB,IAAI,uBACxB,WACA,KAAK;AAAA;AAAA,EAIT,qBAAqB;AAKnB,WAAO,CAAC,KAAK,UAAU,QAAQ;AAAA;AAAA;AAInC,gBAAgB,UAAU,WAAW,EAAE,MACrC,EAAE,UAAU,gBAAgB,UAAU,WACtC;AAAA,EACE,kBAAkB;AAAA,EAClB,WAAW;AAAA,EACX,oBAAoB;AAAA,EACpB,eAAe;AAAA,EACf,cAAc;AAAA,IACZ,WAAW;AAAA;AAAA,EAEb,aAAa;AAAA,EACb,SAAS;AAAA,EACT,MAAM;AAAA,EACN,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,sBAAsB;AAAA,EACtB,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,OAAO;AAAA,IACL,cAAc;AAAA,IACd,OAAO;AAAA,IACP,OAAO;AAAA,IACP,eAAe;AAAA,IACf,UAAU;AAAA;AAAA,EAEZ,SAAS;AAAA,IACP,qBAAqB;AAAA,IACrB,mBAAmB;AAAA,IACnB,gBAAgB;AAAA,IAChB,iBAAiB;AAAA;AAAA,EAEnB,SAAS;AAAA,EACT,OAAO;AAAA,EACP,OAAO;AAAA,EACP,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,MAAM;AAAA,EACN,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,uBAAuB;AAAA,EACvB,YAAY;AAAA,EACZ,uBAAuB;AAAA;AAI3B,gBAAgB,UAAU,iBAAiB;AAC3C,gBAAgB,UAAU,QAAQ;AAClC,gBAAgB,UAAU,YAAY;AACtC,gBAAgB,UAAU,OAAO;AACjC,gBAAgB,UAAU,YAAY;AACtC,gBAAgB,UAAU,iBAAiB,gBAAgB,UAAU;AACrE,gBAAgB,UAAU,kBAAkB,gBAAgB,UAAU;AAEtE,OAAO,UAAU;AACjB,OAAO,QAAQ,UAAU;AACzB,OAAO,QAAQ,kBAAkB;",
"names": []
}


@@ -0,0 +1,671 @@
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp.call(b, prop))
__defNormalProp(a, prop, b[prop]);
if (__getOwnPropSymbols)
for (var prop of __getOwnPropSymbols(b)) {
if (__propIsEnum.call(b, prop))
__defNormalProp(a, prop, b[prop]);
}
return a;
};
const Utils = require("../../utils");
const util = require("util");
const DataTypes = require("../../data-types");
const AbstractQueryGenerator = require("../abstract/query-generator");
const semver = require("semver");
const _ = require("lodash");
const POSTGRES_RESERVED_WORDS = "all,analyse,analyze,and,any,array,as,asc,asymmetric,authorization,binary,both,case,cast,check,collate,collation,column,concurrently,constraint,create,cross,current_catalog,current_date,current_role,current_schema,current_time,current_timestamp,current_user,default,deferrable,desc,distinct,do,else,end,except,false,fetch,for,foreign,freeze,from,full,grant,group,having,ilike,in,initially,inner,intersect,into,is,isnull,join,lateral,leading,left,like,limit,localtime,localtimestamp,natural,not,notnull,null,offset,on,only,or,order,outer,overlaps,placing,primary,references,returning,right,select,session_user,similar,some,symmetric,table,tablesample,then,to,trailing,true,union,unique,user,using,variadic,verbose,when,where,window,with".split(",");
class PostgresQueryGenerator extends AbstractQueryGenerator {
setSearchPath(searchPath) {
return `SET search_path to ${searchPath};`;
}
createDatabaseQuery(databaseName, options) {
options = __spreadValues({
encoding: null,
collate: null
}, options);
const values = {
database: this.quoteTable(databaseName),
encoding: options.encoding ? ` ENCODING = ${this.escape(options.encoding)}` : "",
collation: options.collate ? ` LC_COLLATE = ${this.escape(options.collate)}` : "",
ctype: options.ctype ? ` LC_CTYPE = ${this.escape(options.ctype)}` : "",
template: options.template ? ` TEMPLATE = ${this.escape(options.template)}` : ""
};
return `CREATE DATABASE ${values.database}${values.encoding}${values.collation}${values.ctype}${values.template};`;
}
dropDatabaseQuery(databaseName) {
return `DROP DATABASE IF EXISTS ${this.quoteTable(databaseName)};`;
}
createSchema(schema) {
const databaseVersion = _.get(this, "sequelize.options.databaseVersion", 0);
if (databaseVersion && semver.gte(databaseVersion, "9.2.0")) {
return `CREATE SCHEMA IF NOT EXISTS ${this.quoteIdentifier(schema)};`;
}
return `CREATE SCHEMA ${this.quoteIdentifier(schema)};`;
}
dropSchema(schema) {
return `DROP SCHEMA IF EXISTS ${this.quoteIdentifier(schema)} CASCADE;`;
}
showSchemasQuery() {
return "SELECT schema_name FROM information_schema.schemata WHERE schema_name <> 'information_schema' AND schema_name != 'public' AND schema_name !~ E'^pg_';";
}
versionQuery() {
return "SHOW SERVER_VERSION";
}
createTableQuery(tableName, attributes, options) {
options = __spreadValues({}, options);
const databaseVersion = _.get(this, "sequelize.options.databaseVersion", 0);
const attrStr = [];
let comments = "";
let columnComments = "";
const quotedTable = this.quoteTable(tableName);
if (options.comment && typeof options.comment === "string") {
comments += `; COMMENT ON TABLE ${quotedTable} IS ${this.escape(options.comment)}`;
}
for (const attr in attributes) {
const quotedAttr = this.quoteIdentifier(attr);
const i = attributes[attr].indexOf("COMMENT ");
if (i !== -1) {
const escapedCommentText = this.escape(attributes[attr].substring(i + 8));
columnComments += `; COMMENT ON COLUMN ${quotedTable}.${quotedAttr} IS ${escapedCommentText}`;
attributes[attr] = attributes[attr].substring(0, i);
}
const dataType = this.dataTypeMapping(tableName, attr, attributes[attr]);
attrStr.push(`${quotedAttr} ${dataType}`);
}
let attributesClause = attrStr.join(", ");
if (options.uniqueKeys) {
_.each(options.uniqueKeys, (columns) => {
if (columns.customIndex) {
attributesClause += `, UNIQUE (${columns.fields.map((field) => this.quoteIdentifier(field)).join(", ")})`;
}
});
}
const pks = _.reduce(attributes, (acc, attribute, key) => {
if (attribute.includes("PRIMARY KEY")) {
acc.push(this.quoteIdentifier(key));
}
return acc;
}, []).join(",");
if (pks.length > 0) {
attributesClause += `, PRIMARY KEY (${pks})`;
}
return `CREATE TABLE ${databaseVersion === 0 || semver.gte(databaseVersion, "9.1.0") ? "IF NOT EXISTS " : ""}${quotedTable} (${attributesClause})${comments}${columnComments};`;
}
dropTableQuery(tableName, options) {
options = options || {};
return `DROP TABLE IF EXISTS ${this.quoteTable(tableName)}${options.cascade ? " CASCADE" : ""};`;
}
showTablesQuery() {
const schema = this.options.schema || "public";
return `SELECT table_name FROM information_schema.tables WHERE table_schema = ${this.escape(schema)} AND table_type LIKE '%TABLE' AND table_name != 'spatial_ref_sys';`;
}
tableExistsQuery(tableName) {
const table = tableName.tableName || tableName;
const schema = tableName.schema || "public";
return `SELECT table_name FROM information_schema.tables WHERE table_schema = ${this.escape(schema)} AND table_name = ${this.escape(table)}`;
}
describeTableQuery(tableName, schema) {
schema = schema || this.options.schema || "public";
return `SELECT pk.constraint_type as "Constraint",c.column_name as "Field", c.column_default as "Default",c.is_nullable as "Null", (CASE WHEN c.udt_name = 'hstore' THEN c.udt_name ELSE c.data_type END) || (CASE WHEN c.character_maximum_length IS NOT NULL THEN '(' || c.character_maximum_length || ')' ELSE '' END) as "Type", (SELECT array_agg(e.enumlabel) FROM pg_catalog.pg_type t JOIN pg_catalog.pg_enum e ON t.oid=e.enumtypid WHERE t.typname=c.udt_name) AS "special", (SELECT pgd.description FROM pg_catalog.pg_statio_all_tables AS st INNER JOIN pg_catalog.pg_description pgd on (pgd.objoid=st.relid) WHERE c.ordinal_position=pgd.objsubid AND c.table_name=st.relname) AS "Comment" FROM information_schema.columns c LEFT JOIN (SELECT tc.table_schema, tc.table_name, cu.column_name, tc.constraint_type FROM information_schema.TABLE_CONSTRAINTS tc JOIN information_schema.KEY_COLUMN_USAGE cu ON tc.table_schema=cu.table_schema and tc.table_name=cu.table_name and tc.constraint_name=cu.constraint_name and tc.constraint_type='PRIMARY KEY') pk ON pk.table_schema=c.table_schema AND pk.table_name=c.table_name AND pk.column_name=c.column_name WHERE c.table_name = ${this.escape(tableName)} AND c.table_schema = ${this.escape(schema)}`;
}
_checkValidJsonStatement(stmt) {
if (typeof stmt !== "string") {
return false;
}
const jsonFunctionRegex = /^\s*((?:[a-z]+_){0,2}jsonb?(?:_[a-z]+){0,2})\([^)]*\)/i;
const jsonOperatorRegex = /^\s*(->>?|#>>?|@>|<@|\?[|&]?|\|{2}|#-)/i;
const tokenCaptureRegex = /^\s*((?:([`"'])(?:(?!\2).|\2{2})*\2)|[\w\d\s]+|[().,;+-])/i;
let currentIndex = 0;
let openingBrackets = 0;
let closingBrackets = 0;
let hasJsonFunction = false;
let hasInvalidToken = false;
while (currentIndex < stmt.length) {
const string = stmt.substr(currentIndex);
const functionMatches = jsonFunctionRegex.exec(string);
if (functionMatches) {
currentIndex += functionMatches[0].indexOf("(");
hasJsonFunction = true;
continue;
}
const operatorMatches = jsonOperatorRegex.exec(string);
if (operatorMatches) {
currentIndex += operatorMatches[0].length;
hasJsonFunction = true;
continue;
}
const tokenMatches = tokenCaptureRegex.exec(string);
if (tokenMatches) {
const capturedToken = tokenMatches[1];
if (capturedToken === "(") {
openingBrackets++;
} else if (capturedToken === ")") {
closingBrackets++;
} else if (capturedToken === ";") {
hasInvalidToken = true;
break;
}
currentIndex += tokenMatches[0].length;
continue;
}
break;
}
hasInvalidToken |= openingBrackets !== closingBrackets;
if (hasJsonFunction && hasInvalidToken) {
throw new Error(`Invalid json statement: ${stmt}`);
}
return hasJsonFunction;
}
handleSequelizeMethod(smth, tableName, factory, options, prepend) {
if (smth instanceof Utils.Json) {
if (smth.conditions) {
const conditions = this.parseConditionObject(smth.conditions).map((condition) => `${this.jsonPathExtractionQuery(condition.path[0], _.tail(condition.path))} = '${condition.value}'`);
return conditions.join(" AND ");
}
if (smth.path) {
let str;
if (this._checkValidJsonStatement(smth.path)) {
str = smth.path;
} else {
const paths = _.toPath(smth.path);
const column = paths.shift();
str = this.jsonPathExtractionQuery(column, paths);
}
if (smth.value) {
str += util.format(" = %s", this.escape(smth.value));
}
return str;
}
}
return super.handleSequelizeMethod.call(this, smth, tableName, factory, options, prepend);
}
addColumnQuery(table, key, attribute) {
const dbDataType = this.attributeToSQL(attribute, { context: "addColumn", table, key });
const dataType = attribute.type || attribute;
const definition = this.dataTypeMapping(table, key, dbDataType);
const quotedKey = this.quoteIdentifier(key);
const quotedTable = this.quoteTable(this.extractTableDetails(table));
let query = `ALTER TABLE ${quotedTable} ADD COLUMN ${quotedKey} ${definition};`;
if (dataType instanceof DataTypes.ENUM) {
query = this.pgEnum(table, key, dataType) + query;
} else if (dataType.type && dataType.type instanceof DataTypes.ENUM) {
query = this.pgEnum(table, key, dataType.type) + query;
}
return query;
}
removeColumnQuery(tableName, attributeName) {
const quotedTableName = this.quoteTable(this.extractTableDetails(tableName));
const quotedAttributeName = this.quoteIdentifier(attributeName);
return `ALTER TABLE ${quotedTableName} DROP COLUMN ${quotedAttributeName};`;
}
changeColumnQuery(tableName, attributes) {
const query = (subQuery) => `ALTER TABLE ${this.quoteTable(tableName)} ALTER COLUMN ${subQuery};`;
const sql = [];
for (const attributeName in attributes) {
let definition = this.dataTypeMapping(tableName, attributeName, attributes[attributeName]);
let attrSql = "";
if (definition.includes("NOT NULL")) {
attrSql += query(`${this.quoteIdentifier(attributeName)} SET NOT NULL`);
definition = definition.replace("NOT NULL", "").trim();
} else if (!definition.includes("REFERENCES")) {
attrSql += query(`${this.quoteIdentifier(attributeName)} DROP NOT NULL`);
}
if (definition.includes("DEFAULT")) {
attrSql += query(`${this.quoteIdentifier(attributeName)} SET DEFAULT ${definition.match(/DEFAULT ([^;]+)/)[1]}`);
definition = definition.replace(/(DEFAULT[^;]+)/, "").trim();
} else if (!definition.includes("REFERENCES")) {
attrSql += query(`${this.quoteIdentifier(attributeName)} DROP DEFAULT`);
}
if (attributes[attributeName].startsWith("ENUM(")) {
attrSql += this.pgEnum(tableName, attributeName, attributes[attributeName]);
definition = definition.replace(/^ENUM\(.+\)/, this.pgEnumName(tableName, attributeName, { schema: false }));
definition += ` USING (${this.quoteIdentifier(attributeName)}::${this.pgEnumName(tableName, attributeName)})`;
}
if (definition.match(/UNIQUE;*$/)) {
definition = definition.replace(/UNIQUE;*$/, "");
attrSql += query(`ADD UNIQUE (${this.quoteIdentifier(attributeName)})`).replace("ALTER COLUMN", "");
}
if (definition.includes("REFERENCES")) {
definition = definition.replace(/.+?(?=REFERENCES)/, "");
attrSql += query(`ADD FOREIGN KEY (${this.quoteIdentifier(attributeName)}) ${definition}`).replace("ALTER COLUMN", "");
} else {
attrSql += query(`${this.quoteIdentifier(attributeName)} TYPE ${definition}`);
}
sql.push(attrSql);
}
return sql.join("");
}
renameColumnQuery(tableName, attrBefore, attributes) {
const attrString = [];
for (const attributeName in attributes) {
attrString.push(`${this.quoteIdentifier(attrBefore)} TO ${this.quoteIdentifier(attributeName)}`);
}
return `ALTER TABLE ${this.quoteTable(tableName)} RENAME COLUMN ${attrString.join(", ")};`;
}
fn(fnName, tableName, parameters, body, returns, language) {
fnName = fnName || "testfunc";
language = language || "plpgsql";
returns = returns ? `RETURNS ${returns}` : "";
parameters = parameters || "";
return `CREATE OR REPLACE FUNCTION pg_temp.${fnName}(${parameters}) ${returns} AS $func$ BEGIN ${body} END; $func$ LANGUAGE ${language}; SELECT * FROM pg_temp.${fnName}();`;
}
truncateTableQuery(tableName, options = {}) {
return [
`TRUNCATE ${this.quoteTable(tableName)}`,
options.restartIdentity ? " RESTART IDENTITY" : "",
options.cascade ? " CASCADE" : ""
].join("");
}
deleteQuery(tableName, where, options = {}, model) {
const table = this.quoteTable(tableName);
let whereClause = this.getWhereConditions(where, null, model, options);
const limit = options.limit ? ` LIMIT ${this.escape(options.limit)}` : "";
let primaryKeys = "";
let primaryKeysSelection = "";
if (whereClause) {
whereClause = ` WHERE ${whereClause}`;
}
if (options.limit) {
if (!model) {
throw new Error("Cannot LIMIT delete without a model.");
}
const pks = Object.values(model.primaryKeys).map((pk) => this.quoteIdentifier(pk.field)).join(",");
primaryKeys = model.primaryKeyAttributes.length > 1 ? `(${pks})` : pks;
primaryKeysSelection = pks;
return `DELETE FROM ${table} WHERE ${primaryKeys} IN (SELECT ${primaryKeysSelection} FROM ${table}${whereClause}${limit})`;
}
return `DELETE FROM ${table}${whereClause}`;
}
showIndexesQuery(tableName) {
let schemaJoin = "";
let schemaWhere = "";
if (typeof tableName !== "string") {
schemaJoin = ", pg_namespace s";
schemaWhere = ` AND s.oid = t.relnamespace AND s.nspname = '${tableName.schema}'`;
tableName = tableName.tableName;
}
return `SELECT i.relname AS name, ix.indisprimary AS primary, ix.indisunique AS unique, ix.indkey AS indkey, array_agg(a.attnum) as column_indexes, array_agg(a.attname) AS column_names, pg_get_indexdef(ix.indexrelid) AS definition FROM pg_class t, pg_class i, pg_index ix, pg_attribute a${schemaJoin} WHERE t.oid = ix.indrelid AND i.oid = ix.indexrelid AND a.attrelid = t.oid AND t.relkind = 'r' and t.relname = '${tableName}'${schemaWhere} GROUP BY i.relname, ix.indexrelid, ix.indisprimary, ix.indisunique, ix.indkey ORDER BY i.relname;`;
}
showConstraintsQuery(tableName) {
return [
'SELECT constraint_catalog AS "constraintCatalog",',
'constraint_schema AS "constraintSchema",',
'constraint_name AS "constraintName",',
'table_catalog AS "tableCatalog",',
'table_schema AS "tableSchema",',
'table_name AS "tableName",',
'constraint_type AS "constraintType",',
'is_deferrable AS "isDeferrable",',
'initially_deferred AS "initiallyDeferred"',
"from INFORMATION_SCHEMA.table_constraints",
`WHERE table_name='${tableName}';`
].join(" ");
}
removeIndexQuery(tableName, indexNameOrAttributes, options) {
let indexName = indexNameOrAttributes;
if (typeof indexName !== "string") {
indexName = Utils.underscore(`${tableName}_${indexNameOrAttributes.join("_")}`);
}
return [
"DROP INDEX",
options && options.concurrently && "CONCURRENTLY",
`IF EXISTS ${this.quoteIdentifiers(indexName)}`
].filter(Boolean).join(" ");
}
addLimitAndOffset(options) {
let fragment = "";
if (options.limit != null) {
fragment += " LIMIT " + this.escape(options.limit);
}
if (options.offset != null) {
fragment += " OFFSET " + this.escape(options.offset);
}
return fragment;
}
attributeToSQL(attribute, options) {
if (!_.isPlainObject(attribute)) {
attribute = {
type: attribute
};
}
let type;
if (attribute.type instanceof DataTypes.ENUM || attribute.type instanceof DataTypes.ARRAY && attribute.type.type instanceof DataTypes.ENUM) {
const enumType = attribute.type.type || attribute.type;
let values = attribute.values;
if (enumType.values && !attribute.values) {
values = enumType.values;
}
if (Array.isArray(values) && values.length > 0) {
type = `ENUM(${values.map((value) => this.escape(value)).join(", ")})`;
if (attribute.type instanceof DataTypes.ARRAY) {
type += "[]";
}
} else {
throw new Error("Values for ENUM haven't been defined.");
}
}
if (!type) {
type = attribute.type;
}
let sql = type.toString();
if (Object.prototype.hasOwnProperty.call(attribute, "allowNull") && !attribute.allowNull) {
sql += " NOT NULL";
}
if (attribute.autoIncrement) {
if (attribute.autoIncrementIdentity) {
sql += " GENERATED BY DEFAULT AS IDENTITY";
} else {
sql += " SERIAL";
}
}
if (Utils.defaultValueSchemable(attribute.defaultValue)) {
sql += ` DEFAULT ${this.escape(attribute.defaultValue, attribute)}`;
}
if (attribute.unique === true) {
sql += " UNIQUE";
}
if (attribute.primaryKey) {
sql += " PRIMARY KEY";
}
if (attribute.references) {
let referencesTable = this.quoteTable(attribute.references.model);
let schema;
if (options.schema) {
schema = options.schema;
} else if ((!attribute.references.model || typeof attribute.references.model == "string") && options.table && options.table.schema) {
schema = options.table.schema;
}
if (schema) {
referencesTable = this.quoteTable(this.addSchema({
tableName: referencesTable,
_schema: schema
}));
}
let referencesKey;
if (!options.withoutForeignKeyConstraints) {
if (attribute.references.key) {
referencesKey = this.quoteIdentifiers(attribute.references.key);
} else {
referencesKey = this.quoteIdentifier("id");
}
sql += ` REFERENCES ${referencesTable} (${referencesKey})`;
if (attribute.onDelete) {
sql += ` ON DELETE ${attribute.onDelete.toUpperCase()}`;
}
if (attribute.onUpdate) {
sql += ` ON UPDATE ${attribute.onUpdate.toUpperCase()}`;
}
if (attribute.references.deferrable) {
sql += ` ${attribute.references.deferrable.toString(this)}`;
}
}
}
if (attribute.comment && typeof attribute.comment === "string") {
if (options && ["addColumn", "changeColumn"].includes(options.context)) {
const quotedAttr = this.quoteIdentifier(options.key);
const escapedCommentText = this.escape(attribute.comment);
sql += `; COMMENT ON COLUMN ${this.quoteTable(options.table)}.${quotedAttr} IS ${escapedCommentText}`;
} else {
sql += ` COMMENT ${attribute.comment}`;
}
}
return sql;
}
deferConstraintsQuery(options) {
return options.deferrable.toString(this);
}
setConstraintQuery(columns, type) {
let columnFragment = "ALL";
if (columns) {
columnFragment = columns.map((column) => this.quoteIdentifier(column)).join(", ");
}
return `SET CONSTRAINTS ${columnFragment} ${type}`;
}
setDeferredQuery(columns) {
return this.setConstraintQuery(columns, "DEFERRED");
}
setImmediateQuery(columns) {
return this.setConstraintQuery(columns, "IMMEDIATE");
}
attributesToSQL(attributes, options) {
const result = {};
for (const key in attributes) {
const attribute = attributes[key];
result[attribute.field || key] = this.attributeToSQL(attribute, __spreadValues({ key }, options));
}
return result;
}
createTrigger(tableName, triggerName, eventType, fireOnSpec, functionName, functionParams, optionsArray) {
const decodedEventType = this.decodeTriggerEventType(eventType);
const eventSpec = this.expandTriggerEventSpec(fireOnSpec);
const expandedOptions = this.expandOptions(optionsArray);
const paramList = this._expandFunctionParamList(functionParams);
return `CREATE ${this.triggerEventTypeIsConstraint(eventType)}TRIGGER ${this.quoteIdentifier(triggerName)} ${decodedEventType} ${eventSpec} ON ${this.quoteTable(tableName)}${expandedOptions ? ` ${expandedOptions}` : ""} EXECUTE PROCEDURE ${functionName}(${paramList});`;
}
dropTrigger(tableName, triggerName) {
return `DROP TRIGGER ${this.quoteIdentifier(triggerName)} ON ${this.quoteTable(tableName)} RESTRICT;`;
}
renameTrigger(tableName, oldTriggerName, newTriggerName) {
return `ALTER TRIGGER ${this.quoteIdentifier(oldTriggerName)} ON ${this.quoteTable(tableName)} RENAME TO ${this.quoteIdentifier(newTriggerName)};`;
}
createFunction(functionName, params, returnType, language, body, optionsArray, options) {
if (!functionName || !returnType || !language || !body)
throw new Error("createFunction missing some parameters. Did you pass functionName, returnType, language and body?");
const paramList = this._expandFunctionParamList(params);
const variableList = options && options.variables ? this._expandFunctionVariableList(options.variables) : "";
const expandedOptionsArray = this.expandOptions(optionsArray);
const statement = options && options.force ? "CREATE OR REPLACE FUNCTION" : "CREATE FUNCTION";
return `${statement} ${functionName}(${paramList}) RETURNS ${returnType} AS $func$ ${variableList} BEGIN ${body} END; $func$ language '${language}'${expandedOptionsArray};`;
}
dropFunction(functionName, params) {
if (!functionName)
throw new Error("requires functionName");
const paramList = this._expandFunctionParamList(params);
return `DROP FUNCTION ${functionName}(${paramList}) RESTRICT;`;
}
renameFunction(oldFunctionName, params, newFunctionName) {
const paramList = this._expandFunctionParamList(params);
return `ALTER FUNCTION ${oldFunctionName}(${paramList}) RENAME TO ${newFunctionName};`;
}
pgEscapeAndQuote(val) {
return this.quoteIdentifier(Utils.removeTicks(this.escape(val), "'"));
}
_expandFunctionParamList(params) {
if (params === void 0 || !Array.isArray(params)) {
throw new Error("_expandFunctionParamList: function parameters array required, including an empty one for no arguments");
}
const paramList = [];
params.forEach((curParam) => {
const paramDef = [];
if (curParam.type) {
if (curParam.direction) {
paramDef.push(curParam.direction);
}
if (curParam.name) {
paramDef.push(curParam.name);
}
paramDef.push(curParam.type);
} else {
throw new Error("function or trigger used with a parameter without any type");
}
const joined = paramDef.join(" ");
if (joined)
paramList.push(joined);
});
return paramList.join(", ");
}
_expandFunctionVariableList(variables) {
if (!Array.isArray(variables)) {
throw new Error("_expandFunctionVariableList: function variables must be an array");
}
const variableDefinitions = [];
variables.forEach((variable) => {
if (!variable.name || !variable.type) {
throw new Error("function variable must have a name and type");
}
let variableDefinition = `DECLARE ${variable.name} ${variable.type}`;
if (variable.default) {
variableDefinition += ` := ${variable.default}`;
}
variableDefinition += ";";
variableDefinitions.push(variableDefinition);
});
return variableDefinitions.join(" ");
}
expandOptions(options) {
return options === void 0 || _.isEmpty(options) ? "" : options.join(" ");
}
decodeTriggerEventType(eventSpecifier) {
const EVENT_DECODER = {
"after": "AFTER",
"before": "BEFORE",
"instead_of": "INSTEAD OF",
"after_constraint": "AFTER"
};
if (!EVENT_DECODER[eventSpecifier]) {
throw new Error(`Invalid trigger event specified: ${eventSpecifier}`);
}
return EVENT_DECODER[eventSpecifier];
}
triggerEventTypeIsConstraint(eventSpecifier) {
return eventSpecifier === "after_constraint" ? "CONSTRAINT " : "";
}
expandTriggerEventSpec(fireOnSpec) {
if (_.isEmpty(fireOnSpec)) {
throw new Error("no table change events specified to trigger on");
}
return _.map(fireOnSpec, (fireValue, fireKey) => {
const EVENT_MAP = {
"insert": "INSERT",
"update": "UPDATE",
"delete": "DELETE",
"truncate": "TRUNCATE"
};
if (!EVENT_MAP[fireValue]) {
throw new Error(`parseTriggerEventSpec: undefined trigger event ${fireKey}`);
}
let eventSpec = EVENT_MAP[fireValue];
if (eventSpec === "UPDATE") {
if (Array.isArray(fireValue) && fireValue.length > 0) {
eventSpec += ` OF ${fireValue.join(", ")}`;
}
}
return eventSpec;
}).join(" OR ");
}
pgEnumName(tableName, attr, options) {
options = options || {};
const tableDetails = this.extractTableDetails(tableName, options);
let enumName = Utils.addTicks(Utils.generateEnumName(tableDetails.tableName, attr), '"');
if (options.schema !== false && tableDetails.schema) {
enumName = this.quoteIdentifier(tableDetails.schema) + tableDetails.delimiter + enumName;
}
return enumName;
}
pgListEnums(tableName, attrName, options) {
let enumName = "";
const tableDetails = this.extractTableDetails(tableName, options);
if (tableDetails.tableName && attrName) {
enumName = ` AND t.typname=${this.pgEnumName(tableDetails.tableName, attrName, { schema: false }).replace(/"/g, "'")}`;
}
return `SELECT t.typname enum_name, array_agg(e.enumlabel ORDER BY enumsortorder) enum_value FROM pg_type t JOIN pg_enum e ON t.oid = e.enumtypid JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace WHERE n.nspname = '${tableDetails.schema}'${enumName} GROUP BY 1`;
}
pgEnum(tableName, attr, dataType, options) {
const enumName = this.pgEnumName(tableName, attr, options);
let values;
if (dataType.values) {
values = `ENUM(${dataType.values.map((value) => this.escape(value)).join(", ")})`;
} else {
values = dataType.toString().match(/^ENUM\(.+\)/)[0];
}
let sql = `DO ${this.escape(`BEGIN CREATE TYPE ${enumName} AS ${values}; EXCEPTION WHEN duplicate_object THEN null; END`)};`;
if (!!options && options.force === true) {
sql = this.pgEnumDrop(tableName, attr) + sql;
}
return sql;
}
pgEnumAdd(tableName, attr, value, options) {
const enumName = this.pgEnumName(tableName, attr);
let sql = `ALTER TYPE ${enumName} ADD VALUE `;
if (semver.gte(this.sequelize.options.databaseVersion, "9.3.0")) {
sql += "IF NOT EXISTS ";
}
sql += this.escape(value);
if (options.before) {
sql += ` BEFORE ${this.escape(options.before)}`;
} else if (options.after) {
sql += ` AFTER ${this.escape(options.after)}`;
}
return sql;
}
pgEnumDrop(tableName, attr, enumName) {
enumName = enumName || this.pgEnumName(tableName, attr);
return `DROP TYPE IF EXISTS ${enumName}; `;
}
fromArray(text) {
text = text.replace(/^{/, "").replace(/}$/, "");
let matches = text.match(/("(?:\\.|[^"\\\\])*"|[^,]*)(?:\s*,\s*|\s*$)/ig);
if (matches.length < 1) {
return [];
}
matches = matches.map((m) => m.replace(/",$/, "").replace(/,$/, "").replace(/(^"|"$)/g, ""));
return matches.slice(0, -1);
}
dataTypeMapping(tableName, attr, dataType) {
if (dataType.includes("PRIMARY KEY")) {
dataType = dataType.replace("PRIMARY KEY", "");
}
if (dataType.includes("SERIAL")) {
if (dataType.includes("BIGINT")) {
dataType = dataType.replace("SERIAL", "BIGSERIAL");
dataType = dataType.replace("BIGINT", "");
} else if (dataType.includes("SMALLINT")) {
dataType = dataType.replace("SERIAL", "SMALLSERIAL");
dataType = dataType.replace("SMALLINT", "");
} else {
dataType = dataType.replace("INTEGER", "");
}
dataType = dataType.replace("NOT NULL", "");
}
if (dataType.startsWith("ENUM(")) {
dataType = dataType.replace(/^ENUM\(.+\)/, this.pgEnumName(tableName, attr));
}
return dataType;
}
getForeignKeysQuery(tableName) {
return `SELECT conname as constraint_name, pg_catalog.pg_get_constraintdef(r.oid, true) as condef FROM pg_catalog.pg_constraint r WHERE r.conrelid = (SELECT oid FROM pg_class WHERE relname = '${tableName}' LIMIT 1) AND r.contype = 'f' ORDER BY 1;`;
}
_getForeignKeyReferencesQueryPrefix() {
return "SELECT DISTINCT tc.constraint_name as constraint_name, tc.constraint_schema as constraint_schema, tc.constraint_catalog as constraint_catalog, tc.table_name as table_name,tc.table_schema as table_schema,tc.table_catalog as table_catalog,tc.initially_deferred as initially_deferred,tc.is_deferrable as is_deferrable,kcu.column_name as column_name,ccu.table_schema AS referenced_table_schema,ccu.table_catalog AS referenced_table_catalog,ccu.table_name AS referenced_table_name,ccu.column_name AS referenced_column_name FROM information_schema.table_constraints AS tc JOIN information_schema.key_column_usage AS kcu ON tc.constraint_name = kcu.constraint_name JOIN information_schema.constraint_column_usage AS ccu ON ccu.constraint_name = tc.constraint_name ";
}
getForeignKeyReferencesQuery(tableName, catalogName, schemaName) {
return `${this._getForeignKeyReferencesQueryPrefix()}WHERE constraint_type = 'FOREIGN KEY' AND tc.table_name = '${tableName}'${catalogName ? ` AND tc.table_catalog = '${catalogName}'` : ""}${schemaName ? ` AND tc.table_schema = '${schemaName}'` : ""}`;
}
getForeignKeyReferenceQuery(table, columnName) {
const tableName = table.tableName || table;
const schema = table.schema;
return `${this._getForeignKeyReferencesQueryPrefix()}WHERE constraint_type = 'FOREIGN KEY' AND tc.table_name='${tableName}' AND kcu.column_name = '${columnName}'${schema ? ` AND tc.table_schema = '${schema}'` : ""}`;
}
dropForeignKeyQuery(tableName, foreignKey) {
return `ALTER TABLE ${this.quoteTable(tableName)} DROP CONSTRAINT ${this.quoteIdentifier(foreignKey)};`;
}
quoteIdentifier(identifier, force) {
const optForceQuote = force || false;
const optQuoteIdentifiers = this.options.quoteIdentifiers !== false;
const rawIdentifier = Utils.removeTicks(identifier, '"');
if (optForceQuote === true || optQuoteIdentifiers !== false || identifier.includes(".") || identifier.includes("->") || POSTGRES_RESERVED_WORDS.includes(rawIdentifier.toLowerCase())) {
return Utils.addTicks(rawIdentifier, '"');
}
return rawIdentifier;
}
}
module.exports = PostgresQueryGenerator;
//# sourceMappingURL=query-generator.js.map
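Most methods above return plain SQL strings and need no live connection. A hedged inspection sketch follows; it assumes sequelize and pg are installed, reaching the generator through the query interface is an internal, undocumented path, and the table name is hypothetical.

"use strict";
const { Sequelize } = require("sequelize");

const sequelize = new Sequelize("postgres://user:pass@localhost:5432/mydb"); // hypothetical
const queryGenerator = sequelize.getQueryInterface().queryGenerator; // internal API

console.log(queryGenerator.versionQuery());
// SHOW SERVER_VERSION

console.log(queryGenerator.dropTableQuery("users", { cascade: true }));
// DROP TABLE IF EXISTS "users" CASCADE;

console.log(queryGenerator.setSearchPath("custom_schema"));
// SET search_path to custom_schema;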

File diff suppressed because one or more lines are too long


@@ -0,0 +1,171 @@
"use strict";
var __defProp = Object.defineProperty;
var __defProps = Object.defineProperties;
var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp.call(b, prop))
__defNormalProp(a, prop, b[prop]);
if (__getOwnPropSymbols)
for (var prop of __getOwnPropSymbols(b)) {
if (__propIsEnum.call(b, prop))
__defNormalProp(a, prop, b[prop]);
}
return a;
};
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
var __objRest = (source, exclude) => {
var target = {};
for (var prop in source)
if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
target[prop] = source[prop];
if (source != null && __getOwnPropSymbols)
for (var prop of __getOwnPropSymbols(source)) {
if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
target[prop] = source[prop];
}
return target;
};
const DataTypes = require("../../data-types");
const QueryTypes = require("../../query-types");
const { QueryInterface } = require("../abstract/query-interface");
const Utils = require("../../utils");
const Deferrable = require("../../deferrable");
class PostgresQueryInterface extends QueryInterface {
async ensureEnums(tableName, attributes, options, model) {
const keys = Object.keys(attributes);
const keyLen = keys.length;
let sql = "";
let promises = [];
let i = 0;
for (i = 0; i < keyLen; i++) {
const attribute = attributes[keys[i]];
const type = attribute.type;
if (type instanceof DataTypes.ENUM || type instanceof DataTypes.ARRAY && type.type instanceof DataTypes.ENUM) {
sql = this.queryGenerator.pgListEnums(tableName, attribute.field || keys[i], options);
promises.push(this.sequelize.query(sql, __spreadProps(__spreadValues({}, options), { plain: true, raw: true, type: QueryTypes.SELECT })));
}
}
const results = await Promise.all(promises);
promises = [];
let enumIdx = 0;
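// addEnumValue queues an "ALTER TYPE ... ADD VALUE" statement (built by
// pgEnumAdd), placed BEFORE or AFTER an existing label; spliceStart keeps the
// queued statements in the order the labels must be created.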
const addEnumValue = (field, value, relativeValue, position = "before", spliceStart = promises.length) => {
const valueOptions = __spreadValues({}, options);
valueOptions.before = null;
valueOptions.after = null;
switch (position) {
case "after":
valueOptions.after = relativeValue;
break;
case "before":
default:
valueOptions.before = relativeValue;
break;
}
promises.splice(spliceStart, 0, () => {
return this.sequelize.query(this.queryGenerator.pgEnumAdd(tableName, field, value, valueOptions), valueOptions);
});
};
for (i = 0; i < keyLen; i++) {
const attribute = attributes[keys[i]];
const type = attribute.type;
const enumType = type.type || type;
const field = attribute.field || keys[i];
if (type instanceof DataTypes.ENUM || type instanceof DataTypes.ARRAY && enumType instanceof DataTypes.ENUM) {
if (!results[enumIdx]) {
promises.push(() => {
return this.sequelize.query(this.queryGenerator.pgEnum(tableName, field, enumType, options), __spreadProps(__spreadValues({}, options), { raw: true }));
});
} else if (!!results[enumIdx] && !!model) {
const enumVals = this.queryGenerator.fromArray(results[enumIdx].enum_value);
const vals = enumType.values;
let lastOldEnumValue;
let rightestPosition = -1;
for (let oldIndex = 0; oldIndex < enumVals.length; oldIndex++) {
const enumVal = enumVals[oldIndex];
const newIdx = vals.indexOf(enumVal);
lastOldEnumValue = enumVal;
if (newIdx === -1) {
continue;
}
const newValuesBefore = vals.slice(0, newIdx);
const promisesLength = promises.length;
for (let reverseIdx = newValuesBefore.length - 1; reverseIdx >= 0; reverseIdx--) {
if (~enumVals.indexOf(newValuesBefore[reverseIdx])) {
break;
}
addEnumValue(field, newValuesBefore[reverseIdx], lastOldEnumValue, "before", promisesLength);
}
if (newIdx > rightestPosition) {
rightestPosition = newIdx;
}
}
if (lastOldEnumValue && rightestPosition < vals.length - 1) {
const remainingEnumValues = vals.slice(rightestPosition + 1);
for (let reverseIdx = remainingEnumValues.length - 1; reverseIdx >= 0; reverseIdx--) {
addEnumValue(field, remainingEnumValues[reverseIdx], lastOldEnumValue, "after");
}
}
enumIdx++;
}
}
}
const result = await promises.reduce(async (promise, asyncFunction) => await asyncFunction(await promise), Promise.resolve());
if (promises.length) {
await this.sequelize.dialect.connectionManager._refreshDynamicOIDs();
}
return result;
}
async getForeignKeyReferencesForTable(table, options) {
const queryOptions = __spreadProps(__spreadValues({}, options), {
type: QueryTypes.FOREIGNKEYS
});
const query = this.queryGenerator.getForeignKeyReferencesQuery(table.tableName || table, this.sequelize.config.database, table.schema);
const result = await this.sequelize.query(query, queryOptions);
return result.map((fkMeta) => {
const _a = Utils.camelizeObjectKeys(fkMeta), { initiallyDeferred, isDeferrable } = _a, remaining = __objRest(_a, ["initiallyDeferred", "isDeferrable"]);
return __spreadProps(__spreadValues({}, remaining), {
deferrable: isDeferrable === "NO" ? Deferrable.NOT : initiallyDeferred === "NO" ? Deferrable.INITIALLY_IMMEDIATE : Deferrable.INITIALLY_DEFERRED
});
});
}
async dropEnum(enumName, options) {
options = options || {};
return this.sequelize.query(this.queryGenerator.pgEnumDrop(null, null, this.queryGenerator.pgEscapeAndQuote(enumName)), __spreadProps(__spreadValues({}, options), { raw: true }));
}
async dropAllEnums(options) {
options = options || {};
const enums = await this.pgListEnums(null, options);
return await Promise.all(enums.map((result) => this.sequelize.query(this.queryGenerator.pgEnumDrop(null, null, this.queryGenerator.pgEscapeAndQuote(result.enum_name)), __spreadProps(__spreadValues({}, options), { raw: true }))));
}
async pgListEnums(tableName, options) {
options = options || {};
const sql = this.queryGenerator.pgListEnums(tableName);
return this.sequelize.query(sql, __spreadProps(__spreadValues({}, options), { plain: false, raw: true, type: QueryTypes.SELECT }));
}
async dropTable(tableName, options) {
await super.dropTable(tableName, options);
const promises = [];
const instanceTable = this.sequelize.modelManager.getModel(tableName, { attribute: "tableName" });
if (!instanceTable) {
return;
}
const getTableName = (!options || !options.schema || options.schema === "public" ? "" : `${options.schema}_`) + tableName;
const keys = Object.keys(instanceTable.rawAttributes);
const keyLen = keys.length;
for (let i = 0; i < keyLen; i++) {
if (instanceTable.rawAttributes[keys[i]].type instanceof DataTypes.ENUM) {
const sql = this.queryGenerator.pgEnumDrop(getTableName, keys[i]);
options.supportsSearchPath = false;
promises.push(this.sequelize.query(sql, __spreadProps(__spreadValues({}, options), { raw: true })));
}
}
await Promise.all(promises);
}
}
exports.PostgresQueryInterface = PostgresQueryInterface;
//# sourceMappingURL=query-interface.js.map
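A usage sketch for the interface above (assumptions: a reachable PostgreSQL instance at the placeholder URL and an existing "orders" table with a foreign key); it shows how is_deferrable / initially_deferred are folded into a Deferrable constant on each returned row:

const { Sequelize } = require("sequelize");

async function listForeignKeys() {
  const sequelize = new Sequelize("postgres://localhost:5432/demo", { logging: false });
  try {
    const refs = await sequelize.getQueryInterface().getForeignKeyReferencesForTable("orders");
    for (const ref of refs) {
      // ref carries camelized keys: constraintName, tableName, columnName,
      // referencedTableName, referencedColumnName, deferrable, ...
      console.log(ref.constraintName, `${ref.tableName}.${ref.columnName}`,
        "->", `${ref.referencedTableName}.${ref.referencedColumnName}`, ref.deferrable);
    }
  } finally {
    await sequelize.close();
  }
}

listForeignKeys().catch(console.error);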

node_modules/sequelize/lib/dialects/postgres/query.js generated vendored Normal file

@@ -0,0 +1,323 @@
"use strict";
const AbstractQuery = require("../abstract/query");
const QueryTypes = require("../../query-types");
const sequelizeErrors = require("../../errors");
const _ = require("lodash");
const { logger } = require("../../utils/logger");
const debug = logger.debugContext("sql:pg");
class Query extends AbstractQuery {
static formatBindParameters(sql, values, dialect) {
const stringReplaceFunc = (value) => typeof value === "string" ? value.replace(/\0/g, "\\0") : value;
let bindParam;
if (Array.isArray(values)) {
bindParam = values.map(stringReplaceFunc);
sql = AbstractQuery.formatBindParameters(sql, values, dialect, { skipValueReplace: true })[0];
} else {
bindParam = [];
let i = 0;
const seen = {};
const replacementFunc = (match, key, values2) => {
if (seen[key] !== void 0) {
return seen[key];
}
if (values2[key] !== void 0) {
i = i + 1;
bindParam.push(stringReplaceFunc(values2[key]));
seen[key] = `$${i}`;
return `$${i}`;
}
return void 0;
};
sql = AbstractQuery.formatBindParameters(sql, values, dialect, replacementFunc)[0];
}
return [sql, bindParam];
}
async run(sql, parameters) {
const { connection } = this;
if (!_.isEmpty(this.options.searchPath)) {
sql = this.sequelize.getQueryInterface().queryGenerator.setSearchPath(this.options.searchPath) + sql;
}
if (this.sequelize.options.minifyAliases && this.options.includeAliases) {
_.toPairs(this.options.includeAliases).sort((a, b) => b[1].length - a[1].length).forEach(([alias, original]) => {
const reg = new RegExp(_.escapeRegExp(original), "g");
sql = sql.replace(reg, alias);
});
}
this.sql = sql;
const query = parameters && parameters.length ? new Promise((resolve, reject) => connection.query(sql, parameters, (error, result) => error ? reject(error) : resolve(result))) : new Promise((resolve, reject) => connection.query(sql, (error, result) => error ? reject(error) : resolve(result)));
const complete = this._logQuery(sql, debug, parameters);
let queryResult;
const errForStack = new Error();
try {
queryResult = await query;
} catch (error) {
if (error.code === "ECONNRESET" || /Unable to set non-blocking to true/i.test(error) || /SSL SYSCALL error: EOF detected/i.test(error) || /Local: Authentication failure/i.test(error) || error.message === "Query read timeout") {
connection._invalid = true;
}
error.sql = sql;
error.parameters = parameters;
throw this.formatError(error, errForStack.stack);
}
complete();
let rows = Array.isArray(queryResult) ? queryResult.reduce((allRows, r) => allRows.concat(r.rows || []), []) : queryResult.rows;
const rowCount = Array.isArray(queryResult) ? queryResult.reduce((count, r) => Number.isFinite(r.rowCount) ? count + r.rowCount : count, 0) : queryResult.rowCount || 0;
if (this.sequelize.options.minifyAliases && this.options.aliasesMapping) {
rows = rows.map((row) => _.toPairs(row).reduce((acc, [key, value]) => {
const mapping = this.options.aliasesMapping.get(key);
acc[mapping || key] = value;
return acc;
}, {}));
}
const isTableNameQuery = sql.startsWith("SELECT table_name FROM information_schema.tables");
const isRelNameQuery = sql.startsWith("SELECT relname FROM pg_class WHERE oid IN");
if (isRelNameQuery) {
return rows.map((row) => ({
name: row.relname,
tableName: row.relname.split("_")[0]
}));
}
if (isTableNameQuery) {
return rows.map((row) => Object.values(row));
}
if (rows[0] && rows[0].sequelize_caught_exception !== void 0) {
if (rows[0].sequelize_caught_exception !== null) {
throw this.formatError({
sql,
parameters,
code: "23505",
detail: rows[0].sequelize_caught_exception
});
}
for (const row of rows) {
delete row.sequelize_caught_exception;
}
}
if (this.isShowIndexesQuery()) {
for (const row of rows) {
const attributes = /ON .*? (?:USING .*?\s)?\(([^]*)\)/gi.exec(row.definition)[1].split(",");
const columns = _.zipObject(row.column_indexes, this.sequelize.getQueryInterface().queryGenerator.fromArray(row.column_names));
delete row.column_indexes;
delete row.column_names;
let field;
let attribute;
row.fields = row.indkey.split(" ").map((indKey, index) => {
field = columns[indKey];
if (!field) {
return null;
}
attribute = attributes[index];
return {
attribute: field,
collate: attribute.match(/COLLATE "(.*?)"/) ? /COLLATE "(.*?)"/.exec(attribute)[1] : void 0,
order: attribute.includes("DESC") ? "DESC" : attribute.includes("ASC") ? "ASC" : void 0,
length: void 0
};
}).filter((n) => n !== null);
delete row.columns;
}
return rows;
}
if (this.isForeignKeysQuery()) {
const result = [];
for (const row of rows) {
let defParts;
if (row.condef !== void 0 && (defParts = row.condef.match(/FOREIGN KEY \((.+)\) REFERENCES (.+)\((.+)\)( ON (UPDATE|DELETE) (CASCADE|RESTRICT))?( ON (UPDATE|DELETE) (CASCADE|RESTRICT))?/))) {
row.id = row.constraint_name;
row.table = defParts[2];
row.from = defParts[1];
row.to = defParts[3];
let i;
for (i = 5; i <= 8; i += 3) {
if (/(UPDATE|DELETE)/.test(defParts[i])) {
row[`on_${defParts[i].toLowerCase()}`] = defParts[i + 1];
}
}
}
result.push(row);
}
return result;
}
if (this.isSelectQuery()) {
let result = rows;
if (this.options.raw === false && this.sequelize.options.quoteIdentifiers === false) {
const attrsMap = _.reduce(this.model.rawAttributes, (m, v, k) => {
m[k.toLowerCase()] = k;
return m;
}, {});
result = rows.map((row) => {
return _.mapKeys(row, (value, key) => {
const targetAttr = attrsMap[key];
if (typeof targetAttr === "string" && targetAttr !== key) {
return targetAttr;
}
return key;
});
});
}
return this.handleSelectQuery(result);
}
if (QueryTypes.DESCRIBE === this.options.type) {
const result = {};
for (const row of rows) {
result[row.Field] = {
type: row.Type.toUpperCase(),
allowNull: row.Null === "YES",
defaultValue: row.Default,
comment: row.Comment,
special: row.special ? this.sequelize.getQueryInterface().queryGenerator.fromArray(row.special) : [],
primaryKey: row.Constraint === "PRIMARY KEY"
};
if (result[row.Field].type === "BOOLEAN") {
result[row.Field].defaultValue = { "false": false, "true": true }[result[row.Field].defaultValue];
if (result[row.Field].defaultValue === void 0) {
result[row.Field].defaultValue = null;
}
}
if (typeof result[row.Field].defaultValue === "string") {
result[row.Field].defaultValue = result[row.Field].defaultValue.replace(/'/g, "");
if (result[row.Field].defaultValue.includes("::")) {
const split = result[row.Field].defaultValue.split("::");
if (split[1].toLowerCase() !== "regclass)") {
result[row.Field].defaultValue = split[0];
}
}
}
}
return result;
}
if (this.isVersionQuery()) {
return rows[0].server_version;
}
if (this.isShowOrDescribeQuery()) {
return rows;
}
if (QueryTypes.BULKUPDATE === this.options.type) {
if (!this.options.returning) {
return parseInt(rowCount, 10);
}
return this.handleSelectQuery(rows);
}
if (QueryTypes.BULKDELETE === this.options.type) {
return parseInt(rowCount, 10);
}
if (this.isInsertQuery() || this.isUpdateQuery() || this.isUpsertQuery()) {
if (this.instance && this.instance.dataValues) {
if (this.isInsertQuery() && !this.isUpsertQuery() && rowCount === 0) {
throw new sequelizeErrors.EmptyResultError();
}
for (const key in rows[0]) {
if (Object.prototype.hasOwnProperty.call(rows[0], key)) {
const record = rows[0][key];
const attr = _.find(this.model.rawAttributes, (attribute) => attribute.fieldName === key || attribute.field === key);
this.instance.dataValues[attr && attr.fieldName || key] = record;
}
}
}
if (this.isUpsertQuery()) {
return [
this.instance,
null
];
}
return [
this.instance || rows && (this.options.plain && rows[0] || rows) || void 0,
rowCount
];
}
if (this.isRawQuery()) {
return [rows, queryResult];
}
return rows;
}
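// formatError maps PostgreSQL SQLSTATE codes onto Sequelize error classes:
// 23503 -> ForeignKeyConstraintError, 23505 -> UniqueConstraintError,
// 23P01 -> ExclusionConstraintError, 42704 -> UnknownConstraintError,
// anything else -> DatabaseError.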
formatError(err, errStack) {
let match;
let table;
let index;
let fields;
let errors;
let message;
const code = err.code || err.sqlState;
const errMessage = err.message || err.messagePrimary;
const errDetail = err.detail || err.messageDetail;
switch (code) {
case "23503":
index = errMessage.match(/violates foreign key constraint "(.+?)"/);
index = index ? index[1] : void 0;
table = errMessage.match(/on table "(.+?)"/);
table = table ? table[1] : void 0;
return new sequelizeErrors.ForeignKeyConstraintError({
message: errMessage,
fields: null,
index,
table,
parent: err,
stack: errStack
});
case "23505":
if (errDetail && (match = errDetail.replace(/"/g, "").match(/Key \((.*?)\)=\((.*?)\)/))) {
fields = _.zipObject(match[1].split(", "), match[2].split(", "));
errors = [];
message = "Validation error";
_.forOwn(fields, (value, field) => {
errors.push(new sequelizeErrors.ValidationErrorItem(this.getUniqueConstraintErrorMessage(field), "unique violation", field, value, this.instance, "not_unique"));
});
if (this.model && this.model.uniqueKeys) {
_.forOwn(this.model.uniqueKeys, (constraint) => {
if (_.isEqual(constraint.fields, Object.keys(fields)) && !!constraint.msg) {
message = constraint.msg;
return false;
}
});
}
return new sequelizeErrors.UniqueConstraintError({ message, errors, parent: err, fields, stack: errStack });
}
return new sequelizeErrors.UniqueConstraintError({
message: errMessage,
parent: err,
stack: errStack
});
case "23P01":
match = errDetail.match(/Key \((.*?)\)=\((.*?)\)/);
if (match) {
fields = _.zipObject(match[1].split(", "), match[2].split(", "));
}
message = "Exclusion constraint error";
return new sequelizeErrors.ExclusionConstraintError({
message,
constraint: err.constraint,
fields,
table: err.table,
parent: err,
stack: errStack
});
case "42704":
if (err.sql && /(CONSTRAINT|INDEX)/gi.test(err.sql)) {
message = "Unknown constraint error";
index = errMessage.match(/(?:constraint|index) "(.+?)"/i);
index = index ? index[1] : void 0;
table = errMessage.match(/relation "(.+?)"/i);
table = table ? table[1] : void 0;
throw new sequelizeErrors.UnknownConstraintError({
message,
constraint: index,
fields,
table,
parent: err,
stack: errStack
});
}
default:
return new sequelizeErrors.DatabaseError(err, { stack: errStack });
}
}
isForeignKeysQuery() {
return /SELECT conname as constraint_name, pg_catalog\.pg_get_constraintdef\(r\.oid, true\) as condef FROM pg_catalog\.pg_constraint r WHERE r\.conrelid = \(SELECT oid FROM pg_class WHERE relname = '.*' LIMIT 1\) AND r\.contype = 'f' ORDER BY 1;/.test(this.sql);
}
getInsertIdField() {
return "id";
}
}
module.exports = Query;
module.exports.Query = Query;
module.exports.default = Query;
//# sourceMappingURL=query.js.map
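The static formatBindParameters helper above can be exercised on its own, without a connection. A small sketch (the SQL and values are made up; the require path is the vendored location shown in this diff):

const Query = require("sequelize/lib/dialects/postgres/query");

const [sql, bind] = Query.formatBindParameters(
  "SELECT * FROM users WHERE name = $name AND age > $age",
  { name: "ada", age: 30 },
  "postgres"
);

console.log(sql);  // SELECT * FROM users WHERE name = $1 AND age > $2
console.log(bind); // [ 'ada', 30 ]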

node_modules/sequelize/lib/dialects/postgres/range.js generated vendored Normal file

@@ -0,0 +1,74 @@
"use strict";
const _ = require("lodash");
function stringifyRangeBound(bound) {
if (bound === null) {
return "";
}
if (bound === Infinity || bound === -Infinity) {
return bound.toString().toLowerCase();
}
return JSON.stringify(bound);
}
function parseRangeBound(bound, parseType) {
if (!bound) {
return null;
}
if (bound === "infinity") {
return Infinity;
}
if (bound === "-infinity") {
return -Infinity;
}
return parseType(bound);
}
function stringify(data) {
if (data === null)
return null;
if (!Array.isArray(data))
throw new Error("range must be an array");
if (!data.length)
return "empty";
if (data.length !== 2)
throw new Error("range array length must be 0 (empty) or 2 (lower and upper bounds)");
if (Object.prototype.hasOwnProperty.call(data, "inclusive")) {
if (data.inclusive === false)
data.inclusive = [false, false];
else if (!data.inclusive)
data.inclusive = [true, false];
else if (data.inclusive === true)
data.inclusive = [true, true];
} else {
data.inclusive = [true, false];
}
_.each(data, (value, index) => {
if (_.isObject(value)) {
if (Object.prototype.hasOwnProperty.call(value, "inclusive"))
data.inclusive[index] = !!value.inclusive;
if (Object.prototype.hasOwnProperty.call(value, "value"))
data[index] = value.value;
}
});
const lowerBound = stringifyRangeBound(data[0]);
const upperBound = stringifyRangeBound(data[1]);
return `${(data.inclusive[0] ? "[" : "(") + lowerBound},${upperBound}${data.inclusive[1] ? "]" : ")"}`;
}
exports.stringify = stringify;
function parse(value, parser) {
if (value === null)
return null;
if (value === "empty") {
return [];
}
let result = value.substring(1, value.length - 1).split(",", 2);
if (result.length !== 2)
return value;
result = result.map((item, index) => {
return {
value: parseRangeBound(item, parser),
inclusive: index === 0 ? value[0] === "[" : value[value.length - 1] === "]"
};
});
return result;
}
exports.parse = parse;
//# sourceMappingURL=range.js.map
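A quick sketch exercising the two helpers above (Number stands in as the bound parser purely for illustration):

const range = require("sequelize/lib/dialects/postgres/range");

// Arrays stringify to PostgreSQL range literals; the default bounds are
// inclusive lower / exclusive upper.
console.log(range.stringify([1, 10]));  // [1,10)
console.log(range.stringify([
  { value: 1, inclusive: true },
  { value: 10, inclusive: true }
]));                                    // [1,10]
console.log(range.stringify([]));       // empty

// parse() reverses the operation, reporting inclusivity per bound.
console.log(range.parse("[1,10)", Number));
// [ { value: 1, inclusive: true }, { value: 10, inclusive: false } ]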


@@ -0,0 +1,7 @@
{
"version": 3,
"sources": ["../../../src/dialects/postgres/range.js"],
"sourcesContent": ["'use strict';\n\nconst _ = require('lodash');\n\nfunction stringifyRangeBound(bound) {\n if (bound === null) {\n return '' ;\n }\n if (bound === Infinity || bound === -Infinity) {\n return bound.toString().toLowerCase();\n }\n return JSON.stringify(bound);\n}\n\nfunction parseRangeBound(bound, parseType) {\n if (!bound) {\n return null;\n }\n if (bound === 'infinity') {\n return Infinity;\n }\n if (bound === '-infinity') {\n return -Infinity;\n }\n return parseType(bound);\n\n}\n\nfunction stringify(data) {\n if (data === null) return null;\n\n if (!Array.isArray(data)) throw new Error('range must be an array');\n if (!data.length) return 'empty';\n if (data.length !== 2) throw new Error('range array length must be 0 (empty) or 2 (lower and upper bounds)');\n\n if (Object.prototype.hasOwnProperty.call(data, 'inclusive')) {\n if (data.inclusive === false) data.inclusive = [false, false];\n else if (!data.inclusive) data.inclusive = [true, false];\n else if (data.inclusive === true) data.inclusive = [true, true];\n } else {\n data.inclusive = [true, false];\n }\n\n _.each(data, (value, index) => {\n if (_.isObject(value)) {\n if (Object.prototype.hasOwnProperty.call(value, 'inclusive')) data.inclusive[index] = !!value.inclusive;\n if (Object.prototype.hasOwnProperty.call(value, 'value')) data[index] = value.value;\n }\n });\n\n const lowerBound = stringifyRangeBound(data[0]);\n const upperBound = stringifyRangeBound(data[1]);\n\n return `${(data.inclusive[0] ? '[' : '(') + lowerBound},${upperBound}${data.inclusive[1] ? ']' : ')'}`;\n}\nexports.stringify = stringify;\n\nfunction parse(value, parser) {\n if (value === null) return null;\n if (value === 'empty') {\n return [];\n }\n\n let result = value\n .substring(1, value.length - 1)\n .split(',', 2);\n\n if (result.length !== 2) return value;\n\n result = result.map((item, index) => {\n return {\n value: parseRangeBound(item, parser),\n inclusive: index === 0 ? value[0] === '[' : value[value.length - 1] === ']'\n };\n });\n\n return result;\n}\nexports.parse = parse;\n"],
"mappings": ";AAEA,MAAM,IAAI,QAAQ;AAElB,6BAA6B,OAAO;AAClC,MAAI,UAAU,MAAM;AAClB,WAAO;AAAA;AAET,MAAI,UAAU,YAAY,UAAU,WAAW;AAC7C,WAAO,MAAM,WAAW;AAAA;AAE1B,SAAO,KAAK,UAAU;AAAA;AAGxB,yBAAyB,OAAO,WAAW;AACzC,MAAI,CAAC,OAAO;AACV,WAAO;AAAA;AAET,MAAI,UAAU,YAAY;AACxB,WAAO;AAAA;AAET,MAAI,UAAU,aAAa;AACzB,WAAO;AAAA;AAET,SAAO,UAAU;AAAA;AAInB,mBAAmB,MAAM;AACvB,MAAI,SAAS;AAAM,WAAO;AAE1B,MAAI,CAAC,MAAM,QAAQ;AAAO,UAAM,IAAI,MAAM;AAC1C,MAAI,CAAC,KAAK;AAAQ,WAAO;AACzB,MAAI,KAAK,WAAW;AAAG,UAAM,IAAI,MAAM;AAEvC,MAAI,OAAO,UAAU,eAAe,KAAK,MAAM,cAAc;AAC3D,QAAI,KAAK,cAAc;AAAO,WAAK,YAAY,CAAC,OAAO;AAAA,aAC9C,CAAC,KAAK;AAAW,WAAK,YAAY,CAAC,MAAM;AAAA,aACzC,KAAK,cAAc;AAAM,WAAK,YAAY,CAAC,MAAM;AAAA,SACrD;AACL,SAAK,YAAY,CAAC,MAAM;AAAA;AAG1B,IAAE,KAAK,MAAM,CAAC,OAAO,UAAU;AAC7B,QAAI,EAAE,SAAS,QAAQ;AACrB,UAAI,OAAO,UAAU,eAAe,KAAK,OAAO;AAAc,aAAK,UAAU,SAAS,CAAC,CAAC,MAAM;AAC9F,UAAI,OAAO,UAAU,eAAe,KAAK,OAAO;AAAU,aAAK,SAAS,MAAM;AAAA;AAAA;AAIlF,QAAM,aAAa,oBAAoB,KAAK;AAC5C,QAAM,aAAa,oBAAoB,KAAK;AAE5C,SAAO,GAAI,MAAK,UAAU,KAAK,MAAM,OAAO,cAAc,aAAa,KAAK,UAAU,KAAK,MAAM;AAAA;AAEnG,QAAQ,YAAY;AAEpB,eAAe,OAAO,QAAQ;AAC5B,MAAI,UAAU;AAAM,WAAO;AAC3B,MAAI,UAAU,SAAS;AACrB,WAAO;AAAA;AAGT,MAAI,SAAS,MACV,UAAU,GAAG,MAAM,SAAS,GAC5B,MAAM,KAAK;AAEd,MAAI,OAAO,WAAW;AAAG,WAAO;AAEhC,WAAS,OAAO,IAAI,CAAC,MAAM,UAAU;AACnC,WAAO;AAAA,MACL,OAAO,gBAAgB,MAAM;AAAA,MAC7B,WAAW,UAAU,IAAI,MAAM,OAAO,MAAM,MAAM,MAAM,SAAS,OAAO;AAAA;AAAA;AAI5E,SAAO;AAAA;AAET,QAAQ,QAAQ;",
"names": []
}