Do not use logger in stream-copy module

Daniel García Aubert 4 years ago
parent dc6e28097f
commit 65640a4c77

@@ -90,10 +90,17 @@ function handleCopyTo (logger) {
         pgstream
             .on('data', data => metrics.addSize(data.length))
-            .on('error', err => {
-                metrics.end(null, err);
+            .on('error', error => {
+                const pid = streamCopy.clientProcessID;
 
-                return next(err);
+                streamCopy.cancel(pid, StreamCopy.ACTION_TO, (err) => {
+                    if (err) {
+                        logger.error({ error: err });
+                    }
+
+                    metrics.end(null, error);
+
+                    return next(error);
+                });
             })
             .on('end', () => {
                 metrics.end(streamCopy.getRowCount());
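
The controller now owns all logging for the COPY TO error path: on a stream error it asks the stream-copy instance to cancel the server-side query by pid, logs only a failed cancellation, and then finishes metrics and propagates the original stream error. A minimal, self-contained sketch of that flow; `streamCopy`, `metrics`, `logger` and `next` below are stubs added for illustration, not the commit's real objects:

const { EventEmitter } = require('events');

// Stand-ins for the real collaborators (assumptions, just to make the snippet runnable).
const logger = { error: (ctx) => console.error('cancel failed', ctx) };
const metrics = { end: (rows, err) => console.log('metrics.end', rows, err && err.message) };
const next = (err) => console.log('next called with', err.message);
const StreamCopy = { ACTION_TO: 'to' };
const streamCopy = {
    clientProcessID: 1234,
    cancel (pid, action, callback) {
        callback(); // pretend the backend query was cancelled successfully
    }
};

const pgstream = new EventEmitter();

pgstream.on('error', error => {
    const pid = streamCopy.clientProcessID;

    // Cancel the server-side COPY first; log only if the cancellation itself fails,
    // then close out metrics and report the original stream error.
    streamCopy.cancel(pid, StreamCopy.ACTION_TO, (err) => {
        if (err) {
            logger.error({ error: err });
        }

        metrics.end(null, error);

        return next(error);
    });
});

pgstream.emit('error', new Error('connection reset'));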

@@ -13,14 +13,13 @@ const terminateQuery = pid => `SELECT pg_terminate_backend(${pid}) as terminated
 const timeoutQuery = timeout => `SET statement_timeout=${timeout}`;
 
 module.exports = class StreamCopy {
-    constructor (sql, userDbParams, logger) {
+    constructor (sql, userDbParams) {
         this.dbParams = Object.assign({}, userDbParams, {
             port: global.settings.db_batch_port || userDbParams.port
         });
         this.sql = sql;
         this.stream = null;
         this.timeout = global.settings.copy_timeout || DEFAULT_TIMEOUT;
-        this.logger = logger;
     }
 
     static get ACTION_TO () {
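
With the logger dropped from the constructor, the class is built from just the COPY SQL and the user's database params, and logging stays with the caller. A minimal sketch of the new call site; the require path and the `userDbParams` values are assumptions for illustration, not taken from the commit:

const StreamCopy = require('../services/stream-copy'); // assumed path
// Illustrative connection params; the real ones come from the request's db metadata.
const userDbParams = { host: '127.0.0.1', port: 5432, dbname: 'cartodb_dev_user_1_db', user: 'publicuser' };

// Before this commit: new StreamCopy(sql, userDbParams, logger)
const streamCopy = new StreamCopy('COPY (SELECT 1) TO STDOUT', userDbParams);
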
@@ -52,7 +51,6 @@ module.exports = class StreamCopy {
         if (action === ACTION_TO) {
             pgstream.on('end', () => done());
-            pgstream.on('error', () => this._cancel(client.processID, action));
             pgstream.on('warning', (msg) => this.logger.warn({ error: new Error(msg) }));
         } else if (action === ACTION_FROM) {
             pgstream.on('finish', () => done());
@@ -70,33 +68,33 @@ module.exports = class StreamCopy {
         return this.stream.rowCount;
     }
 
-    _cancel (pid, action) {
+    cancel (pid, action, callback) {
         const pg = new PSQL(this.dbParams);
         const actionType = action === ACTION_TO ? ACTION_TO : ACTION_FROM;
 
         pg.query(cancelQuery(pid), (err, result) => {
             if (err) {
-                return this.logger.error({ error: err });
+                return callback(err);
             }
 
             const isCancelled = result.rows.length && result.rows[0].cancelled;
 
             if (isCancelled) {
-                return this.logger.info(`Canceled "copy ${actionType}" stream query successfully (pid: ${pid})`);
+                return callback();
             }
 
             return pg.query(terminateQuery(pid), (err, result) => {
                 if (err) {
-                    return this.logger.error({ error: err });
+                    return callback(err);
                 }
 
                 const isTerminated = result.rows.length && result.rows[0].terminated;
 
                 if (!isTerminated) {
-                    return this.logger.error({ error: new Error(`Unable to terminate "copy ${actionType}" stream query (pid: ${pid})`) });
+                    return callback(new Error(`Unable to terminate "copy ${actionType}" stream query (pid: ${pid})`));
                 }
 
-                return this.logger.info(`Terminated "copy ${actionType}" stream query successfully (pid: ${pid})`);
+                return callback();
             });
         });
     }
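
Renaming the private _cancel to a public cancel(pid, action, callback) means the module now reports outcomes through the callback instead of logging them; the caller owns the logger and the wording of the messages. A self-contained usage sketch; the stubbed streamCopy and logger below are stand-ins, not the real implementations:

// Stand-ins so the snippet runs on its own.
const logger = { error: (ctx) => console.error(ctx), info: (msg) => console.log(msg) };
const StreamCopy = { ACTION_TO: 'to' };
const streamCopy = {
    // Fake cancel that reports a termination failure, to exercise the error path.
    cancel (pid, action, callback) {
        callback(new Error(`Unable to terminate "copy ${action}" stream query (pid: ${pid})`));
    }
};

streamCopy.cancel(1234, StreamCopy.ACTION_TO, (err) => {
    if (err) {
        // Logging now happens at the call site, not inside stream-copy.
        return logger.error({ error: err });
    }

    logger.info('copy query cancelled or terminated');
});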
