// CartoDB-SQL-API/app/controllers/copy_controller.js
'use strict';
const userMiddleware = require('../middlewares/user');
const errorMiddleware = require('../middlewares/error');
const authorizationMiddleware = require('../middlewares/authorization');
const connectionParamsMiddleware = require('../middlewares/connection-params');
const { initializeProfilerMiddleware } = require('../middlewares/profiler');
const rateLimitsMiddleware = require('../middlewares/rate-limit');
const dbQuotaMiddleware = require('../middlewares/db-quota');
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimitsMiddleware;
const errorHandlerFactory = require('../services/error_handler_factory');
const StreamCopy = require('../services/stream_copy');
const StreamCopyMetrics = require('../services/stream_copy_metrics');
const Throttler = require('../services/throttler-stream');
const zlib = require('zlib');
const { PassThrough } = require('stream');
const params = require('../middlewares/params');
const bodyParserMiddleware = require('../middlewares/body-parser');
module.exports = class CopyController {
constructor (metadataBackend, userDatabaseService, userLimitsService, logger) {
this.metadataBackend = metadataBackend;
this.userDatabaseService = userDatabaseService;
this.userLimitsService = userLimitsService;
this.logger = logger;
}
route (sqlRouter) {
const copyFromMiddlewares = endpointGroup => {
return [
initializeProfilerMiddleware('copyfrom'),
userMiddleware(this.metadataBackend),
rateLimitsMiddleware(this.userLimitsService, endpointGroup),
authorizationMiddleware(this.metadataBackend),
connectionParamsMiddleware(this.userDatabaseService),
dbQuotaMiddleware(),
params({ strategy: 'copyfrom' }),
handleCopyFrom(this.logger),
errorHandler(this.logger),
errorMiddleware()
];
};
const copyToMiddlewares = endpointGroup => {
return [
bodyParserMiddleware(),
initializeProfilerMiddleware('copyto'),
userMiddleware(this.metadataBackend),
rateLimitsMiddleware(this.userLimitsService, endpointGroup),
authorizationMiddleware(this.metadataBackend),
connectionParamsMiddleware(this.userDatabaseService),
params({ strategy: 'copyto' }),
handleCopyTo(this.logger),
errorHandler(this.logger),
errorMiddleware()
];
};
sqlRouter.post('/copyfrom', copyFromMiddlewares(RATE_LIMIT_ENDPOINTS_GROUPS.COPY_FROM));
sqlRouter.get('/copyto', copyToMiddlewares(RATE_LIMIT_ENDPOINTS_GROUPS.COPY_TO));
sqlRouter.post('/copyto', copyToMiddlewares(RATE_LIMIT_ENDPOINTS_GROUPS.COPY_TO));
}
};
/**
 * Builds the middleware that runs a COPY TO query and streams its output
 * back to the HTTP client as an octet-stream attachment, recording size
 * and row-count metrics along the way.
 *
 * @param {Object} logger - logger passed to StreamCopy/StreamCopyMetrics
 * @returns {Function} express-style middleware (req, res, next)
 */
function handleCopyTo (logger) {
    return function handleCopyToMiddleware (req, res, next) {
        const { userDbParams, user } = res.locals;
        const { sql, filename } = res.locals.params;

        // It is not sure: nginx may choose not to compress the body,
        // but we want to know it and save it in the metrics.
        // https://github.com/CartoDB/CartoDB-SQL-API/issues/515
        const acceptEncoding = req.get('accept-encoding');
        const isGzip = acceptEncoding && acceptEncoding.includes('gzip');

        const streamCopy = new StreamCopy(sql, userDbParams, logger);
        const metrics = new StreamCopyMetrics(logger, 'copyto', sql, user, isGzip);

        res.header('Content-Disposition', `attachment; filename=${encodeURIComponent(filename)}`);
        res.header('Content-Type', 'application/octet-stream');

        streamCopy.getPGStream(StreamCopy.ACTION_TO, (err, pgstream) => {
            if (err) {
                return next(err);
            }

            // Close the metrics record with the failure and bail out.
            const abortWith = (streamError) => {
                metrics.end(null, streamError);
                return next(streamError);
            };

            pgstream
                .on('data', (chunk) => metrics.addSize(chunk.length))
                .on('error', abortWith)
                .on('end', () => metrics.end(streamCopy.getRowCount()))
                .pipe(res)
                // A client that goes away mid-stream is surfaced as a pgstream error.
                .on('close', () => pgstream.emit('error', new Error('Connection closed by client')))
                .on('error', (resError) => pgstream.emit('error', resError));
        });
    };
}
/**
 * Builds the middleware that streams the (optionally gzip-compressed)
 * request body into PostgreSQL through a COPY FROM query, enforcing the
 * remaining DB quota and a maximum POST size while collecting metrics.
 *
 * @param {Object} logger - logger passed to StreamCopy/StreamCopyMetrics
 * @returns {Function} express-style middleware (req, res, next)
 */
function handleCopyFrom (logger) {
    return function handleCopyFromMiddleware (req, res, next) {
        const { userDbParams, user, dbRemainingQuota } = res.locals;
        const { sql } = res.locals.params;

        const isGzip = req.get('content-encoding') === 'gzip';
        const COPY_FROM_MAX_POST_SIZE = global.settings.copy_from_max_post_size || 2 * 1024 * 1024 * 1024; // 2 GB
        const COPY_FROM_MAX_POST_SIZE_PRETTY = global.settings.copy_from_max_post_size_pretty || '2 GB';

        const streamCopy = new StreamCopy(sql, userDbParams, logger);
        // Gunzip compressed bodies; pass plain ones through untouched.
        const decompress = isGzip ? zlib.createGunzip() : new PassThrough();
        const metrics = new StreamCopyMetrics(logger, 'copyfrom', sql, user, isGzip);

        streamCopy.getPGStream(StreamCopy.ACTION_FROM, (err, pgstream) => {
            if (err) {
                return next(err);
            }

            const throttle = new Throttler(pgstream);

            // Per-chunk accounting on the decompressed data: abort the copy
            // as soon as either the DB quota or the POST size cap is exceeded.
            const checkLimits = (chunk) => {
                metrics.addSize(chunk.length);

                if (metrics.size > dbRemainingQuota) {
                    return pgstream.emit('error', new Error('DB Quota exceeded'));
                }

                if ((metrics.gzipSize || metrics.size) > COPY_FROM_MAX_POST_SIZE) {
                    return pgstream.emit('error', new Error(
                        `COPY FROM maximum POST size of ${COPY_FROM_MAX_POST_SIZE_PRETTY} exceeded`
                    ));
                }
            };

            req
                .on('data', (chunk) => isGzip ? metrics.addGzipSize(chunk.length) : undefined)
                .on('error', (reqError) => {
                    metrics.end(null, reqError);
                    pgstream.emit('error', reqError);
                })
                .on('close', () => pgstream.emit('error', new Error('Connection closed by client')))
                .pipe(throttle)
                .pipe(decompress)
                .on('data', checkLimits)
                .on('error', (zipError) => {
                    zipError.message = `Error while gunzipping: ${zipError.message}`;
                    metrics.end(null, zipError);
                    pgstream.emit('error', zipError);
                })
                .pipe(pgstream)
                .on('error', (pgError) => {
                    metrics.end(null, pgError);
                    return next(pgError);
                })
                .on('end', () => {
                    metrics.end(streamCopy.getRowCount());

                    const { time, rows } = metrics;

                    if (!rows) {
                        return next(new Error('No rows copied'));
                    }

                    res.send({
                        time,
                        total_rows: rows
                    });
                });
        });
    };
}
/**
 * Builds the terminal error-handling middleware for the copy endpoints.
 *
 * If headers have already been sent (the response is mid-stream), the
 * regular error middleware can no longer set a status code, so the error
 * is logged and appended to the response body as JSON before closing the
 * stream. Otherwise the error is forwarded to the next error middleware.
 *
 * Fix: the local variable used to shadow the enclosing function's name
 * (`const errorHandler = errorHandlerFactory(err)`), which was confusing
 * and hid the outer binding; it is renamed to `handler`.
 *
 * @param {Object} logger - logger used for mid-stream failures
 * @returns {Function} express-style error middleware (err, req, res, next)
 */
function errorHandler (logger) {
    return function errorHandlerMiddleware (err, req, res, next) {
        if (res.headersSent) {
            logger.error(err);
            const handler = errorHandlerFactory(err);
            res.write(JSON.stringify(handler.getResponse()));
            res.end();
        } else {
            return next(err);
        }
    };
}