Merge pull request #663 from CartoDB/dgaubert/ch87185/log-system-revamp-sql-api

Log system revamp
This commit is contained in:
Daniel G. Aubert 2020-07-06 13:59:59 +02:00 committed by GitHub
commit 36fd4a85c1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
53 changed files with 998 additions and 1054 deletions

22
NEWS.md
View File

@ -1,5 +1,27 @@
# Changelog # Changelog
## 7.0.0
Released 2020-mm-dd
Breaking changes:
- Log system revamp:
- Logs to stdout, disabled while testing
- Use header `X-Request-Id`, or create a new `uuid` when not present, to identify log entries
- Be able to set log level from env variable `LOG_LEVEL`, useful while testing: `LOG_LEVEL=info npm test`; even more human-readable: `LOG_LEVEL=info npm t | ./node_modules/.bin/pino-pretty`
- Be able to reduce the footprint in the final log file depending on the environment
- Use one logger for every service: Queries, Batch Queries (Jobs), and Data Ingestion (CopyTo/CopyFrom)
- Stop using headers such as: `X-SQL-API-Log`, `X-SQL-API-Profiler`, and `X-SQL-API-Errors` as a way to log info.
- Be able to tag requests with labels as an easier way to provide business metrics
- Metro: Add log-collector utility (`metro`), it will be moved to its own repository. Attaching it here for development purposes. Try it with the following command `LOG_LEVEL=info npm t | node metro`
- Metro: Creates `metrics-collector.js` a stream to update Prometheus' counters and histograms and exposes them via Express' app (`:9145/metrics`). Use the ones defined in `grok_exporter`
Announcements:
- Profiler is always set. No need to check its existence anymore
- Unify profiler usage for every endpoint
Bug fixes:
- Avoid hung requests while fetching user identifier
## 6.0.0 ## 6.0.0
Released 2020-06-05 Released 2020-06-05

137
app.js
View File

@ -5,6 +5,8 @@
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
const fqdn = require('@carto/fqdn-sync'); const fqdn = require('@carto/fqdn-sync');
const serverOptions = require('./lib/server-options');
const { logger } = serverOptions();
const argv = require('yargs') const argv = require('yargs')
.usage('Usage: node $0 <environment> [options]') .usage('Usage: node $0 <environment> [options]')
@ -23,7 +25,7 @@ const environmentArg = argv._[0] || process.env.NODE_ENV || 'development';
const configurationFile = path.resolve(argv.config || './config/environments/' + environmentArg + '.js'); const configurationFile = path.resolve(argv.config || './config/environments/' + environmentArg + '.js');
if (!fs.existsSync(configurationFile)) { if (!fs.existsSync(configurationFile)) {
console.error('Configuration file "%s" does not exist', configurationFile); logger.fatal(new Error(`Configuration file "${configurationFile}" does not exist`));
process.exit(1); process.exit(1);
} }
@ -34,43 +36,13 @@ process.env.NODE_ENV = ENVIRONMENT;
const availableEnvironments = ['development', 'production', 'test', 'staging']; const availableEnvironments = ['development', 'production', 'test', 'staging'];
// sanity check arguments if (!availableEnvironments.includes(ENVIRONMENT)) {
if (availableEnvironments.indexOf(ENVIRONMENT) === -1) { logger.fatal(new Error(`Invalid environment argument, valid ones: ${availableEnvironments.join(', ')}`));
console.error('node app.js [environment]');
console.error('Available environments: ' + availableEnvironments.join(', '));
process.exit(1); process.exit(1);
} }
global.settings.api_hostname = fqdn.hostname(); global.settings.api_hostname = fqdn.hostname();
global.log4js = require('log4js');
const log4jsConfig = {
appenders: [],
replaceConsole: true
};
if (global.settings.log_filename) {
const logFilename = path.resolve(global.settings.log_filename);
const logDirectory = path.dirname(logFilename);
if (!fs.existsSync(logDirectory)) {
console.error('Log filename directory does not exist: ' + logDirectory);
process.exit(1);
}
console.log('Logs will be written to ' + logFilename);
log4jsConfig.appenders.push(
{ type: 'file', absolute: true, filename: logFilename }
);
} else {
log4jsConfig.appenders.push(
{ type: 'console', layout: { type: 'basic' } }
);
}
global.log4js.configure(log4jsConfig);
global.logger = global.log4js.getLogger();
const version = require('./package').version;
const StatsClient = require('./lib/stats/client'); const StatsClient = require('./lib/stats/client');
if (global.settings.statsd) { if (global.settings.statsd) {
@ -81,89 +53,64 @@ if (global.settings.statsd) {
} }
const statsClient = StatsClient.getInstance(global.settings.statsd); const statsClient = StatsClient.getInstance(global.settings.statsd);
const { version, name } = require('./package');
const createServer = require('./lib/server'); const createServer = require('./lib/server');
const server = createServer(statsClient); const server = createServer(statsClient);
const listener = server.listen(global.settings.node_port, global.settings.node_host); const listener = server.listen(global.settings.node_port, global.settings.node_host);
listener.on('listening', function () { listener.on('listening', function () {
console.info('Using Node.js %s', process.version); const { address, port } = listener.address();
console.info('Using configuration file "%s"', configurationFile); logger.info({ 'Node.js': process.version, pid: process.pid, environment: process.env.NODE_ENV, [name]: version, address, port, config: configurationFile }, `${name} initialized successfully`);
console.log(
'CartoDB SQL API %s listening on %s:%s PID=%d (%s)',
version, global.settings.node_host, global.settings.node_port, process.pid, ENVIRONMENT
);
}); });
process.on('uncaughtException', function (err) { process.on('uncaughtException', function (err) {
global.logger.error('Uncaught exception: ' + err.stack); logger.error(err, 'Uncaught exception');
}); });
process.on('SIGHUP', function () { const exitProcess = logger.finish((err, finalLogger, listener, signal, killTimeout) => {
global.log4js.clearAndShutdownAppenders(function () { scheduleForcedExit(killTimeout, finalLogger);
global.log4js.configure(log4jsConfig);
global.logger = global.log4js.getLogger();
console.log('Log files reloaded');
});
if (server.batch && server.batch.logger) { finalLogger.info(`Process has received signal: ${signal}`);
server.batch.logger.reopenFileStreams();
let code = 0;
if (err) {
code = 1;
finalLogger.fatal(err);
} }
if (server.dataIngestionLogger) { finalLogger.info(`Process is going to exit with code: ${code}`);
server.dataIngestionLogger.reopenFileStreams(); listener.close(() => process.exit(code));
}
});
addHandlers({ killTimeout: 45000 }); listener.close(() => {
server.batch.stop(() => {
server.batch.drain((err) => {
if (err) {
finalLogger.error(err);
return process.exit(1);
}
function addHandlers ({ killTimeout }) { process.exit(code);
// FIXME: minimize the number of 'uncaughtException' before uncomment the following line
// process.on('uncaughtException', exitProcess(listener, logger, killTimeout));
process.on('unhandledRejection', exitProcess({ killTimeout }));
process.on('SIGINT', exitProcess({ killTimeout }));
process.on('SIGTERM', exitProcess({ killTimeout }));
}
function exitProcess ({ killTimeout }) {
return function exitProcessFn (signal) {
scheduleForcedExit({ killTimeout });
let code = 0;
if (!['SIGINT', 'SIGTERM'].includes(signal)) {
const err = signal instanceof Error ? signal : new Error(signal);
signal = undefined;
code = 1;
global.logger.fatal(err);
} else {
global.logger.info(`Process has received signal: ${signal}`);
}
listener.close(function () {
server.batch.stop(function () {
server.batch.drain(function (err) {
if (err) {
global.logger.error(err);
return process.exit(1);
}
global.logger.info(`Process is going to exit with code: ${code}`);
global.log4js.shutdown(function () {
server.batch.logger.end(function () {
process.exit(code);
});
});
});
}); });
}); });
}; });
});
function addHandlers (listener, killTimeout) {
// FIXME: minimize the number of 'uncaughtException' before uncomment the following line
// process.on('uncaughtException', (err) => exitProcess(err, listener, 'uncaughtException', killTimeout));
process.on('unhandledRejection', (err) => exitProcess(err, listener, 'unhandledRejection', killTimeout));
process.on('ENOMEM', (err) => exitProcess(err, listener, 'ENOMEM', killTimeout));
process.on('SIGINT', () => exitProcess(null, listener, 'SIGINT', killTimeout));
process.on('SIGTERM', () => exitProcess(null, listener, 'SIGTERM', killTimeout));
} }
function scheduleForcedExit ({ killTimeout }) { addHandlers(listener, 45000);
function scheduleForcedExit (killTimeout, finalLogger) {
// Schedule exit if there is still ongoing work to deal with // Schedule exit if there is still ongoing work to deal with
const killTimer = setTimeout(() => { const killTimer = setTimeout(() => {
global.logger.info('Process didn\'t close on time. Force exit'); finalLogger.info('Process didn\'t close on time. Force exit');
process.exit(1); process.exit(1);
}, killTimeout); }, killTimeout);

View File

@ -35,10 +35,6 @@ module.exports.routes = {
// X-SQLAPI-Profile header containing elapsed timing for various // X-SQLAPI-Profile header containing elapsed timing for various
// steps taken for producing the response. // steps taken for producing the response.
module.exports.useProfiler = true; module.exports.useProfiler = true;
module.exports.log_format = '[:date] :remote-addr :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-SQLAPI-Profiler]) (:res[X-SQLAPI-Errors]) (:res[X-SQLAPI-Log])';
// If log_filename is given logs will be written there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
module.exports.log_filename = undefined;
// Regular expression pattern to extract username // Regular expression pattern to extract username
// from hostname. Must have a single grabbing block. // from hostname. Must have a single grabbing block.
module.exports.user_from_host = '^(.*)\\.localhost'; module.exports.user_from_host = '^(.*)\\.localhost';
@ -61,7 +57,6 @@ module.exports.db_port = '5432';
module.exports.db_batch_port = '5432'; module.exports.db_batch_port = '5432';
module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours
module.exports.batch_query_timeout = 12 * 3600 * 1000; // 12 hours in milliseconds module.exports.batch_query_timeout = 12 * 3600 * 1000; // 12 hours in milliseconds
module.exports.batch_log_filename = undefined;
module.exports.copy_timeout = "'5h'"; module.exports.copy_timeout = "'5h'";
module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024 // 2 GB; module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024 // 2 GB;
module.exports.copy_from_max_post_size_pretty = '2 GB'; module.exports.copy_from_max_post_size_pretty = '2 GB';
@ -148,7 +143,6 @@ module.exports.ratelimits = {
} }
module.exports.validatePGEntitiesAccess = false; module.exports.validatePGEntitiesAccess = false;
module.exports.dataIngestionLogPath = undefined;
module.exports.logQueries = true; module.exports.logQueries = true;
module.exports.maxQueriesLogLength = 2000; module.exports.maxQueriesLogLength = 2000;

View File

@ -35,10 +35,6 @@ module.exports.routes = {
// X-SQLAPI-Profile header containing elapsed timing for various // X-SQLAPI-Profile header containing elapsed timing for various
// steps taken for producing the response. // steps taken for producing the response.
module.exports.useProfiler = true; module.exports.useProfiler = true;
module.exports.log_format = '[:date] :remote-addr :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-SQLAPI-Profiler]) (:res[X-SQLAPI-Errors]) (:res[X-SQLAPI-Log])';
// If log_filename is given logs will be written there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
module.exports.log_filename = 'logs/cartodb-sql-api.log';
// Regular expression pattern to extract username // Regular expression pattern to extract username
// from hostname. Must have a single grabbing block. // from hostname. Must have a single grabbing block.
module.exports.user_from_host = '^(.*)\\.cartodb\\.com$'; module.exports.user_from_host = '^(.*)\\.cartodb\\.com$';
@ -62,7 +58,6 @@ module.exports.db_port = '6432';
module.exports.db_batch_port = '5432'; module.exports.db_batch_port = '5432';
module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours
module.exports.batch_query_timeout = 12 * 3600 * 1000; // 12 hours in milliseconds module.exports.batch_query_timeout = 12 * 3600 * 1000; // 12 hours in milliseconds
module.exports.batch_log_filename = 'logs/batch-queries.log';
module.exports.copy_timeout = "'5h'"; module.exports.copy_timeout = "'5h'";
module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024 // 2 GB; module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024 // 2 GB;
module.exports.copy_from_max_post_size_pretty = '2 GB'; module.exports.copy_from_max_post_size_pretty = '2 GB';
@ -152,7 +147,6 @@ module.exports.ratelimits = {
} }
module.exports.validatePGEntitiesAccess = false; module.exports.validatePGEntitiesAccess = false;
module.exports.dataIngestionLogPath = 'logs/data-ingestion.log';
module.exports.logQueries = true; module.exports.logQueries = true;
module.exports.maxQueriesLogLength = 1024; module.exports.maxQueriesLogLength = 1024;

View File

@ -35,10 +35,6 @@ module.exports.routes = {
// X-SQLAPI-Profile header containing elapsed timing for various // X-SQLAPI-Profile header containing elapsed timing for various
// steps taken for producing the response. // steps taken for producing the response.
module.exports.useProfiler = true; module.exports.useProfiler = true;
module.exports.log_format = '[:date] :req[X-Real-IP] :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-SQLAPI-Profiler]) (:res[X-SQLAPI-Errors]) (:res[X-SQLAPI-Log])';
// If log_filename is given logs will be written there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
module.exports.log_filename = 'logs/cartodb-sql-api.log';
// Regular expression pattern to extract username // Regular expression pattern to extract username
// from hostname. Must have a single grabbing block. // from hostname. Must have a single grabbing block.
module.exports.user_from_host = '^(.*)\\.cartodb\\.com$'; module.exports.user_from_host = '^(.*)\\.cartodb\\.com$';
@ -62,7 +58,6 @@ module.exports.db_port = '6432';
module.exports.db_batch_port = '5432'; module.exports.db_batch_port = '5432';
module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours
module.exports.batch_query_timeout = 12 * 3600 * 1000; // 12 hours in milliseconds module.exports.batch_query_timeout = 12 * 3600 * 1000; // 12 hours in milliseconds
module.exports.batch_log_filename = 'logs/batch-queries.log';
module.exports.copy_timeout = "'5h'"; module.exports.copy_timeout = "'5h'";
module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024 // 2 GB; module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024 // 2 GB;
module.exports.copy_from_max_post_size_pretty = '2 GB'; module.exports.copy_from_max_post_size_pretty = '2 GB';
@ -149,7 +144,6 @@ module.exports.ratelimits = {
} }
module.exports.validatePGEntitiesAccess = false; module.exports.validatePGEntitiesAccess = false;
module.exports.dataIngestionLogPath = 'logs/data-ingestion.log';
module.exports.logQueries = true; module.exports.logQueries = true;
module.exports.maxQueriesLogLength = 1024; module.exports.maxQueriesLogLength = 1024;

View File

@ -35,10 +35,6 @@ module.exports.routes = {
// X-SQLAPI-Profile header containing elapsed timing for various // X-SQLAPI-Profile header containing elapsed timing for various
// steps taken for producing the response. // steps taken for producing the response.
module.exports.useProfiler = true; module.exports.useProfiler = true;
module.exports.log_format = '[:date] :remote-addr :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-SQLAPI-Profiler]) (:res[X-SQLAPI-Errors]) (:res[X-SQLAPI-Log])';
// If log_filename is given logs will be written there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
// module.exports.log_filename = 'logs/cartodb-sql-api.log';
// Regular expression pattern to extract username // Regular expression pattern to extract username
// from hostname. Must have a single grabbing block. // from hostname. Must have a single grabbing block.
module.exports.user_from_host = '^([^.]*)\\.'; module.exports.user_from_host = '^([^.]*)\\.';
@ -59,7 +55,6 @@ module.exports.db_port = '5432';
module.exports.db_batch_port = '5432'; module.exports.db_batch_port = '5432';
module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours
module.exports.batch_query_timeout = 5 * 1000; // 5 seconds in milliseconds module.exports.batch_query_timeout = 5 * 1000; // 5 seconds in milliseconds
//module.exports.batch_log_filename = 'logs/batch-queries.log';
module.exports.copy_timeout = "'5h'"; module.exports.copy_timeout = "'5h'";
module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024 // 2 GB; module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024 // 2 GB;
module.exports.copy_from_max_post_size_pretty = '2 GB'; module.exports.copy_from_max_post_size_pretty = '2 GB';
@ -149,7 +144,6 @@ module.exports.ratelimits = {
} }
module.exports.validatePGEntitiesAccess = false; module.exports.validatePGEntitiesAccess = false;
//module.exports.dataIngestionLogPath = 'logs/data-ingestion.log';
module.exports.logQueries = true; module.exports.logQueries = true;
module.exports.maxQueriesLogLength = 1024; module.exports.maxQueriesLogLength = 1024;

View File

@ -10,8 +10,6 @@ const JobsWipController = require('./jobs-wip-controller');
const error = require('./middlewares/error'); const error = require('./middlewares/error');
const pubSubMetrics = require('./middlewares/pubsub-metrics'); const pubSubMetrics = require('./middlewares/pubsub-metrics');
const BatchLogger = require('../batch/batch-logger');
const JobPublisher = require('../batch/pubsub/job-publisher'); const JobPublisher = require('../batch/pubsub/job-publisher');
const JobQueue = require('../batch/job-queue'); const JobQueue = require('../batch/job-queue');
const JobBackend = require('../batch/job-backend'); const JobBackend = require('../batch/job-backend');
@ -20,8 +18,7 @@ const JobService = require('../batch/job-service');
const PubSubMetricsService = require('../services/pubsub-metrics'); const PubSubMetricsService = require('../services/pubsub-metrics');
module.exports = class ApiRouter { module.exports = class ApiRouter {
constructor ({ redisPool, metadataBackend, statsClient, dataIngestionLogger }) { constructor ({ redisPool, metadataBackend, statsClient, logger }) {
const logger = new BatchLogger(global.settings.batch_log_filename, 'batch-queries');
const jobPublisher = new JobPublisher(redisPool); const jobPublisher = new JobPublisher(redisPool);
const jobQueue = new JobQueue(metadataBackend, jobPublisher, logger); const jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
const jobBackend = new JobBackend(metadataBackend, jobQueue, logger); const jobBackend = new JobBackend(metadataBackend, jobQueue, logger);
@ -36,7 +33,7 @@ module.exports = class ApiRouter {
this.sqlRouter = new SqlRouter({ this.sqlRouter = new SqlRouter({
metadataBackend, metadataBackend,
statsClient, statsClient,
dataIngestionLogger, logger,
jobService jobService
}); });
} }

View File

@ -5,6 +5,7 @@ const queryTables = require('cartodb-query-tables').queryTables;
module.exports = function affectedTables () { module.exports = function affectedTables () {
return function affectedTablesMiddleware (req, res, next) { return function affectedTablesMiddleware (req, res, next) {
const { logger } = res.locals;
const { sql } = res.locals.params; const { sql } = res.locals.params;
const { authDbParams } = res.locals; const { authDbParams } = res.locals;
const pg = new PSQL(authDbParams); const pg = new PSQL(authDbParams);
@ -13,15 +14,13 @@ module.exports = function affectedTables () {
.then(affectedTables => { .then(affectedTables => {
res.locals.affectedTables = affectedTables; res.locals.affectedTables = affectedTables;
if (req.profiler) { req.profiler.done('queryExplain');
req.profiler.done('queryExplain');
}
return next(); return next();
}) })
.catch(err => { .catch(err => {
const message = (err && err.message) || 'unknown error'; err.message = `Error on query explain "${sql}": ${err.message}`;
console.error('Error on query explain \'%s\': %s', sql, message); logger.warn({ error: err });
return next(); return next();
}); });

View File

@ -9,9 +9,7 @@ module.exports = function authorization (metadataBackend, forceToBeMaster = fals
const credentials = getCredentialsFromRequest(req); const credentials = getCredentialsFromRequest(req);
if (!userMatches(credentials, user)) { if (!userMatches(credentials, user)) {
if (req.profiler) { req.profiler.done('authorization');
req.profiler.done('authorization');
}
return next(new Error('permission denied')); return next(new Error('permission denied'));
} }
@ -22,9 +20,7 @@ module.exports = function authorization (metadataBackend, forceToBeMaster = fals
const authApi = new AuthApi(req, params); const authApi = new AuthApi(req, params);
authApi.verifyCredentials(function (err, authorizationLevel) { authApi.verifyCredentials(function (err, authorizationLevel) {
if (req.profiler) { req.profiler.done('authorization');
req.profiler.done('authorization');
}
if (err) { if (err) {
return next(err); return next(err);

View File

@ -6,9 +6,7 @@ module.exports = function connectionParams (userDatabaseService) {
userDatabaseService.getConnectionParams(user, apikeyToken, authorizationLevel, userDatabaseService.getConnectionParams(user, apikeyToken, authorizationLevel,
function (err, userDbParams, authDbParams) { function (err, userDbParams, authDbParams) {
if (req.profiler) { req.profiler.done('getConnectionParams');
req.profiler.done('getConnectionParams');
}
if (err) { if (err) {
return next(err); return next(err);

View File

@ -1,45 +1,27 @@
'use strict'; 'use strict';
const errorHandlerFactory = require('../../services/error-handler-factory'); const errorHandlerFactory = require('../../services/error-handler-factory');
const { stringifyForLogs } = require('../../utils/logs');
const MAX_ERROR_STRING_LENGTH = 1024;
module.exports = function error () { module.exports = function error () {
return function errorMiddleware (err, req, res, next) { return function errorMiddleware (err, req, res, next) {
const { logger } = res.locals;
const errorHandler = errorHandlerFactory(err); const errorHandler = errorHandlerFactory(err);
const errorResponse = errorHandler.getResponse(); const errorResponse = errorHandler.getResponse();
if (global.settings.environment === 'development') { logger.error({ error: err });
errorResponse.stack = err.stack;
}
if (global.settings.environment !== 'test') {
// TODO: email this Exception report
console.error('EXCEPTION REPORT: ' + err.stack);
}
// Force inline content disposition // Force inline content disposition
res.header('Content-Disposition', 'inline'); res.header('Content-Disposition', 'inline');
if (req && req.profiler) {
req.profiler.done('finish');
res.header('X-SQLAPI-Profiler', req.profiler.toJSONString());
}
setErrorHeader(errorHandler, res);
res.header('Content-Type', 'application/json; charset=utf-8'); res.header('Content-Type', 'application/json; charset=utf-8');
res.status(getStatusError(errorHandler, req)); res.status(getStatusError(errorHandler, req));
if (req.query && req.query.callback) { if (req.query && req.query.callback) {
res.jsonp(errorResponse); res.jsonp(errorResponse);
} else { } else {
res.json(errorResponse); res.json(errorResponse);
} }
if (req && req.profiler) {
res.req.profiler.sendStats();
}
return next(); return next();
}; };
}; };
@ -54,15 +36,3 @@ function getStatusError (errorHandler, req) {
return statusError; return statusError;
} }
function setErrorHeader (errorHandler, res) {
const errorsLog = {
context: errorHandler.context,
detail: errorHandler.detail,
hint: errorHandler.hint,
statusCode: errorHandler.http_status,
message: errorHandler.message
};
res.set('X-SQLAPI-Errors', stringifyForLogs(errorsLog, MAX_ERROR_STRING_LENGTH));
}

View File

@ -0,0 +1,23 @@
'use strict';
const MAX_SQL_LENGTH = (global.settings.logQueries && global.settings.maxQueriesLogLength) || 1024;
module.exports = function logQuery () {
if (!global.settings.logQueries) {
return function noopLogQuery (req, res, next) {
return next();
};
}
return function logQueryMiddleware (req, res, next) {
const { logger } = res.locals;
logger.info({ sql: ensureMaxQueryLength(res.locals.params.sql) });
return next();
};
};
function ensureMaxQueryLength (sql, length = MAX_SQL_LENGTH) {
return sql.substring(0, length);
}

View File

@ -1,99 +0,0 @@
'use strict';
const { stringifyForLogs } = require('../../utils/logs');
const MAX_SQL_LENGTH = (global.settings.logQueries && global.settings.maxQueriesLogLength) || 1024;
// This is used to set a hard limit to the header size
// While Node accepts headers of up to 8192 character, different libraries impose other limits
// This might break the JSON structure of the log, but avoids responses being dropped by varnish
const HEADER_HARD_LIMIT = 4096;
const TYPES = {
QUERY: 'query',
JOB: 'job'
};
module.exports = function log (sqlType = TYPES.QUERY) {
return function logMiddleware (req, res, next) {
const logObj = {
request: {
sql: prepareSQL(res.locals.params.sql, sqlType)
}
};
res.set('X-SQLAPI-Log', stringifyForLogs(logObj).substring(0, HEADER_HARD_LIMIT));
return next();
};
};
module.exports.TYPES = TYPES;
function prepareSQL (sql, sqlType) {
if (!sql || !global.settings.logQueries) {
return null;
}
if (typeof sql === 'string') {
return {
type: sqlType,
sql: ensureMaxQueryLength(sql)
};
}
if (Array.isArray(sql)) {
const lengthPerQuery = MAX_SQL_LENGTH / sql.length;
return {
type: sqlType,
sql: sql.map(q => ensureMaxQueryLength(q, lengthPerQuery))
};
}
if (sql.query && Array.isArray(sql.query)) {
return {
type: sqlType,
sql: prepareBatchFallbackQuery(sql)
};
}
}
/**
* Process a Batch API fallback query controlling the queries length
* We need to create a new object avoiding original modifications
*
* @param {Object} sql
*/
function prepareBatchFallbackQuery (sql) {
const fallbackQuery = {};
if (sql.onsuccess) {
fallbackQuery.onsuccess = ensureMaxQueryLength(sql.onsuccess);
}
if (sql.onerror) {
fallbackQuery.onerror = ensureMaxQueryLength(sql.onerror);
}
fallbackQuery.query = sql.query.map(query => {
const subquery = {
query: ensureMaxQueryLength(query.query)
};
if (query.onsuccess) {
subquery.onsuccess = ensureMaxQueryLength(query.onsuccess);
}
if (query.onerror) {
subquery.onerror = ensureMaxQueryLength(query.onerror);
}
return subquery;
});
return fallbackQuery;
}
function ensureMaxQueryLength (sql, length = MAX_SQL_LENGTH) {
return sql.substring(0, length);
}

View File

@ -1,27 +1,16 @@
'use strict'; 'use strict';
module.exports = function logger () { const uuid = require('uuid');
if (!global.log4js) {
return function dummyLoggerMiddleware (req, res, next) {
next();
};
}
const options = { module.exports = function initLogger ({ logger, logOnEvent = 'finish' }) {
level: 'info', return function initLoggerMiddleware (req, res, next) {
buffer: true, const id = req.get('X-Request-Id') || uuid.v4();
// log4js provides a tokens solution as express but it does not provide the request/response in the callback. res.locals.logger = logger.child({ id });
// Thus it is not possible to extract relevant information from them.
// This is a workaround to be able to access request/response.
format: function (req, res, format) {
const defaultFormat = ':remote-addr :method :req[Host]:url :status :response-time ms -> :res[Content-Type]';
const logFormat = global.settings.log_format || defaultFormat;
return format(logFormat); res.locals.logger.info({ request: req });
} res.on(logOnEvent, () => res.locals.logger.info({ response: res }));
res.on('close', () => res.locals.logger.info({ end: true }));
next();
}; };
const logger = global.log4js.getLogger();
return global.log4js.connectLogger(logger, options);
}; };

View File

@ -1,36 +1,31 @@
'use strict'; 'use strict';
const Profiler = require('../../stats/profiler-proxy'); const Profiler = require('../../stats/profiler-proxy');
const { name: prefix } = require('../../../package.json');
module.exports = function profiler ({ statsClient }) { module.exports = function profiler ({ statsClient, logOnEvent = 'finish' }) {
return function profilerMiddleware (req, res, next) { return function profilerMiddleware (req, res, next) {
const start = new Date();
const { logger } = res.locals;
req.profiler = new Profiler({ req.profiler = new Profiler({
profile: global.settings.useProfiler, profile: global.settings.useProfiler,
statsd_client: statsClient statsd_client: statsClient
}); });
next(); req.profiler.start(prefix);
};
};
module.exports.initializeProfiler = function initializeProfiler (label) { res.on(logOnEvent, () => {
return function initializeProfilerMiddleware (req, res, next) { req.profiler.add({ response: new Date() - start });
if (req.profiler) {
req.profiler.start(`sqlapi.${label}`);
}
next();
};
};
module.exports.finishProfiler = function finishProfiler () {
return function finishProfilerMiddleware (req, res, next) {
if (req.profiler) {
req.profiler.end(); req.profiler.end();
req.profiler.sendStats(); logger.info({ stats: req.profiler.toJSON() });
res.header('X-SQLAPI-Profiler', req.profiler.toJSONString()); try {
} req.profiler.sendStats();
} catch (err) {
logger.warn({ error: err });
}
});
next(); next();
}; };

View File

@ -0,0 +1,15 @@
'use strict';
module.exports = function tag ({ tags, logOnEvent = 'finish' }) {
if (!Array.isArray(tags) || !tags.every((tag) => typeof tag === 'string')) {
throw new Error('Required "tags" option must be a valid Array: [string, string, ...]');
}
return function tagMiddleware (req, res, next) {
const { logger } = res.locals;
res.locals.tags = tags;
res.on(logOnEvent, () => logger.info({ tags: res.locals.tags }));
next();
};
};

View File

@ -5,9 +5,7 @@ module.exports = function timeoutLimits (metadataBackend) {
const { user, authorizationLevel } = res.locals; const { user, authorizationLevel } = res.locals;
metadataBackend.getUserTimeoutRenderLimits(user, function (err, timeoutRenderLimit) { metadataBackend.getUserTimeoutRenderLimits(user, function (err, timeoutRenderLimit) {
if (req.profiler) { req.profiler.done('getUserTimeoutLimits');
req.profiler.done('getUserTimeoutLimits');
}
if (err) { if (err) {
return next(err); return next(err);

View File

@ -6,15 +6,24 @@ module.exports = function user (metadataBackend) {
const cdbRequest = new CdbRequest(); const cdbRequest = new CdbRequest();
return function userMiddleware (req, res, next) { return function userMiddleware (req, res, next) {
res.locals.user = getUserNameFromRequest(req, cdbRequest); const { logger } = res.locals;
getUserId(metadataBackend, res.locals.user, function (err, userId) { try {
res.locals.user = getUserNameFromRequest(req, cdbRequest);
logger.info({ user: res.locals.user });
res.set('Carto-User', res.locals.user);
} catch (err) {
return next(err);
}
metadataBackend.getUserId(res.locals.user, (err, userId) => {
if (err || !userId) { if (err || !userId) {
const error = new Error('Unauthorized'); const error = new Error('Unauthorized');
error.type = 'auth'; error.type = 'auth';
error.subtype = 'user-not-found'; error.subtype = 'user-not-found';
error.http_status = 404; error.http_status = 404;
error.message = errorUserNotFoundMessageTemplate(res.locals.user); error.message = errorUserNotFoundMessageTemplate(res.locals.user);
return next(error); return next(error);
} }
@ -29,15 +38,6 @@ function getUserNameFromRequest (req, cdbRequest) {
return cdbRequest.userByReq(req); return cdbRequest.userByReq(req);
} }
function getUserId (metadataBackend, userName, callback) {
metadataBackend.getUserId(userName, function (err, userId) {
if (err) {
return callback(err);
}
return callback(null, userId);
});
}
function errorUserNotFoundMessageTemplate (user) { function errorUserNotFoundMessageTemplate (user) {
return `Sorry, we can't find CARTO user '${user}'. Please check that you have entered the correct domain.`; return `Sorry, we can't find CARTO user '${user}'. Please check that you have entered the correct domain.`;
} }

View File

@ -1,9 +1,10 @@
'use strict'; 'use strict';
const initLogger = require('../middlewares/logger');
const profiler = require('../middlewares/profiler');
const user = require('../middlewares/user'); const user = require('../middlewares/user');
const authorization = require('../middlewares/authorization'); const authorization = require('../middlewares/authorization');
const connectionParams = require('../middlewares/connection-params'); const connectionParams = require('../middlewares/connection-params');
const { initializeProfiler } = require('../middlewares/profiler');
const dbQuota = require('../middlewares/db-quota'); const dbQuota = require('../middlewares/db-quota');
const bodyParser = require('../middlewares/body-parser'); const bodyParser = require('../middlewares/body-parser');
const rateLimits = require('../middlewares/rate-limit'); const rateLimits = require('../middlewares/rate-limit');
@ -15,11 +16,13 @@ const Throttler = require('../../services/throttler-stream');
const zlib = require('zlib'); const zlib = require('zlib');
const { PassThrough } = require('stream'); const { PassThrough } = require('stream');
const params = require('../middlewares/params'); const params = require('../middlewares/params');
const tag = require('../middlewares/tag');
module.exports = class CopyController { module.exports = class CopyController {
constructor (metadataBackend, userDatabaseService, userLimitsService, logger) { constructor ({ metadataBackend, userDatabaseService, statsdClient, userLimitsService, logger }) {
this.metadataBackend = metadataBackend; this.metadataBackend = metadataBackend;
this.userDatabaseService = userDatabaseService; this.userDatabaseService = userDatabaseService;
this.statsdClient = statsdClient;
this.userLimitsService = userLimitsService; this.userLimitsService = userLimitsService;
this.logger = logger; this.logger = logger;
} }
@ -27,29 +30,33 @@ module.exports = class CopyController {
route (sqlRouter) { route (sqlRouter) {
const copyFromMiddlewares = endpointGroup => { const copyFromMiddlewares = endpointGroup => {
return [ return [
initializeProfiler('copyfrom'), initLogger({ logger: this.logger }),
profiler({ statsClient: this.statsdClient }),
tag({ tags: ['ingestion', 'copyfrom'] }),
user(this.metadataBackend), user(this.metadataBackend),
rateLimits(this.userLimitsService, endpointGroup), rateLimits(this.userLimitsService, endpointGroup),
authorization(this.metadataBackend), authorization(this.metadataBackend),
connectionParams(this.userDatabaseService), connectionParams(this.userDatabaseService),
dbQuota(), dbQuota(),
params({ strategy: 'copyfrom' }), params({ strategy: 'copyfrom' }),
handleCopyFrom(this.logger), handleCopyFrom(),
errorHandler(this.logger) errorHandler()
]; ];
}; };
const copyToMiddlewares = endpointGroup => { const copyToMiddlewares = endpointGroup => {
return [ return [
initLogger({ logger: this.logger, logOnEvent: 'log' }),
profiler({ statsClient: this.statsdClient, logOnEvent: 'log' }),
tag({ tags: ['ingestion', 'copyto'], logOnEvent: 'log' }),
bodyParser(), bodyParser(),
initializeProfiler('copyto'),
user(this.metadataBackend), user(this.metadataBackend),
rateLimits(this.userLimitsService, endpointGroup), rateLimits(this.userLimitsService, endpointGroup),
authorization(this.metadataBackend), authorization(this.metadataBackend),
connectionParams(this.userDatabaseService), connectionParams(this.userDatabaseService),
params({ strategy: 'copyto' }), params({ strategy: 'copyto' }),
handleCopyTo(this.logger), handleCopyTo({ logger: this.logger }),
errorHandler(this.logger) errorHandler()
]; ];
}; };
@ -59,8 +66,10 @@ module.exports = class CopyController {
} }
}; };
function handleCopyTo (logger) { function handleCopyTo ({ logger: mainLogger }) {
return function handleCopyToMiddleware (req, res, next) { return function handleCopyToMiddleware (req, res, next) {
const { logger } = res.locals;
const { id: requestId } = logger.bindings();
const { userDbParams, user } = res.locals; const { userDbParams, user } = res.locals;
const { sql, filename } = res.locals.params; const { sql, filename } = res.locals.params;
@ -80,26 +89,44 @@ function handleCopyTo (logger) {
return next(err); return next(err);
} }
req.on('aborted', () => {
const err = new Error('Connection closed by client');
metrics.end(null, err);
res.emit('log');
pgstream.emit('error', err);
const pid = streamCopy.clientProcessID;
streamCopy.cancel(pid, StreamCopy.ACTION_TO, (err) => {
if (err) {
return mainLogger.error({ error: err }, `Unable to cancel "copy ${StreamCopy.ACTION_TO}" stream query (pid: ${pid}, requestId: ${requestId})`);
}
mainLogger.info(`Canceled "copy ${StreamCopy.ACTION_TO}" stream query successfully (pid: ${pid}, requestId: ${requestId})`);
});
return next(err);
});
pgstream pgstream
.on('data', data => metrics.addSize(data.length)) .on('data', data => metrics.addSize(data.length))
.on('error', err => { .on('error', err => {
metrics.end(null, err); metrics.end(null, err);
return next(err); return next(err);
}) })
.on('end', () => { .on('end', () => {
metrics.end(streamCopy.getRowCount()); metrics.end(streamCopy.getRowCount());
res.emit('log');
return next(); return next();
}) })
.pipe(res) .pipe(res);
.on('close', () => pgstream.emit('error', new Error('Connection closed by client')))
.on('error', err => pgstream.emit('error', err));
}); });
}; };
} }
function handleCopyFrom (logger) { function handleCopyFrom () {
return function handleCopyFromMiddleware (req, res, next) { return function handleCopyFromMiddleware (req, res, next) {
const { logger } = res.locals;
const { userDbParams, user, dbRemainingQuota } = res.locals; const { userDbParams, user, dbRemainingQuota } = res.locals;
const { sql } = res.locals.params; const { sql } = res.locals.params;
const isGzip = req.get('content-encoding') === 'gzip'; const isGzip = req.get('content-encoding') === 'gzip';
@ -170,10 +197,11 @@ function handleCopyFrom (logger) {
}; };
} }
function errorHandler (logger) { function errorHandler () {
return function errorHandlerMiddleware (err, req, res, next) { return function errorHandlerMiddleware (err, req, res, next) {
const { logger } = res.locals;
if (res.headersSent) { if (res.headersSent) {
logger.error(err); logger.error({ error: err });
const errorHandler = errorHandlerFactory(err); const errorHandler = errorHandlerFactory(err);
res.write(JSON.stringify(errorHandler.getResponse())); res.write(JSON.stringify(errorHandler.getResponse()));
res.end(); res.end();

View File

@ -2,23 +2,25 @@
const util = require('util'); const util = require('util');
const initLogger = require('../middlewares/logger');
const profiler = require('../middlewares/profiler');
const bodyParser = require('../middlewares/body-parser'); const bodyParser = require('../middlewares/body-parser');
const user = require('../middlewares/user'); const user = require('../middlewares/user');
const { initializeProfiler, finishProfiler } = require('../middlewares/profiler');
const authorization = require('../middlewares/authorization'); const authorization = require('../middlewares/authorization');
const connectionParams = require('../middlewares/connection-params'); const connectionParams = require('../middlewares/connection-params');
const rateLimits = require('../middlewares/rate-limit'); const rateLimits = require('../middlewares/rate-limit');
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimits; const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimits;
const params = require('../middlewares/params'); const params = require('../middlewares/params');
const log = require('../middlewares/log'); const tag = require('../middlewares/tag');
module.exports = class JobController { module.exports = class JobController {
constructor (metadataBackend, userDatabaseService, jobService, statsdClient, userLimitsService) { constructor ({ metadataBackend, userDatabaseService, jobService, statsdClient, userLimitsService, logger }) {
this.metadataBackend = metadataBackend; this.metadataBackend = metadataBackend;
this.userDatabaseService = userDatabaseService; this.userDatabaseService = userDatabaseService;
this.jobService = jobService; this.jobService = jobService;
this.statsdClient = statsdClient; this.statsdClient = statsdClient;
this.userLimitsService = userLimitsService; this.userLimitsService = userLimitsService;
this.logger = logger;
} }
route (sqlRouter) { route (sqlRouter) {
@ -31,39 +33,44 @@ module.exports = class JobController {
); );
sqlRouter.post('/job', [ sqlRouter.post('/job', [
initLogger({ logger: this.logger }),
profiler({ statsClient: this.statsdClient }),
tag({ tags: ['job', 'create'] }),
bodyParser(), bodyParser(),
checkBodyPayloadSize(), checkBodyPayloadSize(),
params({ strategy: 'job' }), params({ strategy: 'job' }),
log(log.TYPES.JOB), jobMiddlewares(createJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_CREATE)
jobMiddlewares('create', createJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_CREATE)
]); ]);
sqlRouter.get('/job/:job_id', [ sqlRouter.get('/job/:job_id', [
initLogger({ logger: this.logger }),
profiler({ statsClient: this.statsdClient }),
tag({ tags: ['job', 'retrieve'] }),
bodyParser(), bodyParser(),
jobMiddlewares('retrieve', getJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_GET) jobMiddlewares(getJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_GET)
]); ]);
sqlRouter.delete('/job/:job_id', [ sqlRouter.delete('/job/:job_id', [
initLogger({ logger: this.logger }),
profiler({ statsClient: this.statsdClient }),
tag({ tags: ['job', 'cancel'] }),
bodyParser(), bodyParser(),
jobMiddlewares('cancel', cancelJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_DELETE) jobMiddlewares(cancelJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_DELETE)
]); ]);
} }
}; };
function composeJobMiddlewares (metadataBackend, userDatabaseService, jobService, statsdClient, userLimitsService, pubSubMetricsService) { function composeJobMiddlewares (metadataBackend, userDatabaseService, jobService, statsdClient, userLimitsService, pubSubMetricsService) {
return function jobMiddlewares (action, job, endpointGroup) { return function jobMiddlewares (job, endpointGroup) {
const forceToBeMaster = true; const forceToBeMaster = true;
return [ return [
initializeProfiler('job'),
user(metadataBackend), user(metadataBackend),
rateLimits(userLimitsService, endpointGroup), rateLimits(userLimitsService, endpointGroup),
authorization(metadataBackend, forceToBeMaster), authorization(metadataBackend, forceToBeMaster),
connectionParams(userDatabaseService), connectionParams(userDatabaseService),
job(jobService), job(jobService),
setServedByDBHostHeader(), setServedByDBHostHeader(),
finishProfiler(),
logJobResult(action),
incrementSuccessMetrics(statsdClient), incrementSuccessMetrics(statsdClient),
sendResponse(), sendResponse(),
incrementErrorMetrics(statsdClient) incrementErrorMetrics(statsdClient)
@ -76,9 +83,7 @@ function cancelJob (jobService) {
const { job_id: jobId } = req.params; const { job_id: jobId } = req.params;
jobService.cancel(jobId, (err, job) => { jobService.cancel(jobId, (err, job) => {
if (req.profiler) { req.profiler.done('cancelJob');
req.profiler.done('cancelJob');
}
if (err) { if (err) {
return next(err); return next(err);
@ -96,9 +101,7 @@ function getJob (jobService) {
const { job_id: jobId } = req.params; const { job_id: jobId } = req.params;
jobService.get(jobId, (err, job) => { jobService.get(jobId, (err, job) => {
if (req.profiler) { req.profiler.done('getJob');
req.profiler.done('getJob');
}
if (err) { if (err) {
return next(err); return next(err);
@ -113,7 +116,8 @@ function getJob (jobService) {
function createJob (jobService) { function createJob (jobService) {
return function createJobMiddleware (req, res, next) { return function createJobMiddleware (req, res, next) {
var data = { const { logger } = res.locals;
const data = {
user: res.locals.user, user: res.locals.user,
query: res.locals.params.sql, query: res.locals.params.sql,
host: res.locals.userDbParams.host, host: res.locals.userDbParams.host,
@ -124,15 +128,13 @@ function createJob (jobService) {
}; };
jobService.create(data, (err, job) => { jobService.create(data, (err, job) => {
if (req.profiler) { req.profiler.done('createJob');
req.profiler.done('createJob');
}
if (err) { if (err) {
return next(err); return next(err);
} }
res.locals.job_id = job.job_id; logger.info({ job: job.toJSON() });
res.statusCode = 201; res.statusCode = 201;
res.body = job.serialize(); res.body = job.serialize();
@ -185,21 +187,6 @@ function setServedByDBHostHeader () {
}; };
} }
function logJobResult (action) {
return function logJobResultMiddleware (req, res, next) {
if (process.env.NODE_ENV !== 'test') {
console.info(JSON.stringify({
type: 'sql_api_batch_job',
username: res.locals.user,
action: action,
job_id: req.params.job_id || res.locals.job_id
}));
}
next();
};
}
const METRICS_PREFIX = 'sqlapi.job'; const METRICS_PREFIX = 'sqlapi.job';
function incrementSuccessMetrics (statsdClient) { function incrementSuccessMetrics (statsdClient) {

View File

@ -1,14 +1,15 @@
'use strict'; 'use strict';
const initLogger = require('../middlewares/logger');
const profiler = require('../middlewares/profiler');
const bodyParser = require('../middlewares/body-parser'); const bodyParser = require('../middlewares/body-parser');
const { initializeProfiler } = require('../middlewares/profiler');
const user = require('../middlewares/user'); const user = require('../middlewares/user');
const rateLimits = require('../middlewares/rate-limit'); const rateLimits = require('../middlewares/rate-limit');
const authorization = require('../middlewares/authorization'); const authorization = require('../middlewares/authorization');
const connectionParams = require('../middlewares/connection-params'); const connectionParams = require('../middlewares/connection-params');
const timeoutLimits = require('../middlewares/timeout-limits'); const timeoutLimits = require('../middlewares/timeout-limits');
const params = require('../middlewares/params'); const params = require('../middlewares/params');
const log = require('../middlewares/log'); const logQuery = require('../middlewares/log-query');
const cancelOnClientAbort = require('../middlewares/cancel-on-client-abort'); const cancelOnClientAbort = require('../middlewares/cancel-on-client-abort');
const affectedTables = require('../middlewares/affected-tables'); const affectedTables = require('../middlewares/affected-tables');
const accessValidator = require('../middlewares/access-validator'); const accessValidator = require('../middlewares/access-validator');
@ -19,16 +20,18 @@ const surrogateKey = require('../middlewares/surrogate-key');
const lastModified = require('../middlewares/last-modified'); const lastModified = require('../middlewares/last-modified');
const formatter = require('../middlewares/formatter'); const formatter = require('../middlewares/formatter');
const content = require('../middlewares/content'); const content = require('../middlewares/content');
const tag = require('../middlewares/tag');
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimits; const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimits;
const PSQL = require('cartodb-psql'); const PSQL = require('cartodb-psql');
module.exports = class QueryController { module.exports = class QueryController {
constructor (metadataBackend, userDatabaseService, statsdClient, userLimitsService) { constructor ({ metadataBackend, userDatabaseService, statsdClient, userLimitsService, logger }) {
this.metadataBackend = metadataBackend; this.metadataBackend = metadataBackend;
this.stats = statsdClient; this.statsdClient = statsdClient;
this.userDatabaseService = userDatabaseService; this.userDatabaseService = userDatabaseService;
this.userLimitsService = userLimitsService; this.userLimitsService = userLimitsService;
this.logger = logger;
} }
route (sqlRouter) { route (sqlRouter) {
@ -36,15 +39,17 @@ module.exports = class QueryController {
const queryMiddlewares = () => { const queryMiddlewares = () => {
return [ return [
initLogger({ logger: this.logger }),
profiler({ statsClient: this.statsdClient }),
tag({ tags: ['query'] }),
bodyParser(), bodyParser(),
initializeProfiler('query'),
user(this.metadataBackend), user(this.metadataBackend),
rateLimits(this.userLimitsService, RATE_LIMIT_ENDPOINTS_GROUPS.QUERY), rateLimits(this.userLimitsService, RATE_LIMIT_ENDPOINTS_GROUPS.QUERY),
authorization(this.metadataBackend, forceToBeMaster), authorization(this.metadataBackend, forceToBeMaster),
connectionParams(this.userDatabaseService), connectionParams(this.userDatabaseService),
timeoutLimits(this.metadataBackend), timeoutLimits(this.metadataBackend),
params({ strategy: 'query' }), params({ strategy: 'query' }),
log(log.TYPES.QUERY), logQuery(),
cancelOnClientAbort(), cancelOnClientAbort(),
affectedTables(), affectedTables(),
accessValidator(), accessValidator(),
@ -55,7 +60,7 @@ module.exports = class QueryController {
lastModified(), lastModified(),
formatter(), formatter(),
content(), content(),
handleQuery({ stats: this.stats }) handleQuery({ stats: this.statsdClient })
]; ];
}; };
@ -73,9 +78,7 @@ function handleQuery ({ stats } = {}) {
let { formatter } = req; let { formatter } = req;
try { try {
if (req.profiler) { req.profiler.done('init');
req.profiler.done('init');
}
const opts = { const opts = {
username: username, username: username,
@ -95,7 +98,6 @@ function handleQuery ({ stats } = {}) {
opts.profiler = req.profiler; opts.profiler = req.profiler;
opts.beforeSink = function () { opts.beforeSink = function () {
req.profiler.done('beforeSink'); req.profiler.done('beforeSink');
res.header('X-SQLAPI-Profiler', req.profiler.toJSONString());
}; };
} }
@ -106,10 +108,6 @@ function handleQuery ({ stats } = {}) {
formatter.sendResponse(opts, (err) => { formatter.sendResponse(opts, (err) => {
formatter = null; formatter = null;
if (req.profiler) {
req.profiler.sendStats();
}
if (stats) { if (stats) {
if (err) { if (err) {
stats.increment('sqlapi.query.error'); stats.increment('sqlapi.query.error');

View File

@ -6,8 +6,6 @@ const UserDatabaseService = require('../../services/user-database-service');
const UserLimitsService = require('../../services/user-limits'); const UserLimitsService = require('../../services/user-limits');
const socketTimeout = require('../middlewares/socket-timeout'); const socketTimeout = require('../middlewares/socket-timeout');
const logger = require('../middlewares/logger');
const profiler = require('../middlewares/profiler');
const cors = require('../middlewares/cors'); const cors = require('../middlewares/cors');
const servedByHostHeader = require('../middlewares/served-by-host-header'); const servedByHostHeader = require('../middlewares/served-by-host-header');
const clientHeader = require('../middlewares/client-header'); const clientHeader = require('../middlewares/client-header');
@ -17,7 +15,7 @@ const CopyController = require('./copy-controller');
const JobController = require('./job-controller'); const JobController = require('./job-controller');
module.exports = class SqlRouter { module.exports = class SqlRouter {
constructor ({ metadataBackend, statsClient, dataIngestionLogger, jobService }) { constructor ({ metadataBackend, statsClient, logger, jobService }) {
const userLimitsServiceOptions = { const userLimitsServiceOptions = {
limits: { limits: {
rateLimitsEnabled: global.settings.ratelimits.rateLimitsEnabled rateLimitsEnabled: global.settings.ratelimits.rateLimitsEnabled
@ -26,27 +24,30 @@ module.exports = class SqlRouter {
const userDatabaseService = new UserDatabaseService(metadataBackend); const userDatabaseService = new UserDatabaseService(metadataBackend);
const userLimitsService = new UserLimitsService(metadataBackend, userLimitsServiceOptions); const userLimitsService = new UserLimitsService(metadataBackend, userLimitsServiceOptions);
this.queryController = new QueryController( this.queryController = new QueryController({
metadataBackend, metadataBackend,
userDatabaseService, userDatabaseService,
statsClient, statsClient,
userLimitsService userLimitsService,
); logger
});
this.copyController = new CopyController( this.copyController = new CopyController({
metadataBackend, metadataBackend,
userDatabaseService, userDatabaseService,
statsClient,
userLimitsService, userLimitsService,
dataIngestionLogger logger
); });
this.jobController = new JobController( this.jobController = new JobController({
metadataBackend, metadataBackend,
userDatabaseService, userDatabaseService,
jobService, jobService,
statsClient, statsClient,
userLimitsService userLimitsService,
); logger
});
} }
route (apiRouter, routes) { route (apiRouter, routes) {
@ -59,8 +60,6 @@ module.exports = class SqlRouter {
middlewares.forEach(middleware => sqlRouter.use(middleware())); middlewares.forEach(middleware => sqlRouter.use(middleware()));
sqlRouter.use(socketTimeout()); sqlRouter.use(socketTimeout());
sqlRouter.use(logger());
sqlRouter.use(profiler({ statsClient: this.statsClient }));
sqlRouter.use(cors()); sqlRouter.use(cors());
sqlRouter.use(clientHeader()); sqlRouter.use(clientHeader());
sqlRouter.use(servedByHostHeader()); sqlRouter.use(servedByHostHeader());

View File

@ -1,11 +0,0 @@
'use strict';
const Logger = require('../services/logger');
class BatchLogger extends Logger {
log (job) {
return job.log(this.logger);
}
}
module.exports = BatchLogger;

View File

@ -4,6 +4,7 @@ var util = require('util');
var EventEmitter = require('events').EventEmitter; var EventEmitter = require('events').EventEmitter;
var queue = require('queue-async'); var queue = require('queue-async');
var HostScheduler = require('./scheduler/host-scheduler'); var HostScheduler = require('./scheduler/host-scheduler');
var JobStatus = require('./job-status');
var EMPTY_QUEUE = true; var EMPTY_QUEUE = true;
@ -114,12 +115,11 @@ Batch.prototype.processJob = function (user, callback) {
return callback(err, !EMPTY_QUEUE); return callback(err, !EMPTY_QUEUE);
} }
self.logger.debug( if (JobStatus.isFinal(job.data.status)) {
'[%s] Job=%s status=%s user=%s (failed_reason=%s)', self.logger.info({ job: job.toJSON() });
self.name, jobId, job.data.status, user, job.failed_reason } else {
); self.logger.debug({ job: job.toJSON() });
}
self.logger.log(job);
return callback(null, !EMPTY_QUEUE); return callback(null, !EMPTY_QUEUE);
}); });

View File

@ -9,14 +9,11 @@ var JobPublisher = require('./pubsub/job-publisher');
var JobQueue = require('./job-queue'); var JobQueue = require('./job-queue');
var JobBackend = require('./job-backend'); var JobBackend = require('./job-backend');
var JobService = require('./job-service'); var JobService = require('./job-service');
var BatchLogger = require('./batch-logger');
var Batch = require('./batch'); var Batch = require('./batch');
module.exports = function batchFactory (metadataBackend, redisPool, name, statsdClient, loggerPath) { module.exports = function batchFactory (metadataBackend, redisPool, name, statsdClient, logger) {
var userDatabaseMetadataService = new UserDatabaseMetadataService(metadataBackend); var userDatabaseMetadataService = new UserDatabaseMetadataService(metadataBackend);
var logger = new BatchLogger(loggerPath, 'batch-queries');
var jobSubscriber = new JobSubscriber(redisPool); var jobSubscriber = new JobSubscriber(redisPool);
var jobPublisher = new JobPublisher(redisPool); var jobPublisher = new JobPublisher(redisPool);

View File

@ -116,7 +116,3 @@ JobBase.prototype.serialize = function () {
return data; return data;
}; };
JobBase.prototype.log = function (/* logger */) {
return false;
};

View File

@ -205,54 +205,39 @@ JobFallback.prototype.getLastFinishedStatus = function () {
}.bind(this), JobStatus.DONE); }.bind(this), JobStatus.DONE);
}; };
JobFallback.prototype.log = function (logger) { JobFallback.prototype.toJSON = function () {
if (!isFinished(this)) { const queries = this.data.query.query;
return false;
}
var queries = this.data.query.query; return {
type: this.constructor.name,
id: this.data.job_id,
username: this.data.user,
status: this.data.status,
failed_reason: this.failed_reason,
created: this.data.created_at,
updated: this.data.updated_at,
elapsed: elapsedTime(this.data.created_at, this.data.updated_at),
dbhost: this.data.host,
queries: queries.map((query) => {
const node = query.id ? parseQueryId(query.id) : undefined;
for (var i = 0; i < queries.length; i++) { return {
var query = queries[i]; status: query.status,
fallback_status: query.fallback_status,
var logEntry = { failed_reason: query.failed_reason,
created: this.data.created_at, waiting: elapsedTime(this.data.created_at, query.started_at),
waiting: elapsedTime(this.data.created_at, query.started_at), starttime: query.started_at,
time: query.started_at, endtime: query.ended_at,
endtime: query.ended_at, elapsed: elapsedTime(query.started_at, query.ended_at),
username: this.data.user, id: query.id,
dbhost: this.data.host, analysis: node ? node.analysisId : undefined,
job: this.data.job_id, node: node ? node.nodeId : undefined,
status: query.status, type: node ? node.nodeType : undefined
elapsed: elapsedTime(query.started_at, query.ended_at) };
}; })
};
var queryId = query.id;
var tag = 'query';
if (queryId) {
logEntry.query_id = queryId;
var node = parseQueryId(queryId);
if (node) {
logEntry.analysis = node.analysisId;
logEntry.node = node.nodeId;
logEntry.type = node.nodeType;
tag = 'analysis';
}
}
logger.info(logEntry, tag);
}
return true;
}; };
function isFinished (job) {
return JobStatus.isFinal(job.data.status) &&
(!job.data.fallback_status || JobStatus.isFinal(job.data.fallback_status));
}
function parseQueryId (queryId) { function parseQueryId (queryId) {
var data = queryId.split(':'); var data = queryId.split(':');

View File

@ -88,3 +88,34 @@ JobMultiple.prototype.setStatus = function (finalStatus, errorMesssage) {
throw new Error('Cannot set status from ' + initialStatus + ' to ' + finalStatus); throw new Error('Cannot set status from ' + initialStatus + ' to ' + finalStatus);
}; };
JobMultiple.prototype.toJSON = function () {
const queries = this.data.query;
return {
type: this.constructor.name,
id: this.data.job_id,
username: this.data.user,
status: this.data.status,
created: this.data.created_at,
updated: this.data.updated_at,
elapsed: elapsedTime(this.data.created_at, this.data.updated_at),
dbhost: this.data.host,
queries: queries.map((query) => {
return {
status: query.status,
failed_reason: query.failed_reason
};
})
};
};
function elapsedTime (startedAt, endedAt) {
if (!startedAt || !endedAt) {
return;
}
var start = new Date(startedAt);
var end = new Date(endedAt);
return end.getTime() - start.getTime();
}

View File

@ -32,3 +32,27 @@ JobSimple.prototype.setQuery = function (query) {
JobSimple.super_.prototype.setQuery.call(this, query); JobSimple.super_.prototype.setQuery.call(this, query);
}; };
JobSimple.prototype.toJSON = function () {
return {
class: this.constructor.name,
id: this.data.job_id,
username: this.data.user,
status: this.data.status,
failed_reason: this.data.failed_reason,
created: this.data.created_at,
updated: this.data.updated_at,
elapsed: elapsedTime(this.data.created_at, this.data.updated_at),
dbhost: this.data.host
};
};
function elapsedTime (startedAt, endedAt) {
if (!startedAt || !endedAt) {
return;
}
var start = new Date(startedAt);
var end = new Date(endedAt);
return end.getTime() - start.getTime();
}

View File

@ -1,41 +1,24 @@
'use strict'; 'use strict';
/** module.exports = class CdbRequest {
* this module provides cartodb-specific interpretation constructor () {
* of request headers // would extract "strk" from "strk.cartodb.com"
*/ this.RE_USER_FROM_HOST = new RegExp(global.settings.user_from_host || '^([^\\.]+)\\.');
}
function CartodbRequest () {
} userByReq (req) {
const host = req.headers.host || '';
module.exports = CartodbRequest;
if (req.params.user) {
/** return req.params.user;
* If the request contains the user use it, if not guess from the host }
*/
CartodbRequest.prototype.userByReq = function (req) { const mat = host.match(this.RE_USER_FROM_HOST);
if (req.params.user) {
return req.params.user; if (!mat || mat.length !== 2) {
throw new Error(`No username found in hostname '${host}'`);
}
return mat[1];
} }
return userByHostName(req.headers.host);
}; };
var userFromHostRegex = new RegExp(
global.settings.user_from_host || '^([^\\.]+)\\.' // would extract "strk" from "strk.cartodb.com"
);
function userByHostName (host) {
var mat = host.match(userFromHostRegex);
if (!mat) {
console.error("ERROR: user pattern '" + userFromHostRegex + "' does not match hostname '" + host + "'");
return;
}
if (mat.length !== 2) {
console.error(
"ERROR: pattern '" + userFromHostRegex + "' gave unexpected matches against '" + host + "': " + mat
);
return;
}
return mat[1];
}

View File

@ -1,5 +1,8 @@
'use strict'; 'use strict';
const Logger = require('./utils/logger');
const logger = new Logger();
module.exports = function getServerOptions () { module.exports = function getServerOptions () {
const defaults = { const defaults = {
routes: { routes: {
@ -25,7 +28,8 @@ module.exports = function getServerOptions () {
middlewares: [] middlewares: []
}] }]
}] }]
} },
logger
}; };
return Object.assign({}, defaults, global.settings); return Object.assign({}, defaults, global.settings);

View File

@ -4,7 +4,6 @@ const express = require('express');
const fs = require('fs'); const fs = require('fs');
const RedisPool = require('redis-mpool'); const RedisPool = require('redis-mpool');
const cartodbRedis = require('cartodb-redis'); const cartodbRedis = require('cartodb-redis');
const Logger = require('./services/logger');
const ApiRouter = require('./api/api-router'); const ApiRouter = require('./api/api-router');
const batchFactory = require('./batch'); const batchFactory = require('./batch');
const getServerOptions = require('./server-options'); const getServerOptions = require('./server-options');
@ -15,7 +14,7 @@ process.env.PGAPPNAME = process.env.PGAPPNAME || 'cartodb_sqlapi';
require('./utils/date-to-json'); require('./utils/date-to-json');
module.exports = function createServer (statsClient) { module.exports = function createServer (statsClient) {
const options = getServerOptions(); const { routes, logger } = getServerOptions();
const app = express(); const app = express();
const redisPool = new RedisPool({ const redisPool = new RedisPool({
name: 'sql-api', name: 'sql-api',
@ -56,26 +55,20 @@ module.exports = function createServer (statsClient) {
app.disable('x-powered-by'); app.disable('x-powered-by');
app.disable('etag'); app.disable('etag');
const dataIngestionLogger = new Logger(global.settings.dataIngestionLogPath, 'data-ingestion');
app.dataIngestionLogger = dataIngestionLogger;
const apiRouter = new ApiRouter({ const apiRouter = new ApiRouter({
redisPool, redisPool,
metadataBackend, metadataBackend,
statsClient, statsClient,
dataIngestionLogger logger
}); });
apiRouter.route(app, options.routes.api); apiRouter.route(app, routes.api);
const isBatchProcess = process.argv.indexOf('--no-batch') === -1; const isBatchProcess = process.argv.indexOf('--no-batch') === -1;
if (global.settings.environment !== 'test' && isBatchProcess) { if (global.settings.environment !== 'test' && isBatchProcess) {
const batchName = global.settings.api_hostname || 'batch'; const batchName = global.settings.api_hostname || 'batch';
app.batch = batchFactory( app.batch = batchFactory(metadataBackend, redisPool, batchName, statsClient, logger);
metadataBackend, redisPool, batchName, statsClient, global.settings.batch_log_filename
);
app.batch.start(); app.batch.start();
} }

View File

@ -15,12 +15,18 @@ class ErrorHandler extends Error {
} }
getResponse () { getResponse () {
return { const serialized = {
error: [this.message], error: [this.message],
context: this.context, context: this.context,
detail: this.detail, detail: this.detail,
hint: this.hint hint: this.hint
}; };
if (global.settings.environment === 'development') {
serialized.stack = this.stack;
}
return serialized;
} }
getHttpStatus (httpStatus = 400) { getHttpStatus (httpStatus = 400) {

View File

@ -65,7 +65,7 @@ module.exports = class StreamCopyMetrics {
size: this.size, size: this.size,
rows: this.rows, rows: this.rows,
gzip: this.isGzip, gzip: this.isGzip,
'cdb-user': this.username, username: this.username,
time: this.time, time: this.time,
timestamp, timestamp,
sql: this.sql sql: this.sql
@ -82,6 +82,6 @@ module.exports = class StreamCopyMetrics {
logData.success = this.success; logData.success = this.success;
this.logger.info(logData); this.logger.info({ ingestion: logData });
} }
}; };

View File

@ -13,14 +13,13 @@ const terminateQuery = pid => `SELECT pg_terminate_backend(${pid}) as terminated
const timeoutQuery = timeout => `SET statement_timeout=${timeout}`; const timeoutQuery = timeout => `SET statement_timeout=${timeout}`;
module.exports = class StreamCopy { module.exports = class StreamCopy {
constructor (sql, userDbParams, logger) { constructor (sql, userDbParams) {
this.dbParams = Object.assign({}, userDbParams, { this.dbParams = Object.assign({}, userDbParams, {
port: global.settings.db_batch_port || userDbParams.port port: global.settings.db_batch_port || userDbParams.port
}); });
this.sql = sql; this.sql = sql;
this.stream = null; this.stream = null;
this.timeout = global.settings.copy_timeout || DEFAULT_TIMEOUT; this.timeout = global.settings.copy_timeout || DEFAULT_TIMEOUT;
this.logger = logger;
} }
static get ACTION_TO () { static get ACTION_TO () {
@ -52,8 +51,7 @@ module.exports = class StreamCopy {
if (action === ACTION_TO) { if (action === ACTION_TO) {
pgstream.on('end', () => done()); pgstream.on('end', () => done());
pgstream.on('error', () => this._cancel(client.processID, action)); pgstream.on('warning', (msg) => this.logger.warn({ error: new Error(msg) }));
pgstream.on('warning', (msg) => this.logger.warn(msg));
} else if (action === ACTION_FROM) { } else if (action === ACTION_FROM) {
pgstream.on('finish', () => done()); pgstream.on('finish', () => done());
pgstream.on('client-close', err => client.connection.sendCopyFail(err.message)); pgstream.on('client-close', err => client.connection.sendCopyFail(err.message));
@ -70,35 +68,33 @@ module.exports = class StreamCopy {
return this.stream.rowCount; return this.stream.rowCount;
} }
_cancel (pid, action) { cancel (pid, action, callback) {
const pg = new PSQL(this.dbParams); const pg = new PSQL(this.dbParams);
const actionType = action === ACTION_TO ? ACTION_TO : ACTION_FROM; const actionType = action === ACTION_TO ? ACTION_TO : ACTION_FROM;
pg.query(cancelQuery(pid), (err, result) => { pg.query(cancelQuery(pid), (err, result) => {
if (err) { if (err) {
return this.logger.error(err); return callback(err);
} }
const isCancelled = result.rows.length && result.rows[0].cancelled; const isCancelled = result.rows.length && result.rows[0].cancelled;
if (isCancelled) { if (isCancelled) {
return this.logger.info(`Canceled "copy ${actionType}" stream query successfully (pid: ${pid})`); return callback();
} }
return pg.query(terminateQuery(pid), (err, result) => { return pg.query(terminateQuery(pid), (err, result) => {
if (err) { if (err) {
return this.logger.error(err); return callback(err);
} }
const isTerminated = result.rows.length && result.rows[0].terminated; const isTerminated = result.rows.length && result.rows[0].terminated;
if (!isTerminated) { if (!isTerminated) {
return this.logger.error( return callback(new Error(`Unable to terminate "copy ${actionType}" stream query (pid: ${pid})`));
new Error(`Unable to terminate "copy ${actionType}" stream query (pid: ${pid})`)
);
} }
return this.logger.info(`Terminated "copy ${actionType}" stream query successfully (pid: ${pid})`); return callback();
}); });
}); });
} }

View File

@ -52,4 +52,8 @@ ProfilerProxy.prototype.toJSONString = function () {
return this.profile ? this.profiler.toJSONString() : '{}'; return this.profile ? this.profiler.toJSONString() : '{}';
}; };
ProfilerProxy.prototype.toJSON = function () {
return this.profile ? JSON.parse(this.profiler.toJSONString()) : {};
};
module.exports = ProfilerProxy; module.exports = ProfilerProxy;

60
lib/utils/logger.js Normal file
View File

@ -0,0 +1,60 @@
'use strict';
const pino = require('pino');
const { req: requestSerializer, res: responseSerializer, err, wrapErrorSerializer } = pino.stdSerializers;
const DEV_ENVS = ['test', 'development'];
module.exports = class Logger {
constructor () {
const { LOG_LEVEL, NODE_ENV } = process.env;
const logLevelFromNodeEnv = NODE_ENV === 'test' ? 'fatal' : 'info';
const errorSerializer = DEV_ENVS.includes(NODE_ENV) ? err : wrapErrorSerializer(err => {
err.stack = err.stack.split('\n').slice(0, 3).join('\n');
return err;
});
const options = {
base: null, // Do not bind hostname, pid and friends by default
level: LOG_LEVEL || logLevelFromNodeEnv,
serializers: {
request: requestSerializer,
response: responseSerializer,
error: (error) => Array.isArray(error) ? error.map((err) => errorSerializer(err)) : [errorSerializer(error)]
}
};
const dest = pino.destination({ sync: false }); // stdout
this._logger = pino(options, dest);
}
trace (...args) {
this._logger.trace(...args);
}
debug (...args) {
this._logger.debug(...args);
}
info (...args) {
this._logger.info(...args);
}
warn (...args) {
this._logger.warn(...args);
}
error (...args) {
this._logger.error(...args);
}
fatal (...args) {
this._logger.fatal(...args);
}
child (...args) {
return this._logger.child(...args);
}
finish (callback) {
return pino.final(this._logger, callback);
}
};

View File

@ -1,45 +0,0 @@
'use strict';
module.exports = {
/**
* Remove problematic nested characters
* from object for logs RegEx
*
* @param {Object} object
* @param {Number} maxStringLength
*/
stringifyForLogs (object, maxStringLength = 1024) {
return JSON.stringify(cloneAndFilter(object, maxStringLength));
}
};
function cloneAndFilter (object, maxStringLength) {
if (!object || !(object instanceof Object)) {
return null;
}
const newObject = {};
Object.keys(object).map(key => {
if (typeof object[key] === 'string') {
newObject[key] = filterString(object[key], maxStringLength);
} else if (typeof object[key] === 'object') {
newObject[key] = cloneAndFilter(object[key], maxStringLength);
} else if (object[key] instanceof Array) {
newObject[key] = [];
for (const element of object[key]) {
newObject[key].push(cloneAndFilter(element, maxStringLength));
}
} else {
newObject[key] = object[key];
}
});
return newObject;
}
function filterString (s, maxStringLength) {
return s
.substring(0, maxStringLength)
.replace(/[^a-zA-Z0-9]/g, ' ');
}

29
metro/index.js Normal file
View File

@ -0,0 +1,29 @@
'use strict';

const split = require('split2');
const logCollector = require('./log-collector');
const metricsCollector = require('./metrics-collector');

// stdin -> line splitter -> log aggregation -> metrics collection -> stdout
const streams = [process.stdin, split(), logCollector(), metricsCollector(), process.stdout];

pipeline('pipe', streams);

process.on('SIGINT', exitProcess(0));
process.on('SIGTERM', exitProcess(0));
process.on('uncaughtException', exitProcess(1));
process.on('unhandledRejection', exitProcess(1));

// Applies `pipe` or `unpipe` between every pair of consecutive streams.
function pipeline (action, streams) {
    streams.reduce((source, destination) => {
        source[action](destination);
        return destination;
    });
}

// Builds a handler that tears the pipeline down and exits with `code`.
function exitProcess (code = 0) {
    return function exitProcess (signal) {
        pipeline('unpipe', streams);
        process.exit(code);
    };
}

105
metro/log-collector.js Normal file
View File

@ -0,0 +1,105 @@
'use strict'
const fs = require('fs');
const split = require('split2');
const assingDeep = require('assign-deep');
const { Transform } = require('stream');
const DEV_ENVS = ['test', 'development'];
const dumpPath = `${__dirname}/dump.json`;
let logs;
const LEVELS = {
10: 'trace',
20: 'debug',
30: 'info',
40: 'warn',
50: 'error',
60: 'fatal'
};
module.exports = function logCollector () {
const stream = new Transform({
transform (chunk, enc, callback) {
let entry;
try {
entry = JSON.parse(chunk);
const { level, time } = entry;
if (level === undefined && time === undefined) {
throw new Error('Entry log is not valid');
}
} catch (e) {
if (DEV_ENVS.includes(process.env.NODE_ENV)) {
this.push(chunk + '\n');
}
return callback();
}
const { id } = entry;
if (id === undefined) {
entry.level = LEVELS[entry.level];
this.push(`${JSON.stringify(entry)}\n`);
return callback();
}
if (logs.has(id)) {
const accEntry = logs.get(id);
const { end } = entry;
if (end === true) {
accEntry.level = LEVELS[accEntry.level];
accEntry.time = entry.time;
this.push(`${JSON.stringify(accEntry)}\n`);
logs.delete(id);
return callback();
}
if (accEntry.level > entry.level) {
delete entry.level;
}
if (hasProperty(accEntry, 'error') && hasProperty(entry, 'error')) {
logs.set(id, assingDeep({}, accEntry, entry, { error: accEntry.error.concat(entry.error) }));
} else {
logs.set(id, assingDeep({}, accEntry, entry));
}
} else {
logs.set(id, entry);
}
callback();
}
});
stream.on('pipe', () => {
if (!fs.existsSync(dumpPath)) {
logs = new Map();
return;
}
try {
const dump = require(dumpPath);
logs = new Map(dump);
} catch (err) {
console.error(`Cannot read the dump for unfinished logs: ${err}`);
logs = new Map();
}
});
stream.on('unpipe', () => {
try {
fs.writeFileSync(dumpPath, JSON.stringify([...logs]));
} catch (err) {
console.error(`Cannot create a dump for unfinished logs: ${err}`);
}
});
return stream;
};
function hasProperty (obj, prop) {
return Object.prototype.hasOwnProperty.call(obj, prop);
}

120
metro/metrics-collector.js Normal file
View File

@ -0,0 +1,120 @@
'use strict';
const http = require('http');
const { Counter, Histogram, register } = require('prom-client');
const { Transform } = require('stream');
const DEV_ENVS = ['test', 'development'];
const requestCounter = new Counter({
name: 'sql_api_requests_total',
help: 'SQL API requests total'
});
const requestOkCounter = new Counter({
name: 'sql_api_requests_ok_total',
help: 'SQL API requests ok total'
});
const requestErrorCounter = new Counter({
name: 'sql_api_requests_errors_total',
help: 'SQL API requests errors total'
});
const responseTimeHistogram = new Histogram({
name: 'sql_api_response_time_total',
help: 'SQL API response time total'
});
const userRequestCounter = new Counter({
name: 'sql_api_requests',
help: 'SQL API requests per user',
labelNames: ['user', 'http_code']
});
const userRequestOkCounter = new Counter({
name: 'sql_api_requests_ok',
help: 'SQL API requests per user with success HTTP code',
labelNames: ['user', 'http_code']
});
const userRequestErrorCounter = new Counter({
name: 'sql_api_requests_errors',
help: 'SQL API requests per user with error HTTP code',
labelNames: ['user', 'http_code']
});
const userResponseTimeHistogram = new Histogram({
name: 'sql_api_response_time',
help: 'SQL API response time total',
labelNames: ['user']
});
module.exports = function metricsCollector () {
return new Transform({
transform (chunk, enc, callback) {
let entry;
try {
entry = JSON.parse(chunk);
const { level, time } = entry;
if (level === undefined && time === undefined) {
throw new Error('Entry log is not valid');
}
} catch (e) {
if (DEV_ENVS.includes(process.env.NODE_ENV)) {
this.push(chunk);
}
return callback();
}
const { request, response, stats } = entry;
if (request === undefined || response === undefined || stats === undefined) {
this.push(chunk);
return callback();
}
const { statusCode, headers } = response;
const { 'carto-user': user } = headers;
requestCounter.inc();
if (statusCode !== undefined && user !== undefined) {
userRequestCounter.labels(user, `${statusCode}`).inc();
}
if (statusCode >= 200 && statusCode < 400) {
requestOkCounter.inc();
if (user !== undefined) {
userRequestOkCounter.labels(user, `${statusCode}`).inc();
}
} else if (statusCode >= 400) {
requestErrorCounter.inc();
if (user !== undefined) {
userRequestErrorCounter.labels(user, `${statusCode}`).inc();
}
}
const { response: responseTime } = stats;
if (Number.isFinite(responseTime)) {
responseTimeHistogram.observe(responseTime);
userResponseTimeHistogram.labels(user).observe(responseTime);
}
this.push(chunk);
callback();
}
});
};
const port = process.env.PORT || 9144;
http
.createServer((req, res) => {
res.writeHead(200, { 'Content-Type': register.contentType });
res.end(register.metrics());
})
.listen(port)
.unref();

276
package-lock.json generated
View File

@ -234,6 +234,12 @@
"protobufjs": "^6.8.6" "protobufjs": "^6.8.6"
} }
}, },
"@hapi/bourne": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-2.0.0.tgz",
"integrity": "sha512-WEezM1FWztfbzqIUbsDzFRVMxSoLy3HugVcux6KDDtTqzPsLE8NDRHfXvev66aH1i2oOKKar3/XDjbvh/OUBdg==",
"dev": true
},
"@istanbuljs/load-nyc-config": { "@istanbuljs/load-nyc-config": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.0.0.tgz", "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.0.0.tgz",
@ -642,6 +648,26 @@
"sprintf-js": "~1.0.2" "sprintf-js": "~1.0.2"
} }
}, },
"args": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/args/-/args-5.0.1.tgz",
"integrity": "sha512-1kqmFCFsPffavQFGt8OxJdIcETti99kySRUPMpOhaGjL6mRJn8HFU1OxKY5bMqfZKUwTQc1mZkAjmGYaVOHFtQ==",
"dev": true,
"requires": {
"camelcase": "5.0.0",
"chalk": "2.4.2",
"leven": "2.1.0",
"mri": "1.1.4"
},
"dependencies": {
"camelcase": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.0.0.tgz",
"integrity": "sha512-faqwZqnWxbxn+F1d399ygeamQNy3lPp/H9H6rNrqYh4FSVCtcY+3cub1MxA8o9mDd55mM8Aghuu/kuyYA6VTsA==",
"dev": true
}
}
},
"array-flatten": { "array-flatten": {
"version": "1.1.1", "version": "1.1.1",
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
@ -686,6 +712,19 @@
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
"integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU="
}, },
"assign-deep": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/assign-deep/-/assign-deep-1.0.1.tgz",
"integrity": "sha512-CSXAX79mibneEYfqLT5FEmkqR5WXF+xDRjgQQuVf6wSCXCYU8/vHttPidNar7wJ5BFmKAo8Wei0rCtzb+M/yeA==",
"requires": {
"assign-symbols": "^2.0.2"
}
},
"assign-symbols": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-2.0.2.tgz",
"integrity": "sha512-9sBQUQZMKFKcO/C3Bo6Rx4CQany0R0UeVcefNGRRdW2vbmaMOhV1sbmlXcQLcD56juLXbSGTBm0GGuvmrAF8pA=="
},
"astral-regex": { "astral-regex": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz",
@ -707,6 +746,11 @@
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
}, },
"atomic-sleep": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz",
"integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="
},
"aws-sign2": { "aws-sign2": {
"version": "0.7.0", "version": "0.7.0",
"resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
@ -1219,6 +1263,12 @@
"assert-plus": "^1.0.0" "assert-plus": "^1.0.0"
} }
}, },
"dateformat": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz",
"integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==",
"dev": true
},
"debug": { "debug": {
"version": "4.1.1", "version": "4.1.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
@ -2042,6 +2092,16 @@
"integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=",
"dev": true "dev": true
}, },
"fast-redact": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-2.0.0.tgz",
"integrity": "sha512-zxpkULI9W9MNTK2sJ3BpPQrTEXFNESd2X6O1tXMFpK/XM0G5c5Rll2EVYZH2TqI3xRGK/VaJ+eEOt7pnENJpeA=="
},
"fast-safe-stringify": {
"version": "2.0.7",
"resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.7.tgz",
"integrity": "sha512-Utm6CdzT+6xsDk2m8S6uL8VHxNwI6Jub+e9NYTcAms28T84pTa25GJQV9j0CY0N1rM8hK4x6grpF2BQf+2qwVA=="
},
"fast-text-encoding": { "fast-text-encoding": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz",
@ -2259,6 +2319,11 @@
} }
} }
}, },
"flatstr": {
"version": "1.0.12",
"resolved": "https://registry.npmjs.org/flatstr/-/flatstr-1.0.12.tgz",
"integrity": "sha512-4zPxDyhCyiN2wIAtSLI6gc82/EjqZc1onI4Mz/l0pWrAlsSfYH/2ZIcU+e3oA2wDwbzIWNKwa23F8rh6+DRWkw=="
},
"flatted": { "flatted": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz", "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz",
@ -3536,6 +3601,18 @@
"istanbul-lib-report": "^3.0.0" "istanbul-lib-report": "^3.0.0"
} }
}, },
"jmespath": {
"version": "0.15.0",
"resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz",
"integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=",
"dev": true
},
"joycon": {
"version": "2.2.5",
"resolved": "https://registry.npmjs.org/joycon/-/joycon-2.2.5.tgz",
"integrity": "sha512-YqvUxoOcVPnCp0VU1/56f+iKSdvIRJYPznH22BdXV3xMk75SFXhWeJkZ8C9XxUWt1b5x2X1SxuFygW1U0FmkEQ==",
"dev": true
},
"js-string-escape": { "js-string-escape": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz", "resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz",
@ -3658,6 +3735,12 @@
"safe-buffer": "^5.0.1" "safe-buffer": "^5.0.1"
} }
}, },
"leven": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/leven/-/leven-2.1.0.tgz",
"integrity": "sha1-wuep93IJTe6dNCAq6KzORoeHVYA=",
"dev": true
},
"levn": { "levn": {
"version": "0.3.0", "version": "0.3.0",
"resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz",
@ -4152,6 +4235,12 @@
"integrity": "sha1-PCV/mDn8DpP/UxSWMiOeuQeD/2Y=", "integrity": "sha1-PCV/mDn8DpP/UxSWMiOeuQeD/2Y=",
"optional": true "optional": true
}, },
"mri": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/mri/-/mri-1.1.4.tgz",
"integrity": "sha512-6y7IjGPm8AzlvoUrwAaw1tLnUBudaS3752vcd8JtrpGGQn+rXIe63LFVHm/YMwtqAuh+LJPCFdlLYPWM1nYn6w==",
"dev": true
},
"ms": { "ms": {
"version": "2.1.1", "version": "2.1.1",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz",
@ -5055,6 +5144,115 @@
"integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=",
"dev": true "dev": true
}, },
"pino": {
"version": "6.3.2",
"resolved": "https://registry.npmjs.org/pino/-/pino-6.3.2.tgz",
"integrity": "sha512-EiP3L1hoFw19KPocWimjnfXeysld0ne89ZRQ+bf8nAeA2TyuLoggNlibAi+Kla67GvQBopLdIZOsh1z/Lruo5Q==",
"requires": {
"fast-redact": "^2.0.0",
"fast-safe-stringify": "^2.0.7",
"flatstr": "^1.0.12",
"pino-std-serializers": "^2.4.2",
"quick-format-unescaped": "^4.0.1",
"sonic-boom": "^1.0.0"
}
},
"pino-pretty": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-4.0.0.tgz",
"integrity": "sha512-YLy/n3dMXYWOodSm530gelkSAJGmEp29L9pqiycInlIae5FEJPWAkMRO3JFMbIFtjD2Ve4SH2aBcz2aRreGpBQ==",
"dev": true,
"requires": {
"@hapi/bourne": "^2.0.0",
"args": "^5.0.1",
"chalk": "^3.0.0",
"dateformat": "^3.0.3",
"fast-safe-stringify": "^2.0.7",
"jmespath": "^0.15.0",
"joycon": "^2.2.5",
"pump": "^3.0.0",
"readable-stream": "^3.6.0",
"split2": "^3.1.1",
"strip-json-comments": "^3.0.1"
},
"dependencies": {
"ansi-styles": {
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
"integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
"dev": true,
"requires": {
"@types/color-name": "^1.1.1",
"color-convert": "^2.0.1"
}
},
"chalk": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz",
"integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==",
"dev": true,
"requires": {
"ansi-styles": "^4.1.0",
"supports-color": "^7.1.0"
}
},
"color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"dev": true,
"requires": {
"color-name": "~1.1.4"
}
},
"color-name": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"dev": true
},
"readable-stream": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
"dev": true,
"requires": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
}
},
"safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
"dev": true
},
"string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"dev": true,
"requires": {
"safe-buffer": "~5.2.0"
}
},
"supports-color": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
"integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
"dev": true,
"requires": {
"has-flag": "^4.0.0"
}
}
}
},
"pino-std-serializers": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-2.4.2.tgz",
"integrity": "sha512-WaL504dO8eGs+vrK+j4BuQQq6GLKeCCcHaMB2ItygzVURcL1CycwNEUHTD/lHFHs/NL5qAz2UKrjYWXKSf4aMQ=="
},
"pkg-dir": { "pkg-dir": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz",
@ -5124,6 +5322,14 @@
"integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==",
"dev": true "dev": true
}, },
"prom-client": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/prom-client/-/prom-client-12.0.0.tgz",
"integrity": "sha512-JbzzHnw0VDwCvoqf8y1WDtq4wSBAbthMB1pcVI/0lzdqHGJI3KBJDXle70XK+c7Iv93Gihqo0a5LlOn+g8+DrQ==",
"requires": {
"tdigest": "^0.1.1"
}
},
"protobufjs": { "protobufjs": {
"version": "6.8.8", "version": "6.8.8",
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz",
@ -5165,6 +5371,16 @@
"resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz", "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz",
"integrity": "sha512-/6pt4+C+T+wZUieKR620OpzN/LlnNKuWjy1iFLQ/UG35JqHlR/89MP1d96dUfkf6Dne3TuLQzOYEYshJ+Hx8mw==" "integrity": "sha512-/6pt4+C+T+wZUieKR620OpzN/LlnNKuWjy1iFLQ/UG35JqHlR/89MP1d96dUfkf6Dne3TuLQzOYEYshJ+Hx8mw=="
}, },
"pump": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
"integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
"dev": true,
"requires": {
"end-of-stream": "^1.1.0",
"once": "^1.3.1"
}
},
"punycode": { "punycode": {
"version": "2.1.1", "version": "2.1.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
@ -5180,6 +5396,11 @@
"resolved": "https://registry.npmjs.org/queue-async/-/queue-async-1.0.7.tgz", "resolved": "https://registry.npmjs.org/queue-async/-/queue-async-1.0.7.tgz",
"integrity": "sha1-Iq4KHaxKkvW81GNPmTxoKiqBCUU=" "integrity": "sha1-Iq4KHaxKkvW81GNPmTxoKiqBCUU="
}, },
"quick-format-unescaped": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.1.tgz",
"integrity": "sha512-RyYpQ6Q5/drsJyOhrWHYMWTedvjTIat+FTwv0K4yoUxzvekw2aRHMQJLlnvt8UantkZg2++bEzD9EdxXqkWf4A=="
},
"range-parser": { "range-parser": {
"version": "1.2.0", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz",
@ -5596,6 +5817,15 @@
} }
} }
}, },
"sonic-boom": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-1.0.1.tgz",
"integrity": "sha512-o9tx+bonVEXSaPtptyXQXpP8l6UV9Bi3im2geZskvWw2a/o/hrbWI7EBbbv+rOx6Hubnzun9GgH4WfbgEA3MFQ==",
"requires": {
"atomic-sleep": "^1.0.0",
"flatstr": "^1.0.12"
}
},
"source-map": { "source-map": {
"version": "0.5.7", "version": "0.5.7",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
@ -5690,6 +5920,39 @@
"through": "2" "through": "2"
} }
}, },
"split2": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/split2/-/split2-3.1.1.tgz",
"integrity": "sha512-emNzr1s7ruq4N+1993yht631/JH+jaj0NYBosuKmLcq+JkGQ9MmTw1RB1fGaTCzUuseRIClrlSLHRNYGwWQ58Q==",
"requires": {
"readable-stream": "^3.0.0"
},
"dependencies": {
"readable-stream": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
"requires": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
}
},
"safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
},
"string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"requires": {
"safe-buffer": "~5.2.0"
}
}
}
},
"sprintf-js": { "sprintf-js": {
"version": "1.0.3", "version": "1.0.3",
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
@ -5928,6 +6191,14 @@
} }
} }
}, },
"tdigest": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz",
"integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=",
"requires": {
"bintrees": "1.0.1"
}
},
"test-exclude": { "test-exclude": {
"version": "6.0.0", "version": "6.0.0",
"resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
@ -6138,6 +6409,11 @@
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
"integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM="
}, },
"uuid": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.1.0.tgz",
"integrity": "sha512-CI18flHDznR0lq54xBycOVmphdCYnQLKn8abKn7PXUiKUGdEd+/l9LWNJmugXel4hXq7S+RMNl34ecyC9TntWg=="
},
"v8-compile-cache": { "v8-compile-cache": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz",

View File

@ -28,6 +28,7 @@
"dependencies": { "dependencies": {
"@carto/fqdn-sync": "0.2.2", "@carto/fqdn-sync": "0.2.2",
"@google-cloud/pubsub": "1.5.0", "@google-cloud/pubsub": "1.5.0",
"assign-deep": "^1.0.1",
"basic-auth": "^2.0.0", "basic-auth": "^2.0.0",
"bintrees": "1.0.1", "bintrees": "1.0.1",
"bunyan": "1.8.1", "bunyan": "1.8.1",
@ -44,15 +45,19 @@
"node-uuid": "^1.4.7", "node-uuid": "^1.4.7",
"oauth-client": "0.3.0", "oauth-client": "0.3.0",
"pg-copy-streams": "github:cartodb/node-pg-copy-streams#v2.x-carto", "pg-copy-streams": "github:cartodb/node-pg-copy-streams#v2.x-carto",
"pino": "^6.3.2",
"prom-client": "^12.0.0",
"qs": "~6.2.1", "qs": "~6.2.1",
"queue-async": "~1.0.7", "queue-async": "~1.0.7",
"redis-mpool": "^0.8.0", "redis-mpool": "^0.8.0",
"redlock": "2.0.1", "redlock": "2.0.1",
"request": "^2.88.0", "request": "^2.88.0",
"split2": "^3.1.1",
"step": "~0.0.5", "step": "~0.0.5",
"step-profiler": "~0.3.0", "step-profiler": "~0.3.0",
"topojson": "0.0.8", "topojson": "0.0.8",
"underscore": "~1.6.0", "underscore": "~1.6.0",
"uuid": "^8.1.0",
"yargs": "^15.3.1" "yargs": "^15.3.1"
}, },
"devDependencies": { "devDependencies": {
@ -67,6 +72,7 @@
"mocha": "^7.2.0", "mocha": "^7.2.0",
"mockdate": "^2.0.2", "mockdate": "^2.0.2",
"nyc": "^15.0.0", "nyc": "^15.0.0",
"pino-pretty": "^4.0.0",
"shapefile": "0.3.0", "shapefile": "0.3.0",
"sinon": "^9.0.0", "sinon": "^9.0.0",
"sqlite3": "^4.2.0" "sqlite3": "^4.2.0"

View File

@ -5,7 +5,7 @@ var assert = require('../../support/assert');
var redisUtils = require('../../support/redis-utils'); var redisUtils = require('../../support/redis-utils');
var batchFactory = require('../../../lib/batch/index'); var batchFactory = require('../../../lib/batch/index');
var BatchLogger = require('../../../lib/batch/batch-logger'); var Logger = require('../../../lib/utils/logger');
var JobPublisher = require('../../../lib/batch/pubsub/job-publisher'); var JobPublisher = require('../../../lib/batch/pubsub/job-publisher');
var JobQueue = require('../../../lib/batch/job-queue'); var JobQueue = require('../../../lib/batch/job-queue');
var JobBackend = require('../../../lib/batch/job-backend'); var JobBackend = require('../../../lib/batch/job-backend');
@ -17,7 +17,7 @@ describe('batch module', function () {
var dbInstance = 'localhost'; var dbInstance = 'localhost';
var username = 'vizzuality'; var username = 'vizzuality';
var pool = redisUtils.getPool(); var pool = redisUtils.getPool();
var logger = new BatchLogger(null, 'batch-queries'); var logger = new Logger();
var jobPublisher = new JobPublisher(pool); var jobPublisher = new JobPublisher(pool);
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger); var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
var jobBackend = new JobBackend(metadataBackend, jobQueue, logger); var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);
@ -25,7 +25,7 @@ describe('batch module', function () {
var jobService = new JobService(jobBackend, jobCanceller, logger); var jobService = new JobService(jobBackend, jobCanceller, logger);
before(function (done) { before(function (done) {
this.batch = batchFactory(metadataBackend, pool); this.batch = batchFactory(metadataBackend, pool, undefined, undefined, logger);
this.batch.start(); this.batch.start();
this.batch.on('ready', done); this.batch.on('ready', done);
}); });

View File

@ -1,33 +0,0 @@
'use strict';

var server = require('../../lib/server')();
var assert = require('../support/assert');

describe('error handler', function () {
    // A request to the SQL endpoint without the mandatory `q` parameter must
    // fail with HTTP 400 and expose the error details through the
    // `X-SQLAPI-Errors` response header as a JSON-serialized payload.
    it('should return an errors header', function (done) {
        // Expected header payload: the optional fields (detail/hint/context)
        // are undefined for this error, so they are dropped by JSON.stringify.
        const errorHeader = {
            detail: undefined,
            hint: undefined,
            context: undefined,
            statusCode: 400,
            message: 'You must indicate a sql query'
        };

        assert.response(server, {
            url: '/api/v1/sql',
            headers: { host: 'vizzuality.cartodb.com' },
            method: 'GET'
        },
        {
            status: 400,
            headers: {
                'Content-Type': 'application/json; charset=utf-8',
                'X-SQLAPI-Errors': JSON.stringify(errorHeader)
            }
        },
        function (err) {
            assert.ifError(err);
            done();
        });
    });
});

View File

@ -1,280 +0,0 @@
'use strict';

require('../helper');

const server = require('../../lib/server')();
const assert = require('../support/assert');
const qs = require('querystring');
const BatchTestClient = require('../support/batch-test-client');
const { TYPES } = require('../../lib/api/middlewares/log');

const QUERY = 'SELECT 14 as foo';
const API_KEY = 1234;

// Remember the configured value so the suite can restore it afterwards.
const logQueries = global.settings.logQueries;

// Returns a one-statement query whose string literal holds `size` 'a' chars.
const makeLongQuery = (size) => `Select '${'a'.repeat(size)}' as foo`;

// Builds the request options for the regular SQL endpoint.
const makeRequest = (method, query) => ({
    method,
    url: '/api/v1/sql?' + qs.stringify({
        q: query,
        api_key: API_KEY
    }),
    headers: {
        host: 'vizzuality.cartodb.com'
    }
});

describe('Log middleware', () => {
    before(() => {
        global.settings.logQueries = true;
    });

    after(() => {
        global.settings.logQueries = logQueries;
    });

    describe('regular queries endpoint', () => {
        ['GET', 'POST'].forEach((method) => {
            it(`${method} query`, (done) => {
                assert.response(server, makeRequest(method, QUERY), { statusCode: 200 }, (err, res) => {
                    assert.ok(!err);
                    assert.ok(res.headers['x-sqlapi-log']);
                    const log = JSON.parse(res.headers['x-sqlapi-log']);
                    assert.deepStrictEqual(log, {
                        request: {
                            sql: {
                                type: TYPES.QUERY,
                                sql: QUERY
                            }
                        }
                    });
                    return done();
                });
            });

            it(`${method} Respects max header size with long queries`, (done) => {
                const longQuery = makeLongQuery(7000);

                assert.response(server, makeRequest(method, longQuery), { statusCode: 200 }, (err, res) => {
                    assert.ok(!err);
                    assert.ok(res.headers['x-sqlapi-log']);
                    assert.ok(res.headers['x-sqlapi-log'].length < 5000);
                    return done();
                });
            });
        });
    });

    // These tests read `this.batchTestClient` from mocha's test context, so
    // they keep plain `function` callbacks instead of arrows.
    describe('batch api queries', function () {
        before(function () {
            this.batchTestClient = new BatchTestClient();
            assert.ok(this.batchTestClient);
        });

        after(function (done) {
            this.batchTestClient.drain(done);
        });

        it('one query', function (done) {
            this.batchTestClient.createJob({ query: QUERY }, (err, jobResult, res) => {
                assert.ok(!err);
                assert.ok(res.headers['x-sqlapi-log']);
                const log = JSON.parse(res.headers['x-sqlapi-log']);
                assert.deepStrictEqual(log, {
                    request: {
                        sql: {
                            type: TYPES.JOB,
                            sql: QUERY
                        }
                    }
                });
                return done();
            });
        });

        it('Respects max header size with long queries', function (done) {
            const payload = { query: makeLongQuery(7000) };

            this.batchTestClient.createJob(payload, (err, jobResult, res) => {
                assert.ok(!err);
                assert.ok(res.headers['x-sqlapi-log']);
                assert.ok(res.headers['x-sqlapi-log'].length < 5000);
                return done();
            });
        });

        it('multiquery job with two queries', function (done) {
            this.batchTestClient.createJob({ query: [QUERY, QUERY] }, (err, jobResult, res) => {
                assert.ok(!err);
                assert.ok(res.headers['x-sqlapi-log']);
                const log = JSON.parse(res.headers['x-sqlapi-log']);
                assert.deepStrictEqual(log, {
                    request: {
                        sql: {
                            type: TYPES.JOB,
                            sql: {
                                0: QUERY,
                                1: QUERY
                            }
                        }
                    }
                });
                return done();
            });
        });

        it('Respects max header size with long multiqueries', function (done) {
            // 71 copies of a ~100-char statement overflow the header budget.
            const queries = new Array(71).fill(makeLongQuery(100));

            this.batchTestClient.createJob({ query: queries }, (err, jobResult, res) => {
                assert.ok(!err);
                assert.ok(res.headers['x-sqlapi-log']);
                assert.ok(res.headers['x-sqlapi-log'].length < 5000);
                return done();
            });
        });

        it('Respects max header size with lots of multiqueries', function (done) {
            const queries = new Array(1000).fill('Select 1');

            this.batchTestClient.createJob({ query: queries }, (err, jobResult, res) => {
                assert.ok(!err);
                assert.ok(res.headers['x-sqlapi-log']);
                assert.ok(res.headers['x-sqlapi-log'].length < 5000);
                return done();
            });
        });

        it('queries with fallbacks', function (done) {
            const FALLBACK_QUERY = {
                query: [{
                    query: QUERY,
                    onsuccess: QUERY,
                    onerror: QUERY
                }],
                onsuccess: QUERY,
                onerror: QUERY
            };

            this.batchTestClient.createJob({ query: FALLBACK_QUERY }, (err, jobResult, res) => {
                assert.ok(!err);
                assert.ok(res.headers['x-sqlapi-log']);
                const log = JSON.parse(res.headers['x-sqlapi-log']);
                assert.deepStrictEqual(log, {
                    request: {
                        sql: {
                            type: TYPES.JOB,
                            sql: {
                                onsuccess: QUERY,
                                onerror: QUERY,
                                query: {
                                    0: {
                                        query: QUERY,
                                        onsuccess: QUERY,
                                        onerror: QUERY
                                    }
                                }
                            }
                        }
                    }
                });
                return done();
            });
        });
    });

    describe('disable queries log', () => {
        before(() => {
            global.settings.logQueries = false;
        });

        after(() => {
            global.settings.logQueries = true;
        });

        it('GET query', (done) => {
            assert.response(server, makeRequest('GET', QUERY), { statusCode: 200 }, (err, res) => {
                assert.ok(!err);
                assert.ok(res.headers['x-sqlapi-log']);
                const log = JSON.parse(res.headers['x-sqlapi-log']);
                assert.deepStrictEqual(log, {
                    request: {
                        sql: null
                    }
                });
                return done();
            });
        });
    });
});

View File

@ -7,14 +7,14 @@ var BATCH_SOURCE = '../../../lib/batch/';
var assert = require('../../support/assert'); var assert = require('../../support/assert');
var redisUtils = require('../../support/redis-utils'); var redisUtils = require('../../support/redis-utils');
var BatchLogger = require(BATCH_SOURCE + 'batch-logger'); var Logger = require('../../../lib/utils/logger');
var JobQueue = require(BATCH_SOURCE + 'job-queue'); var JobQueue = require(BATCH_SOURCE + 'job-queue');
var JobBackend = require(BATCH_SOURCE + 'job-backend'); var JobBackend = require(BATCH_SOURCE + 'job-backend');
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher'); var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
var JobFactory = require(BATCH_SOURCE + 'models/job-factory'); var JobFactory = require(BATCH_SOURCE + 'models/job-factory');
var jobStatus = require(BATCH_SOURCE + 'job-status'); var jobStatus = require(BATCH_SOURCE + 'job-status');
var logger = new BatchLogger(null, 'batch-queries'); var logger = new Logger();
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() }); var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
var jobPublisher = new JobPublisher(redisUtils.getPool()); var jobPublisher = new JobPublisher(redisUtils.getPool());
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger); var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);

View File

@ -7,7 +7,7 @@ var BATCH_SOURCE = '../../../lib/batch/';
var assert = require('../../support/assert'); var assert = require('../../support/assert');
var redisUtils = require('../../support/redis-utils'); var redisUtils = require('../../support/redis-utils');
var BatchLogger = require(BATCH_SOURCE + 'batch-logger'); var Logger = require('../../../lib/utils/logger');
var JobQueue = require(BATCH_SOURCE + 'job-queue'); var JobQueue = require(BATCH_SOURCE + 'job-queue');
var JobBackend = require(BATCH_SOURCE + 'job-backend'); var JobBackend = require(BATCH_SOURCE + 'job-backend');
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher'); var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
@ -16,7 +16,7 @@ var JobCanceller = require(BATCH_SOURCE + 'job-canceller');
var JobFactory = require(BATCH_SOURCE + 'models/job-factory'); var JobFactory = require(BATCH_SOURCE + 'models/job-factory');
var PSQL = require('cartodb-psql'); var PSQL = require('cartodb-psql');
var logger = new BatchLogger(null, 'batch-queries'); var logger = new Logger();
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() }); var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
var jobPublisher = new JobPublisher(redisUtils.getPool()); var jobPublisher = new JobPublisher(redisUtils.getPool());
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger); var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);

View File

@ -5,7 +5,7 @@ var assert = require('../../support/assert');
var redisUtils = require('../../support/redis-utils'); var redisUtils = require('../../support/redis-utils');
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() }); var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
var BatchLogger = require('../../../lib/batch/batch-logger'); var Logger = require('../../../lib/utils/logger');
var JobPublisher = require('../../../lib/batch/pubsub/job-publisher'); var JobPublisher = require('../../../lib/batch/pubsub/job-publisher');
var JobQueue = require('../../../lib/batch/job-queue'); var JobQueue = require('../../../lib/batch/job-queue');
@ -15,7 +15,7 @@ var JobCanceller = require('../../../lib/batch/job-canceller');
describe('job queue', function () { describe('job queue', function () {
var pool = redisUtils.getPool(); var pool = redisUtils.getPool();
var logger = new BatchLogger(null, 'batch-queries'); var logger = new Logger();
var jobPublisher = new JobPublisher(pool); var jobPublisher = new JobPublisher(pool);
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger); var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
var jobBackend = new JobBackend(metadataBackend, jobQueue, logger); var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);

View File

@ -7,7 +7,7 @@ var BATCH_SOURCE = '../../../lib/batch/';
var assert = require('../../support/assert'); var assert = require('../../support/assert');
var redisUtils = require('../../support/redis-utils'); var redisUtils = require('../../support/redis-utils');
var BatchLogger = require(BATCH_SOURCE + 'batch-logger'); var Logger = require('../../../lib/utils/logger');
var JobQueue = require(BATCH_SOURCE + 'job-queue'); var JobQueue = require(BATCH_SOURCE + 'job-queue');
var JobBackend = require(BATCH_SOURCE + 'job-backend'); var JobBackend = require(BATCH_SOURCE + 'job-backend');
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher'); var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
@ -19,7 +19,7 @@ var JobRunner = require(BATCH_SOURCE + 'job-runner');
var QueryRunner = require(BATCH_SOURCE + 'query-runner'); var QueryRunner = require(BATCH_SOURCE + 'query-runner');
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() }); var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
var logger = new BatchLogger(null, 'batch-queries'); var logger = new Logger();
var jobPublisher = new JobPublisher(redisUtils.getPool()); var jobPublisher = new JobPublisher(redisUtils.getPool());
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger); var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
var jobBackend = new JobBackend(metadataBackend, jobQueue, logger); var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);

View File

@ -7,7 +7,7 @@ var BATCH_SOURCE = '../../../lib/batch/';
var assert = require('../../support/assert'); var assert = require('../../support/assert');
var redisUtils = require('../../support/redis-utils'); var redisUtils = require('../../support/redis-utils');
var BatchLogger = require(BATCH_SOURCE + 'batch-logger'); var Logger = require('../../../lib/utils/logger');
var JobQueue = require(BATCH_SOURCE + 'job-queue'); var JobQueue = require(BATCH_SOURCE + 'job-queue');
var JobBackend = require(BATCH_SOURCE + 'job-backend'); var JobBackend = require(BATCH_SOURCE + 'job-backend');
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher'); var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
@ -17,7 +17,7 @@ var JobService = require(BATCH_SOURCE + 'job-service');
var PSQL = require('cartodb-psql'); var PSQL = require('cartodb-psql');
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() }); var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
var logger = new BatchLogger(null, 'batch-queries'); var logger = new Logger();
var jobPublisher = new JobPublisher(redisUtils.getPool()); var jobPublisher = new JobPublisher(redisUtils.getPool());
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger); var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
var jobBackend = new JobBackend(metadataBackend, jobQueue, logger); var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);

View File

@ -9,6 +9,7 @@ var debug = require('debug')('batch-test-client');
var JobStatus = require('../../lib/batch/job-status'); var JobStatus = require('../../lib/batch/job-status');
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() }); var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
var batchFactory = require('../../lib/batch/index'); var batchFactory = require('../../lib/batch/index');
var Logger = require('../../lib/utils/logger');
function response (code) { function response (code) {
return { return {
@ -26,7 +27,8 @@ function BatchTestClient (config) {
this.config = config || {}; this.config = config || {};
this.server = appServer(); this.server = appServer();
this.batch = batchFactory(metadataBackend, redisUtils.getPool(), this.config.name); const logger = new Logger();
this.batch = batchFactory(metadataBackend, redisUtils.getPool(), this.config.name, undefined, logger);
this.batch.start(); this.batch.start();
this.pendingJobs = []; this.pendingJobs = [];

View File

@ -1,142 +0,0 @@
'use strict';

var assert = require('assert');
var errorMiddleware = require('../../lib/api/middlewares/error');

require('../helper');

// Minimal request stub; `callback` selects the middleware's JSONP branch.
const req = { query: { callback: true } };

// Builds a bare Express-like response double that records headers and the
// status code so each test can inspect what the middleware set.
const getRes = () => {
    return {
        headers: {},
        set (key, value) {
            this.headers[key] = value;
        },
        header (key, value) {
            this.set(key, value);
        },
        statusCode: 0,
        status (status) {
            this.statusCode = status;
        },
        json () {},
        jsonp () {}
    };
};

// Shapes the payload the middleware serializes into the `X-SQLAPI-Errors`
// header; statusCode is always 400 for the errors exercised here.
const getErrorHeader = (context, detail, hint, message) => {
    return {
        context,
        detail,
        hint,
        statusCode: 400,
        message
    };
};

describe('error-handler', function () {
    it('should return a header with errors', function (done) {
        const error = new Error('error test');
        error.detail = 'test detail';
        error.hint = 'test hint';
        error.context = 'test context';

        const errorHeader = getErrorHeader(
            error.context,
            error.detail,
            error.hint,
            error.message
        );

        const res = getRes();

        errorMiddleware()(error, req, res, function next () {
            assert.ok(res.headers['X-SQLAPI-Errors'].length > 0);
            assert.deepStrictEqual(
                res.headers['X-SQLAPI-Errors'],
                JSON.stringify(errorHeader)
            );

            done();
        });
    });

    // NOTE(review): this body is identical to the previous test; the JSONP
    // path is only selected through req.query.callback. Consider asserting
    // JSONP-specific behavior (e.g. that res.jsonp is invoked) — TODO confirm.
    it('JSONP should return a header with error statuscode', function (done) {
        const error = new Error('error test');
        error.detail = 'test detail';
        error.hint = 'test hint';
        error.context = 'test context';

        const errorHeader = getErrorHeader(
            error.context,
            error.detail,
            error.hint,
            error.message
        );

        const res = getRes();

        errorMiddleware()(error, req, res, function next () {
            assert.ok(res.headers['X-SQLAPI-Errors'].length > 0);
            assert.deepStrictEqual(
                res.headers['X-SQLAPI-Errors'],
                JSON.stringify(errorHeader)
            );

            done();
        });
    });

    // Characters meaningful to the downstream log-parsing regex must be
    // stripped from every error field before being placed in the header.
    it('should escape chars that broke logs regex', function (done) {
        const badString = 'error: ( ) = " " \' * $ & |';
        const escapedString = 'error ';

        const error = new Error(badString);
        error.detail = badString;
        error.hint = badString;
        error.context = badString;

        const errorHeader = getErrorHeader(
            escapedString,
            escapedString,
            escapedString,
            escapedString
        );

        const res = getRes();

        errorMiddleware()(error, req, res, function () {
            assert.ok(res.headers['X-SQLAPI-Errors'].length > 0);
            assert.deepStrictEqual(
                res.headers['X-SQLAPI-Errors'],
                JSON.stringify(errorHeader)
            );

            done();
        });
    });

    // Messages longer than 1024 chars are cut down so the header stays small.
    it('should truncate too long error messages', function (done) {
        const veryLongString = 'Very long error message '.repeat(1000);
        const truncatedString = veryLongString.substring(0, 1024);

        const error = new Error(veryLongString);

        const expectedErrorHeader = {
            statusCode: 400,
            message: truncatedString
        };

        const res = getRes();

        errorMiddleware()(error, req, res, function () {
            assert.ok(res.headers['X-SQLAPI-Errors'].length > 0);
            assert.deepStrictEqual(
                res.headers['X-SQLAPI-Errors'],
                JSON.stringify(expectedErrorHeader)
            );

            done();
        });
    });
});