Breaking changes:
- Log system revamp:
  - Logs to stdout, disabled while testing
  - Use header `X-Request-Id`, or create a new `uuid` when not present, to identify log entries
  - Be able to set the log level from the env variable `LOG_LEVEL`, useful while testing: `LOG_LEVEL=info npm test`; even more human-readable: `LOG_LEVEL=info npm t | ./node_modules/.bin/pino-pretty`
  - Be able to reduce the footprint in the final log file depending on the environment
  - Use one logger for every service: Queries, Batch Queries (Jobs), and Data Ingestion (CopyTo/CopyFrom)
  - Stop using headers such as `X-SQL-API-Log`, `X-SQL-API-Profiler`, and `X-SQL-API-Errors` as a way to log info
  - Be able to tag requests with labels as an easier way to provide business metrics
  - Metro: Add log-collector utility (`metro`); it will be moved to its own repository. Attaching it here for development purposes. Try it with the following command: `LOG_LEVEL=info npm t | node metro`
  - Metro: Creates `metrics-collector.js`, a stream to update Prometheus' counters and histograms and expose them via an Express app (`:9145/metrics`). Use the ones defined in `grok_exporter`

Announcements:
- Profiler is always set. No need to check its existence anymore
- Unify profiler usage for every endpoint

Bug fixes:
- Avoid hung requests while fetching user identifier
This commit is contained in:
parent 29f5db47f8
commit 762a240890
22 NEWS.md
@@ -1,5 +1,27 @@
# Changelog

## 7.0.0

Released 2020-mm-dd

Breaking changes:
- Log system revamp:
  - Logs to stdout, disabled while testing
  - Use header `X-Request-Id`, or create a new `uuid` when not present, to identify log entries
  - Be able to set the log level from the env variable `LOG_LEVEL`, useful while testing: `LOG_LEVEL=info npm test`; even more human-readable: `LOG_LEVEL=info npm t | ./node_modules/.bin/pino-pretty`
  - Be able to reduce the footprint in the final log file depending on the environment
  - Use one logger for every service: Queries, Batch Queries (Jobs), and Data Ingestion (CopyTo/CopyFrom)
  - Stop using headers such as `X-SQL-API-Log`, `X-SQL-API-Profiler`, and `X-SQL-API-Errors` as a way to log info
  - Be able to tag requests with labels as an easier way to provide business metrics
  - Metro: Add log-collector utility (`metro`); it will be moved to its own repository. Attaching it here for development purposes. Try it with the following command: `LOG_LEVEL=info npm t | node metro`
  - Metro: Creates `metrics-collector.js`, a stream to update Prometheus' counters and histograms and expose them via an Express app (`:9145/metrics`). Use the ones defined in `grok_exporter`

Announcements:
- Profiler is always set. No need to check its existence anymore
- Unify profiler usage for every endpoint

Bug fixes:
- Avoid hung requests while fetching user identifier

## 6.0.0

Released 2020-06-05
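The request-identification scheme from the changelog, as a minimal standalone sketch (assuming Express, pino, and the uuid package; it mirrors the `initLogger` middleware added later in this commit):

```js
'use strict';

const express = require('express');
const pino = require('pino');
const uuid = require('uuid');

const logger = pino(); // logs to stdout by default

const app = express();

app.use((req, res, next) => {
    // Reuse the client-provided X-Request-Id, or mint a new uuid,
    // and bind it to every log entry via a child logger.
    const id = req.get('X-Request-Id') || uuid.v4();
    res.locals.logger = logger.child({ id });

    res.locals.logger.info({ url: req.url }, 'request received');
    res.on('finish', () => res.locals.logger.info({ statusCode: res.statusCode }, 'response sent'));

    next();
});
```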
137 app.js
@@ -5,6 +5,8 @@
const fs = require('fs');
const path = require('path');
const fqdn = require('@carto/fqdn-sync');
const serverOptions = require('./lib/server-options');
const { logger } = serverOptions();

const argv = require('yargs')
.usage('Usage: node $0 <environment> [options]')
@@ -23,7 +25,7 @@ const environmentArg = argv._[0] || process.env.NODE_ENV || 'development';
const configurationFile = path.resolve(argv.config || './config/environments/' + environmentArg + '.js');

if (!fs.existsSync(configurationFile)) {
console.error('Configuration file "%s" does not exist', configurationFile);
logger.fatal(new Error(`Configuration file "${configurationFile}" does not exist`));
process.exit(1);
}

@@ -34,43 +36,13 @@ process.env.NODE_ENV = ENVIRONMENT;

const availableEnvironments = ['development', 'production', 'test', 'staging'];

// sanity check arguments
if (availableEnvironments.indexOf(ENVIRONMENT) === -1) {
console.error('node app.js [environment]');
console.error('Available environments: ' + availableEnvironments.join(', '));
if (!availableEnvironments.includes(ENVIRONMENT)) {
logger.fatal(new Error(`Invalid environment argument, valid ones: ${Object.keys(availableEnvironments).join(', ')}`));
process.exit(1);
}

global.settings.api_hostname = fqdn.hostname();

global.log4js = require('log4js');
const log4jsConfig = {
appenders: [],
replaceConsole: true
};

if (global.settings.log_filename) {
const logFilename = path.resolve(global.settings.log_filename);
const logDirectory = path.dirname(logFilename);
if (!fs.existsSync(logDirectory)) {
console.error('Log filename directory does not exist: ' + logDirectory);
process.exit(1);
}
console.log('Logs will be written to ' + logFilename);
log4jsConfig.appenders.push(
{ type: 'file', absolute: true, filename: logFilename }
);
} else {
log4jsConfig.appenders.push(
{ type: 'console', layout: { type: 'basic' } }
);
}

global.log4js.configure(log4jsConfig);
global.logger = global.log4js.getLogger();

const version = require('./package').version;

const StatsClient = require('./lib/stats/client');

if (global.settings.statsd) {
@@ -81,89 +53,64 @@ if (global.settings.statsd) {
}
const statsClient = StatsClient.getInstance(global.settings.statsd);

const { version, name } = require('./package');
const createServer = require('./lib/server');

const server = createServer(statsClient);
const listener = server.listen(global.settings.node_port, global.settings.node_host);
listener.on('listening', function () {
console.info('Using Node.js %s', process.version);
console.info('Using configuration file "%s"', configurationFile);
console.log(
'CartoDB SQL API %s listening on %s:%s PID=%d (%s)',
version, global.settings.node_host, global.settings.node_port, process.pid, ENVIRONMENT
);
const { address, port } = listener.address();
logger.info({ 'Node.js': process.version, pid: process.pid, environment: process.env.NODE_ENV, [name]: version, address, port, config: configurationFile }, `${name} initialized successfully`);
});

process.on('uncaughtException', function (err) {
global.logger.error('Uncaught exception: ' + err.stack);
logger.error(err, 'Uncaught exception');
});

process.on('SIGHUP', function () {
global.log4js.clearAndShutdownAppenders(function () {
global.log4js.configure(log4jsConfig);
global.logger = global.log4js.getLogger();
console.log('Log files reloaded');
});
const exitProcess = logger.finish((err, finalLogger, listener, signal, killTimeout) => {
scheduleForcedExit(killTimeout, finalLogger);

if (server.batch && server.batch.logger) {
server.batch.logger.reopenFileStreams();
finalLogger.info(`Process has received signal: ${signal}`);

let code = 0;

if (err) {
code = 1;
finalLogger.fatal(err);
}

if (server.dataIngestionLogger) {
server.dataIngestionLogger.reopenFileStreams();
}
});
finalLogger.info(`Process is going to exit with code: ${code}`);
listener.close(() => process.exit(code));

addHandlers({ killTimeout: 45000 });
listener.close(() => {
server.batch.stop(() => {
server.batch.drain((err) => {
if (err) {
finalLogger.error(err);
return process.exit(1);
}

function addHandlers ({ killTimeout }) {
// FIXME: minimize the number of 'uncaughtException' before uncomment the following line
// process.on('uncaughtException', exitProcess(listener, logger, killTimeout));
process.on('unhandledRejection', exitProcess({ killTimeout }));
process.on('SIGINT', exitProcess({ killTimeout }));
process.on('SIGTERM', exitProcess({ killTimeout }));
}

function exitProcess ({ killTimeout }) {
return function exitProcessFn (signal) {
scheduleForcedExit({ killTimeout });

let code = 0;

if (!['SIGINT', 'SIGTERM'].includes(signal)) {
const err = signal instanceof Error ? signal : new Error(signal);
signal = undefined;
code = 1;

global.logger.fatal(err);
} else {
global.logger.info(`Process has received signal: ${signal}`);
}

listener.close(function () {
server.batch.stop(function () {
server.batch.drain(function (err) {
if (err) {
global.logger.error(err);
return process.exit(1);
}

global.logger.info(`Process is going to exit with code: ${code}`);
global.log4js.shutdown(function () {
server.batch.logger.end(function () {
process.exit(code);
});
});
});
process.exit(code);
});
});
};
});
});

function addHandlers (listener, killTimeout) {
// FIXME: minimize the number of 'uncaughtException' before uncomment the following line
// process.on('uncaughtException', (err) => exitProcess(err, listener, 'uncaughtException', killTimeout));
process.on('unhandledRejection', (err) => exitProcess(err, listener, 'unhandledRejection', killTimeout));
process.on('ENOMEM', (err) => exitProcess(err, listener, 'ENOMEM', killTimeout));
process.on('SIGINT', () => exitProcess(null, listener, 'SIGINT', killTimeout));
process.on('SIGTERM', () => exitProcess(null, listener, 'SIGTERM', killTimeout));
}

function scheduleForcedExit ({ killTimeout }) {
addHandlers(listener, 45000);

function scheduleForcedExit (killTimeout, finalLogger) {
// Schedule exit if there is still ongoing work to deal with
const killTimer = setTimeout(() => {
global.logger.info('Process didn\'t close on time. Force exit');
finalLogger.info('Process didn\'t close on time. Force exit');
process.exit(1);
}, killTimeout);
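The `logger.finish(...)` wiring above maps onto pino's final-logger mechanism; a minimal sketch with plain pino (assuming pino v5+, where `pino.final` hands the callback a synchronously-flushing logger for exit paths):

```js
'use strict';

const pino = require('pino');

// An async destination buffers writes, so exit paths need the
// synchronously-flushing logger that pino.final provides.
const logger = pino(pino.destination({ sync: false }));

const exitProcess = pino.final(logger, (err, finalLogger, signal) => {
    let code = 0;

    if (err) {
        code = 1;
        finalLogger.fatal(err);
    } else {
        finalLogger.info(`Process has received signal: ${signal}`);
    }

    finalLogger.info(`Process is going to exit with code: ${code}`);
    process.exit(code);
});

process.on('SIGINT', () => exitProcess(null, 'SIGINT'));
process.on('SIGTERM', () => exitProcess(null, 'SIGTERM'));
```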
@@ -35,10 +35,6 @@ module.exports.routes = {
// X-SQLAPI-Profile header containing elapsed timing for various
// steps taken for producing the response.
module.exports.useProfiler = true;
module.exports.log_format = '[:date] :remote-addr :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-SQLAPI-Profiler]) (:res[X-SQLAPI-Errors]) (:res[X-SQLAPI-Log])';
// If log_filename is given logs will be written there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
module.exports.log_filename = undefined;
// Regular expression pattern to extract username
// from hostname. Must have a single grabbing block.
module.exports.user_from_host = '^(.*)\\.localhost';
@@ -61,7 +57,6 @@ module.exports.db_port = '5432';
module.exports.db_batch_port = '5432';
module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours
module.exports.batch_query_timeout = 12 * 3600 * 1000; // 12 hours in milliseconds
module.exports.batch_log_filename = undefined;
module.exports.copy_timeout = "'5h'";
module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024; // 2 GB
module.exports.copy_from_max_post_size_pretty = '2 GB';
@@ -148,7 +143,6 @@ module.exports.ratelimits = {
}

module.exports.validatePGEntitiesAccess = false;
module.exports.dataIngestionLogPath = undefined;
module.exports.logQueries = true;
module.exports.maxQueriesLogLength = 2000;

@@ -35,10 +35,6 @@ module.exports.routes = {
// X-SQLAPI-Profile header containing elapsed timing for various
// steps taken for producing the response.
module.exports.useProfiler = true;
module.exports.log_format = '[:date] :remote-addr :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-SQLAPI-Profiler]) (:res[X-SQLAPI-Errors]) (:res[X-SQLAPI-Log])';
// If log_filename is given logs will be written there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
module.exports.log_filename = 'logs/cartodb-sql-api.log';
// Regular expression pattern to extract username
// from hostname. Must have a single grabbing block.
module.exports.user_from_host = '^(.*)\\.cartodb\\.com$';
@@ -62,7 +58,6 @@ module.exports.db_port = '6432';
module.exports.db_batch_port = '5432';
module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours
module.exports.batch_query_timeout = 12 * 3600 * 1000; // 12 hours in milliseconds
module.exports.batch_log_filename = 'logs/batch-queries.log';
module.exports.copy_timeout = "'5h'";
module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024; // 2 GB
module.exports.copy_from_max_post_size_pretty = '2 GB';
@@ -152,7 +147,6 @@ module.exports.ratelimits = {
}

module.exports.validatePGEntitiesAccess = false;
module.exports.dataIngestionLogPath = 'logs/data-ingestion.log';
module.exports.logQueries = true;
module.exports.maxQueriesLogLength = 1024;

@@ -35,10 +35,6 @@ module.exports.routes = {
// X-SQLAPI-Profile header containing elapsed timing for various
// steps taken for producing the response.
module.exports.useProfiler = true;
module.exports.log_format = '[:date] :req[X-Real-IP] :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-SQLAPI-Profiler]) (:res[X-SQLAPI-Errors]) (:res[X-SQLAPI-Log])';
// If log_filename is given logs will be written there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
module.exports.log_filename = 'logs/cartodb-sql-api.log';
// Regular expression pattern to extract username
// from hostname. Must have a single grabbing block.
module.exports.user_from_host = '^(.*)\\.cartodb\\.com$';
@@ -62,7 +58,6 @@ module.exports.db_port = '6432';
module.exports.db_batch_port = '5432';
module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours
module.exports.batch_query_timeout = 12 * 3600 * 1000; // 12 hours in milliseconds
module.exports.batch_log_filename = 'logs/batch-queries.log';
module.exports.copy_timeout = "'5h'";
module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024; // 2 GB
module.exports.copy_from_max_post_size_pretty = '2 GB';
@@ -149,7 +144,6 @@ module.exports.ratelimits = {
}

module.exports.validatePGEntitiesAccess = false;
module.exports.dataIngestionLogPath = 'logs/data-ingestion.log';
module.exports.logQueries = true;
module.exports.maxQueriesLogLength = 1024;

@@ -35,10 +35,6 @@ module.exports.routes = {
// X-SQLAPI-Profile header containing elapsed timing for various
// steps taken for producing the response.
module.exports.useProfiler = true;
module.exports.log_format = '[:date] :remote-addr :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-SQLAPI-Profiler]) (:res[X-SQLAPI-Errors]) (:res[X-SQLAPI-Log])';
// If log_filename is given logs will be written there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
// module.exports.log_filename = 'logs/cartodb-sql-api.log';
// Regular expression pattern to extract username
// from hostname. Must have a single grabbing block.
module.exports.user_from_host = '^([^.]*)\\.';
@@ -59,7 +55,6 @@ module.exports.db_port = '5432';
module.exports.db_batch_port = '5432';
module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours
module.exports.batch_query_timeout = 5 * 1000; // 5 seconds in milliseconds
//module.exports.batch_log_filename = 'logs/batch-queries.log';
module.exports.copy_timeout = "'5h'";
module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024; // 2 GB
module.exports.copy_from_max_post_size_pretty = '2 GB';
@@ -149,7 +144,6 @@ module.exports.ratelimits = {
}

module.exports.validatePGEntitiesAccess = false;
//module.exports.dataIngestionLogPath = 'logs/data-ingestion.log';
module.exports.logQueries = true;
module.exports.maxQueriesLogLength = 1024;
@@ -10,8 +10,6 @@ const JobsWipController = require('./jobs-wip-controller');
const error = require('./middlewares/error');
const pubSubMetrics = require('./middlewares/pubsub-metrics');

const BatchLogger = require('../batch/batch-logger');

const JobPublisher = require('../batch/pubsub/job-publisher');
const JobQueue = require('../batch/job-queue');
const JobBackend = require('../batch/job-backend');
@@ -20,8 +18,7 @@ const JobService = require('../batch/job-service');
const PubSubMetricsService = require('../services/pubsub-metrics');

module.exports = class ApiRouter {
constructor ({ redisPool, metadataBackend, statsClient, dataIngestionLogger }) {
const logger = new BatchLogger(global.settings.batch_log_filename, 'batch-queries');
constructor ({ redisPool, metadataBackend, statsClient, logger }) {
const jobPublisher = new JobPublisher(redisPool);
const jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
const jobBackend = new JobBackend(metadataBackend, jobQueue, logger);
@@ -36,7 +33,7 @@ module.exports = class ApiRouter {
this.sqlRouter = new SqlRouter({
metadataBackend,
statsClient,
dataIngestionLogger,
logger,
jobService
});
}
@@ -5,6 +5,7 @@ const queryTables = require('cartodb-query-tables').queryTables;

module.exports = function affectedTables () {
return function affectedTablesMiddleware (req, res, next) {
const { logger } = res.locals;
const { sql } = res.locals.params;
const { authDbParams } = res.locals;
const pg = new PSQL(authDbParams);
@@ -13,15 +14,13 @@ module.exports = function affectedTables () {
.then(affectedTables => {
res.locals.affectedTables = affectedTables;

if (req.profiler) {
req.profiler.done('queryExplain');
}
req.profiler.done('queryExplain');

return next();
})
.catch(err => {
const message = (err && err.message) || 'unknown error';
console.error('Error on query explain \'%s\': %s', sql, message);
err.message = `Error on query explain "${sql}": ${err.message}`;
logger.warn({ error: err });

return next();
});
@@ -9,9 +9,7 @@ module.exports = function authorization (metadataBackend, forceToBeMaster = fals
const credentials = getCredentialsFromRequest(req);

if (!userMatches(credentials, user)) {
if (req.profiler) {
req.profiler.done('authorization');
}
req.profiler.done('authorization');

return next(new Error('permission denied'));
}
@@ -22,9 +20,7 @@ module.exports = function authorization (metadataBackend, forceToBeMaster = fals
const authApi = new AuthApi(req, params);

authApi.verifyCredentials(function (err, authorizationLevel) {
if (req.profiler) {
req.profiler.done('authorization');
}
req.profiler.done('authorization');

if (err) {
return next(err);
@@ -6,9 +6,7 @@ module.exports = function connectionParams (userDatabaseService) {

userDatabaseService.getConnectionParams(user, apikeyToken, authorizationLevel,
function (err, userDbParams, authDbParams) {
if (req.profiler) {
req.profiler.done('getConnectionParams');
}
req.profiler.done('getConnectionParams');

if (err) {
return next(err);
@@ -1,45 +1,27 @@
'use strict';

const errorHandlerFactory = require('../../services/error-handler-factory');
const { stringifyForLogs } = require('../../utils/logs');
const MAX_ERROR_STRING_LENGTH = 1024;

module.exports = function error () {
return function errorMiddleware (err, req, res, next) {
const { logger } = res.locals;
const errorHandler = errorHandlerFactory(err);
const errorResponse = errorHandler.getResponse();

if (global.settings.environment === 'development') {
errorResponse.stack = err.stack;
}

if (global.settings.environment !== 'test') {
// TODO: email this Exception report
console.error('EXCEPTION REPORT: ' + err.stack);
}
logger.error({ error: err });

// Force inline content disposition
res.header('Content-Disposition', 'inline');

if (req && req.profiler) {
req.profiler.done('finish');
res.header('X-SQLAPI-Profiler', req.profiler.toJSONString());
}

setErrorHeader(errorHandler, res);

res.header('Content-Type', 'application/json; charset=utf-8');
res.status(getStatusError(errorHandler, req));

if (req.query && req.query.callback) {
res.jsonp(errorResponse);
} else {
res.json(errorResponse);
}

if (req && req.profiler) {
res.req.profiler.sendStats();
}

return next();
};
};
@@ -54,15 +36,3 @@ function getStatusError (errorHandler, req) {

return statusError;
}

function setErrorHeader (errorHandler, res) {
const errorsLog = {
context: errorHandler.context,
detail: errorHandler.detail,
hint: errorHandler.hint,
statusCode: errorHandler.http_status,
message: errorHandler.message
};

res.set('X-SQLAPI-Errors', stringifyForLogs(errorsLog, MAX_ERROR_STRING_LENGTH));
}
23 lib/api/middlewares/log-query.js
Normal file
@@ -0,0 +1,23 @@
'use strict';

const MAX_SQL_LENGTH = (global.settings.logQueries && global.settings.maxQueriesLogLength) || 1024;

module.exports = function logQuery () {
if (!global.settings.logQueries) {
return function noopLogQuery (req, res, next) {
return next();
};
}

return function logQueryMiddleware (req, res, next) {
const { logger } = res.locals;

logger.info({ sql: ensureMaxQueryLength(res.locals.params.sql) });

return next();
};
};

function ensureMaxQueryLength (sql, length = MAX_SQL_LENGTH) {
return sql.substring(0, length);
}
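The truncation above in isolation, with a hypothetical 16-character limit:

```js
// With global.settings.maxQueriesLogLength = 16, a long query is
// logged truncated rather than in full:
const sql = 'SELECT * FROM very_long_table_name';
console.log(sql.substring(0, 16)); // => 'SELECT * FROM ve'
```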
@@ -1,99 +0,0 @@
'use strict';

const { stringifyForLogs } = require('../../utils/logs');

const MAX_SQL_LENGTH = (global.settings.logQueries && global.settings.maxQueriesLogLength) || 1024;

// This is used to set a hard limit to the header size
// While Node accepts headers of up to 8192 character, different libraries impose other limits
// This might break the JSON structure of the log, but avoids responses being dropped by varnish
const HEADER_HARD_LIMIT = 4096;

const TYPES = {
QUERY: 'query',
JOB: 'job'
};

module.exports = function log (sqlType = TYPES.QUERY) {
return function logMiddleware (req, res, next) {
const logObj = {
request: {
sql: prepareSQL(res.locals.params.sql, sqlType)
}
};

res.set('X-SQLAPI-Log', stringifyForLogs(logObj).substring(0, HEADER_HARD_LIMIT));

return next();
};
};

module.exports.TYPES = TYPES;

function prepareSQL (sql, sqlType) {
if (!sql || !global.settings.logQueries) {
return null;
}

if (typeof sql === 'string') {
return {
type: sqlType,
sql: ensureMaxQueryLength(sql)
};
}

if (Array.isArray(sql)) {
const lengthPerQuery = MAX_SQL_LENGTH / sql.length;
return {
type: sqlType,
sql: sql.map(q => ensureMaxQueryLength(q, lengthPerQuery))
};
}

if (sql.query && Array.isArray(sql.query)) {
return {
type: sqlType,
sql: prepareBatchFallbackQuery(sql)
};
}
}

/**
 * Process a Batch API fallback query controlling the queries length
 * We need to create a new object avoiding original modifications
 *
 * @param {Object} sql
 */
function prepareBatchFallbackQuery (sql) {
const fallbackQuery = {};

if (sql.onsuccess) {
fallbackQuery.onsuccess = ensureMaxQueryLength(sql.onsuccess);
}

if (sql.onerror) {
fallbackQuery.onerror = ensureMaxQueryLength(sql.onerror);
}

fallbackQuery.query = sql.query.map(query => {
const subquery = {
query: ensureMaxQueryLength(query.query)
};

if (query.onsuccess) {
subquery.onsuccess = ensureMaxQueryLength(query.onsuccess);
}

if (query.onerror) {
subquery.onerror = ensureMaxQueryLength(query.onerror);
}

return subquery;
});

return fallbackQuery;
}

function ensureMaxQueryLength (sql, length = MAX_SQL_LENGTH) {
return sql.substring(0, length);
}
@@ -1,27 +1,16 @@
'use strict';

module.exports = function logger () {
if (!global.log4js) {
return function dummyLoggerMiddleware (req, res, next) {
next();
};
}
const uuid = require('uuid');

const options = {
level: 'info',
buffer: true,
// log4js provides a tokens solution as express but it does not provide the request/response in the callback.
// Thus it is not possible to extract relevant information from them.
// This is a workaround to be able to access request/response.
format: function (req, res, format) {
const defaultFormat = ':remote-addr :method :req[Host]:url :status :response-time ms -> :res[Content-Type]';
const logFormat = global.settings.log_format || defaultFormat;
module.exports = function initLogger ({ logger }) {
return function initLoggerMiddleware (req, res, next) {
const id = req.get('X-Request-Id') || uuid.v4();
res.locals.logger = logger.child({ id });

return format(logFormat);
}
res.locals.logger.info({ request: req });
res.on('finish', () => res.locals.logger.info({ response: res }));
res.on('close', () => res.locals.logger.info({ end: true }));

next();
};

const logger = global.log4js.getLogger();

return global.log4js.connectLogger(logger, options);
};
@@ -1,36 +1,31 @@
'use strict';

const Profiler = require('../../stats/profiler-proxy');
const { name: prefix } = require('../../../package.json');

module.exports = function profiler ({ statsClient }) {
return function profilerMiddleware (req, res, next) {
const start = new Date();
const { logger } = res.locals;

req.profiler = new Profiler({
profile: global.settings.useProfiler,
statsd_client: statsClient
});

next();
};
};
req.profiler.start(prefix);

module.exports.initializeProfiler = function initializeProfiler (label) {
return function initializeProfilerMiddleware (req, res, next) {
if (req.profiler) {
req.profiler.start(`sqlapi.${label}`);
}

next();
};
};

module.exports.finishProfiler = function finishProfiler () {
return function finishProfilerMiddleware (req, res, next) {
if (req.profiler) {
res.on('finish', () => {
req.profiler.add({ response: new Date() - start });
req.profiler.end();
req.profiler.sendStats();
logger.info({ stats: req.profiler.toJSON() });

res.header('X-SQLAPI-Profiler', req.profiler.toJSONString());
}
try {
req.profiler.sendStats();
} catch (err) {
logger.warn({ error: err });
}
});

next();
};
15 lib/api/middlewares/tag.js
Normal file
@@ -0,0 +1,15 @@
'use strict';

module.exports = function tag ({ tags }) {
if (!Array.isArray(tags) || !tags.every((tag) => typeof tag === 'string')) {
throw new Error('Required "tags" option must be a valid Array: [string, string, ...]');
}

return function tagMiddleware (req, res, next) {
const { logger } = res.locals;
res.locals.tags = tags;
res.on('finish', () => logger.info({ tags: res.locals.tags }));

next();
};
};
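Usage sketch (hypothetical route wiring; the real chains appear in the controllers below): every request through the chain gets `res.locals.tags`, and the labels are emitted when the response finishes, which is what makes the business-metrics aggregation possible.

```js
// Hypothetical wiring, mirroring the controllers in this commit.
sqlRouter.post('/sql', [
    tag({ tags: ['query'] }),
    // ...rest of the middleware chain
]);

// tag() validates its input eagerly, at route-definition time:
tag({ tags: ['query', 42] }); // throws: Required "tags" option must be a valid Array
```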
@@ -5,9 +5,7 @@ module.exports = function timeoutLimits (metadataBackend) {
const { user, authorizationLevel } = res.locals;

metadataBackend.getUserTimeoutRenderLimits(user, function (err, timeoutRenderLimit) {
if (req.profiler) {
req.profiler.done('getUserTimeoutLimits');
}
req.profiler.done('getUserTimeoutLimits');

if (err) {
return next(err);
@@ -6,16 +6,24 @@ module.exports = function user (metadataBackend) {
const cdbRequest = new CdbRequest();

return function userMiddleware (req, res, next) {
res.locals.user = getUserNameFromRequest(req, cdbRequest);
const { logger } = res.locals;

getUserId(metadataBackend, res.locals.user, function (err, userId) {
try {
res.locals.user = getUserNameFromRequest(req, cdbRequest);
logger.info({ user: res.locals.user });
} catch (err) {
return next(err);
}

metadataBackend.getUserId(res.locals.user, (err, userId) => {
if (err || !userId) {
const error = new Error('Unauthorized');
error.type = 'auth';
error.subtype = 'user-not-found';
error.http_status = 404;
error.message = errorUserNotFoundMessageTemplate(res.locals.user);
next(error);

return next(error);
}

res.locals.userId = userId;
@@ -28,15 +36,6 @@ function getUserNameFromRequest (req, cdbRequest) {
return cdbRequest.userByReq(req);
}

function getUserId (metadataBackend, userName, callback) {
metadataBackend.getUserId(userName, function (err, userId) {
if (err) {
return callback(err);
}
return callback(null, userId);
});
}

function errorUserNotFoundMessageTemplate (user) {
return `Sorry, we can't find CARTO user '${user}'. Please check that you have entered the correct domain.`;
}
@@ -3,7 +3,6 @@
const user = require('../middlewares/user');
const authorization = require('../middlewares/authorization');
const connectionParams = require('../middlewares/connection-params');
const { initializeProfiler } = require('../middlewares/profiler');
const dbQuota = require('../middlewares/db-quota');
const bodyParser = require('../middlewares/body-parser');
const rateLimits = require('../middlewares/rate-limit');
@@ -15,41 +14,41 @@ const Throttler = require('../../services/throttler-stream');
const zlib = require('zlib');
const { PassThrough } = require('stream');
const params = require('../middlewares/params');
const tag = require('../middlewares/tag');

module.exports = class CopyController {
constructor (metadataBackend, userDatabaseService, userLimitsService, logger) {
constructor (metadataBackend, userDatabaseService, userLimitsService) {
this.metadataBackend = metadataBackend;
this.userDatabaseService = userDatabaseService;
this.userLimitsService = userLimitsService;
this.logger = logger;
}

route (sqlRouter) {
const copyFromMiddlewares = endpointGroup => {
return [
initializeProfiler('copyfrom'),
tag({ tags: ['ingestion', 'copyfrom'] }),
user(this.metadataBackend),
rateLimits(this.userLimitsService, endpointGroup),
authorization(this.metadataBackend),
connectionParams(this.userDatabaseService),
dbQuota(),
params({ strategy: 'copyfrom' }),
handleCopyFrom(this.logger),
errorHandler(this.logger)
handleCopyFrom(),
errorHandler()
];
};

const copyToMiddlewares = endpointGroup => {
return [
tag({ tags: ['ingestion', 'copyto'] }),
bodyParser(),
initializeProfiler('copyto'),
user(this.metadataBackend),
rateLimits(this.userLimitsService, endpointGroup),
authorization(this.metadataBackend),
connectionParams(this.userDatabaseService),
params({ strategy: 'copyto' }),
handleCopyTo(this.logger),
errorHandler(this.logger)
handleCopyTo(),
errorHandler()
];
};

@@ -61,6 +60,7 @@ module.exports = class CopyController {

function handleCopyTo (logger) {
return function handleCopyToMiddleware (req, res, next) {
const { logger } = res.locals;
const { userDbParams, user } = res.locals;
const { sql, filename } = res.locals.params;

@@ -98,8 +98,9 @@ function handleCopyTo (logger) {
};
}

function handleCopyFrom (logger) {
function handleCopyFrom () {
return function handleCopyFromMiddleware (req, res, next) {
const { logger } = res.locals;
const { userDbParams, user, dbRemainingQuota } = res.locals;
const { sql } = res.locals.params;
const isGzip = req.get('content-encoding') === 'gzip';
@@ -170,10 +171,11 @@ function handleCopyFrom (logger) {
};
}

function errorHandler (logger) {
function errorHandler () {
return function errorHandlerMiddleware (err, req, res, next) {
const { logger } = res.locals;
if (res.headersSent) {
logger.error(err);
logger.error({ error: err });
const errorHandler = errorHandlerFactory(err);
res.write(JSON.stringify(errorHandler.getResponse()));
res.end();
@@ -4,13 +4,12 @@ const util = require('util');

const bodyParser = require('../middlewares/body-parser');
const user = require('../middlewares/user');
const { initializeProfiler, finishProfiler } = require('../middlewares/profiler');
const authorization = require('../middlewares/authorization');
const connectionParams = require('../middlewares/connection-params');
const rateLimits = require('../middlewares/rate-limit');
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimits;
const params = require('../middlewares/params');
const log = require('../middlewares/log');
const tag = require('../middlewares/tag');

module.exports = class JobController {
constructor (metadataBackend, userDatabaseService, jobService, statsdClient, userLimitsService) {
@@ -31,39 +30,38 @@ module.exports = class JobController {
);

sqlRouter.post('/job', [
tag({ tags: ['job', 'create'] }),
bodyParser(),
checkBodyPayloadSize(),
params({ strategy: 'job' }),
log(log.TYPES.JOB),
jobMiddlewares('create', createJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_CREATE)
jobMiddlewares(createJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_CREATE)
]);

sqlRouter.get('/job/:job_id', [
tag({ tags: ['job', 'retrieve'] }),
bodyParser(),
jobMiddlewares('retrieve', getJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_GET)
jobMiddlewares(getJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_GET)
]);

sqlRouter.delete('/job/:job_id', [
tag({ tags: ['job', 'cancel'] }),
bodyParser(),
jobMiddlewares('cancel', cancelJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_DELETE)
jobMiddlewares(cancelJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_DELETE)
]);
}
};

function composeJobMiddlewares (metadataBackend, userDatabaseService, jobService, statsdClient, userLimitsService, pubSubMetricsService) {
return function jobMiddlewares (action, job, endpointGroup) {
return function jobMiddlewares (job, endpointGroup) {
const forceToBeMaster = true;

return [
initializeProfiler('job'),
user(metadataBackend),
rateLimits(userLimitsService, endpointGroup),
authorization(metadataBackend, forceToBeMaster),
connectionParams(userDatabaseService),
job(jobService),
setServedByDBHostHeader(),
finishProfiler(),
logJobResult(action),
incrementSuccessMetrics(statsdClient),
sendResponse(),
incrementErrorMetrics(statsdClient)
@@ -76,9 +74,7 @@ function cancelJob (jobService) {
const { job_id: jobId } = req.params;

jobService.cancel(jobId, (err, job) => {
if (req.profiler) {
req.profiler.done('cancelJob');
}
req.profiler.done('cancelJob');

if (err) {
return next(err);
@@ -96,9 +92,7 @@ function getJob (jobService) {
const { job_id: jobId } = req.params;

jobService.get(jobId, (err, job) => {
if (req.profiler) {
req.profiler.done('getJob');
}
req.profiler.done('getJob');

if (err) {
return next(err);
@@ -113,7 +107,8 @@ function getJob (jobService) {

function createJob (jobService) {
return function createJobMiddleware (req, res, next) {
var data = {
const { logger } = res.locals;
const data = {
user: res.locals.user,
query: res.locals.params.sql,
host: res.locals.userDbParams.host,
@@ -124,15 +119,13 @@ function createJob (jobService) {
};

jobService.create(data, (err, job) => {
if (req.profiler) {
req.profiler.done('createJob');
}
req.profiler.done('createJob');

if (err) {
return next(err);
}

res.locals.job_id = job.job_id;
logger.info({ job: job.toJSON() });

res.statusCode = 201;
res.body = job.serialize();
@@ -185,21 +178,6 @@ function setServedByDBHostHeader () {
};
}

function logJobResult (action) {
return function logJobResultMiddleware (req, res, next) {
if (process.env.NODE_ENV !== 'test') {
console.info(JSON.stringify({
type: 'sql_api_batch_job',
username: res.locals.user,
action: action,
job_id: req.params.job_id || res.locals.job_id
}));
}

next();
};
}

const METRICS_PREFIX = 'sqlapi.job';

function incrementSuccessMetrics (statsdClient) {
@@ -1,14 +1,13 @@
'use strict';

const bodyParser = require('../middlewares/body-parser');
const { initializeProfiler } = require('../middlewares/profiler');
const user = require('../middlewares/user');
const rateLimits = require('../middlewares/rate-limit');
const authorization = require('../middlewares/authorization');
const connectionParams = require('../middlewares/connection-params');
const timeoutLimits = require('../middlewares/timeout-limits');
const params = require('../middlewares/params');
const log = require('../middlewares/log');
const log = require('../middlewares/log-query');
const cancelOnClientAbort = require('../middlewares/cancel-on-client-abort');
const affectedTables = require('../middlewares/affected-tables');
const accessValidator = require('../middlewares/access-validator');
@@ -19,6 +18,7 @@ const surrogateKey = require('../middlewares/surrogate-key');
const lastModified = require('../middlewares/last-modified');
const formatter = require('../middlewares/formatter');
const content = require('../middlewares/content');
const tag = require('../middlewares/tag');

const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimits;
const PSQL = require('cartodb-psql');
@@ -36,15 +36,15 @@ module.exports = class QueryController {

const queryMiddlewares = () => {
return [
tag({ tags: ['query'] }),
bodyParser(),
initializeProfiler('query'),
user(this.metadataBackend),
rateLimits(this.userLimitsService, RATE_LIMIT_ENDPOINTS_GROUPS.QUERY),
authorization(this.metadataBackend, forceToBeMaster),
connectionParams(this.userDatabaseService),
timeoutLimits(this.metadataBackend),
params({ strategy: 'query' }),
log(log.TYPES.QUERY),
log(),
cancelOnClientAbort(),
affectedTables(),
accessValidator(),
@@ -73,9 +73,7 @@ function handleQuery ({ stats } = {}) {
let { formatter } = req;

try {
if (req.profiler) {
req.profiler.done('init');
}
req.profiler.done('init');

const opts = {
username: username,
@@ -95,7 +93,6 @@ function handleQuery ({ stats } = {}) {
opts.profiler = req.profiler;
opts.beforeSink = function () {
req.profiler.done('beforeSink');
res.header('X-SQLAPI-Profiler', req.profiler.toJSONString());
};
}

@@ -106,10 +103,6 @@ function handleQuery ({ stats } = {}) {
formatter.sendResponse(opts, (err) => {
formatter = null;

if (req.profiler) {
req.profiler.sendStats();
}

if (stats) {
if (err) {
stats.increment('sqlapi.query.error');
@@ -6,7 +6,7 @@ const UserDatabaseService = require('../../services/user-database-service');
const UserLimitsService = require('../../services/user-limits');

const socketTimeout = require('../middlewares/socket-timeout');
const logger = require('../middlewares/logger');
const initLogger = require('../middlewares/logger');
const profiler = require('../middlewares/profiler');
const cors = require('../middlewares/cors');
const servedByHostHeader = require('../middlewares/served-by-host-header');
@@ -16,7 +16,10 @@ const CopyController = require('./copy-controller');
const JobController = require('./job-controller');

module.exports = class SqlRouter {
constructor ({ metadataBackend, statsClient, dataIngestionLogger, jobService }) {
constructor ({ metadataBackend, statsClient, logger, jobService }) {
this.logger = logger;
this.statsClient = statsClient;

const userLimitsServiceOptions = {
limits: {
rateLimitsEnabled: global.settings.ratelimits.rateLimitsEnabled
@@ -35,8 +38,7 @@ module.exports = class SqlRouter {
this.copyController = new CopyController(
metadataBackend,
userDatabaseService,
userLimitsService,
dataIngestionLogger
userLimitsService
);

this.jobController = new JobController(
@@ -58,7 +60,7 @@ module.exports = class SqlRouter {
middlewares.forEach(middleware => sqlRouter.use(middleware()));

sqlRouter.use(socketTimeout());
sqlRouter.use(logger());
sqlRouter.use(initLogger({ logger: this.logger }));
sqlRouter.use(profiler({ statsClient: this.statsClient }));
sqlRouter.use(cors());
sqlRouter.use(servedByHostHeader());
@@ -1,11 +0,0 @@
'use strict';

const Logger = require('../services/logger');

class BatchLogger extends Logger {
log (job) {
return job.log(this.logger);
}
}

module.exports = BatchLogger;
@@ -4,6 +4,7 @@ var util = require('util');
var EventEmitter = require('events').EventEmitter;
var queue = require('queue-async');
var HostScheduler = require('./scheduler/host-scheduler');
var JobStatus = require('./job-status');

var EMPTY_QUEUE = true;

@@ -114,12 +115,11 @@ Batch.prototype.processJob = function (user, callback) {
return callback(err, !EMPTY_QUEUE);
}

self.logger.debug(
'[%s] Job=%s status=%s user=%s (failed_reason=%s)',
self.name, jobId, job.data.status, user, job.failed_reason
);

self.logger.log(job);
if (JobStatus.isFinal(job.data.status)) {
self.logger.info({ job: job.toJSON() });
} else {
self.logger.debug({ job: job.toJSON() });
}

return callback(null, !EMPTY_QUEUE);
});
@@ -9,14 +9,11 @@ var JobPublisher = require('./pubsub/job-publisher');
var JobQueue = require('./job-queue');
var JobBackend = require('./job-backend');
var JobService = require('./job-service');
var BatchLogger = require('./batch-logger');
var Batch = require('./batch');

module.exports = function batchFactory (metadataBackend, redisPool, name, statsdClient, loggerPath) {
module.exports = function batchFactory (metadataBackend, redisPool, name, statsdClient, logger) {
var userDatabaseMetadataService = new UserDatabaseMetadataService(metadataBackend);

var logger = new BatchLogger(loggerPath, 'batch-queries');

var jobSubscriber = new JobSubscriber(redisPool);
var jobPublisher = new JobPublisher(redisPool);
@@ -116,7 +116,3 @@ JobBase.prototype.serialize = function () {

return data;
};

JobBase.prototype.log = function (/* logger */) {
return false;
};
@@ -205,54 +205,39 @@ JobFallback.prototype.getLastFinishedStatus = function () {
}.bind(this), JobStatus.DONE);
};

JobFallback.prototype.log = function (logger) {
if (!isFinished(this)) {
return false;
}
JobFallback.prototype.toJSON = function () {
const queries = this.data.query.query;

var queries = this.data.query.query;
return {
type: this.constructor.name,
id: this.data.job_id,
username: this.data.user,
status: this.data.status,
failed_reason: this.failed_reason,
created: this.data.created_at,
updated: this.data.updated_at,
elapsed: elapsedTime(this.data.created_at, this.data.updated_at),
dbhost: this.data.host,
queries: queries.map((query) => {
const node = query.id ? parseQueryId(query.id) : undefined;

for (var i = 0; i < queries.length; i++) {
var query = queries[i];

var logEntry = {
created: this.data.created_at,
waiting: elapsedTime(this.data.created_at, query.started_at),
time: query.started_at,
endtime: query.ended_at,
username: this.data.user,
dbhost: this.data.host,
job: this.data.job_id,
status: query.status,
elapsed: elapsedTime(query.started_at, query.ended_at)
};

var queryId = query.id;

var tag = 'query';
if (queryId) {
logEntry.query_id = queryId;

var node = parseQueryId(queryId);
if (node) {
logEntry.analysis = node.analysisId;
logEntry.node = node.nodeId;
logEntry.type = node.nodeType;
tag = 'analysis';
}
}

logger.info(logEntry, tag);
}

return true;
return {
status: query.status,
fallback_status: query.fallback_status,
failed_reason: query.failed_reason,
waiting: elapsedTime(this.data.created_at, query.started_at),
starttime: query.started_at,
endtime: query.ended_at,
elapsed: elapsedTime(query.started_at, query.ended_at),
id: query.id,
analysis: node ? node.analysisId : undefined,
node: node ? node.nodeId : undefined,
type: node ? node.nodeType : undefined
};
})
};
};

function isFinished (job) {
return JobStatus.isFinal(job.data.status) &&
(!job.data.fallback_status || JobStatus.isFinal(job.data.fallback_status));
}

function parseQueryId (queryId) {
var data = queryId.split(':');
@@ -88,3 +88,34 @@ JobMultiple.prototype.setStatus = function (finalStatus, errorMesssage) {

throw new Error('Cannot set status from ' + initialStatus + ' to ' + finalStatus);
};

JobMultiple.prototype.toJSON = function () {
const queries = this.data.query;

return {
type: this.constructor.name,
id: this.data.job_id,
username: this.data.user,
status: this.data.status,
created: this.data.created_at,
updated: this.data.updated_at,
elapsed: elapsedTime(this.data.created_at, this.data.updated_at),
dbhost: this.data.host,
queries: queries.map((query) => {
return {
status: query.status,
failed_reason: query.failed_reason
};
})
};
};

function elapsedTime (startedAt, endedAt) {
if (!startedAt || !endedAt) {
return;
}

var start = new Date(startedAt);
var end = new Date(endedAt);
return end.getTime() - start.getTime();
}
@@ -32,3 +32,27 @@ JobSimple.prototype.setQuery = function (query) {

JobSimple.super_.prototype.setQuery.call(this, query);
};

JobSimple.prototype.toJSON = function () {
return {
class: this.constructor.name,
id: this.data.job_id,
username: this.data.user,
status: this.data.status,
failed_reason: this.data.failed_reason,
created: this.data.created_at,
updated: this.data.updated_at,
elapsed: elapsedTime(this.data.created_at, this.data.updated_at),
dbhost: this.data.host
};
};

function elapsedTime (startedAt, endedAt) {
if (!startedAt || !endedAt) {
return;
}

var start = new Date(startedAt);
var end = new Date(endedAt);
return end.getTime() - start.getTime();
}
@@ -1,41 +1,24 @@
'use strict';

/**
 * this module provides cartodb-specific interpretation
 * of request headers
 */

function CartodbRequest () {
}

module.exports = CartodbRequest;

/**
 * If the request contains the user use it, if not guess from the host
 */
CartodbRequest.prototype.userByReq = function (req) {
if (req.params.user) {
return req.params.user;
module.exports = class CdbRequest {
constructor () {
// would extract "strk" from "strk.cartodb.com"
this.RE_USER_FROM_HOST = new RegExp(global.settings.user_from_host || '^([^\\.]+)\\.');
}

userByReq (req) {
const host = req.headers.host || '';

if (req.params.user) {
return req.params.user;
}

const mat = host.match(this.RE_USER_FROM_HOST);

if (!mat || mat.length !== 2) {
throw new Error(`No username found in hostname '${host}'`);
}

return mat[1];
}
return userByHostName(req.headers.host);
};

var userFromHostRegex = new RegExp(
global.settings.user_from_host || '^([^\\.]+)\\.' // would extract "strk" from "strk.cartodb.com"
);

function userByHostName (host) {
var mat = host.match(userFromHostRegex);
if (!mat) {
console.error("ERROR: user pattern '" + userFromHostRegex + "' does not match hostname '" + host + "'");
return;
}

if (mat.length !== 2) {
console.error(
"ERROR: pattern '" + userFromHostRegex + "' gave unexpected matches against '" + host + "': " + mat
);
return;
}
return mat[1];
}
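The username extraction in isolation (default pattern; `strk.cartodb.com` is the example the code comment itself uses):

```js
// Default pattern: capture everything before the first dot.
const RE_USER_FROM_HOST = new RegExp('^([^\\.]+)\\.');

const mat = 'strk.cartodb.com'.match(RE_USER_FROM_HOST);
console.log(mat[1]); // => 'strk'
```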
@@ -1,5 +1,8 @@
'use strict';

const Logger = require('./utils/logger');
const logger = new Logger();

module.exports = function getServerOptions () {
const defaults = {
routes: {
@@ -25,7 +28,8 @@ module.exports = function getServerOptions () {
middlewares: []
}]
}]
}
},
logger
};

return Object.assign({}, defaults, global.settings);
@@ -4,7 +4,6 @@ const express = require('express');
const fs = require('fs');
const RedisPool = require('redis-mpool');
const cartodbRedis = require('cartodb-redis');
const Logger = require('./services/logger');
const ApiRouter = require('./api/api-router');
const batchFactory = require('./batch');
const getServerOptions = require('./server-options');
@@ -15,7 +14,7 @@ process.env.PGAPPNAME = process.env.PGAPPNAME || 'cartodb_sqlapi';
require('./utils/date-to-json');

module.exports = function createServer (statsClient) {
const options = getServerOptions();
const { routes, logger } = getServerOptions();
const app = express();
const redisPool = new RedisPool({
name: 'sql-api',
@@ -56,26 +55,20 @@ module.exports = function createServer (statsClient) {
app.disable('x-powered-by');
app.disable('etag');

const dataIngestionLogger = new Logger(global.settings.dataIngestionLogPath, 'data-ingestion');
app.dataIngestionLogger = dataIngestionLogger;

const apiRouter = new ApiRouter({
redisPool,
metadataBackend,
statsClient,
dataIngestionLogger
logger
});
apiRouter.route(app, options.routes.api);
apiRouter.route(app, routes.api);

const isBatchProcess = process.argv.indexOf('--no-batch') === -1;

if (global.settings.environment !== 'test' && isBatchProcess) {
const batchName = global.settings.api_hostname || 'batch';

app.batch = batchFactory(
metadataBackend, redisPool, batchName, statsClient, global.settings.batch_log_filename
);

app.batch = batchFactory(metadataBackend, redisPool, batchName, statsClient, logger);
app.batch.start();
}
@@ -15,12 +15,18 @@ class ErrorHandler extends Error {
}

getResponse () {
return {
const serialized = {
error: [this.message],
context: this.context,
detail: this.detail,
hint: this.hint
};

if (global.settings.environment === 'development') {
serialized.stack = this.stack;
}

return serialized;
}

getHttpStatus (httpStatus = 400) {
@@ -65,7 +65,7 @@ module.exports = class StreamCopyMetrics {
size: this.size,
rows: this.rows,
gzip: this.isGzip,
'cdb-user': this.username,
username: this.username,
time: this.time,
timestamp,
sql: this.sql
@@ -82,6 +82,6 @@ module.exports = class StreamCopyMetrics {

logData.success = this.success;

this.logger.info(logData);
this.logger.info({ ingestion: logData });
}
};
@@ -53,7 +53,7 @@ module.exports = class StreamCopy {
if (action === ACTION_TO) {
pgstream.on('end', () => done());
pgstream.on('error', () => this._cancel(client.processID, action));
pgstream.on('warning', (msg) => this.logger.warn(msg));
pgstream.on('warning', (msg) => this.logger.warn({ error: new Error(msg) }));
} else if (action === ACTION_FROM) {
pgstream.on('finish', () => done());
pgstream.on('client-close', err => client.connection.sendCopyFail(err.message));
@@ -76,7 +76,7 @@ module.exports = class StreamCopy {

pg.query(cancelQuery(pid), (err, result) => {
if (err) {
return this.logger.error(err);
return this.logger.error({ error: err });
}

const isCancelled = result.rows.length && result.rows[0].cancelled;
@@ -87,15 +87,13 @@ module.exports = class StreamCopy {

return pg.query(terminateQuery(pid), (err, result) => {
if (err) {
return this.logger.error(err);
return this.logger.error({ error: err });
}

const isTerminated = result.rows.length && result.rows[0].terminated;

if (!isTerminated) {
return this.logger.error(
new Error(`Unable to terminate "copy ${actionType}" stream query (pid: ${pid})`)
);
return this.logger.error({ error: new Error(`Unable to terminate "copy ${actionType}" stream query (pid: ${pid})`) });
}

return this.logger.info(`Terminated "copy ${actionType}" stream query successfully (pid: ${pid})`);
@ -52,4 +52,8 @@ ProfilerProxy.prototype.toJSONString = function () {
|
||||
return this.profile ? this.profiler.toJSONString() : '{}';
|
||||
};
|
||||
|
||||
ProfilerProxy.prototype.toJSON = function () {
|
||||
return this.profile ? JSON.parse(this.profiler.toJSONString()) : {};
|
||||
};
|
||||
|
||||
module.exports = ProfilerProxy;
|
||||
|
60
lib/utils/logger.js
Normal file
60
lib/utils/logger.js
Normal file
@ -0,0 +1,60 @@
|
||||
'use strict';
|
||||
|
||||
const pino = require('pino');
|
||||
const { req: requestSerializer, res: responseSerializer, err, wrapErrorSerializer } = pino.stdSerializers;
|
||||
const DEV_ENVS = ['test', 'development'];
|
||||
|
||||
module.exports = class Logger {
|
||||
constructor () {
|
||||
const { LOG_LEVEL, NODE_ENV } = process.env;
|
||||
const logLevelFromNodeEnv = NODE_ENV === 'test' ? 'fatal' : 'info';
|
||||
const errorSerializer = DEV_ENVS.includes(NODE_ENV) ? err : wrapErrorSerializer(err => {
|
||||
err.stack = err.stack.split('\n').slice(0, 3).join('\n');
|
||||
return err;
|
||||
});
|
||||
const options = {
|
||||
base: null, // Do not bind hostname, pid and friends by default
|
||||
level: LOG_LEVEL || logLevelFromNodeEnv,
|
||||
serializers: {
|
||||
request: requestSerializer,
|
||||
response: responseSerializer,
|
||||
error: (error) => Array.isArray(error) ? error.map((err) => errorSerializer(err)) : [errorSerializer(error)]
|
||||
}
|
||||
};
|
||||
const dest = pino.destination({ sync: false }); // stdout
|
||||
|
||||
this._logger = pino(options, dest);
|
||||
}
|
||||
|
||||
trace (...args) {
|
||||
this._logger.trace(...args);
|
||||
}
|
||||
|
||||
debug (...args) {
|
||||
this._logger.debug(...args);
|
||||
}
|
||||
|
||||
info (...args) {
|
||||
this._logger.info(...args);
|
||||
}
|
||||
|
||||
warn (...args) {
|
||||
this._logger.warn(...args);
|
||||
}
|
||||
|
||||
error (...args) {
|
||||
this._logger.error(...args);
|
||||
}
|
||||
|
||||
fatal (...args) {
|
||||
this._logger.fatal(...args);
|
||||
}
|
||||
|
||||
child (...args) {
|
||||
return this._logger.child(...args);
|
||||
}
|
||||
|
||||
finish (callback) {
|
||||
return pino.final(this._logger, callback);
|
||||
}
|
||||
};
|
@ -1,45 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = {
|
||||
/**
|
||||
* Remove problematic nested characters
|
||||
* from object for logs RegEx
|
||||
*
|
||||
* @param {Object} object
|
||||
* @param {Number} maxStringLength
|
||||
*/
|
||||
stringifyForLogs (object, maxStringLength = 1024) {
|
||||
return JSON.stringify(cloneAndFilter(object, maxStringLength));
|
||||
}
|
||||
};
|
||||
|
||||
function cloneAndFilter (object, maxStringLength) {
|
||||
if (!object || !(object instanceof Object)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const newObject = {};
|
||||
|
||||
Object.keys(object).map(key => {
|
||||
if (typeof object[key] === 'string') {
|
||||
newObject[key] = filterString(object[key], maxStringLength);
|
||||
} else if (typeof object[key] === 'object') {
|
||||
newObject[key] = cloneAndFilter(object[key], maxStringLength);
|
||||
} else if (object[key] instanceof Array) {
|
||||
newObject[key] = [];
|
||||
for (const element of object[key]) {
|
||||
newObject[key].push(cloneAndFilter(element, maxStringLength));
|
||||
}
|
||||
} else {
|
||||
newObject[key] = object[key];
|
||||
}
|
||||
});
|
||||
|
||||
return newObject;
|
||||
}
|
||||
|
||||
function filterString (s, maxStringLength) {
|
||||
return s
|
||||
.substring(0, maxStringLength)
|
||||
.replace(/[^a-zA-Z0-9]/g, ' ');
|
||||
}
|
11
metro/index.js
Normal file
11
metro/index.js
Normal file
@ -0,0 +1,11 @@
|
||||
'use strict';
|
||||
|
||||
const split = require('split2');
|
||||
const logCollector = require('./log-collector');
|
||||
const metricsCollector = require('./metrics-collector');
|
||||
|
||||
process.stdin
|
||||
.pipe(split())
|
||||
.pipe(logCollector())
|
||||
.pipe(metricsCollector())
|
||||
.pipe(process.stdout);
|
76
metro/log-collector.js
Normal file
76
metro/log-collector.js
Normal file
@ -0,0 +1,76 @@
|
||||
'use strict'
|
||||
|
||||
const assingDeep = require('assign-deep');
|
||||
const { Transform } = require('stream');
|
||||
const DEV_ENVS = ['test', 'development'];
|
||||
const logs = new Map();
|
||||
|
||||
const LEVELS = {
|
||||
10: 'trace',
|
||||
20: 'debug',
|
||||
30: 'info',
|
||||
40: 'warn',
|
||||
50: 'error',
|
||||
60: 'fatal'
|
||||
};
|
||||
|
||||
module.exports = function logCollector () {
|
||||
return new Transform({
|
||||
transform (chunk, enc, callback) {
|
||||
let entry;
|
||||
|
||||
try {
|
||||
entry = JSON.parse(chunk);
|
||||
const { level, time } = entry;
|
||||
|
||||
if (level === undefined && time === undefined) {
|
||||
throw new Error('Entry log is not valid');
|
||||
}
|
||||
} catch (e) {
|
||||
if (DEV_ENVS.includes(process.env.NODE_ENV)) {
|
||||
this.push(chunk + '\n');
|
||||
}
|
||||
return callback();
|
||||
}
|
||||
|
||||
const { id, end } = entry;
|
||||
|
||||
if (id === undefined) {
|
||||
entry.level = LEVELS[entry.level];
|
||||
this.push(`${JSON.stringify(entry)}\n`);
|
||||
return callback();
|
||||
}
|
||||
|
||||
if (end === true) {
|
||||
const accEntry = logs.get(id);
|
||||
accEntry.level = LEVELS[accEntry.level];
|
||||
accEntry.time = entry.time;
|
||||
this.push(`${JSON.stringify(accEntry)}\n`);
|
||||
logs.delete(id);
|
||||
return callback();
|
||||
}
|
||||
|
||||
if (logs.has(id)) {
|
||||
const accEntry = logs.get(id);
|
||||
|
||||
if (accEntry.level > entry.level) {
|
||||
delete entry.level;
|
||||
}
|
||||
|
||||
if (hasProperty(accEntry, 'error') && hasProperty(entry, 'error')) {
|
||||
logs.set(id, assingDeep({}, accEntry, entry, { error: accEntry.error.concat(entry.error) }));
|
||||
} else {
|
||||
logs.set(id, assingDeep({}, accEntry, entry));
|
||||
}
|
||||
} else {
|
||||
logs.set(id, entry);
|
||||
}
|
||||
|
||||
callback();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
function hasProperty (obj, prop) {
|
||||
return Object.prototype.hasOwnProperty.call(obj, prop);
|
||||
}
|
120
metro/metrics-collector.js
Normal file
120
metro/metrics-collector.js
Normal file
@ -0,0 +1,120 @@
|
||||
'use strict';
|
||||
|
||||
const http = require('http');
|
||||
const { Counter, Histogram, register } = require('prom-client');
|
||||
const { Transform } = require('stream');
|
||||
const DEV_ENVS = ['test', 'development'];
|
||||
|
||||
const requestCounter = new Counter({
|
||||
name: 'sql_api_requests_total',
|
||||
help: 'SQL API requests total'
|
||||
});
|
||||
|
||||
const requestOkCounter = new Counter({
|
||||
name: 'sql_api_requests_ok_total',
|
||||
help: 'SQL API requests ok total'
|
||||
});
|
||||
|
||||
const requestErrorCounter = new Counter({
|
||||
name: 'sql_api_requests_errors_total',
|
||||
help: 'SQL API requests errors total'
|
||||
});
|
||||
|
||||
const responseTimeHistogram = new Histogram({
|
||||
name: 'sql_api_response_time_total',
|
||||
help: 'SQL API response time total'
|
||||
});
|
||||
|
||||
const userRequestCounter = new Counter({
|
||||
name: 'sql_api_requests',
|
||||
help: 'SQL API requests per user',
|
||||
labelNames: ['user', 'http_code']
|
||||
});
|
||||
|
||||
const userRequestOkCounter = new Counter({
|
||||
name: 'sql_api_requests_ok',
|
||||
help: 'SQL API requests per user with success HTTP code',
|
||||
labelNames: ['user', 'http_code']
|
||||
});
|
||||
|
||||
const userRequestErrorCounter = new Counter({
|
||||
name: 'sql_api_requests_errors',
|
||||
help: 'SQL API requests per user with error HTTP code',
|
||||
labelNames: ['user', 'http_code']
|
||||
});
|
||||
|
||||
const userResponseTimeHistogram = new Histogram({
|
||||
name: 'sql_api_response_time',
|
||||
help: 'SQL API response time total',
|
||||
labelNames: ['user']
|
||||
});
|
||||
|
||||
module.exports = function metricsCollector () {
|
||||
return new Transform({
|
||||
transform (chunk, enc, callback) {
|
||||
let entry;
|
||||
|
||||
try {
|
||||
entry = JSON.parse(chunk);
|
||||
const { level, time } = entry;
|
||||
|
||||
if (level === undefined && time === undefined) {
|
||||
throw new Error('Entry log is not valid');
|
||||
}
|
||||
} catch (e) {
|
||||
if (DEV_ENVS.includes(process.env.NODE_ENV)) {
|
||||
this.push(chunk);
|
||||
}
|
||||
return callback();
|
||||
}
|
||||
|
||||
const { request, response, stats } = entry;
|
||||
|
||||
if (request === undefined || response === undefined || stats === undefined) {
|
||||
this.push(chunk);
|
||||
return callback();
|
||||
}
|
||||
|
||||
const { statusCode, headers } = response;
|
||||
const { 'carto-user': user } = headers;
|
||||
|
||||
requestCounter.inc();
|
||||
|
||||
if (statusCode !== undefined && user !== undefined) {
|
||||
userRequestCounter.labels(user, `${statusCode}`).inc();
|
||||
}
|
||||
|
||||
if (statusCode >= 200 && statusCode < 400) {
|
||||
requestOkCounter.inc();
|
||||
if (user !== undefined) {
|
||||
userRequestOkCounter.labels(user, `${statusCode}`).inc();
|
||||
}
|
||||
} else if (statusCode >= 400) {
|
||||
requestErrorCounter.inc();
|
||||
if (user !== undefined) {
|
||||
userRequestErrorCounter.labels(user, `${statusCode}`).inc();
|
||||
}
|
||||
}
|
||||
|
||||
const { response: responseTime } = stats;
|
||||
|
||||
if (Number.isFinite(responseTime)) {
|
||||
responseTimeHistogram.observe(responseTime);
|
||||
userResponseTimeHistogram.labels(user).observe(responseTime);
|
||||
}
|
||||
|
||||
this.push(chunk);
|
||||
callback();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const port = process.env.PORT || 9145;
|
||||
|
||||
http
|
||||
.createServer((req, res) => {
|
||||
res.writeHead(200, { 'Content-Type': register.contentType });
|
||||
res.end(register.metrics());
|
||||
})
|
||||
.listen(port)
|
||||
.unref();
|
276
package-lock.json
generated
276
package-lock.json
generated
@ -234,6 +234,12 @@
|
||||
"protobufjs": "^6.8.6"
|
||||
}
|
||||
},
|
||||
"@hapi/bourne": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-2.0.0.tgz",
|
||||
"integrity": "sha512-WEezM1FWztfbzqIUbsDzFRVMxSoLy3HugVcux6KDDtTqzPsLE8NDRHfXvev66aH1i2oOKKar3/XDjbvh/OUBdg==",
|
||||
"dev": true
|
||||
},
|
||||
"@istanbuljs/load-nyc-config": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.0.0.tgz",
|
||||
@ -642,6 +648,26 @@
|
||||
"sprintf-js": "~1.0.2"
|
||||
}
|
||||
},
|
||||
"args": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/args/-/args-5.0.1.tgz",
|
||||
"integrity": "sha512-1kqmFCFsPffavQFGt8OxJdIcETti99kySRUPMpOhaGjL6mRJn8HFU1OxKY5bMqfZKUwTQc1mZkAjmGYaVOHFtQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"camelcase": "5.0.0",
|
||||
"chalk": "2.4.2",
|
||||
"leven": "2.1.0",
|
||||
"mri": "1.1.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"camelcase": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.0.0.tgz",
|
||||
"integrity": "sha512-faqwZqnWxbxn+F1d399ygeamQNy3lPp/H9H6rNrqYh4FSVCtcY+3cub1MxA8o9mDd55mM8Aghuu/kuyYA6VTsA==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"array-flatten": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
|
||||
@ -686,6 +712,19 @@
|
||||
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
|
||||
"integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU="
|
||||
},
|
||||
"assign-deep": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/assign-deep/-/assign-deep-1.0.1.tgz",
|
||||
"integrity": "sha512-CSXAX79mibneEYfqLT5FEmkqR5WXF+xDRjgQQuVf6wSCXCYU8/vHttPidNar7wJ5BFmKAo8Wei0rCtzb+M/yeA==",
|
||||
"requires": {
|
||||
"assign-symbols": "^2.0.2"
|
||||
}
|
||||
},
|
||||
"assign-symbols": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-2.0.2.tgz",
|
||||
"integrity": "sha512-9sBQUQZMKFKcO/C3Bo6Rx4CQany0R0UeVcefNGRRdW2vbmaMOhV1sbmlXcQLcD56juLXbSGTBm0GGuvmrAF8pA=="
|
||||
},
|
||||
"astral-regex": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz",
|
||||
@ -707,6 +746,11 @@
|
||||
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
|
||||
},
|
||||
"atomic-sleep": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz",
|
||||
"integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="
|
||||
},
|
||||
"aws-sign2": {
|
||||
"version": "0.7.0",
|
||||
"resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
|
||||
@ -1219,6 +1263,12 @@
|
||||
"assert-plus": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"dateformat": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz",
|
||||
"integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==",
|
||||
"dev": true
|
||||
},
|
||||
"debug": {
|
||||
"version": "4.1.1",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
|
||||
@ -2042,6 +2092,16 @@
|
||||
"integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=",
|
||||
"dev": true
|
||||
},
|
||||
"fast-redact": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-2.0.0.tgz",
|
||||
"integrity": "sha512-zxpkULI9W9MNTK2sJ3BpPQrTEXFNESd2X6O1tXMFpK/XM0G5c5Rll2EVYZH2TqI3xRGK/VaJ+eEOt7pnENJpeA=="
|
||||
},
|
||||
"fast-safe-stringify": {
|
||||
"version": "2.0.7",
|
||||
"resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.7.tgz",
|
||||
"integrity": "sha512-Utm6CdzT+6xsDk2m8S6uL8VHxNwI6Jub+e9NYTcAms28T84pTa25GJQV9j0CY0N1rM8hK4x6grpF2BQf+2qwVA=="
|
||||
},
|
||||
"fast-text-encoding": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz",
|
||||
@ -2259,6 +2319,11 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"flatstr": {
|
||||
"version": "1.0.12",
|
||||
"resolved": "https://registry.npmjs.org/flatstr/-/flatstr-1.0.12.tgz",
|
||||
"integrity": "sha512-4zPxDyhCyiN2wIAtSLI6gc82/EjqZc1onI4Mz/l0pWrAlsSfYH/2ZIcU+e3oA2wDwbzIWNKwa23F8rh6+DRWkw=="
|
||||
},
|
||||
"flatted": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz",
|
||||
@ -3536,6 +3601,18 @@
|
||||
"istanbul-lib-report": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"jmespath": {
|
||||
"version": "0.15.0",
|
||||
"resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz",
|
||||
"integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=",
|
||||
"dev": true
|
||||
},
|
||||
"joycon": {
|
||||
"version": "2.2.5",
|
||||
"resolved": "https://registry.npmjs.org/joycon/-/joycon-2.2.5.tgz",
|
||||
"integrity": "sha512-YqvUxoOcVPnCp0VU1/56f+iKSdvIRJYPznH22BdXV3xMk75SFXhWeJkZ8C9XxUWt1b5x2X1SxuFygW1U0FmkEQ==",
|
||||
"dev": true
|
||||
},
|
||||
"js-string-escape": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz",
|
||||
@ -3658,6 +3735,12 @@
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"leven": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/leven/-/leven-2.1.0.tgz",
|
||||
"integrity": "sha1-wuep93IJTe6dNCAq6KzORoeHVYA=",
|
||||
"dev": true
|
||||
},
|
||||
"levn": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz",
|
||||
@ -4152,6 +4235,12 @@
|
||||
"integrity": "sha1-PCV/mDn8DpP/UxSWMiOeuQeD/2Y=",
|
||||
"optional": true
|
||||
},
|
||||
"mri": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/mri/-/mri-1.1.4.tgz",
|
||||
"integrity": "sha512-6y7IjGPm8AzlvoUrwAaw1tLnUBudaS3752vcd8JtrpGGQn+rXIe63LFVHm/YMwtqAuh+LJPCFdlLYPWM1nYn6w==",
|
||||
"dev": true
|
||||
},
|
||||
"ms": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz",
|
||||
@ -5055,6 +5144,115 @@
|
||||
"integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=",
|
||||
"dev": true
|
||||
},
|
||||
"pino": {
|
||||
"version": "6.3.2",
|
||||
"resolved": "https://registry.npmjs.org/pino/-/pino-6.3.2.tgz",
|
||||
"integrity": "sha512-EiP3L1hoFw19KPocWimjnfXeysld0ne89ZRQ+bf8nAeA2TyuLoggNlibAi+Kla67GvQBopLdIZOsh1z/Lruo5Q==",
|
||||
"requires": {
|
||||
"fast-redact": "^2.0.0",
|
||||
"fast-safe-stringify": "^2.0.7",
|
||||
"flatstr": "^1.0.12",
|
||||
"pino-std-serializers": "^2.4.2",
|
||||
"quick-format-unescaped": "^4.0.1",
|
||||
"sonic-boom": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"pino-pretty": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-4.0.0.tgz",
|
||||
"integrity": "sha512-YLy/n3dMXYWOodSm530gelkSAJGmEp29L9pqiycInlIae5FEJPWAkMRO3JFMbIFtjD2Ve4SH2aBcz2aRreGpBQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@hapi/bourne": "^2.0.0",
|
||||
"args": "^5.0.1",
|
||||
"chalk": "^3.0.0",
|
||||
"dateformat": "^3.0.3",
|
||||
"fast-safe-stringify": "^2.0.7",
|
||||
"jmespath": "^0.15.0",
|
||||
"joycon": "^2.2.5",
|
||||
"pump": "^3.0.0",
|
||||
"readable-stream": "^3.6.0",
|
||||
"split2": "^3.1.1",
|
||||
"strip-json-comments": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"ansi-styles": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
|
||||
"integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/color-name": "^1.1.1",
|
||||
"color-convert": "^2.0.1"
|
||||
}
|
||||
},
|
||||
"chalk": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz",
|
||||
"integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"ansi-styles": "^4.1.0",
|
||||
"supports-color": "^7.1.0"
|
||||
}
|
||||
},
|
||||
"color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"color-name": "~1.1.4"
|
||||
}
|
||||
},
|
||||
"color-name": {
|
||||
"version": "1.1.4",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
|
||||
"dev": true
|
||||
},
|
||||
"readable-stream": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
|
||||
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"inherits": "^2.0.3",
|
||||
"string_decoder": "^1.1.1",
|
||||
"util-deprecate": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"safe-buffer": {
|
||||
"version": "5.2.1",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
|
||||
"dev": true
|
||||
},
|
||||
"string_decoder": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"safe-buffer": "~5.2.0"
|
||||
}
|
||||
},
|
||||
"supports-color": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
|
||||
"integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"has-flag": "^4.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"pino-std-serializers": {
|
||||
"version": "2.4.2",
|
||||
"resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-2.4.2.tgz",
|
||||
"integrity": "sha512-WaL504dO8eGs+vrK+j4BuQQq6GLKeCCcHaMB2ItygzVURcL1CycwNEUHTD/lHFHs/NL5qAz2UKrjYWXKSf4aMQ=="
|
||||
},
|
||||
"pkg-dir": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz",
|
||||
@ -5124,6 +5322,14 @@
|
||||
"integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==",
|
||||
"dev": true
|
||||
},
|
||||
"prom-client": {
|
||||
"version": "12.0.0",
|
||||
"resolved": "https://registry.npmjs.org/prom-client/-/prom-client-12.0.0.tgz",
|
||||
"integrity": "sha512-JbzzHnw0VDwCvoqf8y1WDtq4wSBAbthMB1pcVI/0lzdqHGJI3KBJDXle70XK+c7Iv93Gihqo0a5LlOn+g8+DrQ==",
|
||||
"requires": {
|
||||
"tdigest": "^0.1.1"
|
||||
}
|
||||
},
|
||||
"protobufjs": {
|
||||
"version": "6.8.8",
|
||||
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz",
|
||||
@ -5165,6 +5371,16 @@
|
||||
"resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz",
|
||||
"integrity": "sha512-/6pt4+C+T+wZUieKR620OpzN/LlnNKuWjy1iFLQ/UG35JqHlR/89MP1d96dUfkf6Dne3TuLQzOYEYshJ+Hx8mw=="
|
||||
},
|
||||
"pump": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
|
||||
"integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"end-of-stream": "^1.1.0",
|
||||
"once": "^1.3.1"
|
||||
}
|
||||
},
|
||||
"punycode": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
|
||||
@ -5180,6 +5396,11 @@
|
||||
"resolved": "https://registry.npmjs.org/queue-async/-/queue-async-1.0.7.tgz",
|
||||
"integrity": "sha1-Iq4KHaxKkvW81GNPmTxoKiqBCUU="
|
||||
},
|
||||
"quick-format-unescaped": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.1.tgz",
|
||||
"integrity": "sha512-RyYpQ6Q5/drsJyOhrWHYMWTedvjTIat+FTwv0K4yoUxzvekw2aRHMQJLlnvt8UantkZg2++bEzD9EdxXqkWf4A=="
|
||||
},
|
||||
"range-parser": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz",
|
||||
@ -5596,6 +5817,15 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"sonic-boom": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-1.0.1.tgz",
|
||||
"integrity": "sha512-o9tx+bonVEXSaPtptyXQXpP8l6UV9Bi3im2geZskvWw2a/o/hrbWI7EBbbv+rOx6Hubnzun9GgH4WfbgEA3MFQ==",
|
||||
"requires": {
|
||||
"atomic-sleep": "^1.0.0",
|
||||
"flatstr": "^1.0.12"
|
||||
}
|
||||
},
|
||||
"source-map": {
|
||||
"version": "0.5.7",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
|
||||
@ -5690,6 +5920,39 @@
|
||||
"through": "2"
|
||||
}
|
||||
},
|
||||
"split2": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/split2/-/split2-3.1.1.tgz",
|
||||
"integrity": "sha512-emNzr1s7ruq4N+1993yht631/JH+jaj0NYBosuKmLcq+JkGQ9MmTw1RB1fGaTCzUuseRIClrlSLHRNYGwWQ58Q==",
|
||||
"requires": {
|
||||
"readable-stream": "^3.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"readable-stream": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
|
||||
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
|
||||
"requires": {
|
||||
"inherits": "^2.0.3",
|
||||
"string_decoder": "^1.1.1",
|
||||
"util-deprecate": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"safe-buffer": {
|
||||
"version": "5.2.1",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
|
||||
},
|
||||
"string_decoder": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
||||
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
|
||||
"requires": {
|
||||
"safe-buffer": "~5.2.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"sprintf-js": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
|
||||
@ -5928,6 +6191,14 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"tdigest": {
|
||||
"version": "0.1.1",
|
||||
"resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz",
|
||||
"integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=",
|
||||
"requires": {
|
||||
"bintrees": "1.0.1"
|
||||
}
|
||||
},
|
||||
"test-exclude": {
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
|
||||
@ -6138,6 +6409,11 @@
|
||||
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
|
||||
"integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM="
|
||||
},
|
||||
"uuid": {
|
||||
"version": "8.1.0",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.1.0.tgz",
|
||||
"integrity": "sha512-CI18flHDznR0lq54xBycOVmphdCYnQLKn8abKn7PXUiKUGdEd+/l9LWNJmugXel4hXq7S+RMNl34ecyC9TntWg=="
|
||||
},
|
||||
"v8-compile-cache": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz",
|
||||
|
@ -28,6 +28,7 @@
|
||||
"dependencies": {
|
||||
"@carto/fqdn-sync": "0.2.2",
|
||||
"@google-cloud/pubsub": "1.5.0",
|
||||
"assign-deep": "^1.0.1",
|
||||
"basic-auth": "^2.0.0",
|
||||
"bintrees": "1.0.1",
|
||||
"bunyan": "1.8.1",
|
||||
@ -44,15 +45,19 @@
|
||||
"node-uuid": "^1.4.7",
|
||||
"oauth-client": "0.3.0",
|
||||
"pg-copy-streams": "github:cartodb/node-pg-copy-streams#v2.x-carto",
|
||||
"pino": "^6.3.2",
|
||||
"prom-client": "^12.0.0",
|
||||
"qs": "~6.2.1",
|
||||
"queue-async": "~1.0.7",
|
||||
"redis-mpool": "^0.8.0",
|
||||
"redlock": "2.0.1",
|
||||
"request": "^2.88.0",
|
||||
"split2": "^3.1.1",
|
||||
"step": "~0.0.5",
|
||||
"step-profiler": "~0.3.0",
|
||||
"topojson": "0.0.8",
|
||||
"underscore": "~1.6.0",
|
||||
"uuid": "^8.1.0",
|
||||
"yargs": "^15.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
@ -67,6 +72,7 @@
|
||||
"mocha": "^7.2.0",
|
||||
"mockdate": "^2.0.2",
|
||||
"nyc": "^15.0.0",
|
||||
"pino-pretty": "^4.0.0",
|
||||
"shapefile": "0.3.0",
|
||||
"sinon": "^9.0.0",
|
||||
"sqlite3": "^4.2.0"
|
||||
|
@ -5,7 +5,7 @@ var assert = require('../../support/assert');
|
||||
var redisUtils = require('../../support/redis-utils');
|
||||
var batchFactory = require('../../../lib/batch/index');
|
||||
|
||||
var BatchLogger = require('../../../lib/batch/batch-logger');
|
||||
var Logger = require('../../../lib/utils/logger');
|
||||
var JobPublisher = require('../../../lib/batch/pubsub/job-publisher');
|
||||
var JobQueue = require('../../../lib/batch/job-queue');
|
||||
var JobBackend = require('../../../lib/batch/job-backend');
|
||||
@ -17,7 +17,7 @@ describe('batch module', function () {
|
||||
var dbInstance = 'localhost';
|
||||
var username = 'vizzuality';
|
||||
var pool = redisUtils.getPool();
|
||||
var logger = new BatchLogger(null, 'batch-queries');
|
||||
var logger = new Logger();
|
||||
var jobPublisher = new JobPublisher(pool);
|
||||
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
|
||||
var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);
|
||||
@ -25,7 +25,7 @@ describe('batch module', function () {
|
||||
var jobService = new JobService(jobBackend, jobCanceller, logger);
|
||||
|
||||
before(function (done) {
|
||||
this.batch = batchFactory(metadataBackend, pool);
|
||||
this.batch = batchFactory(metadataBackend, pool, undefined, undefined, logger);
|
||||
this.batch.start();
|
||||
this.batch.on('ready', done);
|
||||
});
|
||||
|
@ -1,33 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var server = require('../../lib/server')();
|
||||
var assert = require('../support/assert');
|
||||
|
||||
describe('error handler', function () {
|
||||
it('should returns a errors header', function (done) {
|
||||
const errorHeader = {
|
||||
detail: undefined,
|
||||
hint: undefined,
|
||||
context: undefined,
|
||||
statusCode: 400,
|
||||
message: 'You must indicate a sql query'
|
||||
};
|
||||
|
||||
assert.response(server, {
|
||||
url: '/api/v1/sql',
|
||||
headers: { host: 'vizzuality.cartodb.com' },
|
||||
method: 'GET'
|
||||
},
|
||||
{
|
||||
status: 400,
|
||||
headers: {
|
||||
'Content-Type': 'application/json; charset=utf-8',
|
||||
'X-SQLAPI-Errors': JSON.stringify(errorHeader)
|
||||
}
|
||||
},
|
||||
function (err) {
|
||||
assert.ifError(err);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
@ -1,280 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
require('../helper');
|
||||
|
||||
const server = require('../../lib/server')();
|
||||
const assert = require('../support/assert');
|
||||
const qs = require('querystring');
|
||||
const BatchTestClient = require('../support/batch-test-client');
|
||||
const { TYPES } = require('../../lib/api/middlewares/log');
|
||||
|
||||
const QUERY = 'SELECT 14 as foo';
|
||||
const API_KEY = 1234;
|
||||
|
||||
const logQueries = global.settings.logQueries;
|
||||
|
||||
describe('Log middleware', function () {
|
||||
before(function () {
|
||||
global.settings.logQueries = true;
|
||||
});
|
||||
|
||||
after(function () {
|
||||
global.settings.logQueries = logQueries;
|
||||
});
|
||||
|
||||
describe('regular queries endpoint', function () {
|
||||
['GET', 'POST'].forEach(method => {
|
||||
it(`${method} query`, function (done) {
|
||||
assert.response(server,
|
||||
{
|
||||
method,
|
||||
url: '/api/v1/sql?' + qs.stringify({
|
||||
q: QUERY,
|
||||
api_key: API_KEY
|
||||
}),
|
||||
headers: {
|
||||
host: 'vizzuality.cartodb.com'
|
||||
}
|
||||
},
|
||||
{ statusCode: 200 },
|
||||
function (err, res) {
|
||||
assert.ok(!err);
|
||||
|
||||
assert.ok(res.headers['x-sqlapi-log']);
|
||||
const log = JSON.parse(res.headers['x-sqlapi-log']);
|
||||
assert.deepStrictEqual(log, {
|
||||
request: {
|
||||
sql: {
|
||||
type: TYPES.QUERY,
|
||||
sql: QUERY
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return done();
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
it(`${method} Respects max header size with long queries`, function (done) {
|
||||
let longQuery = "Select '";
|
||||
for (let i = 0; i < 7000; i++) {
|
||||
longQuery += 'a';
|
||||
}
|
||||
longQuery += "' as foo";
|
||||
assert.response(server,
|
||||
{
|
||||
method,
|
||||
url: '/api/v1/sql?' + qs.stringify({
|
||||
q: longQuery,
|
||||
api_key: API_KEY
|
||||
}),
|
||||
headers: {
|
||||
host: 'vizzuality.cartodb.com'
|
||||
}
|
||||
},
|
||||
{ statusCode: 200 },
|
||||
function (err, res) {
|
||||
assert.ok(!err);
|
||||
|
||||
assert.ok(res.headers['x-sqlapi-log']);
|
||||
assert.ok(res.headers['x-sqlapi-log'].length < 5000);
|
||||
|
||||
return done();
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('batch api queries', function () {
|
||||
before(function () {
|
||||
this.batchTestClient = new BatchTestClient();
|
||||
assert.ok(this.batchTestClient);
|
||||
});
|
||||
|
||||
after(function (done) {
|
||||
this.batchTestClient.drain(done);
|
||||
});
|
||||
|
||||
it('one query', function (done) {
|
||||
const payload = { query: QUERY };
|
||||
this.batchTestClient.createJob(payload, function (err, jobResult, res) {
|
||||
assert.ok(!err);
|
||||
|
||||
assert.ok(res.headers['x-sqlapi-log']);
|
||||
const log = JSON.parse(res.headers['x-sqlapi-log']);
|
||||
assert.deepStrictEqual(log, {
|
||||
request: {
|
||||
sql: {
|
||||
type: TYPES.JOB,
|
||||
sql: QUERY
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Respects max header size with long queries', function (done) {
|
||||
let longQuery = "Select '";
|
||||
for (let i = 0; i < 7000; i++) {
|
||||
longQuery += 'a';
|
||||
}
|
||||
longQuery += "' as foo";
|
||||
|
||||
const payload = { query: longQuery };
|
||||
this.batchTestClient.createJob(payload, function (err, jobResult, res) {
|
||||
assert.ok(!err);
|
||||
|
||||
assert.ok(res.headers['x-sqlapi-log']);
|
||||
assert.ok(res.headers['x-sqlapi-log'].length < 5000);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('multiquery job with two queries', function (done) {
|
||||
const payload = { query: [QUERY, QUERY] };
|
||||
this.batchTestClient.createJob(payload, function (err, jobResult, res) {
|
||||
assert.ok(!err);
|
||||
|
||||
assert.ok(res.headers['x-sqlapi-log']);
|
||||
const log = JSON.parse(res.headers['x-sqlapi-log']);
|
||||
assert.deepStrictEqual(log, {
|
||||
request: {
|
||||
sql: {
|
||||
type: TYPES.JOB,
|
||||
sql: {
|
||||
0: QUERY,
|
||||
1: QUERY
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Respects max header size with long multiqueries', function (done) {
|
||||
let longQuery = "Select '";
|
||||
for (let i = 0; i < 100; i++) {
|
||||
longQuery += 'a';
|
||||
}
|
||||
longQuery += "' as foo";
|
||||
|
||||
const queries = [longQuery];
|
||||
for (let i = 0; i < 70; i++) {
|
||||
queries.push(longQuery);
|
||||
}
|
||||
|
||||
const payload = { query: queries };
|
||||
this.batchTestClient.createJob(payload, function (err, jobResult, res) {
|
||||
assert.ok(!err);
|
||||
|
||||
assert.ok(res.headers['x-sqlapi-log']);
|
||||
assert.ok(res.headers['x-sqlapi-log'].length < 5000);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Respects max header size with lots of multiqueries', function (done) {
|
||||
const queries = [];
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
queries.push('Select 1');
|
||||
}
|
||||
|
||||
const payload = { query: queries };
|
||||
this.batchTestClient.createJob(payload, function (err, jobResult, res) {
|
||||
assert.ok(!err);
|
||||
|
||||
assert.ok(res.headers['x-sqlapi-log']);
|
||||
assert.ok(res.headers['x-sqlapi-log'].length < 5000);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('queries with fallbacks', function (done) {
|
||||
const FALLBACK_QUERY = {
|
||||
query: [{
|
||||
query: QUERY,
|
||||
onsuccess: QUERY,
|
||||
onerror: QUERY
|
||||
}],
|
||||
onsuccess: QUERY,
|
||||
onerror: QUERY
|
||||
};
|
||||
const payload = { query: FALLBACK_QUERY };
|
||||
|
||||
this.batchTestClient.createJob(payload, function (err, jobResult, res) {
|
||||
assert.ok(!err);
|
||||
|
||||
assert.ok(res.headers['x-sqlapi-log']);
|
||||
const log = JSON.parse(res.headers['x-sqlapi-log']);
|
||||
assert.deepStrictEqual(log, {
|
||||
request: {
|
||||
sql: {
|
||||
type: TYPES.JOB,
|
||||
sql: {
|
||||
onsuccess: QUERY,
|
||||
onerror: QUERY,
|
||||
query: {
|
||||
0: {
|
||||
query: QUERY,
|
||||
onsuccess: QUERY,
|
||||
onerror: QUERY
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('disable queries log', function () {
|
||||
before(function () {
|
||||
global.settings.logQueries = false;
|
||||
});
|
||||
|
||||
after(function () {
|
||||
global.settings.logQueries = true;
|
||||
});
|
||||
|
||||
it('GET query', function (done) {
|
||||
assert.response(server,
|
||||
{
|
||||
method: 'GET',
|
||||
url: '/api/v1/sql?' + qs.stringify({
|
||||
q: QUERY,
|
||||
api_key: API_KEY
|
||||
}),
|
||||
headers: {
|
||||
host: 'vizzuality.cartodb.com'
|
||||
}
|
||||
},
|
||||
{ statusCode: 200 },
|
||||
function (err, res) {
|
||||
assert.ok(!err);
|
||||
|
||||
assert.ok(res.headers['x-sqlapi-log']);
|
||||
const log = JSON.parse(res.headers['x-sqlapi-log']);
|
||||
assert.deepStrictEqual(log, {
|
||||
request: {
|
||||
sql: null
|
||||
}
|
||||
});
|
||||
|
||||
return done();
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
@ -7,14 +7,14 @@ var BATCH_SOURCE = '../../../lib/batch/';
|
||||
var assert = require('../../support/assert');
|
||||
var redisUtils = require('../../support/redis-utils');
|
||||
|
||||
var BatchLogger = require(BATCH_SOURCE + 'batch-logger');
|
||||
var Logger = require('../../../lib/utils/logger');
|
||||
var JobQueue = require(BATCH_SOURCE + 'job-queue');
|
||||
var JobBackend = require(BATCH_SOURCE + 'job-backend');
|
||||
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
|
||||
var JobFactory = require(BATCH_SOURCE + 'models/job-factory');
|
||||
var jobStatus = require(BATCH_SOURCE + 'job-status');
|
||||
|
||||
var logger = new BatchLogger(null, 'batch-queries');
|
||||
var logger = new Logger();
|
||||
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
|
||||
var jobPublisher = new JobPublisher(redisUtils.getPool());
|
||||
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
|
||||
|
@ -7,7 +7,7 @@ var BATCH_SOURCE = '../../../lib/batch/';
|
||||
var assert = require('../../support/assert');
|
||||
var redisUtils = require('../../support/redis-utils');
|
||||
|
||||
var BatchLogger = require(BATCH_SOURCE + 'batch-logger');
|
||||
var Logger = require('../../../lib/utils/logger');
|
||||
var JobQueue = require(BATCH_SOURCE + 'job-queue');
|
||||
var JobBackend = require(BATCH_SOURCE + 'job-backend');
|
||||
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
|
||||
@ -16,7 +16,7 @@ var JobCanceller = require(BATCH_SOURCE + 'job-canceller');
|
||||
var JobFactory = require(BATCH_SOURCE + 'models/job-factory');
|
||||
var PSQL = require('cartodb-psql');
|
||||
|
||||
var logger = new BatchLogger(null, 'batch-queries');
|
||||
var logger = new Logger();
|
||||
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
|
||||
var jobPublisher = new JobPublisher(redisUtils.getPool());
|
||||
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
|
||||
|
@ -5,7 +5,7 @@ var assert = require('../../support/assert');
|
||||
var redisUtils = require('../../support/redis-utils');
|
||||
|
||||
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
|
||||
var BatchLogger = require('../../../lib/batch/batch-logger');
|
||||
var Logger = require('../../../lib/utils/logger');
|
||||
var JobPublisher = require('../../../lib/batch/pubsub/job-publisher');
|
||||
var JobQueue = require('../../../lib/batch/job-queue');
|
||||
|
||||
@ -15,7 +15,7 @@ var JobCanceller = require('../../../lib/batch/job-canceller');
|
||||
|
||||
describe('job queue', function () {
|
||||
var pool = redisUtils.getPool();
|
||||
var logger = new BatchLogger(null, 'batch-queries');
|
||||
var logger = new Logger();
|
||||
var jobPublisher = new JobPublisher(pool);
|
||||
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
|
||||
var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);
|
||||
|
@ -7,7 +7,7 @@ var BATCH_SOURCE = '../../../lib/batch/';
|
||||
var assert = require('../../support/assert');
|
||||
var redisUtils = require('../../support/redis-utils');
|
||||
|
||||
var BatchLogger = require(BATCH_SOURCE + 'batch-logger');
|
||||
var Logger = require('../../../lib/utils/logger');
|
||||
var JobQueue = require(BATCH_SOURCE + 'job-queue');
|
||||
var JobBackend = require(BATCH_SOURCE + 'job-backend');
|
||||
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
|
||||
@ -19,7 +19,7 @@ var JobRunner = require(BATCH_SOURCE + 'job-runner');
|
||||
var QueryRunner = require(BATCH_SOURCE + 'query-runner');
|
||||
|
||||
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
|
||||
var logger = new BatchLogger(null, 'batch-queries');
|
||||
var logger = new Logger();
|
||||
var jobPublisher = new JobPublisher(redisUtils.getPool());
|
||||
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
|
||||
var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);
|
||||
|
@ -7,7 +7,7 @@ var BATCH_SOURCE = '../../../lib/batch/';
|
||||
var assert = require('../../support/assert');
|
||||
var redisUtils = require('../../support/redis-utils');
|
||||
|
||||
var BatchLogger = require(BATCH_SOURCE + 'batch-logger');
|
||||
var Logger = require('../../../lib/utils/logger');
|
||||
var JobQueue = require(BATCH_SOURCE + 'job-queue');
|
||||
var JobBackend = require(BATCH_SOURCE + 'job-backend');
|
||||
var JobPublisher = require(BATCH_SOURCE + 'pubsub/job-publisher');
|
||||
@ -17,7 +17,7 @@ var JobService = require(BATCH_SOURCE + 'job-service');
|
||||
var PSQL = require('cartodb-psql');
|
||||
|
||||
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
|
||||
var logger = new BatchLogger(null, 'batch-queries');
|
||||
var logger = new Logger();
|
||||
var jobPublisher = new JobPublisher(redisUtils.getPool());
|
||||
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
|
||||
var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);
|
||||
|
@ -9,6 +9,7 @@ var debug = require('debug')('batch-test-client');
|
||||
var JobStatus = require('../../lib/batch/job-status');
|
||||
var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
|
||||
var batchFactory = require('../../lib/batch/index');
|
||||
var Logger = require('../../lib/utils/logger');
|
||||
|
||||
function response (code) {
|
||||
return {
|
||||
@ -26,7 +27,8 @@ function BatchTestClient (config) {
|
||||
this.config = config || {};
|
||||
this.server = appServer();
|
||||
|
||||
this.batch = batchFactory(metadataBackend, redisUtils.getPool(), this.config.name);
|
||||
const logger = new Logger();
|
||||
this.batch = batchFactory(metadataBackend, redisUtils.getPool(), this.config.name, undefined, logger);
|
||||
this.batch.start();
|
||||
|
||||
this.pendingJobs = [];
|
||||
|
@ -1,142 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
var assert = require('assert');
|
||||
var errorMiddleware = require('../../lib/api/middlewares/error');
|
||||
require('../helper');
|
||||
|
||||
const req = { query: { callback: true } };
|
||||
|
||||
const getRes = () => {
|
||||
return {
|
||||
headers: {},
|
||||
set (key, value) {
|
||||
this.headers[key] = value;
|
||||
},
|
||||
header (key, value) {
|
||||
this.set(key, value);
|
||||
},
|
||||
statusCode: 0,
|
||||
status (status) {
|
||||
this.statusCode = status;
|
||||
},
|
||||
json () {},
|
||||
jsonp () {}
|
||||
};
|
||||
};
|
||||
|
||||
const getErrorHeader = (context, detail, hint, message) => {
|
||||
return {
|
||||
context,
|
||||
detail,
|
||||
hint,
|
||||
statusCode: 400,
|
||||
message
|
||||
};
|
||||
};
|
||||
|
||||
describe('error-handler', function () {
|
||||
it('should return a header with errors', function (done) {
|
||||
const error = new Error('error test');
|
||||
error.detail = 'test detail';
|
||||
error.hint = 'test hint';
|
||||
error.context = 'test context';
|
||||
|
||||
const errorHeader = getErrorHeader(
|
||||
error.context,
|
||||
error.detail,
|
||||
error.hint,
|
||||
error.message
|
||||
);
|
||||
|
||||
const res = getRes();
|
||||
|
||||
errorMiddleware()(error, req, res, function next () {
|
||||
assert.ok(res.headers['X-SQLAPI-Errors'].length > 0);
|
||||
assert.deepStrictEqual(
|
||||
res.headers['X-SQLAPI-Errors'],
|
||||
JSON.stringify(errorHeader)
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('JSONP should return a header with error statuscode', function (done) {
|
||||
const error = new Error('error test');
|
||||
error.detail = 'test detail';
|
||||
error.hint = 'test hint';
|
||||
error.context = 'test context';
|
||||
|
||||
const errorHeader = getErrorHeader(
|
||||
error.context,
|
||||
error.detail,
|
||||
error.hint,
|
||||
error.message
|
||||
);
|
||||
|
||||
const res = getRes();
|
||||
|
||||
errorMiddleware()(error, req, res, function next () {
|
||||
assert.ok(res.headers['X-SQLAPI-Errors'].length > 0);
|
||||
assert.deepStrictEqual(
|
||||
res.headers['X-SQLAPI-Errors'],
|
||||
JSON.stringify(errorHeader)
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should escape chars that broke logs regex', function (done) {
|
||||
const badString = 'error: ( ) = " " \' * $ & |';
|
||||
const escapedString = 'error ';
|
||||
|
||||
const error = new Error(badString);
|
||||
error.detail = badString;
|
||||
error.hint = badString;
|
||||
error.context = badString;
|
||||
|
||||
const errorHeader = getErrorHeader(
|
||||
escapedString,
|
||||
escapedString,
|
||||
escapedString,
|
||||
escapedString
|
||||
);
|
||||
|
||||
const res = getRes();
|
||||
|
||||
errorMiddleware()(error, req, res, function () {
|
||||
assert.ok(res.headers['X-SQLAPI-Errors'].length > 0);
|
||||
assert.deepStrictEqual(
|
||||
res.headers['X-SQLAPI-Errors'],
|
||||
JSON.stringify(errorHeader)
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should truncat too long error messages', function (done) {
|
||||
const veryLongString = 'Very long error message '.repeat(1000);
|
||||
const truncatedString = veryLongString.substring(0, 1024);
|
||||
|
||||
const error = new Error(veryLongString);
|
||||
|
||||
const expectedErrorHeader = {
|
||||
statusCode: 400,
|
||||
message: truncatedString
|
||||
};
|
||||
|
||||
const res = getRes();
|
||||
|
||||
errorMiddleware()(error, req, res, function () {
|
||||
assert.ok(res.headers['X-SQLAPI-Errors'].length > 0);
|
||||
assert.deepStrictEqual(
|
||||
res.headers['X-SQLAPI-Errors'],
|
||||
JSON.stringify(expectedErrorHeader)
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
Loading…
Reference in New Issue
Block a user