Merge pull request #636 from CartoDB/eslint

Eslint
This commit is contained in:
Daniel G. Aubert 2019-12-30 11:14:37 +01:00 committed by GitHub
commit 0a19427ed8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
194 changed files with 7483 additions and 7175 deletions

22
.eslintrc.js Normal file
View File

@ -0,0 +1,22 @@
module.exports = {
env: {
commonjs: true,
es6: true,
node: true,
mocha: true
},
extends: [
'standard'
],
globals: {
Atomics: 'readonly',
SharedArrayBuffer: 'readonly'
},
parserOptions: {
ecmaVersion: 2018
},
rules: {
"indent": ["error", 4],
"semi": ["error", "always"]
}
}

View File

@ -1,3 +0,0 @@
test/support/
test/websocket_test/
app/models/formats/pg/topojson.js

View File

@ -1,98 +0,0 @@
{
// // JSHint Default Configuration File (as on JSHint website)
// // See http://jshint.com/docs/ for more details
//
// "maxerr" : 50, // {int} Maximum error before stopping
//
// // Enforcing
// "bitwise" : true, // true: Prohibit bitwise operators (&, |, ^, etc.)
// "camelcase" : false, // true: Identifiers must be in camelCase
"curly" : true, // true: Require {} for every new block or scope
"eqeqeq" : true, // true: Require triple equals (===) for comparison
"forin" : true, // true: Require filtering for..in loops with obj.hasOwnProperty()
"freeze" : true, // true: prohibits overwriting prototypes of native objects such as Array, Date etc.
"immed" : true, // true: Require immediate invocations to be wrapped in parens e.g. `(function () { } ());`
// "indent" : 4, // {int} Number of spaces to use for indentation
// "latedef" : false, // true: Require variables/functions to be defined before being used
"newcap" : true, // true: Require capitalization of all constructor functions e.g. `new F()`
"noarg" : true, // true: Prohibit use of `arguments.caller` and `arguments.callee`
// "noempty" : true, // true: Prohibit use of empty blocks
"nonbsp" : true, // true: Prohibit "non-breaking whitespace" characters.
"nonew" : true, // true: Prohibit use of constructors for side-effects (without assignment)
// "plusplus" : false, // true: Prohibit use of `++` & `--`
// "quotmark" : false, // Quotation mark consistency:
// // false : do nothing (default)
// // true : ensure whatever is used is consistent
// // "single" : require single quotes
// // "double" : require double quotes
"undef" : true, // true: Require all non-global variables to be declared (prevents global leaks)
"unused" : true, // true: Require all defined variables be used
// "strict" : true, // true: Requires all functions run in ES5 Strict Mode
// "maxparams" : false, // {int} Max number of formal params allowed per function
// "maxdepth" : false, // {int} Max depth of nested blocks (within functions)
// "maxstatements" : false, // {int} Max number statements per function
"maxcomplexity" : 6, // {int} Max cyclomatic complexity per function
"maxlen" : 120, // {int} Max number of characters per line
//
// // Relaxing
// "asi" : false, // true: Tolerate Automatic Semicolon Insertion (no semicolons)
// "boss" : false, // true: Tolerate assignments where comparisons would be expected
"debug" : false, // true: Allow debugger statements e.g. browser breakpoints.
// "eqnull" : false, // true: Tolerate use of `== null`
// "es5" : false, // true: Allow ES5 syntax (ex: getters and setters)
"esnext" : true, // true: Allow ES.next (ES6) syntax (ex: `const`)
// "moz" : false, // true: Allow Mozilla specific syntax (extends and overrides esnext features)
// // (ex: `for each`, multiple try/catch, function expression…)
// "evil" : false, // true: Tolerate use of `eval` and `new Function()`
// "expr" : false, // true: Tolerate `ExpressionStatement` as Programs
// "funcscope" : false, // true: Tolerate defining variables inside control statements
// "globalstrict" : false, // true: Allow global "use strict" (also enables 'strict')
// "iterator" : false, // true: Tolerate using the `__iterator__` property
// "lastsemic" : false, // true: Tolerate omitting a semicolon for the last statement of a 1-line block
// "laxbreak" : false, // true: Tolerate possibly unsafe line breakings
// "laxcomma" : false, // true: Tolerate comma-first style coding
// "loopfunc" : false, // true: Tolerate functions being defined in loops
// "multistr" : false, // true: Tolerate multi-line strings
// "noyield" : false, // true: Tolerate generator functions with no yield statement in them.
// "notypeof" : false, // true: Tolerate invalid typeof operator values
// "proto" : false, // true: Tolerate using the `__proto__` property
// "scripturl" : false, // true: Tolerate script-targeted URLs
// "shadow" : false, // true: Allows re-define variables later in code e.g. `var x=1; x=2;`
// "sub" : false, // true: Tolerate using `[]` notation when it can still be expressed in dot notation
// "supernew" : false, // true: Tolerate `new function () { ... };` and `new Object;`
// "validthis" : false, // true: Tolerate using this in a non-constructor function
//
// // Environments
// "browser" : true, // Web Browser (window, document, etc)
// "browserify" : false, // Browserify (node.js code in the browser)
// "couch" : false, // CouchDB
// "devel" : true, // Development/debugging (alert, confirm, etc)
// "dojo" : false, // Dojo Toolkit
// "jasmine" : false, // Jasmine
// "jquery" : false, // jQuery
"mocha" : true, // Mocha
// "mootools" : false, // MooTools
"node" : true, // Node.js
// "nonstandard" : false, // Widely adopted globals (escape, unescape, etc)
// "prototypejs" : false, // Prototype and Scriptaculous
// "qunit" : false, // QUnit
// "rhino" : false, // Rhino
// "shelljs" : false, // ShellJS
// "worker" : false, // Web Workers
// "wsh" : false, // Windows Scripting Host
// "yui" : false, // Yahoo User Interface
// Custom Globals
"globals" : { // additional predefined global variables
"suite": true,
"suiteSetup": true,
"test": true,
"suiteTeardown": true,
"beforeEach": true,
"afterEach": true,
"before": true,
"after": true,
"describe": true,
"it": true
}
}

View File

@ -9,9 +9,9 @@ clean:
check:
npm test
jshint:
@echo "***jshint***"
@./node_modules/.bin/jshint lib/ test/ app.js
eslint:
@echo "***eslint***"
@./node_modules/.bin/eslint lib/**/*.js test/**/*.js app.js
TEST_SUITE := $(shell find test/{unit,integration,acceptance} -name "*.js")
TEST_SUITE_UNIT := $(shell find test/unit -name "*.js")
@ -39,7 +39,7 @@ test-batch:
@echo "***batch queries tests***"
@$(SHELL) test/run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE_BATCH)
test-all: test jshint
test-all: test eslint
coverage:
@RUNTESTFLAGS=--with-coverage make test

78
app.js
View File

@ -36,9 +36,9 @@ const availableEnvironments = ['development', 'production', 'test', 'staging'];
// sanity check arguments
if (availableEnvironments.indexOf(ENVIRONMENT) === -1) {
console.error("node app.js [environment]");
console.error("Available environments: " + availableEnvironments.join(', '));
process.exit(1);
console.error('node app.js [environment]');
console.error('Available environments: ' + availableEnvironments.join(', '));
process.exit(1);
}
global.settings.api_hostname = fqdn.hostname();
@ -53,23 +53,23 @@ if (global.settings.log_filename) {
const logFilename = path.resolve(global.settings.log_filename);
const logDirectory = path.dirname(logFilename);
if (!fs.existsSync(logDirectory)) {
console.error("Log filename directory does not exist: " + logDirectory);
console.error('Log filename directory does not exist: ' + logDirectory);
process.exit(1);
}
console.log("Logs will be written to " + logFilename);
console.log('Logs will be written to ' + logFilename);
log4jsConfig.appenders.push(
{ type: "file", absolute: true, filename: logFilename }
{ type: 'file', absolute: true, filename: logFilename }
);
} else {
log4jsConfig.appenders.push(
{ type: "console", layout: { type:'basic' } }
{ type: 'console', layout: { type: 'basic' } }
);
}
global.log4js.configure(log4jsConfig);
global.logger = global.log4js.getLogger();
const version = require("./package").version;
const version = require('./package').version;
const StatsClient = require('./lib/stats/client');
@ -85,21 +85,21 @@ const createServer = require('./lib/server');
const server = createServer(statsClient);
const listener = server.listen(global.settings.node_port, global.settings.node_host);
listener.on('listening', function() {
console.info("Using Node.js %s", process.version);
listener.on('listening', function () {
console.info('Using Node.js %s', process.version);
console.info('Using configuration file "%s"', configurationFile);
console.log(
"CartoDB SQL API %s listening on %s:%s PID=%d (%s)",
'CartoDB SQL API %s listening on %s:%s PID=%d (%s)',
version, global.settings.node_host, global.settings.node_port, process.pid, ENVIRONMENT
);
});
process.on('uncaughtException', function(err) {
process.on('uncaughtException', function (err) {
global.logger.error('Uncaught exception: ' + err.stack);
});
process.on('SIGHUP', function() {
global.log4js.clearAndShutdownAppenders(function() {
process.on('SIGHUP', function () {
global.log4js.clearAndShutdownAppenders(function () {
global.log4js.configure(log4jsConfig);
global.logger = global.log4js.getLogger();
console.log('Log files reloaded');
@ -116,7 +116,7 @@ process.on('SIGHUP', function() {
addHandlers({ killTimeout: 45000 });
function addHandlers({ killTimeout }) {
function addHandlers ({ killTimeout }) {
// FIXME: minimize the number of 'uncaughtException' before uncomment the following line
// process.on('uncaughtException', exitProcess(listener, logger, killTimeout));
process.on('unhandledRejection', exitProcess({ killTimeout }));
@ -171,7 +171,7 @@ function scheduleForcedExit ({ killTimeout }) {
killTimer.unref();
}
function isGteMinVersion(version, minVersion) {
function isGteMinVersion (version, minVersion) {
const versionMatch = /[a-z]?([0-9]*)/.exec(version);
if (versionMatch) {
const majorVersion = parseInt(versionMatch[1], 10);
@ -183,7 +183,7 @@ function isGteMinVersion(version, minVersion) {
}
setInterval(function memoryUsageMetrics () {
let memoryUsage = process.memoryUsage();
const memoryUsage = process.memoryUsage();
Object.keys(memoryUsage).forEach(property => {
statsClient.gauge(`sqlapi.memory.${property}`, memoryUsage[property]);
@ -225,12 +225,12 @@ setInterval(function cpuUsageMetrics () {
}, 5000);
if (global.gc && isGteMinVersion(process.version, 6)) {
const gcInterval = Number.isFinite(global.settings.gc_interval) ?
global.settings.gc_interval :
10000;
const gcInterval = Number.isFinite(global.settings.gc_interval)
? global.settings.gc_interval
: 10000;
if (gcInterval > 0) {
setInterval(function gcForcedCycle() {
setInterval(function gcForcedCycle () {
global.gc();
}, gcInterval);
}
@ -252,24 +252,24 @@ function getGCTypeValue (type) {
let value;
switch (type) {
case 1:
value = 'Scavenge';
break;
case 2:
value = 'MarkSweepCompact';
break;
case 4:
value = 'IncrementalMarking';
break;
case 8:
value = 'ProcessWeakCallbacks';
break;
case 15:
value = 'All';
break;
default:
value = 'Unkown';
break;
case 1:
value = 'Scavenge';
break;
case 2:
value = 'MarkSweepCompact';
break;
case 4:
value = 'IncrementalMarking';
break;
case 8:
value = 'ProcessWeakCallbacks';
break;
case 15:
value = 'All';
break;
default:
value = 'Unkown';
break;
}
return value;

View File

@ -17,7 +17,7 @@ function healthCheck ({ healthCheckBackend }) {
const healthConfig = global.settings.health || {};
if (!healthConfig.enabled) {
return res.status(200).send({enabled: false, ok: true});
return res.status(200).send({ enabled: false, ok: true });
}
const startTime = Date.now();

View File

@ -6,7 +6,7 @@ module.exports = function accessValidator () {
return function accessValidatorMiddleware (req, res, next) {
const { affectedTables, authorizationLevel } = res.locals;
if(!pgEntitiesAccessValidator.validate(affectedTables, authorizationLevel)) {
if (!pgEntitiesAccessValidator.validate(affectedTables, authorizationLevel)) {
const error = new SyntaxError('system tables are forbidden');
error.http_status = 403;

View File

@ -36,7 +36,7 @@ module.exports = function authorization (metadataBackend, forceToBeMaster = fals
return next(new Error('permission denied'));
}
res.set('vary', 'Authorization'); //Honor Authorization header when caching.
res.set('vary', 'Authorization'); // Honor Authorization header when caching.
next();
});
@ -46,7 +46,7 @@ module.exports = function authorization (metadataBackend, forceToBeMaster = fals
const credentialsGetters = [
getCredentialsFromHeaderAuthorization,
getCredentialsFromRequestQueryString,
getCredentialsFromRequestBody,
getCredentialsFromRequestBody
];
function getCredentialsFromRequest (req) {
@ -63,7 +63,7 @@ function getCredentialsFromRequest (req) {
return credentials;
}
function getCredentialsFromHeaderAuthorization(req) {
function getCredentialsFromHeaderAuthorization (req) {
const { pass, name } = basicAuth(req) || {};
if (pass !== undefined && name !== undefined) {
@ -76,7 +76,7 @@ function getCredentialsFromHeaderAuthorization(req) {
return false;
}
function getCredentialsFromRequestQueryString(req) {
function getCredentialsFromRequestQueryString (req) {
if (req.query.api_key) {
return {
apiKeyToken: req.query.api_key
@ -92,7 +92,7 @@ function getCredentialsFromRequestQueryString(req) {
return false;
}
function getCredentialsFromRequestBody(req) {
function getCredentialsFromRequestBody (req) {
if (req.body && req.body.api_key) {
return {
apiKeyToken: req.body.api_key
@ -108,7 +108,7 @@ function getCredentialsFromRequestBody(req) {
return false;
}
function apiKeyTokenFound(credentials) {
function apiKeyTokenFound (credentials) {
if (typeof credentials === 'boolean') {
return credentials;
}

View File

@ -23,7 +23,7 @@ var multer = require('multer');
* @api private
*/
function mime(req) {
function mime (req) {
var str = req.headers['content-type'] || '';
return str.split(';')[0];
}
@ -80,15 +80,15 @@ function mime(req) {
* @api public
*/
exports = module.exports = function bodyParser(options){
exports = module.exports = function bodyParser (options) {
options = options || {};
return function bodyParser(req, res, next) {
return function bodyParser (req, res, next) {
if (req.body) {
return next();
}
req.body = {};
if ('GET' === req.method || 'HEAD' === req.method) {
if (req.method === 'GET' || req.method === 'HEAD') {
return next();
}
var parser = exports.parse[mime(req)];
@ -110,15 +110,15 @@ exports.parse = {};
* Parse application/x-www-form-urlencoded.
*/
exports.parse['application/x-www-form-urlencoded'] = function(req, options, fn){
exports.parse['application/x-www-form-urlencoded'] = function (req, options, fn) {
var buf = '';
req.setEncoding('utf8');
req.on('data', function(chunk){ buf += chunk; });
req.on('end', function(){
req.on('data', function (chunk) { buf += chunk; });
req.on('end', function () {
try {
req.body = buf.length ? qs.parse(buf) : {};
fn();
} catch (err){
} catch (err) {
fn(err);
}
});
@ -128,15 +128,15 @@ exports.parse['application/x-www-form-urlencoded'] = function(req, options, fn){
* Parse application/json.
*/
exports.parse['application/json'] = function(req, options, fn){
exports.parse['application/json'] = function (req, options, fn) {
var buf = '';
req.setEncoding('utf8');
req.on('data', function(chunk){ buf += chunk; });
req.on('end', function(){
req.on('data', function (chunk) { buf += chunk; });
req.on('end', function () {
try {
req.body = buf.length ? JSON.parse(buf) : {};
fn();
} catch (err){
} catch (err) {
fn(err);
}
});

View File

@ -6,18 +6,18 @@ module.exports = function connectionParams (userDatabaseService) {
userDatabaseService.getConnectionParams(user, apikeyToken, authorizationLevel,
function (err, userDbParams, authDbParams) {
if (req.profiler) {
req.profiler.done('getConnectionParams');
}
if (req.profiler) {
req.profiler.done('getConnectionParams');
}
if (err) {
return next(err);
}
if (err) {
return next(err);
}
res.locals.userDbParams = userDbParams;
res.locals.authDbParams = authDbParams;
res.locals.userDbParams = userDbParams;
res.locals.authDbParams = authDbParams;
next();
});
next();
});
};
};

View File

@ -8,8 +8,8 @@ module.exports = function content () {
const { formatter } = req;
const useInline = !req.query.format && !req.body.format && !req.query.filename && !req.body.filename;
res.header("Content-Disposition", getContentDisposition(formatter, filename, useInline));
res.header("Content-Type", formatter.getContentType());
res.header('Content-Disposition', getContentDisposition(formatter, filename, useInline));
res.header('Content-Type', formatter.getContentType());
next();
};

View File

@ -1,6 +1,6 @@
'use strict';
module.exports = function cors(extraHeaders = []) {
module.exports = function cors (extraHeaders = []) {
return function (req, res, next) {
const headers = [
'X-Requested-With',

View File

@ -13,7 +13,7 @@ module.exports = function dbQuota () {
return next(err);
}
client.query(remainingQuotaQuery, (err, result) => {
if(err) {
if (err) {
return next(err);
}
const remainingQuota = result.rows[0].remaining_quota;

View File

@ -4,10 +4,10 @@ const errorHandlerFactory = require('../../services/error-handler-factory');
const { stringifyForLogs } = require('../../utils/logs');
const MAX_ERROR_STRING_LENGTH = 1024;
module.exports = function error() {
return function errorMiddleware(err, req, res, next) {
module.exports = function error () {
return function errorMiddleware (err, req, res, next) {
const errorHandler = errorHandlerFactory(err);
let errorResponse = errorHandler.getResponse();
const errorResponse = errorHandler.getResponse();
if (global.settings.environment === 'development') {
errorResponse.stack = err.stack;
@ -15,11 +15,11 @@ module.exports = function error() {
if (global.settings.environment !== 'test') {
// TODO: email this Exception report
console.error("EXCEPTION REPORT: " + err.stack);
console.error('EXCEPTION REPORT: ' + err.stack);
}
// Force inline content disposition
res.header("Content-Disposition", 'inline');
res.header('Content-Disposition', 'inline');
if (req && req.profiler) {
req.profiler.done('finish');
@ -44,7 +44,7 @@ module.exports = function error() {
};
};
function getStatusError(errorHandler, req) {
function getStatusError (errorHandler, req) {
let statusError = errorHandler.http_status;
// JSONP has to return 200 status error
@ -55,7 +55,7 @@ function getStatusError(errorHandler, req) {
return statusError;
}
function setErrorHeader(errorHandler, res) {
function setErrorHeader (errorHandler, res) {
const errorsLog = {
context: errorHandler.context,
detail: errorHandler.detail,

View File

@ -2,7 +2,7 @@
const formats = require('../../models/formats');
module.exports = function formatter () {
module.exports = function formatter () {
return function formatterMiddleware (req, res, next) {
const { format } = res.locals.params;

View File

@ -4,7 +4,7 @@ module.exports = function lastModified () {
return function lastModifiedMiddleware (req, res, next) {
const { affectedTables } = res.locals;
if(!!affectedTables) {
if (affectedTables) {
const lastUpdatedAt = affectedTables.getLastUpdatedAt(Date.now());
res.header('Last-Modified', new Date(lastUpdatedAt).toUTCString());
}

View File

@ -14,8 +14,8 @@ const TYPES = {
JOB: 'job'
};
module.exports = function log(sqlType = TYPES.QUERY) {
return function logMiddleware(req, res, next) {
module.exports = function log (sqlType = TYPES.QUERY) {
return function logMiddleware (req, res, next) {
const logObj = {
request: {
sql: prepareSQL(res.locals.params.sql, sqlType)
@ -30,12 +30,11 @@ module.exports = function log(sqlType = TYPES.QUERY) {
module.exports.TYPES = TYPES;
function prepareSQL(sql, sqlType) {
function prepareSQL (sql, sqlType) {
if (!sql || !global.settings.logQueries) {
return null;
}
if (typeof sql === 'string') {
return {
type: sqlType,
@ -65,7 +64,7 @@ function prepareSQL(sql, sqlType) {
*
* @param {Object} sql
*/
function prepareBatchFallbackQuery(sql) {
function prepareBatchFallbackQuery (sql) {
const fallbackQuery = {};
if (sql.onsuccess) {
@ -95,6 +94,6 @@ function prepareBatchFallbackQuery(sql) {
return fallbackQuery;
}
function ensureMaxQueryLength(sql, length = MAX_SQL_LENGTH) {
function ensureMaxQueryLength (sql, length = MAX_SQL_LENGTH) {
return sql.substring(0, length);
}

View File

@ -22,20 +22,20 @@ function getParamsFromStrategy (strategy) {
let fn;
switch (strategy) {
case('query'):
fn = queryParamsStrategy;
break;
case('job'):
fn = jobParamsStrategy;
break;
case('copyfrom'):
fn = copyFromParamsStrategy;
break;
case('copyto'):
fn = copyToParamsStrategy;
break;
default:
throw new Error('Missig parameter strategy');
case ('query'):
fn = queryParamsStrategy;
break;
case ('job'):
fn = jobParamsStrategy;
break;
case ('copyfrom'):
fn = copyFromParamsStrategy;
break;
case ('copyto'):
fn = copyToParamsStrategy;
break;
default:
throw new Error('Missig parameter strategy');
}
return fn;
@ -52,7 +52,7 @@ function queryParamsStrategy (input) {
params.format = parseFormat(input.format);
if (!formats.hasOwnProperty(params.format) ) {
if (!Object.prototype.hasOwnProperty.call(formats, params.format)) {
throw new Error(`Invalid format: ${params.format}`);
}
@ -102,7 +102,7 @@ function copyToParamsStrategy (input) {
throw new Error('SQL is missing');
}
if (!params.sql .toUpperCase().startsWith('COPY ')) {
if (!params.sql.toUpperCase().startsWith('COPY ')) {
throw new Error('SQL must start with COPY');
}
@ -150,7 +150,7 @@ function parseFormat (inputFormat) {
if (inputFormat === '' || inputFormat === undefined) {
format = 'json';
} else if (typeof inputFormat === 'string'){
} else if (typeof inputFormat === 'string') {
format = inputFormat.toLowerCase();
}
@ -166,12 +166,12 @@ function parseSkipFiles (inputSkippedFiles) {
return skipfields;
}
if (typeof inputSkippedFiles === 'string' ) {
if (typeof inputSkippedFiles === 'string') {
skipfields = inputSkippedFiles.split(',');
return skipfields;
}
if (Array.isArray(inputSkippedFiles) ) {
if (Array.isArray(inputSkippedFiles)) {
skipfields = [];
inputSkippedFiles.forEach(e => {

View File

@ -9,14 +9,13 @@ const RATE_LIMIT_ENDPOINTS_GROUPS = {
COPY_TO: 'copy_to'
};
function rateLimit(userLimits, endpointGroup = null) {
function rateLimit (userLimits, endpointGroup = null) {
if (!isRateLimitEnabled(endpointGroup)) {
return function rateLimitDisabledMiddleware(req, res, next) { next(); };
return function rateLimitDisabledMiddleware (req, res, next) { next(); };
}
return function rateLimitMiddleware(req, res, next) {
userLimits.getRateLimit(res.locals.user, endpointGroup, function(err, userRateLimit) {
return function rateLimitMiddleware (req, res, next) {
userLimits.getRateLimit(res.locals.user, endpointGroup, function (err, userRateLimit) {
if (err) {
return next(err);
}
@ -51,7 +50,7 @@ function rateLimit(userLimits, endpointGroup = null) {
};
}
function isRateLimitEnabled(endpointGroup) {
function isRateLimitEnabled (endpointGroup) {
return global.settings.ratelimits.rateLimitsEnabled &&
endpointGroup &&
global.settings.ratelimits.endpoints[endpointGroup];

View File

@ -1,7 +1,7 @@
'use strict';
module.exports = function socketTimeout () {
if (!global.settings.hasOwnProperty('node_socket_timeout')) {
if (!Object.prototype.hasOwnProperty.call(global.settings, 'node_socket_timeout')) {
return function dummySocketTimeoutMiddleware (req, res, next) {
next();
};

View File

@ -8,7 +8,7 @@ module.exports = function user (metadataBackend) {
return function userMiddleware (req, res, next) {
res.locals.user = getUserNameFromRequest(req, cdbRequest);
checkUserExists(metadataBackend, res.locals.user, function(err, userExists) {
checkUserExists(metadataBackend, res.locals.user, function (err, userExists) {
if (err || !userExists) {
const error = new Error('Unauthorized');
error.type = 'auth';
@ -28,7 +28,7 @@ function getUserNameFromRequest (req, cdbRequest) {
}
function checkUserExists (metadataBackend, userName, callback) {
metadataBackend.getUserId(userName, function(err) {
metadataBackend.getUserId(userName, function (err) {
callback(err, !err);
});
}

View File

@ -72,8 +72,8 @@ function handleCopyTo (logger) {
const streamCopy = new StreamCopy(sql, userDbParams, logger);
const metrics = new StreamCopyMetrics(logger, 'copyto', sql, user, isGzip);
res.header("Content-Disposition", `attachment; filename=${encodeURIComponent(filename)}`);
res.header("Content-Type", "application/octet-stream");
res.header('Content-Disposition', `attachment; filename=${encodeURIComponent(filename)}`);
res.header('Content-Type', 'application/octet-stream');
streamCopy.getPGStream(StreamCopy.ACTION_TO, (err, pgstream) => {
if (err) {
@ -88,7 +88,7 @@ function handleCopyTo (logger) {
return next(err);
})
.on('end', () => metrics.end(streamCopy.getRowCount()))
.pipe(res)
.pipe(res)
.on('close', () => pgstream.emit('error', new Error('Connection closed by client')))
.on('error', err => pgstream.emit('error', err));
});
@ -121,16 +121,16 @@ function handleCopyFrom (logger) {
pgstream.emit('error', err);
})
.on('close', () => pgstream.emit('error', new Error('Connection closed by client')))
.pipe(throttle)
.pipe(decompress)
.pipe(throttle)
.pipe(decompress)
.on('data', data => {
metrics.addSize(data.length);
if(metrics.size > dbRemainingQuota) {
if (metrics.size > dbRemainingQuota) {
return pgstream.emit('error', new Error('DB Quota exceeded'));
}
if((metrics.gzipSize || metrics.size) > COPY_FROM_MAX_POST_SIZE) {
if ((metrics.gzipSize || metrics.size) > COPY_FROM_MAX_POST_SIZE) {
return pgstream.emit('error', new Error(
`COPY FROM maximum POST size of ${COPY_FROM_MAX_POST_SIZE_PRETTY} exceeded`
));
@ -141,7 +141,7 @@ function handleCopyFrom (logger) {
metrics.end(null, err);
pgstream.emit('error', err);
})
.pipe(pgstream)
.pipe(pgstream)
.on('error', err => {
metrics.end(null, err);
@ -153,7 +153,7 @@ function handleCopyFrom (logger) {
const { time, rows } = metrics;
if (!rows) {
return next(new Error("No rows copied"));
return next(new Error('No rows copied'));
}
res.send({

View File

@ -73,9 +73,9 @@ function composeJobMiddlewares (metadataBackend, userDatabaseService, jobService
function cancelJob (jobService) {
return function cancelJobMiddleware (req, res, next) {
const { job_id } = req.params;
const { job_id: jobId } = req.params;
jobService.cancel(job_id, (err, job) => {
jobService.cancel(jobId, (err, job) => {
if (req.profiler) {
req.profiler.done('cancelJob');
}
@ -93,9 +93,9 @@ function cancelJob (jobService) {
function getJob (jobService) {
return function getJobMiddleware (req, res, next) {
const { job_id } = req.params;
const { job_id: jobId } = req.params;
jobService.get(job_id, (err, job) => {
jobService.get(jobId, (err, job) => {
if (req.profiler) {
req.profiler.done('getJob');
}
@ -143,7 +143,7 @@ function createJob (jobService) {
}
function checkBodyPayloadSize () {
return function checkBodyPayloadSizeMiddleware(req, res, next) {
return function checkBodyPayloadSizeMiddleware (req, res, next) {
const payload = JSON.stringify(req.body);
if (payload.length > MAX_LIMIT_QUERY_SIZE_IN_BYTES) {
@ -158,15 +158,15 @@ const ONE_KILOBYTE_IN_BYTES = 1024;
const MAX_LIMIT_QUERY_SIZE_IN_KB = 16;
const MAX_LIMIT_QUERY_SIZE_IN_BYTES = MAX_LIMIT_QUERY_SIZE_IN_KB * ONE_KILOBYTE_IN_BYTES;
function getMaxSizeErrorMessage(sql) {
function getMaxSizeErrorMessage (sql) {
return util.format([
'Your payload is too large: %s bytes. Max size allowed is %s bytes (%skb).',
'Are you trying to import data?.',
'Please, check out import api http://docs.cartodb.com/cartodb-platform/import-api/'
].join(' '),
sql.length,
MAX_LIMIT_QUERY_SIZE_IN_BYTES,
Math.round(MAX_LIMIT_QUERY_SIZE_IN_BYTES / ONE_KILOBYTE_IN_BYTES)
'Your payload is too large: %s bytes. Max size allowed is %s bytes (%skb).',
'Are you trying to import data?.',
'Please, check out import api http://docs.cartodb.com/cartodb-platform/import-api/'
].join(' '),
sql.length,
MAX_LIMIT_QUERY_SIZE_IN_BYTES,
Math.round(MAX_LIMIT_QUERY_SIZE_IN_BYTES / ONE_KILOBYTE_IN_BYTES)
);
}

View File

@ -121,7 +121,6 @@ function handleQuery ({ stats } = {}) {
stats.increment('sqlapi.query.success');
}
}
});
} catch (err) {
next(err);

View File

@ -3,7 +3,7 @@
/**
* this module allows to auth user using an pregenerated api key
*/
function ApikeyAuth(req, metadataBackend, username, apikeyToken) {
function ApikeyAuth (req, metadataBackend, username, apikeyToken) {
this.req = req;
this.metadataBackend = metadataBackend;
this.username = username;
@ -12,7 +12,7 @@ function ApikeyAuth(req, metadataBackend, username, apikeyToken) {
module.exports = ApikeyAuth;
function usernameMatches(basicAuthUsername, requestUsername) {
function usernameMatches (basicAuthUsername, requestUsername) {
return !(basicAuthUsername && (basicAuthUsername !== requestUsername));
}
@ -43,7 +43,7 @@ ApikeyAuth.prototype.verifyCredentials = function (callback) {
}
return callback(null, getAuthorizationLevel(apikey));
} else {
} else {
const apiKeyNotFoundError = new Error('Unauthorized');
apiKeyNotFoundError.type = 'auth';
apiKeyNotFoundError.subtype = 'api-key-not-found';
@ -62,11 +62,11 @@ ApikeyAuth.prototype.getCredentials = function () {
return this.apikeyToken;
};
function getAuthorizationLevel(apikey) {
function getAuthorizationLevel (apikey) {
return apikey.type;
}
function isApiKeyFound(apikey) {
function isApiKeyFound (apikey) {
return apikey.type !== null &&
apikey.user !== null &&
apikey.databasePassword !== null &&

View File

@ -1,9 +1,9 @@
'use strict';
var ApiKeyAuth = require('./apikey'),
OAuthAuth = require('./oauth');
var ApiKeyAuth = require('./apikey');
var OAuthAuth = require('./oauth');
function AuthApi(req, requestParams) {
function AuthApi (req, requestParams) {
this.req = req;
this.authBackend = getAuthBackend(req, requestParams);
@ -18,18 +18,18 @@ AuthApi.prototype.getType = function () {
}
};
AuthApi.prototype.hasCredentials = function() {
AuthApi.prototype.hasCredentials = function () {
if (this._hasCredentials === null) {
this._hasCredentials = this.authBackend.hasCredentials();
}
return this._hasCredentials;
};
AuthApi.prototype.getCredentials = function() {
AuthApi.prototype.getCredentials = function () {
return this.authBackend.getCredentials();
};
AuthApi.prototype.verifyCredentials = function(callback) {
AuthApi.prototype.verifyCredentials = function (callback) {
if (this.hasCredentials()) {
this.authBackend.verifyCredentials(callback);
} else {
@ -37,7 +37,7 @@ AuthApi.prototype.verifyCredentials = function(callback) {
}
};
function getAuthBackend(req, requestParams) {
function getAuthBackend (req, requestParams) {
if (requestParams.api_key) {
return new ApiKeyAuth(req, requestParams.metadataBackend, requestParams.user, requestParams.api_key);
} else {

View File

@ -7,166 +7,166 @@ var step = require('step');
var CdbRequest = require('../models/cartodb-request');
var cdbReq = new CdbRequest();
var oAuth = (function(){
var me = {
oauth_database: 3,
oauth_user_key: "rails:oauth_access_tokens:<%= oauth_access_key %>",
is_oauth_request: true
};
var oAuth = (function () {
var me = {
oauth_database: 3,
oauth_user_key: 'rails:oauth_access_tokens:<%= oauth_access_key %>',
is_oauth_request: true
};
// oauth token cases:
// * in GET request
// * in header
me.parseTokens = function(req){
var query_oauth = _.clone(req.method === "POST" ? req.body: req.query);
var header_oauth = {};
var oauth_variables = ['oauth_body_hash',
'oauth_consumer_key',
'oauth_token',
'oauth_signature_method',
'oauth_signature',
'oauth_timestamp',
'oauth_nonce',
'oauth_version'];
// oauth token cases:
// * in GET request
// * in header
me.parseTokens = function (req) {
var queryOauth = _.clone(req.method === 'POST' ? req.body : req.query);
var headerOauth = {};
var oauthVariables = ['oauth_body_hash',
'oauth_consumer_key',
'oauth_token',
'oauth_signature_method',
'oauth_signature',
'oauth_timestamp',
'oauth_nonce',
'oauth_version'];
// pull only oauth tokens out of query
var non_oauth = _.difference(_.keys(query_oauth), oauth_variables);
_.each(non_oauth, function(key){ delete query_oauth[key]; });
// pull only oauth tokens out of query
var nonOauth = _.difference(_.keys(queryOauth), oauthVariables);
_.each(nonOauth, function (key) { delete queryOauth[key]; });
// pull oauth tokens out of header
var header_string = req.headers.authorization;
if (!_.isUndefined(header_string)) {
_.each(oauth_variables, function(oauth_key){
var matched_string = header_string.match(new RegExp(oauth_key + '=\"([^\"]+)\"'));
if (!_.isNull(matched_string)) {
header_oauth[oauth_key] = decodeURIComponent(matched_string[1]);
// pull oauth tokens out of header
var headerString = req.headers.authorization;
if (!_.isUndefined(headerString)) {
_.each(oauthVariables, function (oauthKey) {
var matchedString = headerString.match(new RegExp(oauthKey + '="([^"]+)"'));
if (!_.isNull(matchedString)) {
headerOauth[oauthKey] = decodeURIComponent(matchedString[1]);
}
});
}
});
}
//merge header and query oauth tokens. preference given to header oauth
return _.defaults(header_oauth, query_oauth);
};
// merge header and query oauth tokens. preference given to header oauth
return _.defaults(headerOauth, queryOauth);
};
// remove oauthy tokens from an object
me.splitParams = function(obj) {
var removed = null;
for (var prop in obj) {
if (/^oauth_\w+$/.test(prop)) {
if(!removed) {
removed = {};
// remove oauthy tokens from an object
me.splitParams = function (obj) {
var removed = null;
for (var prop in obj) {
if (/^oauth_\w+$/.test(prop)) {
if (!removed) {
removed = {};
}
removed[prop] = obj[prop];
delete obj[prop];
}
removed[prop] = obj[prop];
delete obj[prop];
}
}
return removed;
};
return removed;
};
me.getAllowedHosts= function() {
var oauthConfig = global.settings.oauth || {};
return oauthConfig.allowedHosts || ['carto.com', 'cartodb.com'];
};
me.getAllowedHosts = function () {
var oauthConfig = global.settings.oauth || {};
return oauthConfig.allowedHosts || ['carto.com', 'cartodb.com'];
};
// do new fancy get User ID
me.verifyRequest = function(req, metadataBackend, callback) {
var that = this;
//TODO: review this
var httpProto = req.protocol;
if(!httpProto || (httpProto !== 'http' && httpProto !== 'https')) {
var msg = "Unknown HTTP protocol " + httpProto + ".";
var unknownProtocolErr = new Error(msg);
unknownProtocolErr.http_status = 500;
return callback(unknownProtocolErr);
}
var username = cdbReq.userByReq(req);
var requestTokens;
var signature;
step(
function getTokensFromURL(){
return oAuth.parseTokens(req);
},
function getOAuthHash(err, _requestTokens) {
if (err) {
throw err;
// do new fancy get User ID
me.verifyRequest = function (req, metadataBackend, callback) {
var that = this;
// TODO: review this
var httpProto = req.protocol;
if (!httpProto || (httpProto !== 'http' && httpProto !== 'https')) {
var msg = 'Unknown HTTP protocol ' + httpProto + '.';
var unknownProtocolErr = new Error(msg);
unknownProtocolErr.http_status = 500;
return callback(unknownProtocolErr);
}
// this is oauth request only if oauth headers are present
this.is_oauth_request = !_.isEmpty(_requestTokens);
var username = cdbReq.userByReq(req);
var requestTokens;
var signature;
if (this.is_oauth_request) {
requestTokens = _requestTokens;
that.getOAuthHash(metadataBackend, requestTokens.oauth_token, this);
} else {
return null;
}
},
function regenerateSignature(err, oAuthHash){
if (err) {
throw err;
}
if (!this.is_oauth_request) {
return null;
}
step(
function getTokensFromURL () {
return oAuth.parseTokens(req);
},
function getOAuthHash (err, _requestTokens) {
if (err) {
throw err;
}
var consumer = OAuthUtil.createConsumer(oAuthHash.consumer_key, oAuthHash.consumer_secret);
var access_token = OAuthUtil.createToken(oAuthHash.access_token_token, oAuthHash.access_token_secret);
var signer = OAuthUtil.createHmac(consumer, access_token);
// this is oauth request only if oauth headers are present
this.is_oauth_request = !_.isEmpty(_requestTokens);
var method = req.method;
var hostsToValidate = {};
var requestHost = req.headers.host;
hostsToValidate[requestHost] = true;
that.getAllowedHosts().forEach(function(allowedHost) {
hostsToValidate[username + '.' + allowedHost] = true;
});
if (this.is_oauth_request) {
requestTokens = _requestTokens;
that.getOAuthHash(metadataBackend, requestTokens.oauth_token, this);
} else {
return null;
}
},
function regenerateSignature (err, oAuthHash) {
if (err) {
throw err;
}
if (!this.is_oauth_request) {
return null;
}
that.splitParams(req.query);
// remove oauth_signature from body
if(req.body) {
delete req.body.oauth_signature;
}
signature = requestTokens.oauth_signature;
// remove signature from requestTokens
delete requestTokens.oauth_signature;
var requestParams = _.extend({}, req.body, requestTokens, req.query);
var consumer = OAuthUtil.createConsumer(oAuthHash.consumer_key, oAuthHash.consumer_secret);
var accessToken = OAuthUtil.createToken(oAuthHash.access_token_token, oAuthHash.access_token_secret);
var signer = OAuthUtil.createHmac(consumer, accessToken);
var hosts = Object.keys(hostsToValidate);
var requestSignatures = hosts.map(function(host) {
var url = httpProto + '://' + host + req.path;
return signer.sign(method, url, requestParams);
});
var method = req.method;
var hostsToValidate = {};
var requestHost = req.headers.host;
hostsToValidate[requestHost] = true;
that.getAllowedHosts().forEach(function (allowedHost) {
hostsToValidate[username + '.' + allowedHost] = true;
});
return requestSignatures.reduce(function(validSignature, requestSignature) {
if (signature === requestSignature && !_.isUndefined(requestSignature)) {
validSignature = true;
that.splitParams(req.query);
// remove oauth_signature from body
if (req.body) {
delete req.body.oauth_signature;
}
signature = requestTokens.oauth_signature;
// remove signature from requestTokens
delete requestTokens.oauth_signature;
var requestParams = _.extend({}, req.body, requestTokens, req.query);
var hosts = Object.keys(hostsToValidate);
var requestSignatures = hosts.map(function (host) {
var url = httpProto + '://' + host + req.path;
return signer.sign(method, url, requestParams);
});
return requestSignatures.reduce(function (validSignature, requestSignature) {
if (signature === requestSignature && !_.isUndefined(requestSignature)) {
validSignature = true;
}
return validSignature;
}, false);
},
function finishValidation (err, hasValidSignature) {
const authorizationLevel = hasValidSignature ? 'master' : null;
return callback(err, authorizationLevel);
}
return validSignature;
}, false);
},
function finishValidation(err, hasValidSignature) {
const authorizationLevel = hasValidSignature ? 'master' : null;
return callback(err, authorizationLevel);
}
);
};
);
};
me.getOAuthHash = function(metadataBackend, oAuthAccessKey, callback){
metadataBackend.getOAuthHash(oAuthAccessKey, callback);
};
me.getOAuthHash = function (metadataBackend, oAuthAccessKey, callback) {
metadataBackend.getOAuthHash(oAuthAccessKey, callback);
};
return me;
return me;
})();
function OAuthAuth(req, metadataBackend) {
function OAuthAuth (req, metadataBackend) {
this.req = req;
this.metadataBackend = metadataBackend;
this.isOAuthRequest = null;
}
OAuthAuth.prototype.verifyCredentials = function(callback) {
OAuthAuth.prototype.verifyCredentials = function (callback) {
if (this.hasCredentials()) {
oAuth.verifyRequest(this.req, this.metadataBackend, callback);
} else {
@ -174,19 +174,18 @@ OAuthAuth.prototype.verifyCredentials = function(callback) {
}
};
OAuthAuth.prototype.getCredentials = function() {
return oAuth.parseTokens(this.req);
OAuthAuth.prototype.getCredentials = function () {
return oAuth.parseTokens(this.req);
};
OAuthAuth.prototype.hasCredentials = function() {
OAuthAuth.prototype.hasCredentials = function () {
if (this.isOAuthRequest === null) {
var passed_tokens = oAuth.parseTokens(this.req);
this.isOAuthRequest = !_.isEmpty(passed_tokens);
var passedTokens = oAuth.parseTokens(this.req);
this.isOAuthRequest = !_.isEmpty(passedTokens);
}
return this.isOAuthRequest;
};
module.exports = OAuthAuth;
module.exports.backend = oAuth;

View File

@ -3,10 +3,6 @@
const Logger = require('../services/logger');
class BatchLogger extends Logger {
constructor (path, name) {
super(path, name);
}
log (job) {
return job.log(this.logger);
}

View File

@ -10,7 +10,7 @@ var EMPTY_QUEUE = true;
var MINUTE = 60 * 1000;
var SCHEDULE_INTERVAL = 1 * MINUTE;
function Batch(name, userDatabaseMetadataService, jobSubscriber, jobQueue, jobRunner, jobService, redisPool, logger) {
function Batch (name, userDatabaseMetadataService, jobSubscriber, jobQueue, jobRunner, jobService, redisPool, logger) {
EventEmitter.call(this);
this.name = name || 'batch';
this.userDatabaseMetadataService = userDatabaseMetadataService;
@ -52,7 +52,7 @@ Batch.prototype.start = function () {
};
function createJobHandler (name, userDatabaseMetadataService, hostScheduler, logger) {
return function onJobHandler(user) {
return function onJobHandler (user) {
userDatabaseMetadataService.getUserMetadata(user, function (err, userDatabaseMetadata) {
if (err) {
return logger.debug('Could not get host user=%s from %s. Reason: %s', user, name, err.message);
@ -61,7 +61,7 @@ function createJobHandler (name, userDatabaseMetadataService, hostScheduler, log
var host = userDatabaseMetadata.host;
logger.debug('[%s] onJobHandler(%s, %s)', name, user, host);
hostScheduler.add(host, user, function(err) {
hostScheduler.add(host, user, function (err) {
if (err) {
return logger.debug(
'Could not schedule host=%s user=%s from %s. Reason: %s', host, user, name, err.message
@ -172,15 +172,15 @@ Batch.prototype.drain = function (callback) {
Batch.prototype._drainJob = function (user, callback) {
var self = this;
var job_id = this.getWorkInProgressJob(user);
var jobId = this.getWorkInProgressJob(user);
if (!job_id) {
if (!jobId) {
return process.nextTick(function () {
return callback();
});
}
this.jobService.drain(job_id, function (err) {
this.jobService.drain(jobId, function (err) {
if (err && err.name === 'CancelNotAllowedError') {
return callback();
}
@ -189,12 +189,12 @@ Batch.prototype._drainJob = function (user, callback) {
return callback(err);
}
self.clearWorkInProgressJob(user, job_id, function (err) {
self.clearWorkInProgressJob(user, jobId, function (err) {
if (err) {
self.logger.debug(new Error('Could not clear job from work-in-progress list. Reason: ' + err.message));
}
self.jobQueue.enqueueFirst(user, job_id, callback);
self.jobQueue.enqueueFirst(user, jobId, callback);
});
});
};
@ -206,23 +206,22 @@ Batch.prototype.stop = function (callback) {
this.jobSubscriber.unsubscribe(callback);
};
/* Work in progress jobs */
Batch.prototype.setWorkInProgressJob = function(user, jobId, callback) {
Batch.prototype.setWorkInProgressJob = function (user, jobId, callback) {
this.workInProgressJobs[user] = jobId;
this.jobService.addWorkInProgressJob(user, jobId, callback);
};
Batch.prototype.getWorkInProgressJob = function(user) {
Batch.prototype.getWorkInProgressJob = function (user) {
return this.workInProgressJobs[user];
};
Batch.prototype.clearWorkInProgressJob = function(user, jobId, callback) {
Batch.prototype.clearWorkInProgressJob = function (user, jobId, callback) {
delete this.workInProgressJobs[user];
this.jobService.clearWorkInProgressJob(user, jobId, callback);
};
Batch.prototype.getWorkInProgressUsers = function() {
Batch.prototype.getWorkInProgressUsers = function () {
return Object.keys(this.workInProgressJobs);
};

View File

@ -20,7 +20,7 @@ module.exports = function batchFactory (metadataBackend, redisPool, name, statsd
var jobSubscriber = new JobSubscriber(redisPool);
var jobPublisher = new JobPublisher(redisPool);
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);
var queryRunner = new QueryRunner(userDatabaseMetadataService, logger);
var jobCanceller = new JobCanceller();

View File

@ -5,7 +5,7 @@ var REDIS_DB = 5;
var JobStatus = require('./job-status');
var queue = require('queue-async');
function JobBackend(metadataBackend, jobQueue, logger) {
function JobBackend (metadataBackend, jobQueue, logger) {
this.metadataBackend = metadataBackend;
this.jobQueue = jobQueue;
this.maxNumberOfQueuedJobs = global.settings.batch_max_queued_jobs || 64;
@ -14,13 +14,13 @@ function JobBackend(metadataBackend, jobQueue, logger) {
this.logger = logger;
}
function toRedisParams(job) {
function toRedisParams (job) {
var redisParams = [REDIS_PREFIX + job.job_id];
var obj = JSON.parse(JSON.stringify(job));
delete obj.job_id;
for (var property in obj) {
if (obj.hasOwnProperty(property)) {
if (Object.prototype.hasOwnProperty.call(obj, property)) {
redisParams.push(property);
if (property === 'query' && typeof obj[property] !== 'string') {
redisParams.push(JSON.stringify(obj[property]));
@ -33,7 +33,7 @@ function toRedisParams(job) {
return redisParams;
}
function toObject(job_id, redisParams, redisValues) {
function toObject (jobId, redisParams, redisValues) {
var obj = {};
redisParams.shift(); // job_id value
@ -52,29 +52,29 @@ function toObject(job_id, redisParams, redisValues) {
}
}
obj.job_id = job_id; // adds redisKey as object property
obj.job_id = jobId; // adds redisKey as object property
return obj;
}
function isJobFound(redisValues) {
function isJobFound (redisValues) {
return !!(redisValues[0] && redisValues[1] && redisValues[2] && redisValues[3] && redisValues[4]);
}
function getNotFoundError(job_id) {
var notFoundError = new Error('Job with id ' + job_id + ' not found');
function getNotFoundError (jobId) {
var notFoundError = new Error('Job with id ' + jobId + ' not found');
notFoundError.name = 'NotFoundError';
return notFoundError;
}
JobBackend.prototype.get = function (job_id, callback) {
if (!job_id) {
return callback(getNotFoundError(job_id));
JobBackend.prototype.get = function (jobId, callback) {
if (!jobId) {
return callback(getNotFoundError(jobId));
}
var self = this;
var redisParams = [
REDIS_PREFIX + job_id,
REDIS_PREFIX + jobId,
'user',
'status',
'query',
@ -90,16 +90,16 @@ JobBackend.prototype.get = function (job_id, callback) {
'dbuser'
];
self.metadataBackend.redisCmd(REDIS_DB, 'HMGET', redisParams , function (err, redisValues) {
self.metadataBackend.redisCmd(REDIS_DB, 'HMGET', redisParams, function (err, redisValues) {
if (err) {
return callback(err);
}
if (!isJobFound(redisValues)) {
return callback(getNotFoundError(job_id));
return callback(getNotFoundError(jobId));
}
var jobData = toObject(job_id, redisParams, redisValues);
var jobData = toObject(jobId, redisParams, redisValues);
callback(null, jobData);
});
@ -108,7 +108,7 @@ JobBackend.prototype.get = function (job_id, callback) {
JobBackend.prototype.create = function (job, callback) {
var self = this;
this.jobQueue.size(job.user, function(err, size) {
this.jobQueue.size(job.user, function (err, size) {
if (err) {
return callback(new Error('Failed to create job, could not determine user queue size'));
}
@ -146,7 +146,6 @@ JobBackend.prototype.update = function (job, callback) {
var self = this;
self.get(job.job_id, function (err) {
if (err) {
return callback(err);
}
@ -159,7 +158,7 @@ JobBackend.prototype.save = function (job, callback) {
var self = this;
var redisParams = toRedisParams(job);
self.metadataBackend.redisCmd(REDIS_DB, 'HMSET', redisParams , function (err) {
self.metadataBackend.redisCmd(REDIS_DB, 'HMSET', redisParams, function (err) {
if (err) {
return callback(err);
}
@ -284,7 +283,7 @@ JobBackend.prototype.setTTL = function (job, callback) {
return callback();
}
self.metadataBackend.redisCmd(REDIS_DB, 'EXPIRE', [ redisKey, this.inSecondsJobTTLAfterFinished ], callback);
self.metadataBackend.redisCmd(REDIS_DB, 'EXPIRE', [redisKey, this.inSecondsJobTTLAfterFinished], callback);
};
module.exports = JobBackend;

View File

@ -2,28 +2,27 @@
var PSQL = require('cartodb-psql');
function JobCanceller() {
function JobCanceller () {
}
module.exports = JobCanceller;
JobCanceller.prototype.cancel = function (job, callback) {
const dbConfiguration = {
host: job.data.host,
port: job.data.port,
dbname: job.data.dbname,
user: job.data.dbuser,
pass: job.data.pass,
pass: job.data.pass
};
doCancel(job.data.job_id, dbConfiguration, callback);
};
function doCancel(job_id, dbConfiguration, callback) {
function doCancel (jobId, dbConfiguration, callback) {
var pg = new PSQL(dbConfiguration);
getQueryPID(pg, job_id, function (err, pid) {
getQueryPID(pg, jobId, function (err, pid) {
if (err) {
return callback(err);
}
@ -46,10 +45,10 @@ function doCancel(job_id, dbConfiguration, callback) {
});
}
function getQueryPID(pg, job_id, callback) {
var getPIDQuery = "SELECT pid FROM pg_stat_activity WHERE query LIKE '/* " + job_id + " */%'";
function getQueryPID (pg, jobId, callback) {
var getPIDQuery = "SELECT pid FROM pg_stat_activity WHERE query LIKE '/* " + jobId + " */%'";
pg.query(getPIDQuery, function(err, result) {
pg.query(getPIDQuery, function (err, result) {
if (err) {
return callback(err);
}
@ -63,7 +62,7 @@ function getQueryPID(pg, job_id, callback) {
});
}
function doCancelQuery(pg, pid, callback) {
function doCancelQuery (pg, pid, callback) {
var cancelQuery = 'SELECT pg_cancel_backend(' + pid + ')';
pg.query(cancelQuery, function (err, result) {

View File

@ -2,7 +2,7 @@
var queueAsync = require('queue-async');
function JobQueue(metadataBackend, jobPublisher, logger) {
function JobQueue (metadataBackend, jobPublisher, logger) {
this.metadataBackend = metadataBackend;
this.jobPublisher = jobPublisher;
this.logger = logger;
@ -22,8 +22,8 @@ JobQueue.prototype.enqueue = function (user, jobId, callback) {
this.logger.debug('JobQueue.enqueue user=%s, jobId=%s', user, jobId);
this.metadataBackend.redisMultiCmd(QUEUE.DB, [
[ 'LPUSH', QUEUE.PREFIX + user, jobId ],
[ 'SADD', QUEUE.INDEX, user ]
['LPUSH', QUEUE.PREFIX + user, jobId],
['SADD', QUEUE.INDEX, user]
], function (err) {
if (err) {
return callback(err);
@ -35,7 +35,7 @@ JobQueue.prototype.enqueue = function (user, jobId, callback) {
};
JobQueue.prototype.size = function (user, callback) {
this.metadataBackend.redisCmd(QUEUE.DB, 'LLEN', [ QUEUE.PREFIX + user ], callback);
this.metadataBackend.redisCmd(QUEUE.DB, 'LLEN', [QUEUE.PREFIX + user], callback);
};
JobQueue.prototype.dequeue = function (user, callback) {
@ -49,10 +49,10 @@ JobQueue.prototype.dequeue = function (user, callback) {
].join('\n');
var redisParams = [
dequeueScript, //lua source code
dequeueScript, // lua source code
2, // Two "keys" to pass
QUEUE.PREFIX + user, //KEYS[1], the key of the queue
QUEUE.INDEX, //KEYS[2], the key of the index
QUEUE.PREFIX + user, // KEYS[1], the key of the queue
QUEUE.INDEX, // KEYS[2], the key of the index
user // ARGV[1] - value of the element to remove from the index
];
@ -65,8 +65,8 @@ JobQueue.prototype.dequeue = function (user, callback) {
JobQueue.prototype.enqueueFirst = function (user, jobId, callback) {
this.logger.debug('JobQueue.enqueueFirst user=%s, jobId=%s', user, jobId);
this.metadataBackend.redisMultiCmd(QUEUE.DB, [
[ 'RPUSH', QUEUE.PREFIX + user, jobId ],
[ 'SADD', QUEUE.INDEX, user ]
['RPUSH', QUEUE.PREFIX + user, jobId],
['SADD', QUEUE.INDEX, user]
], function (err) {
if (err) {
return callback(err);
@ -77,9 +77,8 @@ JobQueue.prototype.enqueueFirst = function (user, jobId, callback) {
}.bind(this));
};
JobQueue.prototype.getQueues = function (callback) {
this.metadataBackend.redisCmd(QUEUE.DB, 'SMEMBERS', [ QUEUE.INDEX ], function (err, queues) {
this.metadataBackend.redisCmd(QUEUE.DB, 'SMEMBERS', [QUEUE.INDEX], function (err, queues) {
if (err) {
return callback(err);
}
@ -112,7 +111,7 @@ JobQueue.prototype.scan = function (callback) {
var initialCursor = ['0'];
var users = {};
self._scan(initialCursor, users, function(err, users) {
self._scan(initialCursor, users, function (err, users) {
if (err) {
return callback(err);
}
@ -153,7 +152,7 @@ JobQueue.prototype.addToQueueIndex = function (users, callback) {
users.forEach(function (user) {
usersQueues.defer(function (user, callback) {
self.metadataBackend.redisCmd(QUEUE.DB, 'SADD', [ QUEUE.INDEX, user], callback);
self.metadataBackend.redisCmd(QUEUE.DB, 'SADD', [QUEUE.INDEX, user], callback);
}, user);
});

View File

@ -10,7 +10,7 @@ var REDIS_LIMITS = {
PREFIX: 'limits:batch:' // + username
};
function JobRunner(jobService, jobQueue, queryRunner, metadataBackend, statsdClient) {
function JobRunner (jobService, jobQueue, queryRunner, metadataBackend, statsdClient) {
this.jobService = jobService;
this.jobQueue = jobQueue;
this.queryRunner = queryRunner;
@ -18,18 +18,18 @@ function JobRunner(jobService, jobQueue, queryRunner, metadataBackend, statsdCli
this.statsdClient = statsdClient;
}
JobRunner.prototype.run = function (job_id, callback) {
JobRunner.prototype.run = function (jobId, callback) {
var self = this;
var profiler = new Profiler({ statsd_client: self.statsdClient });
profiler.start('sqlapi.batch.job');
self.jobService.get(job_id, function (err, job) {
self.jobService.get(jobId, function (err, job) {
if (err) {
return callback(err);
}
self.getQueryStatementTimeout(job.data.user, function(err, timeout) {
self.getQueryStatementTimeout(job.data.user, function (err, timeout) {
if (err) {
return callback(err);
}
@ -62,14 +62,18 @@ JobRunner.prototype.run = function (job_id, callback) {
});
};
JobRunner.prototype.getQueryStatementTimeout = function(username, callback) {
JobRunner.prototype.getQueryStatementTimeout = function (username, callback) {
var timeout = 12 * 3600 * 1000;
if (Number.isFinite(global.settings.batch_query_timeout)) {
timeout = global.settings.batch_query_timeout;
}
var batchLimitsKey = REDIS_LIMITS.PREFIX + username;
this.metadataBackend.redisCmd(REDIS_LIMITS.DB, 'HGET', [batchLimitsKey, 'timeout'], function(err, timeoutLimit) {
this.metadataBackend.redisCmd(REDIS_LIMITS.DB, 'HGET', [batchLimitsKey, 'timeout'], function (err, timeoutLimit) {
if (err) {
return callback(err);
}
if (timeoutLimit !== null && Number.isFinite(+timeoutLimit)) {
timeout = +timeoutLimit;
}
@ -137,7 +141,7 @@ JobRunner.prototype._run = function (job, query, timeout, profiler, callback) {
});
};
function cancelledByUser(err) {
function cancelledByUser (err) {
return errorCodes[err.code.toString()] === 'query_canceled' && err.message.match(/user.*request/);
}

View File

@ -3,7 +3,7 @@
var JobFactory = require('./models/job-factory');
var jobStatus = require('./job-status');
function JobService(jobBackend, jobCanceller, logger) {
function JobService (jobBackend, jobCanceller, logger) {
this.jobBackend = jobBackend;
this.jobCanceller = jobCanceller;
this.logger = logger;
@ -11,8 +11,8 @@ function JobService(jobBackend, jobCanceller, logger) {
module.exports = JobService;
JobService.prototype.get = function (job_id, callback) {
this.jobBackend.get(job_id, function (err, data) {
JobService.prototype.get = function (jobId, callback) {
this.jobBackend.get(jobId, function (err, data) {
if (err) {
return callback(err);
}
@ -68,10 +68,10 @@ JobService.prototype.save = function (job, callback) {
});
};
JobService.prototype.cancel = function (job_id, callback) {
JobService.prototype.cancel = function (jobId, callback) {
var self = this;
self.get(job_id, function (err, job) {
self.get(jobId, function (err, job) {
if (err) {
return callback(err);
}
@ -98,17 +98,17 @@ JobService.prototype.cancel = function (job_id, callback) {
});
};
JobService.prototype.drain = function (job_id, callback) {
JobService.prototype.drain = function (jobId, callback) {
var self = this;
self.get(job_id, function (err, job) {
self.get(jobId, function (err, job) {
if (err) {
return callback(err);
}
self.jobCanceller.cancel(job, function (err) {
if (err) {
self.logger.debug('There was an error while draining job %s, %s ', job_id, err);
self.logger.debug('There was an error while draining job %s, %s ', jobId, err);
return callback(err);
}

View File

@ -18,6 +18,6 @@ var finalStatus = [
JOB_STATUS_ENUM.FAILED,
JOB_STATUS_ENUM.UNKNOWN
];
module.exports.isFinal = function(status) {
module.exports.isFinal = function (status) {
return finalStatus.indexOf(status) !== -1;
};

View File

@ -9,7 +9,7 @@ var LOCK = {
TTL: 5000
};
function Locker(locker, ttl) {
function Locker (locker, ttl) {
EventEmitter.call(this);
this.locker = locker;
this.ttl = (Number.isFinite(ttl) && ttl > 0) ? ttl : LOCK.TTL;
@ -20,7 +20,7 @@ util.inherits(Locker, EventEmitter);
module.exports = Locker;
Locker.prototype.lock = function(resource, callback) {
Locker.prototype.lock = function (resource, callback) {
var self = this;
debug('Locker.lock(%s, %d)', resource, this.ttl);
this.locker.lock(resource, this.ttl, function (err, lock) {
@ -31,21 +31,21 @@ Locker.prototype.lock = function(resource, callback) {
});
};
Locker.prototype.unlock = function(resource, callback) {
Locker.prototype.unlock = function (resource, callback) {
var self = this;
debug('Locker.unlock(%s)', resource);
this.locker.unlock(resource, function(err) {
this.locker.unlock(resource, function (err) {
self.stopRenewal(resource);
return callback(err);
});
};
Locker.prototype.startRenewal = function(resource) {
Locker.prototype.startRenewal = function (resource) {
var self = this;
if (!this.intervalIds.hasOwnProperty(resource)) {
this.intervalIds[resource] = setInterval(function() {
if (!Object.prototype.hasOwnProperty.call(this.intervalIds, resource)) {
this.intervalIds[resource] = setInterval(function () {
debug('Trying to extend lock resource=%s', resource);
self.locker.lock(resource, self.ttl, function(err, _lock) {
self.locker.lock(resource, self.ttl, function (err, _lock) {
if (err) {
self.emit('error', err, resource);
return self.stopRenewal(resource);
@ -58,14 +58,14 @@ Locker.prototype.startRenewal = function(resource) {
}
};
Locker.prototype.stopRenewal = function(resource) {
if (this.intervalIds.hasOwnProperty(resource)) {
Locker.prototype.stopRenewal = function (resource) {
if (Object.prototype.hasOwnProperty.call(this.intervalIds, resource)) {
clearInterval(this.intervalIds[resource]);
delete this.intervalIds[resource];
}
};
module.exports.create = function createLocker(type, config) {
module.exports.create = function createLocker (type, config) {
if (type !== 'redis-distlock') {
throw new Error('Invalid type Locker type. Valid types are: "redis-distlock"');
}

View File

@ -8,7 +8,7 @@ var REDIS_DISTLOCK = {
var Redlock = require('redlock');
var debug = require('../../util/debug')('leader:redis-distlock');
function RedisDistlockLocker(redisPool) {
function RedisDistlockLocker (redisPool) {
this.pool = redisPool;
this.redlock = new Redlock([{}], {
// see http://redis.io/topics/distlock
@ -24,17 +24,17 @@ function RedisDistlockLocker(redisPool) {
module.exports = RedisDistlockLocker;
module.exports.type = 'redis-distlock';
function resourceId(resource) {
function resourceId (resource) {
return REDIS_DISTLOCK.PREFIX + resource;
}
RedisDistlockLocker.prototype.lock = function(resource, ttl, callback) {
RedisDistlockLocker.prototype.lock = function (resource, ttl, callback) {
var self = this;
debug('RedisDistlockLocker.lock(%s, %d)', resource, ttl);
var lockId = resourceId(resource);
var lock = this._getLock(lockId);
function acquireCallback(err, _lock) {
function acquireCallback (err, _lock) {
if (err) {
return callback(err);
}
@ -42,7 +42,7 @@ RedisDistlockLocker.prototype.lock = function(resource, ttl, callback) {
return callback(null, _lock);
}
if (lock) {
return this._tryExtend(lock, ttl, function(err, _lock) {
return this._tryExtend(lock, ttl, function (err, _lock) {
if (err) {
return self._tryAcquire(lockId, ttl, acquireCallback);
}
@ -54,7 +54,7 @@ RedisDistlockLocker.prototype.lock = function(resource, ttl, callback) {
}
};
RedisDistlockLocker.prototype.unlock = function(resource, callback) {
RedisDistlockLocker.prototype.unlock = function (resource, callback) {
var self = this;
var lock = this._getLock(resourceId(resource));
if (lock) {
@ -63,7 +63,7 @@ RedisDistlockLocker.prototype.unlock = function(resource, callback) {
return callback(err);
}
self.redlock.servers = [client];
return self.redlock.unlock(lock, function(err) {
return self.redlock.unlock(lock, function (err) {
self.pool.release(REDIS_DISTLOCK.DB, client);
return callback(err);
});
@ -71,39 +71,39 @@ RedisDistlockLocker.prototype.unlock = function(resource, callback) {
}
};
RedisDistlockLocker.prototype._getLock = function(resource) {
if (this._locks.hasOwnProperty(resource)) {
RedisDistlockLocker.prototype._getLock = function (resource) {
if (Object.prototype.hasOwnProperty.call(this._locks, resource)) {
return this._locks[resource];
}
return null;
};
RedisDistlockLocker.prototype._setLock = function(resource, lock) {
RedisDistlockLocker.prototype._setLock = function (resource, lock) {
this._locks[resource] = lock;
};
RedisDistlockLocker.prototype._tryExtend = function(lock, ttl, callback) {
RedisDistlockLocker.prototype._tryExtend = function (lock, ttl, callback) {
var self = this;
this.pool.acquire(REDIS_DISTLOCK.DB, function (err, client) {
if (err) {
return callback(err);
}
self.redlock.servers = [client];
return lock.extend(ttl, function(err, _lock) {
return lock.extend(ttl, function (err, _lock) {
self.pool.release(REDIS_DISTLOCK.DB, client);
return callback(err, _lock);
});
});
};
RedisDistlockLocker.prototype._tryAcquire = function(resource, ttl, callback) {
RedisDistlockLocker.prototype._tryAcquire = function (resource, ttl, callback) {
var self = this;
this.pool.acquire(REDIS_DISTLOCK.DB, function (err, client) {
if (err) {
return callback(err);
}
self.redlock.servers = [client];
return self.redlock.lock(resource, ttl, function(err, _lock) {
return self.redlock.lock(resource, ttl, function (err, _lock) {
self.pool.release(REDIS_DISTLOCK.DB, client);
return callback(err, _lock);
});

View File

@ -15,7 +15,7 @@ var QUEUE = {
}
};
function HostUserQueueMover(jobQueue, jobService, locker, redisPool) {
function HostUserQueueMover (jobQueue, jobService, locker, redisPool) {
this.jobQueue = jobQueue;
this.jobService = jobService;
this.locker = locker;
@ -24,11 +24,14 @@ function HostUserQueueMover(jobQueue, jobService, locker, redisPool) {
module.exports = HostUserQueueMover;
HostUserQueueMover.prototype.moveOldJobs = function(callback) {
HostUserQueueMover.prototype.moveOldJobs = function (callback) {
var self = this;
this.getOldQueues(function(err, hosts) {
this.getOldQueues(function (err, hosts) {
if (err) {
return callback(err);
}
var async = asyncQ(4);
hosts.forEach(function(host) {
hosts.forEach(function (host) {
async.defer(self.moveOldQueueJobs.bind(self), host);
});
@ -44,12 +47,12 @@ HostUserQueueMover.prototype.moveOldJobs = function(callback) {
});
};
HostUserQueueMover.prototype.moveOldQueueJobs = function(host, callback) {
HostUserQueueMover.prototype.moveOldQueueJobs = function (host, callback) {
var self = this;
// do forever, it does not throw a stack overflow
forever(
function (next) {
self.locker.lock(host, function(err) {
self.locker.lock(host, function (err) {
// we didn't get the lock for the host
if (err) {
debug('Could not lock host=%s. Reason: %s', host, err.message);
@ -68,16 +71,19 @@ HostUserQueueMover.prototype.moveOldQueueJobs = function(host, callback) {
);
};
//this.metadataBackend.redisCmd(QUEUE.DB, 'RPOP', [ QUEUE.PREFIX + user ], callback);
// this.metadataBackend.redisCmd(QUEUE.DB, 'RPOP', [ QUEUE.PREFIX + user ], callback);
HostUserQueueMover.prototype.processNextJob = function (host, callback) {
var self = this;
this.pool.acquire(QUEUE.OLD.DB, function(err, client) {
this.pool.acquire(QUEUE.OLD.DB, function (err, client) {
if (err) {
return callback(err);
}
client.lpop(QUEUE.OLD.PREFIX + host, function(err, jobId) {
client.lpop(QUEUE.OLD.PREFIX + host, function (err, jobId) {
if (err) {
return callback(err);
}
self.pool.release(QUEUE.OLD.DB, client);
debug('Found jobId=%s at queue=%s', jobId, host);
if (!jobId) {
@ -85,13 +91,13 @@ HostUserQueueMover.prototype.processNextJob = function (host, callback) {
emptyQueueError.name = 'EmptyQueue';
return callback(emptyQueueError);
}
self.jobService.get(jobId, function(err, job) {
self.jobService.get(jobId, function (err, job) {
if (err) {
debug(err);
return callback();
}
if (job) {
return self.jobQueue.enqueueFirst(job.data.user, jobId, function() {
return self.jobQueue.enqueueFirst(job.data.user, jobId, function () {
return callback();
});
}
@ -101,16 +107,16 @@ HostUserQueueMover.prototype.processNextJob = function (host, callback) {
});
};
HostUserQueueMover.prototype.getOldQueues = function(callback) {
HostUserQueueMover.prototype.getOldQueues = function (callback) {
var initialCursor = ['0'];
var hosts = {};
var self = this;
this.pool.acquire(QUEUE.OLD.DB, function(err, client) {
this.pool.acquire(QUEUE.OLD.DB, function (err, client) {
if (err) {
return callback(err);
}
self._getOldQueues(client, initialCursor, hosts, function(err, hosts) {
self._getOldQueues(client, initialCursor, hosts, function (err, hosts) {
self.pool.release(QUEUE.DB, client);
return callback(err, Object.keys(hosts));
});
@ -121,7 +127,7 @@ HostUserQueueMover.prototype._getOldQueues = function (client, cursor, hosts, ca
var self = this;
var redisParams = [cursor[0], 'MATCH', QUEUE.OLD.PREFIX + '*'];
client.scan(redisParams, function(err, currentCursor) {
client.scan(redisParams, function (err, currentCursor) {
if (err) {
return callback(null, hosts);
}

View File

@ -1,7 +1,5 @@
'use strict';
// jshint ignore:start
const debug = require('debug')('>');
debug.enabled = true;
const { promisify } = require('util');
@ -40,12 +38,12 @@ async function scan () {
const jobs = [];
const initialCursor = '0';
return await _scan(initialCursor, jobs);
return _scan(initialCursor, jobs);
};
async function _scan (cursor, jobs) {
const redisParams = [cursor, 'MATCH', `${JOBS.PREFIX}:*`];
const [ _cursor, _jobs ] = await redisCmd(JOBS.DB, 'SCAN', redisParams);
const [_cursor, _jobs] = await redisCmd(JOBS.DB, 'SCAN', redisParams);
if (_jobs && _jobs.length) {
jobs = jobs.concat(_jobs);
@ -55,7 +53,7 @@ async function _scan (cursor, jobs) {
return jobs;
}
return await _scan(_cursor, jobs);
return _scan(_cursor, jobs);
}
async function getJob (key) {
@ -66,12 +64,12 @@ async function getJob (key) {
'updated_at'
];
const redisParams = [ `${key}`, ...props ];
const redisParams = [`${key}`, ...props];
const values = await redisCmd(JOBS.DB, 'HMGET', redisParams);
const job = {};
for (const [ i, v ] of values.entries()) {
for (const [i, v] of values.entries()) {
job[props[i]] = v;
}
@ -79,7 +77,7 @@ async function getJob (key) {
}
async function removeJob (key) {
const redisParams = [ key ];
const redisParams = [key];
const done = await redisCmd(JOBS.DB, 'DEL', redisParams);
@ -90,14 +88,14 @@ async function main () {
const summary = {
found: 0,
removed: 0
}
};
try {
debug(`going to scan jobs`);
debug('going to scan jobs');
const jobKeys = await scan();
summary.found = jobKeys.length;
debug(`found "${jobKeys.length}" jobs`);
debug('--------------------------------------------------')
debug('--------------------------------------------------');
for (const key of jobKeys) {
debug(`fetching job "${key}"`);
@ -118,7 +116,7 @@ async function main () {
debug(`job "${key}" is younger than two days, keeping it`);
}
debug('--------------------------------------------------')
debug('--------------------------------------------------');
}
debug('summary:', summary);
@ -129,5 +127,3 @@ async function main () {
}
main().then(() => process.exit(0));
// jshint ignore:end

View File

@ -14,7 +14,7 @@ var mandatoryProperties = [
'user'
];
function JobBase(data) {
function JobBase (data) {
JobStateMachine.call(this);
var now = new Date().toISOString();
@ -117,6 +117,6 @@ JobBase.prototype.serialize = function () {
return data;
};
JobBase.prototype.log = function(/*logger*/) {
JobBase.prototype.log = function (/* logger */) {
return false;
};

View File

@ -4,9 +4,9 @@ var JobSimple = require('./job-simple');
var JobMultiple = require('./job-multiple');
var JobFallback = require('./job-fallback');
var Models = [ JobSimple, JobMultiple, JobFallback ];
var Models = [JobSimple, JobMultiple, JobFallback];
function JobFactory() {
function JobFactory () {
}
module.exports = JobFactory;

View File

@ -7,7 +7,7 @@ var QueryFallback = require('./query/query-fallback');
var MainFallback = require('./query/main-fallback');
var QueryFactory = require('./query/query-factory');
function JobFallback(jobDefinition) {
function JobFallback (jobDefinition) {
JobBase.call(this, jobDefinition);
this.init();
@ -69,7 +69,7 @@ JobFallback.is = function (query) {
JobFallback.prototype.init = function () {
for (var i = 0; i < this.data.query.query.length; i++) {
if (shouldInitStatus(this.data.query.query[i])){
if (shouldInitStatus(this.data.query.query[i])) {
this.data.query.query[i].status = JobStatus.PENDING;
}
if (shouldInitQueryFallbackStatus(this.data.query.query[i])) {
@ -86,15 +86,15 @@ JobFallback.prototype.init = function () {
}
};
function shouldInitStatus(jobOrQuery) {
function shouldInitStatus (jobOrQuery) {
return !jobOrQuery.status;
}
function shouldInitQueryFallbackStatus(query) {
function shouldInitQueryFallbackStatus (query) {
return (query.onsuccess || query.onerror) && !query.fallback_status;
}
function shouldInitFallbackStatus(job) {
function shouldInitFallbackStatus (job) {
return (job.query.onsuccess || job.query.onerror) && !job.fallback_status;
}
@ -112,7 +112,6 @@ JobFallback.prototype.hasNextQueryFromQueries = function () {
JobFallback.prototype.getNextQueryFromFallback = function () {
if (this.fallback && this.fallback.hasNextQuery(this.data)) {
return this.fallback.getNextQuery(this.data);
}
};
@ -186,11 +185,10 @@ JobFallback.prototype.setFallbackStatus = function (status, job, hasChanged) {
};
JobFallback.prototype.shiftStatus = function (status, hasChanged) {
// jshint maxcomplexity: 7
if (hasChanged.appliedToFallback) {
if (!this.hasNextQueryFromQueries() && (status === JobStatus.DONE || status === JobStatus.FAILED)) {
status = this.getLastFinishedStatus();
} else if (status === JobStatus.DONE || status === JobStatus.FAILED){
} else if (status === JobStatus.DONE || status === JobStatus.FAILED) {
status = JobStatus.PENDING;
}
} else if (this.hasNextQueryFromQueries() && status !== JobStatus.RUNNING) {
@ -207,7 +205,7 @@ JobFallback.prototype.getLastFinishedStatus = function () {
}.bind(this), JobStatus.DONE);
};
JobFallback.prototype.log = function(logger) {
JobFallback.prototype.log = function (logger) {
if (!isFinished(this)) {
return false;
}
@ -268,12 +266,12 @@ function parseQueryId (queryId) {
return null;
}
function elapsedTime (started_at, ended_at) {
if (!started_at || !ended_at) {
function elapsedTime (startedAt, endedAt) {
if (!startedAt || !endedAt) {
return;
}
var start = new Date(started_at);
var end = new Date(ended_at);
var start = new Date(startedAt);
var end = new Date(endedAt);
return end.getTime() - start.getTime();
}

View File

@ -4,7 +4,7 @@ var util = require('util');
var JobBase = require('./job-base');
var jobStatus = require('../job-status');
function JobMultiple(jobDefinition) {
function JobMultiple (jobDefinition) {
JobBase.call(this, jobDefinition);
this.init();
@ -33,7 +33,6 @@ JobMultiple.is = function (query) {
};
JobMultiple.prototype.init = function () {
if (!this.data.status) {
this.data.status = jobStatus.PENDING;
}

View File

@ -4,7 +4,7 @@ var util = require('util');
var JobBase = require('./job-base');
var jobStatus = require('../job-status');
function JobSimple(jobDefinition) {
function JobSimple (jobDefinition) {
JobBase.call(this, jobDefinition);
if (!this.data.status) {

View File

@ -20,11 +20,11 @@ function JobStateMachine () {
module.exports = JobStateMachine;
JobStateMachine.prototype.isValidTransition = function (initialStatus, finalStatus) {
var transition = [ initialStatus, finalStatus ];
var transition = [initialStatus, finalStatus];
for (var i = 0; i < validStatusTransitions.length; i++) {
try {
assert.deepEqual(transition, validStatusTransitions[i]);
assert.deepStrictEqual(transition, validStatusTransitions[i]);
return true;
} catch (e) {
continue;

View File

@ -4,7 +4,7 @@ var util = require('util');
var QueryBase = require('./query-base');
var jobStatus = require('../../job-status');
function Fallback(index) {
function Fallback (index) {
QueryBase.call(this, index);
}
util.inherits(Fallback, QueryBase);

View File

@ -4,7 +4,7 @@ var util = require('util');
var QueryBase = require('./query-base');
var jobStatus = require('../../job-status');
function MainFallback() {
function MainFallback () {
QueryBase.call(this);
}
util.inherits(MainFallback, QueryBase);

View File

@ -3,7 +3,7 @@
var util = require('util');
var JobStateMachine = require('../job-state-machine');
function QueryBase(index) {
function QueryBase (index) {
JobStateMachine.call(this);
this.index = index;

View File

@ -2,7 +2,7 @@
var QueryFallback = require('./query-fallback');
function QueryFactory() {
function QueryFactory () {
}
module.exports = QueryFactory;

View File

@ -6,7 +6,7 @@ var Query = require('./query');
var Fallback = require('./fallback');
var jobStatus = require('../../job-status');
function QueryFallback(job, index) {
function QueryFallback (job, index) {
QueryBase.call(this, index);
this.init(job, index);
@ -40,7 +40,6 @@ QueryFallback.prototype.getNextQuery = function (job) {
};
QueryFallback.prototype.setStatus = function (status, job, previous, errorMesssage) {
// jshint maxcomplexity: 9
var isValid = false;
var appliedToFallback = false;

View File

@ -4,7 +4,7 @@ var util = require('util');
var QueryBase = require('./query-base');
var jobStatus = require('../../job-status');
function Query(index) {
function Query (index) {
QueryBase.call(this, index);
}
util.inherits(Query, QueryBase);

View File

@ -4,7 +4,7 @@ var Channel = require('./channel');
var debug = require('./../util/debug')('pubsub:publisher');
var error = require('./../util/debug')('pubsub:publisher:error');
function JobPublisher(pool) {
function JobPublisher (pool) {
this.pool = pool;
}

View File

@ -4,7 +4,7 @@ var Channel = require('./channel');
var debug = require('./../util/debug')('pubsub:subscriber');
var error = require('./../util/debug')('pubsub:subscriber:error');
function JobSubscriber(pool) {
function JobSubscriber (pool) {
this.pool = pool;
}
@ -13,7 +13,7 @@ module.exports = JobSubscriber;
JobSubscriber.prototype.subscribe = function (onJobHandler, callback) {
var self = this;
self.pool.acquire(Channel.DB, function(err, client) {
self.pool.acquire(Channel.DB, function (err, client) {
if (err) {
if (callback) {
callback(err);

View File

@ -2,7 +2,7 @@
var PSQL = require('cartodb-psql');
function QueryRunner(userDatabaseMetadataService, logger) {
function QueryRunner (userDatabaseMetadataService, logger) {
this.userDatabaseMetadataService = userDatabaseMetadataService;
this.logger = logger;
}
@ -13,9 +13,9 @@ function hasDBParams (dbparams) {
return (dbparams.user && dbparams.host && dbparams.port && dbparams.dbname && dbparams.pass);
}
QueryRunner.prototype.run = function (job_id, sql, user, timeout, dbparams, callback) {
QueryRunner.prototype.run = function (jobId, sql, user, timeout, dbparams, callback) {
if (hasDBParams(dbparams)) {
return this._run(dbparams, job_id, sql, timeout, callback);
return this._run(dbparams, jobId, sql, timeout, callback);
}
const dbConfigurationError = new Error('Batch Job DB misconfiguration');
@ -23,8 +23,8 @@ QueryRunner.prototype.run = function (job_id, sql, user, timeout, dbparams, call
return callback(dbConfigurationError);
};
QueryRunner.prototype._run = function (dbparams, job_id, sql, timeout, callback) {
QueryRunner.prototype._run = function (dbparams, jobId, sql, timeout, callback) {
var pg = new PSQL(dbparams);
this.logger.debug('Running query [timeout=%d] %s', timeout, sql);
pg.query(`/* ${job_id} */ ${sql}`, callback, false, timeout);
pg.query(`/* ${jobId} */ ${sql}`, callback, false, timeout);
};

View File

@ -1,11 +1,11 @@
'use strict';
function FixedCapacity(capacity) {
function FixedCapacity (capacity) {
this.capacity = Math.max(1, capacity);
}
module.exports = FixedCapacity;
FixedCapacity.prototype.getCapacity = function(callback) {
FixedCapacity.prototype.getCapacity = function (callback) {
return callback(null, this.capacity);
};

View File

@ -4,15 +4,15 @@ var util = require('util');
var debug = require('../../util/debug')('capacity-http-load');
var HttpSimpleCapacity = require('./http-simple');
function HttpLoadCapacity(host, capacityEndpoint) {
function HttpLoadCapacity (host, capacityEndpoint) {
HttpSimpleCapacity.call(this, host, capacityEndpoint);
}
util.inherits(HttpLoadCapacity, HttpSimpleCapacity);
module.exports = HttpLoadCapacity;
HttpLoadCapacity.prototype.getCapacity = function(callback) {
this.getResponse(function(err, values) {
HttpLoadCapacity.prototype.getCapacity = function (callback) {
this.getResponse(function (err, values) {
var capacity = 1;
if (err) {

View File

@ -3,7 +3,7 @@
var request = require('request');
var debug = require('../../util/debug')('capacity-http-simple');
function HttpSimpleCapacity(host, capacityEndpoint) {
function HttpSimpleCapacity (host, capacityEndpoint) {
this.host = host;
this.capacityEndpoint = capacityEndpoint;
@ -13,8 +13,8 @@ function HttpSimpleCapacity(host, capacityEndpoint) {
module.exports = HttpSimpleCapacity;
HttpSimpleCapacity.prototype.getCapacity = function(callback) {
this.getResponse(function(err, values) {
HttpSimpleCapacity.prototype.getCapacity = function (callback) {
this.getResponse(function (err, values) {
var capacity = 1;
if (err) {
@ -31,7 +31,7 @@ HttpSimpleCapacity.prototype.getCapacity = function(callback) {
}.bind(this));
};
HttpSimpleCapacity.prototype.getResponse = function(callback) {
HttpSimpleCapacity.prototype.getResponse = function (callback) {
var requestParams = {
method: 'POST',
url: this.capacityEndpoint,
@ -46,7 +46,7 @@ HttpSimpleCapacity.prototype.getResponse = function(callback) {
return callback(null, this.lastResponse);
}
request.post(requestParams, function(err, res, jsonRes) {
request.post(requestParams, function (err, res, jsonRes) {
if (err) {
return callback(err);
}

View File

@ -8,11 +8,11 @@ var FixedCapacity = require('./capacity/fixed');
var HttpSimpleCapacity = require('./capacity/http-simple');
var HttpLoadCapacity = require('./capacity/http-load');
function HostScheduler(name, taskRunner, redisPool) {
function HostScheduler (name, taskRunner, redisPool) {
this.name = name || 'scheduler';
this.taskRunner = taskRunner;
this.locker = Locker.create('redis-distlock', { pool: redisPool });
this.locker.on('error', function(err, host) {
this.locker.on('error', function (err, host) {
debug('[%s] Locker.error %s', this.name, err.message);
this.unlock(host);
}.bind(this));
@ -22,8 +22,8 @@ function HostScheduler(name, taskRunner, redisPool) {
module.exports = HostScheduler;
HostScheduler.prototype.add = function(host, user, callback) {
this.lock(host, function(err, scheduler) {
HostScheduler.prototype.add = function (host, user, callback) {
this.lock(host, function (err, scheduler) {
if (err) {
debug('[%s] Could not lock host=%s', this.name, host);
return callback(err);
@ -35,7 +35,7 @@ HostScheduler.prototype.add = function(host, user, callback) {
}.bind(this));
};
HostScheduler.prototype.getCapacityProvider = function(host) {
HostScheduler.prototype.getCapacityProvider = function (host) {
var strategy = global.settings.batch_capacity_strategy;
if (strategy === 'http-simple' || strategy === 'http-load') {
@ -55,16 +55,16 @@ HostScheduler.prototype.getCapacityProvider = function(host) {
return new FixedCapacity(fixedCapacity);
};
HostScheduler.prototype.lock = function(host, callback) {
HostScheduler.prototype.lock = function (host, callback) {
debug('[%s] lock(%s)', this.name, host);
var self = this;
this.locker.lock(host, function(err) {
this.locker.lock(host, function (err) {
if (err) {
debug('[%s] Could not lock host=%s. Reason: %s', self.name, host, err.message);
return callback(err);
}
if (!self.schedulers.hasOwnProperty(host)) {
if (!Object.prototype.hasOwnProperty.call(self.schedulers, host)) {
var scheduler = new Scheduler(self.getCapacityProvider(host), self.taskRunner);
scheduler.on('done', self.unlock.bind(self, host));
self.schedulers[host] = scheduler;
@ -75,9 +75,9 @@ HostScheduler.prototype.lock = function(host, callback) {
});
};
HostScheduler.prototype.unlock = function(host) {
HostScheduler.prototype.unlock = function (host) {
debug('[%s] unlock(%s)', this.name, host);
if (this.schedulers.hasOwnProperty(host)) {
if (Object.prototype.hasOwnProperty.call(this.schedulers, host)) {
// TODO stop scheduler?
delete this.schedulers[host];
}

View File

@ -13,14 +13,14 @@ var debug = require('../util/debug')('scheduler');
var forever = require('../util/forever');
function Scheduler(capacity, taskRunner) {
function Scheduler (capacity, taskRunner) {
EventEmitter.call(this);
debug('new Scheduler');
this.taskRunner = taskRunner;
this.capacity = capacity;
this.tasks = [];
this.users = {};
this.tasksTree = new RBTree(function(taskEntityA, taskEntityB) {
this.tasksTree = new RBTree(function (taskEntityA, taskEntityB) {
// if the user is the same it's the same entity
if (taskEntityA.user === taskEntityB.user) {
return 0;
@ -44,7 +44,7 @@ util.inherits(Scheduler, EventEmitter);
module.exports = Scheduler;
Scheduler.prototype.add = function(user) {
Scheduler.prototype.add = function (user) {
debug('add(%s)', user);
var taskEntity = this.users[user];
if (taskEntity) {
@ -67,7 +67,7 @@ Scheduler.prototype.add = function(user) {
}
};
Scheduler.prototype.schedule = function() {
Scheduler.prototype.schedule = function () {
if (this.running) {
return true;
}
@ -77,7 +77,7 @@ Scheduler.prototype.schedule = function() {
forever(
function (next) {
debug('Waiting for task');
self.acquire(function(err, taskEntity) {
self.acquire(function (_err, taskEntity) {
debug('Acquired user=%j', taskEntity);
if (!taskEntity) {
@ -88,7 +88,7 @@ Scheduler.prototype.schedule = function() {
taskEntity.running();
debug('Running task for user=%s', taskEntity.user);
self.taskRunner.run(taskEntity.user, function(err, userQueueIsEmpty) {
self.taskRunner.run(taskEntity.user, function (err, userQueueIsEmpty) {
debug('Run task=%j, done=%s', taskEntity, userQueueIsEmpty);
taskEntity.ran(userQueueIsEmpty);
self.release(err, taskEntity);
@ -110,7 +110,7 @@ Scheduler.prototype.schedule = function() {
return false;
};
Scheduler.prototype.acquire = function(callback) {
Scheduler.prototype.acquire = function (callback) {
this.removeAllListeners('add');
this.removeAllListeners('release');
@ -119,7 +119,7 @@ Scheduler.prototype.acquire = function(callback) {
}
var self = this;
this.capacity.getCapacity(function(err, capacity) {
this.capacity.getCapacity(function (err, capacity) {
if (err) {
return callback(err);
}
@ -128,11 +128,11 @@ Scheduler.prototype.acquire = function(callback) {
var running = self.tasks.filter(is(STATUS.RUNNING));
debug('[capacity=%d, running=%d] candidates=%j', capacity, running.length, self.tasks);
self.once('add', function() {
self.once('add', function () {
debug('Got a new task');
self.acquire(callback);
});
self.once('release', function() {
self.once('release', function () {
debug('Slot was released');
self.acquire(callback);
});
@ -157,7 +157,7 @@ Scheduler.prototype.acquire = function(callback) {
});
};
Scheduler.prototype.release = function(err, taskEntity) {
Scheduler.prototype.release = function (_err, taskEntity) {
debug('Released %j', taskEntity);
if (taskEntity.is(STATUS.PENDING)) {
this.tasksTree.insert(taskEntity);
@ -165,7 +165,6 @@ Scheduler.prototype.release = function(err, taskEntity) {
this.emit('release');
};
/* Task entities */
var STATUS = {
@ -174,28 +173,28 @@ var STATUS = {
DONE: 'done'
};
function TaskEntity(user, createdAt) {
function TaskEntity (user, createdAt) {
this.user = user;
this.createdAt = createdAt;
this.status = STATUS.PENDING;
this.jobs = 0;
}
TaskEntity.prototype.is = function(status) {
TaskEntity.prototype.is = function (status) {
return this.status === status;
};
TaskEntity.prototype.running = function() {
TaskEntity.prototype.running = function () {
this.status = STATUS.RUNNING;
};
TaskEntity.prototype.ran = function(userQueueIsEmpty) {
TaskEntity.prototype.ran = function (userQueueIsEmpty) {
this.jobs++;
this.status = userQueueIsEmpty ? STATUS.DONE : STATUS.PENDING;
};
function is(status) {
return function(taskEntity) {
function is (status) {
return function (taskEntity) {
return taskEntity.is(status);
};
}

View File

@ -1,6 +1,6 @@
'use strict';
function UserDatabaseMetadataService(metadataBackend) {
function UserDatabaseMetadataService (metadataBackend) {
this.metadataBackend = metadataBackend;
}

View File

@ -1,7 +1,7 @@
'use strict';
module.exports = function forever(fn, done) {
function next(err) {
module.exports = function forever (fn, done) {
function next (err) {
if (err) {
return done(err);
}

View File

@ -1,33 +1,33 @@
'use strict';
function ArrayBufferSer(type, data, options) {
if(type === undefined) {
throw "ArrayBufferSer should be created with a type";
}
this.options = options || {};
this._initFunctions();
this.headerSize = 8;
this.data = data;
this.type = type = Math.min(type, ArrayBufferSer.BUFFER);
var size = this._sizeFor(this.headerSize, data);
this.buffer = new Buffer(this.headerSize + size);
this.buffer.writeUInt32BE(type, 0); // this could be one byte but for byte padding is better to be 4 bytes
this.buffer.writeUInt32BE(size, 4);
this.offset = this.headerSize;
var w = this.writeFn[type];
var i;
if(!this.options.delta) {
for(i = 0; i < data.length; ++i) {
this[w](data[i]);
function ArrayBufferSer (type, data, options) {
if (type === undefined) {
throw new Error('ArrayBufferSer should be created with a type');
}
} else {
this[w](data[0]);
for(i = 1; i < data.length; ++i) {
this[w](data[i] - data[i - 1]);
this.options = options || {};
this._initFunctions();
this.headerSize = 8;
this.data = data;
this.type = type = Math.min(type, ArrayBufferSer.BUFFER);
var size = this._sizeFor(this.headerSize, data);
this.buffer = Buffer.alloc(this.headerSize + size);
this.buffer.writeUInt32BE(type, 0); // this could be one byte but for byte padding is better to be 4 bytes
this.buffer.writeUInt32BE(size, 4);
this.offset = this.headerSize;
var w = this.writeFn[type];
var i;
if (!this.options.delta) {
for (i = 0; i < data.length; ++i) {
this[w](data[i]);
}
} else {
this[w](data[0]);
for (i = 1; i < data.length; ++i) {
this[w](data[i] - data[i - 1]);
}
}
}
}
//
@ -41,126 +41,124 @@ ArrayBufferSer.UINT16 = 5;
ArrayBufferSer.INT32 = 6;
ArrayBufferSer.UINT32 = 7;
ArrayBufferSer.FLOAT32 = 8;
//ArrayBufferSer.FLOAT64 = 9; not supported
// ArrayBufferSer.FLOAT64 = 9; not supported
ArrayBufferSer.STRING = 10;
ArrayBufferSer.BUFFER = 11;
ArrayBufferSer.MAX_PADDING = ArrayBufferSer.INT32;
ArrayBufferSer.typeNames = {
'int8': ArrayBufferSer.INT8,
'uint8': ArrayBufferSer.UINT8,
'uintclamp': ArrayBufferSer.UINT8_CLAMP,
'int16': ArrayBufferSer.INT16,
'uint16': ArrayBufferSer.UINT16,
'int32': ArrayBufferSer.INT32,
'uint32': ArrayBufferSer.UINT32,
'float32': ArrayBufferSer.FLOAT32,
'string': ArrayBufferSer.STRING,
'buffer': ArrayBufferSer.BUFFER
int8: ArrayBufferSer.INT8,
uint8: ArrayBufferSer.UINT8,
uintclamp: ArrayBufferSer.UINT8_CLAMP,
int16: ArrayBufferSer.INT16,
uint16: ArrayBufferSer.UINT16,
int32: ArrayBufferSer.INT32,
uint32: ArrayBufferSer.UINT32,
float32: ArrayBufferSer.FLOAT32,
string: ArrayBufferSer.STRING,
buffer: ArrayBufferSer.BUFFER
};
ArrayBufferSer.prototype = {
// 0 not used
sizes: [NaN, 1, 1, 1, 2, 2, 4, 4, 4, 8],
// 0 not used
sizes: [NaN, 1, 1, 1, 2, 2, 4, 4, 4, 8],
_paddingFor: function(off, type) {
var s = this.sizes[type];
if(s) {
var r = off % s;
return r === 0 ? 0 : s - r;
_paddingFor: function (off, type) {
var s = this.sizes[type];
if (s) {
var r = off % s;
return r === 0 ? 0 : s - r;
}
return 0;
},
_sizeFor: function (offset, t) {
var self = this;
var s = this.sizes[this.type];
if (s) {
return s * t.length;
}
s = 0;
if (this.type === ArrayBufferSer.STRING) {
// calculate size with padding
t.forEach(function (arr) {
var pad = self._paddingFor(offset, ArrayBufferSer.MAX_PADDING);
s += pad;
offset += pad;
var len = (self.headerSize + arr.length * 2);
s += len;
offset += len;
});
} else {
t.forEach(function (arr) {
var pad = self._paddingFor(offset, ArrayBufferSer.MAX_PADDING);
s += pad;
offset += pad;
s += arr.getSize();
offset += arr.getSize();
});
}
return s;
},
getDataSize: function () {
return this._sizeFor(0, this.data);
},
getSize: function () {
return this.headerSize + this._sizeFor(this.headerSize, this.data);
},
writeFn: [
'',
'writeInt8',
'writeUInt8',
'writeUInt8Clamp',
'writeInt16LE',
'writeUInt16LE',
'writeUInt32LE',
'writeUInt32LE',
'writeFloatLE',
'writeDoubleLE',
'writeString',
'writteBuffer'
],
_initFunctions: function () {
var self = this;
this.writeFn.forEach(function (fn) {
if (self[fn] === undefined) {
self[fn] = function (d) {
self.buffer[fn](d, self.offset);
self.offset += self.sizes[self.type];
};
}
});
},
writeUInt8Clamp: function (c) {
this.buffer.writeUInt8(Math.min(255, c), this.offset);
this.offset += 1;
},
writeString: function (s) {
var arr = [];
for (var i = 0, len = s.length; i < len; ++i) {
arr.push(s.charCodeAt(i));
}
var str = new ArrayBufferSer(ArrayBufferSer.UINT16, arr);
this.writteBuffer(str);
},
writteBuffer: function (b) {
this.offset += this._paddingFor(this.offset, ArrayBufferSer.MAX_PADDING);
// copy header
b.buffer.copy(this.buffer, this.offset);
this.offset += b.buffer.length;
}
return 0;
},
_sizeFor: function(offset, t) {
var self = this;
var s = this.sizes[this.type];
if(s) {
return s*t.length;
}
s = 0;
if(this.type === ArrayBufferSer.STRING) {
// calculate size with padding
t.forEach(function(arr) {
var pad = self._paddingFor(offset, ArrayBufferSer.MAX_PADDING);
s += pad;
offset += pad;
var len = (self.headerSize + arr.length*2);
s += len;
offset += len;
});
} else {
t.forEach(function(arr) {
var pad = self._paddingFor(offset, ArrayBufferSer.MAX_PADDING);
s += pad;
offset += pad;
s += arr.getSize();
offset += arr.getSize();
});
}
return s;
},
getDataSize: function() {
return this._sizeFor(0, this.data);
},
getSize: function() {
return this.headerSize + this._sizeFor(this.headerSize, this.data);
},
writeFn: [
'',
'writeInt8',
'writeUInt8',
'writeUInt8Clamp',
'writeInt16LE',
'writeUInt16LE',
'writeUInt32LE',
'writeUInt32LE',
'writeFloatLE',
'writeDoubleLE',
'writeString',
'writteBuffer'
],
_initFunctions: function() {
var self = this;
this.writeFn.forEach(function(fn) {
if(self[fn] === undefined) {
self[fn] = function(d) {
self.buffer[fn](d, self.offset);
self.offset += self.sizes[self.type];
};
}
});
},
writeUInt8Clamp: function(c) {
this.buffer.writeUInt8(Math.min(255, c), this.offset);
this.offset += 1;
},
writeString: function(s) {
var arr = [];
for(var i = 0, len = s.length; i < len; ++i) {
arr.push(s.charCodeAt(i));
}
var str = new ArrayBufferSer(ArrayBufferSer.UINT16, arr);
this.writteBuffer(str);
},
writteBuffer: function(b) {
this.offset += this._paddingFor(this.offset, ArrayBufferSer.MAX_PADDING);
// copy header
b.buffer.copy(this.buffer, this.offset);
this.offset += b.buffer.length;
}
};
module.exports = ArrayBufferSer;

View File

@ -5,7 +5,7 @@
* of request headers
*/
function CartodbRequest() {
function CartodbRequest () {
}
module.exports = CartodbRequest;
@ -13,27 +13,27 @@ module.exports = CartodbRequest;
/**
* If the request contains the user use it, if not guess from the host
*/
CartodbRequest.prototype.userByReq = function(req) {
CartodbRequest.prototype.userByReq = function (req) {
if (req.params.user) {
return req.params.user;
}
return userByHostName(req.headers.host);
};
var re_userFromHost = new RegExp(
var userFromHostRegex = new RegExp(
global.settings.user_from_host || '^([^\\.]+)\\.' // would extract "strk" from "strk.cartodb.com"
);
function userByHostName(host) {
var mat = host.match(re_userFromHost);
function userByHostName (host) {
var mat = host.match(userFromHostRegex);
if (!mat) {
console.error("ERROR: user pattern '" + re_userFromHost + "' does not match hostname '" + host + "'");
console.error("ERROR: user pattern '" + userFromHostRegex + "' does not match hostname '" + host + "'");
return;
}
if (mat.length !== 2) {
console.error(
"ERROR: pattern '" + re_userFromHost + "' gave unexpected matches against '" + host + "': " + mat
"ERROR: pattern '" + userFromHostRegex + "' gave unexpected matches against '" + host + "': " + mat
);
return;
}

View File

@ -1,12 +1,13 @@
'use strict';
var fs = require("fs");
var fs = require('fs');
const path = require('path');
var formats = {};
function formatFilesWithPath(dir) {
var formatDir = __dirname + '/' + dir;
return fs.readdirSync(formatDir).map(function(formatFile) {
return formatDir + '/' + formatFile;
function formatFilesWithPath (dir) {
var formatDir = path.join(__dirname, dir);
return fs.readdirSync(formatDir).map(function (formatFile) {
return path.join(formatDir, formatFile);
});
}
@ -14,7 +15,7 @@ var formatFilesPaths = []
.concat(formatFilesWithPath('ogr'))
.concat(formatFilesWithPath('pg'));
formatFilesPaths.forEach(function(file) {
formatFilesPaths.forEach(function (file) {
var format = require(file);
formats[format.prototype.id] = format;
});

View File

@ -10,337 +10,330 @@ var spawn = require('child_process').spawn;
// Keeps track of what's waiting baking for export
var bakingExports = {};
function OgrFormat(id) {
this.id = id;
function OgrFormat (id) {
this.id = id;
}
OgrFormat.prototype = {
id: "ogr",
id: 'ogr',
is_file: true,
is_file: true,
getQuery: function(/*sql, options*/) {
return null; // dont execute the query
},
getQuery: function (/* sql, options */) {
return null; // dont execute the query
},
transform: function(/*result, options, callback*/) {
throw "should not be called for file formats";
},
transform: function (/* result, options, callback */) {
throw new Error('should not be called for file formats');
},
getContentType: function(){ return this._contentType; },
getContentType: function () { return this._contentType; },
getFileExtension: function(){ return this._fileExtension; },
getFileExtension: function () { return this._fileExtension; },
getKey: function(options) {
return [this.id,
options.dbopts.dbname,
options.dbopts.user,
options.gn,
this.generateMD5(options.filename),
this.generateMD5(options.sql)].concat(options.skipfields).join(':');
},
getKey: function (options) {
return [this.id,
options.dbopts.dbname,
options.dbopts.user,
options.gn,
this.generateMD5(options.filename),
this.generateMD5(options.sql)].concat(options.skipfields).join(':');
},
generateMD5: function (data){
var hash = crypto.createHash('md5');
hash.update(data);
return hash.digest('hex');
}
generateMD5: function (data) {
var hash = crypto.createHash('md5');
hash.update(data);
return hash.digest('hex');
}
};
// Internal function usable by all OGR-driven outputs
OgrFormat.prototype.toOGR = function(options, out_format, out_filename, callback) {
OgrFormat.prototype.toOGR = function (options, outFormat, outFilename, callback) {
// var gcol = options.gn;
var sql = options.sql;
var skipfields = options.skipfields;
var outLayername = options.filename;
//var gcol = options.gn;
var sql = options.sql;
var skipfields = options.skipfields;
var out_layername = options.filename;
var dbopts = options.dbopts;
var dbopts = options.dbopts;
var ogr2ogr = global.settings.ogr2ogrCommand || 'ogr2ogr';
var dbhost = dbopts.host;
var dbport = dbopts.port;
var dbuser = dbopts.user;
var dbpass = dbopts.pass;
var dbname = dbopts.dbname;
var ogr2ogr = global.settings.ogr2ogrCommand || 'ogr2ogr';
var dbhost = dbopts.host;
var dbport = dbopts.port;
var dbuser = dbopts.user;
var dbpass = dbopts.pass;
var dbname = dbopts.dbname;
var timeout = options.timeout;
var timeout = options.timeout;
var that = this;
var that = this;
var columns = [];
var geocol;
var pg;
// Drop ending semicolon (ogr doens't like it)
sql = sql.replace(/;\s*$/, '');
var columns = [];
var geocol;
var pg;
// Drop ending semicolon (ogr doens't like it)
sql = sql.replace(/;\s*$/, '');
// Sort comparator that moves the "the_geom" field (when present) to the
// front of the column list; all other field pairs keep their relative order.
const theGeomFirst = (fieldA, fieldB) => {
    if (fieldA.name === 'the_geom') {
        return -1;
    }
    return fieldB.name === 'the_geom' ? 1 : 0;
};
const theGeomFirst = (fieldA, fieldB) => {
if (fieldA.name === 'the_geom') {
return -1;
}
if (fieldB.name === 'the_geom') {
return 1;
}
return 0;
};
step(
step (
function fetchColumns () {
var colsql = 'SELECT * FROM (' + sql + ') as _cartodbsqlapi LIMIT 0';
pg = new PSQL(dbopts);
pg.query(colsql, this);
},
function findSRS (err, result) {
if (err) {
throw err;
}
function fetchColumns() {
var colsql = 'SELECT * FROM (' + sql + ') as _cartodbsqlapi LIMIT 0';
pg = new PSQL(dbopts);
pg.query(colsql, this);
},
function findSRS(err, result) {
if (err) {
throw err;
}
var needSRS = that._needSRS;
var needSRS = that._needSRS;
columns = result.fields
// skip columns
.filter(field => skipfields.indexOf(field.name) === -1)
// put "the_geom" first (if exists)
.sort(theGeomFirst)
// get first geometry to calculate SRID ("the_geom" if exists)
.map(field => {
if (needSRS && !geocol && pg.typeName(field.dataTypeID) === 'geometry') {
geocol = field.name;
}
columns = result.fields
// skip columns
.filter(field => skipfields.indexOf(field.name) === -1)
// put "the_geom" first (if exists)
.sort(theGeomFirst)
// get first geometry to calculate SRID ("the_geom" if exists)
.map(field => {
if (needSRS && !geocol && pg.typeName(field.dataTypeID) === 'geometry') {
geocol = field.name;
}
return field;
})
// apply quotes to columns
.map(field => outFormat === 'CSV' ? pg.quoteIdentifier(field.name) + '::text' : pg.quoteIdentifier(field.name));
return field;
})
// apply quotes to columns
.map(field => out_format === 'CSV' ? pg.quoteIdentifier(field.name)+'::text' : pg.quoteIdentifier(field.name));
if (!needSRS || !geocol) {
return null;
}
if ( ! needSRS || ! geocol ) {
return null;
}
var next = this;
var next = this;
var qgeocol = pg.quoteIdentifier(geocol);
var sridsql = 'SELECT ST_Srid(' + qgeocol + ') as srid, GeometryType(' +
var qgeocol = pg.quoteIdentifier(geocol);
var sridsql = 'SELECT ST_Srid(' + qgeocol + ') as srid, GeometryType(' +
qgeocol + ') as type FROM (' + sql + ') as _cartodbsqlapi WHERE ' +
qgeocol + ' is not null limit 1';
pg.query(sridsql, function(err, result) {
if ( err ) { next(err); return; }
if ( result.rows.length ) {
var srid = result.rows[0].srid;
var type = result.rows[0].type;
next(null, srid, type);
} else {
// continue as srid and geom type are not critical when there are no results
next(null);
}
});
},
function spawnDumper(err, srid, type) {
if (err) {
throw err;
}
var next = this;
var ogrsql = 'SELECT ' + columns.join(',') + ' FROM (' + sql + ') as _cartodbsqlapi';
var ogrargs = [
'-f', out_format,
'-lco', 'RESIZE=YES',
'-lco', 'ENCODING=UTF-8',
'-lco', 'LINEFORMAT=CRLF',
out_filename,
"PG:host=" + dbhost + " port=" + dbport + " user=" + dbuser + " dbname=" + dbname + " password=" + dbpass,
'-sql', ogrsql
];
if ( srid ) {
ogrargs.push('-a_srs', 'EPSG:'+srid);
}
if ( type ) {
ogrargs.push('-nlt', type);
}
if (options.cmd_params){
ogrargs = ogrargs.concat(options.cmd_params);
}
ogrargs.push('-nln', out_layername);
// TODO: research if `exec` could fit better than `spawn`
var child = spawn(ogr2ogr, ogrargs);
var timedOut = false;
var ogrTimeout;
if (timeout > 0) {
ogrTimeout = setTimeout(function () {
timedOut = true;
child.kill();
}, timeout);
}
child.on('error', function (err) {
clearTimeout(ogrTimeout);
next(err);
});
var stderrData = [];
child.stderr.setEncoding('utf8');
child.stderr.on('data', function (data) {
stderrData.push(data);
});
child.on('exit', function(code) {
clearTimeout(ogrTimeout);
if (timedOut) {
return next(new Error('statement timeout'));
}
if (code !== 0) {
var errMessage = 'ogr2ogr command return code ' + code;
if (stderrData.length > 0) {
errMessage += ', Error: ' + stderrData.join('\n');
}
return next(new Error(errMessage));
}
return next();
});
},
function finish(err) {
callback(err, out_filename);
}
);
};
OgrFormat.prototype.toOGR_SingleFile = function(options, fmt, callback) {
var dbname = options.dbopts.dbname;
var user_id = options.dbopts.user;
var gcol = options.gcol;
var sql = options.sql;
var skipfields = options.skipfields;
var ext = this._fileExtension;
var layername = options.filename;
var tmpdir = global.settings.tmpDir || '/tmp';
var reqKey = [
fmt,
dbname,
user_id,
gcol,
this.generateMD5(layername),
this.generateMD5(sql)
].concat(skipfields).join(':');
var outdirpath = tmpdir + '/sqlapi-' + process.pid + '-' + reqKey;
var dumpfile = outdirpath + ':cartodb-query.' + ext;
// TODO: following tests:
// - fetch query with no "the_geom" column
this.toOGR(options, fmt, dumpfile, callback);
};
OgrFormat.prototype.sendResponse = function(opts, callback) {
//var next = callback;
var reqKey = this.getKey(opts);
var qElem = new ExportRequest(opts.sink, callback, opts.beforeSink);
var baking = bakingExports[reqKey];
if ( baking ) {
baking.req.push( qElem );
} else {
baking = bakingExports[reqKey] = { req: [ qElem ] };
this.generate(opts, function(err, dumpfile) {
if ( opts.profiler ) {
opts.profiler.done('generate');
}
step (
function sendResults() {
var nextPipe = function(finish) {
var r = baking.req.shift();
if ( ! r ) { finish(null); return; }
r.sendFile(err, dumpfile, function() {
nextPipe(finish);
pg.query(sridsql, function (err, result) {
if (err) { next(err); return; }
if (result.rows.length) {
var srid = result.rows[0].srid;
var type = result.rows[0].type;
next(null, srid, type);
} else {
// continue as srid and geom type are not critical when there are no results
next(null);
}
});
};
if ( ! err ) {
nextPipe(this);
} else {
_.each(baking.req, function(r) {
r.cb(err);
});
return true;
}
},
function cleanup(/*err*/) {
delete bakingExports[reqKey];
// unlink dump file (sync to avoid race condition)
console.log("removing", dumpfile);
try { fs.unlinkSync(dumpfile); }
catch (e) {
if ( e.code !== 'ENOENT' ) {
console.log("Could not unlink dumpfile " + dumpfile + ": " + e);
function spawnDumper (err, srid, type) {
if (err) {
throw err;
}
}
var next = this;
var ogrsql = 'SELECT ' + columns.join(',') + ' FROM (' + sql + ') as _cartodbsqlapi';
var ogrargs = [
'-f', outFormat,
'-lco', 'RESIZE=YES',
'-lco', 'ENCODING=UTF-8',
'-lco', 'LINEFORMAT=CRLF',
outFilename,
'PG:host=' + dbhost + ' port=' + dbport + ' user=' + dbuser + ' dbname=' + dbname + ' password=' + dbpass,
'-sql', ogrsql
];
if (srid) {
ogrargs.push('-a_srs', 'EPSG:' + srid);
}
if (type) {
ogrargs.push('-nlt', type);
}
if (options.cmd_params) {
ogrargs = ogrargs.concat(options.cmd_params);
}
ogrargs.push('-nln', outLayername);
// TODO: research if `exec` could fit better than `spawn`
var child = spawn(ogr2ogr, ogrargs);
var timedOut = false;
var ogrTimeout;
if (timeout > 0) {
ogrTimeout = setTimeout(function () {
timedOut = true;
child.kill();
}, timeout);
}
child.on('error', function (err) {
clearTimeout(ogrTimeout);
next(err);
});
var stderrData = [];
child.stderr.setEncoding('utf8');
child.stderr.on('data', function (data) {
stderrData.push(data);
});
child.on('exit', function (code) {
clearTimeout(ogrTimeout);
if (timedOut) {
return next(new Error('statement timeout'));
}
if (code !== 0) {
var errMessage = 'ogr2ogr command return code ' + code;
if (stderrData.length > 0) {
errMessage += ', Error: ' + stderrData.join('\n');
}
return next(new Error(errMessage));
}
return next();
});
},
function finish (err) {
callback(err, outFilename);
}
);
});
}
);
};
// TODO: put in an ExportRequest.js ----- {
OgrFormat.prototype.toOGR_SingleFile = function (options, fmt, callback) {
var dbname = options.dbopts.dbname;
var userId = options.dbopts.user;
var gcol = options.gcol;
var sql = options.sql;
var skipfields = options.skipfields;
var ext = this._fileExtension;
var layername = options.filename;
function ExportRequest(ostream, callback, beforeSink) {
this.cb = callback;
this.beforeSink = beforeSink;
this.ostream = ostream;
this.istream = null;
this.canceled = false;
var tmpdir = global.settings.tmpDir || '/tmp';
var reqKey = [
fmt,
dbname,
userId,
gcol,
this.generateMD5(layername),
this.generateMD5(sql)
].concat(skipfields).join(':');
var outdirpath = tmpdir + '/sqlapi-' + process.pid + '-' + reqKey;
var dumpfile = outdirpath + ':cartodb-query.' + ext;
var that = this;
// TODO: following tests:
// - fetch query with no "the_geom" column
this.toOGR(options, fmt, dumpfile, callback);
};
this.ostream.on('close', function() {
//console.log("Request close event, qElem.stream is " + qElem.stream);
that.canceled = true;
if ( that.istream ) {
that.istream.destroy();
OgrFormat.prototype.sendResponse = function (opts, callback) {
// var next = callback;
var reqKey = this.getKey(opts);
var qElem = new ExportRequest(opts.sink, callback, opts.beforeSink);
var baking = bakingExports[reqKey];
if (baking) {
baking.req.push(qElem);
} else {
baking = bakingExports[reqKey] = { req: [qElem] };
this.generate(opts, function (err, dumpfile) {
if (opts.profiler) {
opts.profiler.done('generate');
}
step(
function sendResults () {
var nextPipe = function (finish) {
var r = baking.req.shift();
if (!r) { finish(null); return; }
r.sendFile(err, dumpfile, function () {
nextPipe(finish);
});
};
if (!err) {
nextPipe(this);
} else {
_.each(baking.req, function (r) {
r.cb(err);
});
return true;
}
},
function cleanup (/* err */) {
delete bakingExports[reqKey];
// unlink dump file (sync to avoid race condition)
console.log('removing', dumpfile);
try { fs.unlinkSync(dumpfile); } catch (e) {
if (e.code !== 'ENOENT') {
console.log('Could not unlink dumpfile ' + dumpfile + ': ' + e);
}
}
}
);
});
}
});
};
// Represents one pending export download.  Remembers the client's output
// stream and completion callback, and — if the client disconnects before
// the dump has been piped — flags the request as canceled and destroys
// the source (read) stream so no work is wasted.
function ExportRequest (ostream, callback, beforeSink) {
    this.cb = callback;
    this.beforeSink = beforeSink;
    this.ostream = ostream;
    this.istream = null;
    this.canceled = false;

    var self = this;
    this.ostream.on('close', function () {
        // Client went away: mark as canceled and tear down the read stream.
        self.canceled = true;
        if (self.istream) {
            self.istream.destroy();
        }
    });
}
ExportRequest.prototype.sendFile = function (err, filename, callback) {
var that = this;
if ( ! this.canceled ) {
//console.log("Creating readable stream out of dumpfile");
this.istream = fs.createReadStream(filename)
.on('open', function(/*fd*/) {
if ( that.beforeSink ) {
that.beforeSink();
}
that.istream.pipe(that.ostream);
callback();
})
.on('error', function(e) {
console.log("Can't send response: " + e);
that.ostream.end();
callback();
});
} else {
//console.log("Response was canceled, not streaming the file");
callback();
}
this.cb();
if (err) {
return callback(err);
}
var that = this;
if (!this.canceled) {
this.istream = fs.createReadStream(filename)
.on('open', function (/* fd */) {
if (that.beforeSink) {
that.beforeSink();
}
that.istream.pipe(that.ostream);
callback();
})
.on('error', function (e) {
console.log("Can't send response: " + e);
that.ostream.end();
callback();
});
} else {
callback();
}
this.cb();
};
//------ }
module.exports = OgrFormat;

View File

@ -2,14 +2,14 @@
var Ogr = require('./../ogr');
function CsvFormat() {}
function CsvFormat () {}
CsvFormat.prototype = new Ogr('csv');
CsvFormat.prototype._contentType = "text/csv; charset=utf-8; header=present";
CsvFormat.prototype._fileExtension = "csv";
CsvFormat.prototype._contentType = 'text/csv; charset=utf-8; header=present';
CsvFormat.prototype._fileExtension = 'csv';
CsvFormat.prototype.generate = function(options, callback) {
CsvFormat.prototype.generate = function (options, callback) {
this.toOGR_SingleFile(options, 'CSV', callback);
};

View File

@ -2,12 +2,12 @@
var Ogr = require('./../ogr');
function GeoPackageFormat() {}
function GeoPackageFormat () {}
GeoPackageFormat.prototype = new Ogr('gpkg');
GeoPackageFormat.prototype._contentType = "application/x-sqlite3; charset=utf-8";
GeoPackageFormat.prototype._fileExtension = "gpkg";
GeoPackageFormat.prototype._contentType = 'application/x-sqlite3; charset=utf-8';
GeoPackageFormat.prototype._fileExtension = 'gpkg';
// As of GDAL 1.10.1 SRID detection is bogus, so we use
// our own method. See:
// http://trac.osgeo.org/gdal/ticket/5131
@ -17,7 +17,7 @@ GeoPackageFormat.prototype._fileExtension = "gpkg";
// Bug was fixed in GDAL 1.10.2
GeoPackageFormat.prototype._needSRS = true;
GeoPackageFormat.prototype.generate = function(options, callback) {
GeoPackageFormat.prototype.generate = function (options, callback) {
options.cmd_params = ['-lco', 'FID=cartodb_id'];
this.toOGR_SingleFile(options, 'GPKG', callback);
};

View File

@ -2,12 +2,12 @@
var Ogr = require('./../ogr');
function KmlFormat() {}
function KmlFormat () {}
KmlFormat.prototype = new Ogr('kml');
KmlFormat.prototype._contentType = "application/kml; charset=utf-8";
KmlFormat.prototype._fileExtension = "kml";
KmlFormat.prototype._contentType = 'application/kml; charset=utf-8';
KmlFormat.prototype._fileExtension = 'kml';
// As of GDAL 1.10.1 SRID detection is bogus, so we use
// our own method. See:
// http://trac.osgeo.org/gdal/ticket/5131
@ -17,7 +17,7 @@ KmlFormat.prototype._fileExtension = "kml";
// Bug was fixed in GDAL 1.10.2
KmlFormat.prototype._needSRS = true;
KmlFormat.prototype.generate = function(options, callback) {
KmlFormat.prototype.generate = function (options, callback) {
this.toOGR_SingleFile(options, 'KML', callback);
};

View File

@ -6,13 +6,13 @@ var spawn = require('child_process').spawn;
var Ogr = require('./../ogr');
function ShpFormat() {
function ShpFormat () {
}
ShpFormat.prototype = new Ogr('shp');
ShpFormat.prototype._contentType = "application/zip; charset=utf-8";
ShpFormat.prototype._fileExtension = "zip";
ShpFormat.prototype._contentType = 'application/zip; charset=utf-8';
ShpFormat.prototype._fileExtension = 'zip';
// As of GDAL 1.10 SRID detection is bogus, so we use
// our own method. See:
// http://trac.osgeo.org/gdal/ticket/5131
@ -21,115 +21,113 @@ ShpFormat.prototype._fileExtension = "zip";
// http://github.com/CartoDB/CartoDB-SQL-API/issues/116
ShpFormat.prototype._needSRS = true;
ShpFormat.prototype.generate = function(options, callback) {
this.toSHP(options, callback);
ShpFormat.prototype.generate = function (options, callback) {
this.toSHP(options, callback);
};
ShpFormat.prototype.toSHP = function (options, callback) {
var dbname = options.database;
var user_id = options.user_id;
var gcol = options.gn;
var sql = options.sql;
var skipfields = options.skipfields;
var filename = options.filename;
var dbname = options.database;
var userId = options.user_id;
var gcol = options.gn;
var sql = options.sql;
var skipfields = options.skipfields;
var filename = options.filename;
var fmtObj = this;
var zip = global.settings.zipCommand || 'zip';
var zipOptions = '-qrj';
var tmpdir = global.settings.tmpDir || '/tmp';
var reqKey = [ 'shp', dbname, user_id, gcol, this.generateMD5(sql) ].concat(skipfields).join(':');
var outdirpath = tmpdir + '/sqlapi-' + process.pid + '-' + reqKey;
var zipfile = outdirpath + '.zip';
var shapefile = outdirpath + '/' + filename + '.shp';
var fmtObj = this;
var zip = global.settings.zipCommand || 'zip';
var zipOptions = '-qrj';
var tmpdir = global.settings.tmpDir || '/tmp';
var reqKey = ['shp', dbname, userId, gcol, this.generateMD5(sql)].concat(skipfields).join(':');
var outdirpath = tmpdir + '/sqlapi-' + process.pid + '-' + reqKey;
var zipfile = outdirpath + '.zip';
var shapefile = outdirpath + '/' + filename + '.shp';
// TODO: following tests:
// - fetch query with no "the_geom" column
// TODO: following tests:
// - fetch query with no "the_geom" column
step (
function createOutDir() {
fs.mkdir(outdirpath, 0o777, this);
},
function spawnDumper(err) {
if (err) {
throw err;
}
step(
function createOutDir () {
fs.mkdir(outdirpath, 0o777, this);
},
function spawnDumper (err) {
if (err) {
throw err;
}
fmtObj.toOGR(options, 'ESRI Shapefile', shapefile, this);
},
function doZip(err) {
if (err) {
throw err;
}
fmtObj.toOGR(options, 'ESRI Shapefile', shapefile, this);
},
function doZip (err) {
if (err) {
throw err;
}
var next = this;
var next = this;
var child = spawn(zip, [zipOptions, zipfile, outdirpath ]);
var child = spawn(zip, [zipOptions, zipfile, outdirpath]);
child.on('error', function (err) {
next(new Error('Error executing zip command, ' + err));
});
var stderrData = [];
child.stderr.setEncoding('utf8');
child.stderr.on('data', function (data) {
stderrData.push(data);
});
child.on('exit', function(code) {
if (code !== 0) {
var errMessage = 'Zip command return code ' + code;
if (stderrData.length) {
errMessage += ', Error: ' + stderrData.join('\n');
}
return next(new Error(errMessage));
}
return next();
});
},
function cleanupDir(topError) {
var next = this;
// Unlink the dir content
var unlinkall = function(dir, files, finish) {
var f = files.shift();
if ( ! f ) { finish(null); return; }
var fn = dir + '/' + f;
fs.unlink(fn, function(err) {
if ( err ) {
console.log("Unlinking " + fn + ": " + err);
finish(err);
} else {
unlinkall(dir, files, finish);
}
});
};
fs.readdir(outdirpath, function(err, files) {
if ( err ) {
if ( err.code !== 'ENOENT' ) {
next(new Error([topError, err].join('\n')));
} else {
next(topError);
}
} else {
unlinkall(outdirpath, files, function(/*err*/) {
fs.rmdir(outdirpath, function(err) {
if ( err ) {
console.log("Removing dir " + outdirpath + ": " + err);
}
next(topError, zipfile);
child.on('error', function (err) {
next(new Error('Error executing zip command, ' + err));
});
});
var stderrData = [];
child.stderr.setEncoding('utf8');
child.stderr.on('data', function (data) {
stderrData.push(data);
});
child.on('exit', function (code) {
if (code !== 0) {
var errMessage = 'Zip command return code ' + code;
if (stderrData.length) {
errMessage += ', Error: ' + stderrData.join('\n');
}
return next(new Error(errMessage));
}
return next();
});
},
function cleanupDir (topError) {
var next = this;
// Unlink the dir content
// Sequentially unlinks every file named in `files` (mutating the array)
// inside `dir`, then calls `finish(null)`; stops and reports the first
// unlink error via `finish(err)`.
var unlinkall = function (dir, files, finish) {
    var current = files.shift();
    if (!current) {
        finish(null);
        return;
    }
    var fn = dir + '/' + current;
    fs.unlink(fn, function (err) {
        if (!err) {
            // Tail-recurse on the remaining files.
            unlinkall(dir, files, finish);
            return;
        }
        console.log('Unlinking ' + fn + ': ' + err);
        finish(err);
    });
};
fs.readdir(outdirpath, function (err, files) {
if (err) {
if (err.code !== 'ENOENT') {
next(new Error([topError, err].join('\n')));
} else {
next(topError);
}
} else {
unlinkall(outdirpath, files, function (/* err */) {
fs.rmdir(outdirpath, function (err) {
if (err) {
console.log('Removing dir ' + outdirpath + ': ' + err);
}
next(topError, zipfile);
});
});
}
});
},
function finalStep (err, zipfile) {
callback(err, zipfile);
}
});
},
function finalStep(err, zipfile) {
callback(err, zipfile);
}
);
);
};
module.exports = ShpFormat;

View File

@ -2,12 +2,12 @@
var Ogr = require('./../ogr');
function SpatiaLiteFormat() {}
function SpatiaLiteFormat () {}
SpatiaLiteFormat.prototype = new Ogr('spatialite');
SpatiaLiteFormat.prototype._contentType = "application/x-sqlite3; charset=utf-8";
SpatiaLiteFormat.prototype._fileExtension = "sqlite";
SpatiaLiteFormat.prototype._contentType = 'application/x-sqlite3; charset=utf-8';
SpatiaLiteFormat.prototype._fileExtension = 'sqlite';
// As of GDAL 1.10.1 SRID detection is bogus, so we use
// our own method. See:
// http://trac.osgeo.org/gdal/ticket/5131
@ -17,7 +17,7 @@ SpatiaLiteFormat.prototype._fileExtension = "sqlite";
// Bug was fixed in GDAL 1.10.2
SpatiaLiteFormat.prototype._needSRS = true;
SpatiaLiteFormat.prototype.generate = function(options, callback) {
SpatiaLiteFormat.prototype.generate = function (options, callback) {
this.toOGR_SingleFile(options, 'SQLite', callback);
options.cmd_params = ['SPATIALITE=yes'];
};

View File

@ -3,156 +3,156 @@
var step = require('step');
var PSQL = require('cartodb-psql');
function PostgresFormat(id) {
function PostgresFormat (id) {
this.id = id;
}
PostgresFormat.prototype = {
getQuery: function(sql/*, options*/) {
return sql;
},
getQuery: function (sql/*, options */) {
return sql;
},
getContentType: function(){
return this._contentType;
},
getContentType: function () {
return this._contentType;
},
getFileExtension: function() {
return this.id;
}
getFileExtension: function () {
return this.id;
}
};
PostgresFormat.prototype.handleQueryRow = function(row, result) {
PostgresFormat.prototype.handleQueryRow = function (row, result) {
result.addRow(row);
};
PostgresFormat.prototype.handleQueryRowWithSkipFields = function(row, result) {
PostgresFormat.prototype.handleQueryRowWithSkipFields = function (row, result) {
var sf = this.opts.skipfields;
for ( var j=0; j<sf.length; ++j ) {
for (var j = 0; j < sf.length; ++j) {
delete row[sf[j]];
}
this.handleQueryRow(row, result);
};
PostgresFormat.prototype.handleNotice = function(msg, result) {
if ( ! result.notices ) {
PostgresFormat.prototype.handleNotice = function (msg, result) {
if (!result.notices) {
result.notices = [];
}
for (var i=0; i<msg.length; i++) {
for (var i = 0; i < msg.length; i++) {
result.notices.push(msg[i]);
}
};
PostgresFormat.prototype.handleQueryEnd = function(result) {
this.queryCanceller = undefined;
PostgresFormat.prototype.handleQueryEnd = function (result) {
this.queryCanceller = undefined;
if ( this.error ) {
this.callback(this.error);
return;
}
if ( this.opts.profiler ) {
this.opts.profiler.done('gotRows');
}
this.opts.total_time = (Date.now() - this.start_time)/1000;
// Drop field description for skipped fields
if (this.hasSkipFields) {
var sf = this.opts.skipfields;
var newfields = [];
for ( var j=0; j<result.fields.length; ++j ) {
var f = result.fields[j];
if ( sf.indexOf(f.name) === -1 ) {
newfields.push(f);
}
}
result.fields = newfields;
}
var that = this;
step (
function packageResult() {
that.transform(result, that.opts, this);
},
function sendResults(err, out){
if (err) {
throw err;
}
// return to browser
if ( out ) {
if ( that.opts.beforeSink ) {
that.opts.beforeSink();
}
that.opts.sink.send(out);
} else {
console.error("No output from transform, doing nothing ?!");
}
},
function errorHandle(err){
that.callback(err);
}
);
};
PostgresFormat.prototype.sendResponse = function(opts, callback) {
if ( this.callback ) {
callback(new Error("Invalid double call to .sendResponse on a pg formatter"));
return;
}
this.callback = callback;
this.opts = opts;
this.hasSkipFields = opts.skipfields.length;
var sql = this.getQuery(opts.sql, {
gn: opts.gn,
dp: opts.dp,
skipfields: opts.skipfields
});
var that = this;
this.start_time = Date.now();
this.client = new PSQL(opts.dbopts);
this.client.eventedQuery(sql, function(err, query, queryCanceller) {
that.queryCanceller = queryCanceller;
if (err) {
callback(err);
if (this.error) {
this.callback(this.error);
return;
}
if ( that.opts.profiler ) {
that.opts.profiler.done('eventedQuery');
}
}
if (that.hasSkipFields) {
query.on('row', that.handleQueryRowWithSkipFields.bind(that));
} else {
query.on('row', that.handleQueryRow.bind(that));
}
query.on('end', that.handleQueryEnd.bind(that));
query.on('error', function(err) {
that.error = err;
if (err.message && err.message.match(/row too large, was \d* bytes/i)) {
return console.error(JSON.stringify({
username: opts.username,
type: 'row_size_limit_exceeded',
error: err.message
}));
}
that.handleQueryEnd();
});
query.on('notice', function(msg) {
that.handleNotice(msg, query._result);
});
});
if (this.opts.profiler) {
this.opts.profiler.done('gotRows');
}
this.opts.total_time = (Date.now() - this.start_time) / 1000;
// Drop field description for skipped fields
if (this.hasSkipFields) {
var sf = this.opts.skipfields;
var newfields = [];
for (var j = 0; j < result.fields.length; ++j) {
var f = result.fields[j];
if (sf.indexOf(f.name) === -1) {
newfields.push(f);
}
}
result.fields = newfields;
}
var that = this;
step(
function packageResult () {
that.transform(result, that.opts, this);
},
function sendResults (err, out) {
if (err) {
throw err;
}
// return to browser
if (out) {
if (that.opts.beforeSink) {
that.opts.beforeSink();
}
that.opts.sink.send(out);
} else {
console.error('No output from transform, doing nothing ?!');
}
},
function errorHandle (err) {
that.callback(err);
}
);
};
PostgresFormat.prototype.cancel = function() {
PostgresFormat.prototype.sendResponse = function (opts, callback) {
if (this.callback) {
callback(new Error('Invalid double call to .sendResponse on a pg formatter'));
return;
}
this.callback = callback;
this.opts = opts;
this.hasSkipFields = opts.skipfields.length;
var sql = this.getQuery(opts.sql, {
gn: opts.gn,
dp: opts.dp,
skipfields: opts.skipfields
});
var that = this;
this.start_time = Date.now();
this.client = new PSQL(opts.dbopts);
this.client.eventedQuery(sql, function (err, query, queryCanceller) {
that.queryCanceller = queryCanceller;
if (err) {
callback(err);
return;
}
if (that.opts.profiler) {
that.opts.profiler.done('eventedQuery');
}
if (that.hasSkipFields) {
query.on('row', that.handleQueryRowWithSkipFields.bind(that));
} else {
query.on('row', that.handleQueryRow.bind(that));
}
query.on('end', that.handleQueryEnd.bind(that));
query.on('error', function (err) {
that.error = err;
if (err.message && err.message.match(/row too large, was \d* bytes/i)) {
return console.error(JSON.stringify({
username: opts.username,
type: 'row_size_limit_exceeded',
error: err.message
}));
}
that.handleQueryEnd();
});
query.on('notice', function (msg) {
that.handleNotice(msg, query._result);
});
});
};
PostgresFormat.prototype.cancel = function () {
if (this.queryCanceller) {
this.queryCanceller.call();
}

View File

@ -2,88 +2,86 @@
var _ = require('underscore');
var Pg = require('./../pg');
var ArrayBufferSer = require("../../bin-encoder");
var Pg = require('./../pg');
var ArrayBufferSer = require('../../bin-encoder');
function BinaryFormat() {}
function BinaryFormat () {}
BinaryFormat.prototype = new Pg('arraybuffer');
BinaryFormat.prototype._contentType = "application/octet-stream";
BinaryFormat.prototype._contentType = 'application/octet-stream';
BinaryFormat.prototype._extractTypeFromName = function(name) {
var g = name.match(/.*__(uintclamp|uint|int|float)(8|16|32)/i);
if(g && g.length === 3) {
var typeName = g[1] + g[2];
return ArrayBufferSer.typeNames[typeName];
}
// Infers the ArrayBufferSer element type from a column-name suffix such as
// "height__float32" or "id__uint16"; returns undefined when the name does
// not carry a recognized "__<type><bits>" suffix.
BinaryFormat.prototype._extractTypeFromName = function (name) {
    var match = /.*__(uintclamp|uint|int|float)(8|16|32)/i.exec(name);
    if (!match || match.length !== 3) {
        return undefined;
    }
    return ArrayBufferSer.typeNames[match[1] + match[2]];
};
// jshint maxcomplexity:12
BinaryFormat.prototype.transform = function(result, options, callback) {
var total_rows = result.rowCount;
var rows = result.rows;
BinaryFormat.prototype.transform = function (result, options, callback) {
var totalRows = result.rowCount;
var rows = result.rows;
// get headers
if(!total_rows) {
callback(null, new Buffer(0));
return;
}
var headersNames = Object.keys(rows[0]);
var headerTypes = [];
if(_.contains(headersNames, 'the_geom')) {
callback(new Error("geometry types are not supported"), null);
return;
}
try {
var i;
var r;
var n;
var t;
// get header types (and guess from name)
for(i = 0; i < headersNames.length; ++i) {
r = rows[0];
n = headersNames[i];
if(typeof(r[n]) === 'string') {
headerTypes.push(ArrayBufferSer.STRING);
} else if(typeof(r[n]) === 'object') {
t = this._extractTypeFromName(n);
t = t || ArrayBufferSer.FLOAT32;
headerTypes.push(ArrayBufferSer.BUFFER + t);
} else {
t = this._extractTypeFromName(n);
headerTypes.push(t || ArrayBufferSer.FLOAT32);
}
// get headers
if (!totalRows) {
callback(null, Buffer.alloc(0));
return;
}
// pack the data
var header = new ArrayBufferSer(ArrayBufferSer.STRING, headersNames);
var data = [header];
for(i = 0; i < headersNames.length; ++i) {
var d = [];
n = headersNames[i];
for(r = 0; r < total_rows; ++r) {
var row = rows[r][n];
if(headerTypes[i] > ArrayBufferSer.BUFFER) {
row = new ArrayBufferSer(headerTypes[i] - ArrayBufferSer.BUFFER, row);
var headersNames = Object.keys(rows[0]);
var headerTypes = [];
if (_.contains(headersNames, 'the_geom')) {
callback(new Error('geometry types are not supported'), null);
return;
}
try {
var i;
var r;
var n;
var t;
// get header types (and guess from name)
for (i = 0; i < headersNames.length; ++i) {
r = rows[0];
n = headersNames[i];
if (typeof (r[n]) === 'string') {
headerTypes.push(ArrayBufferSer.STRING);
} else if (typeof (r[n]) === 'object') {
t = this._extractTypeFromName(n);
t = t || ArrayBufferSer.FLOAT32;
headerTypes.push(ArrayBufferSer.BUFFER + t);
} else {
t = this._extractTypeFromName(n);
headerTypes.push(t || ArrayBufferSer.FLOAT32);
}
}
d.push(row);
}
var b = new ArrayBufferSer(headerTypes[i], d);
data.push(b);
// pack the data
var header = new ArrayBufferSer(ArrayBufferSer.STRING, headersNames);
var data = [header];
for (i = 0; i < headersNames.length; ++i) {
var d = [];
n = headersNames[i];
for (r = 0; r < totalRows; ++r) {
var row = rows[r][n];
if (headerTypes[i] > ArrayBufferSer.BUFFER) {
row = new ArrayBufferSer(headerTypes[i] - ArrayBufferSer.BUFFER, row);
}
d.push(row);
}
var b = new ArrayBufferSer(headerTypes[i], d);
data.push(b);
}
// create the final buffer
var all = new ArrayBufferSer(ArrayBufferSer.BUFFER, data);
callback(null, all.buffer);
} catch (e) {
callback(e, null);
}
// create the final buffer
var all = new ArrayBufferSer(ArrayBufferSer.BUFFER, data);
callback(null, all.buffer);
} catch(e) {
callback(e, null);
}
};
module.exports = BinaryFormat;

View File

@ -5,21 +5,21 @@ var _ = require('underscore');
var Pg = require('./../pg');
const errorHandlerFactory = require('../../../services/error-handler-factory');
function GeoJsonFormat() {
function GeoJsonFormat () {
this.buffer = '';
}
GeoJsonFormat.prototype = new Pg('geojson');
GeoJsonFormat.prototype._contentType = "application/json; charset=utf-8";
GeoJsonFormat.prototype._contentType = 'application/json; charset=utf-8';
GeoJsonFormat.prototype.getQuery = function(sql, options) {
GeoJsonFormat.prototype.getQuery = function (sql, options) {
var gn = options.gn;
var dp = options.dp;
return 'SELECT *, ST_AsGeoJSON(' + gn + ',' + dp + ') as the_geom FROM (' + sql + ') as foo';
};
GeoJsonFormat.prototype.startStreaming = function() {
GeoJsonFormat.prototype.startStreaming = function () {
this.total_rows = 0;
if (this.opts.beforeSink) {
this.opts.beforeSink();
@ -31,9 +31,8 @@ GeoJsonFormat.prototype.startStreaming = function() {
this._streamingStarted = true;
};
GeoJsonFormat.prototype.handleQueryRow = function(row) {
if ( ! this._streamingStarted ) {
GeoJsonFormat.prototype.handleQueryRow = function (row) {
if (!this._streamingStarted) {
this.startStreaming();
}
@ -56,17 +55,17 @@ GeoJsonFormat.prototype.handleQueryRow = function(row) {
}
};
GeoJsonFormat.prototype.handleQueryEnd = function(/*result*/) {
GeoJsonFormat.prototype.handleQueryEnd = function (/* result */) {
if (this.error && !this._streamingStarted) {
this.callback(this.error);
return;
}
if ( this.opts.profiler ) {
if (this.opts.profiler) {
this.opts.profiler.done('gotRows');
}
if ( ! this._streamingStarted ) {
if (!this._streamingStarted) {
this.startStreaming();
}
@ -89,31 +88,31 @@ GeoJsonFormat.prototype.handleQueryEnd = function(/*result*/) {
this.callback();
};
function _toGeoJSON(data, gn, callback){
try {
var out = {
type: "FeatureCollection",
features: []
};
function _toGeoJSON (data, gn, callback) {
try {
var out = {
type: 'FeatureCollection',
features: []
};
_.each(data.rows, function(ele){
var _geojson = {
type: "Feature",
properties: { },
geometry: { }
};
_geojson.geometry = JSON.parse(ele[gn]);
delete ele[gn];
delete ele.the_geom_webmercator; // TODO: use skipfields
_geojson.properties = ele;
out.features.push(_geojson);
});
_.each(data.rows, function (ele) {
var _geojson = {
type: 'Feature',
properties: { },
geometry: { }
};
_geojson.geometry = JSON.parse(ele[gn]);
delete ele[gn];
delete ele.the_geom_webmercator; // TODO: use skipfields
_geojson.properties = ele;
out.features.push(_geojson);
});
// return payload
callback(null, out);
} catch (err) {
callback(err,null);
}
// return payload
callback(null, out);
} catch (err) {
callback(err, null);
}
}
module.exports = GeoJsonFormat;

View File

@ -2,62 +2,56 @@
var _ = require('underscore');
var Pg = require('./../pg');
var Pg = require('./../pg');
const errorHandlerFactory = require('../../../services/error-handler-factory');
function JsonFormat() {
function JsonFormat () {
this.buffer = '';
this.lastKnownResult = {};
}
JsonFormat.prototype = new Pg('json');
JsonFormat.prototype._contentType = "application/json; charset=utf-8";
JsonFormat.prototype._contentType = 'application/json; charset=utf-8';
// jshint maxcomplexity:10
JsonFormat.prototype.formatResultFields = function(flds) {
flds = flds || [];
var nfields = {};
for (var i=0; i<flds.length; ++i) {
var f = flds[i];
var cname = this.client.typeName(f.dataTypeID);
var tname;
JsonFormat.prototype.formatResultFields = function (flds) {
flds = flds || [];
var nfields = {};
for (var i = 0; i < flds.length; ++i) {
var f = flds[i];
var cname = this.client.typeName(f.dataTypeID);
var tname;
if ( ! cname ) {
tname = 'unknown(' + f.dataTypeID + ')';
} else {
if ( cname.match('bool') ) {
tname = 'boolean';
}
else if ( cname.match(/int|float|numeric/) ) {
tname = 'number';
}
else if ( cname.match(/text|char|unknown/) ) {
tname = 'string';
}
else if ( cname.match(/date|time/) ) {
tname = 'date';
}
else {
tname = cname;
}
if ( tname && cname.match(/^_/) ) {
tname += '[]';
}
if (!cname) {
tname = 'unknown(' + f.dataTypeID + ')';
} else {
if (cname.match('bool')) {
tname = 'boolean';
} else if (cname.match(/int|float|numeric/)) {
tname = 'number';
} else if (cname.match(/text|char|unknown/)) {
tname = 'string';
} else if (cname.match(/date|time/)) {
tname = 'date';
} else {
tname = cname;
}
if (tname && cname.match(/^_/)) {
tname += '[]';
}
}
if (['geography', 'geometry', 'raster'].includes(cname)) {
const { wkbtype, ndims, srid } = this.client.typeModInfo(f.dataTypeModifier);
nfields[f.name] = { type: tname, wkbtype, dims: ndims, srid };
} else {
nfields[f.name] = { type: tname, pgtype: cname };
}
}
if (['geography', 'geometry', 'raster'].includes(cname)) {
let { wkbtype, ndims, srid } = this.client.typeModInfo(f.dataTypeModifier);
nfields[f.name] = { type: tname, wkbtype, dims: ndims, srid };
} else {
nfields[f.name] = { type: tname, pgtype: cname };
}
}
return nfields;
return nfields;
};
JsonFormat.prototype.startStreaming = function() {
JsonFormat.prototype.startStreaming = function () {
this.total_rows = 0;
if (this.opts.beforeSink) {
this.opts.beforeSink();
@ -69,15 +63,15 @@ JsonFormat.prototype.startStreaming = function() {
this._streamingStarted = true;
};
JsonFormat.prototype.handleQueryRow = function(row, result) {
if ( ! this._streamingStarted ) {
JsonFormat.prototype.handleQueryRow = function (row, result) {
if (!this._streamingStarted) {
this.startStreaming();
}
this.lastKnownResult = result;
this.buffer += (this.total_rows++ ? ',' : '') + JSON.stringify(row, function (key, value) {
if (value !== value) {
if (value !== value) { // eslint-disable-line no-self-compare
return 'NaN';
}
@ -98,22 +92,21 @@ JsonFormat.prototype.handleQueryRow = function(row, result) {
}
};
// jshint maxcomplexity:13
JsonFormat.prototype.handleQueryEnd = function(result) {
JsonFormat.prototype.handleQueryEnd = function (result) {
if (this.error && !this._streamingStarted) {
this.callback(this.error);
return;
}
if ( this.opts.profiler ) {
if (this.opts.profiler) {
this.opts.profiler.done('gotRows');
}
if ( ! this._streamingStarted ) {
if (!this._streamingStarted) {
this.startStreaming();
}
this.opts.total_time = (Date.now() - this.start_time)/1000;
this.opts.total_time = (Date.now() - this.start_time) / 1000;
result = result || this.lastKnownResult || {};
@ -123,18 +116,18 @@ JsonFormat.prototype.handleQueryEnd = function(result) {
var sf = this.opts.skipfields;
for (var i = 0; i < result.fields.length; i++) {
var f = result.fields[i];
if ( sf.indexOf(f.name) === -1 ) {
if (sf.indexOf(f.name) === -1) {
newfields.push(f);
}
}
result.fields = newfields;
}
var total_time = (Date.now() - this.start_time)/1000;
var totalTime = (Date.now() - this.start_time) / 1000;
var out = [
'],', // end of "rows" array
'"time":', JSON.stringify(total_time),
'"time":', JSON.stringify(totalTime),
',"fields":', JSON.stringify(this.formatResultFields(result.fields)),
',"total_rows":', JSON.stringify(result.rowCount || this.total_rows)
];
@ -143,11 +136,10 @@ JsonFormat.prototype.handleQueryEnd = function(result) {
out.push(',"error":', JSON.stringify(errorHandlerFactory(this.error).getResponse().error));
}
if ( result.notices && result.notices.length > 0 ) {
var notices = {},
severities = [];
_.each(result.notices, function(notice) {
if (result.notices && result.notices.length > 0) {
var notices = {};
var severities = [];
_.each(result.notices, function (notice) {
var severity = notice.severity.toLowerCase() + 's';
if (!notices[severity]) {
severities.push(severity);
@ -155,7 +147,7 @@ JsonFormat.prototype.handleQueryEnd = function(result) {
}
notices[severity].push(notice.message);
});
_.each(severities, function(severity) {
_.each(severities, function (severity) {
out.push(',');
out.push(JSON.stringify(severity));
out.push(':');
@ -165,7 +157,6 @@ JsonFormat.prototype.handleQueryEnd = function(result) {
out.push('}');
this.buffer += out.join('');
if (this.opts.callback) {

View File

@ -1,21 +1,21 @@
'use strict';
var Pg = require('./../pg');
var Pg = require('./../pg');
var svg_width = 1024.0;
var svg_height = 768.0;
var svg_ratio = svg_width/svg_height;
var svgWidth = 1024.0;
var svgHeight = 768.0;
var svgRatio = svgWidth / svgHeight;
var radius = 5; // in pixels (based on svg_width and svg_height)
var radius = 5; // in pixels (based on svgWidth and svgHeight)
var stroke_width = 1; // in pixels (based on svg_width and svg_height)
var stroke_color = 'black';
var strokeWidth = 1; // in pixels (based on svgWidth and svgHeight)
var strokeColor = 'black';
// fill settings affect polygons and points (circles)
var fill_opacity = 0.5; // 0.0 is fully transparent, 1.0 is fully opaque
// unused if fill_color='none'
var fill_color = 'none'; // affects polygons and circles
var fillOpacity = 0.5; // 0.0 is fully transparent, 1.0 is fully opaque
// unused if fillColor='none'
var fillColor = 'none'; // affects polygons and circles
function SvgFormat() {
function SvgFormat () {
this.totalRows = 0;
this.bbox = null; // will be computed during the results scan
@ -25,34 +25,34 @@ function SvgFormat() {
}
SvgFormat.prototype = new Pg('svg');
SvgFormat.prototype._contentType = "image/svg+xml; charset=utf-8";
SvgFormat.prototype._contentType = 'image/svg+xml; charset=utf-8';
SvgFormat.prototype.getQuery = function(sql, options) {
var gn = options.gn;
var dp = options.dp;
return 'WITH source AS ( ' + sql + '), extent AS ( ' +
SvgFormat.prototype.getQuery = function (sql, options) {
var gn = options.gn;
var dp = options.dp;
return 'WITH source AS ( ' + sql + '), extent AS ( ' +
' SELECT ST_Extent(' + gn + ') AS e FROM source ' +
'), extent_info AS ( SELECT e, ' +
'st_xmin(e) as ex0, st_ymax(e) as ey0, ' +
'st_xmax(e)-st_xmin(e) as ew, ' +
'st_ymax(e)-st_ymin(e) as eh FROM extent )' +
', trans AS ( SELECT CASE WHEN ' +
'eh = 0 THEN ' + svg_width +
'/ COALESCE(NULLIF(ew,0),' + svg_width +') WHEN ' +
svg_ratio + ' <= (ew / eh) THEN (' +
svg_width + '/ew ) ELSE (' +
svg_height + '/eh ) END as s ' +
'eh = 0 THEN ' + svgWidth +
'/ COALESCE(NULLIF(ew,0),' + svgWidth + ') WHEN ' +
svgRatio + ' <= (ew / eh) THEN (' +
svgWidth + '/ew ) ELSE (' +
svgHeight + '/eh ) END as s ' +
', ex0 as x0, ey0 as y0 FROM extent_info ) ' +
'SELECT st_TransScale(e, -x0, -y0, s, s)::box2d as ' +
gn + '_box, ST_Dimension(' + gn + ') as ' + gn +
'_dimension, ST_AsSVG(ST_TransScale(' + gn + ', ' +
'-x0, -y0, s, s), 0, ' + dp + ') as ' + gn +
//+ ', ex0, ey0, ew, eh, s ' // DEBUG ONLY +
// + ', ex0, ey0, ew, eh, s ' // DEBUG ONLY +
' FROM trans, extent_info, source' +
' ORDER BY the_geom_dimension ASC';
};
SvgFormat.prototype.startStreaming = function() {
SvgFormat.prototype.startStreaming = function () {
if (this.opts.beforeSink) {
this.opts.beforeSink();
}
@ -63,10 +63,10 @@ SvgFormat.prototype.startStreaming = function() {
];
var rootTag = '<svg ';
if ( this.bbox ) {
if (this.bbox) {
// expand box by "radius" + "stroke-width"
// TODO: use a Box2d class for these ops
var growby = radius + stroke_width;
var growby = radius + strokeWidth;
this.bbox.xmin -= growby;
this.bbox.ymin -= growby;
this.bbox.xmax += growby;
@ -76,8 +76,8 @@ SvgFormat.prototype.startStreaming = function() {
rootTag += 'viewBox="' + this.bbox.xmin + ' ' + (-this.bbox.ymax) + ' ' +
this.bbox.width + ' ' + this.bbox.height + '" ';
}
rootTag += 'style="fill-opacity:' + fill_opacity + '; stroke:' + stroke_color + '; ' +
'stroke-width:' + stroke_width + '; fill:' + fill_color + '" ';
rootTag += 'style="fill-opacity:' + fillOpacity + '; stroke:' + strokeColor + '; ' +
'stroke-width:' + strokeWidth + '; fill:' + fillColor + '" ';
rootTag += 'xmlns="http://www.w3.org/2000/svg" version="1.1">\n';
header.push(rootTag);
@ -87,34 +87,31 @@ SvgFormat.prototype.startStreaming = function() {
this._streamingStarted = true;
};
// jshint maxcomplexity:11
SvgFormat.prototype.handleQueryRow = function(row) {
SvgFormat.prototype.handleQueryRow = function (row) {
this.totalRows++;
if ( ! row.hasOwnProperty(this.opts.gn) ) {
if (!Object.prototype.hasOwnProperty.call(row, this.opts.gn)) {
this.error = new Error('column "' + this.opts.gn + '" does not exist');
}
var g = row[this.opts.gn];
if ( ! g ) {
if (!g) {
return;
} // null or empty
// jshint ignore:start
var gdims = row[this.opts.gn + '_dimension'];
// TODO: add an identifier, if any of "cartodb_id", "oid", "id", "gid" are found
// TODO: add "class" attribute to help with styling ?
if ( gdims == '0' ) {
if (gdims === 0) {
this.buffer += '<circle r="' + radius + '" ' + g + ' />\n';
} else if ( gdims == '1' ) {
} else if (gdims === 1) {
// Avoid filling closed linestrings
this.buffer += '<path ' + ( fill_color !== 'none' ? 'fill="none" ' : '' ) + 'd="' + g + '" />\n';
} else if ( gdims == '2' ) {
this.buffer += '<path ' + (fillColor !== 'none' ? 'fill="none" ' : '') + 'd="' + g + '" />\n';
} else if (gdims === 2) {
this.buffer += '<path d="' + g + '" />\n';
}
// jshint ignore:end
if ( ! this.bbox ) {
if (!this.bbox) {
// Parse layer extent: "BOX(x y, X Y)"
// NOTE: the name of the extent field is
// determined by the same code adding the
@ -140,13 +137,13 @@ SvgFormat.prototype.handleQueryRow = function(row) {
}
};
SvgFormat.prototype.handleQueryEnd = function() {
if ( this.error && !this._streamingStarted) {
SvgFormat.prototype.handleQueryEnd = function () {
if (this.error && !this._streamingStarted) {
this.callback(this.error);
return;
}
if ( this.opts.profiler ) {
if (this.opts.profiler) {
this.opts.profiler.done('gotRows');
}

View File

@ -1,25 +1,23 @@
'use strict';
// jshint ignore:start
var Pg = require('./../pg');
var Pg = require('./../pg');
var _ = require('underscore');
var geojson = require('./geojson');
var TopoJSON = require('topojson');
function TopoJsonFormat() {
function TopoJsonFormat () {
this.features = [];
}
TopoJsonFormat.prototype = new Pg('topojson');
TopoJsonFormat.prototype.getQuery = function(sql, options) {
return geojson.prototype.getQuery(sql, options) + ' where ' + options.gn + ' is not null';
TopoJsonFormat.prototype.getQuery = function (sql, options) {
return geojson.prototype.getQuery(sql, options) + ' where ' + options.gn + ' is not null';
};
TopoJsonFormat.prototype.handleQueryRow = function(row) {
TopoJsonFormat.prototype.handleQueryRow = function (row) {
var _geojson = {
type: "Feature"
type: 'Feature'
};
_geojson.geometry = JSON.parse(row[this.opts.gn]);
delete row[this.opts.gn];
@ -28,20 +26,20 @@ TopoJsonFormat.prototype.handleQueryRow = function(row) {
this.features.push(_geojson);
};
TopoJsonFormat.prototype.handleQueryEnd = function() {
TopoJsonFormat.prototype.handleQueryEnd = function () {
if (this.error) {
this.callback(this.error);
return;
}
if ( this.opts.profiler ) {
if (this.opts.profiler) {
this.opts.profiler.done('gotRows');
}
var topology = TopoJSON.topology(this.features, {
"quantization": 1e4,
"force-clockwise": true,
"property-filter": function(d) {
quantization: 1e4,
'force-clockwise': true,
'property-filter': function (d) {
return d;
}
});
@ -55,19 +53,20 @@ TopoJsonFormat.prototype.handleQueryEnd = function() {
var immediately = global.setImmediate || process.nextTick;
function streamObjectSubtree(obj, key, done) {
/* eslint-disable */
function streamObjectSubtree (obj, key, done) {
buffer += '"' + key + '":';
var isObject = _.isObject(obj[key]),
isArray = _.isArray(obj[key]),
isIterable = isArray || isObject;
var isObject = _.isObject(obj[key]);
var isArray = _.isArray(obj[key]);
var isIterable = isArray || isObject;
if (isIterable) {
buffer += isArray ? '[' : '{';
var subtreeKeys = Object.keys(obj[key]);
var pos = 0;
function streamNext() {
immediately(function() {
function streamNext () {
immediately(function () {
var subtreeKey = subtreeKeys.shift();
if (!isArray) {
buffer += '"' + subtreeKey + '":';
@ -97,16 +96,17 @@ TopoJsonFormat.prototype.handleQueryEnd = function() {
done();
}
}
/* eslint-enable */
if (jsonpCallback) {
buffer += jsonpCallback + '(';
}
buffer += '{';
var keys = Object.keys(topology);
function sendResponse() {
function sendResponse () {
immediately(function () {
var key = keys.shift();
function done() {
function done () {
if (keys.length > 0) {
delete topology[key];
buffer += ',';
@ -129,12 +129,10 @@ TopoJsonFormat.prototype.handleQueryEnd = function() {
this.callback();
};
TopoJsonFormat.prototype.cancel = function() {
TopoJsonFormat.prototype.cancel = function () {
if (this.queryCanceller) {
this.queryCanceller.call();
}
};
module.exports = TopoJsonFormat;
// jshint ignore:end

View File

@ -1,34 +1,34 @@
'use strict';
var step = require('step'),
fs = require('fs');
var step = require('step');
var fs = require('fs');
function HealthCheck(disableFile) {
function HealthCheck (disableFile) {
this.disableFile = disableFile;
}
module.exports = HealthCheck;
HealthCheck.prototype.check = function(callback) {
HealthCheck.prototype.check = function (callback) {
var self = this;
step(
function getManualDisable() {
fs.readFile(self.disableFile, this);
function getManualDisable () {
fs.readFile(self.disableFile, this);
},
function handleDisabledFile(err, data) {
var next = this;
if (err) {
return next();
}
if (!!data) {
err = new Error(data);
err.http_status = 503;
throw err;
}
function handleDisabledFile (err, data) {
var next = this;
if (err) {
return next();
}
if (data) {
err = new Error(data);
err.http_status = 503;
throw err;
}
},
function handleResult(err) {
callback(err);
function handleResult (err) {
callback(err);
}
);
};

View File

@ -6,283 +6,283 @@ var _ = require('underscore');
// Used `^([A-Z0-9]*)\s(.*)` -> `"$1": "$2"` to create the JS object
var codeToCondition = {
// Class 00 — Successful Completion
"00000": "successful_completion",
// Class 01 — Warning
"01000": "warning",
"0100C": "dynamic_result_sets_returned",
"01008": "implicit_zero_bit_padding",
"01003": "null_value_eliminated_in_set_function",
"01007": "privilege_not_granted",
"01006": "privilege_not_revoked",
"01004": "string_data_right_truncation",
"01P01": "deprecated_feature",
// Class 02 — No Data (this is also a warning class per the SQL standard)
"02000": "no_data",
"02001": "no_additional_dynamic_result_sets_returned",
// Class 03 — SQL Statement Not Yet Complete
"03000": "sql_statement_not_yet_complete",
// Class 08 — Connection Exception
"08000": "connection_exception",
"08003": "connection_does_not_exist",
"08006": "connection_failure",
"08001": "sqlclient_unable_to_establish_sqlconnection",
"08004": "sqlserver_rejected_establishment_of_sqlconnection",
"08007": "transaction_resolution_unknown",
"08P01": "protocol_violation",
// Class 09 — Triggered Action Exception
"09000": "triggered_action_exception",
// Class 0A — Feature Not Supported
"0A000": "feature_not_supported",
// Class 0B — Invalid Transaction Initiation
"0B000": "invalid_transaction_initiation",
// Class 0F — Locator Exception
"0F000": "locator_exception",
"0F001": "invalid_locator_specification",
// Class 0L — Invalid Grantor
"0L000": "invalid_grantor",
"0LP01": "invalid_grant_operation",
// Class 0P — Invalid Role Specification
"0P000": "invalid_role_specification",
// Class 0Z — Diagnostics Exception
"0Z000": "diagnostics_exception",
"0Z002": "stacked_diagnostics_accessed_without_active_handler",
// Class 20 — Case Not Found
"20000": "case_not_found",
// Class 21 — Cardinality Violation
"21000": "cardinality_violation",
// Class 22 — Data Exception
"22000": "data_exception",
"2202E": "array_subscript_error",
"22021": "character_not_in_repertoire",
"22008": "datetime_field_overflow",
"22012": "division_by_zero",
"22005": "error_in_assignment",
"2200B": "escape_character_conflict",
"22022": "indicator_overflow",
"22015": "interval_field_overflow",
"2201E": "invalid_argument_for_logarithm",
"22014": "invalid_argument_for_ntile_function",
"22016": "invalid_argument_for_nth_value_function",
"2201F": "invalid_argument_for_power_function",
"2201G": "invalid_argument_for_width_bucket_function",
"22018": "invalid_character_value_for_cast",
"22007": "invalid_datetime_format",
"22019": "invalid_escape_character",
"2200D": "invalid_escape_octet",
"22025": "invalid_escape_sequence",
"22P06": "nonstandard_use_of_escape_character",
"22010": "invalid_indicator_parameter_value",
"22023": "invalid_parameter_value",
"2201B": "invalid_regular_expression",
"2201W": "invalid_row_count_in_limit_clause",
"2201X": "invalid_row_count_in_result_offset_clause",
"22009": "invalid_time_zone_displacement_value",
"2200C": "invalid_use_of_escape_character",
"2200G": "most_specific_type_mismatch",
"22004": "null_value_not_allowed",
"22002": "null_value_no_indicator_parameter",
"22003": "numeric_value_out_of_range",
"22026": "string_data_length_mismatch",
"22001": "string_data_right_truncation",
"22011": "substring_error",
"22027": "trim_error",
"22024": "unterminated_c_string",
"2200F": "zero_length_character_string",
"22P01": "floating_point_exception",
"22P02": "invalid_text_representation",
"22P03": "invalid_binary_representation",
"22P04": "bad_copy_file_format",
"22P05": "untranslatable_character",
"2200L": "not_an_xml_document",
"2200M": "invalid_xml_document",
"2200N": "invalid_xml_content",
"2200S": "invalid_xml_comment",
"2200T": "invalid_xml_processing_instruction",
// Class 23 — Integrity Constraint Violation
"23000": "integrity_constraint_violation",
"23001": "restrict_violation",
"23502": "not_null_violation",
"23503": "foreign_key_violation",
"23505": "unique_violation",
"23514": "check_violation",
"23P01": "exclusion_violation",
// Class 24 — Invalid Cursor State
"24000": "invalid_cursor_state",
// Class 25 — Invalid Transaction State
"25000": "invalid_transaction_state",
"25001": "active_sql_transaction",
"25002": "branch_transaction_already_active",
"25008": "held_cursor_requires_same_isolation_level",
"25003": "inappropriate_access_mode_for_branch_transaction",
"25004": "inappropriate_isolation_level_for_branch_transaction",
"25005": "no_active_sql_transaction_for_branch_transaction",
"25006": "read_only_sql_transaction",
"25007": "schema_and_data_statement_mixing_not_supported",
"25P01": "no_active_sql_transaction",
"25P02": "in_failed_sql_transaction",
"25P03": "idle_in_transaction_session_timeout",
// Class 26 — Invalid SQL Statement Name
"26000": "invalid_sql_statement_name",
// Class 27 — Triggered Data Change Violation
"27000": "triggered_data_change_violation",
// Class 28 — Invalid Authorization Specification
"28000": "invalid_authorization_specification",
"28P01": "invalid_password",
// Class 2B — Dependent Privilege Descriptors Still Exist
"2B000": "dependent_privilege_descriptors_still_exist",
"2BP01": "dependent_objects_still_exist",
// Class 2D — Invalid Transaction Termination
"2D000": "invalid_transaction_termination",
// Class 2F — SQL Routine Exception
"2F000": "sql_routine_exception",
"2F005": "function_executed_no_return_statement",
"2F002": "modifying_sql_data_not_permitted",
"2F003": "prohibited_sql_statement_attempted",
"2F004": "reading_sql_data_not_permitted",
// Class 34 — Invalid Cursor Name
"34000": "invalid_cursor_name",
// Class 38 — External Routine Exception
"38000": "external_routine_exception",
"38001": "containing_sql_not_permitted",
"38002": "modifying_sql_data_not_permitted",
"38003": "prohibited_sql_statement_attempted",
"38004": "reading_sql_data_not_permitted",
// Class 39 — External Routine Invocation Exception
"39000": "external_routine_invocation_exception",
"39001": "invalid_sqlstate_returned",
"39004": "null_value_not_allowed",
"39P01": "trigger_protocol_violated",
"39P02": "srf_protocol_violated",
"39P03": "event_trigger_protocol_violated",
// Class 3B — Savepoint Exception
"3B000": "savepoint_exception",
"3B001": "invalid_savepoint_specification",
// Class 3D — Invalid Catalog Name
"3D000": "invalid_catalog_name",
// Class 3F — Invalid Schema Name
"3F000": "invalid_schema_name",
// Class 40 — Transaction Rollback
"40000": "transaction_rollback",
"40002": "transaction_integrity_constraint_violation",
"40001": "serialization_failure",
"40003": "statement_completion_unknown",
"40P01": "deadlock_detected",
// Class 42 — Syntax Error or Access Rule Violation
"42000": "syntax_error_or_access_rule_violation",
"42601": "syntax_error",
"42501": "insufficient_privilege",
"42846": "cannot_coerce",
"42803": "grouping_error",
"42P20": "windowing_error",
"42P19": "invalid_recursion",
"42830": "invalid_foreign_key",
"42602": "invalid_name",
"42622": "name_too_long",
"42939": "reserved_name",
"42804": "datatype_mismatch",
"42P18": "indeterminate_datatype",
"42P21": "collation_mismatch",
"42P22": "indeterminate_collation",
"42809": "wrong_object_type",
"428C9": "generated_always",
"42703": "undefined_column",
"42883": "undefined_function",
"42P01": "undefined_table",
"42P02": "undefined_parameter",
"42704": "undefined_object",
"42701": "duplicate_column",
"42P03": "duplicate_cursor",
"42P04": "duplicate_database",
"42723": "duplicate_function",
"42P05": "duplicate_prepared_statement",
"42P06": "duplicate_schema",
"42P07": "duplicate_table",
"42712": "duplicate_alias",
"42710": "duplicate_object",
"42702": "ambiguous_column",
"42725": "ambiguous_function",
"42P08": "ambiguous_parameter",
"42P09": "ambiguous_alias",
"42P10": "invalid_column_reference",
"42611": "invalid_column_definition",
"42P11": "invalid_cursor_definition",
"42P12": "invalid_database_definition",
"42P13": "invalid_function_definition",
"42P14": "invalid_prepared_statement_definition",
"42P15": "invalid_schema_definition",
"42P16": "invalid_table_definition",
"42P17": "invalid_object_definition",
// Class 44 — WITH CHECK OPTION Violation
"44000": "with_check_option_violation",
// Class 53 — Insufficient Resources
"53000": "insufficient_resources",
"53100": "disk_full",
"53200": "out_of_memory",
"53300": "too_many_connections",
"53400": "configuration_limit_exceeded",
// Class 54 — Program Limit Exceeded
"54000": "program_limit_exceeded",
"54001": "statement_too_complex",
"54011": "too_many_columns",
"54023": "too_many_arguments",
// Class 55 — Object Not In Prerequisite State
"55000": "object_not_in_prerequisite_state",
"55006": "object_in_use",
"55P02": "cant_change_runtime_param",
"55P03": "lock_not_available",
// Class 57 — Operator Intervention
"57000": "operator_intervention",
"57014": "query_canceled",
"57P01": "admin_shutdown",
"57P02": "crash_shutdown",
"57P03": "cannot_connect_now",
"57P04": "database_dropped",
// Class 58 — System Error (errors external to PostgreSQL itself)
"58000": "system_error",
"58030": "io_error",
"58P01": "undefined_file",
"58P02": "duplicate_file",
// Class F0 — Configuration File Error
"F0000": "config_file_error",
"F0001": "lock_file_exists",
// Class HV — Foreign Data Wrapper Error (SQL/MED)
"HV000": "fdw_error",
"HV005": "fdw_column_name_not_found",
"HV002": "fdw_dynamic_parameter_value_needed",
"HV010": "fdw_function_sequence_error",
"HV021": "fdw_inconsistent_descriptor_information",
"HV024": "fdw_invalid_attribute_value",
"HV007": "fdw_invalid_column_name",
"HV008": "fdw_invalid_column_number",
"HV004": "fdw_invalid_data_type",
"HV006": "fdw_invalid_data_type_descriptors",
"HV091": "fdw_invalid_descriptor_field_identifier",
"HV00B": "fdw_invalid_handle",
"HV00C": "fdw_invalid_option_index",
"HV00D": "fdw_invalid_option_name",
"HV090": "fdw_invalid_string_length_or_buffer_length",
"HV00A": "fdw_invalid_string_format",
"HV009": "fdw_invalid_use_of_null_pointer",
"HV014": "fdw_too_many_handles",
"HV001": "fdw_out_of_memory",
"HV00P": "fdw_no_schemas",
"HV00J": "fdw_option_name_not_found",
"HV00K": "fdw_reply_handle",
"HV00Q": "fdw_schema_not_found",
"HV00R": "fdw_table_not_found",
"HV00L": "fdw_unable_to_create_execution",
"HV00M": "fdw_unable_to_create_reply",
"HV00N": "fdw_unable_to_establish_connection",
// Class P0 — PL/pgSQL Error
"P0000": "plpgsql_error",
"P0001": "raise_exception",
"P0002": "no_data_found",
"P0003": "too_many_rows",
"P0004": "assert_failure",
// Class XX — Internal Error
"XX000": "internal_error",
"XX001": "data_corrupted",
"XX002": "index_corrupted"
'00000': 'successful_completion',
// Class 01 — Warning
'01000': 'warning',
'0100C': 'dynamic_result_sets_returned',
'01008': 'implicit_zero_bit_padding',
'01003': 'null_value_eliminated_in_set_function',
'01007': 'privilege_not_granted',
'01006': 'privilege_not_revoked',
'01004': 'string_data_right_truncation',
'01P01': 'deprecated_feature',
// Class 02 — No Data (this is also a warning class per the SQL standard)
'02000': 'no_data',
'02001': 'no_additional_dynamic_result_sets_returned',
// Class 03 — SQL Statement Not Yet Complete
'03000': 'sql_statement_not_yet_complete',
// Class 08 — Connection Exception
'08000': 'connection_exception',
'08003': 'connection_does_not_exist',
'08006': 'connection_failure',
'08001': 'sqlclient_unable_to_establish_sqlconnection',
'08004': 'sqlserver_rejected_establishment_of_sqlconnection',
'08007': 'transaction_resolution_unknown',
'08P01': 'protocol_violation',
// Class 09 — Triggered Action Exception
'09000': 'triggered_action_exception',
// Class 0A — Feature Not Supported
'0A000': 'feature_not_supported',
// Class 0B — Invalid Transaction Initiation
'0B000': 'invalid_transaction_initiation',
// Class 0F — Locator Exception
'0F000': 'locator_exception',
'0F001': 'invalid_locator_specification',
// Class 0L — Invalid Grantor
'0L000': 'invalid_grantor',
'0LP01': 'invalid_grant_operation',
// Class 0P — Invalid Role Specification
'0P000': 'invalid_role_specification',
// Class 0Z — Diagnostics Exception
'0Z000': 'diagnostics_exception',
'0Z002': 'stacked_diagnostics_accessed_without_active_handler',
// Class 20 — Case Not Found
20000: 'case_not_found',
// Class 21 — Cardinality Violation
21000: 'cardinality_violation',
// Class 22 — Data Exception
22000: 'data_exception',
'2202E': 'array_subscript_error',
22021: 'character_not_in_repertoire',
22008: 'datetime_field_overflow',
22012: 'division_by_zero',
22005: 'error_in_assignment',
'2200B': 'escape_character_conflict',
22022: 'indicator_overflow',
22015: 'interval_field_overflow',
'2201E': 'invalid_argument_for_logarithm',
22014: 'invalid_argument_for_ntile_function',
22016: 'invalid_argument_for_nth_value_function',
'2201F': 'invalid_argument_for_power_function',
'2201G': 'invalid_argument_for_width_bucket_function',
22018: 'invalid_character_value_for_cast',
22007: 'invalid_datetime_format',
22019: 'invalid_escape_character',
'2200D': 'invalid_escape_octet',
22025: 'invalid_escape_sequence',
'22P06': 'nonstandard_use_of_escape_character',
22010: 'invalid_indicator_parameter_value',
22023: 'invalid_parameter_value',
'2201B': 'invalid_regular_expression',
'2201W': 'invalid_row_count_in_limit_clause',
'2201X': 'invalid_row_count_in_result_offset_clause',
22009: 'invalid_time_zone_displacement_value',
'2200C': 'invalid_use_of_escape_character',
'2200G': 'most_specific_type_mismatch',
22004: 'null_value_not_allowed',
22002: 'null_value_no_indicator_parameter',
22003: 'numeric_value_out_of_range',
22026: 'string_data_length_mismatch',
22001: 'string_data_right_truncation',
22011: 'substring_error',
22027: 'trim_error',
22024: 'unterminated_c_string',
'2200F': 'zero_length_character_string',
'22P01': 'floating_point_exception',
'22P02': 'invalid_text_representation',
'22P03': 'invalid_binary_representation',
'22P04': 'bad_copy_file_format',
'22P05': 'untranslatable_character',
'2200L': 'not_an_xml_document',
'2200M': 'invalid_xml_document',
'2200N': 'invalid_xml_content',
'2200S': 'invalid_xml_comment',
'2200T': 'invalid_xml_processing_instruction',
// Class 23 — Integrity Constraint Violation
23000: 'integrity_constraint_violation',
23001: 'restrict_violation',
23502: 'not_null_violation',
23503: 'foreign_key_violation',
23505: 'unique_violation',
23514: 'check_violation',
'23P01': 'exclusion_violation',
// Class 24 — Invalid Cursor State
24000: 'invalid_cursor_state',
// Class 25 — Invalid Transaction State
25000: 'invalid_transaction_state',
25001: 'active_sql_transaction',
25002: 'branch_transaction_already_active',
25008: 'held_cursor_requires_same_isolation_level',
25003: 'inappropriate_access_mode_for_branch_transaction',
25004: 'inappropriate_isolation_level_for_branch_transaction',
25005: 'no_active_sql_transaction_for_branch_transaction',
25006: 'read_only_sql_transaction',
25007: 'schema_and_data_statement_mixing_not_supported',
'25P01': 'no_active_sql_transaction',
'25P02': 'in_failed_sql_transaction',
'25P03': 'idle_in_transaction_session_timeout',
// Class 26 — Invalid SQL Statement Name
26000: 'invalid_sql_statement_name',
// Class 27 — Triggered Data Change Violation
27000: 'triggered_data_change_violation',
// Class 28 — Invalid Authorization Specification
28000: 'invalid_authorization_specification',
'28P01': 'invalid_password',
// Class 2B — Dependent Privilege Descriptors Still Exist
'2B000': 'dependent_privilege_descriptors_still_exist',
'2BP01': 'dependent_objects_still_exist',
// Class 2D — Invalid Transaction Termination
'2D000': 'invalid_transaction_termination',
// Class 2F — SQL Routine Exception
'2F000': 'sql_routine_exception',
'2F005': 'function_executed_no_return_statement',
'2F002': 'modifying_sql_data_not_permitted',
'2F003': 'prohibited_sql_statement_attempted',
'2F004': 'reading_sql_data_not_permitted',
// Class 34 — Invalid Cursor Name
34000: 'invalid_cursor_name',
// Class 38 — External Routine Exception
38000: 'external_routine_exception',
38001: 'containing_sql_not_permitted',
38002: 'modifying_sql_data_not_permitted',
38003: 'prohibited_sql_statement_attempted',
38004: 'reading_sql_data_not_permitted',
// Class 39 — External Routine Invocation Exception
39000: 'external_routine_invocation_exception',
39001: 'invalid_sqlstate_returned',
39004: 'null_value_not_allowed',
'39P01': 'trigger_protocol_violated',
'39P02': 'srf_protocol_violated',
'39P03': 'event_trigger_protocol_violated',
// Class 3B — Savepoint Exception
'3B000': 'savepoint_exception',
'3B001': 'invalid_savepoint_specification',
// Class 3D — Invalid Catalog Name
'3D000': 'invalid_catalog_name',
// Class 3F — Invalid Schema Name
'3F000': 'invalid_schema_name',
// Class 40 — Transaction Rollback
40000: 'transaction_rollback',
40002: 'transaction_integrity_constraint_violation',
40001: 'serialization_failure',
40003: 'statement_completion_unknown',
'40P01': 'deadlock_detected',
// Class 42 — Syntax Error or Access Rule Violation
42000: 'syntax_error_or_access_rule_violation',
42601: 'syntax_error',
42501: 'insufficient_privilege',
42846: 'cannot_coerce',
42803: 'grouping_error',
'42P20': 'windowing_error',
'42P19': 'invalid_recursion',
42830: 'invalid_foreign_key',
42602: 'invalid_name',
42622: 'name_too_long',
42939: 'reserved_name',
42804: 'datatype_mismatch',
'42P18': 'indeterminate_datatype',
'42P21': 'collation_mismatch',
'42P22': 'indeterminate_collation',
42809: 'wrong_object_type',
'428C9': 'generated_always',
42703: 'undefined_column',
42883: 'undefined_function',
'42P01': 'undefined_table',
'42P02': 'undefined_parameter',
42704: 'undefined_object',
42701: 'duplicate_column',
'42P03': 'duplicate_cursor',
'42P04': 'duplicate_database',
42723: 'duplicate_function',
'42P05': 'duplicate_prepared_statement',
'42P06': 'duplicate_schema',
'42P07': 'duplicate_table',
42712: 'duplicate_alias',
42710: 'duplicate_object',
42702: 'ambiguous_column',
42725: 'ambiguous_function',
'42P08': 'ambiguous_parameter',
'42P09': 'ambiguous_alias',
'42P10': 'invalid_column_reference',
42611: 'invalid_column_definition',
'42P11': 'invalid_cursor_definition',
'42P12': 'invalid_database_definition',
'42P13': 'invalid_function_definition',
'42P14': 'invalid_prepared_statement_definition',
'42P15': 'invalid_schema_definition',
'42P16': 'invalid_table_definition',
'42P17': 'invalid_object_definition',
// Class 44 — WITH CHECK OPTION Violation
44000: 'with_check_option_violation',
// Class 53 — Insufficient Resources
53000: 'insufficient_resources',
53100: 'disk_full',
53200: 'out_of_memory',
53300: 'too_many_connections',
53400: 'configuration_limit_exceeded',
// Class 54 — Program Limit Exceeded
54000: 'program_limit_exceeded',
54001: 'statement_too_complex',
54011: 'too_many_columns',
54023: 'too_many_arguments',
// Class 55 — Object Not In Prerequisite State
55000: 'object_not_in_prerequisite_state',
55006: 'object_in_use',
'55P02': 'cant_change_runtime_param',
'55P03': 'lock_not_available',
// Class 57 — Operator Intervention
57000: 'operator_intervention',
57014: 'query_canceled',
'57P01': 'admin_shutdown',
'57P02': 'crash_shutdown',
'57P03': 'cannot_connect_now',
'57P04': 'database_dropped',
// Class 58 — System Error (errors external to PostgreSQL itself)
58000: 'system_error',
58030: 'io_error',
'58P01': 'undefined_file',
'58P02': 'duplicate_file',
// Class F0 — Configuration File Error
F0000: 'config_file_error',
F0001: 'lock_file_exists',
// Class HV — Foreign Data Wrapper Error (SQL/MED)
HV000: 'fdw_error',
HV005: 'fdw_column_name_not_found',
HV002: 'fdw_dynamic_parameter_value_needed',
HV010: 'fdw_function_sequence_error',
HV021: 'fdw_inconsistent_descriptor_information',
HV024: 'fdw_invalid_attribute_value',
HV007: 'fdw_invalid_column_name',
HV008: 'fdw_invalid_column_number',
HV004: 'fdw_invalid_data_type',
HV006: 'fdw_invalid_data_type_descriptors',
HV091: 'fdw_invalid_descriptor_field_identifier',
HV00B: 'fdw_invalid_handle',
HV00C: 'fdw_invalid_option_index',
HV00D: 'fdw_invalid_option_name',
HV090: 'fdw_invalid_string_length_or_buffer_length',
HV00A: 'fdw_invalid_string_format',
HV009: 'fdw_invalid_use_of_null_pointer',
HV014: 'fdw_too_many_handles',
HV001: 'fdw_out_of_memory',
HV00P: 'fdw_no_schemas',
HV00J: 'fdw_option_name_not_found',
HV00K: 'fdw_reply_handle',
HV00Q: 'fdw_schema_not_found',
HV00R: 'fdw_table_not_found',
HV00L: 'fdw_unable_to_create_execution',
HV00M: 'fdw_unable_to_create_reply',
HV00N: 'fdw_unable_to_establish_connection',
// Class P0 — PL/pgSQL Error
P0000: 'plpgsql_error',
P0001: 'raise_exception',
P0002: 'no_data_found',
P0003: 'too_many_rows',
P0004: 'assert_failure',
// Class XX — Internal Error
XX000: 'internal_error',
XX001: 'data_corrupted',
XX002: 'index_corrupted'
};
module.exports.codeToCondition = codeToCondition;

View File

@ -11,7 +11,7 @@ module.exports = function getServerOptions () {
// In case the path has a :user param the username will be the one specified in the URL,
// otherwise it will fallback to extract the username from the host header.
'/api/:version',
'/user/:user/api/:version',
'/user/:user/api/:version'
],
// Optional: attach middlewares at the begining of the router
// to perform custom operations.

View File

@ -15,7 +15,6 @@ process.env.PGAPPNAME = process.env.PGAPPNAME || 'cartodb_sqlapi';
// override Date.toJSON
require('./utils/date-to-json');
// jshint maxcomplexity:9
module.exports = function createServer (statsClient) {
const options = getServerOptions();
const app = express();
@ -30,7 +29,7 @@ module.exports = function createServer (statsClient) {
const metadataBackend = cartodbRedis({ pool: redisPool });
// Set default configuration
global.settings.db_pubuser = global.settings.db_pubuser || "publicuser";
global.settings.db_pubuser = global.settings.db_pubuser || 'publicuser';
global.settings.bufferedRows = global.settings.bufferedRows || 1000;
global.settings.ratelimits = Object.assign(
{
@ -54,7 +53,7 @@ module.exports = function createServer (statsClient) {
}
app.enable('jsonp callback');
app.set("trust proxy", true);
app.set('trust proxy', true);
app.disable('x-powered-by');
app.disable('etag');

View File

@ -11,7 +11,7 @@ module.exports = function ErrorHandlerFactory (err) {
}
};
function isTimeoutError(err) {
function isTimeoutError (err) {
return err.message && (
err.message.indexOf('statement timeout') > -1 ||
err.message.indexOf('RuntimeError: Execution of function interrupted by signal') > -1 ||
@ -19,23 +19,23 @@ function isTimeoutError(err) {
);
}
function createTimeoutError() {
function createTimeoutError () {
return new ErrorHandler({
message: 'You are over platform\'s limits: SQL query timeout error.' +
' Refactor your query before running again or contact CARTO support for more details.',
context: 'limit',
detail: 'datasource',
http_status: 429
httpStatus: 429
});
}
function createGenericError(err) {
function createGenericError (err) {
return new ErrorHandler({
message: err.message,
context: err.context,
detail: err.detail,
hint: err.hint,
http_status: err.http_status,
httpStatus: err.http_status,
name: codeToCondition[err.code] || err.name
});
}

View File

@ -1,10 +1,10 @@
'use strict';
class ErrorHandler extends Error {
constructor({ message, context, detail, hint, http_status, name }) {
constructor ({ message, context, detail, hint, httpStatus, name }) {
super(message);
this.http_status = this.getHttpStatus(http_status);
this.http_status = this.getHttpStatus(httpStatus);
this.context = context;
this.detail = detail;
this.hint = hint;
@ -14,7 +14,7 @@ class ErrorHandler extends Error {
}
}
getResponse() {
getResponse () {
return {
error: [this.message],
context: this.context,
@ -23,14 +23,13 @@ class ErrorHandler extends Error {
};
}
getHttpStatus(http_status = 400) {
getHttpStatus (httpStatus = 400) {
if (this.message.includes('permission denied')) {
return 403;
}
return http_status;
return httpStatus;
}
}
module.exports = ErrorHandler;

View File

@ -7,7 +7,7 @@ class Logger {
const env = process.env.NODE_ENV;
const logLevel = process.env.LOG_LEVEL;
const stream = {
level: logLevel ? logLevel : (env === 'test') ? 'fatal' : (env === 'development') ? 'debug' : 'info'
level: logLevel || ((env === 'test') ? 'fatal' : (env === 'development') ? 'debug' : 'info')
};
if (path) {

View File

@ -17,7 +17,7 @@ const FORBIDDEN_ENTITIES = {
};
const Validator = {
validate(affectedTables, authorizationLevel) {
validate (affectedTables, authorizationLevel) {
let hardValidationResult = true;
let softValidationResult = true;
@ -34,8 +34,8 @@ const Validator = {
return hardValidationResult && softValidationResult;
},
hardValidation(tables) {
for (let table of tables) {
hardValidation (tables) {
for (const table of tables) {
if (FORBIDDEN_ENTITIES[table.schema_name] && FORBIDDEN_ENTITIES[table.schema_name].length &&
(
FORBIDDEN_ENTITIES[table.schema_name][0] === '*' ||
@ -49,8 +49,8 @@ const Validator = {
return true;
},
softValidation(tables) {
for (let table of tables) {
softValidation (tables) {
for (const table of tables) {
if (table.table_name.match(/\bpg_/)) {
return false;
}

View File

@ -3,7 +3,7 @@
const { getFormatFromCopyQuery } = require('../utils/query-info');
module.exports = class StreamCopyMetrics {
constructor(logger, type, sql, user, isGzip = false) {
constructor (logger, type, sql, user, isGzip = false) {
this.logger = logger;
this.type = type;
@ -25,15 +25,15 @@ module.exports = class StreamCopyMetrics {
this.ended = false;
}
addSize(size) {
addSize (size) {
this.size += size;
}
addGzipSize(size) {
addGzipSize (size) {
this.gzipSize += size;
}
end(rows = null, error = null) {
end (rows = null, error = null) {
if (this.ended) {
return;
}
@ -58,8 +58,8 @@ module.exports = class StreamCopyMetrics {
);
}
_log(timestamp, gzipSize = null, errorMessage = null) {
let logData = {
_log (timestamp, gzipSize = null, errorMessage = null) {
const logData = {
type: this.type,
format: this.format,
size: this.size,

View File

@ -13,7 +13,7 @@ const terminateQuery = pid => `SELECT pg_terminate_backend(${pid}) as terminated
const timeoutQuery = timeout => `SET statement_timeout=${timeout}`;
module.exports = class StreamCopy {
constructor(sql, userDbParams, logger) {
constructor (sql, userDbParams, logger) {
this.dbParams = Object.assign({}, userDbParams, {
port: global.settings.db_batch_port || userDbParams.port
});
@ -23,15 +23,15 @@ module.exports = class StreamCopy {
this.logger = logger;
}
static get ACTION_TO() {
static get ACTION_TO () {
return ACTION_TO;
}
static get ACTION_FROM() {
static get ACTION_FROM () {
return ACTION_FROM;
}
getPGStream(action, callback) {
getPGStream (action, callback) {
const pg = new PSQL(this.dbParams);
pg.connect((err, client, done) => {
@ -56,7 +56,7 @@ module.exports = class StreamCopy {
pgstream.on('warning', (msg) => this.logger.warn(msg));
} else if (action === ACTION_FROM) {
pgstream.on('finish', () => done());
pgstream.on('error', err => client.connection.sendCopyFail(err.message));
pgstream.on('error', err => client.connection.sendCopyFail(err.message));
}
pgstream.on('error', err => done(err));
@ -66,7 +66,7 @@ module.exports = class StreamCopy {
});
}
getRowCount() {
getRowCount () {
return this.stream.rowCount;
}

View File

@ -12,7 +12,7 @@ module.exports = class Throttler extends Transform {
this.minimunBytesPerSampleThreshold = global.settings.copy_from_minimum_input_speed || 0;
this.byteCount = 0;
this._interval = setInterval(this._updateMetrics.bind(this), this.sampleSeconds*1000);
this._interval = setInterval(this._updateMetrics.bind(this), this.sampleSeconds * 1000);
}
_updateMetrics () {

View File

@ -1,13 +1,13 @@
'use strict';
function isApiKeyFound(apikey) {
function isApiKeyFound (apikey) {
return apikey.type !== null &&
apikey.user !== null &&
apikey.databasePassword !== null &&
apikey.databaseRole !== null;
}
function UserDatabaseService(metadataBackend) {
function UserDatabaseService (metadataBackend) {
this.metadataBackend = metadataBackend;
}
@ -15,7 +15,7 @@ function errorUserNotFoundMessageTemplate (user) {
return `Sorry, we can't find CARTO user '${user}'. Please check that you have entered the correct domain.`;
}
function isOauthAuthorization({ apikeyToken, authorizationLevel }) {
function isOauthAuthorization ({ apikeyToken, authorizationLevel }) {
return (authorizationLevel === 'master') && !apikeyToken;
}
@ -41,11 +41,10 @@ UserDatabaseService.prototype.getConnectionParams = function (username, apikeyTo
const commonDBConfiguration = {
port: global.settings.db_port,
host: dbParams.dbhost,
dbname: dbParams.dbname,
dbname: dbParams.dbname
};
this.metadataBackend.getMasterApikey(username, (err, masterApikey) => {
if (err) {
err.http_status = 404;
err.message = errorUserNotFoundMessageTemplate(username);
@ -66,9 +65,9 @@ UserDatabaseService.prototype.getConnectionParams = function (username, apikeyTo
user: masterApikey.databaseRole,
pass: masterApikey.databasePassword
},
commonDBConfiguration);
commonDBConfiguration);
if (isOauthAuthorization({ apikeyToken, authorizationLevel})) {
if (isOauthAuthorization({ apikeyToken, authorizationLevel })) {
return callback(null, masterDBConfiguration, masterDBConfiguration);
}
@ -96,7 +95,7 @@ UserDatabaseService.prototype.getConnectionParams = function (username, apikeyTo
user: apikey.databaseRole,
pass: apikey.databasePassword
},
commonDBConfiguration);
commonDBConfiguration);
callback(null, DBConfiguration, masterDBConfiguration);
});

View File

@ -6,20 +6,20 @@
* @param {object} options
*/
class UserLimits {
constructor(metadataBackend, options = {}) {
constructor (metadataBackend, options = {}) {
this.metadataBackend = metadataBackend;
this.options = options;
this.preprareRateLimit();
}
preprareRateLimit() {
preprareRateLimit () {
if (this.options.limits.rateLimitsEnabled) {
this.metadataBackend.loadRateLimitsScript();
}
}
getRateLimit(user, endpointGroup, callback) {
getRateLimit (user, endpointGroup, callback) {
this.metadataBackend.getRateLimit(user, 'sql', endpointGroup, callback);
}
}

View File

@ -17,30 +17,28 @@ module.exports = {
* @param config Configuration for StatsD, if undefined it will return an stub
* @returns {StatsD|Object}
*/
getInstance: function(config) {
getInstance: function (config) {
if (!this.instance) {
var instance;
if (config) {
instance = new StatsD(config);
instance.last_error = { msg: '', count: 0 };
instance.socket.on('error', function (err) {
var last_err = instance.last_error;
var last_msg = last_err.msg;
var this_msg = '' + err;
if (this_msg !== last_msg) {
debug("statsd client socket error: " + err);
var lastErr = instance.last_error;
var lastMsg = lastErr.msg;
var thisMsg = '' + err;
if (thisMsg !== lastMsg) {
debug('statsd client socket error: ' + err);
instance.last_error.count = 1;
instance.last_error.msg = this_msg;
instance.last_error.msg = thisMsg;
} else {
++last_err.count;
if (!last_err.interval) {
++lastErr.count;
if (!lastErr.interval) {
instance.last_error.interval = setInterval(function () {
var count = instance.last_error.count;
if (count > 1) {
debug("last statsd client socket error repeated " + count + " times");
debug('last statsd client socket error repeated ' + count + ' times');
instance.last_error.count = 1;
clearInterval(instance.last_error.interval);
instance.last_error.interval = null;

View File

@ -5,51 +5,51 @@ var Profiler = require('step-profiler');
/**
* Proxy to encapsulate node-step-profiler module so there is no need to check if there is an instance
*/
function ProfilerProxy(opts) {
function ProfilerProxy (opts) {
this.profile = !!opts.profile;
this.profiler = null;
if (!!opts.profile) {
this.profiler = new Profiler({statsd_client: opts.statsd_client});
if (opts.profile) {
this.profiler = new Profiler({ statsd_client: opts.statsd_client });
}
}
ProfilerProxy.prototype.done = function(what) {
ProfilerProxy.prototype.done = function (what) {
if (this.profile) {
this.profiler.done(what);
}
};
ProfilerProxy.prototype.end = function() {
ProfilerProxy.prototype.end = function () {
if (this.profile) {
this.profiler.end();
}
};
ProfilerProxy.prototype.start = function(what) {
ProfilerProxy.prototype.start = function (what) {
if (this.profile) {
this.profiler.start(what);
}
};
ProfilerProxy.prototype.add = function(what) {
ProfilerProxy.prototype.add = function (what) {
if (this.profile) {
this.profiler.add(what || {});
}
};
ProfilerProxy.prototype.sendStats = function() {
ProfilerProxy.prototype.sendStats = function () {
if (this.profile) {
this.profiler.sendStats();
}
};
ProfilerProxy.prototype.toString = function() {
return this.profile ? this.profiler.toString() : "";
ProfilerProxy.prototype.toString = function () {
return this.profile ? this.profiler.toString() : '';
};
ProfilerProxy.prototype.toJSONString = function() {
return this.profile ? this.profiler.toJSONString() : "{}";
ProfilerProxy.prototype.toJSONString = function () {
return this.profile ? this.profiler.toJSONString() : '{}';
};
module.exports = ProfilerProxy;

View File

@ -1,8 +1,8 @@
'use strict';
module.exports = function getContentDisposition(formatter, filename, inline) {
module.exports = function getContentDisposition (formatter, filename, inline) {
var ext = formatter.getFileExtension();
var time = new Date().toUTCString();
return ( inline ? 'inline' : 'attachment' ) + '; filename=' + filename + '.' + ext + '; ' +
return (inline ? 'inline' : 'attachment') + '; filename=' + filename + '.' + ext + '; ' +
'modification-date="' + time + '";';
};

View File

@ -1,19 +1,19 @@
'use strict';
// jshint ignore:start
function pad(n) {
function pad (n) {
return n < 10 ? '0' + n : n;
}
Date.prototype.toJSON = function() {
/* eslint-disable no-extend-native */
Date.prototype.toJSON = function () {
var s = this.getFullYear() + '-' + pad(this.getMonth() + 1) + '-' + pad(this.getDate()) + 'T' +
pad(this.getHours()) + ':' + pad(this.getMinutes()) + ':' + pad(this.getSeconds());
var offset = this.getTimezoneOffset();
if (offset === 0) {
s += 'Z';
} else {
s += ( offset < 0 ? '+' : '-' ) + pad(Math.abs(offset / 60)) + pad(Math.abs(offset % 60));
s += (offset < 0 ? '+' : '-') + pad(Math.abs(offset / 60)) + pad(Math.abs(offset % 60));
}
return s;
};
// jshint ignore:end
/* eslint-enable no-extend-native */

View File

@ -2,8 +2,8 @@
var path = require('path');
module.exports = function sanitize_filename(filename) {
filename = path.basename(filename, path.extname(filename));
filename = filename.replace(/[;()\[\]<>'"\s]/g, '_');
return filename;
module.exports = function sanitizeFilename (filename) {
filename = path.basename(filename, path.extname(filename));
filename = filename.replace(/[;()\[\]<>'"\s]/g, '_'); // eslint-disable-line no-useless-escape
return filename;
};

View File

@ -6,14 +6,14 @@ module.exports = {
* from object for logs RegEx
*
* @param {Object} object
* @param {Number} max_string_length
* @param {Number} maxStringLength
*/
stringifyForLogs(object, max_string_length = 1024) {
return JSON.stringify(cloneAndFilter(object, max_string_length));
stringifyForLogs (object, maxStringLength = 1024) {
return JSON.stringify(cloneAndFilter(object, maxStringLength));
}
};
function cloneAndFilter(object, max_string_length) {
function cloneAndFilter (object, maxStringLength) {
if (!object || !(object instanceof Object)) {
return null;
}
@ -22,13 +22,13 @@ function cloneAndFilter(object, max_string_length) {
Object.keys(object).map(key => {
if (typeof object[key] === 'string') {
newObject[key] = filterString(object[key], max_string_length);
newObject[key] = filterString(object[key], maxStringLength);
} else if (typeof object[key] === 'object') {
newObject[key] = cloneAndFilter(object[key], max_string_length);
newObject[key] = cloneAndFilter(object[key], maxStringLength);
} else if (object[key] instanceof Array) {
newObject[key] = [];
for (let element of object[key]) {
newObject[key].push(cloneAndFilter(element, max_string_length));
for (const element of object[key]) {
newObject[key].push(cloneAndFilter(element, maxStringLength));
}
} else {
newObject[key] = object[key];
@ -38,8 +38,8 @@ function cloneAndFilter(object, max_string_length) {
return newObject;
}
function filterString(s, max_string_length) {
function filterString (s, maxStringLength) {
return s
.substring(0, max_string_length)
.substring(0, maxStringLength)
.replace(/[^a-zA-Z0-9]/g, ' ');
}

View File

@ -2,7 +2,7 @@
var crypto = require('crypto');
module.exports = function generateMD5(data){
module.exports = function generateMD5 (data) {
var hash = crypto.createHash('md5');
hash.update(data);
return hash.digest('hex');

View File

@ -3,16 +3,16 @@
const COPY_FORMATS = ['TEXT', 'CSV', 'BINARY'];
module.exports = {
getFormatFromCopyQuery(copyQuery) {
getFormatFromCopyQuery (copyQuery) {
let format = 'TEXT'; // Postgres default format
copyQuery = copyQuery.toUpperCase();
if (!copyQuery.startsWith("COPY ")) {
if (!copyQuery.startsWith('COPY ')) {
return false;
}
if(copyQuery.includes(' WITH') && copyQuery.includes('FORMAT ')) {
if (copyQuery.includes(' WITH') && copyQuery.includes('FORMAT ')) {
const regex = /\bFORMAT\s+(\w+)/;
const result = regex.exec(copyQuery);

View File

@ -1,6 +1,6 @@
'use strict';
var sqlQueryMayWriteRegex = new RegExp("\\b(alter|insert|update|delete|create|drop|reindex|truncate|refresh)\\b", "i");
var sqlQueryMayWriteRegex = new RegExp('\\b(alter|insert|update|delete|create|drop|reindex|truncate|refresh)\\b', 'i');
/**
* This is a fuzzy check, the return could be true even if the query doesn't really write anything. But you can be
@ -9,6 +9,6 @@ var sqlQueryMayWriteRegex = new RegExp("\\b(alter|insert|update|delete|create|dr
* @param sql The SQL statement to check against
* @returns {boolean} Return true of the given query may write to the database
*/
module.exports = function queryMayWrite(sql) {
module.exports = function queryMayWrite (sql) {
return sqlQueryMayWriteRegex.test(sql);
};

1431
package-lock.json generated

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More