commit 69afee61e0

.eslintrc.js  (new file, 22 lines)
@@ -0,0 +1,22 @@
+module.exports = {
+    env: {
+        commonjs: true,
+        es6: true,
+        node: true,
+        mocha: true
+    },
+    extends: [
+        'standard'
+    ],
+    globals: {
+        Atomics: 'readonly',
+        SharedArrayBuffer: 'readonly'
+    },
+    parserOptions: {
+        ecmaVersion: 2018
+    },
+    rules: {
+        "indent": ["error", 4],
+        "semi": ["error", "always"]
+    }
+}
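For reference, the new configuration extends eslint-config-standard and overrides just two rules: four-space indentation and mandatory semicolons. A minimal sketch of a module that satisfies that ruleset (the file name and the npx invocation are illustrative, not part of the commit):

    // lib/example.js -- styled the way the new rules expect: 4-space indent,
    // single quotes, semicolons, and a space before function parentheses.
    'use strict';

    module.exports = function add (a, b) {
        if (!Number.isFinite(a) || !Number.isFinite(b)) {
            throw new TypeError('add() expects finite numbers');
        }

        return a + b;
    };

    // Check it with: npx eslint lib/example.js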
.jshintrc  (deleted file, 95 lines)
@@ -1,95 +0,0 @@
-{
-    // // JSHint Default Configuration File (as on JSHint website)
-    // // See http://jshint.com/docs/ for more details
-    //
-    // "maxerr" : 50, // {int} Maximum error before stopping
-    //
-    // // Enforcing
-    // "bitwise" : true, // true: Prohibit bitwise operators (&, |, ^, etc.)
-    // "camelcase" : false, // true: Identifiers must be in camelCase
-    "curly" : true, // true: Require {} for every new block or scope
-    "eqeqeq" : true, // true: Require triple equals (===) for comparison
-    "forin" : true, // true: Require filtering for..in loops with obj.hasOwnProperty()
-    "freeze" : true, // true: prohibits overwriting prototypes of native objects such as Array, Date etc.
-    "immed" : true, // true: Require immediate invocations to be wrapped in parens e.g. `(function () { } ());`
-    // "indent" : 4, // {int} Number of spaces to use for indentation
-    // "latedef" : false, // true: Require variables/functions to be defined before being used
-    "newcap" : true, // true: Require capitalization of all constructor functions e.g. `new F()`
-    "noarg" : true, // true: Prohibit use of `arguments.caller` and `arguments.callee`
-    // "noempty" : true, // true: Prohibit use of empty blocks
-    "nonbsp" : true, // true: Prohibit "non-breaking whitespace" characters.
-    "nonew" : true, // true: Prohibit use of constructors for side-effects (without assignment)
-    // "plusplus" : false, // true: Prohibit use of `++` & `--`
-    // "quotmark" : false, // Quotation mark consistency:
-    // // false : do nothing (default)
-    // // true : ensure whatever is used is consistent
-    // // "single" : require single quotes
-    // // "double" : require double quotes
-    "undef" : true, // true: Require all non-global variables to be declared (prevents global leaks)
-    "unused" : true, // true: Require all defined variables be used
-    // "strict" : true, // true: Requires all functions run in ES5 Strict Mode
-    // "maxparams" : false, // {int} Max number of formal params allowed per function
-    // "maxdepth" : false, // {int} Max depth of nested blocks (within functions)
-    // "maxstatements" : false, // {int} Max number statements per function
-    "maxcomplexity" : 6, // {int} Max cyclomatic complexity per function
-    "maxlen" : 120, // {int} Max number of characters per line
-    //
-    // // Relaxing
-    // "asi" : false, // true: Tolerate Automatic Semicolon Insertion (no semicolons)
-    // "boss" : false, // true: Tolerate assignments where comparisons would be expected
-    "debug" : false, // true: Allow debugger statements e.g. browser breakpoints.
-    // "eqnull" : false, // true: Tolerate use of `== null`
-    // "es5" : false, // true: Allow ES5 syntax (ex: getters and setters)
-    "esnext" : true, // true: Allow ES.next (ES6) syntax (ex: `const`)
-    // "moz" : false, // true: Allow Mozilla specific syntax (extends and overrides esnext features)
-    // // (ex: `for each`, multiple try/catch, function expression…)
-    // "evil" : false, // true: Tolerate use of `eval` and `new Function()`
-    // "expr" : false, // true: Tolerate `ExpressionStatement` as Programs
-    // "funcscope" : false, // true: Tolerate defining variables inside control statements
-    // "globalstrict" : false, // true: Allow global "use strict" (also enables 'strict')
-    // "iterator" : false, // true: Tolerate using the `__iterator__` property
-    // "lastsemic" : false, // true: Tolerate omitting a semicolon for the last statement of a 1-line block
-    // "laxbreak" : false, // true: Tolerate possibly unsafe line breakings
-    // "laxcomma" : false, // true: Tolerate comma-first style coding
-    // "loopfunc" : false, // true: Tolerate functions being defined in loops
-    // "multistr" : false, // true: Tolerate multi-line strings
-    // "noyield" : false, // true: Tolerate generator functions with no yield statement in them.
-    // "notypeof" : false, // true: Tolerate invalid typeof operator values
-    // "proto" : false, // true: Tolerate using the `__proto__` property
-    // "scripturl" : false, // true: Tolerate script-targeted URLs
-    // "shadow" : false, // true: Allows re-define variables later in code e.g. `var x=1; x=2;`
-    // "sub" : false, // true: Tolerate using `[]` notation when it can still be expressed in dot notation
-    // "supernew" : false, // true: Tolerate `new function () { ... };` and `new Object;`
-    // "validthis" : false, // true: Tolerate using this in a non-constructor function
-    //
-    // // Environments
-    // "browser" : true, // Web Browser (window, document, etc)
-    // "browserify" : false, // Browserify (node.js code in the browser)
-    // "couch" : false, // CouchDB
-    // "devel" : true, // Development/debugging (alert, confirm, etc)
-    // "dojo" : false, // Dojo Toolkit
-    // "jasmine" : false, // Jasmine
-    // "jquery" : false, // jQuery
-    // "mocha" : true, // Mocha
-    // "mootools" : false, // MooTools
-    "node" : true, // Node.js
-    // "nonstandard" : false, // Widely adopted globals (escape, unescape, etc)
-    // "prototypejs" : false, // Prototype and Scriptaculous
-    // "qunit" : false, // QUnit
-    // "rhino" : false, // Rhino
-    // "shelljs" : false, // ShellJS
-    // "worker" : false, // Web Workers
-    // "wsh" : false, // Windows Scripting Host
-    // "yui" : false, // Yahoo User Interface
-
-    // Custom predefined global variables
-    "predef": [
-        "-console", // disallows console, use debug
-        "beforeEach",
-        "afterEach",
-        "before",
-        "after",
-        "describe",
-        "it"
-    ]
-}
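Most of the options that were actually enforced in the deleted .jshintrc have ESLint counterparts, and several are already covered by eslint-config-standard. A rough, illustrative mapping of the remaining ones as rule overrides (the commit itself only overrides indent and semi; this block is a sketch for comparison, not project configuration):

    // Approximate ESLint equivalents of the old JSHint settings.
    module.exports = {
        rules: {
            curly: 'error',                      // jshint "curly"
            eqeqeq: 'error',                     // jshint "eqeqeq"
            'guard-for-in': 'error',             // jshint "forin"
            'no-caller': 'error',                // jshint "noarg"
            'no-new': 'error',                   // jshint "nonew"
            'no-undef': 'error',                 // jshint "undef"
            'no-unused-vars': 'error',           // jshint "unused"
            complexity: ['error', 6],            // jshint "maxcomplexity" : 6
            'max-len': ['error', { code: 120 }], // jshint "maxlen" : 120
            'no-console': 'error'                // jshint "predef": ["-console"]
        }
    };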
Makefile  (10 changed lines)
@@ -39,15 +39,15 @@ test-acceptance: config/environments/test.js
     @echo "***tests***"
     @$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE_ACCEPTANCE)
 
-jshint:
-    @echo "***jshint***"
-    @./node_modules/.bin/jshint lib/ test/ app.js
+lint:
+    @echo "***eslint***"
+    @./node_modules/.bin/eslint app.js "lib/**/*.js" "test/**/*.js"
 
-test-all: test jshint
+test-all: test lint
 
 coverage:
     @RUNTESTFLAGS=--with-coverage make test
 
 check: test
 
-.PHONY: pre-install test jshint coverage
+.PHONY: pre-install test lint coverage
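With this change `make lint` (and therefore `make test-all`) shells out to the locally installed ESLint binary over app.js, lib/ and test/. As a side note, newer ESLint releases also expose a programmatic API that reproduces the same check from Node; the sketch below assumes ESLint 7 or later and is not part of the commit (the version in use at the time may still rely on the older CLIEngine API):

    // lint.js -- a sketch of the equivalent check via ESLint's Node API.
    'use strict';

    const { ESLint } = require('eslint');

    async function main () {
        const eslint = new ESLint();
        const results = await eslint.lintFiles(['app.js', 'lib/**/*.js', 'test/**/*.js']);
        const formatter = await eslint.loadFormatter('stylish');

        console.log(formatter.format(results));

        // Mirror the Makefile target: fail when any error is reported.
        process.exitCode = results.some((result) => result.errorCount > 0) ? 1 : 0;
    }

    main().catch((err) => {
        console.error(err);
        process.exitCode = 1;
    });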
NEWS.md  (3 added lines)
@@ -3,6 +3,9 @@
 ## 8.0.1
 Released 2019-mm-dd
 
+Announcements:
+- Removed `jshint` as linter in favour of `eslint` to check syntax, find problems, and enforce code style.
+
 ## 8.0.0
 Released 2019-11-13
 
app.js  (75 changed lines)
@@ -54,7 +54,7 @@ var availableEnvironments = {
 };
 
 // sanity check
-if (!availableEnvironments[ENVIRONMENT]){
+if (!availableEnvironments[ENVIRONMENT]) {
     logError('node app.js [environment]');
     logError('environments: %s', Object.keys(availableEnvironments).join(', '));
     process.exit(1);
@@ -76,27 +76,26 @@ var agentOptions = _.defaults(global.environment.httpAgent || {}, {
 http.globalAgent = new http.Agent(agentOptions);
 https.globalAgent = new https.Agent(agentOptions);
 
-
 global.log4js = require('log4js');
 var log4jsConfig = {
     appenders: [],
     replaceConsole: true
 };
 
-if ( global.environment.log_filename ) {
+if (global.environment.log_filename) {
     var logFilename = path.resolve(global.environment.log_filename);
     var logDirectory = path.dirname(logFilename);
     if (!fs.existsSync(logDirectory)) {
-        logError("Log filename directory does not exist: " + logDirectory);
+        logError('Log filename directory does not exist: ' + logDirectory);
         process.exit(1);
     }
-    log("Logs will be written to " + logFilename);
+    log('Logs will be written to ' + logFilename);
     log4jsConfig.appenders.push(
-        { type: "file", absolute: true, filename: logFilename }
+        { type: 'file', absolute: true, filename: logFilename }
    );
 } else {
     log4jsConfig.appenders.push(
-        { type: "console", layout: { type:'basic' } }
+        { type: 'console', layout: { type: 'basic' } }
    );
 }
 
@@ -118,13 +117,13 @@ var backlog = global.environment.maxConnections || 128;
 
 var listener = server.listen(serverOptions.bind.port, serverOptions.bind.host, backlog);
 
-var version = require("./package").version;
+var version = require('./package').version;
 
-listener.on('listening', function() {
-    log("Using Node.js %s", process.version);
+listener.on('listening', function () {
+    log('Using Node.js %s', process.version);
     log('Using configuration file "%s"', configurationFile);
     log(
-        "Windshaft tileserver %s started on %s:%s PID=%d (%s)",
+        'Windshaft tileserver %s started on %s:%s PID=%d (%s)',
         version, serverOptions.bind.host, serverOptions.bind.port, process.pid, ENVIRONMENT
     );
 });
@@ -163,15 +162,15 @@ setInterval(function cpuUsageMetrics () {
     previousCPUUsage = CPUUsage;
 }, 5000);
 
-setInterval(function() {
+setInterval(function () {
     var memoryUsage = process.memoryUsage();
-    Object.keys(memoryUsage).forEach(function(k) {
+    Object.keys(memoryUsage).forEach(function (k) {
         global.statsClient.gauge('windshaft.memory.' + k, memoryUsage[k]);
     });
 }, 5000);
 
-process.on('SIGHUP', function() {
-    global.log4js.clearAndShutdownAppenders(function() {
+process.on('SIGHUP', function () {
+    global.log4js.clearAndShutdownAppenders(function () {
         global.log4js.configure(log4jsConfig);
         global.logger = global.log4js.getLogger();
         log('Log files reloaded');
@@ -179,12 +178,12 @@ process.on('SIGHUP', function() {
 });
 
 if (global.gc) {
-    var gcInterval = Number.isFinite(global.environment.gc_interval) ?
-        global.environment.gc_interval :
-        10000;
+    var gcInterval = Number.isFinite(global.environment.gc_interval)
+        ? global.environment.gc_interval
+        : 10000;
 
     if (gcInterval > 0) {
-        setInterval(function gcForcedCycle() {
+        setInterval(function gcForcedCycle () {
             global.gc();
         }, gcInterval);
     }
@@ -206,24 +205,24 @@ function getGCTypeValue (type) {
     let value;
 
     switch (type) {
-        case 1:
-            value = 'Scavenge';
-            break;
-        case 2:
-            value = 'MarkSweepCompact';
-            break;
-        case 4:
-            value = 'IncrementalMarking';
-            break;
-        case 8:
-            value = 'ProcessWeakCallbacks';
-            break;
-        case 15:
-            value = 'All';
-            break;
-        default:
-            value = 'Unkown';
-            break;
+    case 1:
+        value = 'Scavenge';
+        break;
+    case 2:
+        value = 'MarkSweepCompact';
+        break;
+    case 4:
+        value = 'IncrementalMarking';
+        break;
+    case 8:
+        value = 'ProcessWeakCallbacks';
+        break;
+    case 15:
+        value = 'All';
+        break;
+    default:
+        value = 'Unkown';
+        break;
     }
 
     return value;
@@ -231,7 +230,7 @@ function getGCTypeValue (type) {
 
 addHandlers(listener, global.logger, 45000);
 
-function addHandlers(listener, logger, killTimeout) {
+function addHandlers (listener, logger, killTimeout) {
     process.on('uncaughtException', exitProcess(listener, logger, killTimeout));
     process.on('unhandledRejection', exitProcess(listener, logger, killTimeout));
     process.on('ENOMEM', exitProcess(listener, logger, killTimeout));
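Most of the churn in app.js above is mechanical restyling to the standard preset: single instead of double quotes, a space before function parentheses, and multi-line ternaries with the operator leading the continuation line. A small illustrative before/after sketch (not taken from the codebase):

    // Before (JSHint-era formatting):
    function getLabel(config, key) {
        var label = config[key] ? "set" :
            "unset";
        return label;
    }

    // After (eslint-config-standard plus the project's indent/semi overrides):
    function getLabel (config, key) {
        const label = config[key]
            ? 'set'
            : 'unset';
        return label;
    }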
@ -1,5 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
const path = require('path');
|
||||
|
||||
const { Router: router } = require('express');
|
||||
|
||||
const RedisPool = require('redis-mpool');
|
||||
@ -83,10 +85,9 @@ module.exports = class ApiRouter {
|
||||
const metadataBackend = cartodbRedis({ pool: redisPool });
|
||||
const pgConnection = new PgConnection(metadataBackend);
|
||||
|
||||
const windshaftLogger = environmentOptions.log_windshaft && global.log4js ?
|
||||
global.log4js.getLogger('[windshaft]') :
|
||||
null;
|
||||
|
||||
const windshaftLogger = environmentOptions.log_windshaft && global.log4js
|
||||
? global.log4js.getLogger('[windshaft]')
|
||||
: null;
|
||||
const mapStore = new windshaft.storage.MapStore({
|
||||
pool: redisPool,
|
||||
expire_time: serverOptions.grainstore.default_layergroup_ttl,
|
||||
@ -237,7 +238,6 @@ module.exports = class ApiRouter {
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
function createTemplateMaps ({ redisPool, surrogateKeysCache }) {
|
||||
const templateMaps = new TemplateMaps(redisPool, {
|
||||
max_user_templates: global.environment.maxUserTemplates
|
||||
@ -245,12 +245,12 @@ function createTemplateMaps ({ redisPool, surrogateKeysCache }) {
|
||||
|
||||
function invalidateNamedMap (owner, templateName) {
|
||||
var startTime = Date.now();
|
||||
surrogateKeysCache.invalidate(new NamedMapsCacheEntry(owner, templateName), function(err) {
|
||||
surrogateKeysCache.invalidate(new NamedMapsCacheEntry(owner, templateName), function (err) {
|
||||
var logMessage = JSON.stringify({
|
||||
username: owner,
|
||||
type: 'named_map_invalidation',
|
||||
elapsed: Date.now() - startTime,
|
||||
error: !!err ? JSON.stringify(err.message) : undefined
|
||||
error: err ? JSON.stringify(err.message) : undefined
|
||||
});
|
||||
if (err) {
|
||||
global.logger.warn(logMessage);
|
||||
@ -260,15 +260,14 @@ function createTemplateMaps ({ redisPool, surrogateKeysCache }) {
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
['update', 'delete'].forEach(function(eventType) {
|
||||
['update', 'delete'].forEach(function (eventType) {
|
||||
templateMaps.on(eventType, invalidateNamedMap);
|
||||
});
|
||||
|
||||
return templateMaps;
|
||||
}
|
||||
|
||||
function createSurrogateKeysCacheBackends(serverOptions) {
|
||||
function createSurrogateKeysCacheBackends (serverOptions) {
|
||||
var cacheBackends = [];
|
||||
|
||||
if (serverOptions.varnish_purge_enabled) {
|
||||
@ -287,14 +286,13 @@ function createSurrogateKeysCacheBackends(serverOptions) {
|
||||
return cacheBackends;
|
||||
}
|
||||
|
||||
const timeoutErrorTilePath = __dirname + '/../../assets/render-timeout-fallback.png';
|
||||
const timeoutErrorTile = require('fs').readFileSync(timeoutErrorTilePath, {encoding: null});
|
||||
const timeoutErrorTilePath = path.join(__dirname, '/../../assets/render-timeout-fallback.png');
|
||||
const timeoutErrorTile = require('fs').readFileSync(timeoutErrorTilePath, { encoding: null });
|
||||
|
||||
function createRendererFactory ({ redisPool, serverOptions, environmentOptions }) {
|
||||
var onTileErrorStrategy;
|
||||
if (environmentOptions.enabledFeatures.onTileErrorStrategy !== false) {
|
||||
onTileErrorStrategy = function onTileErrorStrategy$TimeoutTile(err, tile, headers, stats, format, callback) {
|
||||
|
||||
onTileErrorStrategy = function onTileErrorStrategy$TimeoutTile (err, tile, headers, stats, format, callback) {
|
||||
function isRenderTimeoutError (err) {
|
||||
return err.message === 'Render timed out';
|
||||
}
|
||||
@ -313,7 +311,7 @@ function createRendererFactory ({ redisPool, serverOptions, environmentOptions }
|
||||
|
||||
if (isTimeoutError(err) && isRasterFormat(format)) {
|
||||
return callback(null, timeoutErrorTile, {
|
||||
'Content-Type': 'image/png',
|
||||
'Content-Type': 'image/png'
|
||||
}, {});
|
||||
} else {
|
||||
return callback(err, tile, headers, stats);
|
||||
|
@ -48,10 +48,10 @@ function createPGClient () {
|
||||
};
|
||||
}
|
||||
|
||||
function getDataFromQuery({ queryTemplate, key }) {
|
||||
function getDataFromQuery ({ queryTemplate, key }) {
|
||||
const readOnlyTransactionOn = true;
|
||||
|
||||
return function getCatalogMiddleware(req, res, next) {
|
||||
return function getCatalogMiddleware (req, res, next) {
|
||||
const { pg, user } = res.locals;
|
||||
const sql = queryTemplate({ _username: user });
|
||||
|
||||
@@ -82,27 +82,27 @@ function prepareResponse () {
         }, {});
 
         const analysisCatalog = catalog.map(analysis => {
-            if (analysisIdToTable.hasOwnProperty(analysis.node_id)) {
+            if (Object.prototype.hasOwnProperty.call(analysisIdToTable, analysis.node_id)) {
                 analysis.table = analysisIdToTable[analysis.node_id];
             }
|
||||
|
||||
return analysis;
|
||||
})
|
||||
.sort((analysisA, analysisB) => {
|
||||
if (!!analysisA.table && !!analysisB.table) {
|
||||
return analysisB.table.size - analysisA.table.size;
|
||||
}
|
||||
.sort((analysisA, analysisB) => {
|
||||
if (!!analysisA.table && !!analysisB.table) {
|
||||
return analysisB.table.size - analysisA.table.size;
|
||||
}
|
||||
|
||||
if (analysisA.table) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (analysisB.table) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (!!analysisA.table) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!!analysisB.table) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return -1;
|
||||
});
|
||||
});
|
||||
|
||||
res.statusCode = 200;
|
||||
res.body = { catalog: analysisCatalog };
|
||||
@ -112,7 +112,7 @@ function prepareResponse () {
|
||||
}
|
||||
|
||||
function unauthorizedError () {
|
||||
return function unathorizedErrorMiddleware(err, req, res, next) {
|
||||
return function unathorizedErrorMiddleware (err, req, res, next) {
|
||||
if (err.message.match(/permission\sdenied/)) {
|
||||
err = new Error('Unauthorized');
|
||||
err.http_status = 401;
|
||||
|
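The `Object.prototype.hasOwnProperty.call(...)` form introduced in the hunk above is what ESLint's no-prototype-builtins rule asks for: calling `hasOwnProperty` directly on an arbitrary object fails when the object has no prototype or shadows the method. A short illustration (not from the codebase):

    // A bare lookup table without Object.prototype:
    const table = Object.create(null);
    table.count = 1;

    // table.hasOwnProperty('count')  // TypeError: table.hasOwnProperty is not a function

    // Borrowing the method from Object.prototype works for any object:
    const hasCount = Object.prototype.hasOwnProperty.call(table, 'count'); // true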
@ -35,7 +35,7 @@ module.exports = class AnalysisLayergroupController {
|
||||
};
|
||||
|
||||
function analysisNodeStatus (analysisStatusBackend) {
|
||||
return function analysisNodeStatusMiddleware(req, res, next) {
|
||||
return function analysisNodeStatusMiddleware (req, res, next) {
|
||||
const { nodeId } = req.params;
|
||||
const dbParams = dbParamsFromResLocals(res.locals);
|
||||
|
||||
|
@ -87,7 +87,7 @@ module.exports = class AnonymousMapController {
|
||||
checkJsonContentType(),
|
||||
checkCreateLayergroup(),
|
||||
prepareAdapterMapConfig(this.mapConfigAdapter),
|
||||
createLayergroup (
|
||||
createLayergroup(
|
||||
this.mapBackend,
|
||||
this.userLimitsBackend,
|
||||
this.pgConnection,
|
||||
@ -130,10 +130,10 @@ function checkCreateLayergroup () {
|
||||
}
|
||||
|
||||
function prepareAdapterMapConfig (mapConfigAdapter) {
|
||||
return function prepareAdapterMapConfigMiddleware(req, res, next) {
|
||||
return function prepareAdapterMapConfigMiddleware (req, res, next) {
|
||||
const requestMapConfig = req.body;
|
||||
|
||||
const { user, api_key } = res.locals;
|
||||
const { user, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const params = Object.assign({ dbuser, dbname, dbpassword, dbhost, dbport }, req.query);
|
||||
|
||||
@ -149,31 +149,30 @@ function prepareAdapterMapConfig (mapConfigAdapter) {
|
||||
},
|
||||
batch: {
|
||||
username: user,
|
||||
apiKey: api_key
|
||||
apiKey
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
mapConfigAdapter.getMapConfig(user,
|
||||
requestMapConfig,
|
||||
params,
|
||||
context,
|
||||
(err, requestMapConfig, stats = { overviewsAddedToMapconfig : false }) => {
|
||||
req.profiler.done('anonymous.getMapConfig');
|
||||
requestMapConfig,
|
||||
params,
|
||||
context,
|
||||
(err, requestMapConfig, stats = { overviewsAddedToMapconfig: false }) => {
|
||||
req.profiler.done('anonymous.getMapConfig');
|
||||
|
||||
stats.mapType = 'anonymous';
|
||||
req.profiler.add(stats);
|
||||
stats.mapType = 'anonymous';
|
||||
req.profiler.add(stats);
|
||||
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
req.body = requestMapConfig;
|
||||
res.locals.context = context;
|
||||
|
||||
req.body = requestMapConfig;
|
||||
res.locals.context = context;
|
||||
|
||||
next();
|
||||
});
|
||||
next();
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
@ -182,12 +181,17 @@ function createLayergroup (mapBackend, userLimitsBackend, pgConnection, affected
|
||||
const requestMapConfig = req.body;
|
||||
|
||||
const { context } = res.locals;
|
||||
const { user, cache_buster, api_key } = res.locals;
|
||||
const { user, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
|
||||
const params = {
|
||||
cache_buster, api_key,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport
|
||||
cache_buster: cacheBuster,
|
||||
api_key: apiKey,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport
|
||||
};
|
||||
|
||||
const datasource = context.datasource || Datasource.EmptyDatasource();
|
||||
|
@ -70,8 +70,13 @@ function getFeatureAttributes (attributesBackend) {
|
||||
|
||||
const params = {
|
||||
token,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
layer, fid
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
layer,
|
||||
fid
|
||||
};
|
||||
|
||||
attributesBackend.getFeatureAttributes(mapConfigProvider, params, false, (err, tile, stats = {}) => {
|
||||
|
@ -44,7 +44,7 @@ module.exports = class AggregatedFeaturesLayergroupController {
|
||||
dbConnSetup(this.pgConnection),
|
||||
// TODO: create its rate limit
|
||||
rateLimit(this.userLimitsBackend, RATE_LIMIT_ENDPOINTS_GROUPS.ATTRIBUTES),
|
||||
cleanUpQueryParams([ 'aggregation' ]),
|
||||
cleanUpQueryParams(['aggregation']),
|
||||
createMapStoreMapConfigProvider(
|
||||
this.mapStore,
|
||||
this.userLimitsBackend,
|
||||
@ -71,9 +71,16 @@ function getClusteredFeatures (clusterBackend) {
|
||||
const { aggregation } = req.query;
|
||||
|
||||
const params = {
|
||||
user, token,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
layer, zoom, clusterId,
|
||||
user,
|
||||
token,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
layer,
|
||||
zoom,
|
||||
clusterId,
|
||||
aggregation
|
||||
};
|
||||
|
||||
|
@ -22,10 +22,10 @@ const ALLOWED_DATAVIEW_QUERY_PARAMS = [
|
||||
'end', // number
|
||||
'column_type', // string
|
||||
'bins', // number
|
||||
'aggregation', //string
|
||||
'aggregation', // string
|
||||
'offset', // number
|
||||
'q', // widgets search
|
||||
'categories', // number
|
||||
'categories' // number
|
||||
];
|
||||
|
||||
module.exports = class DataviewLayergroupController {
|
||||
|
@ -21,7 +21,7 @@ const DEFAULT_ZOOM_CENTER = {
|
||||
}
|
||||
};
|
||||
|
||||
function numMapper(n) {
|
||||
function numMapper (n) {
|
||||
return +n;
|
||||
}
|
||||
|
||||
@ -60,7 +60,8 @@ module.exports = class PreviewTemplateController {
|
||||
checkStaticImageFormat(),
|
||||
namedMapProvider({
|
||||
namedMapProviderCache: this.namedMapProviderCache,
|
||||
label: 'STATIC_VIZ_MAP', forcedFormat: 'png'
|
||||
label: 'STATIC_VIZ_MAP',
|
||||
forcedFormat: 'png'
|
||||
}),
|
||||
getTemplate({ label: 'STATIC_VIZ_MAP' }),
|
||||
prepareLayerFilterFromPreviewLayers({
|
||||
@ -99,7 +100,7 @@ function getTemplate ({ label }) {
|
||||
function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label }) {
|
||||
return function prepareLayerFilterFromPreviewLayersMiddleware (req, res, next) {
|
||||
const { template } = res.locals;
|
||||
const { config, auth_token } = req.query;
|
||||
const { config, auth_token: authToken } = req.query;
|
||||
|
||||
if (!template || !template.view || !template.view.preview_layers) {
|
||||
return next();
|
||||
@ -109,8 +110,8 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
|
||||
var layerVisibilityFilter = [];
|
||||
|
||||
template.layergroup.layers.forEach((layer, index) => {
|
||||
if (previewLayers[''+index] !== false && previewLayers[layer.id] !== false) {
|
||||
layerVisibilityFilter.push(''+index);
|
||||
if (previewLayers['' + index] !== false && previewLayers[layer.id] !== false) {
|
||||
layerVisibilityFilter.push('' + index);
|
||||
}
|
||||
});
|
||||
|
||||
@ -118,21 +119,29 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
|
||||
return next();
|
||||
}
|
||||
|
||||
const { user, token, cache_buster, api_key } = res.locals;
|
||||
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const { template_id, format } = req.params;
|
||||
const { template_id: templateId, format } = req.params;
|
||||
|
||||
const params = {
|
||||
user, token, cache_buster, api_key,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
template_id, format
|
||||
user,
|
||||
token,
|
||||
cache_buster: cacheBuster,
|
||||
api_key: apiKey,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
template_id: templateId,
|
||||
format
|
||||
};
|
||||
|
||||
// overwrites 'all' default filter
|
||||
params.layer = layerVisibilityFilter.join(',');
|
||||
|
||||
// recreates the provider
|
||||
namedMapProviderCache.get(user, template_id, config, auth_token, params, (err, provider) => {
|
||||
namedMapProviderCache.get(user, templateId, config, authToken, params, (err, provider) => {
|
||||
if (err) {
|
||||
err.label = label;
|
||||
return next(err);
|
||||
@ -146,7 +155,7 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
|
||||
}
|
||||
|
||||
function getStaticImageOptions ({ tablesExtentBackend }) {
|
||||
return function getStaticImageOptionsMiddleware(req, res, next) {
|
||||
return function getStaticImageOptionsMiddleware (req, res, next) {
|
||||
const { user, mapConfigProvider, template } = res.locals;
|
||||
const { zoom, lon, lat, bbox } = req.query;
|
||||
const params = { zoom, lon, lat, bbox };
|
||||
@ -248,7 +257,7 @@ function getImageOptionsFromBoundingBox (bbox = '') {
|
||||
}
|
||||
}
|
||||
|
||||
function getImage({ previewBackend, label }) {
|
||||
function getImage ({ previewBackend, label }) {
|
||||
return function getImageMiddleware (req, res, next) {
|
||||
const { imageOpts, mapConfigProvider } = res.locals;
|
||||
const { zoom, center, bbox } = imageOpts;
|
||||
@ -298,7 +307,7 @@ function getImage({ previewBackend, label }) {
|
||||
}
|
||||
|
||||
function setContentTypeHeader () {
|
||||
return function setContentTypeHeaderMiddleware(req, res, next) {
|
||||
return function setContentTypeHeaderMiddleware (req, res, next) {
|
||||
const format = req.params.format === 'jpg' ? 'jpeg' : 'png';
|
||||
|
||||
res.set('Content-Type', `image/${format}`);
|
||||
@ -312,7 +321,7 @@ function incrementMapViewsError (ctx) {
|
||||
}
|
||||
|
||||
function incrementMapViews ({ metadataBackend }) {
|
||||
return function incrementMapViewsMiddleware(req, res, next) {
|
||||
return function incrementMapViewsMiddleware (req, res, next) {
|
||||
const { user, mapConfigProvider } = res.locals;
|
||||
|
||||
mapConfigProvider.getMapConfig((err, mapConfig) => {
|
||||
@ -334,7 +343,7 @@ function incrementMapViews ({ metadataBackend }) {
|
||||
};
|
||||
}
|
||||
|
||||
function templateZoomCenter(view) {
|
||||
function templateZoomCenter (view) {
|
||||
if (view.zoom !== undefined && view.center) {
|
||||
return {
|
||||
zoom: view.zoom,
|
||||
@ -344,7 +353,7 @@ function templateZoomCenter(view) {
|
||||
return false;
|
||||
}
|
||||
|
||||
function templateBounds(view) {
|
||||
function templateBounds (view) {
|
||||
if (view.bounds) {
|
||||
var hasAllBounds = ['west', 'south', 'east', 'north'].every(prop => Number.isFinite(view.bounds[prop]));
|
||||
|
||||
|
@@ -45,14 +45,14 @@ module.exports = class TileLayergroupController {
 
     route (mapRouter) {
         // REGEXP: doesn't match with `val`
-        const not = (val) => `(?!${val})([^\/]+?)`;
+        const not = (val) => `(?!${val})([^\/]+?)`; // eslint-disable-line no-useless-escape
 
         // Sadly the path that matches 1 also matches with 2 so we need to tell to express
         // that performs only the middlewares of the first path that matches
         // for that we use one array to group all paths.
         mapRouter.get([
-            `/:token/:z/:x/:y@:scale_factor?x.:format`, // 1
-            `/:token/:z/:x/:y.:format`, // 2
+            '/:token/:z/:x/:y@:scale_factor?x.:format', // 1
+            '/:token/:z/:x/:y.:format', // 2
             `/:token${not('static')}/:layer/:z/:x/:y.(:format)`
         ], this.middlewares());
     }
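The only change to the regular expression itself is an inline suppression: inside a character class a forward slash needs no escaping, so no-useless-escape flags `[^\/]`, and the commit silences the warning for that line rather than editing the pattern. Both spellings match exactly the same strings; a quick illustration (not from the codebase):

    const withEscape = /[^\/]+/; // eslint-disable-line no-useless-escape
    const withoutEscape = /[^/]+/;

    console.log(withEscape.test('a/b'), withoutEscape.test('a/b')); // true true
    console.log('a/b'.match(withEscape)[0]);    // 'a'
    console.log('a/b'.match(withoutEscape)[0]); // 'a'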
|
||||
@ -90,7 +90,7 @@ function parseFormat (format = '') {
|
||||
return SUPPORTED_FORMATS[prettyFormat] ? prettyFormat : 'invalid';
|
||||
}
|
||||
|
||||
function getStatusCode(tile, format){
|
||||
function getStatusCode (tile, format) {
|
||||
return tile.length === 0 && format === 'mvt' ? 204 : 200;
|
||||
}
|
||||
|
||||
@ -149,9 +149,8 @@ function incrementErrorMetrics (statsClient) {
|
||||
|
||||
function tileError () {
|
||||
return function tileErrorMiddleware (err, req, res, next) {
|
||||
|
||||
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
|
||||
let errMsg = err.message ? ( '' + err.message ) : ( '' + err );
|
||||
let errMsg = err.message ? ('' + err.message) : ('' + err);
|
||||
|
||||
// Rewrite mapnik parsing errors to start with layer number
|
||||
const matches = errMsg.match("(.*) in style 'layer([0-9]+)'");
|
||||
|
@ -9,8 +9,8 @@ module.exports = function authorize (authBackend) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
if(!authorized) {
|
||||
err = new Error("Sorry, you are unauthorized (permission denied)");
|
||||
if (!authorized) {
|
||||
err = new Error('Sorry, you are unauthorized (permission denied)');
|
||||
err.http_status = 403;
|
||||
return next(err);
|
||||
}
|
||||
|
@ -48,7 +48,7 @@ module.exports = function setCacheControlHeader ({
|
||||
return next();
|
||||
}
|
||||
|
||||
const directives = [ 'public' ];
|
||||
const directives = ['public'];
|
||||
|
||||
if (everyAffectedTableCanBeInvalidated(affectedTables)) {
|
||||
directives.push(`max-age=${ttl}`);
|
||||
|
@ -1,7 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function checkJsonContentType () {
|
||||
return function checkJsonContentTypeMiddleware(req, res, next) {
|
||||
return function checkJsonContentTypeMiddleware (req, res, next) {
|
||||
if (req.method === 'POST' && !req.is('application/json')) {
|
||||
return next(new Error('POST data must be of type application/json'));
|
||||
}
|
||||
|
@ -4,7 +4,7 @@ const VALID_IMAGE_FORMATS = ['png', 'jpg'];
|
||||
|
||||
module.exports = function checkStaticImageFormat () {
|
||||
return function checkStaticImageFormatMiddleware (req, res, next) {
|
||||
if(!VALID_IMAGE_FORMATS.includes(req.params.format)) {
|
||||
if (!VALID_IMAGE_FORMATS.includes(req.params.format)) {
|
||||
return next(new Error(`Unsupported image format "${req.params.format}"`));
|
||||
}
|
||||
|
||||
|
@ -13,8 +13,8 @@ module.exports = function cors () {
|
||||
headers.push('Content-Type');
|
||||
}
|
||||
|
||||
res.set("Access-Control-Allow-Origin", "*");
|
||||
res.set("Access-Control-Allow-Headers", headers.join(', '));
|
||||
res.set('Access-Control-Allow-Origin', '*');
|
||||
res.set('Access-Control-Allow-Headers', headers.join(', '));
|
||||
|
||||
next();
|
||||
};
|
||||
|
@ -3,24 +3,24 @@
|
||||
const basicAuth = require('basic-auth');
|
||||
|
||||
module.exports = function credentials () {
|
||||
return function credentialsMiddleware(req, res, next) {
|
||||
return function credentialsMiddleware (req, res, next) {
|
||||
const apikeyCredentials = getApikeyCredentialsFromRequest(req);
|
||||
|
||||
res.locals.api_key = apikeyCredentials.token;
|
||||
res.locals.basicAuthUsername = apikeyCredentials.username;
|
||||
res.set('vary', 'Authorization'); //Honor Authorization header when caching.
|
||||
res.set('vary', 'Authorization'); // Honor Authorization header when caching.
|
||||
|
||||
return next();
|
||||
};
|
||||
};
|
||||
|
||||
function getApikeyCredentialsFromRequest(req) {
|
||||
function getApikeyCredentialsFromRequest (req) {
|
||||
let apikeyCredentials = {
|
||||
token: null,
|
||||
username: null,
|
||||
username: null
|
||||
};
|
||||
|
||||
for (let getter of apikeyGetters) {
|
||||
for (const getter of apikeyGetters) {
|
||||
apikeyCredentials = getter(req);
|
||||
if (apikeyTokenFound(apikeyCredentials)) {
|
||||
break;
|
||||
@ -33,10 +33,10 @@ function getApikeyCredentialsFromRequest(req) {
|
||||
const apikeyGetters = [
|
||||
getApikeyTokenFromHeaderAuthorization,
|
||||
getApikeyTokenFromRequestQueryString,
|
||||
getApikeyTokenFromRequestBody,
|
||||
getApikeyTokenFromRequestBody
|
||||
];
|
||||
|
||||
function getApikeyTokenFromHeaderAuthorization(req) {
|
||||
function getApikeyTokenFromHeaderAuthorization (req) {
|
||||
const credentials = basicAuth(req);
|
||||
|
||||
if (credentials) {
|
||||
@ -47,12 +47,12 @@ function getApikeyTokenFromHeaderAuthorization(req) {
|
||||
} else {
|
||||
return {
|
||||
username: null,
|
||||
token: null,
|
||||
token: null
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function getApikeyTokenFromRequestQueryString(req) {
|
||||
function getApikeyTokenFromRequestQueryString (req) {
|
||||
let token = null;
|
||||
|
||||
if (req.query && req.query.api_key) {
|
||||
@ -63,11 +63,11 @@ function getApikeyTokenFromRequestQueryString(req) {
|
||||
|
||||
return {
|
||||
username: null,
|
||||
token: token,
|
||||
token: token
|
||||
};
|
||||
}
|
||||
|
||||
function getApikeyTokenFromRequestBody(req) {
|
||||
function getApikeyTokenFromRequestBody (req) {
|
||||
let token = null;
|
||||
|
||||
if (req.body && req.body.api_key) {
|
||||
@ -78,10 +78,10 @@ function getApikeyTokenFromRequestBody(req) {
|
||||
|
||||
return {
|
||||
username: null,
|
||||
token: token,
|
||||
token: token
|
||||
};
|
||||
}
|
||||
|
||||
function apikeyTokenFound(apikey) {
|
||||
function apikeyTokenFound (apikey) {
|
||||
return !!apikey && !!apikey.token;
|
||||
}
|
||||
|
@ -10,7 +10,7 @@ module.exports = function dbConnSetup (pgConnection) {
|
||||
req.profiler.done('dbConnSetup');
|
||||
|
||||
if (err) {
|
||||
if (err.message && -1 !== err.message.indexOf('name not found')) {
|
||||
if (err.message && err.message.indexOf('name not found') !== -1) {
|
||||
err.http_status = 404;
|
||||
}
|
||||
|
||||
|
@ -52,10 +52,10 @@ function isTimeoutError (errorTypes) {
|
||||
return errorTypes.renderTimeoutError || errorTypes.datasourceTimeoutError;
|
||||
}
|
||||
|
||||
function getErrorTypes(error) {
|
||||
function getErrorTypes (error) {
|
||||
return {
|
||||
renderTimeoutError: isRenderTimeoutError(error),
|
||||
datasourceTimeoutError: isDatasourceTimeoutError(error),
|
||||
datasourceTimeoutError: isDatasourceTimeoutError(error)
|
||||
};
|
||||
}
|
||||
|
||||
@ -99,9 +99,9 @@ function populateLimitErrors (errors) {
|
||||
});
|
||||
}
|
||||
|
||||
function findStatusCode(err) {
|
||||
function findStatusCode (err) {
|
||||
var statusCode;
|
||||
if ( err.http_status ) {
|
||||
if (err.http_status) {
|
||||
statusCode = err.http_status;
|
||||
} else {
|
||||
statusCode = statusFromErrorMessage('' + err);
|
||||
@ -111,34 +111,30 @@ function findStatusCode(err) {
|
||||
|
||||
module.exports.findStatusCode = findStatusCode;
|
||||
|
||||
function statusFromErrorMessage(errMsg) {
|
||||
function statusFromErrorMessage (errMsg) {
|
||||
// Find an appropriate statusCode based on message
|
||||
// jshint maxcomplexity:7
|
||||
var statusCode = 400;
|
||||
if ( -1 !== errMsg.indexOf('permission denied') ) {
|
||||
if (errMsg.indexOf('permission denied') !== -1) {
|
||||
statusCode = 403;
|
||||
}
|
||||
else if ( -1 !== errMsg.indexOf('authentication failed') ) {
|
||||
} else if (errMsg.indexOf('authentication failed') !== -1) {
|
||||
statusCode = 403;
|
||||
}
|
||||
else if (errMsg.match(/Postgis Plugin.*[\s|\n].*column.*does not exist/)) {
|
||||
} else if (errMsg.match(/Postgis Plugin.*[\s|\n].*column.*does not exist/)) {
|
||||
statusCode = 400;
|
||||
}
|
||||
else if ( -1 !== errMsg.indexOf('does not exist') ) {
|
||||
if ( -1 !== errMsg.indexOf(' role ') ) {
|
||||
} else if (errMsg.indexOf('does not exist') !== -1) {
|
||||
if (errMsg.indexOf(' role ') !== -1) {
|
||||
statusCode = 403; // role 'xxx' does not exist
|
||||
} else if ( errMsg.match(/function .* does not exist/) ) {
|
||||
} else if (errMsg.match(/function .* does not exist/)) {
|
||||
statusCode = 400; // invalid SQL (SQL function does not exist)
|
||||
} else {
|
||||
statusCode = 404;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return statusCode;
|
||||
}
|
||||
|
||||
function errorMessage(err) {
|
||||
function errorMessage (err) {
|
||||
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
|
||||
var message = (_.isString(err) ? err : err.message) || 'Unknown error';
|
||||
|
||||
@ -147,7 +143,7 @@ function errorMessage(err) {
|
||||
|
||||
module.exports.errorMessage = errorMessage;
|
||||
|
||||
function stripConnectionInfo(message) {
|
||||
function stripConnectionInfo (message) {
|
||||
// Strip connection info, if any
|
||||
return message
|
||||
// See https://github.com/CartoDB/Windshaft/issues/173
|
||||
@ -168,18 +164,18 @@ function shouldBeExposed (prop) {
|
||||
return !!ERROR_INFO_TO_EXPOSE[prop];
|
||||
}
|
||||
|
||||
function errorMessageWithContext(err) {
|
||||
function errorMessageWithContext (err) {
|
||||
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
|
||||
var message = (_.isString(err) ? err : err.message) || 'Unknown error';
|
||||
|
||||
var error = {
|
||||
type: err.type || 'unknown',
|
||||
message: stripConnectionInfo(message),
|
||||
message: stripConnectionInfo(message)
|
||||
};
|
||||
|
||||
for (var prop in err) {
|
||||
// type & message are properties from Error's prototype and will be skipped
|
||||
if (err.hasOwnProperty(prop) && shouldBeExposed(prop)) {
|
||||
if (Object.prototype.hasOwnProperty.call(err, prop) && shouldBeExposed(prop)) {
|
||||
error[prop] = err[prop];
|
||||
}
|
||||
}
|
||||
@ -187,27 +183,27 @@ function errorMessageWithContext(err) {
|
||||
return error;
|
||||
}
|
||||
|
||||
function setErrorHeader(errors, statusCode, res) {
|
||||
let errorsCopy = errors.slice(0);
|
||||
function setErrorHeader (errors, statusCode, res) {
|
||||
const errorsCopy = errors.slice(0);
|
||||
const mainError = errorsCopy.shift();
|
||||
|
||||
let errorsLog = {
|
||||
const errorsLog = {
|
||||
mainError: {
|
||||
statusCode: statusCode || 200,
|
||||
message: mainError.message,
|
||||
name: mainError.name,
|
||||
label: mainError.label,
|
||||
type: mainError.type,
|
||||
subtype: mainError.subtype
|
||||
message: mainError.message,
|
||||
name: mainError.name,
|
||||
label: mainError.label,
|
||||
type: mainError.type,
|
||||
subtype: mainError.subtype
|
||||
}
|
||||
};
|
||||
|
||||
errorsLog.moreErrors = errorsCopy.map(error => {
|
||||
return {
|
||||
message: error.message,
|
||||
name: error.name,
|
||||
label: error.label,
|
||||
type: error.type,
|
||||
name: error.name,
|
||||
label: error.label,
|
||||
type: error.type,
|
||||
subtype: error.subtype
|
||||
};
|
||||
});
|
||||
@ -221,14 +217,14 @@ function setErrorHeader(errors, statusCode, res) {
|
||||
*
|
||||
* @param {Object} object
|
||||
*/
|
||||
function stringifyForLogs(object) {
|
||||
function stringifyForLogs (object) {
|
||||
Object.keys(object).map(key => {
|
||||
if(typeof object[key] === 'string') {
|
||||
if (typeof object[key] === 'string') {
|
||||
object[key] = object[key].replace(/[^a-zA-Z0-9]/g, ' ');
|
||||
} else if (typeof object[key] === 'object') {
|
||||
stringifyForLogs(object[key]);
|
||||
} else if (object[key] instanceof Array) {
|
||||
for (let element of object[key]) {
|
||||
for (const element of object[key]) {
|
||||
stringifyForLogs(element);
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function incrementMapViewCount (metadataBackend) {
|
||||
return function incrementMapViewCountMiddleware(req, res, next) {
|
||||
return function incrementMapViewCountMiddleware (req, res, next) {
|
||||
const { mapConfig, user } = res.locals;
|
||||
|
||||
// Error won't blow up, just be logged.
|
||||
|
@@ -1,18 +1,18 @@
 'use strict';
 
 module.exports = function setLastModifiedHeader () {
-    return function setLastModifiedHeaderMiddleware(req, res, next) {
+    return function setLastModifiedHeaderMiddleware (req, res, next) {
         if (req.method !== 'GET') {
             return next();
         }
 
-        const { mapConfigProvider, cache_buster } = res.locals;
+        const { mapConfigProvider, cache_buster: cacheBuster } = res.locals;
 
-        if (cache_buster) {
-            const cacheBuster = parseInt(cache_buster, 10);
-            const lastModifiedDate = Number.isFinite(cacheBuster) && cacheBuster !== 0 ?
-                new Date(cacheBuster) :
-                new Date();
+        if (cacheBuster) {
+            const cacheBusterTimestamp = parseInt(cacheBuster, 10);
+            const lastModifiedDate = Number.isFinite(cacheBusterTimestamp) && cacheBusterTimestamp !== 0
+                ? new Date(cacheBusterTimestamp)
+                : new Date();
 
             res.set('Last-Modified', lastModifiedDate.toUTCString());
 
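The `cache_buster: cacheBuster` destructuring above is the pattern this commit uses throughout to satisfy the camelcase rule without renaming externally visible keys: the snake_case property stays on res.locals, req.params and req.query, and only the local binding becomes camelCase. A brief illustration (the object and values are hypothetical):

    // The incoming object keeps its snake_case keys...
    const locals = { cache_buster: '1574110000000', api_key: 'secret' };

    // ...while destructuring introduces camelCase local names:
    const { cache_buster: cacheBuster, api_key: apiKey } = locals;

    console.log(cacheBuster); // '1574110000000'
    console.log(apiKey);      // 'secret'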
@ -27,12 +27,12 @@ module.exports = function setLastUpdatedTimeToLayergroup () {
|
||||
};
|
||||
};
|
||||
|
||||
function getLastUpdatedTime(analysesResults, lastUpdateTime) {
|
||||
function getLastUpdatedTime (analysesResults, lastUpdateTime) {
|
||||
if (!Array.isArray(analysesResults)) {
|
||||
return lastUpdateTime;
|
||||
}
|
||||
return analysesResults.reduce(function(lastUpdateTime, analysis) {
|
||||
return analysis.getNodes().reduce(function(lastNodeUpdatedAtTime, node) {
|
||||
return analysesResults.reduce(function (lastUpdateTime, analysis) {
|
||||
return analysis.getNodes().reduce(function (lastNodeUpdatedAtTime, node) {
|
||||
var nodeUpdatedAtDate = node.getUpdatedAt();
|
||||
var nodeUpdatedTimeAt = (nodeUpdatedAtDate && nodeUpdatedAtDate.getTime()) || 0;
|
||||
return nodeUpdatedTimeAt > lastNodeUpdatedAtTime ? nodeUpdatedTimeAt : lastNodeUpdatedAtTime;
|
||||
|
@ -1,7 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function setLayerStats (pgConnection, statsBackend) {
|
||||
return function setLayerStatsMiddleware(req, res, next) {
|
||||
return function setLayerStatsMiddleware (req, res, next) {
|
||||
const { user, mapConfig } = res.locals;
|
||||
const layergroup = res.body;
|
||||
|
||||
@ -10,7 +10,7 @@ module.exports = function setLayerStats (pgConnection, statsBackend) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
statsBackend.getStats(mapConfig, connection, function(err, layersStats) {
|
||||
statsBackend.getStats(mapConfig, connection, function (err, layersStats) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
@ -9,7 +9,7 @@ module.exports = function setMetadataToLayergroup (layergroupMetadata, includeQu
|
||||
layergroupMetadata.addAnalysesMetadata(user, layergroup, analysesResults, includeQuery);
|
||||
layergroupMetadata.addTurboCartoContextMetadata(layergroup, mapConfig.obj(), context);
|
||||
layergroupMetadata.addAggregationContextMetadata(layergroup, mapConfig.obj(), context);
|
||||
layergroupMetadata.addDateWrappingMetadata (layergroup, mapConfig.obj());
|
||||
layergroupMetadata.addDateWrappingMetadata(layergroup, mapConfig.obj());
|
||||
layergroupMetadata.addTileJsonMetadata(layergroup, user, mapConfig, userApiKey);
|
||||
|
||||
next();
|
||||
|
@ -19,7 +19,7 @@ module.exports = function layergroupToken () {
|
||||
if (res.locals.signer !== user) {
|
||||
const err = new Error(authErrorMessageTemplate(res.locals.signer, user));
|
||||
err.type = 'auth';
|
||||
err.http_status = (req.query && req.query.callback) ? 200: 403;
|
||||
err.http_status = (req.query && req.query.callback) ? 200 : 403;
|
||||
|
||||
return next(err);
|
||||
}
|
||||
|
@@ -6,20 +6,20 @@ module.exports = function lzma () {
     const lzmaWorker = new LZMA();
 
     return function lzmaMiddleware (req, res, next) {
-        if (!req.query.hasOwnProperty('lzma')) {
+        if (!Object.prototype.hasOwnProperty.call(req.query, 'lzma')) {
             return next();
         }
 
         // Decode (from base64)
-        var lzma = new Buffer(req.query.lzma, 'base64')
+        var lzma = Buffer.from(req.query.lzma, 'base64')
             .toString('binary')
             .split('')
-            .map(function(c) {
+            .map(function (c) {
                 return c.charCodeAt(0) - 128;
             });
 
         // Decompress
-        lzmaWorker.decompress(lzma, function(result) {
+        lzmaWorker.decompress(lzma, function (result) {
             try {
                 delete req.query.lzma;
                 Object.assign(req.query, JSON.parse(result));
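The switch from `new Buffer(...)` to `Buffer.from(...)` above follows Node.js's deprecation of the Buffer constructor (DEP0005), whose behaviour depends on the argument type and, for numeric arguments, can hand back uninitialised memory. A short illustration of the replacement APIs (not from the codebase):

    // Replacements for the deprecated constructor (Node.js 4.5 / 5.10 and later):
    const fromBase64 = Buffer.from('aGVsbG8=', 'base64'); // decode a base64 string
    const zeroed = Buffer.alloc(16);                       // 16 zero-filled bytes
    const raw = Buffer.allocUnsafe(16);                    // faster, but uninitialised

    console.log(fromBase64.toString('utf8')); // 'hello'
    console.log(zeroed.length, raw.length);   // 16 16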
@ -17,7 +17,7 @@ module.exports = function mapError (options) {
|
||||
};
|
||||
};
|
||||
|
||||
function populateError(err, mapConfig) {
|
||||
function populateError (err, mapConfig) {
|
||||
var error = new Error(err.message);
|
||||
error.http_status = err.http_status;
|
||||
|
||||
|
@ -10,15 +10,27 @@ module.exports = function createMapStoreMapConfigProvider (
|
||||
forcedFormat = null
|
||||
) {
|
||||
return function createMapStoreMapConfigProviderMiddleware (req, res, next) {
|
||||
const { user, token, cache_buster, api_key } = res.locals;
|
||||
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const { layer: layerFromParams, z, x, y, scale_factor, format } = req.params;
|
||||
const { layer: layerFromParams, z, x, y, scale_factor: scaleFactor, format } = req.params;
|
||||
const { layer: layerFromQuery } = req.query;
|
||||
|
||||
const params = {
|
||||
user, token, cache_buster, api_key,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
layer: (layerFromQuery || layerFromParams), z, x, y, scale_factor, format
|
||||
user,
|
||||
token,
|
||||
cache_buster: cacheBuster,
|
||||
api_key: apiKey,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
layer: (layerFromQuery || layerFromParams),
|
||||
z,
|
||||
x,
|
||||
y,
|
||||
scale_factor: scaleFactor,
|
||||
format
|
||||
};
|
||||
|
||||
if (forcedFormat) {
|
||||
|
@ -2,15 +2,27 @@
|
||||
|
||||
module.exports = function getNamedMapProvider ({ namedMapProviderCache, label, forcedFormat = null }) {
|
||||
return function getNamedMapProviderMiddleware (req, res, next) {
|
||||
const { user, token, cache_buster, api_key } = res.locals;
|
||||
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const { template_id, layer: layerFromParams, z, x, y, format } = req.params;
|
||||
const { template_id: templateId, layer: layerFromParams, z, x, y, format } = req.params;
|
||||
const { layer: layerFromQuery } = req.query;
|
||||
|
||||
const params = {
|
||||
user, token, cache_buster, api_key,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
template_id, layer: (layerFromQuery || layerFromParams), z, x, y, format
|
||||
user,
|
||||
token,
|
||||
cache_buster: cacheBuster,
|
||||
api_key: apiKey,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
template_id: templateId,
|
||||
layer: (layerFromQuery || layerFromParams),
|
||||
z,
|
||||
x,
|
||||
y,
|
||||
format
|
||||
};
|
||||
|
||||
if (forcedFormat) {
|
||||
@ -18,9 +30,9 @@ module.exports = function getNamedMapProvider ({ namedMapProviderCache, label, f
|
||||
params.layer = params.layer || 'all';
|
||||
}
|
||||
|
||||
const { config, auth_token } = req.query;
|
||||
const { config, auth_token: authToken } = req.query;
|
||||
|
||||
namedMapProviderCache.get(user, template_id, config, auth_token, params, (err, namedMapProvider) => {
|
||||
namedMapProviderCache.get(user, templateId, config, authToken, params, (err, namedMapProvider) => {
|
||||
if (err) {
|
||||
err.label = label;
|
||||
return next(err);
|
||||
|
@ -19,12 +19,12 @@ const RATE_LIMIT_ENDPOINTS_GROUPS = {
|
||||
NAMED_TILES: 'named_tiles'
|
||||
};
|
||||
|
||||
function rateLimit(userLimitsBackend, endpointGroup = null) {
|
||||
function rateLimit (userLimitsBackend, endpointGroup = null) {
|
||||
if (!isRateLimitEnabled(endpointGroup)) {
|
||||
return function rateLimitDisabledMiddleware(req, res, next) { next(); };
|
||||
return function rateLimitDisabledMiddleware (req, res, next) { next(); };
|
||||
}
|
||||
|
||||
return function rateLimitMiddleware(req, res, next) {
|
||||
return function rateLimitMiddleware (req, res, next) {
|
||||
userLimitsBackend.getRateLimit(res.locals.user, endpointGroup, function (err, userRateLimit) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
@ -46,7 +46,7 @@ function rateLimit(userLimitsBackend, endpointGroup = null) {
|
||||
// retry is floor rounded in seconds by redis-cell
|
||||
res.set('Retry-After', retry + 1);
|
||||
|
||||
let rateLimitError = new Error(
|
||||
const rateLimitError = new Error(
|
||||
'You are over platform\'s limits: too many requests.' +
|
||||
' Please contact us to know more details'
|
||||
);
|
||||
@ -61,8 +61,7 @@ function rateLimit(userLimitsBackend, endpointGroup = null) {
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
function isRateLimitEnabled(endpointGroup) {
|
||||
function isRateLimitEnabled (endpointGroup) {
|
||||
return global.environment.enabledFeatures.rateLimitsEnabled &&
|
||||
endpointGroup &&
|
||||
global.environment.enabledFeatures.rateLimitsByEndpoint[endpointGroup];
|
||||
|
@ -20,7 +20,7 @@ module.exports = function stats (options) {
|
||||
// May throw due to dns, see: http://github.com/CartoDB/Windshaft/issues/166
|
||||
req.profiler.sendStats();
|
||||
} catch (err) {
|
||||
debug("error sending profiling stats: " + err);
|
||||
debug('error sending profiling stats: ' + err);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -4,7 +4,7 @@ const NamedMapsCacheEntry = require('../../cache/model/named-maps-entry');
|
||||
const NamedMapMapConfigProvider = require('../../models/mapconfig/provider/named-map-provider');
|
||||
|
||||
module.exports = function setSurrogateKeyHeader ({ surrogateKeysCache }) {
|
||||
return function setSurrogateKeyHeaderMiddleware(req, res, next) {
|
||||
return function setSurrogateKeyHeaderMiddleware (req, res, next) {
|
||||
const { user, mapConfigProvider } = res.locals;
|
||||
|
||||
if (mapConfigProvider instanceof NamedMapMapConfigProvider) {
|
||||
|
@ -5,7 +5,7 @@ const CdbRequest = require('../../models/cdb-request');
|
||||
module.exports = function user () {
|
||||
const cdbRequest = new CdbRequest();
|
||||
|
||||
return function userMiddleware(req, res, next) {
|
||||
return function userMiddleware (req, res, next) {
|
||||
res.locals.user = cdbRequest.userByReq(req);
|
||||
|
||||
next();
|
||||
|
@ -1,12 +1,12 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const timeoutErrorVectorTile = fs.readFileSync(__dirname + '/../../../assets/render-timeout-fallback.mvt');
|
||||
|
||||
module.exports = function vectorError() {
|
||||
return function vectorErrorMiddleware(err, req, res, next) {
|
||||
if(req.params.format === 'mvt') {
|
||||
const path = require('path');
|
||||
const timeoutErrorVectorTile = fs.readFileSync(path.join(__dirname, '/../../../assets/render-timeout-fallback.mvt'));
|
||||
|
||||
module.exports = function vectorError () {
|
||||
return function vectorErrorMiddleware (err, req, res, next) {
|
||||
if (req.params.format === 'mvt') {
|
||||
if (isTimeoutError(err) || isRateLimitError(err)) {
|
||||
res.set('Content-Type', 'application/x-protobuf');
|
||||
return res.status(429).send(timeoutErrorVectorTile);
|
||||
@ -17,7 +17,6 @@ module.exports = function vectorError() {
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
function isRenderTimeoutError (err) {
|
||||
return err.message === 'Render timed out';
|
||||
}
|
||||
|
@ -19,7 +19,7 @@ module.exports = class AdminTemplateController {
|
||||
}
|
||||
|
||||
route (templateRouter) {
|
||||
templateRouter.options(`/:template_id`);
|
||||
templateRouter.options('/:template_id');
|
||||
|
||||
templateRouter.post('/', this.middlewares({
|
||||
action: 'create',
|
||||
|
@ -106,7 +106,7 @@ module.exports = class NamedMapController {
|
||||
lastModifiedHeader(),
|
||||
lastUpdatedTimeLayergroup(),
|
||||
layerStats(this.pgConnection, this.statsBackend),
|
||||
layergroupIdHeader(this.templateMaps ,useTemplateHash),
|
||||
layergroupIdHeader(this.templateMaps, useTemplateHash),
|
||||
layergroupMetadata(this.layergroupMetadata, includeQuery),
|
||||
mapError({ label, addContext })
|
||||
];
|
||||
@ -114,7 +114,7 @@ module.exports = class NamedMapController {
|
||||
};
|
||||
|
||||
function checkInstantiteLayergroup () {
|
||||
return function checkInstantiteLayergroupMiddleware(req, res, next) {
|
||||
return function checkInstantiteLayergroupMiddleware (req, res, next) {
|
||||
if (req.method === 'GET') {
|
||||
const { callback, config } = req.query;
|
||||
|
||||
@ -125,7 +125,7 @@ function checkInstantiteLayergroup () {
|
||||
if (config) {
|
||||
try {
|
||||
req.body = JSON.parse(config);
|
||||
} catch(e) {
|
||||
} catch (e) {
|
||||
return next(new Error('Invalid config parameter, should be a valid JSON'));
|
||||
}
|
||||
}
|
||||
@ -148,8 +148,8 @@ function getTemplate (
|
||||
return function getTemplateMiddleware (req, res, next) {
|
||||
const templateParams = req.body;
|
||||
const { user, dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const { template_id } = req.params;
|
||||
const { auth_token } = req.query;
|
||||
const { template_id: templateId } = req.params;
|
||||
const { auth_token: authToken } = req.query;
|
||||
|
||||
const params = Object.assign({ dbuser, dbname, dbpassword, dbhost, dbport }, req.query);
|
||||
|
||||
@ -161,9 +161,9 @@ function getTemplate (
|
||||
mapConfigAdapter,
|
||||
affectedTablesCache,
|
||||
user,
|
||||
template_id,
|
||||
templateId,
|
||||
templateParams,
|
||||
auth_token,
|
||||
authToken,
|
||||
params
|
||||
);
|
||||
|
||||
|
@ -21,7 +21,7 @@ module.exports = class TemplateRouter {
|
||||
authBackend,
|
||||
layergroupMetadata,
|
||||
namedMapProviderCache,
|
||||
tileBackend,
|
||||
tileBackend
|
||||
} = collaborators;
|
||||
|
||||
this.namedMapController = new NamedMapController(
|
||||
|
@ -89,7 +89,7 @@ function getTile ({ tileBackend, label }) {
|
||||
}
|
||||
|
||||
function setContentTypeHeader () {
|
||||
return function setContentTypeHeaderMiddleware(req, res, next) {
|
||||
return function setContentTypeHeaderMiddleware (req, res, next) {
|
||||
res.set('Content-Type', res.get('content-type') || res.get('Content-Type') || 'image/png');
|
||||
|
||||
next();
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
var PSQL = require('cartodb-psql');
|
||||
|
||||
function AnalysisStatusBackend() {
|
||||
function AnalysisStatusBackend () {
|
||||
}
|
||||
|
||||
module.exports = AnalysisStatusBackend;
|
||||
@ -15,7 +15,7 @@ AnalysisStatusBackend.prototype.getNodeStatus = function (nodeId, dbParams, call
|
||||
|
||||
var pg = new PSQL(dbParams);
|
||||
|
||||
pg.query(statusQuery, function(err, result) {
|
||||
pg.query(statusQuery, function (err, result) {
|
||||
if (err) {
|
||||
return callback(err, result);
|
||||
}
|
||||
|
@@ -43,7 +43,7 @@ AnalysisBackend.prototype.setLoggerConfig = function (options) {
}
};

AnalysisBackend.prototype.create = function(analysisConfiguration, analysisDefinition, callback) {
AnalysisBackend.prototype.create = function (analysisConfiguration, analysisDefinition, callback) {
analysisConfiguration.batch.endpoint = this.batchConfig.endpoint;
analysisConfiguration.batch.inlineExecution = this.batchConfig.inlineExecution;
analysisConfiguration.batch.hostHeaderTemplate = this.batchConfig.hostHeaderTemplate;
@@ -52,13 +52,14 @@ AnalysisBackend.prototype.create = function(analysisConfiguration, analysisDefin
stream: this.stream ? this.stream : process.stdout
};

this.getAnalysesLimits(analysisConfiguration.user, function(err, limits) {
this.getAnalysesLimits(analysisConfiguration.user, function (err, limits) {
if (err) {}
analysisConfiguration.limits = limits || {};
camshaft.create(analysisConfiguration, analysisDefinition, callback);
});
};

AnalysisBackend.prototype.getAnalysesLimits = function(username, callback) {
AnalysisBackend.prototype.getAnalysesLimits = function (username, callback) {
var self = this;

var analysesLimits = {
@@ -70,16 +71,17 @@ AnalysisBackend.prototype.getAnalysesLimits = function(username, callback) {
}
};

Object.keys(self.options.limits).forEach(function(analysisTypeOrTag) {
Object.keys(self.options.limits).forEach(function (analysisTypeOrTag) {
analysesLimits.analyses[analysisTypeOrTag] = _.extend({}, self.options.limits[analysisTypeOrTag]);
});

var analysesLimitsKey = REDIS_LIMITS.PREFIX + username;
this.metadataBackend.redisCmd(REDIS_LIMITS.DB, 'HGETALL', [analysesLimitsKey], function(err, analysesTimeouts) {
this.metadataBackend.redisCmd(REDIS_LIMITS.DB, 'HGETALL', [analysesLimitsKey], function (err, analysesTimeouts) {
if (err) {}
// analysesTimeouts wil be something like: { moran: 3000, intersection: 5000 }
analysesTimeouts = analysesTimeouts || {};

Object.keys(analysesTimeouts).forEach(function(analysisType) {
Object.keys(analysesTimeouts).forEach(function (analysisType) {
analysesLimits.analyses[analysisType] = _.defaults(
{
timeout: Number.isFinite(+analysesTimeouts[analysisType]) ? +analysesTimeouts[analysisType] : 0

@ -9,7 +9,7 @@
|
||||
* @constructor
|
||||
* @type {AuthBackend}
|
||||
*/
|
||||
function AuthBackend(pgConnection, metadataBackend, mapStore, templateMaps) {
|
||||
function AuthBackend (pgConnection, metadataBackend, mapStore, templateMaps) {
|
||||
this.pgConnection = pgConnection;
|
||||
this.metadataBackend = metadataBackend;
|
||||
this.mapStore = mapStore;
|
||||
@ -25,28 +25,28 @@ module.exports = AuthBackend;
|
||||
// null if the request is not signed by anyone
|
||||
// or will be a string cartodb username otherwise.
|
||||
//
|
||||
AuthBackend.prototype.authorizedBySigner = function(req, res, callback) {
|
||||
if ( ! res.locals.token || ! res.locals.signer ) {
|
||||
AuthBackend.prototype.authorizedBySigner = function (req, res, callback) {
|
||||
if (!res.locals.token || !res.locals.signer) {
|
||||
return callback(null, false); // no signer requested
|
||||
}
|
||||
|
||||
var self = this;
|
||||
|
||||
var layergroup_id = res.locals.token;
|
||||
var auth_token = req.query.auth_token;
|
||||
var layergroupId = res.locals.token;
|
||||
var authToken = req.query.auth_token;
|
||||
|
||||
this.mapStore.load(layergroup_id, function(err, mapConfig) {
|
||||
this.mapStore.load(layergroupId, function (err, mapConfig) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
var authorized = self.templateMaps.isAuthorized(mapConfig.obj().template, auth_token);
|
||||
var authorized = self.templateMaps.isAuthorized(mapConfig.obj().template, authToken);
|
||||
|
||||
return callback(null, authorized);
|
||||
});
|
||||
};
|
||||
|
||||
function isValidApiKey(apikey) {
|
||||
function isValidApiKey (apikey) {
|
||||
return apikey.type &&
|
||||
apikey.user &&
|
||||
apikey.databasePassword &&
|
||||
@ -60,11 +60,11 @@ function isValidApiKey(apikey) {
|
||||
// @param callback function(err, authorized)
|
||||
// NOTE: authorized is expected to be 0 or 1 (integer)
|
||||
//
|
||||
AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
|
||||
AuthBackend.prototype.authorizedByAPIKey = function (user, res, callback) {
|
||||
const apikeyToken = res.locals.api_key;
|
||||
const basicAuthUsername = res.locals.basicAuthUsername;
|
||||
|
||||
if ( ! apikeyToken ) {
|
||||
if (!apikeyToken) {
|
||||
return callback(null, false); // no api key, no authorization...
|
||||
}
|
||||
|
||||
@ -77,7 +77,7 @@ AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
if ( !isValidApiKey(apikey)) {
|
||||
if (!isValidApiKey(apikey)) {
|
||||
const error = new Error('Unauthorized');
|
||||
error.type = 'auth';
|
||||
error.subtype = 'api-key-not-found';
|
||||
@ -109,7 +109,7 @@ AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
|
||||
};
|
||||
|
||||
function isNameNotFoundError (err) {
|
||||
return err.message && -1 !== err.message.indexOf('name not found');
|
||||
return err.message && err.message.indexOf('name not found') !== -1;
|
||||
}
|
||||
|
||||
function usernameMatches (basicAuthUsername, requestUsername) {
|
||||
@ -123,7 +123,7 @@ function usernameMatches (basicAuthUsername, requestUsername) {
|
||||
* @param res - standard res object. Contains the auth parameters in locals
|
||||
* @param callback function(err, allowed) is access allowed not?
|
||||
*/
|
||||
AuthBackend.prototype.authorize = function(req, res, callback) {
|
||||
AuthBackend.prototype.authorize = function (req, res, callback) {
|
||||
var user = res.locals.user;
|
||||
|
||||
this.authorizedByAPIKey(user, res, (err, isAuthorizedByApikey) => {
|
||||
|
@ -71,8 +71,8 @@ function getFeatures (pg, layer, params, callback) {
|
||||
}
|
||||
|
||||
const SKIP_COLUMNS = {
|
||||
'the_geom': true,
|
||||
'the_geom_webmercator': true
|
||||
the_geom: true,
|
||||
the_geom_webmercator: true
|
||||
};
|
||||
|
||||
function getColumnsName (pg, query, callback) {
|
||||
@ -100,7 +100,7 @@ function getClusterFeatures (pg, zoom, clusterId, columns, query, resolution, ag
|
||||
zoom: zoom,
|
||||
id: clusterId,
|
||||
query: query,
|
||||
res: 256/resolution,
|
||||
res: 256 / resolution,
|
||||
columns: columns
|
||||
});
|
||||
|
||||
@ -127,7 +127,7 @@ function getClusterFeatures (pg, zoom, clusterId, columns, query, resolution, ag
|
||||
}
|
||||
|
||||
return callback(null, data);
|
||||
} , true); // use read-only transaction
|
||||
}, true); // use read-only transaction
|
||||
}
|
||||
|
||||
const schemaQuery = ctx => `SELECT * FROM (${ctx.query}) __cdb_cluster_schema LIMIT 0`;
|
||||
@ -159,8 +159,8 @@ const clusterFeaturesQuery = ctx => `
|
||||
`;
|
||||
|
||||
const gridResolution = ctx => {
|
||||
const zoomResolution = webmercator.getResolution({ z : Math.min(38, ctx.zoom) });
|
||||
return `${256/ctx.res} * (${zoomResolution})::double precision`;
|
||||
const zoomResolution = webmercator.getResolution({ z: Math.min(38, ctx.zoom) });
|
||||
return `${256 / ctx.res} * (${zoomResolution})::double precision`;
|
||||
};
|
||||
|
||||
const aggregationQuery = ctx => `
|
||||
@ -194,9 +194,8 @@ function parseAggregation (aggregation) {
|
||||
try {
|
||||
aggregation = JSON.parse(aggregation);
|
||||
} catch (err) {
|
||||
throw new Error(`Invalid aggregation input, should be a a valid JSON`);
|
||||
throw new Error('Invalid aggregation input, should be a a valid JSON');
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return aggregation;
|
||||
@ -207,7 +206,7 @@ function validateAggregation (aggregation) {
|
||||
const { columns, expressions } = aggregation;
|
||||
|
||||
if (!hasColumns(columns)) {
|
||||
throw new Error(`Invalid aggregation input, columns should be and array of column names`);
|
||||
throw new Error('Invalid aggregation input, columns should be and array of column names');
|
||||
}
|
||||
|
||||
validateExpressions(expressions);
|
||||
@ -221,16 +220,16 @@ function hasColumns (columns) {
|
||||
function validateExpressions (expressions) {
|
||||
if (expressions !== undefined) {
|
||||
if (!isValidExpression(expressions)) {
|
||||
throw new Error(`Invalid aggregation input, expressions should be and object with valid functions`);
|
||||
throw new Error('Invalid aggregation input, expressions should be and object with valid functions');
|
||||
}
|
||||
|
||||
for (const { aggregate_function, aggregated_column } of Object.values(expressions)) {
|
||||
if (typeof aggregated_column !== 'string') {
|
||||
throw new Error(`Invalid aggregation input, aggregated column should be an string`);
|
||||
for (const { aggregate_function: aggregateFunction, aggregated_column: aggregatedColumn } of Object.values(expressions)) {
|
||||
if (typeof aggregatedColumn !== 'string') {
|
||||
throw new Error('Invalid aggregation input, aggregated column should be an string');
|
||||
}
|
||||
|
||||
if (typeof aggregate_function !== 'string') {
|
||||
throw new Error(`Invalid aggregation input, aggregate function should be an string`);
|
||||
if (typeof aggregateFunction !== 'string') {
|
||||
throw new Error('Invalid aggregation input, aggregate function should be an string');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -14,7 +14,7 @@ var overviewsQueryRewriter = new OverviewsQueryRewriter({
|
||||
var dot = require('dot');
|
||||
dot.templateSettings.strip = false;
|
||||
|
||||
function DataviewBackend(analysisBackend) {
|
||||
function DataviewBackend (analysisBackend) {
|
||||
this.analysisBackend = analysisBackend;
|
||||
}
|
||||
|
||||
@ -84,14 +84,14 @@ function getQueryWithFilters (dataviewDefinition, params) {
|
||||
var query = getDataviewQuery(dataviewDefinition, ownFilter, noFilters);
|
||||
|
||||
if (params.bbox) {
|
||||
var bboxFilter = new BBoxFilter({column: 'the_geom_webmercator', srid: 3857}, {bbox: params.bbox});
|
||||
var bboxFilter = new BBoxFilter({ column: 'the_geom_webmercator', srid: 3857 }, { bbox: params.bbox });
|
||||
query = bboxFilter.sql(query);
|
||||
}
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
function getDataviewQuery(dataviewDefinition, ownFilter, noFilters) {
|
||||
function getDataviewQuery (dataviewDefinition, ownFilter, noFilters) {
|
||||
if (noFilters) {
|
||||
return dataviewDefinition.sql.no_filters;
|
||||
} else if (ownFilter === 1) {
|
||||
@ -101,9 +101,9 @@ function getDataviewQuery(dataviewDefinition, ownFilter, noFilters) {
|
||||
}
|
||||
}
|
||||
|
||||
function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
|
||||
function getQueryRewriteData (mapConfig, dataviewDefinition, params) {
|
||||
var sourceId = dataviewDefinition.source.id; // node.id
|
||||
var layer = _.find(mapConfig.obj().layers, function(l) {
|
||||
var layer = _.find(mapConfig.obj().layers, function (l) {
|
||||
return l.options.source && (l.options.source.id === sourceId);
|
||||
});
|
||||
var queryRewriteData = layer && layer.options.query_rewrite_data;
|
||||
@ -115,7 +115,7 @@ function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
|
||||
}
|
||||
|
||||
if (params.bbox && queryRewriteData) {
|
||||
var bbox_filter_definition = {
|
||||
var bboxFilterDefinition = {
|
||||
type: 'bbox',
|
||||
options: {
|
||||
column: 'the_geom_webmercator',
|
||||
@ -125,22 +125,22 @@ function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
|
||||
bbox: params.bbox
|
||||
}
|
||||
};
|
||||
queryRewriteData = _.extend(queryRewriteData, { bbox_filter: bbox_filter_definition });
|
||||
queryRewriteData = _.extend(queryRewriteData, { bbox_filter: bboxFilterDefinition });
|
||||
}
|
||||
|
||||
return queryRewriteData;
|
||||
}
|
||||
|
||||
function getOverrideParams(params, ownFilter) {
|
||||
function getOverrideParams (params, ownFilter) {
|
||||
var overrideParams = _.reduce(_.pick(params, 'start', 'end', 'bins', 'offset', 'categories'),
|
||||
function castNumbers(overrides, val, k) {
|
||||
function castNumbers (overrides, val, k) {
|
||||
if (!Number.isFinite(+val)) {
|
||||
throw new Error('Invalid number format for parameter \'' + k + '\'');
|
||||
}
|
||||
overrides[k] = +val;
|
||||
return overrides;
|
||||
},
|
||||
{ownFilter: ownFilter}
|
||||
{ ownFilter: ownFilter }
|
||||
);
|
||||
|
||||
// validation will be delegated to the proper dataview
|
||||
@ -202,7 +202,7 @@ function getQueryWithOwnFilters (dataviewDefinition, params) {
|
||||
return query;
|
||||
}
|
||||
|
||||
function getDataviewDefinition(mapConfig, dataviewName) {
|
||||
function getDataviewDefinition (mapConfig, dataviewName) {
|
||||
var dataviews = mapConfig.dataviews || {};
|
||||
return dataviews[dataviewName];
|
||||
}
|
||||
|
@ -3,32 +3,32 @@
|
||||
var _ = require('underscore');
|
||||
var AnalysisFilter = require('../models/filter/analysis');
|
||||
|
||||
function FilterStatsBackends(pgQueryRunner) {
|
||||
function FilterStatsBackends (pgQueryRunner) {
|
||||
this.pgQueryRunner = pgQueryRunner;
|
||||
}
|
||||
|
||||
module.exports = FilterStatsBackends;
|
||||
|
||||
function getEstimatedRows(pgQueryRunner, username, query, callback) {
|
||||
pgQueryRunner.run(username, "EXPLAIN (FORMAT JSON)"+query, function(err, result_rows) {
|
||||
if (err){
|
||||
function getEstimatedRows (pgQueryRunner, username, query, callback) {
|
||||
pgQueryRunner.run(username, 'EXPLAIN (FORMAT JSON)' + query, function (err, resultRows) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
var rows;
|
||||
if ( result_rows[0] && result_rows[0]['QUERY PLAN'] &&
|
||||
result_rows[0]['QUERY PLAN'][0] && result_rows[0]['QUERY PLAN'][0].Plan ) {
|
||||
rows = result_rows[0]['QUERY PLAN'][0].Plan['Plan Rows'];
|
||||
if (resultRows[0] && resultRows[0]['QUERY PLAN'] &&
|
||||
resultRows[0]['QUERY PLAN'][0] && resultRows[0]['QUERY PLAN'][0].Plan) {
|
||||
rows = resultRows[0]['QUERY PLAN'][0].Plan['Plan Rows'];
|
||||
}
|
||||
return callback(null, rows);
|
||||
});
|
||||
}
|
||||
|
||||
FilterStatsBackends.prototype.getFilterStats = function (username, unfiltered_query, filters, callback) {
|
||||
FilterStatsBackends.prototype.getFilterStats = function (username, unfilteredQuery, filters, callback) {
|
||||
var stats = {};
|
||||
|
||||
getEstimatedRows(this.pgQueryRunner, username, unfiltered_query, (err, rows) => {
|
||||
if (err){
|
||||
getEstimatedRows(this.pgQueryRunner, username, unfilteredQuery, (err, rows) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
@ -39,10 +39,10 @@ FilterStatsBackends.prototype.getFilterStats = function (username, unfiltered_qu
|
||||
}
|
||||
|
||||
var analysisFilter = new AnalysisFilter(filters);
|
||||
var query = analysisFilter.sql(unfiltered_query);
|
||||
var query = analysisFilter.sql(unfilteredQuery);
|
||||
|
||||
getEstimatedRows(this.pgQueryRunner, username, query, (err, rows) => {
|
||||
if (err){
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
function EmptyLayerStats(types) {
|
||||
function EmptyLayerStats (types) {
|
||||
this._types = types || {};
|
||||
}
|
||||
|
||||
@ -10,7 +10,7 @@ EmptyLayerStats.prototype.is = function (type) {
|
||||
|
||||
EmptyLayerStats.prototype.getStats =
|
||||
function (layer, dbConnection, callback) {
|
||||
setImmediate(function() {
|
||||
setImmediate(function () {
|
||||
callback(null, {});
|
||||
});
|
||||
};
|
||||
|
@ -5,7 +5,7 @@ var EmptyLayerStats = require('./empty-layer-stats');
|
||||
var MapnikLayerStats = require('./mapnik-layer-stats');
|
||||
var TorqueLayerStats = require('./torque-layer-stats');
|
||||
|
||||
module.exports = function LayerStatsFactory(type) {
|
||||
module.exports = function LayerStatsFactory (type) {
|
||||
var layerStatsIterator = [];
|
||||
var selectedType = type || 'ALL';
|
||||
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
var queue = require('queue-async');
|
||||
|
||||
function LayerStats(layerStatsIterator) {
|
||||
function LayerStats (layerStatsIterator) {
|
||||
this.layerStatsIterator = layerStatsIterator;
|
||||
}
|
||||
|
||||
@ -41,7 +41,6 @@ LayerStats.prototype.getStats = function (mapConfig, dbConnection, callback) {
|
||||
|
||||
return callback(err, stats);
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
module.exports = LayerStats;
|
||||
|
@ -15,7 +15,7 @@ MapnikLayerStats.prototype.is = function (type) {
|
||||
return this._types[type] ? this._types[type] : false;
|
||||
};
|
||||
|
||||
function columnAggregations(field) {
|
||||
function columnAggregations (field) {
|
||||
if (field.type === 'number') {
|
||||
return ['min', 'max', 'avg', 'sum'];
|
||||
}
|
||||
@ -28,25 +28,24 @@ function columnAggregations(field) {
|
||||
return [];
|
||||
}
|
||||
|
||||
function _getSQL(ctx, query, type='pre', zoom=0) {
|
||||
function _getSQL (ctx, query, type = 'pre', zoom = 0) {
|
||||
let sql;
|
||||
if (type === 'pre') {
|
||||
sql = ctx.preQuery;
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
sql = ctx.aggrQuery;
|
||||
}
|
||||
sql = queryUtils.substituteTokensForZoom(sql, zoom || 0);
|
||||
return query(sql);
|
||||
}
|
||||
|
||||
function _estimatedFeatureCount(ctx) {
|
||||
function _estimatedFeatureCount (ctx) {
|
||||
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, queryUtils.getQueryRowEstimation))
|
||||
.then(res => ({ estimatedFeatureCount: res.rows[0].rows }))
|
||||
.catch(() => ({ estimatedFeatureCount: -1 }));
|
||||
}
|
||||
|
||||
function _featureCount(ctx) {
|
||||
function _featureCount (ctx) {
|
||||
if (ctx.metaOptions.featureCount) {
|
||||
// TODO: if ctx.metaOptions.columnStats we can combine this with column stats query
|
||||
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, queryUtils.getQueryActualRowCount))
|
||||
@ -55,20 +54,20 @@ function _featureCount(ctx) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
function _aggrFeatureCount(ctx) {
|
||||
if (ctx.metaOptions.hasOwnProperty('aggrFeatureCount')) {
|
||||
function _aggrFeatureCount (ctx) {
|
||||
if (Object.prototype.hasOwnProperty.call(ctx.metaOptions, 'aggrFeatureCount')) {
|
||||
// We expect as zoom level as the value of aggrFeatureCount
|
||||
// TODO: it'd be nice to admit an array of zoom levels to
|
||||
// return metadata for multiple levels.
|
||||
return queryUtils.queryPromise(
|
||||
ctx.dbConnection,
|
||||
_getSQL(ctx, queryUtils.getQueryActualRowCount, 'post', ctx.metaOptions.aggrFeatureCount)
|
||||
_getSQL(ctx, queryUtils.getQueryActualRowCount, 'post', ctx.metaOptions.aggrFeatureCount)
|
||||
).then(res => ({ aggrFeatureCount: res.rows[0].rows }));
|
||||
}
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
function _geometryType(ctx) {
|
||||
function _geometryType (ctx) {
|
||||
if (ctx.metaOptions.geometryType) {
|
||||
const geometryColumn = AggregationMapConfig.getAggregationGeometryColumn();
|
||||
const sqlQuery = _getSQL(ctx, sql => queryUtils.getQueryGeometryType(sql, geometryColumn));
|
||||
@ -78,7 +77,7 @@ function _geometryType(ctx) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
function _columns(ctx) {
|
||||
function _columns (ctx) {
|
||||
if (ctx.metaOptions.columns || ctx.metaOptions.columnStats || ctx.metaOptions.dimensions) {
|
||||
// note: post-aggregation columns are in layer.options.columns when aggregation is present
|
||||
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, sql => queryUtils.getQueryLimited(sql, 0)))
|
||||
@ -89,7 +88,7 @@ function _columns(ctx) {
|
||||
|
||||
// combine a list of results merging the properties of all the objects
|
||||
// undefined results are admitted and ignored
|
||||
function mergeResults(results) {
|
||||
function mergeResults (results) {
|
||||
if (results) {
|
||||
if (results.length === 0) {
|
||||
return {};
|
||||
@ -108,15 +107,15 @@ function mergeResults(results) {
|
||||
|
||||
// deeper (1 level) combination of a list of objects:
|
||||
// mergeColumns([{ col1: { a: 1 }, col2: { a: 2 } }, { col1: { b: 3 } }]) => { col1: { a: 1, b: 3 }, col2: { a: 2 } }
|
||||
function mergeColumns(results) {
|
||||
function mergeColumns (results) {
|
||||
if (results) {
|
||||
if (results.length === 0) {
|
||||
return {};
|
||||
}
|
||||
return results.reduce((a, b) => {
|
||||
let c = Object.assign({}, b || {}, a || {});
|
||||
const c = Object.assign({}, b || {}, a || {});
|
||||
Object.keys(c).forEach(key => {
|
||||
if (b.hasOwnProperty(key)) {
|
||||
if (Object.prototype.hasOwnProperty.call(b, key)) {
|
||||
c[key] = Object.assign(c[key], b[key]);
|
||||
}
|
||||
});
|
||||
@ -127,7 +126,7 @@ function mergeColumns(results) {
|
||||
|
||||
const DEFAULT_SAMPLE_ROWS = 100;
|
||||
|
||||
function _sample(ctx) {
|
||||
function _sample (ctx) {
|
||||
if (!ctx.metaOptions.sample) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
@ -164,32 +163,32 @@ function _getSampleValuesFromRange (min, span, limit) {
|
||||
return Array.from(sample);
|
||||
}
|
||||
|
||||
function _columnsMetadataRequired(options) {
|
||||
function _columnsMetadataRequired (options) {
|
||||
// We need determine the columns of a query
|
||||
// if either column stats or dimension stats are required,
|
||||
// since we'll ultimately use the same query to fetch both
|
||||
return options.columnStats || options.dimensions;
|
||||
}
|
||||
|
||||
function _columnStats(ctx, columns, dimensions) {
|
||||
function _columnStats (ctx, columns, dimensions) {
|
||||
if (!columns) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
if (_columnsMetadataRequired(ctx.metaOptions)) {
|
||||
let queries = [];
|
||||
const queries = [];
|
||||
let aggr = [];
|
||||
if (ctx.metaOptions.columnStats) {
|
||||
queries.push(new Promise(resolve => resolve({ columns }))); // add columns as first result
|
||||
Object.keys(columns).forEach(name => {
|
||||
aggr = aggr.concat(
|
||||
columnAggregations(columns[name])
|
||||
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
|
||||
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
|
||||
);
|
||||
if (columns[name].type === 'string') {
|
||||
const topN = ctx.metaOptions.columnStats.topCategories || 1024;
|
||||
const includeNulls = ctx.metaOptions.columnStats.hasOwnProperty('includeNulls') ?
|
||||
ctx.metaOptions.columnStats.includeNulls :
|
||||
true;
|
||||
const includeNulls = Object.prototype.hasOwnProperty.call(ctx.metaOptions.columnStats, 'includeNulls')
|
||||
? ctx.metaOptions.columnStats.includeNulls
|
||||
: true;
|
||||
|
||||
// TODO: ctx.metaOptions.columnStats.maxCategories
|
||||
// => use PG stats to dismiss columns with more distinct values
|
||||
@ -223,7 +222,7 @@ function _columnStats(ctx, columns, dimensions) {
|
||||
ctx.dbConnection,
|
||||
_getSQL(ctx, sql => `SELECT ${aggr.join(',')} FROM (${sql}) AS __cdb_query`)
|
||||
).then(res => {
|
||||
let stats = { columns: {}, dimensions: {} };
|
||||
const stats = { columns: {}, dimensions: {} };
|
||||
Object.keys(columns).forEach(name => {
|
||||
stats.columns[name] = {};
|
||||
columnAggregations(columns[name]).forEach(fn => {
|
||||
@ -245,62 +244,62 @@ function _columnStats(ctx, columns, dimensions) {
|
||||
);
|
||||
return Promise.all(queries).then(results => ({
|
||||
columns: mergeColumns(results.map(r => r.columns)),
|
||||
dimensions: mergeColumns(results.map( r => r.dimensions))
|
||||
dimensions: mergeColumns(results.map(r => r.dimensions))
|
||||
}));
|
||||
}
|
||||
return Promise.resolve({ columns });
|
||||
}
|
||||
|
||||
// This is adapted from SQL API:
|
||||
function fieldType(cname) {
|
||||
function fieldType (cname) {
|
||||
let tname;
|
||||
switch (true) {
|
||||
case /bool/.test(cname):
|
||||
tname = 'boolean';
|
||||
break;
|
||||
case /int|float|numeric/.test(cname):
|
||||
tname = 'number';
|
||||
break;
|
||||
case /text|char|unknown/.test(cname):
|
||||
tname = 'string';
|
||||
break;
|
||||
case /date|time/.test(cname):
|
||||
tname = 'date';
|
||||
break;
|
||||
default:
|
||||
tname = cname;
|
||||
case /bool/.test(cname):
|
||||
tname = 'boolean';
|
||||
break;
|
||||
case /int|float|numeric/.test(cname):
|
||||
tname = 'number';
|
||||
break;
|
||||
case /text|char|unknown/.test(cname):
|
||||
tname = 'string';
|
||||
break;
|
||||
case /date|time/.test(cname):
|
||||
tname = 'date';
|
||||
break;
|
||||
default:
|
||||
tname = cname;
|
||||
}
|
||||
if ( tname && cname.match(/^_/) ) {
|
||||
if (tname && cname.match(/^_/)) {
|
||||
tname += '[]';
|
||||
}
|
||||
return tname;
|
||||
}
|
||||
|
||||
function fieldTypeSafe(dbConnection, field) {
|
||||
function fieldTypeSafe (dbConnection, field) {
|
||||
const cname = dbConnection.typeName(field.dataTypeID);
|
||||
return cname ? fieldType(cname) : `unknown(${field.dataTypeID})`;
|
||||
}
|
||||
|
||||
// columns are returned as an object { columnName1: { type1: ...}, ..}
|
||||
// for consistency with SQL API
|
||||
function formatResultFields(dbConnection, fields = []) {
|
||||
let nfields = {};
|
||||
for (let field of fields) {
|
||||
nfields[field.name] = { type: fieldTypeSafe(dbConnection, field) };
|
||||
function formatResultFields (dbConnection, fields = []) {
|
||||
const nfields = {};
|
||||
for (const field of fields) {
|
||||
nfields[field.name] = { type: fieldTypeSafe(dbConnection, field) };
|
||||
}
|
||||
return nfields;
|
||||
}
|
||||
|
||||
MapnikLayerStats.prototype.getStats =
|
||||
function (layer, dbConnection, callback) {
|
||||
let aggrQuery = layer.options.sql;
|
||||
let preQuery = layer.options.sql_raw || aggrQuery;
|
||||
const aggrQuery = layer.options.sql;
|
||||
const preQuery = layer.options.sql_raw || aggrQuery;
|
||||
|
||||
let ctx = {
|
||||
const ctx = {
|
||||
dbConnection,
|
||||
preQuery,
|
||||
aggrQuery,
|
||||
metaOptions: layer.options.metadata || {},
|
||||
metaOptions: layer.options.metadata || {}
|
||||
};
|
||||
|
||||
// TODO: could save some queries if queryUtils.getAggregationMetadata() has been used and kept somewhere
|
||||
@ -316,7 +315,7 @@ function (layer, dbConnection, callback) {
|
||||
Promise.all([
|
||||
_estimatedFeatureCount(ctx).then(
|
||||
({ estimatedFeatureCount }) => _sample(ctx)
|
||||
.then(sampleResults => mergeResults([ sampleResults, { estimatedFeatureCount }] ))
|
||||
.then(sampleResults => mergeResults([sampleResults, { estimatedFeatureCount }]))
|
||||
),
|
||||
_featureCount(ctx),
|
||||
_aggrFeatureCount(ctx),
|
||||
|
@ -1,6 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
function TorqueLayerStats() {
|
||||
function TorqueLayerStats () {
|
||||
this._types = {
|
||||
torque: true
|
||||
};
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
const queryUtils = require('../utils/query-utils');
|
||||
|
||||
function OverviewsMetadataBackend(pgQueryRunner) {
|
||||
function OverviewsMetadataBackend (pgQueryRunner) {
|
||||
this.pgQueryRunner = pgQueryRunner;
|
||||
}
|
||||
|
||||
@ -17,15 +17,15 @@ OverviewsMetadataBackend.prototype.getOverviewsMetadata = function (username, sq
|
||||
cartodb.CDB_QueryTablesText($windshaft$${queryUtils.substituteDummyTokens(sql)}$windshaft$)
|
||||
);
|
||||
`;
|
||||
this.pgQueryRunner.run(username, query, function handleOverviewsRows(err, rows) {
|
||||
if (err){
|
||||
this.pgQueryRunner.run(username, query, function handleOverviewsRows (err, rows) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
var metadata = rows.reduce(function(metadata, row){
|
||||
var metadata = rows.reduce(function (metadata, row) {
|
||||
var table = row.base_table;
|
||||
var schema = row._cdb_schema_name;
|
||||
if ( !metadata[table] ) {
|
||||
if (!metadata[table]) {
|
||||
metadata[table] = {};
|
||||
}
|
||||
metadata[table][row.z] = { table: row.overview_table };
|
||||
|
@ -5,13 +5,12 @@ var _ = require('underscore');
|
||||
const debug = require('debug')('cachechan');
|
||||
const dbParamsFromReqParams = require('../utils/database-params');
|
||||
|
||||
function PgConnection(metadataBackend) {
|
||||
function PgConnection (metadataBackend) {
|
||||
this.metadataBackend = metadataBackend;
|
||||
}
|
||||
|
||||
module.exports = PgConnection;
|
||||
|
||||
|
||||
// Set db authentication parameters to those of the given username
|
||||
//
|
||||
// @param username the cartodb username, mapped to a database username
|
||||
@ -22,7 +21,7 @@ module.exports = PgConnection;
|
||||
//
|
||||
// @param callback function(err)
|
||||
//
|
||||
PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callback) {
|
||||
PgConnection.prototype.setDBAuth = function (username, params, apikeyType, callback) {
|
||||
if (apikeyType === 'master') {
|
||||
this.metadataBackend.getMasterApikey(username, (err, apikey) => {
|
||||
if (err) {
|
||||
@ -37,7 +36,7 @@ PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callba
|
||||
|
||||
return callback();
|
||||
});
|
||||
} else if (apikeyType === 'regular') { //Actually it can be any type of api key
|
||||
} else if (apikeyType === 'regular') { // Actually it can be any type of api key
|
||||
this.metadataBackend.getApikey(username, params.api_key, (err, apikey) => {
|
||||
if (err) {
|
||||
if (isNameNotFoundError(err)) {
|
||||
@ -71,10 +70,9 @@ PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callba
|
||||
};
|
||||
|
||||
function isNameNotFoundError (err) {
|
||||
return err.message && -1 !== err.message.indexOf('name not found');
|
||||
return err.message && err.message.indexOf('name not found') !== -1;
|
||||
}
|
||||
|
||||
|
||||
// Set db connection parameters to those for the given username
|
||||
//
|
||||
// @param dbowner cartodb username of database owner,
|
||||
@ -86,7 +84,7 @@ function isNameNotFoundError (err) {
|
||||
//
|
||||
// @param callback function(err)
|
||||
//
|
||||
PgConnection.prototype.setDBConn = function(dbowner, params, callback) {
|
||||
PgConnection.prototype.setDBConn = function (dbowner, params, callback) {
|
||||
_.defaults(params, {
|
||||
// dbuser: global.environment.postgres.user,
|
||||
// dbpassword: global.environment.postgres.password,
|
||||
@ -118,8 +116,8 @@ PgConnection.prototype.setDBConn = function(dbowner, params, callback) {
|
||||
* @param {Function} callback function({Error}, {PSQL})
|
||||
*/
|
||||
|
||||
PgConnection.prototype.getConnection = function(username, callback) {
|
||||
debug("getConn1");
|
||||
PgConnection.prototype.getConnection = function (username, callback) {
|
||||
debug('getConn1');
|
||||
|
||||
this.getDatabaseParams(username, (err, databaseParams) => {
|
||||
if (err) {
|
||||
@ -129,7 +127,7 @@ PgConnection.prototype.getConnection = function(username, callback) {
|
||||
});
|
||||
};
|
||||
|
||||
PgConnection.prototype.getDatabaseParams = function(username, callback) {
|
||||
PgConnection.prototype.getDatabaseParams = function (username, callback) {
|
||||
const databaseParams = {};
|
||||
|
||||
this.setDBAuth(username, databaseParams, 'master', err => {
|
||||
|
@ -3,7 +3,7 @@
|
||||
var PSQL = require('cartodb-psql');
|
||||
const dbParamsFromReqParams = require('../utils/database-params');
|
||||
|
||||
function PgQueryRunner(pgConnection) {
|
||||
function PgQueryRunner (pgConnection) {
|
||||
this.pgConnection = pgConnection;
|
||||
}
|
||||
|
||||
@ -16,8 +16,7 @@ module.exports = PgQueryRunner;
|
||||
* @param {String} query
|
||||
* @param {Function} callback function({Error}, {Array}) second argument is guaranteed to be an array
|
||||
*/
|
||||
PgQueryRunner.prototype.run = function(username, query, callback) {
|
||||
|
||||
PgQueryRunner.prototype.run = function (username, query, callback) {
|
||||
this.pgConnection.getDatabaseParams(username, (err, databaseParams) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
|
@ -2,14 +2,14 @@
|
||||
|
||||
var layerStats = require('./layer-stats/factory');
|
||||
|
||||
function StatsBackend() {
|
||||
function StatsBackend () {
|
||||
}
|
||||
|
||||
module.exports = StatsBackend;
|
||||
|
||||
StatsBackend.prototype.getStats = function(mapConfig, dbConnection, callback) {
|
||||
StatsBackend.prototype.getStats = function (mapConfig, dbConnection, callback) {
|
||||
var enabledFeatures = global.environment.enabledFeatures;
|
||||
var layerStatsEnabled = enabledFeatures ? enabledFeatures.layerStats: false;
|
||||
var layerStatsEnabled = enabledFeatures ? enabledFeatures.layerStats : false;
|
||||
if (layerStatsEnabled) {
|
||||
layerStats().getStats(mapConfig, dbConnection, callback);
|
||||
} else {
|
||||
|
@ -1,6 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
function TablesExtentBackend(pgQueryRunner) {
|
||||
function TablesExtentBackend (pgQueryRunner) {
|
||||
this.pgQueryRunner = pgQueryRunner;
|
||||
}
|
||||
|
||||
@ -16,21 +16,21 @@ module.exports = TablesExtentBackend;
|
||||
* @param {Function} callback function(err, result) {Object} result with `west`, `south`, `east`, `north`
|
||||
*/
|
||||
TablesExtentBackend.prototype.getBounds = function (username, tables, callback) {
|
||||
var estimatedExtentSQLs = tables.map(function(table) {
|
||||
var estimatedExtentSQLs = tables.map(function (table) {
|
||||
return "ST_EstimatedExtent('" + table.schema_name + "', '" + table.table_name + "', 'the_geom_webmercator')";
|
||||
});
|
||||
|
||||
var query = [
|
||||
"WITH ext as (" +
|
||||
"SELECT ST_Transform(ST_SetSRID(ST_Extent(ST_Union(ARRAY[",
|
||||
estimatedExtentSQLs.join(','),
|
||||
"])), 3857), 4326) geom)",
|
||||
"SELECT",
|
||||
"ST_XMin(geom) west,",
|
||||
"ST_YMin(geom) south,",
|
||||
"ST_XMax(geom) east,",
|
||||
"ST_YMax(geom) north",
|
||||
"FROM ext"
|
||||
'WITH ext as (' +
|
||||
'SELECT ST_Transform(ST_SetSRID(ST_Extent(ST_Union(ARRAY[',
|
||||
estimatedExtentSQLs.join(','),
|
||||
'])), 3857), 4326) geom)',
|
||||
'SELECT',
|
||||
'ST_XMin(geom) west,',
|
||||
'ST_YMin(geom) south,',
|
||||
'ST_XMax(geom) east,',
|
||||
'ST_YMax(geom) north',
|
||||
'FROM ext'
|
||||
].join(' ');
|
||||
|
||||
this.pgQueryRunner.run(username, query, function handleBoundsResult (err, rows) {
|
||||
|
@@ -5,16 +5,14 @@ var debug = require('debug')('windshaft:templates');
var _ = require('underscore');
var dot = require('dot');


var EventEmitter = require('events').EventEmitter;
var util = require('util');


// Class handling map templates
//
// See http://github.com/CartoDB/Windshaft-cartodb/wiki/Template-maps
//
// @param redis_pool an instance of a "redis-mpool"
// @param redisPool an instance of a "redis-mpool"
// See https://github.com/CartoDB/node-redis-mpool
// Needs version 0.x.x of the API.
//
@ -22,43 +20,42 @@ var util = require('util');
|
||||
// 'max_user_templates' limit on the number of per-user
|
||||
//
|
||||
//
|
||||
function TemplateMaps(redis_pool, opts) {
|
||||
if (!(this instanceof TemplateMaps)) {
|
||||
return new TemplateMaps();
|
||||
}
|
||||
function TemplateMaps (redisPool, opts) {
|
||||
if (!(this instanceof TemplateMaps)) {
|
||||
return new TemplateMaps();
|
||||
}
|
||||
|
||||
EventEmitter.call(this);
|
||||
EventEmitter.call(this);
|
||||
|
||||
this.redis_pool = redis_pool;
|
||||
this.opts = opts || {};
|
||||
this.redisPool = redisPool;
|
||||
this.opts = opts || {};
|
||||
|
||||
// Database containing templates
|
||||
// TODO: allow configuring ?
|
||||
// NOTE: currently it is the same as
|
||||
// the one containing layergroups
|
||||
this.db_signatures = 0;
|
||||
// Database containing templates
|
||||
// TODO: allow configuring ?
|
||||
// NOTE: currently it is the same as
|
||||
// the one containing layergroups
|
||||
this.db_signatures = 0;
|
||||
|
||||
//
|
||||
// Map templates are owned by a user that specifies access permissions
|
||||
// for their instances.
|
||||
//
|
||||
// We have the following datastores:
|
||||
//
|
||||
// 1. User templates: set of per-user map templates
|
||||
//
|
||||
// Map templates are owned by a user that specifies access permissions
|
||||
// for their instances.
|
||||
//
|
||||
// We have the following datastores:
|
||||
//
|
||||
// 1. User templates: set of per-user map templates
|
||||
|
||||
// User templates (HASH:tpl_id->tpl_val)
|
||||
this.key_usr_tpl = dot.template("map_tpl|{{=it.owner}}");
|
||||
// User templates (HASH:tplId->tpl_val)
|
||||
this.key_usr_tpl = dot.template('map_tpl|{{=it.owner}}');
|
||||
}
|
||||
|
||||
util.inherits(TemplateMaps, EventEmitter);
|
||||
|
||||
module.exports = TemplateMaps;
|
||||
|
||||
// --------------- PRIVATE METHODS --------------------------------
|
||||
|
||||
//--------------- PRIVATE METHODS --------------------------------
|
||||
|
||||
TemplateMaps.prototype._userTemplateLimit = function() {
|
||||
return this.opts.max_user_templates || 0;
|
||||
TemplateMaps.prototype._userTemplateLimit = function () {
|
||||
return this.opts.max_user_templates || 0;
|
||||
};
|
||||
|
||||
/**
|
||||
@ -68,14 +65,14 @@ TemplateMaps.prototype._userTemplateLimit = function() {
|
||||
* @param redisArgs - the arguments for the redis function in an array
|
||||
* @param callback - function to pass results too.
|
||||
*/
|
||||
TemplateMaps.prototype._redisCmd = function(redisFunc, redisArgs, callback) {
|
||||
this.redis_pool.acquire(this.db_signatures, (err, redisClient) => {
|
||||
TemplateMaps.prototype._redisCmd = function (redisFunc, redisArgs, callback) {
|
||||
this.redisPool.acquire(this.db_signatures, (err, redisClient) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
redisClient[redisFunc.toUpperCase()](...redisArgs, (err, data) => {
|
||||
this.redis_pool.release(this.db_signatures, redisClient);
|
||||
this.redisPool.release(this.db_signatures, redisClient);
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
@ -84,64 +81,64 @@ TemplateMaps.prototype._redisCmd = function(redisFunc, redisArgs, callback) {
|
||||
});
|
||||
};
|
||||
|
||||
var _reValidNameIdentifier = /^[a-z0-9][0-9a-z_\-]*$/i;
|
||||
var _reValidNameIdentifier = /^[a-z0-9][0-9a-z_-]*$/i;
|
||||
var _reValidPlaceholderIdentifier = /^[a-z][0-9a-z_]*$/i;
|
||||
// jshint maxcomplexity:15
|
||||
TemplateMaps.prototype._checkInvalidTemplate = function(template) {
|
||||
if ( template.version !== '0.0.1' ) {
|
||||
return new Error("Unsupported template version " + template.version);
|
||||
}
|
||||
var tplname = template.name;
|
||||
if ( ! tplname ) {
|
||||
return new Error("Missing template name");
|
||||
}
|
||||
if ( ! tplname.match(_reValidNameIdentifier) ) {
|
||||
return new Error("Invalid characters in template name '" + tplname + "'");
|
||||
}
|
||||
TemplateMaps.prototype._checkInvalidTemplate = function (template) {
|
||||
if (template.version !== '0.0.1') {
|
||||
return new Error('Unsupported template version ' + template.version);
|
||||
}
|
||||
var tplname = template.name;
|
||||
if (!tplname) {
|
||||
return new Error('Missing template name');
|
||||
}
|
||||
if (!tplname.match(_reValidNameIdentifier)) {
|
||||
return new Error("Invalid characters in template name '" + tplname + "'");
|
||||
}
|
||||
|
||||
var invalidError = isInvalidLayergroup(template.layergroup);
|
||||
if (invalidError) {
|
||||
return invalidError;
|
||||
}
|
||||
var invalidError = isInvalidLayergroup(template.layergroup);
|
||||
if (invalidError) {
|
||||
return invalidError;
|
||||
}
|
||||
|
||||
var placeholders = template.placeholders || {};
|
||||
var placeholders = template.placeholders || {};
|
||||
|
||||
var placeholderKeys = Object.keys(placeholders);
|
||||
for (var i = 0, len = placeholderKeys.length; i < len; i++) {
|
||||
var placeholderKey = placeholderKeys[i];
|
||||
var placeholderKeys = Object.keys(placeholders);
|
||||
for (var i = 0, len = placeholderKeys.length; i < len; i++) {
|
||||
var placeholderKey = placeholderKeys[i];
|
||||
|
||||
if (!placeholderKey.match(_reValidPlaceholderIdentifier)) {
|
||||
return new Error("Invalid characters in placeholder name '" + placeholderKey + "'");
|
||||
}
|
||||
if ( ! placeholders[placeholderKey].hasOwnProperty('default') ) {
|
||||
return new Error("Missing default for placeholder '" + placeholderKey + "'");
|
||||
}
|
||||
if ( ! placeholders[placeholderKey].hasOwnProperty('type') ) {
|
||||
return new Error("Missing type for placeholder '" + placeholderKey + "'");
|
||||
}
|
||||
}
|
||||
if (!placeholderKey.match(_reValidPlaceholderIdentifier)) {
|
||||
return new Error("Invalid characters in placeholder name '" + placeholderKey + "'");
|
||||
}
|
||||
if (!Object.prototype.hasOwnProperty.call(placeholders[placeholderKey], 'default')) {
|
||||
return new Error("Missing default for placeholder '" + placeholderKey + "'");
|
||||
}
|
||||
if (!Object.prototype.hasOwnProperty.call(placeholders[placeholderKey], 'type')) {
|
||||
return new Error("Missing type for placeholder '" + placeholderKey + "'");
|
||||
}
|
||||
}
|
||||
|
||||
var auth = template.auth || {};
|
||||
|
||||
switch ( auth.method ) {
|
||||
case 'open':
|
||||
break;
|
||||
case 'token':
|
||||
if ( ! _.isArray(auth.valid_tokens) ) {
|
||||
return new Error("Invalid 'token' authentication: missing valid_tokens");
|
||||
}
|
||||
if ( ! auth.valid_tokens.length ) {
|
||||
return new Error("Invalid 'token' authentication: no valid_tokens");
|
||||
}
|
||||
break;
|
||||
default:
|
||||
return new Error("Unsupported authentication method: " + auth.method);
|
||||
switch (auth.method) {
|
||||
case 'open':
|
||||
break;
|
||||
case 'token':
|
||||
if (!_.isArray(auth.valid_tokens)) {
|
||||
return new Error("Invalid 'token' authentication: missing valid_tokens");
|
||||
}
|
||||
if (!auth.valid_tokens.length) {
|
||||
return new Error("Invalid 'token' authentication: no valid_tokens");
|
||||
}
|
||||
break;
|
||||
default:
|
||||
return new Error('Unsupported authentication method: ' + auth.method);
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
function isInvalidLayergroup(layergroup) {
|
||||
function isInvalidLayergroup (layergroup) {
|
||||
if (!layergroup) {
|
||||
return new Error('Missing layergroup');
|
||||
}
|
||||
@ -153,10 +150,10 @@ function isInvalidLayergroup(layergroup) {
|
||||
}
|
||||
|
||||
var invalidLayers = layers
|
||||
.map(function(layer, layerIndex) {
|
||||
.map(function (layer, layerIndex) {
|
||||
return layer.options ? null : layerIndex;
|
||||
})
|
||||
.filter(function(layerIndex) {
|
||||
.filter(function (layerIndex) {
|
||||
return layerIndex !== null;
|
||||
});
|
||||
|
||||
@ -167,7 +164,7 @@ function isInvalidLayergroup(layergroup) {
|
||||
return false;
|
||||
}
|
||||
|
||||
function templateDefaults(template) {
|
||||
function templateDefaults (template) {
|
||||
var templateAuth = _.defaults({}, template.auth || {}, {
|
||||
method: 'open'
|
||||
});
|
||||
@ -183,10 +180,10 @@ function templateDefaults(template) {
|
||||
* @param owner cartodb username of the template owner
|
||||
* @param callback returns error if the user reaches the limit
|
||||
*/
|
||||
TemplateMaps.prototype._checkUserTemplatesLimit = function(userTemplatesKey, owner, callback) {
|
||||
TemplateMaps.prototype._checkUserTemplatesLimit = function (userTemplatesKey, owner, callback) {
|
||||
const limit = this._userTemplateLimit();
|
||||
|
||||
if(!limit) {
|
||||
if (!limit) {
|
||||
return callback();
|
||||
}
|
||||
|
||||
@ -207,7 +204,7 @@ TemplateMaps.prototype._checkUserTemplatesLimit = function(userTemplatesKey, own
|
||||
});
|
||||
};
|
||||
|
||||
//--------------- PUBLIC API -------------------------------------
|
||||
// --------------- PUBLIC API -------------------------------------
|
||||
|
||||
// Add a template
|
||||
//
|
||||
@ -218,10 +215,10 @@ TemplateMaps.prototype._checkUserTemplatesLimit = function(userTemplatesKey, own
|
||||
// @param template layergroup template, see
|
||||
// http://github.com/CartoDB/Windshaft-cartodb/wiki/Template-maps#template-format
|
||||
//
|
||||
// @param callback function(err, tpl_id)
|
||||
// @param callback function(err, tplId)
|
||||
// Return template identifier (only valid for given user)
|
||||
//
|
||||
TemplateMaps.prototype.addTemplate = function(owner, template, callback) {
|
||||
TemplateMaps.prototype.addTemplate = function (owner, template, callback) {
|
||||
template = templateDefaults(template);
|
||||
|
||||
var invalidError = this._checkInvalidTemplate(template);
|
||||
@ -263,22 +260,22 @@ TemplateMaps.prototype.addTemplate = function(owner, template, callback) {
|
||||
//
|
||||
// @param owner cartodb username of the template owner
|
||||
//
|
||||
// @param tpl_id template identifier as returned
|
||||
// @param tplId template identifier as returned
|
||||
// by addTemplate or listTemplates
|
||||
//
|
||||
// @param callback function(err)
|
||||
//
|
||||
TemplateMaps.prototype.delTemplate = function(owner, tpl_id, callback) {
|
||||
this._redisCmd('HDEL', [ this.key_usr_tpl({ owner:owner }), tpl_id ], (err, deleted) => {
|
||||
TemplateMaps.prototype.delTemplate = function (owner, tplId, callback) {
|
||||
this._redisCmd('HDEL', [this.key_usr_tpl({ owner: owner }), tplId], (err, deleted) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
if (!deleted) {
|
||||
return callback(new Error(`Template '${tpl_id}' of user '${owner}' does not exist`));
|
||||
return callback(new Error(`Template '${tplId}' of user '${owner}' does not exist`));
|
||||
}
|
||||
|
||||
this.emit('delete', owner, tpl_id);
|
||||
this.emit('delete', owner, tplId);
|
||||
return callback();
|
||||
});
|
||||
};
|
||||
@ -292,14 +289,14 @@ TemplateMaps.prototype.delTemplate = function(owner, tpl_id, callback) {
|
||||
//
|
||||
// @param owner cartodb username of the template owner
|
||||
//
|
||||
// @param tpl_id template identifier as returned by addTemplate
|
||||
// @param tplId template identifier as returned by addTemplate
|
||||
//
|
||||
// @param template layergroup template, see
|
||||
// http://github.com/CartoDB/Windshaft-cartodb/wiki/Template-maps#template-format
|
||||
//
|
||||
// @param callback function(err)
|
||||
//
|
||||
TemplateMaps.prototype.updTemplate = function(owner, tpl_id, template, callback) {
|
||||
TemplateMaps.prototype.updTemplate = function (owner, tplId, template, callback) {
|
||||
template = templateDefaults(template);
|
||||
|
||||
var invalidError = this._checkInvalidTemplate(template);
|
||||
@ -307,19 +304,19 @@ TemplateMaps.prototype.updTemplate = function(owner, tpl_id, template, callback)
|
||||
return callback(invalidError);
|
||||
}
|
||||
|
||||
if (tpl_id !== template.name) {
|
||||
return callback(new Error(`Cannot update name of a map template ('${tpl_id}' != '${template.name}')`));
|
||||
if (tplId !== template.name) {
|
||||
return callback(new Error(`Cannot update name of a map template ('${tplId}' != '${template.name}')`));
|
||||
}
|
||||
|
||||
var userTemplatesKey = this.key_usr_tpl({ owner });
|
||||
|
||||
this._redisCmd('HGET', [userTemplatesKey, tpl_id], (err, beforeUpdateTemplate) => {
|
||||
this._redisCmd('HGET', [userTemplatesKey, tplId], (err, beforeUpdateTemplate) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
if (!beforeUpdateTemplate) {
|
||||
return callback(new Error(`Template '${tpl_id}' of user '${owner}' does not exist`));
|
||||
return callback(new Error(`Template '${tplId}' of user '${owner}' does not exist`));
|
||||
}
|
||||
|
||||
let templateString;
|
||||
@ -358,25 +355,25 @@ TemplateMaps.prototype.updTemplate = function(owner, tpl_id, template, callback)
|
||||
//
|
||||
// @param owner cartodb username of the templates owner
|
||||
//
|
||||
// @param callback function(err, tpl_id_list)
|
||||
// @param callback function(err, tplId_list)
|
||||
// Returns a list of template identifiers
|
||||
//
|
||||
TemplateMaps.prototype.listTemplates = function(owner, callback) {
|
||||
this._redisCmd('HKEYS', [ this.key_usr_tpl({owner:owner}) ], callback);
|
||||
TemplateMaps.prototype.listTemplates = function (owner, callback) {
|
||||
this._redisCmd('HKEYS', [this.key_usr_tpl({ owner: owner })], callback);
|
||||
};
|
||||
|
||||
// Get a templates
|
||||
//
|
||||
// @param owner cartodb username of the template owner
|
||||
//
|
||||
// @param tpl_id template identifier as returned
|
||||
// @param tplId template identifier as returned
|
||||
// by addTemplate or listTemplates
|
||||
//
|
||||
// @param callback function(err, template)
|
||||
// Return full template definition
|
||||
//
|
||||
TemplateMaps.prototype.getTemplate = function(owner, tpl_id, callback) {
|
||||
this._redisCmd('HGET', [this.key_usr_tpl({owner:owner}), tpl_id], (err, template) => {
|
||||
TemplateMaps.prototype.getTemplate = function (owner, tplId, callback) {
|
||||
this._redisCmd('HGET', [this.key_usr_tpl({ owner: owner }), tplId], (err, template) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
@ -392,7 +389,7 @@ TemplateMaps.prototype.getTemplate = function(owner, tpl_id, callback) {
|
||||
});
|
||||
};
|
||||
|
||||
TemplateMaps.prototype.isAuthorized = function(template, authTokens) {
|
||||
TemplateMaps.prototype.isAuthorized = function (template, authTokens) {
|
||||
if (!template) {
|
||||
return false;
|
||||
}
|
||||
@@ -432,99 +429,95 @@ TemplateMaps.prototype.isAuthorized = function(template, authTokens) {
//
// @throws Error on malformed template or parameter
//
var _reNumber = /^([-+]?[\d\.]?\d+([eE][+-]?\d+)?)$/,
_reCSSColorName = /^[a-zA-Z]+$/,
_reCSSColorVal = /^#[0-9a-fA-F]{3,6}$/;
var _reNumber = /^([-+]?[\d\.]?\d+([eE][+-]?\d+)?)$/; // eslint-disable-line no-useless-escape
var _reCSSColorName = /^[a-zA-Z]+$/;
var _reCSSColorVal = /^#[0-9a-fA-F]{3,6}$/;

function _replaceVars (str, params) {
// Construct regular expressions for each param
Object.keys(params).forEach(function(k) {
str = str.replace(new RegExp("<%=\\s*" + k + "\\s*%>", "g"), params[k]);
Object.keys(params).forEach(function (k) {
str = str.replace(new RegExp('<%=\\s*' + k + '\\s*%>', 'g'), params[k]);
});
return str;
}

function isObject(val) {
return ( _.isObject(val) && !_.isArray(val) && !_.isFunction(val));
function isObject (val) {
return (_.isObject(val) && !_.isArray(val) && !_.isFunction(val));
}

TemplateMaps.prototype.instance = function(template, params) {
|
||||
var all_params = {};
|
||||
var phold = template.placeholders || {};
|
||||
Object.keys(phold).forEach(function(k) {
|
||||
var val = params.hasOwnProperty(k) ? params[k] : phold[k].default;
|
||||
var type = phold[k].type;
|
||||
// properly escape
|
||||
if ( type === 'sql_literal' ) {
|
||||
// duplicate any single-quote
|
||||
val = val.replace(/'/g, "''");
|
||||
}
|
||||
else if ( type === 'sql_ident' ) {
|
||||
// duplicate any double-quote
|
||||
val = val.replace(/"/g, '""');
|
||||
}
|
||||
else if ( type === 'number' ) {
|
||||
// check it's a number
|
||||
if ( typeof(val) !== 'number' && ! val.match(_reNumber) ) {
|
||||
throw new Error("Invalid number value for template parameter '" + k + "': " + val);
|
||||
}
|
||||
}
|
||||
else if ( type === 'css_color' ) {
|
||||
// check it only contains letters or
|
||||
// starts with # and only contains hexdigits
|
||||
if ( ! val.match(_reCSSColorName) && ! val.match(_reCSSColorVal) ) {
|
||||
throw new Error("Invalid css_color value for template parameter '" + k + "': " + val);
|
||||
}
|
||||
}
|
||||
else {
|
||||
// NOTE: should be checked at template create/update time
|
||||
throw new Error("Invalid placeholder type '" + type + "'");
|
||||
}
|
||||
all_params[k] = val;
|
||||
});
|
||||
TemplateMaps.prototype.instance = function (template, params) {
|
||||
var allParams = {};
|
||||
var phold = template.placeholders || {};
|
||||
Object.keys(phold).forEach(function (k) {
|
||||
var val = Object.prototype.hasOwnProperty.call(params, k) ? params[k] : phold[k].default;
|
||||
var type = phold[k].type;
|
||||
// properly escape
|
||||
if (type === 'sql_literal') {
|
||||
// duplicate any single-quote
|
||||
val = val.replace(/'/g, "''");
|
||||
} else if (type === 'sql_ident') {
|
||||
// duplicate any double-quote
|
||||
val = val.replace(/"/g, '""');
|
||||
} else if (type === 'number') {
|
||||
// check it's a number
|
||||
if (typeof (val) !== 'number' && !val.match(_reNumber)) {
|
||||
throw new Error("Invalid number value for template parameter '" + k + "': " + val);
|
||||
}
|
||||
} else if (type === 'css_color') {
|
||||
// check it only contains letters or
|
||||
// starts with # and only contains hexdigits
|
||||
if (!val.match(_reCSSColorName) && !val.match(_reCSSColorVal)) {
|
||||
throw new Error("Invalid css_color value for template parameter '" + k + "': " + val);
|
||||
}
|
||||
} else {
|
||||
// NOTE: should be checked at template create/update time
|
||||
throw new Error("Invalid placeholder type '" + type + "'");
|
||||
}
|
||||
allParams[k] = val;
|
||||
});
|
||||
|
||||
// NOTE: we're deep-cloning the layergroup here
|
||||
var layergroup = JSON.parse(JSON.stringify(template.layergroup));
|
||||
// NOTE: we're deep-cloning the layergroup here
|
||||
var layergroup = JSON.parse(JSON.stringify(template.layergroup));
|
||||
|
||||
if (layergroup.buffersize && isObject(layergroup.buffersize)) {
|
||||
Object.keys(layergroup.buffersize).forEach(function(k) {
|
||||
layergroup.buffersize[k] = parseInt(_replaceVars(layergroup.buffersize[k], all_params), 10);
|
||||
});
|
||||
}
|
||||
if (layergroup.buffersize && isObject(layergroup.buffersize)) {
|
||||
Object.keys(layergroup.buffersize).forEach(function (k) {
|
||||
layergroup.buffersize[k] = parseInt(_replaceVars(layergroup.buffersize[k], allParams), 10);
|
||||
});
|
||||
}
|
||||
|
||||
for (var i=0; i<layergroup.layers.length; ++i) {
|
||||
var lyropt = layergroup.layers[i].options;
|
||||
for (var i = 0; i < layergroup.layers.length; ++i) {
|
||||
var lyropt = layergroup.layers[i].options;
|
||||
|
||||
if ( params.styles && params.styles[i] ) {
|
||||
if (params.styles && params.styles[i]) {
|
||||
// dynamic styling for this layer
|
||||
lyropt.cartocss = params.styles[i];
|
||||
} else if ( lyropt.cartocss ) {
|
||||
lyropt.cartocss = _replaceVars(lyropt.cartocss, all_params);
|
||||
}
|
||||
if ( lyropt.sql) {
|
||||
lyropt.sql = _replaceVars(lyropt.sql, all_params);
|
||||
}
|
||||
lyropt.cartocss = params.styles[i];
|
||||
} else if (lyropt.cartocss) {
|
||||
lyropt.cartocss = _replaceVars(lyropt.cartocss, allParams);
|
||||
}
|
||||
if (lyropt.sql) {
|
||||
lyropt.sql = _replaceVars(lyropt.sql, allParams);
|
||||
}
|
||||
// Anything else ?
|
||||
}
|
||||
}
|
||||
|
||||
// extra information about the template
|
||||
layergroup.template = {
|
||||
name: template.name,
|
||||
auth: template.auth
|
||||
};
|
||||
// extra information about the template
|
||||
layergroup.template = {
|
||||
name: template.name,
|
||||
auth: template.auth
|
||||
};
|
||||
|
||||
return layergroup;
|
||||
return layergroup;
|
||||
};
|
||||
|
||||
// Return a fingerPrint of the object
|
||||
TemplateMaps.prototype.fingerPrint = function(template) {
|
||||
return crypto.createHash('md5')
|
||||
.update(JSON.stringify(template))
|
||||
.digest('hex')
|
||||
;
|
||||
TemplateMaps.prototype.fingerPrint = function (template) {
|
||||
return crypto.createHash('md5')
|
||||
.update(JSON.stringify(template))
|
||||
.digest('hex')
|
||||
;
|
||||
};
|
||||
|
||||
module.exports.templateName = function templateName(templateId) {
|
||||
module.exports.templateName = function templateName (templateId) {
|
||||
var templateIdTokens = templateId.split('@');
|
||||
var name = templateIdTokens[0];
|
||||
|
||||
|
@ -3,7 +3,7 @@
|
||||
var dot = require('dot');
|
||||
dot.templateSettings.strip = false;
|
||||
|
||||
function createTemplate(method) {
|
||||
function createTemplate (method) {
|
||||
return dot.template([
|
||||
'SELECT',
|
||||
'min({{=it._column}}) min_val,',
|
||||
@ -27,7 +27,7 @@ var methods = {
|
||||
headtails: 'CDB_HeadsTailsBins(array_agg({{=it._column}}::numeric), {{=it._buckets}}) as headtails'
|
||||
};
|
||||
|
||||
var methodTemplates = Object.keys(methods).reduce(function(methodTemplates, methodName) {
|
||||
var methodTemplates = Object.keys(methods).reduce(function (methodTemplates, methodName) {
|
||||
methodTemplates[methodName] = createTemplate(methods[methodName]);
|
||||
return methodTemplates;
|
||||
}, {});
|
||||
@ -68,7 +68,7 @@ PostgresDatasource.prototype.getName = function () {
|
||||
};
|
||||
|
||||
PostgresDatasource.prototype.getRamp = function (column, buckets, method, callback) {
|
||||
if (method && !methodTemplates.hasOwnProperty(method)) {
|
||||
if (method && !Object.prototype.hasOwnProperty.call(methodTemplates, method)) {
|
||||
return callback(new Error(
|
||||
'Invalid method "' + method + '", valid methods: [' + Object.keys(methodTemplates).join(',') + ']'
|
||||
));
|
||||
@ -94,9 +94,9 @@ PostgresDatasource.prototype.getRamp = function (column, buckets, method, callba
|
||||
// Skip null values from ramp
|
||||
// Generated turbo-carto won't be correct, but better to keep it working than failing
|
||||
// TODO fix cartodb-postgres extension quantification functions
|
||||
ramp = ramp.filter(function(value) { return value !== null; });
|
||||
ramp = ramp.filter(function (value) { return value !== null; });
|
||||
if (strategy !== STRATEGY.EXACT) {
|
||||
ramp = ramp.sort(function(a, b) {
|
||||
ramp = ramp.sort(function (a, b) {
|
||||
return a - b;
|
||||
});
|
||||
}
|
||||
@ -105,7 +105,7 @@ PostgresDatasource.prototype.getRamp = function (column, buckets, method, callba
|
||||
}, true); // use read-only transaction
|
||||
};
|
||||
|
||||
function getResult(resultSet) {
|
||||
function getResult (resultSet) {
|
||||
resultSet = resultSet || {};
|
||||
var result = resultSet.rows || [];
|
||||
result = result[0] || {};
|
||||
|
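Several hunks in this diff trade obj.hasOwnProperty(key) for Object.prototype.hasOwnProperty.call(obj, key): the eslint 'standard' ruleset adopted by this commit flags the direct form (no-prototype-builtins), and the call form keeps working even for objects that do not inherit from Object.prototype. A small illustration (the object contents are illustrative):

    var templates = Object.create(null); // a dictionary with no prototype at all
    templates.headtails = '...';

    // templates.hasOwnProperty('headtails') would throw a TypeError here,
    // because the method is not inherited.
    var has = Object.prototype.hasOwnProperty.call(templates, 'headtails');
    console.log(has); // true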
@ -7,7 +7,7 @@
|
||||
* @constructor
|
||||
* @type {UserLimitsBackend}
|
||||
*/
|
||||
function UserLimitsBackend(metadataBackend, options) {
|
||||
function UserLimitsBackend (metadataBackend, options) {
|
||||
this.metadataBackend = metadataBackend;
|
||||
this.options = options || {};
|
||||
this.options.limits = this.options.limits || {};
|
||||
@ -59,7 +59,7 @@ UserLimitsBackend.prototype.getTimeoutRenderLimit = function (username, apiKey,
|
||||
});
|
||||
};
|
||||
|
||||
function isAuthorized(metadataBackend, username, apiKey, callback) {
|
||||
function isAuthorized (metadataBackend, username, apiKey, callback) {
|
||||
if (!apiKey) {
|
||||
return callback(null, false);
|
||||
}
|
||||
|
lib/cache/backend/fastly.js (vendored, 4 changes)
@ -2,7 +2,7 @@
|
||||
|
||||
var FastlyPurge = require('fastly-purge');
|
||||
|
||||
function FastlyCacheBackend(apiKey, serviceId) {
|
||||
function FastlyCacheBackend (apiKey, serviceId) {
|
||||
this.serviceId = serviceId;
|
||||
this.fastlyPurge = new FastlyPurge(apiKey, { softPurge: false });
|
||||
}
|
||||
@ -13,6 +13,6 @@ module.exports = FastlyCacheBackend;
|
||||
* @param cacheObject should respond to `key() -> String` method
|
||||
* @param {Function} callback
|
||||
*/
|
||||
FastlyCacheBackend.prototype.invalidate = function(cacheObject, callback) {
|
||||
FastlyCacheBackend.prototype.invalidate = function (cacheObject, callback) {
|
||||
this.fastlyPurge.key(this.serviceId, cacheObject.key(), callback);
|
||||
};
|
||||
|
lib/cache/backend/varnish-http.js (vendored, 6 changes)
@ -2,7 +2,7 @@
|
||||
|
||||
var request = require('request');
|
||||
|
||||
function VarnishHttpCacheBackend(host, port) {
|
||||
function VarnishHttpCacheBackend (host, port) {
|
||||
this.host = host;
|
||||
this.port = port;
|
||||
}
|
||||
@ -13,7 +13,7 @@ module.exports = VarnishHttpCacheBackend;
|
||||
* @param cacheObject should respond to `key() -> String` method
|
||||
* @param {Function} callback
|
||||
*/
|
||||
VarnishHttpCacheBackend.prototype.invalidate = function(cacheObject, callback) {
|
||||
VarnishHttpCacheBackend.prototype.invalidate = function (cacheObject, callback) {
|
||||
request(
|
||||
{
|
||||
method: 'PURGE',
|
||||
@ -22,7 +22,7 @@ VarnishHttpCacheBackend.prototype.invalidate = function(cacheObject, callback) {
|
||||
'Invalidation-Match': '\\b' + cacheObject.key() + '\\b'
|
||||
}
|
||||
},
|
||||
function(err, response) {
|
||||
function (err, response) {
|
||||
if (err || response.statusCode !== 204) {
|
||||
return callback(new Error('Unable to invalidate Varnish object'));
|
||||
}
|
||||
|
lib/cache/layergroup-affected-tables.js (vendored, 10 changes)
@ -2,25 +2,25 @@
|
||||
|
||||
var LruCache = require('lru-cache');
|
||||
|
||||
function LayergroupAffectedTables() {
|
||||
function LayergroupAffectedTables () {
|
||||
// dbname + layergroupId -> affected tables cache
|
||||
this.cache = new LruCache({ max: 2000 });
|
||||
}
|
||||
|
||||
module.exports = LayergroupAffectedTables;
|
||||
|
||||
LayergroupAffectedTables.prototype.hasAffectedTables = function(dbName, layergroupId) {
|
||||
LayergroupAffectedTables.prototype.hasAffectedTables = function (dbName, layergroupId) {
|
||||
return this.cache.has(createKey(dbName, layergroupId));
|
||||
};
|
||||
|
||||
LayergroupAffectedTables.prototype.set = function(dbName, layergroupId, affectedTables) {
|
||||
LayergroupAffectedTables.prototype.set = function (dbName, layergroupId, affectedTables) {
|
||||
this.cache.set(createKey(dbName, layergroupId), affectedTables);
|
||||
};
|
||||
|
||||
LayergroupAffectedTables.prototype.get = function(dbName, layergroupId) {
|
||||
LayergroupAffectedTables.prototype.get = function (dbName, layergroupId) {
|
||||
return this.cache.get(createKey(dbName, layergroupId));
|
||||
};
|
||||
|
||||
function createKey(dbName, layergroupId) {
|
||||
function createKey (dbName, layergroupId) {
|
||||
return dbName + ':' + layergroupId;
|
||||
}
|
||||
|
lib/cache/model/named-maps-entry.js (vendored, 9 changes)
@ -2,7 +2,7 @@
|
||||
|
||||
var crypto = require('crypto');
|
||||
|
||||
function NamedMaps(owner, name) {
|
||||
function NamedMaps (owner, name) {
|
||||
this.namespace = 'n';
|
||||
this.owner = owner;
|
||||
this.name = name;
|
||||
@ -10,11 +10,10 @@ function NamedMaps(owner, name) {
|
||||
|
||||
module.exports = NamedMaps;
|
||||
|
||||
|
||||
NamedMaps.prototype.key = function() {
|
||||
NamedMaps.prototype.key = function () {
|
||||
return this.namespace + ':' + shortHashKey(this.owner + ':' + this.name);
|
||||
};
|
||||
|
||||
function shortHashKey(target) {
|
||||
return crypto.createHash('sha256').update(target).digest('base64').substring(0,6);
|
||||
function shortHashKey (target) {
|
||||
return crypto.createHash('sha256').update(target).digest('base64').substring(0, 6);
|
||||
}
|
||||
|
lib/cache/named-map-provider-cache.js (vendored, 2 changes)
@ -34,7 +34,7 @@ module.exports = class NamedMapProviderCache {
|
||||
const namedMapProviders = this.providerCache.get(namedMapKey) || {};
|
||||
const providerKey = createProviderKey(config, authToken, params);
|
||||
|
||||
if (namedMapProviders.hasOwnProperty(providerKey)) {
|
||||
if (Object.prototype.hasOwnProperty.call(namedMapProviders, providerKey)) {
|
||||
return callback(null, namedMapProviders[providerKey]);
|
||||
}
|
||||
|
||||
|
lib/cache/surrogate-keys-cache.js (vendored, 18 changes)
@ -6,28 +6,26 @@ var queue = require('queue-async');
|
||||
* @param {Array|Object} cacheBackends each backend backend should respond to `invalidate(cacheObject, callback)` method
|
||||
* @constructor
|
||||
*/
|
||||
function SurrogateKeysCache(cacheBackends) {
|
||||
function SurrogateKeysCache (cacheBackends) {
|
||||
this.cacheBackends = Array.isArray(cacheBackends) ? cacheBackends : [cacheBackends];
|
||||
}
|
||||
|
||||
module.exports = SurrogateKeysCache;
|
||||
|
||||
|
||||
/**
|
||||
* @param response should respond to `header(key, value)` method
|
||||
* @param cacheObject should respond to `key() -> String` method
|
||||
*/
|
||||
SurrogateKeysCache.prototype.tag = function(response, cacheObject) {
|
||||
SurrogateKeysCache.prototype.tag = function (response, cacheObject) {
|
||||
var newKey = cacheObject.key();
|
||||
response.set('Surrogate-Key', appendSurrogateKey(
|
||||
response.get('Surrogate-Key'),
|
||||
Array.isArray(newKey) ? cacheObject.key().join(' ') : newKey
|
||||
));
|
||||
|
||||
};
|
||||
|
||||
function appendSurrogateKey(currentKey, newKey) {
|
||||
if (!!currentKey) {
|
||||
function appendSurrogateKey (currentKey, newKey) {
|
||||
if (currentKey) {
|
||||
newKey = currentKey + ' ' + newKey;
|
||||
}
|
||||
return newKey;
|
||||
@ -37,16 +35,16 @@ function appendSurrogateKey(currentKey, newKey) {
|
||||
* @param cacheObject should respond to `key() -> String` method
|
||||
* @param {Function} callback
|
||||
*/
|
||||
SurrogateKeysCache.prototype.invalidate = function(cacheObject, callback) {
|
||||
SurrogateKeysCache.prototype.invalidate = function (cacheObject, callback) {
|
||||
var invalidationQueue = queue(this.cacheBackends.length);
|
||||
|
||||
this.cacheBackends.forEach(function(cacheBackend) {
|
||||
invalidationQueue.defer(function(cacheBackend, done) {
|
||||
this.cacheBackends.forEach(function (cacheBackend) {
|
||||
invalidationQueue.defer(function (cacheBackend, done) {
|
||||
cacheBackend.invalidate(cacheObject, done);
|
||||
}, cacheBackend);
|
||||
});
|
||||
|
||||
invalidationQueue.awaitAll(function(err, result) {
|
||||
invalidationQueue.awaitAll(function (err, result) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
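SurrogateKeysCache.prototype.invalidate above fans out one invalidation per backend through queue-async and only calls back once every backend has answered. A reduced sketch of the same pattern with dummy backends (names and payloads are illustrative, not part of this patch):

    var queue = require('queue-async');

    var backends = [
        { invalidate: function (cacheObject, done) { done(null, 'varnish purged'); } },
        { invalidate: function (cacheObject, done) { done(null, 'fastly purged'); } }
    ];
    var cacheObject = { key: function () { return 'n:abc123'; } };

    var invalidationQueue = queue(backends.length);
    backends.forEach(function (backend) {
        invalidationQueue.defer(function (backend, done) {
            backend.invalidate(cacheObject, done);
        }, backend);
    });
    invalidationQueue.awaitAll(function (err, results) {
        // err is the first failure, if any; results collects each backend's answer
        console.log(err, results); // null [ 'varnish purged', 'fastly purged' ]
    });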
@ -53,11 +53,11 @@ module.exports = class AggregationMapConfig extends MapConfig {
|
||||
return null;
|
||||
}
|
||||
|
||||
static supportsGeometryType(geometryType) {
|
||||
static supportsGeometryType (geometryType) {
|
||||
return AggregationMapConfig.SUPPORTED_GEOMETRY_TYPES.includes(geometryType);
|
||||
}
|
||||
|
||||
static getAggregationGeometryColumn() {
|
||||
static getAggregationGeometryColumn () {
|
||||
return aggregationQuery.GEOMETRY_COLUMN;
|
||||
}
|
||||
|
||||
@ -83,7 +83,7 @@ module.exports = class AggregationMapConfig extends MapConfig {
|
||||
}
|
||||
|
||||
getAggregatedQuery (index) {
|
||||
const { sql_raw, sql } = this.getLayer(index).options;
|
||||
const { sql_raw: sqlRaw, sql } = this.getLayer(index).options;
|
||||
const {
|
||||
// The default aggregation has no placement, columns or dimensions;
|
||||
// this enables the special "full-sample" aggregation.
|
||||
@ -96,7 +96,7 @@ module.exports = class AggregationMapConfig extends MapConfig {
|
||||
} = this.getAggregation(index);
|
||||
|
||||
return aggregationQuery({
|
||||
query: sql_raw || sql,
|
||||
query: sqlRaw || sql,
|
||||
resolution,
|
||||
threshold,
|
||||
placement,
|
||||
@ -108,7 +108,7 @@ module.exports = class AggregationMapConfig extends MapConfig {
|
||||
}
|
||||
|
||||
isAggregationLayer (index) {
|
||||
let hasAggregation = this.hasLayerAggregation(index);
|
||||
const hasAggregation = this.hasLayerAggregation(index);
|
||||
// for vector-only MapConfig are aggregated unless explicitly disabled
|
||||
return hasAggregation || (
|
||||
this.isVectorOnlyMapConfig() && hasAggregation !== AggregationMapConfig.HAS_AGGREGATION_DISABLED
|
||||
@ -176,7 +176,7 @@ module.exports = class AggregationMapConfig extends MapConfig {
|
||||
_getLayerAggregationRequiredColumns (index) {
|
||||
const { columns, dimensions } = this.getAggregation(index);
|
||||
|
||||
let finalColumns = ['cartodb_id', '_cdb_feature_count'];
|
||||
const finalColumns = ['cartodb_id', '_cdb_feature_count'];
|
||||
|
||||
let aggregatedColumns = [];
|
||||
if (columns) {
|
||||
@ -191,10 +191,10 @@ module.exports = class AggregationMapConfig extends MapConfig {
|
||||
return removeDuplicates(finalColumns.concat(aggregatedColumns).concat(dimensionsColumns));
|
||||
}
|
||||
|
||||
doesLayerReachThreshold(index, featureCount) {
|
||||
const threshold = this.getAggregation(index) && this.getAggregation(index).threshold ?
|
||||
this.getAggregation(index).threshold :
|
||||
AggregationMapConfig.THRESHOLD;
|
||||
doesLayerReachThreshold (index, featureCount) {
|
||||
const threshold = this.getAggregation(index) && this.getAggregation(index).threshold
|
||||
? this.getAggregation(index).threshold
|
||||
: AggregationMapConfig.THRESHOLD;
|
||||
|
||||
return featureCount >= threshold;
|
||||
}
|
||||
@ -243,7 +243,7 @@ module.exports = class AggregationMapConfig extends MapConfig {
|
||||
this._isEmptyParameter(aggregation.filters);
|
||||
}
|
||||
|
||||
_isEmptyParameter(parameter) {
|
||||
_isEmptyParameter (parameter) {
|
||||
return parameter === undefined || parameter === null || this._isEmptyObject(parameter);
|
||||
}
|
||||
|
||||
|
@ -9,7 +9,7 @@ const webmercator = new WebMercatorHelper();
|
||||
function optionsToParams (options) {
|
||||
return {
|
||||
sourceQuery: options.query,
|
||||
res: 256/options.resolution,
|
||||
res: 256 / options.resolution,
|
||||
columns: options.columns,
|
||||
dimensions: options.dimensions,
|
||||
filters: options.filters,
|
||||
@ -50,30 +50,30 @@ module.exports.infoForOptions = (options) => {
|
||||
};
|
||||
|
||||
const SUPPORTED_AGGREGATE_FUNCTIONS = {
|
||||
'count': {
|
||||
sql: (column_name, params) => `count(${params.aggregated_column || '*'})`
|
||||
count: {
|
||||
sql: (columnName, params) => `count(${params.aggregated_column || '*'})`
|
||||
},
|
||||
'avg': {
|
||||
sql: (column_name, params) => `avg(${params.aggregated_column || column_name})`
|
||||
avg: {
|
||||
sql: (columnName, params) => `avg(${params.aggregated_column || columnName})`
|
||||
},
|
||||
'sum': {
|
||||
sql: (column_name, params) => `sum(${params.aggregated_column || column_name})`
|
||||
sum: {
|
||||
sql: (columnName, params) => `sum(${params.aggregated_column || columnName})`
|
||||
},
|
||||
'min': {
|
||||
sql: (column_name, params) => `min(${params.aggregated_column || column_name})`
|
||||
min: {
|
||||
sql: (columnName, params) => `min(${params.aggregated_column || columnName})`
|
||||
},
|
||||
'max': {
|
||||
sql: (column_name, params) => `max(${params.aggregated_column || column_name})`
|
||||
max: {
|
||||
sql: (columnName, params) => `max(${params.aggregated_column || columnName})`
|
||||
},
|
||||
'mode': {
|
||||
sql: (column_name, params) => `mode() WITHIN GROUP (ORDER BY ${params.aggregated_column || column_name})`
|
||||
mode: {
|
||||
sql: (columnName, params) => `mode() WITHIN GROUP (ORDER BY ${params.aggregated_column || columnName})`
|
||||
}
|
||||
};
|
||||
|
||||
module.exports.SUPPORTED_AGGREGATE_FUNCTIONS = Object.keys(SUPPORTED_AGGREGATE_FUNCTIONS);
|
||||
|
||||
const sep = (list) => {
|
||||
let expr = list.join(', ');
|
||||
const expr = list.join(', ');
|
||||
return expr ? ', ' + expr : expr;
|
||||
};
|
||||
|
||||
@ -85,20 +85,20 @@ const aggregateColumns = ctx => {
|
||||
}, ctx.columns || {});
|
||||
};
|
||||
|
||||
const aggregateExpression = (column_name, column_parameters) => {
|
||||
const aggregate_function = column_parameters.aggregate_function || 'count';
|
||||
const aggregate_definition = SUPPORTED_AGGREGATE_FUNCTIONS[aggregate_function];
|
||||
if (!aggregate_definition) {
|
||||
throw new Error("Invalid Aggregate function: '" + aggregate_function + "'");
|
||||
const aggregateExpression = (columnName, columnParameters) => {
|
||||
const aggregateFunction = columnParameters.aggregate_function || 'count';
|
||||
const aggregateDefinition = SUPPORTED_AGGREGATE_FUNCTIONS[aggregateFunction];
|
||||
if (!aggregateDefinition) {
|
||||
throw new Error("Invalid Aggregate function: '" + aggregateFunction + "'");
|
||||
}
|
||||
return aggregate_definition.sql(column_name, column_parameters);
|
||||
return aggregateDefinition.sql(columnName, columnParameters);
|
||||
};
|
||||
|
||||
const aggregateColumnDefs = ctx => {
|
||||
let columns = aggregateColumns(ctx);
|
||||
return sep(Object.keys(columns).map(column_name => {
|
||||
const aggregate_expression = aggregateExpression(column_name, columns[column_name]);
|
||||
return `${aggregate_expression} AS ${column_name}`;
|
||||
const columns = aggregateColumns(ctx);
|
||||
return sep(Object.keys(columns).map(columnName => {
|
||||
const aggregate = aggregateExpression(columnName, columns[columnName]);
|
||||
return `${aggregate} AS ${columnName}`;
|
||||
}));
|
||||
};
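Reading aggregateExpression together with SUPPORTED_AGGREGATE_FUNCTIONS above, each column definition maps directly to a SQL snippet; for instance (column and parameter values are illustrative):

    aggregateExpression('price', { aggregate_function: 'avg' });
    // => "avg(price)"
    aggregateExpression('price', { aggregate_function: 'sum', aggregated_column: 'amount' });
    // => "sum(amount)"
    aggregateExpression('anything', {});
    // => "count(*)"   (count is the default and ignores the column name)
    aggregateExpression('price', { aggregate_function: 'median' });
    // => throws Error: Invalid Aggregate function: 'median'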
|
||||
|
||||
@ -119,7 +119,7 @@ const timeDimensionParameters = definition => {
|
||||
|
||||
// Adapt old-style dimension definitions for backwards compatibility
|
||||
const adaptDimensionDefinition = definition => {
|
||||
if (typeof(definition) === 'string') {
|
||||
if (typeof (definition) === 'string') {
|
||||
return { column: definition };
|
||||
}
|
||||
return definition;
|
||||
@ -135,7 +135,7 @@ const dimensionExpression = definition => {
|
||||
};
|
||||
|
||||
const dimensionNamesAndExpressions = (ctx) => {
|
||||
let dimensions = aggregateDimensions(ctx);
|
||||
const dimensions = aggregateDimensions(ctx);
|
||||
return Object.keys(dimensions).map(dimensionName => {
|
||||
const dimension = adaptDimensionDefinition(dimensions[dimensionName]);
|
||||
const expression = dimensionExpression(dimension);
|
||||
@ -152,7 +152,7 @@ const dimensionNames = (ctx, table) => {
|
||||
const dimensionDefs = ctx => {
|
||||
return sep(
|
||||
dimensionNamesAndExpressions(ctx)
|
||||
.map(([dimensionName, expression]) => `${expression.sql} AS "${dimensionName}"`)
|
||||
.map(([dimensionName, expression]) => `${expression.sql} AS "${dimensionName}"`)
|
||||
);
|
||||
};
|
||||
|
||||
@ -192,7 +192,7 @@ const sqlQ = (value) => {
|
||||
|
||||
const FILTERS = {
|
||||
between: (expr, filter) => {
|
||||
const lo = filter.greater_than_or_equal_to, hi = filter.less_than_or_equal_to;
|
||||
const lo = filter.greater_than_or_equal_to; const hi = filter.less_than_or_equal_to;
|
||||
if (lo != null && hi != null) {
|
||||
return `(${expr} BETWEEN ${sqlQ(lo)} AND ${sqlQ(hi)})`;
|
||||
}
|
||||
@ -218,7 +218,7 @@ const FILTERS = {
|
||||
}
|
||||
},
|
||||
range: (expr, filter) => {
|
||||
let conds = [];
|
||||
const conds = [];
|
||||
if (filter.greater_than_or_equal_to != null) {
|
||||
conds.push(`(${expr} >= ${sqlQ(filter.greater_than_or_equal_to)})`);
|
||||
}
|
||||
@ -238,26 +238,25 @@ const FILTERS = {
|
||||
};
|
||||
|
||||
const filterConditions = ctx => {
|
||||
let columns = aggregateColumns(ctx);
|
||||
let dimensions = aggregateDimensions(ctx);
|
||||
let filters = aggregateFilters(ctx);
|
||||
return Object.keys(filters).map(filtered_column => {
|
||||
let filtered_expr;
|
||||
if (columns[filtered_column]) {
|
||||
filtered_expr = aggregateExpression(filtered_column, columns[filtered_column]);
|
||||
const columns = aggregateColumns(ctx);
|
||||
const dimensions = aggregateDimensions(ctx);
|
||||
const filters = aggregateFilters(ctx);
|
||||
return Object.keys(filters).map(filteredColumn => {
|
||||
let filteredExpr;
|
||||
if (columns[filteredColumn]) {
|
||||
filteredExpr = aggregateExpression(filteredColumn, columns[filteredColumn]);
|
||||
} else if (dimensions[filteredColumn]) {
|
||||
filteredExpr = dimensions[filteredColumn];
|
||||
}
|
||||
else if (dimensions[filtered_column]) {
|
||||
filtered_expr = dimensions[filtered_column];
|
||||
if (!filteredExpr) {
|
||||
throw new Error("Invalid filtered column: '" + filteredColumn + "'");
|
||||
}
|
||||
if (!filtered_expr) {
|
||||
throw new Error("Invalid filtered column: '" + filtered_column + "'");
|
||||
}
|
||||
return filterConditionSQL(filtered_expr, filters[filtered_column]);
|
||||
return filterConditionSQL(filteredExpr, filters[filteredColumn]);
|
||||
}).join(' AND ');
|
||||
};
|
||||
|
||||
const havingClause = ctx => {
|
||||
let cond = filterConditions(ctx);
|
||||
const cond = filterConditions(ctx);
|
||||
return cond ? `HAVING ${cond}` : '';
|
||||
};
|
||||
|
||||
@ -274,8 +273,8 @@ const havingClause = ctx => {
|
||||
// NOTE 2: The 0.00028 is used in Mapnik (and replicated in pg-mvt) and comes from
|
||||
// OGC's Styled Layer Descriptor Implementation Specification
|
||||
const gridResolution = ctx => {
|
||||
const minimumResolution = webmercator.getResolution({ z : 38 });
|
||||
return `${256/ctx.res} * GREATEST(!scale_denominator! * 0.00028, ${minimumResolution})::double precision`;
|
||||
const minimumResolution = webmercator.getResolution({ z: 38 });
|
||||
return `${256 / ctx.res} * GREATEST(!scale_denominator! * 0.00028, ${minimumResolution})::double precision`;
|
||||
};
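As a rough sanity check of the expression above, here is the same formula evaluated in plain JS for the default resolution (the scale denominator and the z=38 resolution are ballpark figures, not values taken from this codebase):

    // options.resolution = 1  =>  ctx.res = 256 / 1, so 256 / ctx.res = 1
    var scaleDenominator = 559082264;   // roughly a zoom-0 web mercator tile
    var minimumResolution = 5.7e-7;     // ballpark of webmercator.getResolution({ z: 38 })
    var cellSize = 1 * Math.max(scaleDenominator * 0.00028, minimumResolution);
    console.log(cellSize); // ≈ 156543, i.e. one aggregation cell per pixel at zoom 0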
|
||||
|
||||
// SQL query to extract the boundaries of the area to be aggregated and the grid resolution
|
||||
@ -309,55 +308,49 @@ const gridInfoQuery = ctx => {
|
||||
`;
|
||||
};
|
||||
|
||||
|
||||
// Function to generate the resulting point for a cell from the aggregated data
|
||||
const aggregatedPointWebMercator = (ctx) => {
|
||||
switch (ctx.placement) {
|
||||
|
||||
// For centroid, we return the average of the cell
|
||||
case 'centroid':
|
||||
return ', ST_SetSRID(ST_MakePoint(AVG(cdb_x), AVG(cdb_y)), 3857) AS the_geom_webmercator';
|
||||
// For centroid, we return the average of the cell
|
||||
case 'centroid':
|
||||
return ', ST_SetSRID(ST_MakePoint(AVG(cdb_x), AVG(cdb_y)), 3857) AS the_geom_webmercator';
|
||||
|
||||
// Middle point of the cell
|
||||
case 'point-grid':
|
||||
return `, ST_SetSRID(ST_MakePoint(cdb_pos_grid_x, cdb_pos_grid_y), 3857) AS the_geom_webmercator`;
|
||||
case 'point-grid':
|
||||
return ', ST_SetSRID(ST_MakePoint(cdb_pos_grid_x, cdb_pos_grid_y), 3857) AS the_geom_webmercator';
|
||||
|
||||
// For point-sample we'll get a single point directly from the source
|
||||
// If it's default aggregation we'll add the extra columns to keep backwards compatibility
|
||||
case 'point-sample':
|
||||
return '';
|
||||
case 'point-sample':
|
||||
return '';
|
||||
|
||||
default:
|
||||
throw new Error(`Invalid aggregation placement "${ctx.placement}"`);
|
||||
default:
|
||||
throw new Error(`Invalid aggregation placement "${ctx.placement}"`);
|
||||
}
|
||||
};
|
||||
|
||||
// Function to generate the resulting point for a cell from the a join with the source
|
||||
const aggregatedPointJoin = (ctx) => {
|
||||
switch (ctx.placement) {
|
||||
|
||||
case 'centroid':
|
||||
return '';
|
||||
|
||||
case 'point-grid':
|
||||
return '';
|
||||
|
||||
// For point-sample we'll get a single point directly from the source
|
||||
// If it's default aggregation we'll add the extra columns to keep backwards compatibility
|
||||
case 'point-sample':
|
||||
return `
|
||||
NATURAL JOIN
|
||||
(
|
||||
SELECT ${ctx.isDefaultAggregation ? `*` : `cartodb_id, the_geom_webmercator`}
|
||||
FROM
|
||||
(
|
||||
${ctx.sourceQuery}
|
||||
) __cdb_src_query
|
||||
) __cdb_query_columns
|
||||
`;
|
||||
|
||||
default:
|
||||
throw new Error('Invalid aggregation placement "${ctx.placement}"');
|
||||
case 'centroid':
|
||||
return '';
|
||||
case 'point-grid':
|
||||
return '';
|
||||
// For point-sample we'll get a single point directly from the source
|
||||
// If it's default aggregation we'll add the extra columns to keep backwards compatibility
|
||||
case 'point-sample':
|
||||
return `
|
||||
NATURAL JOIN
|
||||
(
|
||||
SELECT ${ctx.isDefaultAggregation ? '*' : 'cartodb_id, the_geom_webmercator'}
|
||||
FROM
|
||||
(
|
||||
${ctx.sourceQuery}
|
||||
) __cdb_src_query
|
||||
) __cdb_query_columns
|
||||
`;
|
||||
default:
|
||||
throw new Error(`Invalid aggregation placement "${ctx.placement}"`);
|
||||
}
|
||||
};
|
||||
|
||||
@ -367,17 +360,16 @@ NATURAL JOIN
|
||||
// which requires extra data in the group by clause
|
||||
const aggregatedPosCoordinate = (ctx, coordinate) => {
|
||||
switch (ctx.placement) {
|
||||
// For point-grid we return the coordinate of the middle point of the grid
|
||||
case `point-grid`:
|
||||
return `(FLOOR(cdb_${coordinate} / __cdb_grid_params.cdb_res) + 0.5) * __cdb_grid_params.cdb_res`;
|
||||
// For point-grid we return the coordinate of the middle point of the grid
|
||||
case 'point-grid':
|
||||
return `(FLOOR(cdb_${coordinate} / __cdb_grid_params.cdb_res) + 0.5) * __cdb_grid_params.cdb_res`;
|
||||
|
||||
// For other, we return the cell position (relative to the world)
|
||||
default:
|
||||
return `FLOOR(cdb_${coordinate} / __cdb_grid_params.cdb_res)`;
|
||||
default:
|
||||
return `FLOOR(cdb_${coordinate} / __cdb_grid_params.cdb_res)`;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
const aggregationQueryTemplate = ctx => `
|
||||
WITH __cdb_grid_params AS
|
||||
(
|
||||
@ -445,5 +437,5 @@ const clusterFeaturesQuery = ctx => `
|
||||
module.exports.featuresQuery = (id, options) => clusterFeaturesQuery({
|
||||
id,
|
||||
sourceQuery: options.query,
|
||||
res: 256/options.resolution
|
||||
res: 256 / options.resolution
|
||||
});
|
||||
|
@ -76,11 +76,11 @@ module.exports.createAggregationFiltersValidator = function (mapconfig, validPar
|
||||
};
|
||||
};
|
||||
|
||||
function createAggregationColumnNamesValidator(mapconfig) {
|
||||
function createAggregationColumnNamesValidator (mapconfig) {
|
||||
return function validateAggregationColumnNames (value, key, index) {
|
||||
Object.keys(value).forEach((columnName) => {
|
||||
if (columnName.length <= 0) {
|
||||
const message = `Invalid column name, should be a non empty string`;
|
||||
const message = 'Invalid column name, should be a non empty string';
|
||||
throw createLayerError(message, mapconfig, index);
|
||||
}
|
||||
});
|
||||
@ -90,10 +90,10 @@ function createAggregationColumnNamesValidator(mapconfig) {
|
||||
function createAggregateFunctionValidator (mapconfig, validAggregatedFunctions) {
|
||||
return function validateAggregateFunction (value, key, index) {
|
||||
Object.keys(value).forEach((columnName) => {
|
||||
const { aggregate_function } = value[columnName];
|
||||
const { aggregate_function: aggregateFunction } = value[columnName];
|
||||
|
||||
if (!validAggregatedFunctions.includes(aggregate_function)) {
|
||||
const message = `Unsupported aggregation function ${aggregate_function},` +
|
||||
if (!validAggregatedFunctions.includes(aggregateFunction)) {
|
||||
const message = `Unsupported aggregation function ${aggregateFunction},` +
|
||||
` valid ones: ${validAggregatedFunctions.join(', ')}`;
|
||||
throw createLayerError(message, mapconfig, index);
|
||||
}
|
||||
@ -104,17 +104,17 @@ function createAggregateFunctionValidator (mapconfig, validAggregatedFunctions)
|
||||
function createAggregatedColumnValidator (mapconfig) {
|
||||
return function validateAggregatedColumn (value, key, index) {
|
||||
Object.keys(value).forEach((columnName) => {
|
||||
const { aggregated_column } = value[columnName];
|
||||
const { aggregated_column: aggregatedColumn } = value[columnName];
|
||||
|
||||
if (typeof aggregated_column !== 'string' || aggregated_column <= 0) {
|
||||
const message = `Invalid aggregated column, should be a non empty string`;
|
||||
if (typeof aggregatedColumn !== 'string' || aggregatedColumn <= 0) {
|
||||
const message = 'Invalid aggregated column, should be a non empty string';
|
||||
throw createLayerError(message, mapconfig, index);
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
function createLayerError(message, mapconfig, index) {
|
||||
function createLayerError (message, mapconfig, index) {
|
||||
const error = new Error(message);
|
||||
error.type = 'layer';
|
||||
error.layer = {
|
||||
|
@ -4,7 +4,7 @@
|
||||
// a valid (case-insensitive) tz/PG name;
|
||||
// they include abbreviations defined by PG (which have precedence and
|
||||
// are fixed offsets, not handling DST) or general names that can handle DST.
|
||||
function timezone(tz) {
|
||||
function timezone (tz) {
|
||||
if (isFinite(tz)) {
|
||||
return `INTERVAL '${tz} seconds'`;
|
||||
}
|
||||
@ -19,21 +19,21 @@ function timezone(tz) {
|
||||
// So, for using this with aggregations, relying on dates & times
|
||||
// converted to UTC UNIX epoch numbers, apply `to_timestamp` to the
|
||||
// (converted) column.
|
||||
function timeExpression(t, tz) {
|
||||
if (tz !== undefined) {
|
||||
function timeExpression (t, tz) {
|
||||
if (tz !== undefined) {
|
||||
return `timezone(${timezone(tz)}, ${t})`;
|
||||
}
|
||||
return t;
|
||||
}
|
||||
return t;
|
||||
}
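A couple of concrete expansions of timezone/timeExpression as defined above (the column name is illustrative):

    timezone(7200);
    // => "INTERVAL '7200 seconds'"   (numeric offsets become fixed intervals)
    timeExpression('created_at');
    // => "created_at"                (no timezone given: the column is used as-is)
    timeExpression('created_at', 7200);
    // => "timezone(INTERVAL '7200 seconds', created_at)"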
|
||||
|
||||
function epochWithDefaults(epoch) {
|
||||
function epochWithDefaults (epoch) {
|
||||
/* jshint maxcomplexity:9 */ // goddammit linter, I like this as is!!
|
||||
const format = /^(\d\d\d\d)(?:\-?(\d\d)(?:\-?(\d\d)(?:[T\s]?(\d\d)(?:(\d\d)(?:\:(\d\d))?)?)?)?)?$/;
|
||||
const match = (epoch || '').match(format) || [];
|
||||
const year = match[1] || '0001';
|
||||
const month = match[2] || '01';
|
||||
const day = match[3] || '01';
|
||||
const hour = match[4] || '00';
|
||||
const format = /^(\d\d\d\d)(?:\-?(\d\d)(?:\-?(\d\d)(?:[T\s]?(\d\d)(?:(\d\d)(?:\:(\d\d))?)?)?)?)?$/; // eslint-disable-line no-useless-escape
|
||||
const match = (epoch || '').match(format) || [];
|
||||
const year = match[1] || '0001';
|
||||
const month = match[2] || '01';
|
||||
const day = match[3] || '01';
|
||||
const hour = match[4] || '00';
|
||||
const minute = match[5] || '00';
|
||||
const second = match[6] || '00';
|
||||
return `${year}-${month}-${day}T${hour}:${minute}:${second}`;
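Following the regex and the defaults above, partial inputs are padded down to seconds; for example:

    epochWithDefaults('2017');
    // => '2017-01-01T00:00:00'
    epochWithDefaults('2017-03');
    // => '2017-03-01T00:00:00'
    epochWithDefaults(undefined);
    // => '0001-01-01T00:00:00'   (every component falls back to its default)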
|
||||
@ -44,9 +44,9 @@ function epochWithDefaults(epoch) {
|
||||
// It can be partial, e.g. 'YYYY', 'YYYY-MM', 'YYYY-MM-DDTHH', etc.
|
||||
// Defaults are applied: YYYY=0001, MM=01, DD=01, HH=00, MM=00, S=00
|
||||
// It returns a timestamp without time zone
|
||||
function epochExpression(epoch) {
|
||||
function epochExpression (epoch) {
|
||||
return `TIMESTAMP '${epoch}'`;
|
||||
}
|
||||
}
|
||||
|
||||
const YEARSPAN = "(date_part('year', $t)-date_part('year', $epoch))";
|
||||
// Note that SECONDSPAN is not a UTC epoch, but an epoch in the specified TZ,
|
||||
@ -112,10 +112,10 @@ const serialParts = {
|
||||
}
|
||||
};
|
||||
|
||||
function serialSqlExpr(params) {
|
||||
function serialSqlExpr (params) {
|
||||
const { sql, zeroBased } = serialParts[params.units];
|
||||
const column = timeExpression(params.time, params.timezone);
|
||||
const epoch = epochExpression(params.starting);
|
||||
const epoch = epochExpression(params.starting);
|
||||
const serial = sql.replace(/\$t/g, column).replace(/\$epoch/g, epoch);
|
||||
let expr = serial;
|
||||
if (params.count !== 1) {
|
||||
@ -131,22 +131,22 @@ function serialSqlExpr(params) {
|
||||
}
|
||||
|
||||
const isoParts = {
|
||||
second: `to_char($t, 'YYYY-MM-DD"T"HH24:MI:SS')`,
|
||||
minute: `to_char($t, 'YYYY-MM-DD"T"HH24:MI')`,
|
||||
hour: `to_char($t, 'YYYY-MM-DD"T"HH24')`,
|
||||
day: `to_char($t, 'YYYY-MM-DD')`,
|
||||
month: `to_char($t, 'YYYY-MM')`,
|
||||
year: `to_char($t, 'YYYY')`,
|
||||
week: `to_char($t, 'IYYY-"W"IW')`,
|
||||
quarter: `to_char($t, 'YYYY-"Q"Q')`,
|
||||
semester: `to_char($t, 'YYYY"S"') || to_char(CEIL(date_part('month', $t)/6), '9')`,
|
||||
trimester: `to_char($t, 'YYYY"t"') || to_char(CEIL(date_part('month', $t)/4), '9')`,
|
||||
decade: `to_char(date_part('decade', $t), '"D"999')`,
|
||||
century: `to_char($t, '"C"CC')`,
|
||||
millennium: `to_char(date_part('millennium', $t), '"M"999')`
|
||||
second: 'to_char($t, \'YYYY-MM-DD"T"HH24:MI:SS\')',
|
||||
minute: 'to_char($t, \'YYYY-MM-DD"T"HH24:MI\')',
|
||||
hour: 'to_char($t, \'YYYY-MM-DD"T"HH24\')',
|
||||
day: 'to_char($t, \'YYYY-MM-DD\')',
|
||||
month: 'to_char($t, \'YYYY-MM\')',
|
||||
year: 'to_char($t, \'YYYY\')',
|
||||
week: 'to_char($t, \'IYYY-"W"IW\')',
|
||||
quarter: 'to_char($t, \'YYYY-"Q"Q\')',
|
||||
semester: 'to_char($t, \'YYYY"S"\') || to_char(CEIL(date_part(\'month\', $t)/6), \'9\')',
|
||||
trimester: 'to_char($t, \'YYYY"t"\') || to_char(CEIL(date_part(\'month\', $t)/4), \'9\')',
|
||||
decade: 'to_char(date_part(\'decade\', $t), \'"D"999\')',
|
||||
century: 'to_char($t, \'"C"CC\')',
|
||||
millennium: 'to_char(date_part(\'millennium\', $t), \'"M"999\')'
|
||||
};
|
||||
|
||||
function isoSqlExpr(params) {
|
||||
function isoSqlExpr (params) {
|
||||
const column = timeExpression(params.time, params.timezone);
|
||||
if (params.count > 1) {
|
||||
// TODO: it would be sensible to return the ISO of the first unit in the period
|
||||
@ -156,19 +156,19 @@ function isoSqlExpr(params) {
|
||||
}
|
||||
|
||||
const cyclicParts = {
|
||||
dayOfWeek: `date_part('isodow', $t)`, // 1 = monday to 7 = sunday;
|
||||
dayOfMonth: `date_part('day', $t)`, // 1 to 31
|
||||
dayOfYear: `date_part('doy', $t)`, // 1 to 366
|
||||
hourOfDay: `date_part('hour', $t)`, // 0 to 23
|
||||
monthOfYear: `date_part('month', $t)`, // 1 to 12
|
||||
quarterOfYear: `date_part('quarter', $t)`, // 1 to 4
|
||||
semesterOfYear: `FLOOR((date_part('month', $t)-1)/6.0) + 1`, // 1 to 2
|
||||
trimesterOfYear: `FLOOR((date_part('month', $t)-1)/4.0) + 1`, // 1 to 3
|
||||
weekOfYear: `date_part('week', $t)`, // 1 to 53
|
||||
minuteOfHour: `date_part('minute', $t)` // 0 to 59
|
||||
dayOfWeek: 'date_part(\'isodow\', $t)', // 1 = monday to 7 = sunday;
|
||||
dayOfMonth: 'date_part(\'day\', $t)', // 1 to 31
|
||||
dayOfYear: 'date_part(\'doy\', $t)', // 1 to 366
|
||||
hourOfDay: 'date_part(\'hour\', $t)', // 0 to 23
|
||||
monthOfYear: 'date_part(\'month\', $t)', // 1 to 12
|
||||
quarterOfYear: 'date_part(\'quarter\', $t)', // 1 to 4
|
||||
semesterOfYear: 'FLOOR((date_part(\'month\', $t)-1)/6.0) + 1', // 1 to 2
|
||||
trimesterOfYear: 'FLOOR((date_part(\'month\', $t)-1)/4.0) + 1', // 1 to 3
|
||||
weekOfYear: 'date_part(\'week\', $t)', // 1 to 53
|
||||
minuteOfHour: 'date_part(\'minute\', $t)' // 0 to 59
|
||||
};
|
||||
|
||||
function cyclicSqlExpr(params) {
|
||||
function cyclicSqlExpr (params) {
|
||||
const column = timeExpression(params.time, params.timezone);
|
||||
return cyclicParts[params.units].replace(/\$t/g, column);
|
||||
}
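With the cyclicParts table above, the expression generation works out to, for example (column name illustrative):

    cyclicSqlExpr({ time: 'created_at', units: 'dayOfWeek' });
    // => "date_part('isodow', created_at)"
    cyclicSqlExpr({ time: 'created_at', units: 'monthOfYear', timezone: 3600 });
    // => "date_part('month', timezone(INTERVAL '3600 seconds', created_at))"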
|
||||
@ -176,7 +176,7 @@ function cyclicSqlExpr(params) {
|
||||
const ACCEPTED_PARAMETERS = ['time', 'units', 'timezone', 'count', 'starting', 'format'];
|
||||
const REQUIRED_PARAMETERS = ['time', 'units'];
|
||||
|
||||
function validateParameters(params, checker) {
|
||||
function validateParameters (params, checker) {
|
||||
const errors = [];
|
||||
const presentParams = Object.keys(params);
|
||||
const invalidParams = presentParams.filter(param => !ACCEPTED_PARAMETERS.includes(param));
|
||||
@ -187,19 +187,19 @@ function validateParameters(params, checker) {
|
||||
if (missingParams.length) {
|
||||
errors.push(`Missing parameters: ${missingParams.join(', ')}`);
|
||||
}
|
||||
const params_errors = checker(params);
|
||||
errors.push(...params_errors.errors);
|
||||
const paramsErrors = checker(params);
|
||||
errors.push(...paramsErrors.errors);
|
||||
if (errors.length) {
|
||||
throw new Error(`Invalid time dimension:\n${errors.join("\n")}`);
|
||||
throw new Error(`Invalid time dimension:\n${errors.join('\n')}`);
|
||||
}
|
||||
return params_errors.params;
|
||||
return paramsErrors.params;
|
||||
}
|
||||
|
||||
const VALID_CYCLIC_UNITS = Object.keys(cyclicParts);
|
||||
const VALID_SERIAL_UNITS = Object.keys(serialParts);
|
||||
const VALID_ISO_UNITS = Object.keys(isoParts);
|
||||
|
||||
function cyclicCheckParams(params) {
|
||||
function cyclicCheckParams (params) {
|
||||
const errors = [];
|
||||
if (!VALID_CYCLIC_UNITS.includes(params.units)) {
|
||||
errors.push(`Invalid units "${params.units}"`);
|
||||
@ -210,7 +210,7 @@ function cyclicCheckParams(params) {
|
||||
return { errors: errors, params: params };
|
||||
}
|
||||
|
||||
function serialCheckParams(params) {
|
||||
function serialCheckParams (params) {
|
||||
const errors = [];
|
||||
if (!VALID_SERIAL_UNITS.includes(params.units)) {
|
||||
errors.push(`Invalid grouping units "${params.units}"`);
|
||||
@ -218,7 +218,7 @@ function serialCheckParams(params) {
|
||||
return { errors: errors, params: Object.assign({}, params, { starting: epochWithDefaults(params.starting) }) };
|
||||
}
|
||||
|
||||
function isoCheckParams(params) {
|
||||
function isoCheckParams (params) {
|
||||
const errors = [];
|
||||
if (!VALID_ISO_UNITS.includes(params.units)) {
|
||||
errors.push(`Invalid units "${params.units}"`);
|
||||
@ -244,11 +244,11 @@ const CLASSIFIERS = {
|
||||
}
|
||||
};
|
||||
|
||||
function isCyclic(units) {
|
||||
function isCyclic (units) {
|
||||
return VALID_CYCLIC_UNITS.includes(units);
|
||||
}
|
||||
|
||||
function classifierFor(params) {
|
||||
function classifierFor (params) {
|
||||
let classifier = 'serial';
|
||||
if (params.units && isCyclic(params.units)) {
|
||||
classifier = 'cyclic';
|
||||
@ -258,7 +258,7 @@ function classifierFor(params) {
|
||||
return CLASSIFIERS[classifier];
|
||||
}
|
||||
|
||||
function classificationSql(params) {
|
||||
function classificationSql (params) {
|
||||
const classifier = classifierFor(params);
|
||||
params = validateParameters(params, classifier.checkParams);
|
||||
return { sql: classifier.sqlExpr(params), effectiveParams: params };
|
||||
|
@ -1,6 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
function CdbRequest() {
|
||||
function CdbRequest () {
|
||||
this.RE_USER_FROM_HOST = new RegExp(global.environment.user_from_host ||
|
||||
'^([^\\.]+)\\.' // would extract "strk" from "strk.cartodb.com"
|
||||
);
|
||||
@ -8,18 +8,17 @@ function CdbRequest() {
|
||||
|
||||
module.exports = CdbRequest;
|
||||
|
||||
|
||||
CdbRequest.prototype.userByReq = function(req) {
|
||||
CdbRequest.prototype.userByReq = function (req) {
|
||||
var host = req.headers.host || '';
|
||||
if (req.params.user) {
|
||||
return req.params.user;
|
||||
}
|
||||
var mat = host.match(this.RE_USER_FROM_HOST);
|
||||
if ( ! mat ) {
|
||||
if (!mat) {
|
||||
global.logger.error("Pattern '%s' does not match hostname '%s'", this.RE_USER_FROM_HOST, host);
|
||||
return;
|
||||
}
|
||||
if ( mat.length !== 2 ) {
|
||||
if (mat.length !== 2) {
|
||||
global.logger.error("Pattern '%s' gave unexpected matches against '%s': %s", this.RE_USER_FROM_HOST, host, mat);
|
||||
return;
|
||||
}
|
||||
|
@ -12,9 +12,9 @@ const filteredQueryTpl = ctx => `
|
||||
AND
|
||||
${ctx.aggregationColumn} != '-infinity'::float
|
||||
AND
|
||||
${ctx.aggregationColumn} != 'NaN'::float` :
|
||||
''
|
||||
}
|
||||
${ctx.aggregationColumn} != 'NaN'::float`
|
||||
: ''
|
||||
}
|
||||
`;
|
||||
|
||||
const summaryQueryTpl = ctx => `
|
||||
@ -30,9 +30,9 @@ const summaryQueryTpl = ctx => `
|
||||
ELSE 0
|
||||
END
|
||||
) AS infinities_count,
|
||||
sum(CASE WHEN ${ctx.aggregationColumn} = 'NaN'::float THEN 1 ELSE 0 END) AS nans_count` :
|
||||
''
|
||||
}
|
||||
sum(CASE WHEN ${ctx.aggregationColumn} = 'NaN'::float THEN 1 ELSE 0 END) AS nans_count`
|
||||
: ''
|
||||
}
|
||||
FROM (${ctx.query}) _cdb_aggregation_nulls
|
||||
)
|
||||
`;
|
||||
@ -44,7 +44,7 @@ const rankedCategoriesQueryTpl = ctx => `
|
||||
${ctx.aggregationFn} AS value,
|
||||
row_number() OVER (ORDER BY ${ctx.aggregationFn} desc) as rank
|
||||
FROM (${filteredQueryTpl(ctx)}) filtered_source
|
||||
WHERE ${ctx.aggregation === "count" ? `${ctx.column}` : `${ctx.aggregationColumn}`} IS NOT NULL
|
||||
WHERE ${ctx.aggregation === 'count' ? `${ctx.column}` : `${ctx.aggregationColumn}`} IS NOT NULL
|
||||
GROUP BY ${ctx.column}
|
||||
ORDER BY 2 DESC
|
||||
)
|
||||
@ -70,7 +70,7 @@ const categoriesSummaryCountQueryTpl = ctx => `
|
||||
)
|
||||
`;
|
||||
|
||||
const specialNumericValuesColumns = () => `, nans_count, infinities_count`;
|
||||
const specialNumericValuesColumns = () => ', nans_count, infinities_count';
|
||||
|
||||
const rankedAggregationQueryTpl = ctx => `
|
||||
SELECT
|
||||
@ -82,7 +82,7 @@ const rankedAggregationQueryTpl = ctx => `
|
||||
max_val,
|
||||
count,
|
||||
categories_count
|
||||
${ctx.isFloatColumn ? `${specialNumericValuesColumns(ctx)}` : '' }
|
||||
${ctx.isFloatColumn ? `${specialNumericValuesColumns(ctx)}` : ''}
|
||||
FROM categories, summary, categories_summary_min_max, categories_summary_count
|
||||
WHERE rank < ${ctx.limit}
|
||||
UNION ALL
|
||||
@ -95,7 +95,7 @@ const rankedAggregationQueryTpl = ctx => `
|
||||
max_val,
|
||||
count,
|
||||
categories_count
|
||||
${ctx.isFloatColumn ? `${specialNumericValuesColumns(ctx)}` : '' }
|
||||
${ctx.isFloatColumn ? `${specialNumericValuesColumns(ctx)}` : ''}
|
||||
FROM categories, summary, categories_summary_min_max, categories_summary_count
|
||||
WHERE rank >= ${ctx.limit}
|
||||
GROUP BY
|
||||
@ -104,7 +104,7 @@ const rankedAggregationQueryTpl = ctx => `
|
||||
max_val,
|
||||
count,
|
||||
categories_count
|
||||
${ctx.isFloatColumn ? `${specialNumericValuesColumns(ctx)}` : '' }
|
||||
${ctx.isFloatColumn ? `${specialNumericValuesColumns(ctx)}` : ''}
|
||||
`;
|
||||
|
||||
const aggregationQueryTpl = ctx => `
|
||||
@ -117,7 +117,7 @@ const aggregationQueryTpl = ctx => `
|
||||
max_val,
|
||||
count,
|
||||
categories_count
|
||||
${ctx.isFloatColumn ? `${specialNumericValuesColumns(ctx)}` : '' }
|
||||
${ctx.isFloatColumn ? `${specialNumericValuesColumns(ctx)}` : ''}
|
||||
FROM (${ctx.query}) _cdb_aggregation_all, summary, categories_summary_min_max, categories_summary_count
|
||||
GROUP BY
|
||||
${ctx.column},
|
||||
@ -126,7 +126,7 @@ const aggregationQueryTpl = ctx => `
|
||||
max_val,
|
||||
count,
|
||||
categories_count
|
||||
${ctx.isFloatColumn ? `${specialNumericValuesColumns(ctx)}` : '' }
|
||||
${ctx.isFloatColumn ? `${specialNumericValuesColumns(ctx)}` : ''}
|
||||
ORDER BY value DESC
|
||||
`;
|
||||
|
||||
@ -138,7 +138,7 @@ const aggregationDataviewQueryTpl = ctx => `
|
||||
${rankedCategoriesQueryTpl(ctx)},
|
||||
${categoriesSummaryMinMaxQueryTpl(ctx)},
|
||||
${categoriesSummaryCountQueryTpl(ctx)}
|
||||
${!!ctx.override.ownFilter ? `${aggregationQueryTpl(ctx)}` : `${rankedAggregationQueryTpl(ctx)}`}
|
||||
${ctx.override.ownFilter ? `${aggregationQueryTpl(ctx)}` : `${rankedAggregationQueryTpl(ctx)}`}
|
||||
`;
|
||||
|
||||
const filterCategoriesQueryTpl = ctx => `
|
||||
@ -206,11 +206,11 @@ module.exports = class Aggregation extends BaseDataview {
|
||||
|
||||
_checkOptions (options) {
|
||||
if (typeof options.column !== 'string') {
|
||||
throw new Error(`Aggregation expects 'column' in dataview options`);
|
||||
throw new Error('Aggregation expects \'column\' in dataview options');
|
||||
}
|
||||
|
||||
if (typeof options.aggregation !== 'string') {
|
||||
throw new Error(`Aggregation expects 'aggregation' operation in dataview options`);
|
||||
throw new Error('Aggregation expects \'aggregation\' operation in dataview options');
|
||||
}
|
||||
|
||||
if (!VALID_OPERATIONS[options.aggregation]) {
|
||||
@ -218,7 +218,7 @@ module.exports = class Aggregation extends BaseDataview {
|
||||
}
|
||||
|
||||
const requiredOptions = VALID_OPERATIONS[options.aggregation];
|
||||
const missingOptions = requiredOptions.filter(requiredOption => !options.hasOwnProperty(requiredOption));
|
||||
const missingOptions = requiredOptions.filter(requiredOption => !Object.prototype.hasOwnProperty.call(options, requiredOption));
|
||||
|
||||
if (missingOptions.length > 0) {
|
||||
throw new Error(
|
||||
@ -244,9 +244,9 @@ module.exports = class Aggregation extends BaseDataview {
|
||||
return null;
|
||||
}
|
||||
|
||||
const limit = Number.isFinite(override.categories) && override.categories > 0 ?
|
||||
override.categories :
|
||||
CATEGORIES_LIMIT;
|
||||
const limit = Number.isFinite(override.categories) && override.categories > 0
|
||||
? override.categories
|
||||
: CATEGORIES_LIMIT;
|
||||
|
||||
const aggregationSql = aggregationDataviewQueryTpl({
|
||||
override: override,
|
||||
@ -280,7 +280,7 @@ module.exports = class Aggregation extends BaseDataview {
|
||||
min_val = 0,
|
||||
max_val = 0,
|
||||
categories_count = 0
|
||||
} = result.rows[0] || {};
|
||||
} = result.rows[0] || {};
|
||||
|
||||
return {
|
||||
aggregation: this.aggregation,
|
||||
@ -303,9 +303,9 @@ module.exports = class Aggregation extends BaseDataview {
|
||||
|
||||
search (psql, userQuery, callback) {
|
||||
const escapedUserQuery = psql.escapeLiteral(`%${userQuery}%`);
|
||||
const value = this.aggregation !== 'count' && this.aggregationColumn ?
|
||||
`${this.aggregation}(${this.aggregationColumn})` :
|
||||
'count(1)';
|
||||
const value = this.aggregation !== 'count' && this.aggregationColumn
|
||||
? `${this.aggregation}(${this.aggregationColumn})`
|
||||
: 'count(1)';
|
||||
|
||||
// TODO unfiltered will be wrong as filters are already applied at this point
|
||||
const query = searchQueryTpl({
|
||||
@ -330,7 +330,7 @@ module.exports = class Aggregation extends BaseDataview {
|
||||
return callback(err, result);
|
||||
}
|
||||
|
||||
return callback(null, {type: this.getType(), categories: result.rows });
|
||||
return callback(null, { type: this.getType(), categories: result.rows });
|
||||
}, true); // use read-only transaction
|
||||
}
|
||||
|
||||
|
@ -16,8 +16,8 @@ const columnTypeQueryTpl = ctx => `SELECT pg_typeof(${ctx.column})::oid FROM (${
|
||||
|
||||
function getPGTypeName (pgType) {
|
||||
return {
|
||||
float: FLOAT_OIDS.hasOwnProperty(pgType),
|
||||
date: DATE_OIDS.hasOwnProperty(pgType)
|
||||
float: Object.prototype.hasOwnProperty.call(FLOAT_OIDS, pgType),
|
||||
date: Object.prototype.hasOwnProperty.call(DATE_OIDS, pgType)
|
||||
};
|
||||
}
|
||||
|
||||
@ -36,7 +36,7 @@ module.exports = class BaseDataview {
|
||||
result = this.format(result, override);
|
||||
result.type = this.getType();
|
||||
|
||||
//Overviews logging
|
||||
// Overviews logging
|
||||
const stats = {};
|
||||
|
||||
if (flags && flags.usesOverviews !== undefined) {
|
||||
|
@ -3,7 +3,7 @@
|
||||
const dataviews = require('.');
|
||||
|
||||
module.exports = class DataviewFactory {
|
||||
static get dataviews() {
|
||||
static get dataviews () {
|
||||
return Object.keys(dataviews).reduce((allDataviews, dataviewClassName) => {
|
||||
allDataviews[dataviewClassName.toLowerCase()] = dataviews[dataviewClassName];
|
||||
return allDataviews;
|
||||
|
@ -5,11 +5,11 @@ const debug = require('debug')('windshaft:dataview:formula');
|
||||
const utils = require('../../utils/query-utils');
|
||||
|
||||
const formulaQueryTpl = ctx =>
|
||||
`SELECT
|
||||
`SELECT
|
||||
${ctx.operation}(${utils.handleFloatColumn(ctx)}) AS result,
|
||||
${utils.countNULLs(ctx)} AS nulls_count
|
||||
${ctx.isFloatColumn ? `,${utils.countInfinites(ctx)} AS infinities_count,` : ``}
|
||||
${ctx.isFloatColumn ? `${utils.countNaNs(ctx)} AS nans_count` : ``}
|
||||
${ctx.isFloatColumn ? `,${utils.countInfinites(ctx)} AS infinities_count,` : ''}
|
||||
${ctx.isFloatColumn ? `${utils.countNaNs(ctx)} AS nans_count` : ''}
|
||||
FROM (${ctx.query}) __cdb_formula`;
|
||||
|
||||
const VALID_OPERATIONS = {
|
||||
@ -46,7 +46,7 @@ module.exports = class Formula extends BaseDataview {
|
||||
|
||||
_checkOptions (options) {
|
||||
if (typeof options.operation !== 'string') {
|
||||
throw new Error(`Formula expects 'operation' in dataview options`);
|
||||
throw new Error('Formula expects \'operation\' in dataview options');
|
||||
}
|
||||
|
||||
if (!VALID_OPERATIONS[options.operation]) {
|
||||
@ -54,11 +54,10 @@ module.exports = class Formula extends BaseDataview {
|
||||
}
|
||||
|
||||
if (options.operation !== 'count' && typeof options.column !== 'string') {
|
||||
throw new Error(`Formula expects 'column' in dataview options`);
|
||||
throw new Error('Formula expects \'column\' in dataview options');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
sql (psql, override, callback) {
|
||||
if (!callback) {
|
||||
callback = override;
|
||||
@ -92,16 +91,16 @@ module.exports = class Formula extends BaseDataview {
|
||||
const {
|
||||
result = 0,
|
||||
nulls_count = 0,
|
||||
nans_count,
|
||||
infinities_count
|
||||
nans_count: nansCount,
|
||||
infinities_count: infinitiesCount
|
||||
} = res.rows[0] || {};
|
||||
|
||||
return {
|
||||
operation: this.operation,
|
||||
result,
|
||||
nulls: nulls_count,
|
||||
nans: nans_count,
|
||||
infinities: infinities_count
|
||||
nans: nansCount,
|
||||
infinities: infinitiesCount
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -20,23 +20,23 @@ module.exports = class Histogram {
|
||||
let implementation = null;
|
||||
|
||||
switch (this._getHistogramSubtype(override)) {
|
||||
case DATE_HISTOGRAM:
|
||||
debug('Delegating to DateHistogram with options: %j and overriding: %j', this.options, override);
|
||||
implementation = new DateHistogram(this.query, this.options, this.queries);
|
||||
break;
|
||||
case NUMERIC_HISTOGRAM:
|
||||
debug('Delegating to NumericHistogram with options: %j and overriding: %j', this.options, override);
|
||||
implementation = new NumericHistogram(this.query, this.options, this.queries);
|
||||
break;
|
||||
default:
|
||||
throw new Error('Unsupported Histogram type');
|
||||
case DATE_HISTOGRAM:
|
||||
debug('Delegating to DateHistogram with options: %j and overriding: %j', this.options, override);
|
||||
implementation = new DateHistogram(this.query, this.options, this.queries);
|
||||
break;
|
||||
case NUMERIC_HISTOGRAM:
|
||||
debug('Delegating to NumericHistogram with options: %j and overriding: %j', this.options, override);
|
||||
implementation = new NumericHistogram(this.query, this.options, this.queries);
|
||||
break;
|
||||
default:
|
||||
throw new Error('Unsupported Histogram type');
|
||||
}
|
||||
|
||||
return implementation;
|
||||
}
|
||||
|
||||
_getHistogramSubtype (override) {
|
||||
if(this._isDateHistogram(override)) {
|
||||
if (this._isDateHistogram(override)) {
|
||||
return DATE_HISTOGRAM;
|
||||
}
|
||||
|
||||
@ -44,7 +44,8 @@ module.exports = class Histogram {
|
||||
}
|
||||
|
||||
_isDateHistogram (override = {}) {
|
||||
return (this.options.hasOwnProperty('aggregation') || override.hasOwnProperty('aggregation'));
|
||||
return (Object.prototype.hasOwnProperty.call(this.options, 'aggregation') ||
|
||||
Object.prototype.hasOwnProperty.call(override, 'aggregation'));
|
||||
}
|
||||
|
||||
getResult (psql, override, callback) {
|
||||
|
@ -62,7 +62,7 @@ module.exports = class BaseHistogram extends BaseDataview {
|
||||
}
|
||||
|
||||
_hasOverridenRange (override) {
|
||||
return override && override.hasOwnProperty('start') && override.hasOwnProperty('end');
|
||||
return override && Object.prototype.hasOwnProperty.call(override, 'start') && Object.prototype.hasOwnProperty.call(override, 'end');
|
||||
}
|
||||
|
||||
_getBinStart (override = {}) {
|
||||
|
@ -4,7 +4,6 @@ const BaseHistogram = require('./base-histogram');
|
||||
const debug = require('debug')('windshaft:dataview:date-histogram');
|
||||
const utils = require('../../../utils/query-utils');
|
||||
|
||||
|
||||
/**
|
||||
* Gets the name of a timezone with the same offset as the required
|
||||
* using the pg_timezone_names table. We do this because it's simpler to pass
|
||||
@ -32,18 +31,17 @@ WITH __wd_tz AS
|
||||
* the aggregation. Since the data stored is in epoch we need to adapt it to
|
||||
* our timezone so when calling date_trunc it falls into the correct bin
|
||||
*/
|
||||
function dataBucketsQuery(ctx) {
|
||||
var condition_str = '';
|
||||
function dataBucketsQuery (ctx) {
|
||||
var conditionStr = '';
|
||||
|
||||
if (ctx.start !== 0) {
|
||||
condition_str = `WHERE ${ctx.column} >= to_timestamp(${ctx.start})`;
|
||||
conditionStr = `WHERE ${ctx.column} >= to_timestamp(${ctx.start})`;
|
||||
}
|
||||
if (ctx.end !== 0) {
|
||||
if (condition_str === '') {
|
||||
condition_str = `WHERE ${ctx.column} <= to_timestamp(${ctx.end})`;
|
||||
}
|
||||
else {
|
||||
condition_str += ` and ${ctx.column} <= to_timestamp(${ctx.end})`;
|
||||
if (conditionStr === '') {
|
||||
conditionStr = `WHERE ${ctx.column} <= to_timestamp(${ctx.end})`;
|
||||
} else {
|
||||
conditionStr += ` and ${ctx.column} <= to_timestamp(${ctx.end})`;
|
||||
}
|
||||
}
|
||||
|
||||
@ -58,7 +56,7 @@ __wd_buckets AS
|
||||
(
|
||||
${ctx.query}
|
||||
) __source, __wd_tz
|
||||
${condition_str}
|
||||
${conditionStr}
|
||||
GROUP BY 1, __wd_tz.name
|
||||
),`;
|
||||
}
|
||||
@ -68,23 +66,23 @@ __wd_buckets AS
|
||||
* start and end date. If not provided we use the min and max generated from
|
||||
* the dataBucketsQuery
|
||||
*/
|
||||
function allBucketsArrayQuery(ctx) {
|
||||
var extra_from = ``;
|
||||
var series_start = ``;
|
||||
var series_end = ``;
|
||||
function allBucketsArrayQuery (ctx) {
|
||||
var extraFrom = '';
|
||||
var seriesStart = '';
|
||||
var seriesEnd = '';
|
||||
|
||||
if (ctx.start === 0) {
|
||||
extra_from = `, __wd_buckets GROUP BY __wd_tz.name`;
|
||||
series_start = `min(__wd_buckets.timestamp)`;
|
||||
extraFrom = ', __wd_buckets GROUP BY __wd_tz.name';
|
||||
seriesStart = 'min(__wd_buckets.timestamp)';
|
||||
} else {
|
||||
series_start = `date_trunc('${ctx.aggregation}', timezone(__wd_tz.name, to_timestamp(${ctx.start})))`;
|
||||
seriesStart = `date_trunc('${ctx.aggregation}', timezone(__wd_tz.name, to_timestamp(${ctx.start})))`;
|
||||
}
|
||||
|
||||
if (ctx.end === 0) {
|
||||
extra_from = `, __wd_buckets GROUP BY __wd_tz.name`;
|
||||
series_end = `max(__wd_buckets.timestamp)`;
|
||||
extraFrom = ', __wd_buckets GROUP BY __wd_tz.name';
|
||||
seriesEnd = 'max(__wd_buckets.timestamp)';
|
||||
} else {
|
||||
series_end = `date_trunc('${ctx.aggregation}', timezone(__wd_tz.name, to_timestamp(${ctx.end})))`;
|
||||
seriesEnd = `date_trunc('${ctx.aggregation}', timezone(__wd_tz.name, to_timestamp(${ctx.end})))`;
|
||||
}
|
||||
|
||||
return `
|
||||
@ -93,10 +91,10 @@ __wd_all_buckets AS
|
||||
SELECT ARRAY(
|
||||
SELECT
|
||||
generate_series(
|
||||
${series_start},
|
||||
${series_end},
|
||||
${seriesStart},
|
||||
${seriesEnd},
|
||||
interval '${ctx.interval}') as bin_start
|
||||
FROM __wd_tz${extra_from}
|
||||
FROM __wd_tz${extraFrom}
|
||||
) as bins
|
||||
)`;
|
||||
}
|
||||
@ -148,18 +146,18 @@ const dateIntervalQueryTpl = ctx => `
|
||||
const MAX_INTERVAL_VALUE = 100;
|
||||
|
||||
const DATE_AGGREGATIONS = {
|
||||
'auto': true,
|
||||
'second' : true,
|
||||
'minute': true,
|
||||
'hour': true,
|
||||
'day': true,
|
||||
'week': true,
|
||||
'month': true,
|
||||
'quarter': true,
|
||||
'year': true,
|
||||
'decade' : true,
|
||||
'century' : true,
|
||||
'millennium' : true
|
||||
auto: true,
|
||||
second: true,
|
||||
minute: true,
|
||||
hour: true,
|
||||
day: true,
|
||||
week: true,
|
||||
month: true,
|
||||
quarter: true,
|
||||
year: true,
|
||||
decade: true,
|
||||
century: true,
|
||||
millennium: true
|
||||
};
|
||||
|
||||
/**
|
||||
@ -219,8 +217,8 @@ ORDER BY bin ASC;
|
||||
return null;
|
||||
}
|
||||
|
||||
var interval = this._getAggregation(override) === 'quarter' ?
|
||||
'3 months' : '1 ' + this._getAggregation(override);
|
||||
var interval = this._getAggregation(override) === 'quarter'
|
||||
? '3 months' : '1 ' + this._getAggregation(override);
|
||||
|
||||
const histogramSql = this._buildQueryTpl({
|
||||
override: override,
|
||||
@ -239,7 +237,7 @@ ORDER BY bin ASC;
|
||||
}
|
||||
|
||||
_isValidAggregation (override) {
|
||||
return DATE_AGGREGATIONS.hasOwnProperty(this._getAggregation(override));
|
||||
return Object.prototype.hasOwnProperty.call(DATE_AGGREGATIONS, this._getAggregation(override));
|
||||
}
|
||||
|
||||
_getAutomaticAggregation (psql, callback) {
|
||||
@ -294,7 +292,7 @@ ORDER BY bin ASC;
|
||||
}
|
||||
|
||||
_getBuckets (result) {
|
||||
result.rows.forEach(function(row) {
|
||||
result.rows.forEach(function (row) {
|
||||
row.min = row.max = row.avg = row.timestamp;
|
||||
});
|
||||
|
||||
|
@ -26,7 +26,7 @@ const irqQueryTpl = ctx => `
max(${ctx.column}) AS __cdb_max_val,
min(${ctx.column}) AS __cdb_min_val,
count(1) AS __cdb_total_rows,
${ctx.irq ? ctx.irq : `0`} AS __cdb_iqr
${ctx.irq ? ctx.irq : '0'} AS __cdb_iqr
FROM
(
SELECT *
@ -51,10 +51,6 @@ Numeric histogram:
}
*/
module.exports = class NumericHistogram extends BaseHistogram {
constructor (query, options, queries) {
super(query, options, queries);
}
_buildQuery (psql, override, callback) {
const histogramSql = this._buildQueryTpl({
column: this._columnType === 'date' ? utils.columnCastTpl({ column: this.column }) : this.column,
@ -72,8 +68,7 @@ module.exports = class NumericHistogram extends BaseHistogram {
return callback(null, histogramSql);
}
/**
/**
* ctx: Object with the following values
* ctx.column -- Column for the histogram
* ctx.isFloatColumn - Whether the column is float or not
@ -85,35 +80,35 @@ module.exports = class NumericHistogram extends BaseHistogram {
* ctx.maxBins - If !full max bins to calculate [Optional]
*/
_buildQueryTpl (ctx) {
var extra_tables = ``;
var extra_queries = ``;
var extra_groupby = ``;
var extra_filter = ``;
var extraTables = '';
var extraQueries = '';
var extraGroupBy = '';
var extraFilter = '';
if (ctx.start < ctx.end) {
extra_filter = `
extraFilter = `
WHERE __ctx_query.${ctx.column} >= ${ctx.start}
AND __ctx_query.${ctx.column} <= ${ctx.end}
`;
} else {
ctx.end = `__cdb_basics.__cdb_max_val`;
ctx.start = `__cdb_basics.__cdb_min_val`;
extra_groupby = `, __cdb_basics.__cdb_max_val, __cdb_basics.__cdb_min_val`;
extra_tables = `, __cdb_basics`;
extra_queries = `WITH ${irqQueryTpl(ctx)}`;
ctx.end = '__cdb_basics.__cdb_max_val';
ctx.start = '__cdb_basics.__cdb_min_val';
extraGroupBy = ', __cdb_basics.__cdb_max_val, __cdb_basics.__cdb_min_val';
extraTables = ', __cdb_basics';
extraQueries = `WITH ${irqQueryTpl(ctx)}`;
}
if (ctx.bins <= 0) {
ctx.bins = `__cdb_basics.__cdb_bins_number`;
ctx.bins = '__cdb_basics.__cdb_bins_number';
ctx.irq = `percentile_disc(0.75) within group (order by ${ctx.column})
- percentile_disc(0.25) within group (order by ${ctx.column})`;
extra_groupby += `, __cdb_basics.__cdb_bins_number`;
extra_tables = `, __cdb_basics`;
extra_queries = `WITH ${irqQueryTpl(ctx)}`;
extraGroupBy += ', __cdb_basics.__cdb_bins_number';
extraTables = ', __cdb_basics';
extraQueries = `WITH ${irqQueryTpl(ctx)}`;
}
return `
${extra_queries}
${extraQueries}
SELECT
(${ctx.end} - ${ctx.start}) / ${ctx.bins}::float AS bin_width,
${ctx.bins} as bins_number,
@ -132,44 +127,44 @@ SELECT
END AS bin
FROM
(
SELECT * FROM (${ctx.query}) __ctx_query${extra_tables} ${extra_filter}
) __cdb_filtered_source_query${extra_tables}
GROUP BY 10${extra_groupby}
SELECT * FROM (${ctx.query}) __ctx_query${extraTables} ${extraFilter}
) __cdb_filtered_source_query${extraTables}
GROUP BY 10${extraGroupBy}
ORDER BY 10;`;
}
_hasOverridenBins (override) {
return override && override.hasOwnProperty('bins');
return override && Object.prototype.hasOwnProperty.call(override, 'bins');
}
_getSummary (result, override) {
const firstRow = result.rows[0] || {};
var total_nulls = 0;
var total_infinities = 0;
var total_nans = 0;
var total_avg = 0;
var total_count = 0;
var totalNulls = 0;
var totalInfinities = 0;
var totalNans = 0;
var totalAvg = 0;
var totalCount = 0;
result.rows.forEach(function(row) {
total_nulls += row.nulls_count;
total_infinities += row.infinities_count;
total_nans += row.nans_count;
total_avg += row.avg * row.freq;
total_count += row.freq;
result.rows.forEach(function (row) {
totalNulls += row.nulls_count;
totalInfinities += row.infinities_count;
totalNans += row.nans_count;
totalAvg += row.avg * row.freq;
totalCount += row.freq;
});
if (total_count !== 0) {
total_avg /= total_count;
if (totalCount !== 0) {
totalAvg /= totalCount;
}
return {
bin_width: firstRow.bin_width,
bins_count: firstRow.bins_number,
bins_start: this._populateBinStart(firstRow, override),
nulls: total_nulls,
infinities: total_infinities,
nans: total_nans,
avg: total_avg
nulls: totalNulls,
infinities: totalInfinities,
nans: totalNans,
avg: totalAvg
};
}
@ -180,7 +175,7 @@ ORDER BY 10;`;
_populateBinStart (firstRow, override = {}) {
let binStart;
if (override.hasOwnProperty('start')) {
if (Object.prototype.hasOwnProperty.call(override, 'start')) {
binStart = this._getBinStart(override);
} else {
binStart = firstRow.min;
@ -188,5 +183,4 @@ ORDER BY 10;`;
return binStart;
}
};
@ -91,7 +91,7 @@ var aggregationQueryTpl = dot.template([
var CATEGORIES_LIMIT = 6;
function Aggregation(query, options, queryRewriter, queryRewriteData, params, queries) {
function Aggregation (query, options, queryRewriter, queryRewriteData, params, queries) {
BaseOverviewsDataview.call(this, query, options, BaseDataview, queryRewriter, queryRewriteData, params, queries);
this._checkOptions(options);
@ -109,7 +109,7 @@ Aggregation.prototype.constructor = Aggregation;
module.exports = Aggregation;
Aggregation.prototype.sql = function(psql, override, callback) {
Aggregation.prototype.sql = function (psql, override, callback) {
var self = this;
if (!callback) {
@ -132,9 +132,9 @@ Aggregation.prototype.sql = function(psql, override, callback) {
}
var aggregationSql;
if (!!override.ownFilter) {
if (override.ownFilter) {
aggregationSql = [
"WITH",
'WITH',
[
filteredQueryTpl({
_isFloatColumn: this._isFloatColumn,
@ -173,7 +173,7 @@ Aggregation.prototype.sql = function(psql, override, callback) {
].join('\n');
} else {
aggregationSql = [
"WITH",
'WITH',
[
filteredQueryTpl({
_isFloatColumn: this._isFloatColumn,
@ -218,7 +218,7 @@ Aggregation.prototype.sql = function(psql, override, callback) {
var aggregationFnQueryTpl = {
count: dot.template('sum(_feature_count)'),
sum: dot.template('sum({{=it._aggregationColumn}}*_feature_count)')
sum: dot.template('sum({{=it._aggregationColumn}}*_feature_count)')
};
const VALID_OPERATIONS = {
@ -232,7 +232,7 @@ Aggregation.prototype._checkOptions = function (options) {
}
const requiredOptions = VALID_OPERATIONS[options.aggregation];
const missingOptions = requiredOptions.filter(requiredOption => !options.hasOwnProperty(requiredOption));
const missingOptions = requiredOptions.filter(requiredOption => !Object.prototype.hasOwnProperty.call(options, requiredOption));
if (missingOptions.length > 0) {
throw new Error(
@ -241,7 +241,7 @@ Aggregation.prototype._checkOptions = function (options) {
}
};
Aggregation.prototype.getAggregationSql = function() {
Aggregation.prototype.getAggregationSql = function () {
return aggregationFnQueryTpl[this.aggregation]({
_aggregationFn: this.aggregation,
_aggregationColumn: this.aggregationColumn || 1
@ -3,15 +3,15 @@
var _ = require('underscore');
var BaseDataview = require('../base');
function BaseOverviewsDataview(query, queryOptions, BaseDataview, queryRewriter, queryRewriteData, options, queries) {
this.BaseDataview = BaseDataview;
this.query = query;
this.queryOptions = queryOptions;
this.queryRewriter = queryRewriter;
this.queryRewriteData = queryRewriteData;
this.options = options;
this.queries = queries;
this.baseDataview = new this.BaseDataview(this.query, this.queryOptions, this.queries);
function BaseOverviewsDataview (query, queryOptions, BaseDataview, queryRewriter, queryRewriteData, options, queries) {
this.BaseDataview = BaseDataview;
this.query = query;
this.queryOptions = queryOptions;
this.queryRewriter = queryRewriter;
this.queryRewriteData = queryRewriteData;
this.options = options;
this.queries = queries;
this.baseDataview = new this.BaseDataview(this.query, this.queryOptions, this.queries);
}
module.exports = BaseOverviewsDataview;
@ -32,36 +32,36 @@ var SETTINGS = {
// Compute zoom level so that the the resolution grid size of the
// selected overview is smaller (zoomLevelFactor times smaller at least)
// than the bounding box size.
BaseOverviewsDataview.prototype.zoomLevelForBbox = function(bbox) {
BaseOverviewsDataview.prototype.zoomLevelForBbox = function (bbox) {
var pxPerTile = 256.0;
var earthWidth = 360.0;
// TODO: now we assume overviews are computed for 1-pixel tolerance;
// should use extended overviews metadata to compute this properly.
if ( bbox ) {
var bboxValues = _.map(bbox.split(','), function(v) { return +v; });
var w = Math.abs(bboxValues[2]-bboxValues[0]);
var h = Math.abs(bboxValues[3]-bboxValues[1]);
var maxDim = Math.min(w, h);
if (bbox) {
var bboxValues = _.map(bbox.split(','), function (v) { return +v; });
var w = Math.abs(bboxValues[2] - bboxValues[0]);
var h = Math.abs(bboxValues[3] - bboxValues[1]);
var maxDim = Math.min(w, h);
// Find minimum suitable z
// note that the QueryRewirter will use the minimum level overview
// of level >= z if it exists, and otherwise the base table
var z = Math.ceil(-Math.log(maxDim*pxPerTile/earthWidth/SETTINGS.zoomLevelFactor)/Math.log(2.0));
return Math.max(z, 0);
// Find minimum suitable z
// note that the QueryRewirter will use the minimum level overview
// of level >= z if it exists, and otherwise the base table
var z = Math.ceil(-Math.log(maxDim * pxPerTile / earthWidth / SETTINGS.zoomLevelFactor) / Math.log(2.0));
return Math.max(z, 0);
}
return 0;
};
BaseOverviewsDataview.prototype.rewrittenQuery = function(query) {
var zoom_level = this.zoomLevelForBbox(this.options.bbox);
return this.queryRewriter.query(query, this.queryRewriteData, { zoom_level: zoom_level });
BaseOverviewsDataview.prototype.rewrittenQuery = function (query) {
var zoomLevel = this.zoomLevelForBbox(this.options.bbox);
return this.queryRewriter.query(query, this.queryRewriteData, { zoom_level: zoomLevel });
};
// Default behaviour
BaseOverviewsDataview.prototype.defaultSql = function(psql, override, callback) {
BaseOverviewsDataview.prototype.defaultSql = function (psql, override, callback) {
var query = this.query;
var dataview = this.baseDataview;
if ( SETTINGS.defaultOverviews ) {
if (SETTINGS.defaultOverviews) {
query = this.rewrittenQuery(query);
dataview = new this.BaseDataview(query, this.queryOptions);
}
@ -70,22 +70,22 @@ BaseOverviewsDataview.prototype.defaultSql = function(psql, override, callback)
// default implementation that can be override in derived classes:
BaseOverviewsDataview.prototype.sql = function(psql, override, callback) {
BaseOverviewsDataview.prototype.sql = function (psql, override, callback) {
return this.defaultSql(psql, override, callback);
};
BaseOverviewsDataview.prototype.search = function(psql, userQuery, callback) {
BaseOverviewsDataview.prototype.search = function (psql, userQuery, callback) {
return this.baseDataview.search(psql, userQuery, callback);
};
BaseOverviewsDataview.prototype.format = function(result) {
BaseOverviewsDataview.prototype.format = function (result) {
return this.baseDataview.format(result);
};
BaseOverviewsDataview.prototype.getType = function() {
BaseOverviewsDataview.prototype.getType = function () {
return this.baseDataview.getType();
};
BaseOverviewsDataview.prototype.toString = function() {
BaseOverviewsDataview.prototype.toString = function () {
return this.baseDataview.toString();
};
@ -3,16 +3,16 @@
var parentFactory = require('../factory');
var dataviews = require('.');
function OverviewsDataviewFactory(queryRewriter, queryRewriteData, options) {
function OverviewsDataviewFactory (queryRewriter, queryRewriteData, options) {
this.queryRewriter = queryRewriter;
this.queryRewriteData = queryRewriteData;
this.options = options;
}
OverviewsDataviewFactory.prototype.getDataview = function(query, dataviewDefinition) {
OverviewsDataviewFactory.prototype.getDataview = function (query, dataviewDefinition) {
var type = dataviewDefinition.type;
var dataviews = OverviewsDataviewMetaFactory.dataviews;
if ( !this.queryRewriter || !this.queryRewriteData || !dataviews[type] ) {
if (!this.queryRewriter || !this.queryRewriteData || !dataviews[type]) {
return parentFactory.getDataview(query, dataviewDefinition);
}
return new dataviews[type](
@ -22,14 +22,14 @@ OverviewsDataviewFactory.prototype.getDataview = function(query, dataviewDefinit
};
var OverviewsDataviewMetaFactory = {
dataviews: Object.keys(dataviews).reduce(function(allDataviews, dataviewClassName) {
dataviews: Object.keys(dataviews).reduce(function (allDataviews, dataviewClassName) {
allDataviews[dataviewClassName.toLowerCase()] = dataviews[dataviewClassName];
return allDataviews;
}, {}),
getFactory: function(queryRewriter, queryRewriteData, options) {
getFactory: function (queryRewriter, queryRewriteData, options) {
return new OverviewsDataviewFactory(queryRewriter, queryRewriteData, options);
},
}
};
module.exports = OverviewsDataviewMetaFactory;
@ -14,28 +14,28 @@ const VALID_OPERATIONS = {
avg: true
};
/** Formulae to calculate the end result using _feature_count from the overview table*/
function dataviewResult(ctx) {
/** Formulae to calculate the end result using _feature_count from the overview table */
function dataviewResult (ctx) {
switch (ctx.operation) {
case 'count':
return `sum(_feature_count)`;
case 'sum':
return `sum(${utils.handleFloatColumn(ctx)}*_feature_count)`;
case 'avg':
return `sum(${utils.handleFloatColumn(ctx)}*_feature_count)/sum(_feature_count) `;
case 'count':
return 'sum(_feature_count)';
case 'sum':
return `sum(${utils.handleFloatColumn(ctx)}*_feature_count)`;
case 'avg':
return `sum(${utils.handleFloatColumn(ctx)}*_feature_count)/sum(_feature_count) `;
}
return `${ctx.operation}(${utils.handleFloatColumn(ctx)})`;
}
const formulaQueryTpl = ctx =>
`SELECT
`SELECT
${dataviewResult(ctx)} AS result,
${utils.countNULLs(ctx)} AS nulls_count
${ctx.isFloatColumn ? `,${utils.countInfinites(ctx)} AS infinities_count,` : ``}
${ctx.isFloatColumn ? `${utils.countNaNs(ctx)} AS nans_count` : ``}
${ctx.isFloatColumn ? `,${utils.countInfinites(ctx)} AS infinities_count,` : ''}
${ctx.isFloatColumn ? `${utils.countNaNs(ctx)} AS nans_count` : ''}
FROM (${ctx.query}) __cdb_formula`;
function Formula(query, options, queryRewriter, queryRewriteData, params, queries) {
function Formula (query, options, queryRewriter, queryRewriteData, params, queries) {
BaseOverviewsDataview.call(this, query, options, BaseDataview, queryRewriter, queryRewriteData, params, queries);
this.column = options.column || '1';
this.operation = options.operation;
@ -133,7 +133,7 @@ var histogramQueryTpl = dot.template([
'ORDER BY bin'
].join('\n'));
function Histogram(query, options, queryRewriter, queryRewriteData, params, queries) {
function Histogram (query, options, queryRewriter, queryRewriteData, params, queries) {
BaseOverviewsDataview.call(this, query, options, BaseDataview, queryRewriter, queryRewriteData, params, queries);
this.query = query;
@ -149,7 +149,7 @@ Histogram.prototype.constructor = Histogram;
module.exports = Histogram;
Histogram.prototype.sql = function(psql, override, callback) {
Histogram.prototype.sql = function (psql, override, callback) {
var self = this;
if (!callback) {
@ -157,7 +157,6 @@ Histogram.prototype.sql = function(psql, override, callback) {
override = {};
}
if (this._columnType === null) {
this.getColumnType(psql, this.column, this.queries.no_filters, function (err, type) {
// assume numeric, will fail later
@ -259,7 +258,7 @@ Histogram.prototype._buildQuery = function (override) {
}
var histogramSql = [
"WITH",
'WITH',
cteSql.join(',\n'),
histogramQueryTpl({
_isFloatColumn: this._columnType === 'float',
@ -1,7 +1,7 @@
'use strict';
module.exports = {
Aggregation: require('./aggregation'),
Formula: require('./formula'),
Histogram: require('./histogram')
Aggregation: require('./aggregation'),
Formula: require('./formula'),
Histogram: require('./histogram')
};
@ -5,33 +5,33 @@ var filters = {
range: require('./analysis/range')
};
function createFilter(filterDefinition) {
function createFilter (filterDefinition) {
var filterType = filterDefinition.type.toLowerCase();
if (!filters.hasOwnProperty(filterType)) {
if (!Object.prototype.hasOwnProperty.call(filters, filterType)) {
throw new Error('Unknown filter type: ' + filterType);
}
return new filters[filterType](filterDefinition.column, filterDefinition.params);
}
function AnalysisFilters(filters) {
function AnalysisFilters (filters) {
this.filters = filters;
}
AnalysisFilters.prototype.sql = function(rawSql) {
var filters = this.filters || {};
var applyFilters = {};
AnalysisFilters.prototype.sql = function (rawSql) {
var filters = this.filters || {};
var applyFilters = {};
return Object.keys(filters)
.filter(function(filterName) {
return applyFilters.hasOwnProperty(filterName) ? applyFilters[filterName] : true;
})
.map(function(filterName) {
var filterDefinition = filters[filterName];
return createFilter(filterDefinition);
})
.reduce(function(sql, filter) {
return filter.sql(sql);
}, rawSql);
return Object.keys(filters)
.filter(function (filterName) {
return Object.prototype.hasOwnProperty.call(applyFilters, filterName) ? applyFilters[filterName] : true;
})
.map(function (filterName) {
var filterDefinition = filters[filterName];
return createFilter(filterDefinition);
})
.reduce(function (sql, filter) {
return filter.sql(sql);
}, rawSql);
};
module.exports = AnalysisFilters;
@ -9,11 +9,11 @@ var filterQueryTpl = dot.template([
'FROM ({{=it._sql}}) _analysis_category_filter',
'WHERE {{=it._filters}}'
].join('\n'));
var escapeStringTpl = dot.template('$escape_{{=it._i}}${{=it._value}}$escape_{{=it._i}}$');
var escapeStringTpl = dot.template('$escape_{{=it._i}}${{=it._value}}$escape_{{=it._i}}$'); // eslint-disable-line no-template-curly-in-string
var inConditionTpl = dot.template('{{=it._column}} IN ({{=it._values}})');
var notInConditionTpl = dot.template('{{=it._column}} NOT IN ({{=it._values}})');
function Category(column, filterParams) {
function Category (column, filterParams) {
this.column = column;
if (!Array.isArray(filterParams.accept) && !Array.isArray(filterParams.reject)) {
@ -38,15 +38,15 @@ module.exports = Category;
- accept: [] => reject all
- reject: [] => accept all
*/
Category.prototype.sql = function(rawSql) {
Category.prototype.sql = function (rawSql) {
var valueFilters = [];
if (Array.isArray(this.accept)) {
if (this.accept.length > 0) {
valueFilters.push(inConditionTpl({
_column: this.column,
_values: this.accept.map(function(value, i) {
return Number.isFinite(value) ? value : escapeStringTpl({_i: i, _value: value});
_values: this.accept.map(function (value, i) {
return Number.isFinite(value) ? value : escapeStringTpl({ _i: i, _value: value });
}).join(',')
}));
} else {
@ -59,7 +59,7 @@ Category.prototype.sql = function(rawSql) {
valueFilters.push(notInConditionTpl({
_column: this.column,
_values: this.reject.map(function (value, i) {
return Number.isFinite(value) ? value : escapeStringTpl({_i: i, _value: value});
return Number.isFinite(value) ? value : escapeStringTpl({ _i: i, _value: value });
}).join(',')
}));
} else {
@ -8,7 +8,7 @@ var minFilterTpl = dot.template('{{=it._column}} >= {{=it._min}}');
var maxFilterTpl = dot.template('{{=it._column}} <= {{=it._max}}');
var filterQueryTpl = dot.template('SELECT * FROM ({{=it._sql}}) _analysis_range_filter WHERE {{=it._filter}}');
function Range(column, filterParams) {
function Range (column, filterParams) {
this.column = column;
if (!Number.isFinite(filterParams.min) && !Number.isFinite(filterParams.max)) {
@ -22,7 +22,7 @@ function Range(column, filterParams) {
module.exports = Range;
Range.prototype.sql = function(rawSql) {
Range.prototype.sql = function (rawSql) {
var minMaxFilter;
if (Number.isFinite(this.min) && Number.isFinite(this.max)) {
minMaxFilter = betweenFilterTpl({
@ -33,14 +33,14 @@ var LONGITUDE_RANGE = LONGITUDE_UPPER_BOUND - LONGITUDE_LOWER_BOUND;
“bbox”: "west,south,east,north"
}
*/
function BBox(filterDefinition, filterParams) {
function BBox (filterDefinition, filterParams) {
var bbox = filterParams.bbox;
if (!bbox) {
throw new Error('BBox filter expects to have a bbox param');
}
var bboxElements = bbox.split(',').map(function(e) { return +e; });
var bboxElements = bbox.split(',').map(function (e) { return +e; });
validateBboxElements(bboxElements);
@ -59,7 +59,7 @@ function BBox(filterDefinition, filterParams) {
this.bboxes = getBoundingBoxes(west, south, east, north);
}
function getBoundingBoxes(west, south, east, north) {
function getBoundingBoxes (west, south, east, north) {
var bboxes = [];
if (east - west >= 360) {
@ -75,10 +75,10 @@ function getBoundingBoxes(west, south, east, north) {
return bboxes;
}
function validateBboxElements(bboxElements) {
function validateBboxElements (bboxElements) {
var isNumericBbox = bboxElements
.map(function(n) { return Number.isFinite(n); })
.reduce(function(allFinite, isFinite) {
.map(function (n) { return Number.isFinite(n); })
.reduce(function (allFinite, isFinite) {
if (!allFinite) {
return false;
}
@ -90,7 +90,7 @@ function validateBboxElements(bboxElements) {
}
}
function adjustLongitudeRange(we) {
function adjustLongitudeRange (we) {
var west = we[0];
west -= LONGITUDE_LOWER_BOUND;
west = west - (LONGITUDE_RANGE * Math.floor(west / LONGITUDE_RANGE)) + LONGITUDE_LOWER_BOUND;
@ -106,11 +106,10 @@ module.exports.adjustLongitudeRange = adjustLongitudeRange;
module.exports.LATITUDE_MAX_VALUE = LATITUDE_MAX_VALUE;
module.exports.LONGITUDE_MAX_VALUE = LONGITUDE_UPPER_BOUND;
BBox.prototype.sql = function(rawSql) {
BBox.prototype.sql = function (rawSql) {
var bboxSql = filterQueryTpl({
_sql: rawSql,
_filters: this.bboxes.map(function(bbox) {
_filters: this.bboxes.map(function (bbox) {
return bboxFilterTpl({
_column: this.column,
_bbox: bbox.join(','),
@ -3,7 +3,7 @@
/**
* @param {String} token might match the following pattern: {user}@{tpl_id}@{token}:{cache_buster}
*/
function parse(token) {
function parse (token) {
var signer, cacheBuster;
var tokenSplit = token.split(':');
@ -14,10 +14,10 @@ function parse(token) {
}
tokenSplit = token.split('@');
if ( tokenSplit.length > 1 ) {
if (tokenSplit.length > 1) {
signer = tokenSplit.shift();
if ( tokenSplit.length > 1 ) {
/*var template_hash = */tokenSplit.shift(); // unused
if (tokenSplit.length > 1) {
/* var template_hash = */tokenSplit.shift(); // unused
}
token = tokenSplit.shift();
}
@ -4,11 +4,11 @@ const AggregationMapConfig = require('../../aggregation/aggregation-mapconfig');
const queryUtils = require('../../../utils/query-utils');
const unsupportedGeometryTypeErrorMessage = ctx =>
`Unsupported geometry type: ${ctx.geometryType}. ` +
`Unsupported geometry type: ${ctx.geometryType}. ` +
`Aggregation is available only for geometry type: ${AggregationMapConfig.SUPPORTED_GEOMETRY_TYPES}`;
const invalidAggregationParamValueErrorMessage = ctx =>
`Invalid value for 'aggregation' query param: ${ctx.value}. Valid ones are 'true' or 'false'`;
`Invalid value for 'aggregation' query param: ${ctx.value}. Valid ones are 'true' or 'false'`;
module.exports = class AggregationMapConfigAdapter {
constructor (pgConnection) {
@ -27,7 +27,6 @@ module.exports = class AggregationMapConfigAdapter {
return callback(err);
}
if (!this._shouldAdapt(mapConfig, params)) {
return callback(null, requestMapConfig);
}
@ -101,8 +100,7 @@ module.exports = class AggregationMapConfigAdapter {
try {
aggregationSql = mapConfig.getAggregatedQuery(index);
}
catch (error) {
} catch (error) {
return reject(error);
}
@ -7,13 +7,13 @@ var camshaft = require('camshaft');
var dot = require('dot');
dot.templateSettings.strip = false;
function AnalysisMapConfigAdapter(analysisBackend) {
function AnalysisMapConfigAdapter (analysisBackend) {
this.analysisBackend = analysisBackend;
}
module.exports = AnalysisMapConfigAdapter;
AnalysisMapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfig, params, context, callback) {
AnalysisMapConfigAdapter.prototype.getMapConfig = function (user, requestMapConfig, params, context, callback) {
// jshint maxcomplexity:7
var self = this;
@ -41,11 +41,11 @@ AnalysisMapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfi
return callback(errors);
}
var dataviewsFiltersBySourceId = Object.keys(dataviewsFilters).reduce(function(bySourceId, dataviewName) {
var dataviewsFiltersBySourceId = Object.keys(dataviewsFilters).reduce(function (bySourceId, dataviewName) {
var dataview = dataviews[dataviewName];
if (dataview) {
var sourceId = dataview.source.id;
if (!bySourceId.hasOwnProperty(sourceId)) {
if (!Object.prototype.hasOwnProperty.call(bySourceId, sourceId)) {
bySourceId[sourceId] = {};
}
@ -67,7 +67,7 @@ AnalysisMapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfi
// }}
requestMapConfig = appendFiltersToNodes(requestMapConfig, filters.analyses);
function createAnalysis(analysisDefinition, done) {
function createAnalysis (analysisDefinition, done) {
self.analysisBackend.create(analysisConfiguration, analysisDefinition, function (err, analysis) {
if (err) {
var error = new Error(err.message);
@ -85,22 +85,22 @@ AnalysisMapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfi
}
var analysesQueue = queue(1);
requestMapConfig.analyses.forEach(function(analysis) {
requestMapConfig.analyses.forEach(function (analysis) {
analysesQueue.defer(createAnalysis, analysis);
});
analysesQueue.awaitAll(function(err, analysesResults) {
analysesQueue.awaitAll(function (err, analysesResults) {
if (err) {
return callback(err);
}
var sourceId2Node = analysesResults.reduce(function(sourceId2Query, analysis) {
var sourceId2Node = analysesResults.reduce(function (sourceId2Query, analysis) {
var rootNode = analysis.getRoot();
if (rootNode.params && rootNode.params.id) {
sourceId2Query[rootNode.params.id] = rootNode;
}
analysis.getNodes().forEach(function(node) {
analysis.getNodes().forEach(function (node) {
if (node.params && node.params.id) {
sourceId2Query[node.params.id] = node;
}
@ -111,7 +111,7 @@ AnalysisMapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfi
var analysesErrors = [];
requestMapConfig.layers = requestMapConfig.layers.map(function(layer, layerIndex) {
requestMapConfig.layers = requestMapConfig.layers.map(function (layer, layerIndex) {
if (getLayerSourceId(layer)) {
var layerSourceId = getLayerSourceId(layer);
var layerNode = sourceId2Node[layerSourceId];
@ -135,7 +135,7 @@ AnalysisMapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfi
}
} else {
analysesErrors.push(
new Error('Missing analysis node.id="' + layerSourceId +'" for layer='+layerIndex)
new Error('Missing analysis node.id="' + layerSourceId + '" for layer=' + layerIndex)
);
}
}
@ -148,7 +148,7 @@ AnalysisMapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfi
}
// Augment dataviews with sql from analyses
Object.keys(dataviews).forEach(function(dataviewName) {
Object.keys(dataviews).forEach(function (dataviewName) {
var dataview = requestMapConfig.dataviews[dataviewName];
var dataviewSourceId = dataview.source.id;
var dataviewNode = sourceId2Node[dataviewSourceId];
@ -160,7 +160,7 @@ AnalysisMapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfi
own_filter_on: dataviewQuery(dataviewNode, dataviewName, true),
own_filter_off: dataviewQuery(dataviewNode, dataviewName, false),
no_filters: dataviewNode.getQuery(Object.keys(dataviewNode.getFilters())
.reduce(function(applyFilters, filterId) {
.reduce(function (applyFilters, filterId) {
applyFilters[filterId] = false;
return applyFilters;
}, {})
@ -180,13 +180,13 @@ AnalysisMapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfi
};
var SKIP_COLUMNS = {
'the_geom': true,
'the_geom_webmercator': true
the_geom: true,
the_geom_webmercator: true
};
function skipColumns(columnNames) {
function skipColumns (columnNames) {
return columnNames
.filter(function(columnName) { return !SKIP_COLUMNS[columnName]; });
.filter(function (columnName) { return !SKIP_COLUMNS[columnName]; });
}
var wrappedQueryTpl = dot.template([
@ -194,7 +194,7 @@ var wrappedQueryTpl = dot.template([
'FROM ({{=it._query}}) _cdb_analysis_query'
].join('\n'));
function layerQuery(node) {
function layerQuery (node) {
if (node.type === 'source') {
return node.getQuery();
}
@ -202,7 +202,7 @@ function layerQuery(node) {
return wrappedQueryTpl({ _query: node.getQuery(), _columns: _columns.join(', ') });
}
function dataviewQuery(node, dataviewName, ownFilter) {
function dataviewQuery (node, dataviewName, ownFilter) {
var applyFilters = {};
if (!ownFilter) {
applyFilters[dataviewName] = false;
@ -215,15 +215,15 @@ function dataviewQuery(node, dataviewName, ownFilter) {
return wrappedQueryTpl({ _query: node.getQuery(applyFilters), _columns: _columns.join(', ') });
}
function appendFiltersToNodes(requestMapConfig, dataviewsFiltersBySourceId) {
function appendFiltersToNodes (requestMapConfig, dataviewsFiltersBySourceId) {
var analyses = requestMapConfig.analyses || [];
dataviewsFiltersBySourceId = dataviewsFiltersBySourceId || {};
requestMapConfig.analyses = analyses.map(function(analysisDefinition) {
requestMapConfig.analyses = analyses.map(function (analysisDefinition) {
var analysisGraph = new camshaft.reference.AnalysisGraph(analysisDefinition);
var definition = analysisDefinition;
Object.keys(dataviewsFiltersBySourceId).forEach(function(sourceId) {
definition = analysisGraph.getDefinitionWith(sourceId, {filters: dataviewsFiltersBySourceId[sourceId] });
Object.keys(dataviewsFiltersBySourceId).forEach(function (sourceId) {
definition = analysisGraph.getDefinitionWith(sourceId, { filters: dataviewsFiltersBySourceId[sourceId] });
});
return definition;
@ -232,8 +232,8 @@ function appendFiltersToNodes(requestMapConfig, dataviewsFiltersBySourceId) {
return requestMapConfig;
}
function shouldAdaptLayers(requestMapConfig) {
return Array.isArray(requestMapConfig.layers) && requestMapConfig.layers.some(getLayerSourceId) ||
function shouldAdaptLayers (requestMapConfig) {
return (Array.isArray(requestMapConfig.layers) && requestMapConfig.layers.some(getLayerSourceId)) ||
(Array.isArray(requestMapConfig.analyses) && requestMapConfig.analyses.length > 0) ||
requestMapConfig.dataviews;
}
@ -242,7 +242,7 @@ var DATAVIEW_TYPE_2_FILTER_TYPE = {
aggregation: 'category',
histogram: 'range'
};
function getFilter(dataview, params) {
function getFilter (dataview, params) {
var type = dataview.type;
return {
@ -252,21 +252,21 @@ function getFilter(dataview, params) {
};
}
function getLayerSourceId(layer) {
function getLayerSourceId (layer) {
return layer.options.source && layer.options.source.id;
}
function getDataviewSourceId(dataview) {
function getDataviewSourceId (dataview) {
return dataview.source && dataview.source.id;
}
function getLayerDataviews(layer, dataviews) {
function getLayerDataviews (layer, dataviews) {
var layerDataviews = [];
var layerSourceId = getLayerSourceId(layer);
if (layerSourceId) {
var dataviewsList = getDataviewsList(dataviews);
dataviewsList.forEach(function(dataview) {
dataviewsList.forEach(function (dataview) {
if (getDataviewSourceId(dataview) === layerSourceId) {
layerDataviews.push(dataview);
}
@ -276,10 +276,10 @@ function getLayerDataviews(layer, dataviews) {
return layerDataviews;
}
function getDataviewsColumns(dataviews) {
return Object.keys(dataviews.reduce(function(columnsDict, dataview) {
getDataviewColumns(dataview).forEach(function(columnName) {
if (!!columnName) {
function getDataviewsColumns (dataviews) {
return Object.keys(dataviews.reduce(function (columnsDict, dataview) {
getDataviewColumns(dataview).forEach(function (columnName) {
if (columnName) {
columnsDict[columnName] = true;
}
});
@ -287,22 +287,22 @@ function getDataviewsColumns(dataviews) {
}, {}));
}
function getDataviewColumns(dataview) {
function getDataviewColumns (dataview) {
var columns = [];
var options = dataview.options;
['column', 'aggregationColumn'].forEach(function(opt) {
if (options.hasOwnProperty(opt) && !!options[opt]) {
['column', 'aggregationColumn'].forEach(function (opt) {
if (Object.prototype.hasOwnProperty.call(options, opt) && !!options[opt]) {
columns.push(options[opt]);
}
});
return columns;
}
function getDataviewsList(dataviews) {
return Object.keys(dataviews).map(function(dataviewKey) { return dataviews[dataviewKey]; });
function getDataviewsList (dataviews) {
return Object.keys(dataviews).map(function (dataviewKey) { return dataviews[dataviewKey]; });
}
function getDataviewsErrors(dataviews) {
function getDataviewsErrors (dataviews) {
var dataviewType = typeof dataviews;
if (dataviewType !== 'object') {
return [new Error('"dataviews" must be a valid JSON object: "' + dataviewType + '" type found')];
@ -314,9 +314,9 @@ function getDataviewsErrors(dataviews) {
var errors = [];
Object.keys(dataviews).forEach(function(dataviewName) {
Object.keys(dataviews).forEach(function (dataviewName) {
var dataview = dataviews[dataviewName];
if (!dataview.hasOwnProperty('source') || !dataview.source.id) {
if (!Object.prototype.hasOwnProperty.call(dataview, 'source') || !dataview.source.id) {
errors.push(new Error('Dataview "' + dataviewName + '" is missing `source.id` attribute'));
}
@ -328,13 +328,13 @@ function getDataviewsErrors(dataviews) {
return errors;
}
function getMissingDataviewsSourceIds(dataviews, sourceId2Node) {
function getMissingDataviewsSourceIds (dataviews, sourceId2Node) {
var missingDataviewsSourceIds = [];
Object.keys(dataviews).forEach(function(dataviewName) {
Object.keys(dataviews).forEach(function (dataviewName) {
var dataview = dataviews[dataviewName];
var dataviewSourceId = getDataviewSourceId(dataview);
if (!sourceId2Node.hasOwnProperty(dataviewSourceId)) {
missingDataviewsSourceIds.push(new AnalysisError('Node with `source.id="' + dataviewSourceId +'"`' +
if (!Object.prototype.hasOwnProperty.call(sourceId2Node, dataviewSourceId)) {
missingDataviewsSourceIds.push(new AnalysisError('Node with `source.id="' + dataviewSourceId + '"`' +
' not found in analyses for dataview "' + dataviewName + '"'));
}
});
@ -342,19 +342,19 @@ function getMissingDataviewsSourceIds(dataviews, sourceId2Node) {
return missingDataviewsSourceIds;
}
function AnalysisError(message) {
function AnalysisError (message) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.type = 'analysis';
this.message = message;
}
function getAllAffectedTablesFromSourceNodes(node) {
function getAllAffectedTablesFromSourceNodes (node) {
var affectedTables = node.getAllInputNodes(function (node) {
return node.getType() === 'source';
}).reduce(function(list, node) {
}).reduce(function (list, node) {
return list.concat(node.getAffectedTables());
},[]);
}, []);
return affectedTables;
}
@ -1,12 +1,11 @@
'use strict';
function DataviewsWidgetsMapConfigAdapter() {
function DataviewsWidgetsMapConfigAdapter () {
}
module.exports = DataviewsWidgetsMapConfigAdapter;
DataviewsWidgetsMapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfig, params, context, callback) {
DataviewsWidgetsMapConfigAdapter.prototype.getMapConfig = function (user, requestMapConfig, params, context, callback) {
if (!shouldAdapt(requestMapConfig)) {
return callback(null, requestMapConfig);
}
@ -15,7 +14,7 @@ DataviewsWidgetsMapConfigAdapter.prototype.getMapConfig = function(user, request
requestMapConfig.analyses = requestMapConfig.analyses || [];
requestMapConfig.dataviews = requestMapConfig.dataviews || {};
requestMapConfig.layers.forEach(function(layer, index) {
requestMapConfig.layers.forEach(function (layer, index) {
var layerSourceId = getLayerSourceId(layer);
if (!layer.options.widgets) {
@ -41,7 +40,7 @@ DataviewsWidgetsMapConfigAdapter.prototype.getMapConfig = function(user, request
}
var source = { id: dataviewSourceId };
var layerWidgets = layer.options.widgets || {};
Object.keys(layerWidgets).forEach(function(widgetId) {
Object.keys(layerWidgets).forEach(function (widgetId) {
var dataview = layerWidgets[widgetId];
requestMapConfig.dataviews[widgetId] = {
source: source,
@ -54,7 +53,7 @@ DataviewsWidgetsMapConfigAdapter.prototype.getMapConfig = function(user, request
delete layer.options.sql;
// don't delete widgets for now as it might be useful for old clients
//delete layer.options.widgets;
// delete layer.options.widgets;
});
// filters have to be rewritten also
@ -62,9 +61,9 @@ DataviewsWidgetsMapConfigAdapter.prototype.getMapConfig = function(user, request
var layersFilters = filters.layers || [];
filters.dataviews = filters.dataviews || {};
layersFilters.forEach(function(layerFilters) {
Object.keys(layerFilters).forEach(function(filterName) {
if (!filters.dataviews.hasOwnProperty(filterName)) {
layersFilters.forEach(function (layerFilters) {
Object.keys(layerFilters).forEach(function (filterName) {
if (!Object.prototype.hasOwnProperty.call(filters.dataviews, filterName)) {
filters.dataviews[filterName] = layerFilters[filterName];
}
});
@ -77,17 +76,17 @@ DataviewsWidgetsMapConfigAdapter.prototype.getMapConfig = function(user, request
return callback(null, requestMapConfig);
};
function shouldAdapt(requestMapConfig) {
return Array.isArray(requestMapConfig.layers) && requestMapConfig.layers.some(function hasWidgets(layer) {
function shouldAdapt (requestMapConfig) {
return Array.isArray(requestMapConfig.layers) && requestMapConfig.layers.some(function hasWidgets (layer) {
return layer.options && layer.options.widgets && Object.keys(layer.options.widgets).length > 0;
});
}
function getLayerSourceId(layer) {
function getLayerSourceId (layer) {
return layer.options.source && layer.options.source.id;
}
function getFilters(params) {
function getFilters (params) {
var filters = {};
if (params.filters) {
try {
@ -1,19 +1,19 @@
'use strict';
function MapConfigAdapter(adapters) {
function MapConfigAdapter (adapters) {
this.adapters = Array.isArray(adapters) ? adapters : Array.apply(null, arguments);
}
module.exports = MapConfigAdapter;
MapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfig, params, context, callback) {
MapConfigAdapter.prototype.getMapConfig = function (user, requestMapConfig, params, context, callback) {
var self = this;
var i = 0;
var tasksLeft = this.adapters.length;
let mapConfigStats = {};
function next(err, _requestMapConfig, adapterStats = {}) {
function next (err, _requestMapConfig, adapterStats = {}) {
if (err) {
return callback(err);
}
@ -1,6 +1,6 @@
'use strict';
function MapConfigBufferSizeAdapter() {
function MapConfigBufferSizeAdapter () {
this.formats = ['png', 'png32', 'mvt', 'grid.json'];
}
@ -4,7 +4,7 @@ var queue = require('queue-async');
var _ = require('underscore');
var Datasource = require('windshaft').model.Datasource;
function MapConfigNamedLayersAdapter(templateMaps, pgConnection) {
function MapConfigNamedLayersAdapter (templateMaps, pgConnection) {
this.templateMaps = templateMaps;
this.pgConnection = pgConnection;
}
@ -22,9 +22,8 @@ MapConfigNamedLayersAdapter.prototype.getMapConfig = function (user, requestMapC
var adaptLayersQueue = queue(layers.length);
function adaptLayer(layer, done) {
function adaptLayer (layer, done) {
if (isNamedTypeLayer(layer)) {
if (!layer.options.name) {
return done(new Error('Missing Named Map `name` in layer options'));
}
@ -33,13 +32,13 @@ MapConfigNamedLayersAdapter.prototype.getMapConfig = function (user, requestMapC
var templateConfigParams = layer.options.config || {};
var templateAuthTokens = layer.options.auth_tokens;
self.templateMaps.getTemplate(user, templateName, function(err, template) {
self.templateMaps.getTemplate(user, templateName, function (err, template) {
if (err || !template) {
return done(new Error("Template '" + templateName + "' of user '" + user + "' not found"));
}
if (self.templateMaps.isAuthorized(template, templateAuthTokens)) {
var nestedNamedLayers = template.layergroup.layers.filter(function(layer) {
var nestedNamedLayers = template.layergroup.layers.filter(function (layer) {
return layer.type === 'named';
});
@ -63,7 +62,6 @@ MapConfigNamedLayersAdapter.prototype.getMapConfig = function (user, requestMapC
return done(unauthorizedError);
}
});
} else {
return done(null, {
datasource: false,
@ -74,7 +72,7 @@ MapConfigNamedLayersAdapter.prototype.getMapConfig = function (user, requestMapC
var datasourceBuilder = new Datasource.Builder();
function layersAdaptQueueFinish(err, layersResults) {
function layersAdaptQueueFinish (err, layersResults) {
if (err) {
return callback(err);
}
@ -83,26 +81,24 @@ MapConfigNamedLayersAdapter.prototype.getMapConfig = function (user, requestMapC
return callback(new Error('Missing layers array from layergroup config'));
}
var layers = [],
currentLayerIndex = 0;
var layers = [];
var currentLayerIndex = 0;
layersResults.forEach(function(layersResult) {
layersResult.layers.forEach(function(layer) {
layersResults.forEach(function (layersResult) {
layersResult.layers.forEach(function (layer) {
layers.push(layer);
if (layersResult.datasource) {
datasourceBuilder.withLayerDatasource(currentLayerIndex, {
user: dbAuth.dbuser,
// Used internally (PSQL)
// Used internally (PSQL)
pass: dbAuth.dbpassword,
dbpassword: dbAuth.dbpassword,
// Used by Mapnik
// Used by Mapnik
password: dbAuth.dbpassword
});
}
currentLayerIndex++;
});
});
requestMapConfig.layers = layers;
@ -111,15 +107,14 @@ MapConfigNamedLayersAdapter.prototype.getMapConfig = function (user, requestMapC
return callback(null, requestMapConfig);
}
var dbAuth = {};
if (_.some(layers, isNamedTypeLayer)) {
this.pgConnection.setDBAuth(user, dbAuth, 'master', function(err) {
this.pgConnection.setDBAuth(user, dbAuth, 'master', function (err) {
if (err) {
return callback(err);
}
layers.forEach(function(layer) {
layers.forEach(function (layer) {
adaptLayersQueue.defer(adaptLayer, layer);
});
adaptLayersQueue.awaitAll(layersAdaptQueueFinish);
@ -128,9 +123,8 @@ MapConfigNamedLayersAdapter.prototype.getMapConfig = function (user, requestMapC
context.datasource = datasourceBuilder.build();
return callback(null, requestMapConfig);
}
};
function isNamedTypeLayer(layer) {
function isNamedTypeLayer (layer) {
return layer.type === 'named';
}
@ -4,7 +4,7 @@ var queue = require('queue-async');
var _ = require('underscore');
const AggregationMapConfig = require('../../aggregation/aggregation-mapconfig');
function MapConfigOverviewsAdapter(overviewsMetadataBackend, filterStatsBackend) {
function MapConfigOverviewsAdapter (overviewsMetadataBackend, filterStatsBackend) {
this.overviewsMetadataBackend = overviewsMetadataBackend;
this.filterStatsBackend = filterStatsBackend;
}
@ -115,12 +115,12 @@ function getUnfilteredQuery (analysesResults, layer) {
if (node) {
var filters = node.getFilters();
var filters_disabler = Object.keys(filters).reduce(function (disabler, filter_id) {
disabler[filter_id] = false;
var filtersDisabler = Object.keys(filters).reduce(function (disabler, filterId) {
disabler[filterId] = false;
return disabler;
}, {});
return node.getQuery(filters_disabler);
return node.getQuery(filtersDisabler);
}
}
}
@ -1,14 +1,13 @@
'use strict';
function SqlWrapMapConfigAdapter() {
function SqlWrapMapConfigAdapter () {
}
module.exports = SqlWrapMapConfigAdapter;
SqlWrapMapConfigAdapter.prototype.getMapConfig = function(user, requestMapConfig, params, context, callback) {
SqlWrapMapConfigAdapter.prototype.getMapConfig = function (user, requestMapConfig, params, context, callback) {
if (requestMapConfig && Array.isArray(requestMapConfig.layers)) {
requestMapConfig.layers = requestMapConfig.layers.map(function(layer) {
requestMapConfig.layers = requestMapConfig.layers.map(function (layer) {
if (layer.options) {
var sqlQueryWrap = layer.options.sql_wrap;
if (sqlQueryWrap) {
@ -13,7 +13,7 @@ var MapConfig = require('windshaft').model.MapConfig;
const dbParamsFromReqParams = require('../../../utils/database-params');
function TurboCartoAdapter() {
function TurboCartoAdapter () {
}
module.exports = TurboCartoAdapter;
@ -29,7 +29,7 @@ TurboCartoAdapter.prototype.getMapConfig = function (user, requestMapConfig, par
var parseCartoQueue = queue(layers.length);
layers.forEach(function(layer, index) {
layers.forEach(function (layer, index) {
var layerId = MapConfig.getLayerId(requestMapConfig, index);
parseCartoQueue.defer(self._parseCartoCss.bind(self), user, params, layer, index, layerId);
});
@ -39,7 +39,7 @@ TurboCartoAdapter.prototype.getMapConfig = function (user, requestMapConfig, par
return callback(err);
}
var errors = results.reduce(function(errors, result) {
var errors = results.reduce(function (errors, result) {
if (result.error) {
errors.push(result.error);
}
@ -49,9 +49,9 @@ TurboCartoAdapter.prototype.getMapConfig = function (user, requestMapConfig, par
return callback(errors);
}
requestMapConfig.layers = results.map(function(result) { return result.layer; });
requestMapConfig.layers = results.map(function (result) { return result.layer; });
context.turboCarto = {
layers: results.map(function(result) {
layers: results.map(function (result) {
return result.meta;
})
};
@ -91,7 +91,7 @@ TurboCartoAdapter.prototype._parseCartoCss = function (username, params, layer,
}
var pg = new PSQL(dbParamsFromReqParams(params));
function processCallback(err, cartocss, meta) {
function processCallback (err, cartocss, meta) {
// Only return turbo-carto errors
if (err && err.name === 'TurboCartoError') {
var error = new Error(err.message);
@ -121,8 +121,8 @@ TurboCartoAdapter.prototype._parseCartoCss = function (username, params, layer,
// For wrapped queries we'll derive the tokens from the data extent
// instead of the whole Earth/root tile.
var self = this;
var tokensQuery = tokensQueryTpl({_sql: layerRawSql});
return pg.query(tokensQuery, function(err, resultSet) {
var tokensQuery = tokensQueryTpl({ _sql: layerRawSql });
return pg.query(tokensQuery, function (err, resultSet) {
if (err) {
return processCallback(err);
}
@ -159,6 +159,6 @@ TurboCartoAdapter.prototype.process = function (psql, cartocss, sql, callback) {
turboCarto(cartocss, datasource, callback);
};
function shouldParseLayerCartocss(layer) {
function shouldParseLayerCartocss (layer) {
return layer && layer.options && layer.options.cartocss && layer.options.sql;
}
Some files were not shown because too many files have changed in this diff.