Merge branch 'master' into response-time-limit

commit 14f85abd39

NEWS.md (30 lines changed)
@@ -1,9 +1,37 @@
# Changelog

## 3.9.9
## 3.11.1

Released 2017-mm-dd


## 3.11.0

Released 2017-08-08

Announcements:

- Allow overriding with any aggregation for histograms instantiated without an aggregation.

Bug fixes:

- Apply the timezone after truncating the minimum date of each bin when calculating timestamps in time-series.
- Support timestamps with time zones when calculating the number of bins in time-series.
- Fixed a name-collision issue while building the time-series query.


## 3.10.1

Released 2017-08-04

Bug fixes:

- Exclude Infinities & NaNs from ramps #719.
- Fixed an issue in time-series when the aggregation starts at 1970-01-01 (epoch) #720.


## 3.10.0

Released 2017-08-03

Announcements:

- Improve time-series dataview: it now supports date aggregations (e.g. daily, weekly, monthly) and timezones (UTC by default) #698.
- Support special numeric values (±Infinity, NaN) for JSON responses #706.


## 3.9.8

Released 2017-07-21
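Several of the changelog entries above come down to two new optional query parameters on the dataview endpoints, `aggregation` and `offset`. A minimal sketch of a request that uses them (the hostname, layergroup token and dataview name below are placeholders, not taken from this diff):

```javascript
var qs = require('querystring');

// Hypothetical layergroup token and dataview name, for illustration only.
var layergroupId = '<layergroup-token>';
var params = {
    aggregation: 'week',   // date aggregation for a time-series histogram
    offset: -7200,         // UTC offset in seconds
    own_filter: 0
};

var url = 'https://example.carto.com/api/v1/map/' + layergroupId +
    '/dataview/time_histogram?' + qs.stringify(params);

console.log(url);
```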
@@ -94,7 +94,7 @@ function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
}

function getOverrideParams(params, ownFilter) {
return _.reduce(_.pick(params, 'start', 'end', 'bins'),
var overrideParams = _.reduce(_.pick(params, 'start', 'end', 'bins', 'offset'),
function castNumbers(overrides, val, k) {
if (!Number.isFinite(+val)) {
throw new Error('Invalid number format for parameter \'' + k + '\'');
@@ -104,6 +104,13 @@ function getOverrideParams(params, ownFilter) {
},
{ownFilter: ownFilter}
);

// validation will be delegated to the proper dataview
if (params.aggregation !== undefined) {
overrideParams.aggregation = params.aggregation;
}

return overrideParams;
}

DataviewBackend.prototype.search = function (mapConfigProvider, user, params, callback) {
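The new `offset` entry above rides on the same cast-and-collect pattern as the existing numeric overrides. A standalone sketch of that pattern (the sample input is made up and this is not the project's actual module):

```javascript
var _ = require('underscore');

// Pick only the numeric override params, cast each one, and fail fast
// on anything that is not a finite number.
function castOverrides(params) {
    return _.reduce(_.pick(params, 'start', 'end', 'bins', 'offset'),
        function castNumbers(overrides, val, k) {
            if (!Number.isFinite(+val)) {
                throw new Error('Invalid number format for parameter \'' + k + '\'');
            }
            overrides[k] = +val;
            return overrides;
        },
        {}
    );
}

console.log(castOverrides({ start: '0', end: '3600', offset: '-7200', q: 'ignored' }));
// { start: 0, end: 3600, offset: -7200 }
```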
@@ -10,7 +10,13 @@ function createTemplate(method) {
'max({{=it._column}}) max_val,',
'avg({{=it._column}}) avg_val,',
method,
'FROM ({{=it._sql}}) _table_sql WHERE {{=it._column}} IS NOT NULL'
'FROM ({{=it._sql}}) _table_sql WHERE {{=it._column}} IS NOT NULL',
'AND',
' {{=it._column}} != \'infinity\'::float',
'AND',
' {{=it._column}} != \'-infinity\'::float',
'AND',
' {{=it._column}} != \'NaN\'::float'
].join('\n'));
}
@@ -17,16 +17,8 @@ var REQUEST_QUERY_PARAMS_WHITELIST = [
'zoom',
'lon',
'lat',
// widgets & filters
'filters', // json
'own_filter', // 0, 1
'bbox', // w,s,e,n
'bins', // number
'start', // number
'end', // number
'column_type', // string
// widgets search
'q'
// analysis
'filters' // json
];

function BaseController(authApi, pgConnection) {
@@ -79,19 +79,51 @@ LayergroupController.prototype.register = function(app) {

// Undocumented/non-supported API endpoint methods.
// Use at your own peril.
app.get(app.base_url_mapconfig +
'/:token/dataview/:dataviewName', cors(), userMiddleware,
this.dataview.bind(this));
app.get(app.base_url_mapconfig +
'/:token/:layer/widget/:dataviewName', cors(), userMiddleware,
this.dataview.bind(this));

app.get(app.base_url_mapconfig +
'/:token/dataview/:dataviewName/search', cors(), userMiddleware,
this.dataviewSearch.bind(this));
app.get(app.base_url_mapconfig +
'/:token/:layer/widget/:dataviewName/search', cors(), userMiddleware,
this.dataviewSearch.bind(this));
var allowedDataviewQueryParams = [
'filters', // json
'own_filter', // 0, 1
'bbox', // w,s,e,n
'start', // number
'end', // number
'column_type', // string
'bins', // number
'aggregation', // string
'offset', // number
'q' // widgets search
];

app.get(
app.base_url_mapconfig + '/:token/dataview/:dataviewName',
cors(),
userMiddleware,
allowQueryParams(allowedDataviewQueryParams),
this.dataview.bind(this)
);

app.get(
app.base_url_mapconfig + '/:token/:layer/widget/:dataviewName',
cors(),
userMiddleware,
allowQueryParams(allowedDataviewQueryParams),
this.dataview.bind(this)
);

app.get(
app.base_url_mapconfig + '/:token/dataview/:dataviewName/search',
cors(),
userMiddleware,
allowQueryParams(allowedDataviewQueryParams),
this.dataviewSearch.bind(this)
);

app.get(
app.base_url_mapconfig + '/:token/:layer/widget/:dataviewName/search',
cors(),
userMiddleware,
allowQueryParams(allowedDataviewQueryParams),
this.dataviewSearch.bind(this)
);

app.get(app.base_url_mapconfig +
'/:token/analysis/node/:nodeId', cors(), userMiddleware,
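`allowQueryParams` is an existing middleware in this codebase and its implementation is not shown in this diff; the sketch below is only one plausible shape for a per-endpoint query-param whitelist, with all behaviour assumed:

```javascript
var _ = require('underscore');

// Assumed behaviour: drop every query param the endpoint did not whitelist.
// The real allowQueryParams may store the whitelist elsewhere or behave differently.
function allowQueryParams(allowedParams) {
    return function allowQueryParamsMiddleware(req, res, next) {
        req.query = _.pick(req.query, allowedParams);
        next();
    };
}

// Usage mirrors the route registrations above:
// app.get('/:token/dataview/:dataviewName', allowQueryParams(allowedDataviewQueryParams), handler);
```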
@@ -8,6 +8,10 @@ module.exports = BaseDataview;
BaseDataview.prototype.getResult = function(psql, override, callback) {
var self = this;
this.sql(psql, override, function(err, query) {
if (err) {
return callback(err);
}

psql.query(query, function(err, result) {
if (err) {
return callback(err, result);
@@ -7,13 +7,54 @@ dot.templateSettings.strip = false;

var columnCastTpl = dot.template("date_part('epoch', {{=it.column}})");

var dateIntervalQueryTpl = dot.template([
'WITH',
'__cdb_dates AS (',
' SELECT',
' MAX({{=it.column}}::timestamp) AS __cdb_end,',
' MIN({{=it.column}}::timestamp) AS __cdb_start',
' FROM ({{=it.query}}) __cdb_source',
'),',
'__cdb_interval_in_days AS (',
' SELECT' ,
' DATE_PART(\'day\', __cdb_end - __cdb_start) AS __cdb_days',
' FROM __cdb_dates',
'),',
'__cdb_interval_in_hours AS (',
' SELECT',
' __cdb_days * 24 + DATE_PART(\'hour\', __cdb_end - __cdb_start) AS __cdb_hours',
' FROM __cdb_interval_in_days, __cdb_dates',
'),',
'__cdb_interval_in_minutes AS (',
' SELECT',
' __cdb_hours * 60 + DATE_PART(\'minute\', __cdb_end - __cdb_start) AS __cdb_minutes',
' FROM __cdb_interval_in_hours, __cdb_dates',
'),',
'__cdb_interval_in_seconds AS (',
' SELECT',
' __cdb_minutes * 60 + DATE_PART(\'second\', __cdb_end - __cdb_start) AS __cdb_seconds',
' FROM __cdb_interval_in_minutes, __cdb_dates',
')',
'SELECT',
' ROUND(__cdb_days / 365) AS year,',
' ROUND(__cdb_days / 90) AS quarter,',
' ROUND(__cdb_days / 30) AS month,',
' ROUND(__cdb_days / 7) AS week,',
' __cdb_days AS day,',
' __cdb_hours AS hour,',
' __cdb_minutes AS minute,',
' __cdb_seconds AS second',
'FROM __cdb_interval_in_days, __cdb_interval_in_hours, __cdb_interval_in_minutes, __cdb_interval_in_seconds'
].join('\n'));

var MAX_INTERVAL_VALUE = 366;
var BIN_MIN_NUMBER = 6;
var BIN_MAX_NUMBER = 48;

var filteredQueryTpl = dot.template([
'filtered_source AS (',
'__cdb_filtered_source AS (',
' SELECT *',
' FROM ({{=it._query}}) _cdb_filtered_source',
' FROM ({{=it._query}}) __cdb_filtered_source_query',
' WHERE',
' {{=it._column}} IS NOT NULL',
' {{?it._isFloatColumn}}AND',
@@ -26,74 +67,74 @@ var filteredQueryTpl = dot.template([
].join(' \n'));

var basicsQueryTpl = dot.template([
'basics AS (',
'__cdb_basics AS (',
' SELECT',
' max({{=it._column}}) AS max_val, min({{=it._column}}) AS min_val,',
' avg({{=it._column}}) AS avg_val, count(1) AS total_rows',
' FROM filtered_source',
' max({{=it._column}}) AS __cdb_max_val, min({{=it._column}}) AS __cdb_min_val,',
' avg({{=it._column}}) AS __cdb_avg_val, count(1) AS __cdb_total_rows',
' FROM __cdb_filtered_source',
')'
].join(' \n'));

var overrideBasicsQueryTpl = dot.template([
'basics AS (',
'__cdb_basics AS (',
' SELECT',
' max({{=it._end}}) AS max_val, min({{=it._start}}) AS min_val,',
' avg({{=it._column}}) AS avg_val, count(1) AS total_rows',
' FROM filtered_source',
' max({{=it._end}}) AS __cdb_max_val, min({{=it._start}}) AS __cdb_min_val,',
' avg({{=it._column}}) AS __cdb_avg_val, count(1) AS __cdb_total_rows',
' FROM __cdb_filtered_source',
')'
].join('\n'));

var iqrQueryTpl = dot.template([
'iqrange AS (',
' SELECT max(quartile_max) - min(quartile_max) AS iqr',
'__cdb_iqrange AS (',
' SELECT max(quartile_max) - min(quartile_max) AS __cdb_iqr',
' FROM (',
' SELECT quartile, max(_cdb_iqr_column) AS quartile_max from (',
' SELECT {{=it._column}} AS _cdb_iqr_column, ntile(4) over (order by {{=it._column}}',
' ) AS quartile',
' FROM filtered_source) _cdb_quartiles',
' FROM __cdb_filtered_source) _cdb_quartiles',
' WHERE quartile = 1 or quartile = 3',
' GROUP BY quartile',
' ) _cdb_iqr',
' ) __cdb_iqr',
')'
].join('\n'));

var binsQueryTpl = dot.template([
'bins AS (',
' SELECT CASE WHEN total_rows = 0 OR iqr = 0',
'__cdb_bins AS (',
' SELECT CASE WHEN __cdb_total_rows = 0 OR __cdb_iqr = 0',
' THEN 1',
' ELSE GREATEST(',
' LEAST({{=it._minBins}}, CAST(total_rows AS INT)),',
' LEAST({{=it._minBins}}, CAST(__cdb_total_rows AS INT)),',
' LEAST(',
' CAST(((max_val - min_val) / (2 * iqr * power(total_rows, 1/3))) AS INT),',
' CAST(((__cdb_max_val - __cdb_min_val) / (2 * __cdb_iqr * power(__cdb_total_rows, 1/3))) AS INT),',
' {{=it._maxBins}}',
' )',
' )',
' END AS bins_number',
' FROM basics, iqrange, filtered_source',
' END AS __cdb_bins_number',
' FROM __cdb_basics, __cdb_iqrange, __cdb_filtered_source',
' LIMIT 1',
')'
].join('\n'));

var overrideBinsQueryTpl = dot.template([
'bins AS (',
' SELECT {{=it._bins}} AS bins_number',
'__cdb_bins AS (',
' SELECT {{=it._bins}} AS __cdb_bins_number',
')'
].join('\n'));

var nullsQueryTpl = dot.template([
'nulls AS (',
'__cdb_nulls AS (',
' SELECT',
' count(*) AS nulls_count',
' FROM ({{=it._query}}) _cdb_histogram_nulls',
' count(*) AS __cdb_nulls_count',
' FROM ({{=it._query}}) __cdb_histogram_nulls',
' WHERE {{=it._column}} IS NULL',
')'
].join('\n'));

var infinitiesQueryTpl = dot.template([
'infinities AS (',
'__cdb_infinities AS (',
' SELECT',
' count(*) AS infinities_count',
' FROM ({{=it._query}}) _cdb_histogram_infinities',
' count(*) AS __cdb_infinities_count',
' FROM ({{=it._query}}) __cdb_infinities_query',
' WHERE',
' {{=it._column}} = \'infinity\'::float',
' OR',
@@ -102,46 +143,151 @@ var infinitiesQueryTpl = dot.template([
].join('\n'));

var nansQueryTpl = dot.template([
'nans AS (',
'__cdb_nans AS (',
' SELECT',
' count(*) AS nans_count',
' FROM ({{=it._query}}) _cdb_histogram_infinities',
' count(*) AS __cdb_nans_count',
' FROM ({{=it._query}}) __cdb_nans_query',
' WHERE {{=it._column}} = \'NaN\'::float',
')'
].join('\n'));

var histogramQueryTpl = dot.template([
'SELECT',
' (max_val - min_val) / cast(bins_number as float) AS bin_width,',
' bins_number,',
' nulls_count,',
' {{?it._isFloatColumn}}infinities_count,',
' nans_count,{{?}}',
' avg_val,',
' CASE WHEN min_val = max_val',
' (__cdb_max_val - __cdb_min_val) / cast(__cdb_bins_number as float) AS bin_width,',
' __cdb_bins_number AS bins_number,',
' __cdb_nulls_count AS nulls_count,',
' {{?it._isFloatColumn}}__cdb_infinities_count AS infinities_count,',
' __cdb_nans_count AS nans_count,{{?}}',
' __cdb_avg_val AS avg_val,',
' CASE WHEN __cdb_min_val = __cdb_max_val',
' THEN 0',
' ELSE GREATEST(1, LEAST(WIDTH_BUCKET({{=it._column}}, min_val, max_val, bins_number), bins_number)) - 1',
' ELSE GREATEST(',
' 1,',
' LEAST(',
' WIDTH_BUCKET({{=it._column}}, __cdb_min_val, __cdb_max_val, __cdb_bins_number),',
' __cdb_bins_number',
' )',
' ) - 1',
' END AS bin,',
' min({{=it._column}})::numeric AS min,',
' max({{=it._column}})::numeric AS max,',
' avg({{=it._column}})::numeric AS avg,',
' count(*) AS freq',
'FROM filtered_source, basics, nulls, bins{{?it._isFloatColumn}}, infinities, nans{{?}}',
'FROM __cdb_filtered_source, __cdb_basics, __cdb_nulls,',
' __cdb_bins{{?it._isFloatColumn}}, __cdb_infinities, __cdb_nans{{?}}',
'GROUP BY bin, bins_number, bin_width, nulls_count,',
' avg_val{{?it._isFloatColumn}}, infinities_count, nans_count{{?}}',
'ORDER BY bin'
].join('\n'));

var dateBasicsQueryTpl = dot.template([
'__cdb_basics AS (',
' SELECT',
' max(date_part(\'epoch\', {{=it._column}})) AS __cdb_max_val,',
' min(date_part(\'epoch\', {{=it._column}})) AS __cdb_min_val,',
' avg(date_part(\'epoch\', {{=it._column}})) AS __cdb_avg_val,',
' min(date_trunc(',
' \'{{=it._aggregation}}\', {{=it._column}}::timestamp AT TIME ZONE \'{{=it._offset}}\'',
' )) AS __cdb_start_date,',
' max({{=it._column}}::timestamp AT TIME ZONE \'{{=it._offset}}\') AS __cdb_end_date,',
' count(1) AS __cdb_total_rows',
' FROM ({{=it._query}}) __cdb_basics_query',
')'
].join(' \n'));

var dateOverrideBasicsQueryTpl = dot.template([
'__cdb_basics AS (',
' SELECT',
' max({{=it._end}}) AS __cdb_max_val,',
' min({{=it._start}}) AS __cdb_min_val,',
' avg(date_part(\'epoch\', {{=it._column}})) AS __cdb_avg_val,',
' min(',
' date_trunc(',
' \'{{=it._aggregation}}\',',
' TO_TIMESTAMP({{=it._start}})::timestamp AT TIME ZONE \'{{=it._offset}}\'',
' )',
' ) AS __cdb_start_date,',
' max(',
' TO_TIMESTAMP({{=it._end}})::timestamp AT TIME ZONE \'{{=it._offset}}\'',
' ) AS __cdb_end_date,',
' count(1) AS __cdb_total_rows',
' FROM ({{=it._query}}) __cdb_basics_query',
')'
].join(' \n'));

var dateBinsQueryTpl = dot.template([
'__cdb_bins AS (',
' SELECT',
' __cdb_bins_array,',
' ARRAY_LENGTH(__cdb_bins_array, 1) AS __cdb_bins_number',
' FROM (',
' SELECT',
' ARRAY(',
' SELECT GENERATE_SERIES(',
' __cdb_start_date::timestamptz,',
' __cdb_end_date::timestamptz,',
' {{?it._aggregation==="quarter"}}\'3 month\'{{??}}\'1 {{=it._aggregation}}\'{{?}}::interval',
' )',
' ) AS __cdb_bins_array',
' FROM __cdb_basics',
' ) __cdb_bins_array_query',
')'
].join('\n'));

var dateHistogramQueryTpl = dot.template([
'SELECT',
' (__cdb_max_val - __cdb_min_val) / cast(__cdb_bins_number as float) AS bin_width,',
' __cdb_bins_number AS bins_number,',
' __cdb_nulls_count AS nulls_count,',
' CASE WHEN __cdb_min_val = __cdb_max_val',
' THEN 0',
' ELSE GREATEST(1, LEAST(',
' WIDTH_BUCKET(',
' {{=it._column}}::timestamp AT TIME ZONE \'{{=it._offset}}\',',
' __cdb_bins_array',
' ),',
' __cdb_bins_number',
' )) - 1',
' END AS bin,',
' min(',
' date_part(',
' \'epoch\', ',
' date_trunc(',
' \'{{=it._aggregation}}\', {{=it._column}}::timestamp AT TIME ZONE \'{{=it._offset}}\'',
' ) AT TIME ZONE \'{{=it._offset}}\'',
' )',
' )::numeric AS timestamp,',
' date_part(\'epoch\', __cdb_start_date)::numeric AS timestamp_start,',
' min(date_part(\'epoch\', {{=it._column}}))::numeric AS min,',
' max(date_part(\'epoch\', {{=it._column}}))::numeric AS max,',
' avg(date_part(\'epoch\', {{=it._column}}))::numeric AS avg,',
' count(*) AS freq',
'FROM ({{=it._query}}) __cdb_histogram, __cdb_basics, __cdb_bins, __cdb_nulls',
'WHERE date_part(\'epoch\', {{=it._column}}) IS NOT NULL',
'GROUP BY bin, bins_number, bin_width, nulls_count, timestamp_start',
'ORDER BY bin'
].join('\n'));

var TYPE = 'histogram';

/**
{
type: 'histogram',
options: {
column: 'name',
bins: 10 // OPTIONAL
}
Numeric histogram:
{
type: 'histogram',
options: {
column: 'name', // column data type: numeric
bins: 10 // OPTIONAL
}
}

Time series:
{
type: 'histogram',
options: {
column: 'date', // column data type: date
aggregation: 'day' // OPTIONAL (if undefined then it'll be built as numeric)
offset: -7200 // OPTIONAL (UTC offset in seconds)
}
}
*/
function Histogram(query, options, queries) {
@@ -153,6 +299,8 @@ function Histogram(query, options, queries) {
this.queries = queries;
this.column = options.column;
this.bins = options.bins;
this.aggregation = options.aggregation;
this.offset = options.offset;

this._columnType = null;
}
@@ -184,16 +332,22 @@ Histogram.prototype.sql = function(psql, override, callback) {
return null;
}

var histogramSql = this._buildQuery(override);

return callback(null, histogramSql);
this._buildQuery(psql, override, callback);
};

Histogram.prototype._buildQuery = function (override) {
Histogram.prototype.isDateHistogram = function (override) {
return this._columnType === 'date' && (this.aggregation !== undefined || override.aggregation !== undefined);
};

Histogram.prototype._buildQuery = function (psql, override, callback) {
var filteredQuery, basicsQuery, binsQuery;
var _column = this.column;
var _query = this.query;

if (this.isDateHistogram(override)) {
return this._buildDateHistogramQuery(psql, override, callback);
}

if (this._columnType === 'date') {
_column = columnCastTpl({column: _column});
}
@@ -280,7 +434,7 @@ Histogram.prototype._buildQuery = function (override) {

debug(histogramSql);

return histogramSql;
return callback(null, histogramSql);
};

Histogram.prototype._shouldOverride = function (override) {
@@ -291,6 +445,135 @@ Histogram.prototype._shouldOverrideBins = function (override) {
return override && _.has(override, 'bins');
};

var DATE_AGGREGATIONS = {
'auto': true,
'minute': true,
'hour': true,
'day': true,
'week': true,
'month': true,
'quarter': true,
'year': true
};

Histogram.prototype._buildDateHistogramQuery = function (psql, override, callback) {
var _column = this.column;
var _query = this.query;
var _aggregation = override && override.aggregation ? override.aggregation : this.aggregation;
var _offset = override && Number.isFinite(override.offset) ? override.offset : this.offset;

if (!DATE_AGGREGATIONS.hasOwnProperty(_aggregation)) {
return callback(new Error('Invalid aggregation value. Valid ones: ' +
Object.keys(DATE_AGGREGATIONS).join(', ')
));
}

if (_aggregation === 'auto') {
this.getAutomaticAggregation(psql, function (err, aggregation) {
if (err || aggregation === 'none') {
this.aggregation = 'day';
} else {
this.aggregation = aggregation;
}
override.aggregation = this.aggregation;
this._buildDateHistogramQuery(psql, override, callback);
}.bind(this));
return null;
}

var dateBasicsQuery;

if (override && _.has(override, 'start') && _.has(override, 'end')) {
dateBasicsQuery = dateOverrideBasicsQueryTpl({
_query: _query,
_column: _column,
_aggregation: _aggregation,
_start: getBinStart(override),
_end: getBinEnd(override),
_offset: parseOffset(_offset, _aggregation)
});
} else {
dateBasicsQuery = dateBasicsQueryTpl({
_query: _query,
_column: _column,
_aggregation: _aggregation,
_offset: parseOffset(_offset, _aggregation)
});
}

var dateBinsQuery = [
dateBinsQueryTpl({
_aggregation: _aggregation
})
].join(',\n');

var nullsQuery = nullsQueryTpl({
_query: _query,
_column: _column
});

var dateHistogramQuery = dateHistogramQueryTpl({
_query: _query,
_column: _column,
_aggregation: _aggregation,
_offset: parseOffset(_offset, _aggregation)
});

var histogramSql = [
"WITH",
[
dateBasicsQuery,
dateBinsQuery,
nullsQuery
].join(',\n'),
dateHistogramQuery
].join('\n');

debug(histogramSql);

return callback(null, histogramSql);
};

Histogram.prototype.getAutomaticAggregation = function (psql, callback) {
var dateIntervalQuery = dateIntervalQueryTpl({
query: this.query,
column: this.column
});

debug(dateIntervalQuery);

psql.query(dateIntervalQuery, function (err, result) {
if (err) {
return callback(err);
}

var aggegations = result.rows[0];
var aggregation = Object.keys(aggegations)
.map(function (key) {
return {
name: key,
value: aggegations[key]
};
})
.reduce(function (closer, current) {
if (current.value > MAX_INTERVAL_VALUE) {
return closer;
}

var closerDiff = MAX_INTERVAL_VALUE - closer.value;
var currentDiff = MAX_INTERVAL_VALUE - current.value;

if (Number.isFinite(current.value) && closerDiff > currentDiff) {
return current;
}

return closer;
}, { name: 'none', value: -1 });

callback(null, aggregation.name);
});
};

Histogram.prototype.format = function(result, override) {
override = override || {};
var buckets = [];
@@ -302,6 +585,9 @@ Histogram.prototype.format = function(result, override) {
var infinities = 0;
var nans = 0;
var avg;
var timestampStart;
var aggregation;
var offset;

if (result.rows.length) {
var firstRow = result.rows[0];
@@ -309,16 +595,34 @@ Histogram.prototype.format = function(result, override) {
width = firstRow.bin_width || width;
avg = firstRow.avg_val;
nulls = firstRow.nulls_count;
timestampStart = firstRow.timestamp_start;
infinities = firstRow.infinities_count;
nans = firstRow.nans_count;
binsStart = override.hasOwnProperty('start') ? getBinStart(override) : firstRow.min;
binsStart = populateBinStart(override, firstRow);

if (Number.isFinite(timestampStart)) {
aggregation = getAggregation(override, this.aggregation);
offset = getOffset(override, this.offset);
}

buckets = result.rows.map(function(row) {
return _.omit(row, 'bins_number', 'bin_width', 'nulls_count', 'infinities_count', 'nans_count', 'avg_val');
return _.omit(
row,
'bins_number',
'bin_width',
'nulls_count',
'infinities_count',
'nans_count',
'avg_val',
'timestamp_start'
);
});
}

return {
aggregation: aggregation,
offset: offset,
timestamp_start: timestampStart,
bin_width: width,
bins_count: binsCount,
bins_start: binsStart,
@@ -330,6 +634,21 @@ Histogram.prototype.format = function(result, override) {
};
};

function getAggregation(override, aggregation) {
return override && override.aggregation ? override.aggregation : aggregation;
}

function getOffset(override, offset) {
if (override && override.offset) {
return override.offset;
}
if (offset) {
return offset;
}

return 0;
}

function getBinStart(override) {
if (override.hasOwnProperty('start') && override.hasOwnProperty('end')) {
return Math.min(override.start, override.end);
@@ -359,6 +678,32 @@ function getWidth(override) {
return width;
}

function parseOffset(offset, aggregation) {
if (!offset) {
return '0';
}
if (aggregation === 'hour' || aggregation === 'minute') {
return '0';
}

var offsetInHours = Math.ceil(offset / 3600);
return '' + offsetInHours;
}

function populateBinStart(override, firstRow) {
var binStart;

if (firstRow.hasOwnProperty('timestamp')) {
binStart = firstRow.timestamp;
} else if (override.hasOwnProperty('start')) {
binStart = getBinStart(override);
} else {
binStart = firstRow.min;
}

return binStart;
}

Histogram.prototype.getType = function() {
return TYPE;
};
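To make the `auto` rule in `getAutomaticAggregation` above concrete: `dateIntervalQueryTpl` returns, for each candidate unit, how many bins the column's full date range would produce, and the reduce keeps the unit whose count is closest to MAX_INTERVAL_VALUE (366) without exceeding it. A standalone rerun of that selection step with made-up counts (roughly a two-year range):

```javascript
var MAX_INTERVAL_VALUE = 366;

// Made-up interval counts of the kind the dateIntervalQueryTpl row could contain.
var intervals = { year: 2, quarter: 8, month: 24, week: 104, day: 730, hour: 17520, minute: 1051200, second: 63072000 };

var aggregation = Object.keys(intervals)
    .map(function (key) {
        return { name: key, value: intervals[key] };
    })
    .reduce(function (closer, current) {
        if (current.value > MAX_INTERVAL_VALUE) {
            return closer; // this unit would produce too many bins
        }
        var closerDiff = MAX_INTERVAL_VALUE - closer.value;
        var currentDiff = MAX_INTERVAL_VALUE - current.value;
        return (Number.isFinite(current.value) && closerDiff > currentDiff) ? current : closer;
    }, { name: 'none', value: -1 });

console.log(aggregation.name); // 'week' -> 104 bins, the closest count below 366
```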
@@ -329,6 +329,25 @@ function bootstrap(opts) {
app.enable('jsonp callback');
app.disable('x-powered-by');
app.disable('etag');

// Fix: https://github.com/CartoDB/Windshaft-cartodb/issues/705
// See: http://expressjs.com/en/4x/api.html#app.set
app.set('json replacer', function (key, value) {
if (value !== value) {
return 'NaN';
}

if (value === Infinity) {
return 'Infinity';
}

if (value === -Infinity) {
return '-Infinity';
}

return value;
});

app.use(bodyParser.json());

app.use(function bootstrap$prepareRequestResponse(req, res, next) {
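The replacer registered above matters because `JSON.stringify` serializes NaN and ±Infinity as `null` by default; with the replacer installed they survive as strings, which is what the special-numeric-values acceptance test further down asserts. A quick standalone check of the same logic:

```javascript
// Same replacer logic as in the bootstrap hunk above.
function specialNumbersReplacer(key, value) {
    if (value !== value) { // only NaN is not equal to itself
        return 'NaN';
    }
    if (value === Infinity) {
        return 'Infinity';
    }
    if (value === -Infinity) {
        return '-Infinity';
    }
    return value;
}

var payload = { nan: NaN, inf: Infinity, minusInf: -Infinity, n: 1 };

console.log(JSON.stringify(payload));
// {"nan":null,"inf":null,"minusInf":null,"n":1}
console.log(JSON.stringify(payload, specialNumbersReplacer));
// {"nan":"NaN","inf":"Infinity","minusInf":"-Infinity","n":1}
```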
@@ -1,7 +1,7 @@
{
"private": true,
"name": "windshaft-cartodb",
"version": "3.9.9",
"version": "3.11.1",
"description": "A map tile server for CartoDB",
"keywords": [
"cartodb"
@@ -47,6 +47,7 @@
"istanbul": "~0.4.3",
"jshint": "~2.9.4",
"mocha": "~3.4.1",
"moment": "~2.18.1",
"nock": "~2.11.0",
"redis": "~0.12.1",
"semver": "~1.1.4",
(File diff suppressed because it is too large)

test/acceptance/special-numeric-values.js (new file, 71 lines)
@@ -0,0 +1,71 @@
require('../support/test_helper');

var assert = require('../support/assert');
var TestClient = require('../support/test-client');

describe('special numeric values', function() {

afterEach(function(done) {
if (this.testClient) {
this.testClient.drain(done);
} else {
done();
}
});

var ATTRIBUTES_LAYER = 1;

function createMapConfig(sql, id, columns) {
return {
version: '1.6.0',
layers: [
{
type: 'mapnik',
options: {
sql: "select 1 as id, 'SRID=4326;POINT(0 0)'::geometry as the_geom",
cartocss: '#style { }',
cartocss_version: '2.0.1'
}
},
{
type: 'mapnik',
options: {
sql: sql || "select 1 as i, 6 as n, 'SRID=4326;POINT(0 0)'::geometry as the_geom",
attributes: {
id: id || 'i',
columns: columns || ['n']
},
cartocss: '#style { }',
cartocss_version: '2.0.1'
}
}
]
};
}

it('should retrieve special numeric values', function (done) {
var featureId = 1;
var sql = [
'SELECT',
' 1 as cartodb_id,',
' null::geometry the_geom_webmercator,',
' \'infinity\'::float as infinity,',
' \'-infinity\'::float as _infinity,',
' \'NaN\'::float as nan'
].join('\n');
var id = 'cartodb_id';
var columns = ['infinity', '_infinity', 'nan'];

var mapConfig = createMapConfig(sql, id, columns);

this.testClient = new TestClient(mapConfig, 1234);
this.testClient.getFeatureAttributes(featureId, ATTRIBUTES_LAYER, {}, function (err, attributes) {
assert.ifError(err);
assert.equal(attributes.infinity, 'Infinity');
assert.equal(attributes._infinity, '-Infinity');
assert.equal(attributes.nan, 'NaN');
done();
});
});
});
@@ -411,7 +411,7 @@ TestClient.prototype.getDataview = function(dataviewName, params, callback) {
own_filter: params.hasOwnProperty('own_filter') ? params.own_filter : 1
};

['bbox', 'bins', 'start', 'end'].forEach(function(extraParam) {
['bbox', 'bins', 'start', 'end', 'aggregation', 'offset'].forEach(function(extraParam) {
if (params.hasOwnProperty(extraParam)) {
urlParams[extraParam] = params[extraParam];
}
@@ -455,6 +455,105 @@ TestClient.prototype.getDataview = function(dataviewName, params, callback) {
);
};

TestClient.prototype.getFeatureAttributes = function(featureId, layerId, params, callback) {
var self = this;

if (!callback) {
callback = params;
params = {};
}

var extraParams = {};
if (this.apiKey) {
extraParams.api_key = this.apiKey;
}
if (params && params.filters) {
extraParams.filters = JSON.stringify(params.filters);
}

var url = '/api/v1/map';
if (Object.keys(extraParams).length > 0) {
url += '?' + qs.stringify(extraParams);
}

var expectedResponse = params.response || {
status: 200,
headers: {
'Content-Type': 'application/json; charset=utf-8'
}
};

var layergroupId;
step(
function createLayergroup() {
var next = this;
assert.response(server,
{
url: url,
method: 'POST',
headers: {
host: 'localhost',
'Content-Type': 'application/json'
},
data: JSON.stringify(self.mapConfig)
},
{
status: 200,
headers: {
'Content-Type': 'application/json; charset=utf-8'
}
},
function(res, err) {
if (err) {
return next(err);
}

var parsedBody = JSON.parse(res.body);

if (parsedBody.layergroupid) {
self.keysToDelete['map_cfg|' + LayergroupToken.parse(parsedBody.layergroupid).token] = 0;
self.keysToDelete['user:localhost:mapviews:global'] = 5;
}

return next(null, parsedBody.layergroupid);
}
);
},
function getFeatureAttributes(err, layergroupId) {
assert.ifError(err);

var next = this;

url = '/api/v1/map/' + layergroupId + '/' + layerId + '/attributes/' + featureId;

assert.response(server,
{
url: url,
method: 'GET',
headers: {
host: 'localhost'
}
},
expectedResponse,
function(res, err) {
if (err) {
return next(err);
}

next(null, JSON.parse(res.body));
}
);
},
function finish(err, attributes) {
if (err) {
return callback(err);
}

return callback(null, attributes);
}
);
};

TestClient.prototype.getTile = function(z, x, y, params, callback) {
var self = this;
@@ -0,0 +1,44 @@
var PostgresDatasource = require('../../../../lib/cartodb/backends/turbo-carto-postgres-datasource');
var PSQL = require('cartodb-psql');
var _ = require('underscore');
var assert = require('assert');

describe('turbo-carto-postgres-datasource', function() {

beforeEach(function () {
const dbname = _.template(global.environment.postgres_auth_user, { user_id: 1 }) + '_db';
const psql = new PSQL({
user: 'postgres',
dbname: dbname,
host: global.environment.postgres.host,
port: global.environment.postgres.port
});
const sql = [
'SELECT',
' null::geometry the_geom_webmercator,',
' CASE',
' WHEN x % 4 = 0 THEN \'infinity\'::float',
' WHEN x % 4 = 1 THEN \'-infinity\'::float',
' WHEN x % 4 = 2 THEN \'NaN\'::float',
' ELSE x',
' END AS values',
'FROM generate_series(1, 1000) x'
].join('\n');
this.datasource = new PostgresDatasource(psql, sql);
});

it('should ignore NaNs and Infinities when computing ramps', function(done) {
var column = 'values';
var buckets = 4;
var method = 'equal';
this.datasource.getRamp(column, buckets, method, function(err, result) {
var expected_result = {
ramp: [ 252, 501, 750, 999 ],
stats: { min_val: 3, max_val: 999, avg_val: 501 },
strategy: undefined
};
assert.deepEqual(result, expected_result);
done();
});
});
});
@@ -2,7 +2,7 @@
# yarn lockfile v1


"abaculus@github:cartodb/abaculus#2.0.3-cdb1":
abaculus@cartodb/abaculus#2.0.3-cdb1:
version "2.0.3-cdb1"
resolved "https://codeload.github.com/cartodb/abaculus/tar.gz/f5f34e1c80cdd8d49edd1d6fe3b2220ab2e23aaf"
dependencies:
@@ -205,7 +205,7 @@ camshaft@0.55.6:
dot "^1.0.3"
request "^2.69.0"

"canvas@github:cartodb/node-canvas#1.6.2-cdb2":
canvas@cartodb/node-canvas#1.6.2-cdb2:
version "1.6.2-cdb2"
resolved "https://codeload.github.com/cartodb/node-canvas/tar.gz/8acf04557005c633f9e68524488a2657c04f3766"
dependencies:
@@ -231,7 +231,7 @@ carto@0.16.3:
optimist "~0.6.0"
underscore "~1.6.0"

"carto@github:cartodb/carto#0.15.1-cdb3":
carto@cartodb/carto#0.15.1-cdb3:
version "0.15.1-cdb3"
resolved "https://codeload.github.com/cartodb/carto/tar.gz/945f5efb74fd1af1f5e1f69f409f9567f94fb5a7"
dependencies:
@@ -1296,7 +1296,7 @@ mocha@~3.4.1:
mkdirp "0.5.1"
supports-color "3.1.2"

moment@^2.10.6:
moment@^2.10.6, moment@~2.18.1:
version "2.18.1"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.18.1.tgz#c36193dd3ce1c2eed2adb7c802dbbc77a81b1c0f"