'use strict';

const BaseHistogram = require('./base-histogram');
const debug = require('debug')('windshaft:dataview:date-histogram');
const utils = require('../../../utils/query-utils');

/**
 * Gets the name of a timezone with the same offset as the required one
 * using the pg_timezone_names table. We do this because it's simpler to pass
 * the name than to pass the offset itself, as PostgreSQL uses a different
 * sign convention. For example: TIME ZONE 'CET' is equal to TIME ZONE 'UTC-1',
 * not 'UTC+1' as one would expect.
 * Gives priority to Etc/GMT±N timezones but still supports odd offsets like
 * the 8.5 hours of Asia/Pyongyang.
 * It also makes it easier to, in the future, support the input of the expected
 * timezone instead of the offset; that is, using 'Europe/Madrid' instead of
 * '+3600' or '+7200'. The daylight saving status can then be handled by PostgreSQL.
 */
const offsetNameQueryTpl = ctx => `
    WITH __wd_tz AS
    (
        SELECT name
        FROM pg_timezone_names
        WHERE utc_offset = interval '${ctx.offset} hours'
        ORDER BY CASE WHEN name LIKE 'Etc/GMT%' THEN 0 ELSE 1 END
        LIMIT 1
    ),`;
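
// For instance, with ctx.offset = 1 this should resolve to a name such as
// 'Etc/GMT-1' (pg_timezone_names uses the POSIX sign convention, so Etc/GMT-1
// is one hour east of UTC); any other entry with a matching utc_offset acts
// as a fallback for offsets that have no Etc/GMT±N zone.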

/**
 * Function to get the subquery that places each row in its bin depending on
 * the aggregation. Since the stored data is in epoch time, we need to shift it
 * to our timezone so that, when calling date_trunc, it falls into the correct bin.
 */
function dataBucketsQuery(ctx) {
    let conditionStr = '';

    if (ctx.start !== 0) {
        conditionStr = `WHERE ${ctx.column} >= to_timestamp(${ctx.start})`;
    }

    if (ctx.end !== 0) {
        if (conditionStr === '') {
            conditionStr = `WHERE ${ctx.column} <= to_timestamp(${ctx.end})`;
        } else {
            conditionStr += ` AND ${ctx.column} <= to_timestamp(${ctx.end})`;
        }
    }

    return `
    __wd_buckets AS
    (
        SELECT
            date_trunc('${ctx.aggregation}', timezone(__wd_tz.name, ${ctx.column}::timestamptz)) AS timestamp,
            count(*) AS freq,
            ${utils.countNULLs(ctx)} AS nulls_count
        FROM
        (
            ${ctx.query}
        ) __source, __wd_tz
        ${conditionStr}
        GROUP BY 1, __wd_tz.name
    ),`;
}
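
// As a rough illustration, with hypothetical ctx values
// { aggregation: 'month', column: 'date', start: 0, end: 0 } this yields a
// CTE that groups the source rows by
// date_trunc('month', timezone(__wd_tz.name, date::timestamptz)),
// with no WHERE clause since no start/end bounds were given.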

/**
 * Function that generates an array with all the possible bins between the
 * start and end date. If they are not provided, we use the min and max
 * computed by dataBucketsQuery.
 */
function allBucketsArrayQuery(ctx) {
    let extraFrom = '';
    let seriesStart = '';
    let seriesEnd = '';

    if (ctx.start === 0) {
        extraFrom = `, __wd_buckets GROUP BY __wd_tz.name`;
        seriesStart = `min(__wd_buckets.timestamp)`;
    } else {
        seriesStart = `date_trunc('${ctx.aggregation}', timezone(__wd_tz.name, to_timestamp(${ctx.start})))`;
    }

    if (ctx.end === 0) {
        extraFrom = `, __wd_buckets GROUP BY __wd_tz.name`;
        seriesEnd = `max(__wd_buckets.timestamp)`;
    } else {
        seriesEnd = `date_trunc('${ctx.aggregation}', timezone(__wd_tz.name, to_timestamp(${ctx.end})))`;
    }

    return `
    __wd_all_buckets AS
    (
        SELECT ARRAY(
            SELECT
                generate_series(
                    ${seriesStart},
                    ${seriesEnd},
                    interval '${ctx.interval}') AS bin_start
            FROM __wd_tz${extraFrom}
        ) AS bins
    )`;
}
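
/**
 * Query to estimate how many bins each aggregation level would produce over
 * the column's full range. Each divisor below is the approximate length in
 * days of the corresponding period (e.g. 36525 days per century, 91 days
 * per quarter).
 */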
const dateIntervalQueryTpl = ctx => `
    WITH
    __cdb_dates AS (
        SELECT
            MAX(${ctx.column}::timestamp) AS __cdb_end,
            MIN(${ctx.column}::timestamp) AS __cdb_start
        FROM (${ctx.query}) __cdb_source
    ),
    __cdb_interval_in_days AS (
        SELECT
            DATE_PART('day', __cdb_end - __cdb_start) AS __cdb_days
        FROM __cdb_dates
    ),
    __cdb_interval_in_hours AS (
        SELECT
            __cdb_days * 24 + DATE_PART('hour', __cdb_end - __cdb_start) AS __cdb_hours
        FROM __cdb_interval_in_days, __cdb_dates
    ),
    __cdb_interval_in_minutes AS (
        SELECT
            __cdb_hours * 60 + DATE_PART('minute', __cdb_end - __cdb_start) AS __cdb_minutes
        FROM __cdb_interval_in_hours, __cdb_dates
    ),
    __cdb_interval_in_seconds AS (
        SELECT
            __cdb_minutes * 60 + DATE_PART('second', __cdb_end - __cdb_start) AS __cdb_seconds
        FROM __cdb_interval_in_minutes, __cdb_dates
    )
    SELECT
        ROUND(__cdb_days / 365243) AS millennium,
        ROUND(__cdb_days / 36525) AS century,
        ROUND(__cdb_days / 3652) AS decade,
        ROUND(__cdb_days / 365) AS year,
        ROUND(__cdb_days / 91) AS quarter,
        ROUND(__cdb_days / 30) AS month,
        ROUND(__cdb_days / 7) AS week,
        __cdb_days AS day,
        __cdb_hours AS hour,
        __cdb_minutes AS minute,
        __cdb_seconds AS second
    FROM __cdb_interval_in_days, __cdb_interval_in_hours, __cdb_interval_in_minutes, __cdb_interval_in_seconds
`;

/** Maximum number of bins per aggregation; auto mode picks the finest aggregation that stays within it */
const MAX_INTERVAL_VALUE = 100;

const DATE_AGGREGATIONS = {
    'auto': true,
    'second': true,
    'minute': true,
    'hour': true,
    'day': true,
    'week': true,
    'month': true,
    'quarter': true,
    'year': true,
    'decade': true,
    'century': true,
    'millennium': true
};

/**
 * date_histogram: {
 *     type: 'histogram',
 *     options: {
 *         column: 'date', // column data type: date
 *         aggregation: 'day', // MANDATORY
 *         offset: -7200 // OPTIONAL (UTC offset in seconds)
 *     }
 * }
 */
module.exports = class DateHistogram extends BaseHistogram {
    constructor (query, options, queries) {
        super(query, options, queries);

        this.aggregation = options.aggregation;
        this.offset = options.offset;
    }

    _buildQueryTpl (ctx) {
        return `
        ${offsetNameQueryTpl(ctx)}
        ${dataBucketsQuery(ctx)}
        ${allBucketsArrayQuery(ctx)}
        SELECT
            array_position(__wd_all_buckets.bins, __wd_buckets.timestamp) - 1 AS bin,
            date_part('epoch', timezone(__wd_tz.name, __wd_buckets.timestamp)) AS timestamp,
            __wd_buckets.freq AS freq,
            date_part('epoch', timezone(__wd_tz.name, (__wd_all_buckets.bins)[1])) AS timestamp_start,
            array_length(__wd_all_buckets.bins, 1) AS bins_number,
            date_part('epoch', interval '${ctx.interval}') AS bin_width,
            __wd_buckets.nulls_count AS nulls_count
        FROM __wd_buckets, __wd_all_buckets, __wd_tz
        GROUP BY __wd_tz.name, __wd_all_buckets.bins, __wd_buckets.timestamp, __wd_buckets.nulls_count, __wd_buckets.freq
        ORDER BY bin ASC;
        `;
    }
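
    // Each row of the generated query carries: bin (0-based position of the
    // bucket within the full series), timestamp (bucket start as epoch), freq,
    // timestamp_start (epoch of the first bucket), bins_number, bin_width
    // (in seconds) and nulls_count.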

    _buildQuery (psql, override, callback) {
        if (!this._isValidAggregation(override)) {
            return callback(new Error('Invalid aggregation value. Valid ones: ' +
                Object.keys(DATE_AGGREGATIONS).join(', ')
            ));
        }

        if (this._getAggregation(override) === 'auto') {
            this._getAutomaticAggregation(psql, function (err, aggregation) {
                if (err || aggregation === 'none') {
                    this.aggregation = 'day';
                } else {
                    this.aggregation = aggregation;
                }
                override.aggregation = this.aggregation;
                this._buildQuery(psql, override, callback);
            }.bind(this));
            return null;
        }

        const interval = this._getAggregation(override) === 'quarter' ?
            '3 months' : '1 ' + this._getAggregation(override);

        const histogramSql = this._buildQueryTpl({
            override: override,
            query: this.query,
            column: this.column,
            aggregation: this._getAggregation(override),
            start: this._getBinStart(override),
            end: this._getBinEnd(override),
            offset: this._parseOffset(override),
            interval: interval
        });

        debug(histogramSql);

        return callback(null, histogramSql);
    }

    _isValidAggregation (override) {
        return DATE_AGGREGATIONS.hasOwnProperty(this._getAggregation(override));
    }
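
    // Chooses the aggregation whose estimated bin count is closest to
    // MAX_INTERVAL_VALUE without exceeding it; returns 'none' when every
    // aggregation would produce too many bins.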
    _getAutomaticAggregation (psql, callback) {
        const dateIntervalQuery = dateIntervalQueryTpl({
            query: this.query,
            column: this.column
        });

        psql.query(dateIntervalQuery, function (err, result) {
            if (err) {
                return callback(err);
            }

            const aggregations = result.rows[0];
            const aggregation = Object.keys(aggregations)
                .map(key => ({ name: key, value: aggregations[key] }))
                .reduce((closer, current) => {
                    if (current.value > MAX_INTERVAL_VALUE) {
                        return closer;
                    }

                    const closerDiff = MAX_INTERVAL_VALUE - closer.value;
                    const currentDiff = MAX_INTERVAL_VALUE - current.value;

                    if (Number.isFinite(current.value) && closerDiff > currentDiff) {
                        return current;
                    }

                    return closer;
                }, { name: 'none', value: -1 });

            callback(null, aggregation.name);
        });
    }

    _getSummary (result, override) {
        const firstRow = result.rows[0] || {};

        return {
            aggregation: this._getAggregation(override),
            offset: this._getOffset(override),
            timestamp_start: firstRow.timestamp_start,
            bin_width: firstRow.bin_width || 0,
            bins_count: firstRow.bins_number || 0,
            bins_start: firstRow.timestamp,
            nulls: firstRow.nulls_count,
            infinities: firstRow.infinities_count,
            nans: firstRow.nans_count,
            avg: firstRow.avg_val
        };
    }

    _getBuckets (result) {
        result.rows.forEach(function (row) {
            row.min = row.max = row.avg = row.timestamp;
        });

        return result.rows.map(({ bin, min, max, avg, freq, timestamp }) => ({ bin, min, max, avg, freq, timestamp }));
    }

    _getAggregation (override = {}) {
        return override.aggregation ? override.aggregation : this.aggregation;
    }

    _getOffset (override = {}) {
        return Number.isFinite(override.offset) ? override.offset : (this.offset || 0);
    }

    _parseOffset (override) {
        if (this._shouldIgnoreOffset(override)) {
            return '0';
        }

        const offsetInHours = Math.ceil(this._getOffset(override) / 3600);

        return '' + offsetInHours;
    }
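
    // Since offsets are rounded to whole hours (see _parseOffset above),
    // truncating to 'hour' or 'minute' should place each row in the same
    // bucket with or without the shift, so the offset can be skipped for
    // those aggregations.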
    _shouldIgnoreOffset (override) {
        return (this._getAggregation(override) === 'hour' || this._getAggregation(override) === 'minute');
    }
};