Send dates as unix epoch instead of strings in .mvt files

This commit creates a new MapConfig adapter (`VectorMapConfigAdapter`) used when instantiating vector maps.
The adapter generates a new SQL query for ONE SINGLE LAYER (carto-vl currently only supports one layer per MVT)
in which the date columns are wrapped into a unix epoch using the `date_part` function.

Because of this, the .mvt files are smaller, since we use numbers instead of strings to represent dates. It is also faster in carto-gl,
where we interpolate linearly between 0 and 1 to create animations.

Note that we should add a parameter to make this transformation optional.
We should also take the epoch precision into account.
IagoLast 2018-05-31 12:41:34 +02:00
parent f732ed970b
commit 3850bbb68e
5 changed files with 128 additions and 24 deletions
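
For illustration only (not part of this commit; the table and column names are made up), the rewrite described above turns a layer query with a timestamp column into something like the sketch below. Note that PostgreSQL's `date_part('epoch', ...)` returns seconds as a double precision value, so sub-second precision ends up in the fractional part, which is why the epoch precision deserves a closer look.

// Hypothetical layer SQL before the adapter runs
const originalQuery = 'SELECT cartodb_id, the_geom_webmercator, created_at FROM my_table';

// Roughly the query the adapter generates: every column is re-selected and
// only the date columns are wrapped with date_part('epoch', ...)
const wrappedQuery = `
    SELECT
        cartodb_id,
        the_geom_webmercator,
        date_part('epoch', created_at) as created_at
    FROM
        (${originalQuery}) _cdb_epoch_transformation`;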

View File

@@ -35,6 +35,7 @@ const TurboCartoAdapter = require('../models/mapconfig/adapter/turbo-carto-adapt
const DataviewsWidgetsAdapter = require('../models/mapconfig/adapter/dataviews-widgets-adapter');
const AggregationMapConfigAdapter = require('../models/mapconfig/adapter/aggregation-mapconfig-adapter');
const MapConfigAdapter = require('../models/mapconfig/adapter');
+const VectorMapConfigAdapter = require('../models/mapconfig/adapter/vector-mapconfig-adapter');
const ResourceLocator = require('../models/resource-locator');
const LayergroupMetadata = require('../utils/layergroup-metadata');

@@ -135,6 +136,7 @@ module.exports = class ApiRouter {
new SqlWrapMapConfigAdapter(),
new DataviewsWidgetsAdapter(),
new AnalysisMapConfigAdapter(analysisBackend),
+new VectorMapConfigAdapter(pgConnection),
new AggregationMapConfigAdapter(pgConnection),
new MapConfigOverviewsAdapter(overviewsMetadataBackend, filterStatsBackend),
new TurboCartoAdapter()

View File

@@ -2,6 +2,7 @@ const BaseHistogram = require('./base-histogram');
const debug = require('debug')('windshaft:dataview:date-histogram');
const utils = require('../../../utils/query-utils');

/**
 * Gets the name of a timezone with the same offset as the required
 * using the pg_timezone_names table. We do this because it's simpler to pass

View File

@@ -0,0 +1,39 @@
const AggregationMapConfig = require('../../aggregation/aggregation-mapconfig');
const utilsService = require('../../../utils/get-column-types');

// Generates a new query for the layer in which every date column
// is cast to a unix timestamp.
module.exports = class VectorMapConfigAdapter {
    constructor(pgConnection) {
        this.pgConnection = pgConnection;
    }

    getMapConfig(user, requestMapConfig, params, context, callback) {
        let mapConfig;
        try {
            mapConfig = new AggregationMapConfig(user, requestMapConfig, this.pgConnection);
        } catch (err) {
            return callback(err);
        }

        if (!mapConfig.isVectorOnlyMapConfig()) {
            return callback(null, requestMapConfig);
        }

        if (requestMapConfig.layers.length > 1) {
            return callback(new Error('Getting column types for multiple vector layers is not implemented'));
        }

        // Get the columns of the layer query and wrap its date columns into a unix epoch
        utilsService.getColumns(user, this.pgConnection, requestMapConfig.layers[0])
            .then(result => {
                const newSqlQuery = utilsService.wrapDates(requestMapConfig.layers[0].options.sql, result.fields);
                requestMapConfig.layers[0].options.sql = newSqlQuery;
                return callback(null, requestMapConfig);
            })
            .catch(err => {
                return callback(err);
            });
    }
};

View File

@@ -0,0 +1,60 @@
// Postgres OIDs of the date/time types
const DATE_OIDS = {
    1082: true,
    1114: true,
    1184: true
};

/**
 * Wrap a query, transforming all its date columns into a unix epoch
 * @param {string} originalQuery
 * @param {object[]} fields
 */
function wrapDates(originalQuery, fields) {
    return `
        SELECT
            ${fields.map(field => DATE_OIDS.hasOwnProperty(field.dataTypeID) ? _castColumnToEpoch(field.name) : `${field.name}`).join(',')}
        FROM
            (${originalQuery}) _cdb_epoch_transformation`;
}

/**
 * Return the list of all the columns in the layer query
 * @param {string} user
 * @param {*} dbConnection
 * @param {object} layer
 */
function getColumns(user, dbConnection, layer) {
    return _getColumns(user, dbConnection, layer.options.sql);
}

/**
 * Return a sql expression that transforms a date column into a unix epoch
 * @param {string} columnName - The name of the date column
 */
function _castColumnToEpoch(columnName) {
    return `date_part('epoch', ${columnName}) as ${columnName}`;
}

function _getColumns(user, dbConnection, originalQuery) {
    return new Promise((resolve, reject) => {
        dbConnection.getConnection(user, (err, connection) => {
            if (err) {
                return reject(err);
            }
            connection.query(`SELECT * FROM (${originalQuery}) _cdb_column_type LIMIT 0`, (err, res) => {
                if (err) {
                    return reject(err);
                }
                resolve(res);
            });
        });
    });
}

module.exports = {
    wrapDates,
    getColumns,
};
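
As a usage sketch (not part of the commit; the require path and the field metadata are invented), wrapDates only rewrites the columns whose dataTypeID is one of the date OIDs above, using the fields array that getColumns resolves with from its LIMIT 0 probe:

const { wrapDates } = require('../utils/get-column-types'); // path assumed

// Field metadata shaped like the `fields` array of a node-postgres result
const fields = [
    { name: 'cartodb_id', dataTypeID: 23 },   // integer: selected as-is
    { name: 'created_at', dataTypeID: 1114 }  // timestamp: wrapped into a unix epoch
];

const sql = wrapDates('SELECT * FROM my_table', fields);
// sql is roughly:
//   SELECT cartodb_id, date_part('epoch', created_at) as created_at
//   FROM (SELECT * FROM my_table) _cdb_epoch_transformation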

View File

@@ -1,16 +1,16 @@
function prepareQuery(sql) {
    var affectedTableRegexCache = {
        bbox: /!bbox!/g,
        scale_denominator: /!scale_denominator!/g,
        pixel_width: /!pixel_width!/g,
        pixel_height: /!pixel_height!/g
    };

    return sql
        .replace(affectedTableRegexCache.bbox, 'ST_MakeEnvelope(0,0,0,0)')
        .replace(affectedTableRegexCache.scale_denominator, '0')
        .replace(affectedTableRegexCache.pixel_width, '1')
        .replace(affectedTableRegexCache.pixel_height, '1');
}

module.exports.extractTableNames = function extractTableNames(query) {
@@ -59,25 +59,25 @@ module.exports.handleFloatColumn = function handleFloatColumn(ctx) {
};

/** Count NULL appearances */
-module.exports.countNULLs= function countNULLs(ctx) {
+module.exports.countNULLs = function countNULLs(ctx) {
    return `sum(CASE WHEN (${ctx.column} IS NULL) THEN 1 ELSE 0 END)`;
};

/** Count only infinity (positive and negative) appearances */
module.exports.countInfinites = function countInfinites(ctx) {
-    return `${!ctx.isFloatColumn ? `0` :
+    return `${!ctx.isFloatColumn ? '0' :
        `sum(CASE WHEN (${ctx.column} = 'infinity'::float OR ${ctx.column} = '-infinity'::float) THEN 1 ELSE 0 END)`
    }`;
};

/** Count only NaNs appearances*/
module.exports.countNaNs = function countNaNs(ctx) {
-    return `${!ctx.isFloatColumn ? `0` :
+    return `${!ctx.isFloatColumn ? '0' :
        `sum(CASE WHEN (${ctx.column} = 'NaN'::float) THEN 1 ELSE 0 END)`
    }`;
};

-module.exports.getQueryTopCategories = function(query, column, topN, includeNulls=false) {
+module.exports.getQueryTopCategories = function (query, column, topN, includeNulls = false) {
    const where = includeNulls ? '' : `WHERE ${column} IS NOT NULL`;
    return `
        SELECT ${column} AS category, COUNT(*) AS frequency
@@ -101,7 +101,7 @@ function columnSelector(columns) {
    throw new TypeError(`Bad argument type for columns: ${typeof columns}`);
}

-module.exports.getQuerySample = function(query, sampleProb, limit = null, randomSeed = 0.5, columns = null) {
+module.exports.getQuerySample = function (query, sampleProb, limit = null, randomSeed = 0.5, columns = null) {
    const singleTable = simpleQueryTable(query);
    if (singleTable) {
        return getTableSample(singleTable.table, columns || singleTable.columns, sampleProb, limit, randomSeed);
@@ -121,7 +121,7 @@ module.exports.getQuerySample = function(query, sampleProb, limit = null, random
function getTableSample(table, columns, sampleProb, limit = null, randomSeed = 0.5) {
    const limitClause = limit ? `LIMIT ${limit}` : '';
    sampleProb *= 100;
-    randomSeed *= Math.pow(2, 31) -1;
+    randomSeed *= Math.pow(2, 31) - 1;
    return `
        SELECT ${columnSelector(columns)}
        FROM ${table}
@@ -132,12 +132,12 @@ function getTableSample(table, columns, sampleProb, limit = null, randomSeed = 0
function simpleQueryTable(sql) {
    const basicQuery =
        /\s*SELECT\s+([\*a-z0-9_,\s]+?)\s+FROM\s+((\"[^"]+\"|[a-z0-9_]+)\.)?(\"[^"]+\"|[a-z0-9_]+)\s*;?\s*/i;
-    const unwrappedQuery = new RegExp("^"+basicQuery.source+"$", 'i');
+    const unwrappedQuery = new RegExp('^' + basicQuery.source + '$', 'i');
    // queries for named maps are wrapped like this:
    var wrappedQuery = new RegExp(
-        "^\\s*SELECT\\s+\\*\\s+FROM\\s+\\(" +
+        '^\\s*SELECT\\s+\\*\\s+FROM\\s+\\(' +
        basicQuery.source +
-        "\\)\\s+AS\\s+wrapped_query\\s+WHERE\\s+\\d+=1\\s*$",
+        '\\)\\s+AS\\s+wrapped_query\\s+WHERE\\s+\\d+=1\\s*$',
        'i'
    );
    let match = sql.match(unwrappedQuery);
@@ -147,13 +147,13 @@ function simpleQueryTable(sql) {
    if (match) {
        const columns = match[1];
        const schema = match[3];
        const table = match[4];
        return { table: schema ? `${schema}.${table}` : table, columns };
    }
    return false;
}

-module.exports.getQueryGeometryType = function(query, geometryColumn) {
+module.exports.getQueryGeometryType = function (query, geometryColumn) {
    return `
        SELECT ST_GeometryType(${geometryColumn}) AS geom_type
        FROM (${query}) AS __cdb_query
@@ -162,10 +162,12 @@ module.exports.getQueryGeometryType = function(query, geometryColumn) {
    `;
};

-module.exports.getQueryLimited = function(query, limit=0) {
+function getQueryLimited(query, limit = 0) {
    return `
        SELECT *
        FROM (${query}) AS __cdb_query
        LIMIT ${limit}
    `;
-};
+}
+
+module.exports.getQueryLimited = getQueryLimited;
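
A small usage note on the refactor above (require path assumed): getQueryLimited keeps its previous export and behaviour, it is simply a named function now that wraps a query with a LIMIT clause:

const { getQueryLimited } = require('./query-utils'); // path assumed

getQueryLimited('SELECT * FROM my_table', 5);
// => SELECT * FROM (SELECT * FROM my_table) AS __cdb_query LIMIT 5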