No sql param in generateCacheChannel
parent 415d0c42d5
commit ea3d2124dc
@@ -2,7 +2,6 @@ var _ = require('underscore');
 var step = require('step');
 var QueryTablesApi = require('./api/query_tables_api');
 var PgConnection = require('./backends/pg_connection');
-var crypto = require('crypto');
 var LZMA = require('lzma').LZMA;
 var TemplateMaps = require('./template_maps.js');
 var MapConfigNamedLayersAdapter = require('./models/mapconfig_named_layers_adapter');
@@ -120,106 +119,78 @@ module.exports = function(redisPool) {
         return dbName + ':' + tableNames.join(',');
     };

-    me.generateMD5 = function(data){
-        var hash = crypto.createHash('md5');
-        hash.update(data);
-        return hash.digest('hex');
-    };
-
     me.generateCacheChannel = function(app, req, callback){
         // Build channelCache key
         var dbName = req.params.dbname;
-        var cacheKey = [ dbName ];
-        if ( req.params.token ) cacheKey.push(req.params.token);
-        else if ( req.params.sql ) cacheKey.push( me.generateMD5(req.params.sql) );
-        cacheKey = cacheKey.join(':');
+        var cacheKey = [ dbName, req.params.token ].join(':');

         step(
             function checkCached() {
                 if ( me.channelCache.hasOwnProperty(cacheKey) ) {
-                    callback(null, me.channelCache[cacheKey]);
-                    return;
+                    return callback(null, me.channelCache[cacheKey]);
                 }
                 return null;
             },
             function extractSQL(err) {
-                if ( err ) throw err;
+                assert.ifError(err);

-                if ( req.params.token ) {
-                    // TODO: cached cache channel for token-based access should
-                    // be constructed at renderer cache creation time
-                    // See http://github.com/CartoDB/Windshaft-cartodb/issues/152
-                    if ( ! app.mapStore ) {
-                        throw new Error('missing channel cache for token ' + req.params.token);
-                    }
-                    var next = this;
-                    var mapStore = app.mapStore;
-                    step(
-                        function loadFromStore() {
-                            mapStore.load(req.params.token, this);
-                        },
-                        function getSQL(err, mapConfig) {
-                            if (req.profiler) req.profiler.done('mapStore_load');
-                            if ( err ) throw err;
-                            var sql = [];
-                            _.each(mapConfig.obj().layers, function(lyr) {
-                                sql.push(lyr.options.sql);
-                            });
-                            sql = sql.join(';');
-                            return sql;
-                        },
-                        function finish(err, sql) {
-                            next(err, sql);
-                        }
-                    );
-                    return;
-                }
-
-                if ( ! req.params.sql ) {
-                    return null; // no sql
-                }
-
-                // We have sql, and no token...
-
-                // strip out windshaft/mapnik inserted sql if present
-                var sql = req.params.sql.match(/^\((.*)\)\sas\scdbq$/);
-                sql = (sql !== null) ? sql[1] : req.params.sql;
-
-                return sql;
+                var that = this;
+                // TODO: cached cache channel for token-based access should
+                // be constructed at renderer cache creation time
+                // See http://github.com/CartoDB/Windshaft-cartodb/issues/152
+                if ( ! app.mapStore ) {
+                    throw new Error('missing channel cache for token ' + req.params.token);
+                }
+                var next = this;
+                var mapStore = app.mapStore;
+                step(
+                    function loadFromStore() {
+                        mapStore.load(req.params.token, this);
+                    },
+                    function getSQL(err, mapConfig) {
+                        if (req.profiler) {
+                            req.profiler.done('mapStore_load');
+                        }
+                        assert.ifError(err);
+
+                        return mapConfig.obj().layers.map(function(lyr) {
+                            return lyr.options.sql;
+                        }).join(';');
+                    },
+                    function finish(err, sql) {
+                        next(err, sql);
+                    }
+                );
             },
             function findAffectedTables(err, sql) {
-                if ( err ) throw err;
+                assert.ifError(err);
                 if ( ! sql ) {
                     if ( ! req.params.table ) {
                         throw new Error("this request doesn't need an X-Cache-Channel generated");
                     }
                     return [req.params.table];
                 }

                 queryTablesApi.getAffectedTablesInQuery(cdbRequest.userByReq(req), sql, this); // in addCacheChannel
             },
             function buildCacheChannel(err, tableNames) {
-                if ( err ) throw err;
+                assert.ifError(err);
+
                 if (req.profiler && ! req.params.table ) {
                     req.profiler.done('affectedTables');
                 }

-                var dbName = req.params.dbname;
                 var cacheChannel = me.buildCacheChannel(dbName,tableNames);
                 // store for caching from me.generateCacheChannel
                 // (not worth when table was specified in params)
                 if ( ! req.params.table ) {
                     me.channelCache[cacheKey] = cacheChannel;
                 }
                 return cacheChannel;
             },
             function finish(err, cacheChannel) {
                 callback(err, cacheChannel);
             }
         );
     };

     // Set the cache chanel info to invalidate the cache on the frontend server
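
For reference, a minimal standalone sketch (not part of the commit) of what the key change means for the channel cache: the key is now always dbname:token, whereas the removed branch fell back to an MD5 of req.params.sql when no token was given. The req literal and its values below are hypothetical stand-ins for the Express request params used above; generateMD5 is copied from the removed helper.

    var crypto = require('crypto');

    // Hypothetical request params, shaped like the ones used in the diff.
    var req = { params: { dbname: 'my_db', token: 'a1b2c3', sql: 'SELECT * FROM t' } };

    // Removed helper: MD5 hex digest of a string.
    function generateMD5(data) {
        var hash = crypto.createHash('md5');
        hash.update(data);
        return hash.digest('hex');
    }

    // New behaviour: key on database name and layergroup token only.
    var newKey = [ req.params.dbname, req.params.token ].join(':');

    // Old behaviour: fall back to md5(sql) when there is no token.
    var oldKey = [ req.params.dbname ];
    if ( req.params.token ) oldKey.push(req.params.token);
    else if ( req.params.sql ) oldKey.push( generateMD5(req.params.sql) );
    oldKey = oldKey.join(':');

    console.log(newKey); // my_db:a1b2c3
    console.log(oldKey); // my_db:a1b2c3 (would be my_db:<md5 of sql> without a token)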