Merge pull request #685 from CartoDB/feature/ch145435/reduce-sql-api-log-verbosity-pre

Be able to run tests in development env w/o requiring a different Dockerfile image
This commit is contained in:
Daniel G. Aubert 2021-04-08 11:46:14 +02:00 committed by GitHub
commit 4141ce0ac7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
32 changed files with 7521 additions and 7555 deletions

View File

@ -83,6 +83,8 @@ module.exports.batch_capacity_fixed_amount = 4;
// It will use 1 as min.
// If no template is provided it will default to 'fixed' strategy.
module.exports.batch_capacity_http_url_template = 'http://<%= dbhost %>:9999/load';
// Default database number in redis for batch queries
module.exports.batch_db = 5;
// Max database connections in the pool
// Subsequent connections will wait for a free slot.
// NOTE: not used by OGR-mediated accesses
@ -178,6 +180,7 @@ if (process.env.NODE_ENV === 'test') {
module.exports.redisReapIntervalMillis = 1;
module.exports.db_pubuser = 'testpublicuser';
module.exports.batch_query_timeout = 5 * 1000; // 5 seconds in milliseconds
module.exports.batch_db = 6;
module.exports.redisIdleTimeoutMillis = 1;
module.exports.redisReapIntervalMillis = 1;
module.exports.db_base_name = 'cartodb_test_user_<%= user_id %>_db';

View File

@ -78,6 +78,8 @@ module.exports.batch_capacity_fixed_amount = 4;
// It will use 1 as min.
// If no template is provided it will default to 'fixed' strategy.
module.exports.batch_capacity_http_url_template = 'http://<%= dbhost %>:9999/load';
// Default database number in redis for batch queries
module.exports.batch_db = 5;
// Max database connections in the pool
// Subsequent connections will wait for a free slot.
// NOTE: not used by OGR-mediated accesses

View File

@ -79,6 +79,8 @@ module.exports.batch_capacity_fixed_amount = 4;
// It will use 1 as min.
// If no template is provided it will default to 'fixed' strategy.
module.exports.batch_capacity_http_url_template = 'http://<%= dbhost %>:9999/load';
// Default database number in redis for batch queries
module.exports.batch_db = 5;
// Max database connections in the pool
// Subsequent connections will wait for a free slot.
// NOTE: not used by OGR-mediated accesses

View File

@ -79,6 +79,8 @@ module.exports.batch_capacity_fixed_amount = 4;
// It will use 1 as min.
// If no template is provided it will default to 'fixed' strategy.
module.exports.batch_capacity_http_url_template = 'http://<%= dbhost %>:9999/load';
// Default database number in redis for batch queries
module.exports.batch_db = 5;
// Max database connections in the pool
// Subsequent connections will wait for a free slot.
// NOTE: not used by OGR-mediated accesses

View File

@ -76,6 +76,8 @@ module.exports.batch_capacity_fixed_amount = 4;
// It will use 1 as min.
// If no template is provided it will default to 'fixed' strategy.
module.exports.batch_capacity_http_url_template = 'http://<%= dbhost %>:9999/load';
// Default database number in redis for batch queries
module.exports.batch_db = 6;
// Max database connections in the pool
// Subsequent connections will wait for a free slot.
// NOTE: not used by OGR-mediated accesses

View File

@ -10,7 +10,7 @@ and developers.
Redis Hash: `batch:jobs:{UUID}`.
Redis DB: 5.
Redis DB: global.settings.batch_db || 5.
It stores the job definition, the user, and some metadata like the final status, the failure reason, and so.
@ -18,7 +18,7 @@ It stores the job definition, the user, and some metadata like the final status,
Redis List: `batch:queue:{username}`.
Redis DB: 5.
Redis DB: global.settings.batch_db || 5.
It stores a pending list of jobs per user. It points to a job definition with the `{UUID}`.

View File

@ -1,7 +1,7 @@
'use strict';
var REDIS_PREFIX = 'batch:jobs:';
var REDIS_DB = 5;
var REDIS_DB = global.settings.batch_db || 5;
var JobStatus = require('./job-status');
var queue = require('queue-async');
@ -180,7 +180,7 @@ JobBackend.prototype.save = function (job, callback) {
};
var WORK_IN_PROGRESS_JOB = {
DB: 5,
DB: global.settings.batch_db || 5,
PREFIX_USER: 'batch:wip:user:',
USER_INDEX_KEY: 'batch:wip:users'
};

View File

@ -11,7 +11,7 @@ function JobQueue (metadataBackend, jobPublisher, logger) {
module.exports = JobQueue;
var QUEUE = {
DB: 5,
DB: global.settings.batch_db || 5,
PREFIX: 'batch:queue:',
INDEX: 'batch:indexes:queue'
};

View File

@ -6,7 +6,7 @@ var Profiler = require('step-profiler');
var _ = require('underscore');
var REDIS_LIMITS = {
DB: 5,
DB: global.settings.batch_db || 5,
PREFIX: 'limits:batch:' // + username
};

View File

@ -1,7 +1,7 @@
'use strict';
var REDIS_DISTLOCK = {
DB: 5,
DB: global.settings.batch_db || 5,
PREFIX: 'batch:locks:'
};

View File

@ -6,11 +6,11 @@ var forever = require('../util/forever');
var QUEUE = {
OLD: {
DB: 5,
DB: global.settings.batch_db || 5,
PREFIX: 'batch:queues:' // host
},
NEW: {
DB: 5,
DB: global.settings.batch_db || 5,
PREFIX: 'batch:queue:' // user
}
};

View File

@ -21,7 +21,7 @@ const redisPool = new RedisPool({
});
const metadata = cartodbRedis({ pool: redisPool });
const JOBS = {
DB: 5,
DB: global.settings.batch_db || 5,
PREFIX: 'batch:jobs'
};

View File

@ -257,7 +257,6 @@ OgrFormat.prototype.sendResponse = function (opts, callback) {
},
function cleanup () {
// unlink dump file (sync to avoid race condition)
console.log('removing', dumpfile);
try { fs.unlinkSync(dumpfile); } catch (e) {
if (e.code !== 'ENOENT') {
console.log('Could not unlink dumpfile ' + dumpfile + ': ' + e);

47
package-lock.json generated
View File

@ -1582,9 +1582,9 @@
"dev": true
},
"denque": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz",
"integrity": "sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ=="
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/denque/-/denque-1.5.0.tgz",
"integrity": "sha512-CYiCSgIF1p6EUByQPlGkKnP1M9g0ZV3qMIrqMqZqdwazygIA/YP2vrbcyl1h/WppKJTdl1F85cXIle+394iDAQ=="
},
"depd": {
"version": "1.1.2",
@ -5622,11 +5622,11 @@
}
},
"pgpass": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.2.tgz",
"integrity": "sha1-Knu0G2BltnkH6R2hsHwYR8h3swY=",
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.4.tgz",
"integrity": "sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w==",
"requires": {
"split": "^1.0.0"
"split2": "^3.1.1"
}
},
"picomatch": {
@ -5781,9 +5781,9 @@
"integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU="
},
"postgres-date": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.4.tgz",
"integrity": "sha512-bESRvKVuTrjoBluEcpv2346+6kgB7UlnqWZsnbnCccTNq/pqfj1j6oBaN5+b/NrDXepYUT/HKadqv3iS9lJuVA=="
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q=="
},
"postgres-interval": {
"version": "1.2.0",
@ -5992,20 +5992,20 @@
}
},
"redis": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/redis/-/redis-3.0.2.tgz",
"integrity": "sha512-PNhLCrjU6vKVuMOyFu7oSP296mwBkcE6lrAjruBYG5LgdSqtRBoVQIylrMyVZD/lkF24RSNNatzvYag6HRBHjQ==",
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/redis/-/redis-3.1.0.tgz",
"integrity": "sha512-//lAOcEtNIKk2ekZibes5oyWKYUVWMvMB71lyD/hS9KRePNkB7AU3nXGkArX6uDKEb2N23EyJBthAv6pagD0uw==",
"requires": {
"denque": "^1.4.1",
"redis-commands": "^1.5.0",
"denque": "^1.5.0",
"redis-commands": "^1.7.0",
"redis-errors": "^1.2.0",
"redis-parser": "^3.0.0"
}
},
"redis-commands": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz",
"integrity": "sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg=="
"version": "1.7.0",
"resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.7.0.tgz",
"integrity": "sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ=="
},
"redis-errors": {
"version": "1.2.0",
@ -6474,14 +6474,6 @@
"integrity": "sha1-4qMDI2ysVLBAMfp6WnnH5wHfhS8=",
"dev": true
},
"split": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz",
"integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==",
"requires": {
"through": "2"
}
},
"split2": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/split2/-/split2-3.1.1.tgz",
@ -6803,7 +6795,8 @@
"through": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU="
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=",
"dev": true
},
"through2": {
"version": "3.0.1",

View File

@ -73,7 +73,9 @@
"mockdate": "^2.0.2",
"nodemon": "^2.0.6",
"nyc": "^15.0.0",
"pg": "github:cartodb/node-postgres#6.4.2-cdb2",
"pino-pretty": "^4.0.0",
"redis": "^3.1.0",
"shapefile": "0.3.0",
"sinon": "^9.0.0",
"sqlite3": "^4.2.0"

@ -1 +1 @@
Subproject commit cb59185b1ad0654b0e805741aaf29d41ec688357
Subproject commit 31278edaa9aaaf1c01d30ca32ea6666240af21c3

View File

@ -35,7 +35,7 @@ describe('batch module', function () {
after(function (done) {
this.batch.stop();
redisUtils.clean('batch:*', done);
redisUtils.clean(global.settings.batch_db, 'batch:*', done);
});
function createJob (sql, done) {

View File

@ -14,12 +14,12 @@ describe('batch query statement_timeout limit', function () {
this.batchTestClient = new BatchTestClient();
this.batchQueryTimeout = global.settings.batch_query_timeout;
global.settings.batch_query_timeout = 15000;
metadataBackend.redisCmd(5, 'HMSET', ['limits:batch:vizzuality', 'timeout', 100], done);
metadataBackend.redisCmd(global.settings.batch_db, 'HMSET', ['limits:batch:vizzuality', 'timeout', 100], done);
});
before(dbUtils.resetPgBouncerConnections);
after(function (done) {
global.settings.batch_query_timeout = this.batchQueryTimeout;
redisUtils.clean('limits:batch:*', function () {
redisUtils.clean(global.settings.batch_db, 'limits:batch:*', function () {
this.batchTestClient.drain(done);
}.bind(this));
});

View File

@ -38,7 +38,7 @@ describe('job query limit', function () {
}
after(function (done) {
redisUtils.clean('batch:*', done);
redisUtils.clean(global.settings.batch_db, 'batch:*', done);
});
it('POST /api/v2/sql/job with a invalid query size should respond with 400 query too long', function (done) {

View File

@ -25,7 +25,7 @@ describe('job module', function () {
var job = {};
after(function (done) {
redisUtils.clean('batch:*', done);
redisUtils.clean(global.settings.batch_db, 'batch:*', done);
});
it('POST /api/v2/sql/job should respond with 200 and the created job', function (done) {

View File

@ -20,7 +20,7 @@ describe('max queued jobs', function () {
after(function (done) {
global.settings.batch_max_queued_jobs = this.batch_max_queued_jobs;
redisUtils.clean('batch:*', done);
redisUtils.clean(global.settings.batch_db, 'batch:*', done);
});
function createJob (server, status, callback) {

View File

@ -18,18 +18,17 @@ require('../helper');
var assert = require('../support/assert');
var step = require('step');
var server = require('../../lib/server');
describe('timeout', function () {
// See https://github.com/CartoDB/CartoDB-SQL-API/issues/128
it('after configured milliseconds', function (done) {
var testTimeout = 10;
// console.log("settings:"); console.dir(global.settings);
var testTimeout = 1;
var timeoutBackup = global.settings.node_socket_timeout;
global.settings.node_socket_timeout = testTimeout;
var server = require('../../lib/server')();
step(
function sendLongQuery () {
assert.response(server, {
assert.response(server(), {
url: '/api/v1/sql?q=SELECT+count(*)+FROM+generate_series(1,100000)',
method: 'GET',
headers: { host: 'vizzuality.localhost' }

View File

@ -1,14 +1,13 @@
'use strict';
const util = require('util');
// const path = require('path');
const exec = util.promisify(require('child_process').exec);
const redis = require('redis');
const pg = require('pg');
const fs = require('fs').promises;
if (!process.env.NODE_ENV) {
console.error('Please set "NODE_ENV" variable, e.g.: "NODE_ENV=test"');
process.exit(1);
}
let configFileName = process.env.NODE_ENV;
if (process.env.CARTO_SQL_API_ENV_BASED_CONF) {
// we override the file with the one with env vars
@ -18,6 +17,8 @@ if (process.env.CARTO_SQL_API_ENV_BASED_CONF) {
const environment = require(`../config/environments/${configFileName}.js`);
const REDIS_PORT = environment.redis_port;
const REDIS_HOST = environment.redis_host;
const PGHOST = environment.db_host;
const PGPORT = environment.db_port;
const TEST_USER_ID = 1;
const TEST_USER = environment.db_user.replace('<%= user_id %>', TEST_USER_ID);
@ -25,118 +26,168 @@ const TEST_PASSWORD = environment.db_user_pass.replace('<%= user_id %>', TEST_US
const PUBLIC_USER = environment.db_pubuser;
const PUBLIC_USER_PASSWORD = environment.db_pubuser_pass;
const TEST_DB = environment.db_base_name.replace('<%= user_id %>', TEST_USER_ID);
const PGHOST = environment.db_host;
const PGPORT = environment.db_port;
async function query ({ db = 'postgres', sql }) {
const client = new pg.Client({
host: PGHOST,
port: PGPORT,
user: 'postgres',
database: db
});
await new Promise((resolve, reject) => client.connect((err) => err ? reject(err) : resolve()));
const res = await new Promise((resolve, reject) => client.query(sql, (err, res) => err ? reject(err) : resolve(res)));
await new Promise((resolve, reject) => client.end((err) => err ? reject(err) : resolve()));
return res;
}
async function dropDatabase () {
await exec(`dropdb -p "${PGPORT}" -h "${PGHOST}" --if-exists ${TEST_DB}`, {
env: Object.assign({ PGUSER: 'postgres' }, process.env)
});
await query({ sql: `DROP DATABASE IF EXISTS ${TEST_DB}` });
}
async function createDatabase () {
await exec(`createdb -p "${PGPORT}" -h "${PGHOST}" -T template_postgis -EUTF8 "${TEST_DB}"`, {
env: Object.assign({ PGUSER: 'postgres' }, process.env)
});
await query({ sql: `CREATE DATABASE ${TEST_DB} TEMPLATE template_postgis ENCODING UTF8` });
}
async function createDatabaseExtension () {
await exec(`psql -p "${PGPORT}" -h "${PGHOST}" -c "CREATE EXTENSION IF NOT EXISTS cartodb CASCADE;" ${TEST_DB}`, {
env: Object.assign({ PGUSER: 'postgres' }, process.env)
});
await query({ db: TEST_DB, sql: 'CREATE EXTENSION IF NOT EXISTS cartodb CASCADE' });
}
async function currentSearchPath () {
const res = await query({ db: TEST_DB, sql: 'SELECT current_setting(\'search_path\')' });
return res.rows[0].current_setting;
}
async function populateDatabase () {
const searchPath = await currentSearchPath();
const filenames = [
'test',
'populated_places_simple_reduced',
'quota_mock'
].map(filename => `${__dirname}/support/sql/${filename}.sql`);
const populateDatabaseCmd = `
cat ${filenames.join(' ')} |
sed -e "s/:PUBLICUSER/${PUBLIC_USER}/g" |
sed -e "s/:PUBLICPASS/${PUBLIC_USER_PASSWORD}/g" |
sed -e "s/:TESTUSER/${TEST_USER}/g" |
sed -e "s/:TESTPASS/${TEST_PASSWORD}/g" |
PGOPTIONS='--client-min-messages=WARNING' psql -h "${PGHOST}" -p "${PGPORT}" -q -v ON_ERROR_STOP=1 ${TEST_DB}
`;
for (const filename of filenames) {
const content = await fs.readFile(filename, 'utf-8');
const sql = content
.replace(/:SEARCHPATH/g, searchPath)
.replace(/:PUBLICUSER/g, PUBLIC_USER)
.replace(/:PUBLICPASS/g, PUBLIC_USER_PASSWORD)
.replace(/:TESTUSER/g, TEST_USER)
.replace(/:TESTPASS/g, TEST_PASSWORD);
await exec(populateDatabaseCmd, {
env: Object.assign({ PGUSER: 'postgres' }, process.env)
});
await query({ db: TEST_DB, sql });
}
}
async function vacuumAnalyze () {
const tables = [
'populated_places_simple_reduced',
'untitle_table_4',
'scoped_table_1',
'private_table',
'cpg_test',
'copy_endpoints_test',
'pgtypes_table'
];
await query({ db: TEST_DB, sql: `VACUUM ANALYZE ${tables.join(', ')}` });
}
async function populateRedis () {
const commands = `
HMSET rails:users:vizzuality \
id ${TEST_USER_ID} \
database_name "${TEST_DB}" \
database_host "${PGHOST}" \
map_key 1234
const client = redis.createClient({ host: REDIS_HOST, port: REDIS_PORT, db: 5 });
HMSET rails:users:cartodb250user \
id ${TEST_USER_ID} \
database_name "${TEST_DB}" \
database_host "${PGHOST}" \
database_password "${TEST_PASSWORD}" \
map_key 1234
const commands = client.multi()
.hmset('rails:users:vizzuality', [
'id', TEST_USER_ID,
'database_name', TEST_DB,
'database_host', PGHOST,
'map_key', '1234'
])
.hmset('rails:users:cartodb250user', [
'id', TEST_USER_ID,
'database_name', TEST_DB,
'database_host', PGHOST,
'database_password', TEST_PASSWORD,
'map_key', '1234'
])
.hmset('api_keys:vizzuality:1234', [
'user', 'vizzuality',
'type', 'master',
'grants_sql', 'true',
'database_role', TEST_USER,
'database_password', TEST_PASSWORD
])
.hmset('api_keys:vizzuality:default_public', [
'user', 'vizzuality',
'type', 'default',
'grants_sql', 'true',
'database_role', PUBLIC_USER,
'database_password', PUBLIC_USER_PASSWORD
])
.hmset('api_keys:vizzuality:regular1', [
'user', 'vizzuality',
'type', 'regular',
'grants_sql', 'true',
'database_role', 'regular_1',
'database_password', 'regular1'
])
.hmset('api_keys:vizzuality:regular2', [
'user', 'vizzuality',
'type', 'regular',
'grants_sql', 'true',
'database_role', 'regular_2',
'database_password', 'regular2'
])
.hmset('api_keys:cartodb250user:1234', [
'user', 'cartodb250user',
'type', 'master',
'grants_sql', 'true',
'database_role', TEST_USER,
'database_password', TEST_PASSWORD
])
.hmset('api_keys:cartodb250user:default_public', [
'user', 'cartodb250user',
'type', 'default',
'grants_sql', 'true',
'database_role', PUBLIC_USER,
'database_password', PUBLIC_USER_PASSWORD
]);
HMSET api_keys:vizzuality:1234 \
user "vizzuality" \
type "master" \
grants_sql "true" \
database_role "${TEST_USER}" \
database_password "${TEST_PASSWORD}"
await new Promise((resolve, reject) => commands.exec((err) => err ? reject(err) : resolve()));
HMSET api_keys:vizzuality:default_public \
user "vizzuality" \
type "default" \
grants_sql "true" \
database_role "${PUBLIC_USER}" \
database_password "${PUBLIC_USER_PASSWORD}"
client.select('3');
HMSET api_keys:vizzuality:regular1 \
user "vizzuality" \
type "regular" \
grants_sql "true" \
database_role "regular_1" \
database_password "regular1"
const oauthCommands = client.multi()
.hmset('rails:oauth_access_tokens:l0lPbtP68ao8NfStCiA3V3neqfM03JKhToxhUQTR', [
'consumer_key', 'fZeNGv5iYayvItgDYHUbot1Ukb5rVyX6QAg8GaY2',
'consumer_secret', 'IBLCvPEefxbIiGZhGlakYV4eM8AbVSwsHxwEYpzx',
'access_token_token', 'l0lPbtP68ao8NfStCiA3V3neqfM03JKhToxhUQTR',
'access_token_secret', '22zBIek567fMDEebzfnSdGe8peMFVFqAreOENaDK',
'user_id', TEST_USER_ID,
'time', 'sometime'
]);
HMSET api_keys:vizzuality:regular2 \
user "vizzuality" \
type "regular" \
grants_sql "true" \
database_role "regular_2" \
database_password "regular2"
await new Promise((resolve, reject) => oauthCommands.exec((err) => err ? reject(err) : resolve()));
await new Promise((resolve, reject) => client.quit((err) => err ? reject(err) : resolve()));
}
HMSET api_keys:cartodb250user:1234 \
user "cartodb250user" \
type "master" \
grants_sql "true" \
database_role "${TEST_USER}" \
database_password "${TEST_PASSWORD}"
async function unpopulateRedis () {
const client = redis.createClient({ host: REDIS_HOST, port: REDIS_PORT, db: 5 });
HMSET api_keys:cartodb250user:default_public \
user "cartodb250user" \
type "default" \
grants_sql "true" \
database_role "${PUBLIC_USER}" \
database_password "${PUBLIC_USER_PASSWORD}"
`;
const commands = client.multi()
.del('rails:users:vizzuality')
.del('rails:users:cartodb250user')
.del('api_keys:vizzuality:1234')
.del('api_keys:vizzuality:default_public')
.del('api_keys:vizzuality:regular1')
.del('api_keys:vizzuality:regular2')
.del('api_keys:cartodb250user:1234')
.del('api_keys:cartodb250user:default_public')
.del('rails:oauth_access_tokens:l0lPbtP68ao8NfStCiA3V3neqfM03JKhToxhUQTR');
const oauthCommands = `
HMSET rails:oauth_access_tokens:l0lPbtP68ao8NfStCiA3V3neqfM03JKhToxhUQTR \
consumer_key fZeNGv5iYayvItgDYHUbot1Ukb5rVyX6QAg8GaY2 \
consumer_secret IBLCvPEefxbIiGZhGlakYV4eM8AbVSwsHxwEYpzx \
access_token_token l0lPbtP68ao8NfStCiA3V3neqfM03JKhToxhUQTR \
access_token_secret 22zBIek567fMDEebzfnSdGe8peMFVFqAreOENaDK \
user_id ${TEST_USER_ID} \
time sometime
`;
await exec(`echo "${commands}" | redis-cli -h ${REDIS_HOST} -p ${REDIS_PORT} -n 5`);
await exec(`echo "${oauthCommands}" | redis-cli -h ${REDIS_HOST} -p ${REDIS_PORT} -n 3`);
await new Promise((resolve, reject) => commands.exec((err) => err ? reject(err) : resolve()));
await new Promise((resolve, reject) => client.quit((err) => err ? reject(err) : resolve()));
}
async function main (args) {
@ -145,13 +196,17 @@ async function main (args) {
try {
switch (args[0]) {
case 'setup':
await unpopulateRedis();
await populateRedis();
await dropDatabase();
await createDatabase();
await createDatabaseExtension();
await populateDatabase();
await vacuumAnalyze();
break;
case 'teardown':
await unpopulateRedis();
await dropDatabase();
break;
default:
throw new Error('Missing "mode" argument. Valid ones: "setup" or "teardown"');

View File

@ -38,7 +38,7 @@ describe('job backend', function () {
var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);
after(function (done) {
redisUtils.clean('batch:*', done);
redisUtils.clean(global.settings.batch_db, 'batch:*', done);
});
it('.create() should persist a job', function (done) {
@ -159,7 +159,7 @@ describe('job backend', function () {
it('.listWorkInProgressJobByUser() should retrieve WIP jobs of given user', function (done) {
var testStepsQueue = queue(1);
testStepsQueue.defer(redisUtils.clean, 'batch:wip:user:*');
testStepsQueue.defer(redisUtils.clean, global.settings.batch_db, 'batch:wip:user:*');
testStepsQueue.defer(jobBackend.addWorkInProgressJob.bind(jobBackend), 'vizzuality', 'wadus');
testStepsQueue.defer(jobBackend.listWorkInProgressJobByUser.bind(jobBackend), 'vizzuality');

View File

@ -82,7 +82,7 @@ describe('job canceller', function () {
var jobCanceller = new JobCanceller();
after(function (done) {
redisUtils.clean('batch:*', done);
redisUtils.clean(global.settings.batch_db, 'batch:*', done);
});
it('.cancel() should cancel a job', function (done) {

View File

@ -30,7 +30,7 @@ describe('job queue', function () {
});
afterEach(function (done) {
redisUtils.clean('batch:*', done);
redisUtils.clean(global.settings.batch_db, 'batch:*', done);
});
it('should find queues for one user', function (done) {
@ -120,7 +120,7 @@ describe('job queue', function () {
assert.strictEqual(queuesFromIndex.length, 1);
assert.ok(queuesFromIndex.indexOf(data.user) >= 0);
redisUtils.clean('batch:*', done);
redisUtils.clean(global.settings.batch_db, 'batch:*', done);
});
});
});
@ -169,7 +169,7 @@ describe('job queue', function () {
assert.ok(queuesFromIndex.indexOf(jobVizzuality.user) >= 0);
assert.ok(queuesFromIndex.indexOf(jobWadus.user) >= 0);
redisUtils.clean('batch:*', done);
redisUtils.clean(global.settings.batch_db, 'batch:*', done);
});
});
});

View File

@ -50,8 +50,8 @@ describe('job runner', function () {
var jobRunner = new JobRunner(jobService, jobQueue, queryRunner, metadataBackend, statsdClient);
after(function (done) {
redisUtils.clean('batch:*', function () {
redisUtils.clean('limits:batch:*', done);
redisUtils.clean(global.settings.batch_db, 'batch:*', function () {
redisUtils.clean(global.settings.batch_db, 'limits:batch:*', done);
});
});

View File

@ -82,7 +82,7 @@ describe('job service', function () {
var jobService = new JobService(jobBackend, jobCanceller);
after(function (done) {
redisUtils.clean('batch:*', done);
redisUtils.clean(global.settings.batch_db, 'batch:*', done);
});
it('.get() should return a job', function (done) {

View File

@ -160,7 +160,7 @@ BatchTestClient.prototype.cancelJob = function (jobId, override, callback) {
BatchTestClient.prototype.drain = function (callback) {
this.batch.stop(function () {
return redisUtils.clean('batch:*', callback);
return redisUtils.clean(global.settings.batch_db, 'batch:*', callback);
});
};

View File

@ -11,8 +11,8 @@ var redisConfig = {
};
var metadataBackend = require('cartodb-redis')(redisConfig);
module.exports.clean = function clean (pattern, callback) {
metadataBackend.redisCmd(5, 'KEYS', [pattern], function (err, keys) {
module.exports.clean = function clean (db, pattern, callback) {
metadataBackend.redisCmd(db, 'KEYS', [pattern], function (err, keys) {
if (err) {
return callback(err);
}
@ -21,7 +21,7 @@ module.exports.clean = function clean (pattern, callback) {
return callback();
}
metadataBackend.redisCmd(5, 'DEL', keys, callback);
metadataBackend.redisCmd(db, 'DEL', keys, callback);
});
};

File diff suppressed because it is too large Load Diff

View File

@ -1,90 +0,0 @@
'use strict';

// Unit tests for the API-key authentication backend (lib/auth/apikey):
// credential detection (hasCredentials) and credential verification
// (verifyCredentials).
// NOTE(review): both suites are marked describe.skip, so none of these
// tests currently run — confirm whether they are superseded elsewhere
// before re-enabling or deleting them.
require('../helper');

var ApikeyAuth = require('../../lib/auth/apikey');
var assert = require('assert');

describe.skip('has credentials', function () {
    // Requests that must NOT be treated as carrying credentials:
    // api_key/map_key absent or null-ish, in either query or body.
    var noCredentialsRequests = [
        {
            des: 'there is not api_key/map_key in the request query',
            req: { query: {} }
        },
        {
            des: 'api_key is undefined`ish in the request query',
            req: { query: { api_key: null } }
        },
        {
            des: 'map_key is undefined`ish in the request query',
            req: { query: { map_key: null } }
        },
        {
            des: 'there is not api_key/map_key in the request body',
            req: { query: {}, body: {} }
        },
        {
            des: 'api_key is undefined`ish in the request body',
            req: { query: {}, body: { api_key: null } }
        },
        {
            des: 'map_key is undefined`ish in the request body',
            req: { query: {}, body: { map_key: null } }
        }
    ];

    noCredentialsRequests.forEach(function (request) {
        it('has no credentials if ' + request.des, function () {
            testCredentials(request.req, false);
        });
    });

    // Requests that DO carry a credential: a non-empty api_key or map_key
    // present in the query string or in the request body.
    var credentialsRequests = [
        {
            des: 'there is api_key in the request query',
            req: { query: { api_key: 'foo' } }
        },
        {
            des: 'there is api_key in the request query',
            req: { query: { map_key: 'foo' } }
        },
        {
            des: 'there is api_key in the request body',
            req: { query: {}, body: { api_key: 'foo' } }
        },
        {
            des: 'there is map_key in the request body',
            req: { query: {}, body: { map_key: 'foo' } }
        }
    ];

    credentialsRequests.forEach(function (request) {
        it('has credentials if ' + request.des, function () {
            testCredentials(request.req, true);
        });
    });

    // Helper: asserts ApikeyAuth#hasCredentials() returns the expected
    // boolean for the given fake request object.
    function testCredentials (req, hasCredentials) {
        var apiKeyAuth = new ApikeyAuth(req);
        assert.strictEqual(apiKeyAuth.hasCredentials(), hasCredentials);
    }
});

describe.skip('verifyCredentials', function () {
    it('callbacks with true value when request api_key is the same', function (done) {
        testVerifyCredentials({ query: { api_key: 'foo' } }, { apiKey: 'foo' }, true, done);
    });

    it('callbacks with false value when request api_key is different', function (done) {
        testVerifyCredentials({ query: { api_key: 'foo' } }, { apiKey: 'bar' }, false, done);
    });

    // Helper: runs ApikeyAuth#verifyCredentials(options, cb) and asserts
    // the callback reports no error and the expected validity flag.
    function testVerifyCredentials (req, options, shouldBeValid, done) {
        var apiKeyAuth = new ApikeyAuth(req);
        apiKeyAuth.verifyCredentials(options, function (err, validCredentials) {
            assert.ifError(err);
            assert.strictEqual(validCredentials, shouldBeValid);
            done();
        });
    }
});