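// Acceptance tests for batch multiquery jobs: a job bundles several SQL
// queries and ends up 'done' when every query succeeds, or 'failed' as soon
// as one of them errors.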
'use strict';
require('../../helper');

var assert = require('../../support/assert');
var redisUtils = require('../../support/redis_utils');
var queue = require('queue-async');

var redisConfig = {
    host: global.settings.redis_host,
    port: global.settings.redis_port,
    max: global.settings.redisPool,
    idleTimeoutMillis: global.settings.redisIdleTimeoutMillis,
    reapIntervalMillis: global.settings.redisReapIntervalMillis
};
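// Wire up the batch service's collaborators: the Redis metadata backend,
// a StatsD client for metrics, and the queue/publisher/indexer stack the
// job backend depends on.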
var metadataBackend = require('cartodb-redis')(redisConfig);
var StatsD = require('node-statsd').StatsD;
var statsdClient = new StatsD(global.settings.statsd);

var BATCH_SOURCE = '../../../batch/';
var batchFactory = require(BATCH_SOURCE + 'index');

var _ = require('underscore');
var RedisPool = require('redis-mpool');
var jobStatus = require(BATCH_SOURCE + 'job_status');
var JobPublisher = require(BATCH_SOURCE + 'job_publisher');
var JobQueue = require(BATCH_SOURCE + 'job_queue');
var UserIndexer = require(BATCH_SOURCE + 'user_indexer');
var JobBackend = require(BATCH_SOURCE + 'job_backend');
var JobFactory = require(BATCH_SOURCE + 'models/job_factory');
// Clone redisConfig before extending it, so the shared config object is not
// mutated with the publisher pool's name.
var redisPoolPublisher = new RedisPool(_.extend({}, redisConfig, { name: 'batch-publisher' }));
var jobPublisher = new JobPublisher(redisPoolPublisher);
var jobQueue = new JobQueue(metadataBackend, jobPublisher);
var userIndexer = new UserIndexer(metadataBackend);
var jobBackend = new JobBackend(metadataBackend, jobQueue, userIndexer);

var USER = 'vizzuality';
var HOST = 'localhost';
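// Fire-and-forget job creation: the empty callback is intentional, since each
// test asserts the outcome through the batch 'job:done'/'job:failed' events.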
function createJob(job) {
    jobBackend.create(job, function () {});
}
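// Fetches a job by id, forwarding any backend error to the callback.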
function getJob(job_id, callback) {
    jobBackend.get(job_id, function (err, job) {
        if (err) {
            return callback(err);
        }
        callback(null, job);
    });
}
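// Returns an event listener that ignores events for other jobs; on a match it
// re-fetches the job and asserts its persisted status is the expected one.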
function assertJob(job, expectedStatus, done) {
    return function (job_id) {
        if (job.job_id === job_id) {
            getJob(job_id, function (err, jobDone) {
                if (err) {
                    return done(err);
                }
                assert.equal(jobDone.status, expectedStatus);
                done();
            });
        }
    };
}
describe('batch multiquery', function () {
    var batch = batchFactory(metadataBackend, redisConfig, statsdClient);
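    // Boot the batch service once for the whole suite; tear it down and wipe
    // the batch:* keys from Redis after the last test.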
    before(function (done) {
        batch.start();
        batch.on('ready', done);
    });

    after(function (done) {
        batch.removeAllListeners();
        batch.stop();
        redisUtils.clean('batch:*', done);
    });
    it('should perform one multiquery job with two queries', function (done) {
        var queries = [
            'select pg_sleep(0)',
            'select pg_sleep(0)'
        ];

        var job = JobFactory.create({ user: USER, host: HOST, query: queries });
        var assertCallback = assertJob(job.data, jobStatus.DONE, done);

        batch.on('job:done', assertCallback);

        createJob(job.data);
    });
    it('should perform one multiquery job with two queries and fail on the last one', function (done) {
        var queries = [
            'select pg_sleep(0)',
            'select shouldFail()'
        ];

        var job = JobFactory.create({ user: USER, host: HOST, query: queries });
        var assertCallback = assertJob(job.data, jobStatus.FAILED, done);

        batch.on('job:failed', assertCallback);

        createJob(job.data);
    });
    it('should perform one multiquery job with three queries and fail on the last one', function (done) {
        var queries = [
            'select pg_sleep(0)',
            'select pg_sleep(0)',
            'select shouldFail()'
        ];

        var job = JobFactory.create({ user: USER, host: HOST, query: queries });
        var assertCallback = assertJob(job.data, jobStatus.FAILED, done);

        batch.on('job:failed', assertCallback);

        createJob(job.data);
    });
    it('should perform one multiquery job with three queries and fail on the second one', function (done) {
        var queries = [
            'select pg_sleep(0)',
            'select shouldFail()',
            'select pg_sleep(0)'
        ];

        var job = JobFactory.create({ user: USER, host: HOST, query: queries });
        var assertCallback = assertJob(job.data, jobStatus.FAILED, done);

        batch.on('job:failed', assertCallback);

        createJob(job.data);
    });
    it('should perform two multiquery jobs with two queries each', function (done) {
        var jobs = [];

        jobs.push(JobFactory.create({ user: USER, host: HOST, query: [
            'select pg_sleep(0)',
            'select pg_sleep(0)'
        ]}));

        jobs.push(JobFactory.create({ user: USER, host: HOST, query: [
            'select pg_sleep(0)',
            'select pg_sleep(0)'
        ]}));

        var jobsQueue = queue(jobs.length);

        jobs.forEach(function (job) {
            jobsQueue.defer(function (callback) {
                batch.on('job:done', assertJob(job.data, jobStatus.DONE, callback));
                createJob(job.data);
            });
        });

        jobsQueue.awaitAll(done);
    });
    it('should perform two multiquery jobs with two queries each and fail the first job', function (done) {
        var jobs = [];

        jobs.push(JobFactory.create({ user: USER, host: HOST, query: [
            'select pg_sleep(0)',
            'select shouldFail()'
        ]}));

        jobs.push(JobFactory.create({ user: USER, host: HOST, query: [
            'select pg_sleep(0)',
            'select pg_sleep(0)'
        ]}));

        var jobsQueue = queue(jobs.length);

        jobsQueue.defer(function (callback) {
            batch.on('job:failed', assertJob(jobs[0].data, jobStatus.FAILED, callback));
            createJob(jobs[0].data);
        });

        jobsQueue.defer(function (callback) {
            batch.on('job:done', assertJob(jobs[1].data, jobStatus.DONE, callback));
            createJob(jobs[1].data);
        });

        jobsQueue.awaitAll(done);
    });
    it('should perform two multiquery jobs with two queries each and fail the second job', function (done) {
        var jobs = [];

        jobs.push(JobFactory.create({ user: USER, host: HOST, query: [
            'select pg_sleep(0)',
            'select pg_sleep(0)'
        ]}));

        jobs.push(JobFactory.create({ user: USER, host: HOST, query: [
            'select pg_sleep(0)',
            'select shouldFail()'
        ]}));

        var jobsQueue = queue(jobs.length);

        jobsQueue.defer(function (callback) {
            batch.on('job:done', assertJob(jobs[0].data, jobStatus.DONE, callback));
            createJob(jobs[0].data);
        });

        jobsQueue.defer(function (callback) {
            batch.on('job:failed', assertJob(jobs[1].data, jobStatus.FAILED, callback));
            createJob(jobs[1].data);
        });

        jobsQueue.awaitAll(done);
    });
});