Merge pull request #673 from CartoDB/cleanUp
Run build in Github Actions

Commit 95cf0391a8

.github/workflows/main.yml (vendored, new file, 73 lines)
@@ -0,0 +1,73 @@
on:
  pull_request:
    paths-ignore:
      - 'LICENSE'
      - 'README**'
      - 'HOW_TO_RELEASE**'
      - 'LOGGING**'

env:
  GCLOUD_VERSION: '306.0.0'
  ARTIFACTS_PROJECT_ID: cartodb-on-gcp-main-artifacts
  NODE_VERSION: '12.18.3'

jobs:
  build-and-test:
    runs-on: ubuntu-18.04
    timeout-minutes: 10

    steps:
      - uses: actions/checkout@v2
        with:
          submodules: true
          token: ${{ secrets.CARTOFANTE_PERSONAL_TOKEN }}

      - name: Setup gcloud authentication
        uses: google-github-actions/setup-gcloud@master
        with:
          version: ${{env.GCLOUD_VERSION}}
          service_account_key: ${{ secrets.ARTIFACTS_GCLOUD_ACCOUNT_BASE64 }}

      - name: Configure docker and pull images
        run: |
          gcloud auth configure-docker
          docker pull gcr.io/cartodb-on-gcp-main-artifacts/postgres:latest
          docker pull gcr.io/cartodb-on-gcp-main-artifacts/redis:latest

      - name: Run deps (Redis and Postgres)
        run: |
          docker run -d --name postgres -p 5432:5432 gcr.io/cartodb-on-gcp-main-artifacts/postgres:latest
          docker run -d --name redis -p 6379:6379 gcr.io/cartodb-on-gcp-main-artifacts/redis:latest

      - name: Use Node.js
        uses: actions/setup-node@v1
        with:
          node-version: ${{env.NODE_VERSION}}

      - run: npm ci

      - name: install redis tools
        run: sudo apt-get -yqq install redis-tools gdal-bin

      - run: npm test
        env:
          CI: true
          CARTO_SQL_API_ENV_BASED_CONF: true
          CARTO_SQL_API_POSTGRES_HOST: localhost
          CARTO_SQL_API_POSTGRES_PORT: 5432
          CARTO_SQL_API_REDIS_HOST: localhost
          CARTO_SQL_API_REDIS_PORT: 6379
          CARTO_SQL_API_NODE_ENV: test
          NODE_ENV: test

      - name: Build image
        run: |
          echo ${GITHUB_SHA::7}
          echo ${GITHUB_REF##*/}
          docker build -f private/Dockerfile -t gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:${GITHUB_REF##*/} -t gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:${GITHUB_SHA::7} -t gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:${GITHUB_REF##*/}--${GITHUB_SHA::7} .

      - name: Upload image
        run: |
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:${GITHUB_REF##*/}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:${GITHUB_SHA::7}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:${GITHUB_REF##*/}--${GITHUB_SHA::7}
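The image tags in the build and upload steps come from plain bash parameter expansion of the variables GitHub Actions provides. A minimal sketch of how they expand, with made-up values for the SHA and branch name:

```shell
# Illustrative values only; in the workflow these are provided by GitHub Actions
GITHUB_SHA=95cf0391a8000000000000000000000000000000
GITHUB_REF=refs/heads/cleanUp

echo ${GITHUB_SHA::7}   # first 7 characters of the commit SHA -> 95cf039
echo ${GITHUB_REF##*/}  # everything after the last '/'        -> cleanUp
# resulting tags: sql-api:cleanUp, sql-api:95cf039 and sql-api:cleanUp--95cf039
```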
.github/workflows/master.yml (vendored, new file, 43 lines)

@@ -0,0 +1,43 @@
# In this workflow we don't run the tests: we only build the image, tag it (also as latest) and upload it. The tests are not run because they run
# on each pull request, and a branch protection rule forces the branch to be up to date before merging, so the tests always run
# against the latest code.

name: master build image
on:
  push:
    branches:
      - master

env:
  GCLOUD_VERSION: '306.0.0'
  ARTIFACTS_PROJECT_ID: cartodb-on-gcp-main-artifacts

jobs:
  build-master:
    runs-on: ubuntu-18.04
    timeout-minutes: 5

    steps:
      - uses: actions/checkout@v2
        with:
          submodules: true
          token: ${{ secrets.CARTOFANTE_PERSONAL_TOKEN }}

      - name: Build image
        run: |
          echo ${GITHUB_SHA::7}
          echo ${GITHUB_REF##*/}
          docker build -f private/Dockerfile -t gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:latest -t gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:${GITHUB_REF##*/} -t gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:${GITHUB_SHA::7} .

      - name: Setup gcloud authentication
        uses: google-github-actions/setup-gcloud@master
        with:
          version: ${{env.GCLOUD_VERSION}}
          service_account_key: ${{ secrets.ARTIFACTS_GCLOUD_ACCOUNT_BASE64 }}

      - name: Upload image
        run: |
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:${GITHUB_REF##*/}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:${GITHUB_SHA::7}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/sql-api:latest
.gitignore (1 change)

@@ -18,3 +18,4 @@ yarn.lock
 build_resources/
 .dockerignore
 Dockerfile
+docker_node_modules
.gitmodules (vendored, new file, 4 lines)

@@ -0,0 +1,4 @@
[submodule "private"]
	path = private
	url = git@github.com:CartoDB/CartoDB-SQL-API-private.git
	branch = master
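Since `private` is now a git submodule pointing at a private repository, a checkout needs access to it; a minimal sketch of how that typically looks locally (assuming you have access to CartoDB-SQL-API-private):

```shell
# Clone the main repo together with the private submodule (access required)
git clone --recurse-submodules https://github.com/CartoDB/CartoDB-SQL-API.git

# Or, in an existing checkout, fetch the submodule afterwards
git submodule update --init private
```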
.travis.yml (deleted, 61 lines)

@@ -1,61 +0,0 @@
sudo: false

language: node_js

services:
  - redis-server

env:
  global:
    - PGUSER=postgres
    - PGDATABASE=postgres
    - PGOPTIONS='-c client_min_messages=NOTICE'

jobs:
  include:
    - env: POSTGRESQL_VERSION="10" POSTGIS_VERSION="2.4"
      dist: xenial
    - env: POSTGRESQL_VERSION="11" POSTGIS_VERSION="2.5"
      dist: xenial
    - env: POSTGRESQL_VERSION="12" POSTGIS_VERSION="3"
      dist: bionic

node_js:
  - "12"

install:
  - npm ci

script:

  # Remove old packages
  - sudo apt-get remove postgresql-$POSTGRESQL_VERSION postgresql-client-$POSTGRESQL_VERSION postgresql-server-dev-$POSTGRESQL_VERSION postgresql-common postgresql-client-common postgresql-$POSTGRESQL_VERSION-postgis-$POSTGIS_VERSION postgresql-$POSTGRESQL_VERSION-postgis-$POSTGIS_VERSION-scripts postgis

  # Install CARTO packages
  - if [[ $POSTGRESQL_VERSION == '10' ]]; then sudo add-apt-repository -y ppa:cartodb/gis; fi;
  - sudo add-apt-repository -y ppa:cartodb/postgresql-$POSTGRESQL_VERSION
  - sudo apt-get -q update
  # We use -t $TRAVIS_DIST to give preference to our ppa's (which are called as the ${dist}), instead of
  # pgdg repos (which are called ${dist}-pgdg). Nasty, but it works.
  - sudo apt-get install -y --allow-unauthenticated --no-install-recommends --no-install-suggests postgresql-$POSTGRESQL_VERSION postgresql-client-$POSTGRESQL_VERSION postgresql-server-dev-$POSTGRESQL_VERSION postgresql-common postgresql-$POSTGRESQL_VERSION-postgis-$POSTGIS_VERSION postgresql-$POSTGRESQL_VERSION-postgis-$POSTGIS_VERSION-scripts postgis gdal-bin -t $TRAVIS_DIST
  # For pre-12, install plpython2. For PG12, install plpython3
  - if [[ $POSTGRESQL_VERSION != '12' ]]; then sudo apt-get install -y postgresql-plpython-$POSTGRESQL_VERSION python python-redis -t $TRAVIS_DIST; else sudo apt-get install -y postgresql-plpython3-12 python3 python3-redis -t $TRAVIS_DIST; fi;

  # Remove old clusters and create the new one
  - for i in $(pg_lsclusters | tail -n +2 | awk '{print $1}'); do sudo pg_dropcluster --stop $i main; done;
  - sudo rm -rf /etc/postgresql/$POSTGRESQL_VERSION /var/lib/postgresql/$POSTGRESQL_VERSION /var/ramfs/postgresql/$POSTGRESQL_VERSION
  - sudo pg_createcluster -u postgres $POSTGRESQL_VERSION main --start -p 5432 -- --auth-local trust

  - git clone https://github.com/CartoDB/cartodb-postgresql.git
  - cd cartodb-postgresql && make && sudo make install && cd ..

  - createdb template_postgis
  - psql -c "CREATE EXTENSION postgis" template_postgis
  - if [[ $POSTGRESQL_VERSION == '12' ]]; then psql -c "CREATE EXTENSION postgis_raster" template_postgis; fi;

  - cp config/environments/test.js.example config/environments/test.js
  - npm test

after_failure:
  - pg_lsclusters
  - sudo cat /var/log/postgresql/postgresql-$POSTGRESQL_VERSION-main.log
README.md (60 changes)

@@ -11,8 +11,8 @@ The [`CARTO’s SQL API`](https://carto.com/developers/sql-api/) allows you to i
 
 Requirements:
 
-* [`Node 10.x (npm 6.x)`](https://nodejs.org/dist/latest-v10.x/)
-* [`PostgreSQL >= 10.0`](https://www.postgresql.org/download/)
+* [`Node 12.x`](https://nodejs.org/dist/latest-v12.x/)
+* [`PostgreSQL >= 11.0`](https://www.postgresql.org/download/)
 * [`PostGIS >= 2.4`](https://postgis.net/install/)
 * [`CARTO Postgres Extension >= 0.24.1`](https://github.com/CartoDB/cartodb-postgresql)
 * [`Redis >= 4`](https://redis.io/download)

@@ -43,7 +43,11 @@ $ npm install
 
 ### Run
 
-Create the `./config/environments/<env>.js` file (there are `.example` files to start from). Look at `./lib/server-options.js` for more on config.
+You can inject the configuration through environment variables at run time. Check the file `./config/environments/config.js` to see the ones you have available.
 
+While the migration to the new environment-based configuration is ongoing, you can still use the old method of copying a config file. To enable the configuration driven by environment variables you need to pass `CARTO_SQL_API_ENV_BASED_CONF=true`. You can use the docker image to run it.
+
+Old way:
+
 ```shell
 $ node app.js <env>

@@ -57,6 +61,28 @@ Where `<env>` is the name of a configuration file under `./config/environments/`
 $ npm test
 ```
 
+You can try to run the tests against the dependencies from the `dev-env`. To do so, you need to build the test docker image:
+
+```shell
+$ docker-compose -f private/docker-compose.yml build
+```
+
+Then you can run the tests like this:
+
+```shell
+$ docker-compose -f private/docker-compose.yml run sql-api-tests
+```
+
+It will mount your code inside a volume. In case you want to play around and run `npm test` or something else, you can do:
+
+```shell
+$ docker-compose -f private/docker-compose.yml run --entrypoint bash sql-api-tests
+```
+
+This gives you a bash shell inside the test container, with the code from your host.
+
+⚠️ *WARNING* Some tests still fail inside the docker environment. Inside CI they don't use the `ci` folder to run the tests yet either; there is a failing test which prevents it.
+
 ### Coverage
 
 ```shell

@@ -65,34 +91,6 @@ $ npm run cover
 
 Open `./coverage/lcov-report/index.html`.
 
-### Docker support
-
-We provide docker images just for testing and continuous integration purposes:
-
-* [`nodejs-xenial-pg1121`](https://hub.docker.com/r/carto/nodejs-xenial-pg1121/tags)
-* [`nodejs-xenial-pg101`](https://hub.docker.com/r/carto/nodejs-xenial-pg101/tags)
-
-You can find instructions to install Docker, download, and update images [here](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docker/reference.md).
-
-### Useful `npm` scripts
-
-Run test in a docker image with a specific Node.js version:
-
-```shell
-$ DOCKER_IMAGE=<docker-image-tag> NODE_VERSION=<nodejs-version> npm run test:docker
-```
-
-Where:
-
-* `<docker-image-tag>`: the tag of required docker image, e.g. `carto/nodejs-xenial-pg1121:latest`
-* `<nodejs-version>`: the Node.js version, e.g. `10.15.1`
-
-In case you need to debug:
-
-```shell
-$ DOCKER_IMAGE=<docker-image-tag> npm run docker:bash
-```
-
 ## Documentation
 
 You can find an overview, guides, full reference, and support in [`CARTO's developer center`](https://carto.com/developers/sql-api/). The [docs directory](https://github.com/CartoDB/CartoDB-SQL-API/tree/master/docs) contains different documentation resources, from a higher level to more detailed ones.
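As a companion to the environment-based configuration described above, here is a minimal sketch of starting the API with a few of the `CARTO_SQL_API_*` variables (the values below are illustrative; anything not set falls back to the defaults in `config/environments/config.js`):

```shell
# Illustrative values; see config/environments/config.js for the full list
export CARTO_SQL_API_ENV_BASED_CONF=true
export CARTO_SQL_API_NODE_ENV=development
export CARTO_SQL_API_POSTGRES_HOST=localhost
export CARTO_SQL_API_POSTGRES_PORT=5432
export CARTO_SQL_API_REDIS_HOST=localhost
export CARTO_SQL_API_REDIS_PORT=6379

node app.js
```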
app.js (13 changes)

@@ -21,14 +21,17 @@ const argv = require('yargs')
     .describe('c', 'Load configuration from path')
     .argv;
 
-const environmentArg = argv._[0] || process.env.NODE_ENV || 'development';
+let environmentArg = argv._[0] || process.env.NODE_ENV || 'development';
+if (process.env.CARTO_SQL_API_ENV_BASED_CONF) {
+    // we override the file with the one with env vars
+    environmentArg = 'config';
+}
 const configurationFile = path.resolve(argv.config || './config/environments/' + environmentArg + '.js');
 
 if (!fs.existsSync(configurationFile)) {
     logger.fatal(new Error(`Configuration file "${configurationFile}" does not exist`));
     process.exit(1);
 }
 
 global.settings = require(configurationFile);
 
 const ENVIRONMENT = argv._[0] || process.env.NODE_ENV || global.settings.environment;

@@ -37,7 +40,7 @@ process.env.NODE_ENV = ENVIRONMENT;
 const availableEnvironments = ['development', 'production', 'test', 'staging'];
 
 if (!availableEnvironments.includes(ENVIRONMENT)) {
-    logger.fatal(new Error(`Invalid environment argument, valid ones: ${Object.keys(availableEnvironments).join(', ')}`));
+    logger.fatal(new Error(`Invalid environment ${ENVIRONMENT} argument, valid ones: ${Object.values(availableEnvironments).join(', ')}`));
     process.exit(1);
 }

@@ -57,6 +60,7 @@ const { version, name } = require('./package');
 const createServer = require('./lib/server');
 
 const server = createServer(statsClient);
 
 const listener = server.listen(global.settings.node_port, global.settings.node_host);
 listener.on('listening', function () {
     const { address, port } = listener.address();

@@ -118,8 +122,9 @@ function scheduleForcedExit (killTimeout, finalLogger) {
     killTimer.unref();
 }
 
+const regex = /[a-z]?([0-9]*)/;
 function isGteMinVersion (version, minVersion) {
-    const versionMatch = /[a-z]?([0-9]*)/.exec(version);
+    const versionMatch = regex.exec(version);
     if (versionMatch) {
         const majorVersion = parseInt(versionMatch[1], 10);
         if (Number.isFinite(majorVersion)) {
config/environments/config.js (new file, 198 lines)

@@ -0,0 +1,198 @@
// This is the file that has the generic configuration and you can override the changes
// in different environments with env. vars

// Time in milliseconds to force GC cycle.
// Disable by using <=0 value.
module.exports.gc_interval = 10000;
module.exports.routes = {
    // Each entry corresponds with an express' router.
    // You must define at least one path. However, middlewares are optional.
    api: [{
        // Required: path where other "routers" or "controllers" will be attached to.
        paths: [
            // In case the path has a :user param the username will be the one specified in the URL,
            // otherwise it will fallback to extract the username from the host header.
            '/api/:version',
            '/user/:user/api/:version',
        ],
        // Optional: attach middlewares at the beginning of the router
        // to perform custom operations.
        middlewares: [
            function noop () {
                return function noopMiddleware (req, res, next) {
                    next();
                }
            }
        ],
        sql: [{
            // Required
            paths: [
                '/sql'
            ],
            // Optional
            middlewares: []
        }]
    }]
};
// If useProfiler is true every response will be served with an
// X-SQLAPI-Profile header containing elapsed timing for various
// steps taken for producing the response.
module.exports.useProfiler = true;
// Regular expression pattern to extract username
// from hostname. Must have a single grabbing block.
// for dev-env you need to use '^(.*)\\.localhost';
module.exports.user_from_host = process.env.CARTO_SQL_API_USER_FROM_HOST || '^(.*)\\.cartodb\\.com$';
module.exports.node_port = 8080;
module.exports.node_host = null; // null on purpose so it listens to whatever address docker assigns
// idle socket timeout, in milliseconds
module.exports.node_socket_timeout = 600000;
module.exports.environment = process.env.CARTO_SQL_API_NODE_ENV || 'development';
// Supported labels: 'user_id' (read from redis)
module.exports.db_base_name = process.env.CARTO_SQL_API_DB_BASE_NAME || 'cartodb_user_<%= user_id %>_db';
// Supported labels: 'user_id' (read from redis)
module.exports.db_user = process.env.CARTO_SQL_API_DB_USER || 'cartodb_user_<%= user_id %>';
// Supported labels: 'user_id', 'user_password' (both read from redis)
module.exports.db_user_pass = '<%= user_password %>';
// Name of the anonymous PostgreSQL user
module.exports.db_pubuser = 'publicuser';
// Password for the anonymous PostgreSQL user
module.exports.db_pubuser_pass = 'public';
module.exports.db_host = process.env.CARTO_SQL_API_POSTGRES_HOST || 'localhost';
module.exports.db_port = process.env.CARTO_SQL_API_POSTGRES_PORT || '6432';
module.exports.db_batch_port = process.env.CARTO_SQL_API_POSTGRES_BATCH_PORT || '5432';
module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours
module.exports.batch_query_timeout = 12 * 3600 * 1000; // 12 hours in milliseconds
module.exports.copy_timeout = "'5h'";
module.exports.copy_from_max_post_size = 2 * 1024 * 1024 * 1024; // 2 GB;
module.exports.copy_from_max_post_size_pretty = '2 GB';
module.exports.copy_from_minimum_input_speed = 0; // 1 byte per second
module.exports.copy_from_maximum_slow_input_speed_interval = 15; // 15 seconds
// Max number of queued jobs a user can have at a given time
module.exports.batch_max_queued_jobs = 64;
// Capacity strategy to use.
// It allows to tune how many queries run at a db host at the same time.
// Options: 'fixed', 'http-simple', 'http-load'
module.exports.batch_capacity_strategy = 'fixed';
// Applies when strategy='fixed'.
// Number of simultaneous users running queries in the same host.
// It will use 1 as min.
// Default 4.
module.exports.batch_capacity_fixed_amount = 4;
// Applies when strategy='http-simple' or strategy='http-load'.
// HTTP endpoint to check db host load.
// Helps to decide the number of simultaneous users running queries in that host.
// 'http-simple' will use 'available_cores' to decide the number.
// 'http-load' will use 'cores' and 'relative_load' to decide the number.
// It will use 1 as min.
// If no template is provided it will default to 'fixed' strategy.
module.exports.batch_capacity_http_url_template = 'http://<%= dbhost %>:9999/load';
// Max database connections in the pool
// Subsequent connections will wait for a free slot.
// NOTE: not used by OGR-mediated accesses
module.exports.db_pool_size = 500;
// Milliseconds before a connection is removed from pool
module.exports.db_pool_idleTimeout = 30000;
// Milliseconds between idle client checking
module.exports.db_pool_reapInterval = 1000;
// max number of bytes for a row, when exceeded the query will throw an error
// module.exports.db_max_row_size = 10 * 1024 * 1024;
// allows to use an object to connect with node-postgres instead of a connection string
module.exports.db_use_config_object = true;
// requires enabling db_use_config_object=true
// allows to enable/disable keep alive for database connections
// by default is not enabled
module.exports.db_keep_alive = {
    enabled: true,
    initialDelay: 5000 // Not used yet
};
module.exports.redis_host = process.env.CARTO_SQL_API_REDIS_HOST || '127.0.0.1';
module.exports.redis_port = process.env.CARTO_SQL_API_REDIS_PORT || 6379;
module.exports.redisPool = 50;
module.exports.redisIdleTimeoutMillis = 10000;
module.exports.redisReapIntervalMillis = 1000;
module.exports.redisLog = false;

// Temporary directory, make sure it is writable by server user
module.exports.tmpDir = '/tmp';
// change ogr2ogr command or path
module.exports.ogr2ogrCommand = 'ogr2ogr';
// change zip command or path
module.exports.zipCommand = 'zip';
// Optional statsd support
module.exports.statsd = {
    host: 'localhost',
    port: 8125,
    prefix: 'dev.:host.',
    cacheDns: true
    // support all allowed node-statsd options
};
module.exports.health = {
    enabled: true,
    username: 'development',
    query: 'select 1'
};

let allowedHosts = ['carto.com', 'cartodb.com'];
if (process.env.CARTO_SQL_API_OAUTH_HOSTS) {
    const hosts = process.env.CARTO_SQL_API_OAUTH_HOSTS.split(',');
    if (hosts.length > 0) {
        allowedHosts = hosts;
    }
}
module.exports.oauth = {
    allowedHosts: allowedHosts
};
module.exports.disabled_file = 'pids/disabled';

module.exports.ratelimits = {
    // whether it should rate limit endpoints (global configuration)
    rateLimitsEnabled: false,
    // whether it should rate limit one or more endpoints (only if rateLimitsEnabled = true)
    endpoints: {
        query: false,
        job_create: false,
        job_get: false,
        job_delete: false,
        copy_from: false,
        copy_to: false
    }
};

module.exports.validatePGEntitiesAccess = false;
module.exports.logQueries = true;
module.exports.maxQueriesLogLength = 1024;

module.exports.cache = {
    ttl: 60 * 60 * 24 * 365, // one year in seconds
    fallbackTtl: 60 * 5 // five minutes in seconds
};

module.exports.pubSubMetrics = {
    enabled: process.env.CARTO_SQL_API_METRICS_ENABLED === 'true' || false,
    project_id: process.env.CARTO_SQL_API_METRICS_PROJECT_ID || 'avid-wavelet-844',
    credentials: '',
    topic: process.env.CARTO_SQL_API_METRICS_PROJECT_ID || 'raw-metric-events'
};

// override some defaults for tests
if (process.env.NODE_ENV === 'test') {
    module.exports.redisIdleTimeoutMillis = 1;
    module.exports.redisReapIntervalMillis = 1;
    module.exports.db_pubuser = 'testpublicuser';
    module.exports.batch_query_timeout = 5 * 1000; // 5 seconds in milliseconds
    module.exports.redisIdleTimeoutMillis = 1;
    module.exports.redisReapIntervalMillis = 1;
    module.exports.db_base_name = 'cartodb_test_user_<%= user_id %>_db';
    module.exports.db_user = 'test_cartodb_user_<%= user_id %>';
    module.exports.db_user_pass = 'test_cartodb_user_<%= user_id %>_pass';
    module.exports.user_from_host = '^([^.]*)\\.';
    module.exports.oauth = {
        allowedHosts: ['localhost.lan:8080', 'localhostdb.lan:8080']
    };
    module.exports.health = {
        enabled: true,
        username: 'vizzuality',
        query: 'select 1'
    };
}
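The `routes` block above mounts the SQL endpoint under both the host-based and the user-in-path URL schemes. A minimal sketch of what a query request looks like under those defaults (hostname, user and API key below are placeholders, not values from this diff):

```shell
# Placeholders: replace <user>, <api_key> and the hostname with real values
curl "https://<user>.cartodb.com/api/v2/sql?q=SELECT%201&api_key=<api_key>"

# Equivalent request using the /user/:user path-based route
curl "https://example.com/user/<user>/api/v2/sql?q=SELECT%201&api_key=<api_key>"
```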
@@ -50,7 +50,7 @@ module.exports.db_user_pass = 'test_cartodb_user_<%= user_id %>_pass';
 module.exports.db_pubuser = 'testpublicuser';
 // Password for the anonymous PostgreSQL user
 module.exports.db_pubuser_pass = 'public';
-module.exports.db_host = 'localhost';
+module.exports.db_host = process.env.PGHOST || 'localhost';
 module.exports.db_port = '5432';
 module.exports.db_batch_port = '5432';
 module.exports.finished_jobs_ttl_in_seconds = 2 * 3600; // 2 hours

@@ -98,8 +98,8 @@ module.exports.db_keep_alive = {
     enabled: true,
     initialDelay: 5000 // Not used yet
 };
-module.exports.redis_host = '127.0.0.1';
-module.exports.redis_port = 6336;
+module.exports.redis_host = process.env.REDIS_HOST || '127.0.0.1';
+module.exports.redis_port = process.env.REDIS_PORT || 6336;
 module.exports.redisPool = 50;
 module.exports.redisIdleTimeoutMillis = 1;
 module.exports.redisReapIntervalMillis = 1;
@@ -1,14 +0,0 @@
#!/bin/bash

/etc/init.d/postgresql start

source /src/nodejs-install.sh

# Install cartodb-postgresql extension
git clone https://github.com/CartoDB/cartodb-postgresql.git
cd cartodb-postgresql && make && make install && cd ..

cp config/environments/test.js.example config/environments/test.js

npm ci
npm test
@@ -2,6 +2,8 @@
 
 const COPY_FORMATS = ['TEXT', 'CSV', 'BINARY'];
 
+const regex = /\bFORMAT\s+(\w+)/;
+
 module.exports = {
     getFormatFromCopyQuery (copyQuery) {
         let format = 'TEXT'; // Postgres default format

@@ -13,7 +15,6 @@
         }
 
         if (copyQuery.includes(' WITH') && copyQuery.includes('FORMAT ')) {
-            const regex = /\bFORMAT\s+(\w+)/;
             const result = regex.exec(copyQuery);
 
             if (result && result.length === 2) {
package-lock.json (generated, 742 changes)
File diff suppressed because it is too large.
package.json

@@ -71,6 +71,7 @@
     "libxmljs": "^0.19.7",
     "mocha": "^7.2.0",
     "mockdate": "^2.0.2",
+    "nodemon": "^2.0.6",
     "nyc": "^15.0.0",
     "pino-pretty": "^4.0.0",
     "shapefile": "0.3.0",

@@ -87,8 +88,7 @@
     "posttest": "npm run test:teardown",
     "test:teardown": "NODE_ENV=test node test teardown",
     "cover": "nyc --reporter=lcov npm test",
-    "test:docker": "docker run -e \"NODEJS_VERSION=$NODE_VERSION\" -v `pwd`:/srv $DOCKER_IMAGE bash docker/scripts/test-setup.sh && docker ps --filter status=dead --filter status=exited -aq | xargs docker rm -v",
-    "docker:bash": "docker run -it -v `pwd`:/srv $DOCKER_IMAGE bash"
+    "dev": "NODE_ENV=development nodemon app.js"
   },
   "engines": {
     "node": "^10.15.1",
private (new submodule)

@@ -0,0 +1 @@
Subproject commit 2e20a0f0206fe7121162ef2db2ea292d971aabf0
@@ -6,20 +6,11 @@ var server = require('../../lib/server')();
 var assert = require('../support/assert');
 
 const accessControlHeaders = [
-    'X-Requested-With',
-    'X-Prototype-Version',
-    'X-CSRF-Token',
-    'Authorization',
-    'Carto-Event',
-    'Carto-Event-Source',
-    'Carto-Event-Group-Id'
+    '*'
 ].join(', ');
 
 const exposedHeaders = [
-    'Carto-Rate-Limit-Limit',
-    'Carto-Rate-Limit-Remaining',
-    'Carto-Rate-Limit-Reset',
-    'Retry-After'
+    '*'
 ].join(', ');
 
 describe('app-configuration', function () {
@@ -13,8 +13,11 @@ var JobService = require('../../../lib/batch/job-service');
 var JobCanceller = require('../../../lib/batch/job-canceller');
 var metadataBackend = require('cartodb-redis')({ pool: redisUtils.getPool() });
 
+const TEST_USER_ID = 1;
+const TEST_USER = global.settings.db_user.replace('<%= user_id %>', TEST_USER_ID);
+const TEST_DB = global.settings.db_base_name.replace('<%= user_id %>', TEST_USER_ID);
+
 describe('batch module', function () {
-    var dbInstance = 'localhost';
     var username = 'vizzuality';
     var pool = redisUtils.getPool();
     var logger = new Logger();

@@ -39,11 +42,11 @@ describe('batch module', function () {
         var data = {
             user: username,
             query: sql,
-            host: dbInstance,
-            dbname: 'cartodb_test_user_1_db',
-            dbuser: 'test_cartodb_user_1',
-            port: 5432,
-            pass: 'test_cartodb_user_1_pass'
+            host: global.settings.db_host,
+            dbname: TEST_DB,
+            dbuser: TEST_USER,
+            port: global.settings.db_batch_port,
+            pass: global.settings.db_user_pass
         };
 
         jobService.create(data, function (err, job) {
@@ -21,6 +21,10 @@ if (global.settings.statsd) {
 const statsClient = StatsClient.getInstance(global.settings.statsd);
 const server = require('../../lib/server')(statsClient);
 
+const TEST_USER_ID = 1;
+const TEST_USER = 'postgres';
+const TEST_DB = global.settings.db_base_name.replace('<%= user_id %>', TEST_USER_ID);
+
 // Give it enough time to connect and issue the query
 // but not too much so as to disconnect in the middle of the query.
 const CLIENT_DISCONNECT_TIMEOUT = 100;

@@ -43,10 +47,10 @@ const assertCanReuseCanceledConnection = function (done) {
 describe('copy-endpoints', function () {
     before(function () {
         this.client = new Client({
-            user: 'postgres',
-            host: 'localhost',
-            database: 'cartodb_test_user_1_db',
-            port: 5432
+            user: TEST_USER,
+            host: global.settings.db_host,
+            database: TEST_DB,
+            port: global.settings.db_batch_port
         });
         this.client.connect();
     });

@@ -77,7 +77,10 @@ describe('rate limit', function () {
         global.settings.ratelimits.endpoints.query = true;
 
         server = app();
-        redisClient = redis.createClient(global.settings.redis_port);
+        redisClient = redis.createClient({
+            port: global.settings.redis_port,
+            host: global.settings.redis_host
+        });
 
         const count = 1;
         const period = 1;

@@ -1,4 +1,10 @@
 'use strict';
 
-global.settings = require('../config/environments/test');
+let configFileName = process.env.NODE_ENV;
+if (process.env.CARTO_SQL_API_ENV_BASED_CONF) {
+    // we override the file with the one with env vars
+    configFileName = 'config';
+}
+
+global.settings = require(`../config/environments/${configFileName}.js`);
 process.env.NODE_ENV = 'test';

@@ -1,7 +1,7 @@
 'use strict';
 
 const util = require('util');
-const path = require('path');
+// const path = require('path');
 const exec = util.promisify(require('child_process').exec);
 
 if (!process.env.NODE_ENV) {

@@ -9,13 +9,15 @@
     process.exit(1);
 }
 
-const environment = require(`../config/environments/${process.env.NODE_ENV}.js`);
+let configFileName = process.env.NODE_ENV;
+if (process.env.CARTO_SQL_API_ENV_BASED_CONF) {
+    // we override the file with the one with env vars
+    configFileName = 'config';
+}
+
+const environment = require(`../config/environments/${configFileName}.js`);
 const REDIS_PORT = environment.redis_port;
-const REDIS_CELL_PATH = path.resolve(
-    process.platform === 'darwin'
-        ? './test/support/libredis_cell.dylib'
-        : './test/support/libredis_cell.so'
-);
+const REDIS_HOST = environment.redis_host;
 
 const TEST_USER_ID = 1;
 const TEST_USER = environment.db_user.replace('<%= user_id %>', TEST_USER_ID);

@@ -24,29 +26,22 @@ const PUBLIC_USER = environment.db_pubuser;
 const PUBLIC_USER_PASSWORD = environment.db_pubuser_pass;
 const TEST_DB = environment.db_base_name.replace('<%= user_id %>', TEST_USER_ID);
+const PGHOST = environment.db_host;
 
-async function startRedis () {
-    await exec(`redis-server --port ${REDIS_PORT} --loadmodule ${REDIS_CELL_PATH} --logfile ${__dirname}/redis-server.log --daemonize yes`);
-}
-
-async function stopRedis () {
-    await exec(`redis-cli -p ${REDIS_PORT} shutdown`);
-}
+const PGPORT = environment.db_port;
 
 async function dropDatabase () {
-    await exec(`dropdb --if-exists ${TEST_DB}`, {
+    await exec(`dropdb -p "${PGPORT}" -h "${PGHOST}" --if-exists ${TEST_DB}`, {
         env: Object.assign({ PGUSER: 'postgres' }, process.env)
     });
 }
 
 async function createDatabase () {
-    await exec(`createdb -T template_postgis -EUTF8 "${TEST_DB}"`, {
+    await exec(`createdb -p "${PGPORT}" -h "${PGHOST}" -T template_postgis -EUTF8 "${TEST_DB}"`, {
         env: Object.assign({ PGUSER: 'postgres' }, process.env)
     });
 }
 
 async function createDatabaseExtension () {
-    await exec(`psql -c "CREATE EXTENSION IF NOT EXISTS cartodb CASCADE;" ${TEST_DB}`, {
+    await exec(`psql -p "${PGPORT}" -h "${PGHOST}" -c "CREATE EXTENSION IF NOT EXISTS cartodb CASCADE;" ${TEST_DB}`, {
         env: Object.assign({ PGUSER: 'postgres' }, process.env)
     });
 }

@@ -64,7 +59,7 @@ async function populateDatabase () {
         sed -e "s/:PUBLICPASS/${PUBLIC_USER_PASSWORD}/g" |
         sed -e "s/:TESTUSER/${TEST_USER}/g" |
         sed -e "s/:TESTPASS/${TEST_PASSWORD}/g" |
-        PGOPTIONS='--client-min-messages=WARNING' psql -q -v ON_ERROR_STOP=1 ${TEST_DB}
+        PGOPTIONS='--client-min-messages=WARNING' psql -h "${PGHOST}" -p "${PGPORT}" -q -v ON_ERROR_STOP=1 ${TEST_DB}
     `;
 
     await exec(populateDatabaseCmd, {

@@ -140,8 +135,8 @@ async function populateRedis () {
         time sometime
     `;
 
-    await exec(`echo "${commands}" | redis-cli -p ${REDIS_PORT} -n 5`);
-    await exec(`echo "${oauthCommands}" | redis-cli -p ${REDIS_PORT} -n 3`);
+    await exec(`echo "${commands}" | redis-cli -h ${REDIS_HOST} -p ${REDIS_PORT} -n 5`);
+    await exec(`echo "${oauthCommands}" | redis-cli -h ${REDIS_HOST} -p ${REDIS_PORT} -n 3`);
 }
 
 async function main (args) {

@@ -150,7 +145,6 @@
     try {
         switch (args[0]) {
         case 'setup':
-            await startRedis();
             await populateRedis();
             await dropDatabase();
             await createDatabase();

@@ -158,7 +152,6 @@
             await populateDatabase();
             break;
         case 'teardown':
-            await stopRedis();
             break;
         default:
             throw new Error('Missing "mode" argument. Valid ones: "setup" or "teardown"');
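With the test setup script taking the Postgres and Redis endpoints from the configuration instead of assuming local daemons, the suite can run against the same images the workflow pulls. A minimal sketch of a local run under that assumption (the image names are the ones used in the CI workflow; access to the registry is required):

```shell
# Start the dependencies the same way the pull-request workflow does
docker run -d --name postgres -p 5432:5432 gcr.io/cartodb-on-gcp-main-artifacts/postgres:latest
docker run -d --name redis -p 6379:6379 gcr.io/cartodb-on-gcp-main-artifacts/redis:latest

# Point the suite at them through the env-based configuration and run it,
# mirroring the env block of the build-and-test job
CARTO_SQL_API_ENV_BASED_CONF=true \
CARTO_SQL_API_POSTGRES_HOST=localhost CARTO_SQL_API_POSTGRES_PORT=5432 \
CARTO_SQL_API_REDIS_HOST=localhost CARTO_SQL_API_REDIS_PORT=6379 \
NODE_ENV=test npm test
```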
@@ -22,9 +22,12 @@ var jobPublisher = new JobPublisher(redisUtils.getPool());
 var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
 var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);
 
+const TEST_USER_ID = 1;
+const TEST_USER = global.settings.db_user.replace('<%= user_id %>', TEST_USER_ID);
+const TEST_DB = global.settings.db_base_name.replace('<%= user_id %>', TEST_USER_ID);
+
 var USER = 'vizzuality';
 var QUERY = 'select pg_sleep(0)';
 var HOST = 'localhost';
 
 // sets job to running, run its query and returns inmediatly (don't wait for query finishes)
 // in order to test query cancelation/draining

@@ -66,11 +69,12 @@ function createWadusJob (query) {
     return JobFactory.create(JSON.parse(JSON.stringify({
         user: USER,
         query: query,
-        host: HOST,
-        dbname: 'cartodb_test_user_1_db',
-        dbuser: 'test_cartodb_user_1',
-        port: 5432,
-        pass: 'test_cartodb_user_1_pass'
+        host: global.settings.db_host,
+        dbname: TEST_DB,
+        dbuser: TEST_USER,
+        port: global.settings.db_batch_port,
+        pass: global.settings.db_user_pass
+
     })));
 }

@@ -30,17 +30,20 @@ var queryRunner = new QueryRunner(userDatabaseMetadataService, logger);
 var StatsD = require('node-statsd').StatsD;
 var statsdClient = new StatsD(global.settings.statsd);
 
+const TEST_USER_ID = 1;
+const TEST_USER = global.settings.db_user.replace('<%= user_id %>', TEST_USER_ID);
+const TEST_DB = global.settings.db_base_name.replace('<%= user_id %>', TEST_USER_ID);
+
 var USER = 'vizzuality';
 var QUERY = 'select pg_sleep(0)';
 var HOST = 'localhost';
 var JOB = {
     user: USER,
     query: QUERY,
-    host: HOST,
-    dbname: 'cartodb_test_user_1_db',
-    dbuser: 'test_cartodb_user_1',
-    port: 5432,
-    pass: 'test_cartodb_user_1_pass'
+    host: global.settings.db_host,
+    dbname: TEST_DB,
+    dbuser: TEST_USER,
+    port: global.settings.db_batch_port,
+    pass: global.settings.db_user_pass
 };
 
 describe('job runner', function () {

@@ -23,18 +23,20 @@ var jobQueue = new JobQueue(metadataBackend, jobPublisher, logger);
 var jobBackend = new JobBackend(metadataBackend, jobQueue, logger);
 var jobCanceller = new JobCanceller();
 
+const TEST_USER_ID = 1;
+const TEST_USER = global.settings.db_user.replace('<%= user_id %>', TEST_USER_ID);
+const TEST_DB = global.settings.db_base_name.replace('<%= user_id %>', TEST_USER_ID);
+
 var USER = 'vizzuality';
 var QUERY = 'select pg_sleep(0)';
 var HOST = 'localhost';
 var JOB = {
     user: USER,
     query: QUERY,
-    host: HOST,
-    dbname: 'cartodb_test_user_1_db',
-    dbuser: 'test_cartodb_user_1',
-    port: 5432,
-    pass: 'test_cartodb_user_1_pass'
-
+    host: global.settings.db_host,
+    dbname: TEST_DB,
+    dbuser: TEST_USER,
+    port: global.settings.db_batch_port,
+    pass: global.settings.db_user_pass
 };
 
 function createWadusDataJob () {