Compare commits

...

20 Commits

Author SHA1 Message Date
Daniel García Aubert
5ff2cf6746 Stubs next version 2018-11-21 18:13:03 +01:00
Daniel García Aubert
94c8d89aba Release v1.2.0-carto.3 2018-11-21 18:08:01 +01:00
Daniel G. Aubert
c19ab0746c
Merge pull request #3 from CartoDB/nodejs-10
Support Node.js 10 LTS
2018-11-21 18:05:40 +01:00
Daniel García Aubert
a27bd1fd8f Prepare next release 2018-11-21 17:57:20 +01:00
Daniel García Aubert
01ab20fce2 Add note to README 2018-11-07 16:50:38 +01:00
Daniel García Aubert
d55dbe70d5 Do not use deprecated Buffer constructor 2018-11-07 15:06:38 +01:00
Daniel García Aubert
de47558628 Support 'package-lock.json' 2018-11-07 15:01:55 +01:00
Daniel García Aubert
38c78677e8 Drop support for old versions of Node.js and support latest LTS releases 2018-11-07 15:01:24 +01:00
Daniel García Aubert
6f1d5cb4a5 Release v1.2.0-carto.2 2018-10-26 13:26:02 +02:00
Daniel G. Aubert
99d397956c
Merge pull request #2 from CartoDB/use-strict
Use strict mode
2018-10-26 13:22:30 +02:00
Daniel García Aubert
c85f7d27b8 Update NEWS 2018-10-24 19:17:10 +02:00
Daniel García Aubert
8b355b6e72 Use strict 2018-10-24 18:52:57 +02:00
Rafa de la Torre
d7e5c1383f Update v1.2.0-carto.1 release date 2018-06-11 13:30:43 +02:00
Rafa de la Torre
cb2227d159
Merge pull request #1 from CartoDB/performance-tune-copy-to
Improve performance of COPY TO
2018-06-11 13:29:23 +02:00
Rafa de la Torre
7930d1b8dd Add entry to changelog 2018-06-11 13:27:44 +02:00
Rafa de la Torre
e94fefe902 Merge branch 'v1.2-carto' into performance-tune-copy-to 2018-06-11 13:24:09 +02:00
Rafa de la Torre
9293926047 Add a NEWS.carto.md with the changelog 2018-06-11 13:20:53 +02:00
Rafa de la Torre
fd3cc95573 Remove unused var buffer_sent 2018-06-11 12:17:39 +02:00
Rafa de la Torre
922627daaf Small refactor 2018-06-11 12:14:28 +02:00
Rafa de la Torre
61bc713e0c Improve performance of COPY TO #56
Under some circumstances, the COPY TO streaming can be CPU-bound,
particularly when PG holds the result set in memory buffers and the size
of the rows is much smaller than the chunk size (64 KB on my Linux box).

This commit improves the situation by creating a buffer of `chunk`
size and fitting in as many rows as it can before pushing them. This
results in more balanced reads and writes (in terms of size, and in bigger
chunks) as well as more frequent calls to the callback, thus freeing the
main loop for other events to be processed and therefore avoiding
starvation.
2018-06-08 15:04:42 +02:00
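
A minimal sketch of the row-coalescing idea described in that commit message (hypothetical standalone names; the real change is in the copy-to `_transform` diff below): rows parsed out of an incoming chunk are copied into a single chunk-sized buffer and pushed once, instead of pushing each row individually.

    // Sketch, assuming a stream.Transform-style _transform(chunk, enc, cb).
    // `rows` stands for the CopyData payloads already parsed out of `chunk`.
    function coalesceRows(stream, chunk, rows) {
      var buffer = Buffer.alloc(chunk.length)       // at most one chunk's worth of row data
      var bufferOffset = 0
      rows.forEach(function(row) {
        row.copy(buffer, bufferOffset)              // append the row instead of pushing it
        bufferOffset += row.length
      })
      if (bufferOffset > 0) {
        stream.push(buffer.slice(0, bufferOffset))  // one push per chunk: fewer, bigger writes
      }
    }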
11 changed files with 269 additions and 19 deletions

View File

@@ -1,9 +1,8 @@
language: node_js
node_js:
- "0.12"
- "4"
- "5"
- "6"
- "8"
- "10"
addons:
postgresql: "9.2"

30
NEWS.carto.md Normal file
View File

@@ -0,0 +1,30 @@
# CARTO's Changelog
## v1.2.0-carto.4
Released 2018-mm-dd
## v1.2.0-carto.3
Released 2018-11-21
Features:
* Drop support for Node.js 0.12, 4, and 5.
* Add support for Node.js 8 and 10.
* Add package-lock.json.
* Do not use deprecated Buffer constructors.
## v1.2.0-carto.2
Released 2018-10-26
Bug fixes:
* Make all modules use strict mode semantics.
## v1.2.0-carto.1
Released 2018-06-11
Bug fixes:
* Improve performance of COPY TO by sending bigger chunks through the low-level `push()`. See https://github.com/CartoDB/node-pg-copy-streams/pull/1
## v1.2.0
Released 2016-08-22
Vanilla version v1.2.0 from upstream repository. See https://github.com/CartoDB/node-pg-copy-streams/releases/tag/v1.2.0

View File

@@ -1,3 +1,7 @@
## Note
This repository is forked from [brianc/node-pg-copy-streams](https://github.com/brianc/node-pg-query-stream)
## pg-copy-streams
[![Build Status](https://travis-ci.org/brianc/node-pg-copy-streams.svg)](https://travis-ci.org/brianc/node-pg-copy-streams)

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function(txt, options) {
return new CopyStreamQuery(txt, options)
}
@@ -42,6 +44,16 @@ CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
var messageCode;
var needPush = false;
var buffer = Buffer.alloc(chunk.length);
var buffer_offset = 0;
this.pushBufferIfneeded = function() {
if (needPush && buffer_offset > 0) {
this.push(buffer.slice(0, buffer_offset))
buffer_offset = 0;
}
}
while((chunk.length - offset) >= (Byte1Len + Int32Len)) {
var messageCode = chunk[offset]
@@ -70,6 +82,7 @@ CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
case code.ErrorResponse:
case code.CopyDone:
this.pushBufferIfneeded();
this._detach()
this.push(null)
return cb();
@@ -84,7 +97,8 @@ CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
if (needPush) {
var row = chunk.slice(offset, offset + length - Int32Len)
this.rowCount++
this.push(row)
row.copy(buffer, buffer_offset);
buffer_offset += row.length;
}
offset += (length - Int32Len)
} else {
@@ -93,6 +107,8 @@ CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
}
}
this.pushBufferIfneeded();
if(chunk.length - offset) {
var slice = chunk.slice(offset)
this._remainder = slice

View File

@@ -1,3 +1,5 @@
'use strict';
var CopyToQueryStream = require('./copy-to')
module.exports = {
to: function(txt, options) {
@@ -28,11 +30,11 @@ CopyStreamQuery.prototype.submit = function(connection) {
}
var copyDataBuffer = Buffer([code.CopyData])
var copyDataBuffer = Buffer.from([code.CopyData])
CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
var Int32Len = 4;
this.push(copyDataBuffer)
var lenBuffer = Buffer(Int32Len)
var lenBuffer = Buffer.alloc(Int32Len)
lenBuffer.writeUInt32BE(chunk.length + Int32Len, 0)
this.push(lenBuffer)
this.push(chunk)
@@ -41,7 +43,7 @@ CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
CopyStreamQuery.prototype._flush = function(cb) {
var Int32Len = 4;
var finBuffer = Buffer([code.CopyDone, 0, 0, 0, Int32Len])
var finBuffer = Buffer.from([code.CopyDone, 0, 0, 0, Int32Len])
this.push(finBuffer)
this.cb_flush = cb
}
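
The replacements in the hunks above follow the standard migration off the deprecated `Buffer(...)` constructor (a sketch; the 0x64/0x63 values are the PostgreSQL CopyData/CopyDone message bytes):

    // Buffer.from() copies the given bytes; Buffer.alloc() returns a zero-filled buffer.
    var code = { CopyData: 0x64, CopyDone: 0x63 }
    var copyDataBuffer = Buffer.from([code.CopyData])  // was: Buffer([code.CopyData])
    var lenBuffer = Buffer.alloc(4)                    // was: Buffer(4), potentially uninitialized memory
    lenBuffer.writeUInt32BE(0, 0)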

193
package-lock.json generated Normal file
View File

@@ -0,0 +1,193 @@
{
"name": "pg-copy-streams",
"version": "1.2.0-carto.4",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"async": {
"version": "0.2.10",
"resolved": "http://registry.npmjs.org/async/-/async-0.2.10.tgz",
"integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=",
"dev": true
},
"base64-js": {
"version": "0.0.2",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-0.0.2.tgz",
"integrity": "sha1-Ak8Pcq+iW3X5wO5zzU9V7Bvtl4Q=",
"dev": true
},
"bops": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/bops/-/bops-0.0.6.tgz",
"integrity": "sha1-CC0dVfoB5g29wuvC26N/ZZVUzzo=",
"dev": true,
"requires": {
"base64-js": "0.0.2",
"to-utf8": "0.0.1"
}
},
"buffer-writer": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-1.0.1.tgz",
"integrity": "sha1-Iqk2kB4wKa/NdUfrRIfOtpejvwg=",
"dev": true
},
"concat-stream": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.1.0.tgz",
"integrity": "sha1-hCae/YzGUCdeMi8wnfRIZ7xRxfM=",
"dev": true,
"requires": {
"bops": "0.0.6"
}
},
"generic-pool": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-2.1.1.tgz",
"integrity": "sha1-rwTcLDJc/Ll1Aj+lK/zpYXp0Nf0=",
"dev": true
},
"gonna": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/gonna/-/gonna-0.0.0.tgz",
"integrity": "sha1-6k4ZsVJ6F4LhJQVeMCSabUvHmlk=",
"dev": true
},
"heroku-env": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/heroku-env/-/heroku-env-0.1.1.tgz",
"integrity": "sha1-wGeRyUTpuHSOMXf1S/cBQyZ+Yxc=",
"dev": true,
"requires": {
"parse-database-url": "~0.2.0"
}
},
"lodash": {
"version": "2.2.1",
"resolved": "http://registry.npmjs.org/lodash/-/lodash-2.2.1.tgz",
"integrity": "sha1-ypNf0UqzwMhyq6zxmLnNpQFECGc=",
"dev": true
},
"packet-reader": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-0.2.0.tgz",
"integrity": "sha1-gZ300BC4LV6lZx+KGjrPA5vNdwA=",
"dev": true
},
"parse-database-url": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/parse-database-url/-/parse-database-url-0.2.2.tgz",
"integrity": "sha1-SGFa56fA/HfjKU0jVCpqUnPDVws=",
"dev": true
},
"pg": {
"version": "4.4.6",
"resolved": "http://registry.npmjs.org/pg/-/pg-4.4.6.tgz",
"integrity": "sha1-EZgiP7rva6QRqm9Q4X9OtaGTFVk=",
"dev": true,
"requires": {
"buffer-writer": "1.0.1",
"generic-pool": "2.1.1",
"packet-reader": "0.2.0",
"pg-connection-string": "0.1.3",
"pg-types": "1.*",
"pgpass": "0.0.3",
"semver": "^4.1.0"
}
},
"pg-connection-string": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-0.1.3.tgz",
"integrity": "sha1-2hhHsglA5C7hSSvq9l1J2RskXfc=",
"dev": true
},
"pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"dev": true
},
"pg-types": {
"version": "1.13.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-1.13.0.tgz",
"integrity": "sha512-lfKli0Gkl/+za/+b6lzENajczwZHc7D5kiUCZfgm914jipD2kIOIvEkAhZ8GrW3/TUoP9w8FHjwpPObBye5KQQ==",
"dev": true,
"requires": {
"pg-int8": "1.0.1",
"postgres-array": "~1.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.0",
"postgres-interval": "^1.1.0"
}
},
"pgpass": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-0.0.3.tgz",
"integrity": "sha1-EuZ+NDsxicLzEgbrycwL7//PkUA=",
"dev": true,
"requires": {
"split": "~0.3"
}
},
"postgres-array": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-1.0.3.tgz",
"integrity": "sha512-5wClXrAP0+78mcsNX3/ithQ5exKvCyK5lr5NEEEeGwwM6NJdQgzIJBVxLvRW+huFpX92F2QnZ5CcokH0VhK2qQ==",
"dev": true
},
"postgres-bytea": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
"integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=",
"dev": true
},
"postgres-date": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.3.tgz",
"integrity": "sha1-4tiXAu/bJY/52c7g/pG9BpdSV6g=",
"dev": true
},
"postgres-interval": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.1.2.tgz",
"integrity": "sha512-fC3xNHeTskCxL1dC8KOtxXt7YeFmlbTYtn7ul8MkVERuTmf7pI4DrkAxcw3kh1fQ9uz4wQmd03a1mRiXUZChfQ==",
"dev": true,
"requires": {
"xtend": "^4.0.0"
}
},
"semver": {
"version": "4.3.6",
"resolved": "http://registry.npmjs.org/semver/-/semver-4.3.6.tgz",
"integrity": "sha1-MAvG4OhjdPe6YQaLWx7NV/xlMto=",
"dev": true
},
"split": {
"version": "0.3.3",
"resolved": "http://registry.npmjs.org/split/-/split-0.3.3.tgz",
"integrity": "sha1-zQ7qXmOiEd//frDwkcQTPi0N0o8=",
"dev": true,
"requires": {
"through": "2"
}
},
"through": {
"version": "2.3.8",
"resolved": "http://registry.npmjs.org/through/-/through-2.3.8.tgz",
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=",
"dev": true
},
"to-utf8": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/to-utf8/-/to-utf8-0.0.1.tgz",
"integrity": "sha1-0Xrqcv8vujm55DYBvns/9y4ImFI=",
"dev": true
},
"xtend": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz",
"integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=",
"dev": true
}
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "pg-copy-streams",
"version": "1.2.0",
"version": "1.2.0-carto.4",
"description": "Low-Level COPY TO and COPY FROM streams for PostgreSQL in JavaScript using",
"main": "index.js",
"scripts": {

View File

@@ -1,3 +1,5 @@
'use strict';
var assert = require('assert')
var gonna = require('gonna')
@@ -19,7 +21,7 @@ var testBinaryCopy = function() {
var fromClient = client()
var toClient = client()
queries = [
var queries = [
'DROP TABLE IF EXISTS data',
'CREATE TABLE IF NOT EXISTS data (num BIGINT, word TEXT)',
'INSERT INTO data (num, word) VALUES (1, \'hello\'), (2, \'other thing\'), (3, \'goodbye\')',
@@ -33,7 +35,7 @@ var testBinaryCopy = function() {
var fromStream = fromClient.query(to('COPY (SELECT * FROM data) TO STDOUT BINARY'))
var toStream = toClient.query(from('COPY data_copy FROM STDIN BINARY'))
runStream = function(callback) {
var runStream = function(callback) {
fromStream.on('error', callback)
toStream.on('error', callback)
toStream.on('finish', callback)
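
For context, the pattern this test exercises — piping the module's COPY TO stream into its COPY FROM stream — looks roughly like this (a sketch assembled from the test above; `fromClient`/`toClient` are its connected pg clients):

    var to = require('pg-copy-streams').to
    var from = require('pg-copy-streams').from

    // Stream one table's contents into another via the binary COPY protocol.
    var fromStream = fromClient.query(to('COPY (SELECT * FROM data) TO STDOUT BINARY'))
    var toStream = toClient.query(from('COPY data_copy FROM STDIN BINARY'))
    fromStream.pipe(toStream)
    toStream.on('finish', function() { /* copy finished; verify rows and end the clients */ })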

View File

@@ -1,3 +1,5 @@
'use strict';
var assert = require('assert')
var gonna = require('gonna')
@@ -31,7 +33,7 @@ var testRange = function(top) {
var txt = 'COPY numbers FROM STDIN'
var stream = fromClient.query(copy(txt))
for(var i = 0; i < top; i++) {
stream.write(Buffer('' + i + '\t' + i*10 + '\n'))
stream.write(Buffer.from('' + i + '\t' + i*10 + '\n'))
}
stream.end()
var countDone = gonna('have correct count')
@@ -67,7 +69,7 @@ var testSingleEnd = function() {
assert(count==1, '`end` Event was triggered ' + count + ' times');
if (count == 1) fromClient.end();
})
stream.end(Buffer('1\n'))
stream.end(Buffer.from('1\n'))
}
testSingleEnd()

View File

@@ -1,3 +1,5 @@
'use strict';
var assert = require('assert')
var gonna = require('gonna')
@@ -29,7 +31,7 @@ var testComparators = function() {
assert(copy1._gotCopyOutResponse, 'should have received CopyOutResponse')
assert(!copy1._remainder, 'Message with no additional data (len=Int4Len+0) should not leave a remainder')
}))
copy1.end(new Buffer([code.CopyOutResponse, 0x00, 0x00, 0x00, 0x04]));
copy1.end(Buffer.from([code.CopyOutResponse, 0x00, 0x00, 0x00, 0x04]));
}
@@ -118,5 +120,3 @@ var testNoticeResponse = function() {
}
testNoticeResponse();

View File

@@ -1,3 +1,5 @@
'use strict';
require('./copy-from')
require('./copy-to')
require('./binary')