Commit e9b83a50 authored by erwan

Add js files to support gitlab semantic release

parent db2f3ff0
Pipeline #20895 passed in 4 minutes and 6 seconds
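// Test: recursive path matching. Streams fixtures/depth.json through
// JSONStream.parse(['docs', {recurse: true}, 'value']) and checks that every
// nested 'value' is emitted, however deeply it is buried.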
var fs = require('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','depth.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['docs', {recurse: true}, 'value'])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
var expectedValues = [0, [1], {"a": 2}, "3", 4]
it(called).equal(expectedValues.length)
for (var i = 0; i < expectedValues.length; i++)
it(parsed[i]).deepEqual(expectedValues[i])
console.error('PASSED')
})
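// Test: an empty 'docs' array must emit no data events, while a one-element
// array emits its single object; both parsers must still reach 'end'.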
var JSONStream = require('../')
, stream = require('stream')
, it = require('it-is')
var output = [ [], [] ]
var parser1 = JSONStream.parse(['docs', /./])
parser1.on('data', function(data) {
output[0].push(data)
})
var parser2 = JSONStream.parse(['docs', /./])
parser2.on('data', function(data) {
output[1].push(data)
})
var pending = 2
function onend () {
if (--pending > 0) return
it(output).deepEqual([
[], [{hello: 'world'}]
])
console.error('PASSED')
}
parser1.on('end', onend)
parser2.on('end', onend)
function makeReadableStream() {
var readStream = new stream.Stream()
readStream.readable = true
readStream.write = function (data) { this.emit('data', data) }
readStream.end = function (data) { this.emit('end') }
return readStream
}
var emptyArray = makeReadableStream()
emptyArray.pipe(parser1)
emptyArray.write('{"docs":[]}')
emptyArray.end()
var objectArray = makeReadableStream()
objectArray.pipe(parser2)
objectArray.write('{"docs":[{"hello":"world"}]}')
objectArray.end()
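// Test: an error document with no 'rows' array is emitted once as 'header',
// and the parser emits no 'data' or 'footer' events at all.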
var fs = require('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','error.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows'])
, called = 0
, headerCalled = 0
, footerCalled = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('header', function (data) {
headerCalled ++
it(data).deepEqual({
error: 'error_code',
message: 'this is an error message'
})
})
parser.on('footer', function (data) {
footerCalled ++
})
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(0)
it(headerCalled).equal(1)
it(footerCalled).equal(0)
console.error('PASSED')
})
This diff is collapsed. (Likely the large fixtures/all_npm.json read by the path-function test below.)
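fixtures/couch_sample.json: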
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
}
]}
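fixtures/depth.json: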
{
"total": 5,
"docs": [
{
"key": {
"value": 0,
"some": "property"
}
},
{"value": [1]},
{"value": {"a":2}},
{"blbl": [{}, {"a":0, "b":1, "value":"3"}, 10]},
{"value": 4}
]
}
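fixtures/error.json: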
{"error": "error_code", "message": "this is an error message"}
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
}
],
"foo": {"bar": "baz"}}
var fs = require('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
function fn (s) {
return !isNaN(parseInt(s, 10))
}
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', fn])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it(data).has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})
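// Test: generates a large JSON array (100,000 documents by default) on disk,
// then streams it through JSONStream.parse([true]). The comments below record
// that this historically crashed with 'JS Allocation failed'; the whole test
// is currently disabled by the top-level return.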
return // don't run this test for now since tape is weird and broken on 0.10
var fs = require('fs')
var JSONStream = require('../')
var file = process.argv[2] || '/tmp/JSONStream-test-large.json'
var size = Number(process.argv[3] || 100000)
var tape = require('tape')
// if (process.title !== 'browser') {
tape('out of mem', function (t) {
t.plan(1)
//////////////////////////////////////////////////////
// Produces a random number between arg1 and arg2
//////////////////////////////////////////////////////
var randomNumber = function (min, max) {
var number = Math.floor(Math.random() * (max - min + 1) + min);
return number;
};
//////////////////////////////////////////////////////
// Produces a random string of a length between arg1 and arg2
//////////////////////////////////////////////////////
var randomString = function (min, max) {
// add several spaces to increase chances of creating 'words'
var chars = ' 0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
var result = '';
var randomLength = randomNumber(min, max);
for (var i = randomLength; i > 0; --i) {
result += chars[Math.round(Math.random() * (chars.length - 1))];
}
return result;
};
//////////////////////////////////////////////////////
// Produces a random JSON document, as a string
//////////////////////////////////////////////////////
var randomJsonDoc = function () {
var doc = {
"CrashOccurenceID": randomNumber(10000, 50000),
"CrashID": randomNumber(1000, 10000),
"SiteName": randomString(10, 25),
"MachineName": randomString(10, 25),
"Date": randomString(26, 26),
"ProcessDuration": randomString(18, 18),
"ThreadIdentityName": null,
"WindowsIdentityName": randomString(15, 40),
"OperatingSystemName": randomString(35, 65),
"DetailedExceptionInformation": randomString(100, 800)
};
doc = JSON.stringify(doc);
doc = doc.replace(/\,/g, ',\n'); // add new lines after each attribute
return doc;
};
//////////////////////////////////////////////////////
// generates test data
//////////////////////////////////////////////////////
var generateTestData = function (cb) {
console.log('generating large data file...');
var stream = fs.createWriteStream(file, {
encoding: 'utf8'
});
var i = 0;
var max = size;
var writing = false
var split = ',\n';
var doc = randomJsonDoc();
stream.write('[');
function write () {
if(writing) return
writing = true
while(++i < max) {
if(Math.random() < 0.001)
console.log('generate..', i + ' / ' + size)
if(!stream.write(doc + split)) {
writing = false
return stream.once('drain', write)
}
}
stream.write(doc + ']')
stream.end();
console.log('END')
}
write()
stream.on('close', cb)
};
//////////////////////////////////////////////////////
// Shows that parsing 100000 instances using JSONStream fails
//
// After several seconds, you will get this crash
// FATAL ERROR: JS Allocation failed - process out of memory
//////////////////////////////////////////////////////
var testJSONStreamParse_causesOutOfMem = function (done) {
var items = 0
console.log('parsing data files using JSONStream...');
var parser = JSONStream.parse([true]);
var stream = fs.createReadStream(file);
stream.pipe(parser);
parser.on('data', function (data) {
items++
if(Math.random() < 0.01) console.log(items, '...')
});
parser.on('end', function () {
t.equal(items, size)
});
};
//////////////////////////////////////////////////////
// main
//////////////////////////////////////////////////////
fs.stat(file, function (err, stat) {
console.log(stat)
if(err)
generateTestData(testJSONStreamParse_causesOutOfMem);
else
testJSONStreamParse_causesOutOfMem()
})
})
// }
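// Test: 'header' fires once with everything seen before the matched rows,
// and 'footer' fires once with everything after them (here {foo: {bar: 'baz'}}).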
var fs = require('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','header_footer.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
, called = 0
, headerCalled = 0
, footerCalled = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('header', function (data) {
headerCalled ++
it(data).deepEqual({
total_rows: 129,
offset: 0
})
})
parser.on('footer', function (data) {
footerCalled ++
it(data).deepEqual({
foo: { bar: 'baz' }
})
})
parser.on('data', function (data) {
called ++
it(data).has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
it(headerCalled).equal(1)
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(headerCalled).equal(1)
it(footerCalled).equal(1)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})
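// Regression tests: #66 expects exactly one 'error' event for malformed JSON;
// #81 expects a '.bar.foo' path to parse nested objects without error.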
var JSONStream = require('../');
var test = require('tape')
test('#66', function (t) {
var error = 0;
var stream = JSONStream
.parse()
.on('error', function (err) {
t.ok(err);
error++;
})
.on('end', function () {
t.ok(error === 1);
t.end();
});
stream.write('["foo":bar[');
stream.end();
});
test('#81 - failure to parse nested objects', function (t) {
var stream = JSONStream
.parse('.bar.foo')
.on('error', function (err) {
t.error(err);
})
.on('end', function () {
t.end();
});
stream.write('{"bar":{"foo":"baz"}}');
stream.end();
});
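// Tests for key and path emission: 'obj.$*' and ['obj', {emitKey: true}] emit
// {key, value} pairs; {emitPath: true} emits the full path to each value.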
var test = require('tape');
var fs = require('fs');
var join = require('path').join;
var couch_sample_file = join(__dirname, 'fixtures','couch_sample.json');
var JSONStream = require('../');
var fixture = {
obj: {
one: 1,
two: 2,
three: 3
}
};
function assertFixtureKeys(stream, t) {
var keys = [];
var values = [];
stream.on('data', function(data) {
keys.push(data.key);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(keys, ['one', 'two', 'three']);
t.deepEqual(values, [1,2,3]);
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
}
test('keys via string', function(t) {
var stream = JSONStream.parse('obj.$*');
assertFixtureKeys(stream, t);
});
test('keys via array', function(t) {
var stream = JSONStream.parse(['obj',{emitKey: true}]);
assertFixtureKeys(stream, t);
});
test('path via array', function(t) {
var stream = JSONStream.parse(['obj',{emitPath: true}]);
var paths = [];
var values = [];
stream.on('data', function(data) {
console.log(JSON.stringify(data));
paths.push(data.path);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(paths, [['obj', 'one'], ['obj', 'two'], ['obj', 'three']]);
t.deepEqual(values, [1,2,3]);
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
});
test('advanced keys', function(t) {
var advanced = fs.readFileSync(couch_sample_file);
var stream = JSONStream.parse(['rows', true, 'doc', {emitKey: true}]);
var keys = [];
var values = [];
stream.on('data', function(data) {
keys.push(data.key);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(keys, [
'_id', '_rev', 'hello',
'_id', '_rev', 'hello'
]);
t.deepEqual(values, [
"change1_0.6995461115147918", "1-e240bae28c7bb3667f02760f6398d508", 1,
"change2_0.6995461115147918", "1-13677d36b98c0c075145bb8975105153", 2
]);
t.end();
});
stream.write(advanced);
stream.end();
});
test('parent keys', function(t) {
var stream = JSONStream.parse('$*');
var d = null;
stream.on('data', function(data) {
if(d) t.fail('should only be called once');
d = data;
});
stream.on('end', function() {
t.deepEqual(d,{
key: 'obj',
value: fixture.obj
});
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
})
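// Test: an optional second argument to parse() maps each value before it is
// emitted; here every element of the array is multiplied by 10.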
var test = require('tape')
var JSONStream = require('../')
test('map function', function (t) {
var actual = []
var stream = JSONStream.parse([true], function (e) { return e*10 })
stream.on('data', function (v) { actual.push(v)})
stream.on('end', function () {
t.deepEqual(actual, [10,20,30,40,50,60])
t.end()
})
stream.write(JSON.stringify([1,2,3,4,5,6], null, 2))
stream.end()
})
test('filter function', function (t) {