This commit is contained in:
Lukian LEIZOUR 2022-11-26 15:56:34 +01:00
parent 70e2f7a8aa
commit 008d2f30d7
675 changed files with 189892 additions and 0 deletions

59
node_modules/requests/test/development.js generated vendored Normal file
View file

@ -0,0 +1,59 @@
'use strict';

/**
 * Tiny development server for the `requests` test page: serves static files
 * from the test directory, compiles the standalone browser bundle on the fly
 * with browserify, and exposes a slow `/stream` endpoint so chunked responses
 * can be observed in the browser.
 */
var browserify = require('browserify');
var path = require('path');
var fs = require('fs');

require('http').createServer(function statical(req, res) {
  res.statusCode = 200;
  res.setHeader('Content-Type', 'text/html');

  switch (req.url) {
    case '/':
      //
      // Root URL serves the test page; fall through to the static file
      // handling below with the rewritten path.
      //
      req.url = '/index.html';
      break;

    case '/dist/requests.js':
      //
      // Compile the standalone browser bundle on demand, stream it to the
      // client and persist a copy next to the sources at the same time.
      //
      res.setHeader('Content-Type', 'text/javascript');

      var compiler = browserify({ debug: true, standalone: 'Requests' });
      compiler.add(path.join(__dirname, '..', 'browser.js'));

      var stream = compiler.bundle();
      stream.pipe(res);
      stream.pipe(fs.createWriteStream(path.join(__dirname, '..', req.url)));
      return;

    case '/stream':
      //
      // Emit three chunks with (long) pauses in between so streaming
      // behaviour is clearly visible on the test page.
      //
      res.write('[first chunk]');
      setTimeout(function () {
        res.write('[second chunk]');
        setTimeout(function () {
          res.end('[final chunk]');
        }, 10000);
      }, 100);
      return;
  }

  if (!fs.existsSync(__dirname + req.url)) {
    res.write('<script src="/dist/requests.js"></script>');
    return res.end('Nope, doesn\'t exist.');
  }

  fs.createReadStream(__dirname + req.url).pipe(res);
}).on('connection', function connection(socket) {
  //
  // Force buffer flushing when we call our write.
  //
  socket.setNoDelay(true);
}).listen(+process.argv[2] || 8080, function listening() {
  console.log('Development server is now running on:');
  console.log('');
  console.log(' http://localhost:'+ this.address().port);
  console.log('');
});

26
node_modules/requests/test/index.html generated vendored Normal file
View file

@ -0,0 +1,26 @@
<div id="output">
  <h2>Progress log</h2>
</div>
<script src="/dist/requests.js"></script>
<script>
  /**
   * Append a single line to the on-page progress log. Deliberately uses the
   * crude innerHTML approach so it works in every browser we test against.
   *
   * @param {String} line The message that needs to be logged
   * @api private
   */
  function log(line) {
    document.getElementById('output').innerHTML += line +'<br />';
  }

  //
  // Open a streaming request against the dev server's `/stream` endpoint and
  // log every chunk as it arrives.
  //
  var request = new Requests('/stream', { streaming: true });

  request.on('data', function (data) {
    log('<strong>received chunk:</strong> <code>'+ data +'</code>');
  });

  request.on('end', function () {
    log('<strong>request has ended</strong>');
  });
</script>

107
node_modules/requests/test/index.js generated vendored Normal file
View file

@ -0,0 +1,107 @@
'use strict';

var path = require('path');
var Mocha = require('mocha');
var argv = require('argh').argv;
var mochify = require('mochify');

//
// Command line defaults for the mocha/mochify runs below.
//
argv.reporter = argv.reporter || 'spec';
argv.ui = argv.ui || 'bdd';
argv.wd = argv.wd || false;

/**
 * Poor mans kill switch. Kills all active hooks.
 *
 * @api private
 */
function kill() {
  require('async-each')(kill.hooks, function each(fn, next) {
    fn(next);
  }, function done(err) {
    if (err) return process.exit(1);

    process.exit(0);
  });
}

/**
 * All the hooks that need destruction.
 *
 * @type {Array}
 * @private
 */
kill.hooks = [];

//
// This is the magical test runner that setup's all the things and runs various
// of test suites until something starts failing.
//
(function runner(steps) {
  if (!steps.length) {
    kill();
    return runner;
  }

  var step = steps.shift();

  step(function unregister(fn) {
    //
    // Remember the teardown hook so `kill` can run it on shutdown.
    //
    kill.hooks.push(fn);
  }, function register(err) {
    if (err) throw err;

    runner(steps);
  });

  return runner;
})([
  //
  // Run the normal node tests.
  //
  function creamy(hook, next) {
    var mocha = new Mocha();

    mocha.reporter(argv.reporter);
    mocha.ui(argv.ui);

    //
    // The next bulk of logic is required to correctly glob and lookup all the
    // files required for testing.
    //
    var globs = ['./test/*.test.js'];
    var matches = globs.map(function lookup(glob) {
      return Mocha.utils.lookupFiles(glob, ['js']);
    });

    mocha.files = matches.reduce(function flatten(arr, what) {
      Array.prototype.push.apply(arr, what);
      return arr;
    }, []).map(function resolve(file) {
      return path.resolve(file);
    });

    //
    // Run the mocha test suite inside this node process with a custom callback
    // so we don't accidentally exit the process and forget to run the rest of
    // the steps.
    //
    mocha.run(function ran(err) {
      if (err) err = new Error('Something failed in the mocha test suite');

      next(err);
    });
  },

  //
  // Start-up a small static file server so we can download files and fixtures
  // inside our PhantomJS test.
  //
  require('./static'),

  //
  // Run the PhantomJS tests now that we have a small static server setup.
  //
  function phantomjs(hook, next) {
    mochify('./test/*.browser.js', {
      reporter: argv.reporter,
      cover: argv.cover,
      wd: argv.wd,
      ui: argv.ui
    })
    .bundle(next);
  }
]);

79794
node_modules/requests/test/large.js generated vendored Normal file

File diff suppressed because it is too large Load diff

56
node_modules/requests/test/requested.js generated vendored Normal file
View file

@ -0,0 +1,56 @@
describe('Requested', function () {
  'use strict';

  var Requested = require('../requested');
  var assume = require('assume');
  var r;

  it('is exported as a function', function () {
    assume(Requested).is.a('function');
  });

  describe('#typeof', function () {
    it('knows the difference between an array and object', function () {
      var instance = new Requested();

      assume(instance.typeof({})).equals('object');
      assume(instance.typeof([])).equals('array');
    });
  });

  describe('#merge', function () {
    before(function () {
      r = new Requested();
    });

    it('returns the merge', function () {
      var target = { foo: 'foo' };
      var source = { bar: 'bar' };
      var result = r.merge(target, source);

      // Merging happens in place: the first argument is returned, mutated.
      assume(result).equals(target);
      assume(target.bar).equals('bar');
    });

    it('merges multiple objects', function () {
      var merged = r.merge({}, { foo: 'foo' }, { bar: 'bar' }, { hello: 'world' });

      assume(merged.foo).equals('foo');
      assume(merged.bar).equals('bar');
      assume(merged.hello).equals('world');
    });

    it('can deep merge without modification', function () {
      var target = { foo: 'foo' };
      var first = { deep: { nested: 'object' } };
      var second = { deep: { another: 'key' } };
      var merged = r.merge(target, first, second);

      // Nested objects are copied, not referenced, and keys from every
      // source end up combined on the target.
      assume(target.deep).is.a('object');
      assume(target.deep).does.not.equal(first.deep);
      assume(target.deep).does.not.equal(second.deep);
      assume(target.deep.nested).equals('object');
      assume(target.deep.another).equals('key');
    });
  });
});

103
node_modules/requests/test/requests.browser.js generated vendored Normal file
View file

@ -0,0 +1,103 @@
describe('requests', function () {
  'use strict';

  //
  // Include the Base class that we inherit from to ensure that it's also
  // included in the test run as it should run on both browsers and node.js
  //
  require('./requested');

  var Requested = require('../requested');
  var requests = require('..');
  var assume = require('assume');
  var req;

  /**
   * Make a URL unique so we can bust the browser cache, which could otherwise
   * affect the assertions made in these tests.
   *
   * @param {String} url Transform to an URL.
   * @returns {String}
   * @api private
   */
  function unique(url) {
    return url + '?t='+ (+ new Date());
  }

  beforeEach(function () {
    req = requests(unique('http://localhost:8080'), { manual: true });
  });

  afterEach(function () {
    req.destroy();
  });

  it('is exported as function', function () {
    assume(requests).is.a('function');
  });

  it('increments the internal `.id` for each instance', function () {
    var id = req.id;

    assume(id).equals(Requested.requested);

    req.destroy();
    req = requests(unique('http://localhost:8080'), { manual: true });

    assume(req.id).is.above(id);
    assume(Requested.requested).is.above(id);
  });

  it('sets the stream\'s booleans', function () {
    assume(req.readable).is.true();
    assume(req.writable).is.false();
  });

  it('stores active requests', function () {
    assume(requests.active[req.id]).equals(req);
  });

  it('does not receive content for 204 requests', function (done) {
    req.destroy();
    req = requests(unique('http://localhost:8080/204'));

    req.on('data', function () {
      throw new Error('I should never be called');
    });

    req.on('end', done);
  });

  it('can handle large files with streaming', function (done) {
    this.timeout(3E4);

    req = requests(unique('http://localhost:8080/unshiftio/requests/master/test/large.js'), {
      streaming: true
    });

    var chunks = [];

    req.on('data', function received(chunk) {
      chunks.push(chunk);
    });

    req.on('error', done);
    req.once('end', function end(err, status) {
      assume(chunks.length).to.be.above(1);
      assume(chunks.join('').length).equals(2127897);
      assume(status.code).to.equal(200);
      assume(status.text).to.equal('OK');

      chunks = null;
      done();
    });
  });

  describe('#destroy', function () {
    it('removes the .active instance', function () {
      assume(requests.active[req.id]).equals(req);
      req.destroy();
      assume(requests.active[req.id]).is.undefined();
    });
  });
});

100
node_modules/requests/test/requests.test.js generated vendored Normal file
View file

@ -0,0 +1,100 @@
'use strict';
var path = require('path');
var fs = require('fs');
var assume = require('assume');
var requests = require('../');
var staticserver = require('./static');
/**
 * Make a URL unique so repeated requests bypass any HTTP caches that could
 * otherwise affect the assertions in these tests.
 *
 * @param {String} url Transform to an URL.
 * @returns {String} A unique URL.
 * @api private
 */
function unique(url) {
  return url + '?t=' + Date.now();
}
describe('requests', function () {
  var closeServer;
  var req;

  before(function (done) {
    //
    // Start-up a small static file server so we can download files and fixtures
    // inside our tests.
    //
    staticserver(function (close) {
      closeServer = close;
    }, done);
  });

  after(function (done) {
    closeServer(done);
  });

  beforeEach(function () {
    req = requests(unique('http://localhost:8080/index.html'), { manual: true });
  });

  afterEach(function () {
    req.destroy();
  });

  it('is exported as function', function () {
    assume(requests).is.a('function');
  });

  it('sets the stream\'s booleans', function () {
    assume(req.readable).is.true();
    assume(req.writable).is.false();
  });

  it('stores active requests', function () {
    assume(requests.active[req.id]).equals(req);
  });

  it('successfully makes a request', function (done) {
    var body = '';

    req.on('data', function (data) {
      body += data;
    });

    req.on('end', function () {
      // The downloaded response must match the fixture on disk exactly.
      var fixture = path.resolve(__dirname, 'index.html');

      fs.readFile(fixture, { encoding: 'utf8' }, function (err, file) {
        if (err) throw err;

        assume(body).equals(file);
        done();
      });
    });

    req.open();
  });

  it('does not receive content for 204 requests', function (done) {
    req.destroy();
    req = requests(unique('http://localhost:8080/204'));

    req.on('data', function () {
      throw new Error('I should never be called');
    });

    req.on('end', done);
  });

  describe('#destroy', function () {
    it('removes the .active instance', function () {
      assume(requests.active[req.id]).equals(req);
      req.destroy();
      assume(requests.active[req.id]).is.undefined();
    });
  });
});

50
node_modules/requests/test/static.js generated vendored Normal file
View file

@ -0,0 +1,50 @@
'use strict';
var fs = require('fs')
, url = require('url')
, path = require('path')
, http = require('http')
, setHeader = require('setheader')
, httpProxy = require('http-proxy');
module.exports = function staticserver(kill, next) {
var proxy = httpProxy.createProxyServer({});
var server = http.createServer(function serve(req, res) {
var file = path.join(__dirname, url.parse(req.url).pathname);
setHeader(res, 'Access-Control-Allow-Origin', req.headers.origin || '*');
setHeader(res, 'Access-Control-Allow-Credentials', 'true');
if (~req.url.indexOf('/204')) {
res.statusCode = 204;
return res.end('');
}
if (!fs.existsSync(file)) {
req.headers.host = '';
setHeader(res, 'Content-Security-Policy', 'removed');
return proxy.web(req, res, { target: 'https://raw.githubusercontent.com' });
}
res.statusCode = 200;
fs.createReadStream(file).pipe(res);
});
kill(function close(next) {
server.close(next);
proxy.close();
});
server.listen(8080, next);
};
//
// Static server loaded directly from the command line.
//
if (require.main === module) {
  module.exports(function kill() {
    // No teardown hooks needed when running stand-alone.
  }, function next(err) {
    if (err) throw err;

    console.log('static server listening on ', this.address());
  });
}