How do I limit the bandwidth of an HTTP request in Node?
Streaming bandwidth control must be implemented on both ends, the server and the client.
From the client's perspective:
The upload rate can be managed by throttling the client application, the client network layer, or the server network layer.
The download rate can be managed by throttling the server application, the server network layer, or the client network layer.
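For example, throttling an upload at the client application layer just means piping the request body through a throttled stream before it reaches the socket. Here is a minimal sketch using the same stream-throttle package; the URL, file name, and 512 KiB/s rate are only placeholders:
const fs = require('fs');
const http = require('http');
const {Throttle} = require('stream-throttle');

// upload "upload.jpg" at no more than 512 KiB/s
const req = http.request({host: '127.0.0.1', port: 80, method: 'POST', path: '/upload'}, (res) => {
    console.log('status', res.statusCode);
});
fs.createReadStream('upload.jpg')
    .pipe(new Throttle({rate: 512 * 1024})) // throttle the request body
    .pipe(req); // pipe() ends the request when the file has been sent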
Please have a look at the download test code below. You may change the rate variable on both sides.
Environment
node v10.16.3 on Windows 10
server.js
var fs = require('fs');
var http = require('http');
const {ThrottleGroup} = require("stream-throttle");

/**
 * Change the rate to various values to test
 */
var tg = new ThrottleGroup({rate: 1024 * 1024}); // 1 MiB per sec

/**
 * Use your own file here.
 * Mine is 4.73 MB (4,961,271 bytes), so at 1 MiB/s it takes 4~5 sec to send.
 */
var source = "source.jpg";

var server = http.createServer((req, res) => {
    var rstream = fs.createReadStream(source);
    rstream
        .pipe(tg.throttle()) // throttle here
        .pipe(res);

    // log the read-stream events
    rstream
        .on('open', () => {
            console.log('open', new Date())
        })
        .on('data', (chunk) => {
            console.log(new Date(), chunk.length) // max 65536 bytes per chunk
        })
        .on('close', () => {
            console.log('close', new Date())
        });
});

server.listen(80, '127.0.0.1'); // start the server
// OUTPUT when a client makes a request; max chunk is 65536 bytes
>node server.js
open 2019-09-13T05:27:40.724Z
2019-09-13T05:27:40.730Z 65536
2019-09-13T05:27:40.732Z 65536
...
2019-09-13T05:27:44.355Z 65536
2019-09-13T05:27:44.419Z 46071
close 2019-09-13T05:27:44.421Z
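Note that server.js shares one ThrottleGroup across all connections, so concurrent downloads split the same 1 MiB/s budget. If each connection should instead get its own 1 MiB/s, a per-request Throttle can be created inside the handler; a minimal sketch with the same package (the rate is just an example):
const {Throttle} = require("stream-throttle");
// inside http.createServer((req, res) => { ... })
fs.createReadStream(source)
    .pipe(new Throttle({rate: 1024 * 1024})) // each response gets its own 1 MiB/s
    .pipe(res);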
client.js
const fs = require('fs');
const http = require("http");
const {ThrottleGroup} = require("stream-throttle");

var tg = new ThrottleGroup({rate: 1024 * 1024 * 2}); // 2 MiB per sec

/**
 * Receiving 4.73 MB (4,961,271 bytes).
 * The client throttle alone would allow the download in 2~3 sec,
 * but the server side is throttled to 1 MiB/s,
 * so it still takes 4~5 sec to download.
 */
var wstream = fs.createWriteStream("output.jpg");
wstream
    .on('open', () => {
        console.log('open', new Date())
    })
    .on('finish', () => {
        console.log('finish', new Date())
    });

var dataLength = 0;
http.get('http://127.0.0.1/', (res) => {
    res
        .pipe(tg.throttle()) // throttle here
        .pipe(wstream);

    // log the response events
    res
        .on('data', (chunk) => {
            dataLength += chunk.length
            console.log(new Date(), `data length: ${dataLength}`)
        })
        .on('close', () => {
            console.log('res close', new Date())
        })
});
//OUTPUT
>node client.js
open 2019-09-13T05:27:40.718Z
2019-09-13T05:27:40.736Z 'data length: 65426'
2019-09-13T05:27:40.741Z 'data length: 65536'
2019-09-13T05:27:40.742Z 'data length: 130953'
...
2019-09-13T05:27:44.463Z 'data length: 4961271'
finish 2019-09-13T05:27:44.474Z
res close 2019-09-13T05:27:44.476Z
For a real-world example, change the client.js throttle rate and the line
http.get('http://127.0.0.1/', (res) => {
to something like
http.get('http://i.ytimg.com/vi/ZYifkcmIb-4/maxresdefault.jpg', (res) => {
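Put together, a throttled real-world download might look like the following sketch; the 200 KiB/s rate and the output file name are only examples:
const fs = require('fs');
const http = require('http');
const {ThrottleGroup} = require('stream-throttle');

const tg = new ThrottleGroup({rate: 200 * 1024}); // 200 KiB per sec

http.get('http://i.ytimg.com/vi/ZYifkcmIb-4/maxresdefault.jpg', (res) => {
    res
        .pipe(tg.throttle())                              // client-side throttle
        .pipe(fs.createWriteStream('maxresdefault.jpg')); // save to disk
});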
In the real world, networking is more complicated because more actors are involved:
SERVER side OSI MODEL <==> NETWORK <==> CLIENT side OSI MODEL
Because the internet provider or carrier may throttle its ports, this will also affect your upload and download rates.
I managed to get it working by doing away with a custom agent and using createConnection inside the http.request options:
const net = require("net");
const http = require("http");
const {Throttle} = require("stream-throttle");

const options = {
    // supply our own socket instead of letting an Agent create one
    createConnection(options) {
        const socket = new net.Socket();
        return socket.connect({host: options.host, port: options.port});
    },
    hostname: "212.183.159.230",
    path: "/5MB.zip"
};

const time = Date.now();
const req = http.request(options, (res) => {
    res.pipe(new Throttle({rate: 200 * 1024})) // limit to 200 KiB/s
        .on("data", (chunk) => {
            console.log(chunk.length);
        });
    res.on("end", () => {
        console.log("Done! Elapsed time: " + (Date.now() - time) + "ms");
    });
});
req.end(); // actually send the request
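With createConnection, http.request is handed a socket you created yourself instead of one managed by an Agent, and the limit is applied by piping the response through stream-throttle's Throttle transform. Because Node streams apply backpressure, reading the response at 200 KiB/s eventually slows the underlying TCP socket as well, so the transfer itself is limited rather than just buffered locally.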