
Node.js: chunked transfer encoding

Tags: node.js

Is this code valid HTTP/1.1?

var fs = require('fs')
var http = require('http')

var buf = function(res, fd, i, s, buffer) {
  if (i + buffer.length < s) {
    fs.read(fd, buffer, 0, buffer.length, i, function(e, l, b) {
      res.write(b.slice(0, l))
      //console.log(b.toString('utf8', 0, l))
      i = i + buffer.length
      buf(res, fd, i, s, buffer)
    })
  } else {
    fs.read(fd, buffer, 0, buffer.length, i, function(e, l, b) {
      res.end(b.slice(0, l))
      fs.close(fd)
    })
  }
}

var app = function(req, res) {
  var head = {'Content-Type': 'text/html; charset=UTF-8'}
  switch (req.url.slice(-3)) {
    case '.js': head = {'Content-Type': 'text/javascript'}; break;
    case 'css': head = {'Content-Type': 'text/css'}; break;
    case 'png': head = {'Content-Type': 'image/png'}; break;
    case 'ico': head = {'Content-Type': 'image/x-icon'}; break;
    case 'ogg': head = {'Content-Type': 'audio/ogg'}; break;
    case 'ebm': head = {'Content-Type': 'video/webm'}; break;
  }
  head['Transfer-Encoding'] = 'chunked'
  res.writeHead(200, head)
  fs.open('.' + req.url, 'r', function(err, fd) {
    fs.fstat(fd, function(err, stats) {
      console.log('.' + req.url + ' ' + stats.size + ' ' + head['Content-Type'] + ' ' + head['Transfer-Encoding'])
      var buffer = new Buffer(100)
      buf(res, fd, 0, stats.size, buffer)
    })
  })
}

http.createServer(app).listen(8000, "127.0.0.1")
console.log('GET http://127.0.0.1:8000/appwsgi/www/index.htm')

Am I violating HTTP/1.1 here? Text files do seem to work fine, but that could be coincidental. Should my status be "200 OK", or does it need to be "100"? Is a single set of headers sufficient?

asked Jun 03 '11 by Gert Cuykens

1 Answer

If you're doing chunked transfer encoding, you actually need to set that header:

Transfer-Encoding: chunked

You can see this in the headers returned by Google, which uses chunked transfer encoding for its homepage and most likely other pages:

HTTP/1.1 200 OK
Date: Sat, 04 Jun 2011 00:04:08 GMT
Expires: -1
Cache-Control: private, max-age=0
Content-Type: text/html; charset=ISO-8859-1
Set-Cookie: PREF=ID=f9c65f4927515ce7:FF=0:TM=1307145848:LM=1307145848:S=fB58RFtpI5YeXdU9; expires=Mon, 03-Jun-2013 00:04:08 GMT; path=/; domain=.google.com
Set-Cookie: NID=47=UiPfl5ew2vCEte9JyBRkrFk4EhRQqy4dRuzG5Y-xeE---Q8AVvPDQq46GYbCy9VnOA8n7vxR8ETEAxKCh-b58r7elfURfiskmrOCgU706msiUx8L9qBpw-3OTPsY-6tl; expires=Sun, 04-Dec-2011 00:04:08 GMT; path=/; domain=.google.com; HttpOnly
Server: gws
X-XSS-Protection: 1; mode=block
Transfer-Encoding: chunked
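
For context (not part of the original answer), this is roughly what the chunked framing looks like on the wire. The sizes below assume the 100-byte buffer from the question and a hypothetical 235-byte file: each chunk is prefixed with its length in hexadecimal, and the body ends with a zero-length chunk. As far as I know, Node's http module produces this framing itself when you call res.write() without a Content-Length (and sets the Transfer-Encoding: chunked header for HTTP/1.1 responses), so the res.write(b.slice(0, l)) calls in the question do not need to add size prefixes by hand:

HTTP/1.1 200 OK
Content-Type: text/html; charset=UTF-8
Transfer-Encoding: chunked

64
<first 100 bytes of the file>
64
<next 100 bytes of the file>
23
<last 35 bytes of the file>
0
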

EDIT: Yikes, that read is way too complicated:

var app = function(req, res) {
  var head = {'Content-Type': 'text/html'}
  switch (req.url.slice(-3)) {
    case '.js': head = {'Content-Type': 'text/javascript'}; break;
    case 'css': head = {'Content-Type': 'text/css'}; break;
    case 'png': head = {'Content-Type': 'image/png'}; break;
    case 'ico': head = {'Content-Type': 'image/x-icon'}; break;
    case 'ogg': head = {'Content-Type': 'audio/ogg'}; break;
    case 'ebm': head = {'Content-Type': 'video/webm'}; break;
  }
  res.writeHead(200, head)
  var file_stream = fs.createReadStream('.' + req.url);
  file_stream.on("error", function(exception) {
    console.error("Error reading file: ", exception);
  });
  file_stream.on("data", function(data) {
    res.write(data);
  });
  file_stream.on("close", function() {
    res.end();
  });
}

There you go: a nice stream of buffers for you to write out. Here's a blog post I wrote on different ways to read files; I recommend looking it over to see how best to work with files in Node's asynchronous environment.
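
A minimal sketch of the same idea using pipe() (my addition, not from the original answer): pipe() wires the read stream to the response, calls res.end() when the file is finished, and pauses the file stream whenever the response buffer is full, so backpressure is handled for you. The Content-Type switch from the code above is omitted here for brevity:

var fs = require('fs')
var http = require('http')

http.createServer(function(req, res) {
  res.writeHead(200, {'Content-Type': 'text/html; charset=UTF-8'})
  var file_stream = fs.createReadStream('.' + req.url)
  file_stream.on('error', function(exception) {
    // Log the failure and close the response so the client isn't left hanging.
    console.error('Error reading file: ', exception)
    res.end()
  })
  // pipe() forwards each "data" event to res.write() and ends the response
  // when the file stream ends.
  file_stream.pipe(res)
}).listen(8000, '127.0.0.1')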

answered Nov 15 '22 by onteria_