I have code that reads an image from a directory and sends it to index.html.
I am trying to replace fs.readFile with fs.createReadStream, but I have no idea how to implement this, as I cannot find a good example.
Here is what I have (index.js):
// Application bootstrap: Express app, HTTP server, and Socket.IO bound together.
const app = require('express')();
const http = require('http').Server(app);
const io = require('socket.io')(http);
const fs = require('fs');

// Start accepting connections on port 3000.
http.listen(3000, () => {
  console.log('listening on *:3000');
});

// Serve the main page.
app.get('/', (req, res) => {
  res.sendFile(__dirname + '/public/views/index.html');
});
// On each new Socket.IO connection, read the image from disk and push it
// to the client as a base64 string.
io.on('connection', function (socket) {
  fs.readFile(__dirname + '/public/images/image.png', function (err, buf) {
    if (err) {
      // Without this guard, buf is undefined on failure and
      // buf.toString('base64') throws, crashing the server.
      socket.emit('error', err);
      return;
    }
    socket.emit('image', { image: true, buffer: buf.toString('base64') });
  });
});
index.html
<!DOCTYPE html>
<html>
<body>
<canvas id="canvas" width="200" height="100">
Your browser does not support the HTML5 canvas tag.
</canvas>
<script src="https://cdn.socket.io/socket.io-1.2.0.js"></script>
<script>
var socket = io();
var ctx = document.getElementById('canvas').getContext('2d');
socket.on("image", function (info) {
if (info.image) {
var img = new Image();
// Draw only after the image has finished loading; drawing immediately
// after assigning src races the asynchronous decode and usually paints
// nothing onto the canvas.
img.onload = function () {
ctx.drawImage(img, 0, 0);
};
// The server sends a PNG, so label the data URL image/png (was image/jpeg).
img.src = 'data:image/png;base64,' + info.buffer;
}
});
</script>
</body>
</html>
The fs.readFile() method reads a file in a non-blocking, asynchronous way, whereas fs.readFileSync() reads the file synchronously, blocking the Node.js event loop until the read completes.
createReadStream() allows you to open up a readable stream in a very simple manner: all you have to do is pass in the path of the file to start streaming. It turns out that the response (as well as the request) objects are streams, so we can use this fact to create an HTTP server that streams files to the client.
fs.readFile() returns the contents of the named file: a string if an encoding is specified, otherwise a Buffer.
The approach below uses only core modules: it reads the chunks from the stream.Readable instance returned by fs.createReadStream() and returns those chunks combined into a single Buffer. This is not a great approach if you are not going to stream the chunks onward — you end up holding the whole file in a Buffer in memory, so it is only a good solution for reasonably sized files.
// For every client connection, load the image into memory and push it
// over the socket as a base64-encoded payload.
io.on('connection', (socket) => {
  const imagePath = __dirname + '/public/images/image.png';
  fileToBuffer(imagePath, (err, buf) => {
    if (err) {
      socket.emit('error', err);
      return;
    }
    socket.emit('image', { image: true, buffer: buf.toString('base64') });
  });
});
/**
 * Read an entire file into a single Buffer using a readable stream.
 *
 * Note: the whole file is buffered in memory, so this is only suitable
 * for reasonably sized files.
 *
 * @param {string} filename - Path of the file to read.
 * @param {(err: Error|null, buffer?: Buffer) => void} cb - Called exactly
 *   once: with an Error on failure, or (null, Buffer) on success.
 */
const fileToBuffer = (filename, cb) => {
  const readStream = fs.createReadStream(filename);
  const chunks = [];
  let done = false;

  // Invoke cb at most once. fs.ReadStream emits 'close' even after an
  // 'error' (autoClose defaults to true), so without this guard the
  // callback would fire a second time on failure.
  const finish = (err, buf) => {
    if (done) return;
    done = true;
    cb(err, buf);
  };

  // File could not be opened or read.
  readStream.on('error', (err) => finish(err));

  // Accumulate chunks as they arrive.
  readStream.on('data', (chunk) => {
    chunks.push(chunk);
  });

  // 'close' fires after all data has been consumed and the fd released;
  // assemble the chunks into one Buffer for the caller.
  readStream.on('close', () => finish(null, Buffer.concat(chunks)));
};
It is almost always a better idea to use HTTP for streaming data, since streaming is built into the protocol and you never need to load the whole file into memory at once — you can pipe() the file stream directly to the response.
This is a very basic example, without bells and whistles, that just demonstrates how to pipe() a stream.Readable into an http.ServerResponse. The example uses Express, but it works exactly the same way using http or https from the Node.js core API.
const express = require('express');
const fs = require('fs');

const server = express();
const port = process.env.PORT || 1337;

/**
 * Stream the image straight to the HTTP response instead of buffering it
 * in memory. pipe() handles backpressure and, by default (end: true),
 * ends the response when the stream finishes — no explicit 'close'
 * handler is needed.
 */
server.get('/image', (req, res) => {
  const readStream = fs.createReadStream(__dirname + '/public/images/image.png');

  // Without this handler a missing or unreadable file would leave the
  // response hanging forever; report the failure and close the connection.
  readStream.on('error', (err) => {
    console.error('failed to stream image:', err.message);
    if (!res.headersSent) {
      res.status(500);
    }
    res.end();
  });

  // Stream chunks to the response.
  readStream.pipe(res);
});

server.listen(port, () => {
  console.log(`Listening on ${port}`);
});
If you found this helpful, you can donate to us via PayPal or buy us a coffee so we can maintain and grow. Thank you!
Donate to us via: