As in the title, I have a std::vector<cv::Mat> matrices
which I want to write to / read from a binary file.
Now, following this answer, all I should have to do for writing is:
ofstream fout("matrices.bin", ios::out | ios::binary);
size_t size = matrices.size();
fout.write((char*)&size, sizeof(size));
fout.write((char*)&matrices[0], matrices.size() * sizeof(cv::Mat));
fout.close();
However, following this answer, writing cv::Mat
objects is a little trickier than that: a cv::Mat is essentially a header, and its pixel data lives in a separate reference-counted buffer, so raw-copying the struct stores pointers rather than pixels. In that answer, matRead
and matWrite
do the job. So I wonder if, instead of the code above, I should do something like:
ofstream fout("matrices.bin", ios::out | ios::binary);
size_t size = matrices.size();
fout.write((char*)&size, sizeof(size));
for (size_t i = 0; i < matrices.size(); i++)
    matWrite("matrices.bin", matrices[i]);
However, this code doesn't work, since matWrite()
overwrites matrices.bin
on each iteration: I would need to append each matrix instead, keeping track of the size of matrices[i]
as an offset into the file.
What should I do?
UPDATE:
I came up with this solution, rewriting matWrite
and matRead
with extra arguments for appending matrices during writing and for starting to read from a given offset:
void matwrite(const std::string& filename, const cv::Mat& mat, const bool append = false) {
    std::ofstream fs;
    if (append)
        fs.open(filename.c_str(), std::fstream::binary | std::fstream::app);
    else
        fs.open(filename.c_str(), std::fstream::binary);

    // the rest of matwrite is the same...
}
cv::Mat matRead(const std::string& filename, size_t& offset) // note: a non-const reference cannot take a default argument
{
    std::ifstream fs(filename, std::fstream::binary);
    fs.seekg(offset);
    ...
    offset += 4 * sizeof(int) + CV_ELEM_SIZE(type) * rows * cols; // move offset past the 4-int header and the matrix data
    return mat;
}
And the functions are called with:
// writing: truncate the file on the first call, then append
for (size_t i = 0; i < v.size(); i++)
    matwrite(filename, v[i], i > 0);
// reading:
size_t offset = 0;
for (size_t i = 0; i < size; i++) { // size = v.size() from the writing stage
    cv::Mat mat = matRead(filename, offset);
    v.push_back(mat);
}
You can adapt the code of matread
and matwrite
to work with a vector of Mat
instead of a single Mat
. The functions vecmatread
and vecmatwrite
below allow you to write a std::vector<cv::Mat>
to a file, and read the vector back:
#include <opencv2/opencv.hpp>
#include <vector>
#include <iostream>
#include <fstream>

using namespace std;
using namespace cv;

void vecmatwrite(const string& filename, const vector<Mat>& matrices)
{
    ofstream fs(filename, fstream::binary);

    for (size_t i = 0; i < matrices.size(); ++i)
    {
        const Mat& mat = matrices[i];

        // Header
        int type = mat.type();
        int channels = mat.channels();
        fs.write((char*)&mat.rows, sizeof(int));    // rows
        fs.write((char*)&mat.cols, sizeof(int));    // cols
        fs.write((char*)&type, sizeof(int));        // type
        fs.write((char*)&channels, sizeof(int));    // channels

        // Data
        if (mat.isContinuous())
        {
            fs.write(mat.ptr<char>(0), (mat.dataend - mat.datastart));
        }
        else
        {
            int rowsz = CV_ELEM_SIZE(type) * mat.cols;
            for (int r = 0; r < mat.rows; ++r)
            {
                fs.write(mat.ptr<char>(r), rowsz);
            }
        }
    }
}

vector<Mat> vecmatread(const string& filename)
{
    vector<Mat> matrices;
    ifstream fs(filename, fstream::binary);

    // Get length of file
    fs.seekg(0, fs.end);
    streampos length = fs.tellg();
    fs.seekg(0, fs.beg);

    while (fs.tellg() < length)
    {
        // Header
        int rows, cols, type, channels;
        fs.read((char*)&rows, sizeof(int));         // rows
        fs.read((char*)&cols, sizeof(int));         // cols
        fs.read((char*)&type, sizeof(int));         // type
        fs.read((char*)&channels, sizeof(int));     // channels

        // Data
        Mat mat(rows, cols, type);
        fs.read((char*)mat.data, CV_ELEM_SIZE(type) * rows * cols);

        matrices.push_back(mat);
    }
    return matrices;
}

int main()
{
    vector<Mat> matrices;

    // Fill the vector with a few matrices of different types and sizes...
    Mat1f m1(3, 3);
    randu(m1, 0, 1);

    Mat3b m2(4, 5);
    randu(m2, Scalar(0, 0, 0), Scalar(256, 256, 256));

    Mat2d m3(2, 3);
    randu(m3, Scalar(0, 0), Scalar(1, 1));

    matrices.push_back(m1);
    matrices.push_back(m2);
    matrices.push_back(m3);

    // Write the vector to file
    vecmatwrite("test.bin", matrices);

    // Read the vector back from file
    vector<Mat> matrices2 = vecmatread("test.bin");

    return 0;
}
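If you want to verify the round trip, one option is to compare each read-back matrix with its original at the end of main; since the write is a raw byte copy, the comparison should be exact:
// The read-back matrices should match the originals bit for bit
CV_Assert(norm(m1, matrices2[0], NORM_INF) == 0);
CV_Assert(norm(m2, matrices2[1], NORM_INF) == 0);
CV_Assert(norm(m3, matrices2[2], NORM_INF) == 0);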