#include <cstdlib>     // rand, srand
#include <ctime>       // time
#include <stdexcept>   // runtime_error
#include "hdf5.h"      // C API - for SWMR support and H5Pset_chunk_opts
#include "H5Cpp.h"

using namespace std;
using namespace H5;

int main(int argc, char* argv[])
{
    // FYI: add the fapl declaration, set LATEST (SWMR requires the latest
    // file format), and pass the fapl to the H5File constructor below.
    FileAccPropList fapl;
    fapl.setLibverBounds(H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);

    H5File* _hdf5File = new H5File("/home/here/user/CompressionIssue.h5",
                                   H5F_ACC_TRUNC | H5F_ACC_SWMR_WRITE,
                                   FileCreatPropList::DEFAULT, fapl);

    // Create a scalar data set
    hsize_t dims[1]    = {1};              // dataset dimensions at creation
    hsize_t maxdims[1] = {H5S_UNLIMITED};  // dataset maximum potential size
    int rank = 1;                          // 1 dimensional arrays
    DataSpace dataspace(rank, dims, maxdims);

    DSetCreatPropList* plist = new DSetCreatPropList;
    hsize_t chunkdims[1] = {3600};
    plist->setChunk(rank, chunkdims);
    plist->setDeflate(4);

    // FYI: there is no C++ wrapper for H5Pset_chunk_opts, so you call the C
    // function directly, passing in the ID returned by getId().
    unsigned opts = 0;
    opts |= H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS;
    if (H5Pset_chunk_opts(plist->getId(), opts) < 0)
        throw runtime_error("H5Pset_chunk_opts failed"); // or just exit(0) if you want simple for now

    DataSet ds = _hdf5File->createDataSet("MyScalars",
                                          DataType(PredType::NATIVE_DOUBLE),
                                          dataspace, *plist);
    delete plist;
    ds.close();

    // Write to the data set one element at a time - this is how the data is reported.
    DataSpace* filespace = NULL;
    DataSpace* memspace  = NULL;
    srand(time(NULL));

    for (int i = 0; i < 1000; i++)
    {
        ds = _hdf5File->openDataSet("MyScalars");

        unsigned long npointsToAdd = 1;
        hsize_t npoints = ds.getSpace().getSimpleExtentNpoints();
        if (i == 0)
            npoints--; // replace the first point if this is the initial add to the data set

        // We'll be appending to an existing data set, so set our "pointer" to
        // the end of the existing data set in the file.

        // Extend the data - add some points to the data set.
        hsize_t extend[1] = { npointsToAdd }; // number of points being added
        hsize_t size[1];
        size[0] = npoints + extend[0];        // new size of the data set
        hsize_t offset[1];
        offset[0] = npoints;                  // where the new value(s) should be inserted
        ds.extend(size);

        // Select a "hyperslab" in the extended portion of the dataset.
        filespace = new DataSpace(ds.getSpace());
        filespace->selectHyperslab(H5S_SELECT_SET, extend, offset);

        // Define memory space.
        memspace = new DataSpace(1, extend, NULL);

        // Generate a random value and write it to the file.
        double val = (double)rand() / RAND_MAX * 1000.0;
        ds.write(&val, DataType(PredType::NATIVE_DOUBLE), *memspace, *filespace);

        // clean up
        delete memspace;
        delete filespace;

        // flush the file
        H5F_scope_t scope = H5F_SCOPE_GLOBAL;
        ds.flush(scope);

        ds.close(); // FYI: you don't need this line; ds going out of scope takes care of it
    }

    _hdf5File->close(); // FYI: you don't need this line; delete _hdf5File takes care of it
    delete _hdf5File;

    return 0;
}
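A note on the H5Pset_chunk_opts call: with 3600-element chunks and one-element appends, the trailing chunk is a partial edge chunk for almost its entire life, and without H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS the library would run it through the deflate filter every time it is written out. With the flag set, a chunk is only compressed once it has been completely filled.

For completeness, here is a minimal sketch of the reader side of the SWMR pair. This is an assumption on my part, not part of the code above: it reuses the same file and dataset names, the poll loop and sleep interval are arbitrary, and it needs an HDF5 version (1.10.1 or later) whose C++ H5File constructor accepts the SWMR flags. H5Drefresh is called through the C API via getId(), the same pattern used for H5Pset_chunk_opts above.

#include <unistd.h>   // sleep - illustration only
#include <iostream>
#include "hdf5.h"     // C API - H5Drefresh
#include "H5Cpp.h"

using namespace std;
using namespace H5;

int main()
{
    // The reader must also request the latest file format.
    FileAccPropList fapl;
    fapl.setLibverBounds(H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);

    // Open read-only with SWMR read access while the writer is running.
    H5File file("/home/here/user/CompressionIssue.h5",
                H5F_ACC_RDONLY | H5F_ACC_SWMR_READ,
                FileCreatPropList::DEFAULT, fapl);

    DataSet ds = file.openDataSet("MyScalars");

    for (int i = 0; i < 10; i++)
    {
        // Pick up the writer's latest flushed extent; no C++ wrapper here,
        // so call the C function with the ID from getId().
        if (H5Drefresh(ds.getId()) < 0)
            return 1;

        hsize_t npoints = ds.getSpace().getSimpleExtentNpoints();
        cout << "dataset currently holds " << npoints << " points" << endl;
        sleep(1); // arbitrary poll interval
    }

    return 0;
}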