001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the COPYING file, which can be found  *
009 * at the root of the source code distribution tree,                         *
010 * or in https://www.hdfgroup.org/licenses.                                  *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.math.BigDecimal;
019import java.math.BigInteger;
020import java.text.DecimalFormat;
021import java.util.ArrayList;
022import java.util.Arrays;
023import java.util.Iterator;
024import java.util.List;
025import java.util.Vector;
026
027import hdf.object.Attribute;
028import hdf.object.CompoundDS;
029import hdf.object.Dataset;
030import hdf.object.Datatype;
031import hdf.object.FileFormat;
032import hdf.object.Group;
033import hdf.object.HObject;
034import hdf.object.MetaDataContainer;
035import hdf.object.Utils;
036import hdf.object.h5.H5Datatype;
037import hdf.object.h5.H5MetaDataContainer;
038import hdf.object.h5.H5ReferenceType;
039
040import hdf.hdf5lib.H5;
041import hdf.hdf5lib.HDF5Constants;
042import hdf.hdf5lib.HDFArray;
043import hdf.hdf5lib.HDFNativeData;
044import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
045import hdf.hdf5lib.exceptions.HDF5Exception;
046import hdf.hdf5lib.exceptions.HDF5LibraryException;
047import hdf.hdf5lib.structs.H5O_info_t;
048import hdf.hdf5lib.structs.H5O_token_t;
049
050import org.slf4j.Logger;
051import org.slf4j.LoggerFactory;
052
053/**
054 * The H5CompoundDS class defines an HDF5 dataset of compound datatypes.
055 *
056 * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and metadata that
057 * stores a description of the data elements, data layout, and all other information necessary to write, read,
058 * and interpret the stored data.
059 *
060 * A HDF5 compound datatype is similar to a struct in C or a common block in Fortran: it is a collection of
061 * one or more atomic types or small arrays of such types. Each member of a compound type has a name which is
062 * unique within that type, and a byte offset that determines the first byte (smallest byte address) of that
063 * member in a compound datum.
064 *
065 * For more information on HDF5 datasets and datatypes, read <a href=
066 * "https://support.hdfgroup.org/releases/hdf5/v1_14/v1_14_5/documentation/doxygen/_h5_d__u_g.html#sec_dataset">HDF5
067 * Datasets in HDF5 User Guide</a> <a href=
068 * "https://support.hdfgroup.org/releases/hdf5/v1_14/v1_14_5/documentation/doxygen/_h5_t__u_g.html#sec_datatype">HDF5
069 * Datatypes in HDF5 User Guide</a>
070 *
071 * There are two basic types of compound datasets: simple compound data and nested compound data. Members of a
072 * simple compound dataset have atomic datatypes. Members of a nested compound dataset are compound or array
073 * of compound data.
074 *
075 * Since Java does not understand C structures, we cannot directly read/write compound data values as in the
076 * following C example.
077 *
078 * <pre>
079 * typedef struct s1_t {
080 *         int    a;
081 *         float  b;
082 *         double c;
083 *         } s1_t;
084 *     s1_t       s1[LENGTH];
085 *     ...
086 *     H5Dwrite(..., s1);
087 *     H5Dread(..., s1);
088 * </pre>
089 *
090 * Values of compound data fields are stored in java.util.Vector object. We read and write compound data by
091 * fields instead of compound structure. As for the example above, the java.util.Vector object has three
092 * elements: int[LENGTH], float[LENGTH] and double[LENGTH]. Since Java understands the primitive datatypes of
093 * int, float and double, we will be able to read/write the compound data by field.
094 *
095 * @version 1.1 9/4/2007
096 * @author Peter X. Cao
097 */
098public class H5CompoundDS extends CompoundDS implements MetaDataContainer {
099    private static final long serialVersionUID = -5968625125574032736L;
100
101    private static final Logger log = LoggerFactory.getLogger(H5CompoundDS.class);
102
103    /**
104     * The metadata object for this data object. Members of the metadata are instances of Attribute.
105     */
106    private H5MetaDataContainer objMetadata;
107
108    /** the object properties */
109    private H5O_info_t objInfo;
110
111    /** flag to indicate if the dataset is an external dataset */
112    private boolean isExternal = false;
113
114    /** flag to indicate if the dataset is a virtual dataset */
115    private boolean isVirtual = false;
116    /** the list of virtual names */
117    private List<String> virtualNameList;
118
119    /**
120     * Constructs an instance of a HDF5 compound dataset with given file, dataset name and path.
121     *
122     * The dataset object represents an existing dataset in the file. For example, new
123     * H5CompoundDS(file, "dset1", "/g0/") constructs a dataset object that corresponds to the
124     * dataset,"dset1", at group "/g0/".
125     *
126     * This object is usually constructed at FileFormat.open(), which loads the file structure and
127     * object information into memory. It is rarely used elsewhere.
128     *
129     * @param theFile
130     *            the file that contains the data object.
131     * @param theName
132     *            the name of the data object, e.g. "dset".
133     * @param thePath
134     *            the full path of the data object, e.g. "/arrays/".
135     */
136    public H5CompoundDS(FileFormat theFile, String theName, String thePath)
137    {
138        this(theFile, theName, thePath, null);
139    }
140
141    /**
142     * @deprecated Not for public use in the future.<br>
143     *             Using {@link #H5CompoundDS(FileFormat, String, String)}
144     *
145     * @param theFile
146     *            the file that contains the data object.
147     * @param theName
148     *            the name of the data object, e.g. "dset".
149     * @param thePath
150     *            the full path of the data object, e.g. "/arrays/".
151     * @param oid
152     *            the oid of the data object.
153     */
154    @Deprecated
155    public H5CompoundDS(FileFormat theFile, String theName, String thePath, long[] oid)
156    {
157        super(theFile, theName, thePath, oid);
158        objMetadata = new H5MetaDataContainer(theFile, theName, thePath, this);
159
160        if (theFile != null) {
161            if (oid == null) {
162                // retrieve the object ID
163                byte[] refBuf = null;
164                try {
165                    refBuf =
166                        H5.H5Rcreate_object(theFile.getFID(), this.getFullName(), HDF5Constants.H5P_DEFAULT);
167                    this.oid = HDFNativeData.byteToLong(refBuf);
168                    log.trace("constructor REF {} to OID {}", refBuf, this.oid);
169                }
170                catch (Exception ex) {
171                    log.debug("constructor ID {} for {} failed H5Rcreate_object", theFile.getFID(),
172                              this.getFullName());
173                }
174                finally {
175                    if (refBuf != null)
176                        H5.H5Rdestroy(refBuf);
177                }
178            }
179            log.trace("constructor OID {}", this.oid);
180            try {
181                objInfo = H5.H5Oget_info_by_name(theFile.getFID(), this.getFullName(),
182                                                 HDF5Constants.H5O_INFO_BASIC, HDF5Constants.H5P_DEFAULT);
183            }
184            catch (Exception ex) {
185                objInfo = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
186            }
187        }
188        else {
189            this.oid = null;
190            objInfo  = new H5O_info_t(-1L, null, 0, 0, 0L, 0L, 0L, 0L, 0L);
191        }
192    }
193
194    /*
195     * (non-Javadoc)
196     *
197     * @see hdf.object.HObject#open()
198     */
199    @Override
200    public long open()
201    {
202        long did = HDF5Constants.H5I_INVALID_HID;
203
204        if (getFID() < 0)
205            log.trace("open(): file id for:{} is invalid", getPath() + getName());
206        else {
207            try {
208                did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
209                log.trace("open(): did={}", did);
210            }
211            catch (HDF5Exception ex) {
212                log.debug("open(): Failed to open dataset {}", getPath() + getName(), ex);
213                did = HDF5Constants.H5I_INVALID_HID;
214            }
215        }
216
217        return did;
218    }
219
220    /*
221     * (non-Javadoc)
222     *
223     * @see hdf.object.HObject#close(int)
224     */
225    @Override
226    public void close(long did)
227    {
228        if (did >= 0) {
229            try {
230                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
231            }
232            catch (Exception ex) {
233                log.debug("close(): H5Fflush(did {}) failure: ", did, ex);
234            }
235            try {
236                H5.H5Dclose(did);
237            }
238            catch (HDF5Exception ex) {
239                log.debug("close(): H5Dclose(did {}) failure: ", did, ex);
240            }
241        }
242    }
243
244    /**
245     * Retrieves datatype and dataspace information from file and sets the dataset
246     * in memory.
247     *
248     * The init() is designed to support lazy operation in a dataset object. When a
249     * data object is retrieved from file, the datatype, dataspace and raw data are
250     * not loaded into memory. When it is asked to read the raw data from file,
251     * init() is first called to get the datatype and dataspace information, then
252     * load the raw data from file.
253     *
254     * init() is also used to reset the selection of a dataset (start, stride and
255     * count) to the default, which is the entire dataset for 1D or 2D datasets. In
256     * the following example, init() at step 1) retrieves datatype and dataspace
257     * information from file. getData() at step 3) reads only one data point. init()
258     * at step 4) resets the selection to the whole dataset. getData() at step 4)
259     * reads the values of whole dataset into memory.
260     *
261     * <pre>
262     * dset = (Dataset) file.get(NAME_DATASET);
263     *
264     * // 1) get datatype and dataspace information from file
265     * dset.init();
266     * rank = dset.getRank(); // rank = 2, a 2D dataset
267     * count = dset.getSelectedDims();
268     * start = dset.getStartDims();
269     * dims = dset.getDims();
270     *
271     * // 2) select only one data point
272     * for (int i = 0; i &lt; rank; i++) {
273     *     start[0] = 0;
274     *     count[i] = 1;
275     * }
276     *
277     * // 3) read one data point
278     * data = dset.getData();
279     *
280     * // 4) reset selection to the whole dataset
281     * dset.init();
282     *
283     * // 5) clean the memory data buffer
284     * dset.clearData();
285     *
286     * // 6) Read the whole dataset
287     * data = dset.getData();
288     * </pre>
289     */
290    @Override
291    public void init()
292    {
293        if (inited) {
294            resetSelection();
295            log.trace("init(): Dataset already initialized");
296            return; // already called. Initialize only once
297        }
298
299        long did     = HDF5Constants.H5I_INVALID_HID;
300        long tid     = HDF5Constants.H5I_INVALID_HID;
301        long sid     = HDF5Constants.H5I_INVALID_HID;
302        flatNameList = new Vector<>();
303        flatTypeList = new Vector<>();
304
305        did = open();
306        if (did >= 0) {
307            // check if it is an external or virtual dataset
308            long pid = HDF5Constants.H5I_INVALID_HID;
309            try {
310                pid = H5.H5Dget_create_plist(did);
311                try {
312                    int nfiles     = H5.H5Pget_external_count(pid);
313                    isExternal     = (nfiles > 0);
314                    int layoutType = H5.H5Pget_layout(pid);
315                    if (isVirtual = (layoutType == HDF5Constants.H5D_VIRTUAL)) {
316                        try {
317                            long vmaps = H5.H5Pget_virtual_count(pid);
318                            if (vmaps > 0) {
319                                virtualNameList = new Vector<>();
320                                for (long next = 0; next < vmaps; next++) {
321                                    try {
322                                        String fname = H5.H5Pget_virtual_filename(pid, next);
323                                        virtualNameList.add(fname);
324                                        log.trace("init(): virtualNameList[{}]={}", next, fname);
325                                    }
326                                    catch (Exception err) {
327                                        log.trace("init(): vds[{}] continue", next);
328                                    }
329                                }
330                            }
331                        }
332                        catch (Exception err) {
333                            log.debug("init(): vds count error: ", err);
334                        }
335                    }
336                    log.trace("init(): pid={} nfiles={} isExternal={} isVirtual={}", pid, nfiles, isExternal,
337                              isVirtual);
338                }
339                catch (Exception ex) {
340                    log.debug("init(): check if it is an external or virtual dataset: ", ex);
341                }
342            }
343            catch (Exception ex) {
344                log.debug("init(): H5Dget_create_plist() failure: ", ex);
345            }
346            finally {
347                try {
348                    H5.H5Pclose(pid);
349                }
350                catch (Exception ex) {
351                    log.debug("init(): H5Pclose(pid {}) failure: ", pid, ex);
352                }
353            }
354
355            try {
356                sid        = H5.H5Dget_space(did);
357                rank       = H5.H5Sget_simple_extent_ndims(sid);
358                space_type = H5.H5Sget_simple_extent_type(sid);
359                if (space_type == HDF5Constants.H5S_NULL)
360                    isNULL = true;
361                else
362                    isNULL = false;
363                tid = H5.H5Dget_type(did);
364                log.trace("init(): tid={} sid={} rank={}", tid, sid, rank);
365
366                if (rank == 0) {
367                    // a scalar data point
368                    isScalar = true;
369                    rank     = 1;
370                    dims     = new long[] {1};
371                    log.trace("init(): rank is a scalar data point");
372                }
373                else {
374                    isScalar = false;
375                    dims     = new long[rank];
376                    maxDims  = new long[rank];
377                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
378                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
379                }
380
381                try {
382                    int nativeClass = H5.H5Tget_class(tid);
383                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
384                        long lsize = 1;
385                        if (rank > 0) {
386                            log.trace("init():rank={}, dims={}", rank, dims);
387                            for (int j = 0; j < dims.length; j++) {
388                                lsize *= dims[j];
389                            }
390                        }
391                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
392                    }
393                    else
394                        datatype = new H5Datatype(getFileFormat(), tid);
395
396                    log.trace(
397                        "init(): tid={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isStdRef={} : isRegRef={}",
398                        tid, datatype.isText(), datatype.isVLEN(), ((H5Datatype)datatype).isEnum(),
399                        datatype.isUnsigned(), ((H5Datatype)datatype).isStdRef(),
400                        ((H5Datatype)datatype).isRegRef());
401
402                    H5Datatype.extractCompoundInfo((H5Datatype)datatype, "", flatNameList, flatTypeList);
403                }
404                catch (Exception ex) {
405                    log.debug("init(): failed to create datatype for dataset: ", ex);
406                    datatype = null;
407                }
408
409                // initialize member information
410                numberOfMembers = flatNameList.size();
411                log.trace("init(): numberOfMembers={}", numberOfMembers);
412
413                memberNames      = new String[numberOfMembers];
414                memberTypes      = new Datatype[numberOfMembers];
415                memberOrders     = new int[numberOfMembers];
416                isMemberSelected = new boolean[numberOfMembers];
417                memberDims       = new Object[numberOfMembers];
418
419                for (int i = 0; i < numberOfMembers; i++) {
420                    isMemberSelected[i] = true;
421                    memberOrders[i]     = 1;
422                    memberDims[i]       = null;
423
424                    try {
425                        memberTypes[i] = flatTypeList.get(i);
426                        log.trace("init()[{}]: memberTypes[{}]={}", i, i, memberTypes[i].getDescription());
427
428                        if (memberTypes[i].isArray()) {
429                            long mdim[]      = memberTypes[i].getArrayDims();
430                            int idim[]       = new int[mdim.length];
431                            int arrayNpoints = 1;
432
433                            for (int j = 0; j < idim.length; j++) {
434                                idim[j] = (int)mdim[j];
435                                arrayNpoints *= idim[j];
436                            }
437
438                            memberDims[i]   = idim;
439                            memberOrders[i] = arrayNpoints;
440                        }
441                    }
442                    catch (Exception ex) {
443                        log.debug("init()[{}]: memberTypes[{}] get failure: ", i, i, ex);
444                        memberTypes[i] = null;
445                    }
446
447                    try {
448                        memberNames[i] = flatNameList.get(i);
449                        log.trace("init()[{}]: memberNames[{}]={}", i, i, memberNames[i]);
450                    }
451                    catch (Exception ex) {
452                        log.debug("init()[{}]: memberNames[{}] get failure: ", i, i, ex);
453                        memberNames[i] = "null";
454                    }
455                } //  (int i=0; i<numberOfMembers; i++)
456
457                inited = true;
458            }
459            catch (HDF5Exception ex) {
460                numberOfMembers = 0;
461                memberNames     = null;
462                memberTypes     = null;
463                memberOrders    = null;
464                log.debug("init(): ", ex);
465            }
466            finally {
467                if (datatype != null)
468                    datatype.close(tid);
469
470                try {
471                    H5.H5Sclose(sid);
472                }
473                catch (HDF5Exception ex2) {
474                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
475                }
476            }
477
478            close(did);
479
480            startDims    = new long[rank];
481            selectedDims = new long[rank];
482
483            resetSelection();
484        }
485        else {
486            log.debug("init(): failed to open dataset");
487        }
488    }
489
490    /**
491     * Get the token for this object.
492     *
493     * @return true if it has any attributes, false otherwise.
494     */
495    public long[] getToken()
496    {
497        H5O_token_t token = objInfo.token;
498        return HDFNativeData.byteToLong(token.data);
499    }
500
501    /**
502     * Check if the object has any attributes attached.
503     *
504     * @return true if it has any attributes, false otherwise.
505     */
506    @Override
507    public boolean hasAttribute()
508    {
509        objInfo.num_attrs = objMetadata.getObjectAttributeSize();
510
511        if (objInfo.num_attrs < 0) {
512            long did = open();
513            if (did >= 0) {
514                objInfo.num_attrs = 0;
515
516                try {
517                    objInfo = H5.H5Oget_info(did);
518                }
519                catch (Exception ex) {
520                    objInfo.num_attrs = 0;
521                    log.debug("hasAttribute(): get object info failure: ", ex);
522                }
523                finally {
524                    close(did);
525                }
526                objMetadata.setObjectAttributeSize((int)objInfo.num_attrs);
527            }
528            else {
529                log.debug("hasAttribute(): could not open dataset");
530            }
531        }
532
533        log.trace("hasAttribute(): nAttributes={}", objInfo.num_attrs);
534        return (objInfo.num_attrs > 0);
535    }
536
537    /**
538     * Returns the datatype of the data object.
539     *
540     * @return the datatype of the data object.
541     */
542    @Override
543    public Datatype getDatatype()
544    {
545        if (!inited)
546            init();
547
548        if (datatype == null) {
549            long did = HDF5Constants.H5I_INVALID_HID;
550            long tid = HDF5Constants.H5I_INVALID_HID;
551
552            did = open();
553            if (did >= 0) {
554                try {
555                    tid             = H5.H5Dget_type(did);
556                    int nativeClass = H5.H5Tget_class(tid);
557                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
558                        long lsize = 1;
559                        if (rank > 0) {
560                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
561                            for (int j = 0; j < dims.length; j++) {
562                                lsize *= dims[j];
563                            }
564                        }
565                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
566                    }
567                    else
568                        datatype = new H5Datatype(getFileFormat(), tid);
569                }
570                catch (Exception ex) {
571                    log.debug("getDatatype(): ", ex);
572                }
573                finally {
574                    try {
575                        H5.H5Tclose(tid);
576                    }
577                    catch (HDF5Exception ex) {
578                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
579                    }
580                    try {
581                        H5.H5Dclose(did);
582                    }
583                    catch (HDF5Exception ex) {
584                        log.debug("getDatatype(): H5Dclose(did {}) failure: ", did, ex);
585                    }
586                }
587            }
588        }
589
590        if (isExternal) {
591            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
592
593            if (pdir == null) {
594                pdir = ".";
595            }
596            System.setProperty("user.dir", pdir);
597            log.trace("getDatatype(): External dataset: user.dir={}", pdir);
598        }
599
600        return datatype;
601    }
602
603    /**
604     * Removes all of the elements from metadata list.
605     * The list should be empty after this call returns.
606     */
607    @Override
608    public void clear()
609    {
610        super.clear();
611        objMetadata.clear();
612    }
613
614    /*
615     * (non-Javadoc)
616     *
617     * @see hdf.object.Dataset#readBytes()
618     */
619    @Override
620    public byte[] readBytes() throws HDF5Exception
621    {
622        byte[] theData = null;
623
624        if (!isInited())
625            init();
626
627        long did = open();
628        if (did >= 0) {
629            long fspace = HDF5Constants.H5I_INVALID_HID;
630            long mspace = HDF5Constants.H5I_INVALID_HID;
631            long tid    = HDF5Constants.H5I_INVALID_HID;
632
633            try {
634                long[] lsize = {1};
635                for (int j = 0; j < selectedDims.length; j++)
636                    lsize[0] *= selectedDims[j];
637
638                fspace = H5.H5Dget_space(did);
639                mspace = H5.H5Screate_simple(rank, selectedDims, null);
640
641                // set the rectangle selection
642                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
643                if (rank * dims[0] > 1)
644                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
645                                           selectedDims, null); // set block to 1
646
647                tid       = H5.H5Dget_type(did);
648                long size = H5.H5Tget_size(tid) * lsize[0];
649                log.trace("readBytes(): size = {}", size);
650
651                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
652                    throw new Exception("Invalid int size");
653
654                theData = new byte[(int)size];
655
656                log.trace("readBytes(): H5Dread: did={} tid={} fspace={} mspace={}", did, tid, fspace,
657                          mspace);
658                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
659            }
660            catch (Exception ex) {
661                log.debug("readBytes(): failed to read data: ", ex);
662            }
663            finally {
664                try {
665                    H5.H5Sclose(fspace);
666                }
667                catch (Exception ex2) {
668                    log.debug("readBytes(): H5Sclose(fspace {}) failure: ", fspace, ex2);
669                }
670                try {
671                    H5.H5Sclose(mspace);
672                }
673                catch (Exception ex2) {
674                    log.debug("readBytes(): H5Sclose(mspace {}) failure: ", mspace, ex2);
675                }
676                try {
677                    H5.H5Tclose(tid);
678                }
679                catch (HDF5Exception ex2) {
680                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
681                }
682                close(did);
683            }
684        }
685
686        return theData;
687    }
688
689    /**
690     * Reads the data from file.
691     *
692     * read() reads the data from file to a memory buffer and returns the memory
693     * buffer. The dataset object does not hold the memory buffer. To store the
694     * memory buffer in the dataset object, one must call getData().
695     *
696     * By default, the whole dataset is read into memory. Users can also select
697     * a subset to read. Subsetting is done in an implicit way.
698     *
699     * <b>How to Select a Subset</b>
700     *
701     * A selection is specified by three arrays: start, stride and count.
702     * <ol>
703     * <li>start: offset of a selection
704     * <li>stride: determines how many elements to move in each dimension
705     * <li>count: number of elements to select in each dimension
706     * </ol>
707     * getStartDims(), getStride() and getSelectedDims() returns the start,
708     * stride and count arrays respectively. Applications can make a selection
709     * by changing the values of the arrays.
710     *
711     * The following example shows how to make a subset. In the example, the
712     * dataset is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
713     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
714     * We want to select every other data point in dims[1] and dims[2]
715     *
716     * <pre>
717     * int rank = dataset.getRank(); // number of dimensions of the dataset
718     * long[] dims = dataset.getDims(); // the dimension sizes of the dataset
719     * long[] selected = dataset.getSelectedDims(); // the selected size of the
720     *                                              // dataset
721     * long[] start = dataset.getStartDims(); // the offset of the selection
722     * long[] stride = dataset.getStride(); // the stride of the dataset
723     * int[] selectedIndex = dataset.getSelectedIndex(); // the selected
724     *                                                   // dimensions for
725     *                                                   // display
726     *
727     * // select dim1 and dim2 as 2D data for display, and slice through dim0
728     * selectedIndex[0] = 1;
729     * selectedIndex[1] = 2;
730     * selectedIndex[1] = 0;
731     *
732     * // reset the selection arrays
733     * for (int i = 0; i &lt; rank; i++) {
734     *     start[i] = 0;
735     *     selected[i] = 1;
736     *     stride[i] = 1;
737     * }
738     *
739     * // set stride to 2 on dim1 and dim2 so that every other data point is
740     * // selected.
741     * stride[1] = 2;
742     * stride[2] = 2;
743     *
744     * // set the selection size of dim1 and dim2
745     * selected[1] = dims[1] / stride[1];
746     * selected[2] = dims[1] / stride[2];
747     *
748     * // when dataset.getData() is called, the selection above will be used
749     * // since
750     * // the dimension arrays are passed by reference. Changes of these arrays
751     * // outside the dataset object directly change the values of these array
752     * // in the dataset object.
753     * </pre>
754     *
755     * For CompoundDS, the memory data object is an java.util.List object. Each
756     * element of the list is a data array that corresponds to a compound field.
757     *
758     * For example, if compound dataset "comp" has the following nested
759     * structure, and member datatypes
760     *
761     * <pre>
762     * comp --&gt; m01 (int)
763     * comp --&gt; m02 (float)
764     * comp --&gt; nest1 --&gt; m11 (char)
765     * comp --&gt; nest1 --&gt; m12 (String)
766     * comp --&gt; nest1 --&gt; nest2 --&gt; m21 (long)
767     * comp --&gt; nest1 --&gt; nest2 --&gt; m22 (double)
768     * </pre>
769     *
770     * getData() returns a list of six arrays: {int[], float[], char[],
771     * String[], long[] and double[]}.
772     *
773     * @return the data read from file.
774     *
775     * @see #getData()
776     * @see hdf.object.DataFormat#read()
777     *
778     * @throws Exception
779     *             if object can not be read
780     */
781    @Override
782    public Object read() throws Exception
783    {
784        Object readData = null;
785
786        if (!isInited())
787            init();
788
789        try {
790            readData = compoundDatasetCommonIO(H5File.IO_TYPE.READ, null);
791        }
792        catch (Exception ex) {
793            log.debug("read(): failed to read compound dataset: ", ex);
794            throw new Exception("failed to read compound dataset: " + ex.getMessage(), ex);
795        }
796
797        return readData;
798    }
799
800    /**
801     * Writes the given data buffer into this dataset in a file.
802     *
803     * The data buffer is a vector that contains the data values of compound fields. The data is written
804     * into file field by field.
805     *
806     * @param buf
807     *            The vector that contains the data values of compound fields.
808     *
809     * @throws Exception
810     *             If there is an error at the HDF5 library level.
811     */
812    @Override
813    public void write(Object buf) throws Exception
814    {
815        if (this.getFileFormat().isReadOnly())
816            throw new Exception("cannot write to compound dataset in file opened as read-only");
817
818        if (!isInited())
819            init();
820
821        try {
822            compoundDatasetCommonIO(H5File.IO_TYPE.WRITE, buf);
823        }
824        catch (Exception ex) {
825            log.debug("write(Object): failed to write compound dataset: ", ex);
826            throw new Exception("failed to write compound dataset: " + ex.getMessage(), ex);
827        }
828    }
829
830    /*
831     * Routine to convert datatypes that are read in as byte arrays to
832     * regular types.
833     */
834    @Override
835    protected Object convertByteMember(final Datatype dtype, byte[] byteData)
836    {
837        Object theObj = null;
838        log.debug("convertByteMember(): dtype={} byteData={}", dtype, byteData);
839
840        if (dtype.isFloat() && dtype.getDatatypeSize() == 16)
841            theObj = ((H5Datatype)dtype).byteToBigDecimal(byteData, 0);
842        else
843            theObj = super.convertByteMember(dtype, byteData);
844
845        return theObj;
846    }
847
848    private Object compoundDatasetCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception
849    {
850        H5Datatype dsDatatype = (H5Datatype)getDatatype();
851        Object theData        = null;
852
853        if (numberOfMembers <= 0) {
854            log.debug("compoundDatasetCommonIO(): Dataset contains no members");
855            throw new Exception("dataset contains no members");
856        }
857
858        /*
859         * I/O type-specific pre-initialization.
860         */
861        if (ioType == H5File.IO_TYPE.WRITE) {
862            if ((writeBuf == null) || !(writeBuf instanceof List)) {
863                log.debug("compoundDatasetCommonIO(): writeBuf is null or invalid");
864                throw new Exception("write buffer is null or invalid");
865            }
866
867            /*
868             * Check for any unsupported datatypes and fail early before
869             * attempting to write to the dataset.
870             */
871            if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isCompound()) {
872                log.debug("compoundDatasetCommonIO(): cannot write dataset of type ARRAY of COMPOUND");
873                throw new HDF5Exception("Unsupported dataset of type ARRAY of COMPOUND");
874            }
875
876            if (dsDatatype.isVLEN() && !dsDatatype.isVarStr() && dsDatatype.getDatatypeBase().isCompound()) {
877                log.debug("compoundDatasetCommonIO(): cannot write dataset of type VLEN of COMPOUND");
878                throw new HDF5Exception("Unsupported dataset of type VLEN of COMPOUND");
879            }
880        }
881
882        long did = open();
883        if (did >= 0) {
884            long[] spaceIDs = {HDF5Constants.H5I_INVALID_HID,
885                               HDF5Constants.H5I_INVALID_HID}; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
886
887            try {
888                /*
889                 * NOTE: this call sets up a hyperslab selection in the file according to the
890                 * current selection in the dataset object.
891                 */
892                long totalSelectedSpacePoints = H5Utils.getTotalSelectedSpacePoints(
893                    did, dims, startDims, selectedStride, selectedDims, spaceIDs);
894
895                theData = compoundTypeIO(ioType, did, spaceIDs, (int)totalSelectedSpacePoints, dsDatatype,
896                                         writeBuf, new int[] {0});
897            }
898            finally {
899                if (HDF5Constants.H5S_ALL != spaceIDs[0]) {
900                    try {
901                        H5.H5Sclose(spaceIDs[0]);
902                    }
903                    catch (Exception ex) {
904                        log.debug(
905                            "compoundDatasetCommonIO(): H5Sclose(spaceIDs[0] {}) failure: ", spaceIDs[0], ex);
906                    }
907                }
908
909                if (HDF5Constants.H5S_ALL != spaceIDs[1]) {
910                    try {
911                        H5.H5Sclose(spaceIDs[1]);
912                    }
913                    catch (Exception ex) {
914                        log.debug(
915                            "compoundDatasetCommonIO(): H5Sclose(spaceIDs[1] {}) failure: ", spaceIDs[1], ex);
916                    }
917                }
918
919                close(did);
920            }
921        }
922        else
923            log.debug("compoundDatasetCommonIO(): failed to open dataset");
924
925        return theData;
926    }
927
928    /*
929     * Private recursive routine to read/write an entire compound datatype field by
930     * field. This routine is called recursively for ARRAY of COMPOUND and VLEN of
931     * COMPOUND datatypes.
932     *
933     * NOTE: the globalMemberIndex hack is ugly, but we need to keep track of a
934     * running counter so that we can index properly into the flattened name list
935     * generated from H5Datatype.extractCompoundInfo() at dataset init time.
936     */
937    private Object compoundTypeIO(H5File.IO_TYPE ioType, long did, long[] spaceIDs, int nSelPoints,
938                                  final H5Datatype cmpdType, Object writeBuf, int[] globalMemberIndex)
939    {
940        Object theData = null;
941
942        if (cmpdType.isArray()) {
943            log.trace("compoundTypeIO(): ARRAY type");
944
945            long[] arrayDims = cmpdType.getArrayDims();
946            int arrSize      = nSelPoints;
947            for (int i = 0; i < arrayDims.length; i++)
948                arrSize *= arrayDims[i];
949            theData = compoundTypeIO(ioType, did, spaceIDs, arrSize, (H5Datatype)cmpdType.getDatatypeBase(),
950                                     writeBuf, globalMemberIndex);
951        }
952        else if (cmpdType.isVLEN() && !cmpdType.isVarStr()) {
953            /*
954             * TODO: true variable-length support.
955             */
956            String[] errVal = new String[nSelPoints];
957            String errStr   = "*UNSUPPORTED*";
958
959            for (int j = 0; j < nSelPoints; j++)
960                errVal[j] = errStr;
961
962            /*
963             * Setup a fake data list.
964             */
965            Datatype baseType = cmpdType.getDatatypeBase();
966            while (baseType != null && !baseType.isCompound()) {
967                baseType = baseType.getDatatypeBase();
968            }
969
970            List<Object> fakeVlenData =
971                (List<Object>)H5Datatype.allocateArray((H5Datatype)baseType, nSelPoints);
972            fakeVlenData.add(errVal);
973
974            theData = fakeVlenData;
975        }
976        else if (cmpdType.isCompound()) {
977            List<Object> memberDataList = null;
978            List<Datatype> typeList     = cmpdType.getCompoundMemberTypes();
979
980            log.trace("compoundTypeIO(): {} {} members:", (ioType == H5File.IO_TYPE.READ) ? "read" : "write",
981                      typeList.size());
982
983            if (ioType == H5File.IO_TYPE.READ)
984                memberDataList = (List<Object>)H5Datatype.allocateArray(cmpdType, nSelPoints);
985
986            try {
987                for (int i = 0, writeListIndex = 0; i < typeList.size(); i++) {
988                    H5Datatype memberType = null;
989                    String memberName     = null;
990                    Object memberData     = null;
991
992                    try {
993                        memberType = (H5Datatype)typeList.get(i);
994                    }
995                    catch (Exception ex) {
996                        log.debug("compoundTypeIO(): get member {} failure: ", i, ex);
997                        globalMemberIndex[0]++;
998                        continue;
999                    }
1000
1001                    /*
1002                     * Since the type list used here is not a flattened structure, we need to skip
1003                     * the member selection check for compound types, as otherwise having a single
1004                     * member not selected would skip the reading/writing for the entire compound
1005                     * type. The member selection check will be deferred to the recursive compound
1006                     * read/write below.
1007                     */
1008                    if (!memberType.isCompound()) {
1009                        if (!isMemberSelected[globalMemberIndex[0] % this.getMemberCount()]) {
1010                            log.debug("compoundTypeIO(): member[{}] is not selected", i);
1011                            globalMemberIndex[0]++;
1012                            continue; // the field is not selected
1013                        }
1014                    }
1015
1016                    if (!memberType.isCompound()) {
1017                        try {
1018                            memberName = new String(flatNameList.get(globalMemberIndex[0]));
1019                        }
1020                        catch (Exception ex) {
1021                            log.debug("compoundTypeIO(): get member {} name failure: ", i, ex);
1022                            memberName = "null";
1023                        }
1024                    }
1025
1026                    log.trace("compoundTypeIO(): member[{}]({}) is type {}", i, memberName,
1027                              memberType.getDescription());
1028
1029                    if (ioType == H5File.IO_TYPE.READ) {
1030                        try {
1031                            if (memberType.isCompound())
1032                                memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType,
1033                                                            writeBuf, globalMemberIndex);
1034                            else if (
1035                                memberType
1036                                    .isArray() /* || (memberType.isVLEN() && !memberType.isVarStr()) */) {
1037                                /*
1038                                 * Recursively detect any nested array/vlen of compound types.
1039                                 */
1040                                boolean compoundFound = false;
1041
1042                                Datatype base = memberType.getDatatypeBase();
1043                                while (base != null) {
1044                                    if (base.isCompound())
1045                                        compoundFound = true;
1046
1047                                    base = base.getDatatypeBase();
1048                                }
1049
1050                                if (compoundFound) {
1051                                    /*
1052                                     * Skip the top-level array/vlen type.
1053                                     */
1054                                    globalMemberIndex[0]++;
1055
1056                                    memberData = compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType,
1057                                                                writeBuf, globalMemberIndex);
1058                                }
1059                                else {
1060                                    memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints,
1061                                                                          memberType, memberName);
1062                                    globalMemberIndex[0]++;
1063                                }
1064                            }
1065                            else {
1066                                memberData = readSingleCompoundMember(did, spaceIDs, nSelPoints, memberType,
1067                                                                      memberName);
1068                                globalMemberIndex[0]++;
1069                            }
1070                        }
1071                        catch (Exception ex) {
1072                            log.debug("compoundTypeIO(): failed to read member {}: ", i, ex);
1073                            globalMemberIndex[0]++;
1074                            memberData = null;
1075                        }
1076
1077                        if (memberData == null) {
1078                            String[] errVal = new String[nSelPoints];
1079                            String errStr   = "*ERROR*";
1080
1081                            for (int j = 0; j < nSelPoints; j++)
1082                                errVal[j] = errStr;
1083
1084                            memberData = errVal;
1085                        }
1086
1087                        memberDataList.add(memberData);
1088                    }
1089                    else {
1090                        try {
1091                            /*
1092                             * TODO: currently doesn't correctly handle non-selected compound members.
1093                             */
1094                            memberData = ((List<?>)writeBuf).get(writeListIndex++);
1095                        }
1096                        catch (Exception ex) {
1097                            log.debug("compoundTypeIO(): get member[{}] data failure: ", i, ex);
1098                            globalMemberIndex[0]++;
1099                            continue;
1100                        }
1101
1102                        if (memberData == null) {
1103                            log.debug("compoundTypeIO(): member[{}] data is null", i);
1104                            globalMemberIndex[0]++;
1105                            continue;
1106                        }
1107
1108                        try {
1109                            if (memberType.isCompound()) {
1110                                List<?> nestedList = (List<?>)((List<?>)writeBuf).get(writeListIndex++);
1111                                compoundTypeIO(ioType, did, spaceIDs, nSelPoints, memberType, nestedList,
1112                                               globalMemberIndex);
1113                            }
1114                            else {
1115                                writeSingleCompoundMember(did, spaceIDs, nSelPoints, memberType, memberName,
1116                                                          memberData);
1117                                globalMemberIndex[0]++;
1118                            }
1119                        }
1120                        catch (Exception ex) {
1121                            log.debug("compoundTypeIO(): failed to write member[{}]: ", i, ex);
1122                            globalMemberIndex[0]++;
1123                        }
1124                    }
1125                } //  (i = 0, writeListIndex = 0; i < atomicTypeList.size(); i++)
1126            }
1127            catch (Exception ex) {
1128                log.debug("compoundTypeIO(): failure: ", ex);
1129                memberDataList = null;
1130            }
1131
1132            theData = memberDataList;
1133        }
1134
1135        return theData;
1136    }
1137
1138    /*
1139     * Private routine to read a single field of a compound datatype by creating a
1140     * compound datatype and inserting the single field into that datatype.
1141     */
1142    private Object readSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
1143                                            final H5Datatype memberType, String memberName) throws Exception
1144    {
1145        H5Datatype dsDatatype = (H5Datatype)this.getDatatype();
1146        Object memberData     = null;
1147
1148        try {
1149            memberData = H5Datatype.allocateArray(memberType, nSelPoints);
1150            log.trace("readSingleCompoundMember(): allocateArray {} points ", nSelPoints);
1151        }
1152        catch (OutOfMemoryError err) {
1153            memberData = null;
1154            throw new Exception("Out of memory");
1155        }
1156        catch (Exception ex) {
1157            log.debug("readSingleCompoundMember(): ", ex);
1158            memberData = null;
1159        }
1160
1161        if (memberData != null) {
1162            /*
1163             * Create a compound datatype containing just a single field (the one which we
1164             * want to read).
1165             */
1166            long compTid = -1;
1167            try {
1168                compTid = dsDatatype.createCompoundFieldType(memberName);
1169            }
1170            catch (HDF5Exception ex) {
1171                log.debug(
1172                    "readSingleCompoundMember(): unable to create compound field type for member of type {}: ",
1173                    memberType.getDescription(), ex);
1174                memberData = null;
1175            }
1176
1177            /*
1178             * Actually read the data for this member now that everything has been setup.
1179             */
1180            try {
1181                if (memberType.isVarStr()) {
1182                    log.trace(
1183                        "readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1184                        dsetID, compTid,
1185                        (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1186                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1187
1188                    H5.H5Dread_VLStrings(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1189                                         (Object[])memberData);
1190                }
1191                else if (memberType.isVLEN() ||
1192                         (memberType.isArray() && memberType.getDatatypeBase().isVLEN())) {
1193                    log.trace(
1194                        "readSingleCompoundMember(): H5DreadVL did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1195                        dsetID, compTid,
1196                        (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1197                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1198
1199                    H5.H5DreadVL(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1200                                 (Object[])memberData);
1201                }
1202                else {
1203                    log.trace(
1204                        "readSingleCompoundMember(): H5Dread did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1205                        dsetID, compTid,
1206                        (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1207                        (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1208
1209                    H5.H5Dread(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1210                               memberData);
1211                }
1212            }
1213            catch (HDF5DataFiltersException exfltr) {
1214                log.debug("readSingleCompoundMember(): read failure: ", exfltr);
1215                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
1216            }
1217            catch (Exception ex) {
1218                log.debug("readSingleCompoundMember(): read failure: ", ex);
1219                throw new Exception("failed to read compound member: " + ex.getMessage(), ex);
1220            }
1221            finally {
1222                dsDatatype.close(compTid);
1223            }
1224
1225            /*
1226             * Perform any necessary data conversions.
1227             */
1228            if (memberType.isUnsigned()) {
1229                log.trace("readSingleCompoundMember(): converting from unsigned C-type integers");
1230                memberData = Dataset.convertFromUnsignedC(memberData, null);
1231            }
1232            else if (Utils.getJavaObjectRuntimeClass(memberData) == 'B') {
1233                log.trace("readSingleCompoundMember(): converting byte array member into Object");
1234
1235                /*
1236                 * For all other types that get read into memory as a byte[] (such as nested
1237                 * compounds and arrays of compounds), we must manually convert the byte[] into
1238                 * something usable.
1239                 */
1240                memberData = convertByteMember(memberType, (byte[])memberData);
1241            }
1242        }
1243
1244        return memberData;
1245    }
1246
1247    /*
1248     * Private routine to write a single field of a compound datatype by creating a
1249     * compound datatype and inserting the single field into that datatype.
1250     */
1251    private void writeSingleCompoundMember(long dsetID, long[] spaceIDs, int nSelPoints,
1252                                           final H5Datatype memberType, String memberName, Object theData)
1253        throws Exception
1254    {
1255        H5Datatype dsDatatype = (H5Datatype)this.getDatatype();
1256
1257        /*
1258         * Check for any unsupported datatypes before attempting to write this compound
1259         * member.
1260         */
1261        if (memberType.isVLEN() && !memberType.isVarStr()) {
1262            log.debug("writeSingleCompoundMember(): writing of VL non-strings is not currently supported");
1263            throw new Exception("writing of VL non-strings is not currently supported");
1264        }
1265
1266        /*
1267         * Perform any necessary data conversions before writing the data.
1268         */
1269        Object tmpData = theData;
1270        try {
1271            if (memberType.isUnsigned()) {
1272                // Check if we need to convert unsigned integer data from Java-style
1273                // to C-style integers
1274                long tsize   = memberType.getDatatypeSize();
1275                String cname = theData.getClass().getName();
1276                char dname   = cname.charAt(cname.lastIndexOf('[') + 1);
1277                boolean doIntConversion =
1278                    (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I')) ||
1279                     ((tsize == 4) && (dname == 'J')));
1280
1281                if (doIntConversion) {
1282                    log.trace(
1283                        "writeSingleCompoundMember(): converting integer data to unsigned C-type integers");
1284                    tmpData = convertToUnsignedC(theData, null);
1285                }
1286            }
1287            else if (memberType.isString() && (Array.get(theData, 0) instanceof String)) {
1288                log.trace("writeSingleCompoundMember(): converting string array to byte array");
1289                tmpData = stringToByte((String[])theData, (int)memberType.getDatatypeSize());
1290            }
1291            else if (memberType.isEnum() && (Array.get(theData, 0) instanceof String)) {
1292                log.trace("writeSingleCompoundMember(): converting enum names to values");
1293                tmpData = memberType.convertEnumNameToValue((String[])theData);
1294            }
1295        }
1296        catch (Exception ex) {
1297            log.debug("writeSingleCompoundMember(): data conversion failure: ", ex);
1298            tmpData = null;
1299        }
1300
1301        if (tmpData == null) {
1302            log.debug("writeSingleCompoundMember(): data is null");
1303            return;
1304        }
1305
1306        /*
1307         * Create a compound datatype containing just a single field (the one which we
1308         * want to write).
1309         */
1310        long compTid = -1;
1311        try {
1312            compTid = dsDatatype.createCompoundFieldType(memberName);
1313        }
1314        catch (HDF5Exception ex) {
1315            log.debug(
1316                "writeSingleCompoundMember(): unable to create compound field type for member of type {}: ",
1317                memberType.getDescription(), ex);
1318        }
1319
1320        /*
1321         * Actually write the data now that everything has been setup.
1322         */
1323        try {
1324            if (memberType.isVarStr()) {
1325                log.trace(
1326                    "writeSingleCompoundMember(): H5Dwrite_string did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1327                    dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1328                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1329
1330                H5.H5Dwrite_string(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT,
1331                                   (String[])tmpData);
1332            }
1333            else {
1334                log.trace(
1335                    "writeSingleCompoundMember(): H5Dwrite did={} compTid={} spaceIDs[0]={} spaceIDs[1]={}",
1336                    dsetID, compTid, (spaceIDs[0] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[0],
1337                    (spaceIDs[1] == HDF5Constants.H5P_DEFAULT) ? "H5P_DEFAULT" : spaceIDs[1]);
1338
1339                // BUG!!! does not write nested compound data and no
1340                // exception was caught. Need to check if it is a java
1341                // error or C library error.
1342                H5.H5Dwrite(dsetID, compTid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
1343            }
1344        }
1345        catch (Exception ex) {
1346            log.debug("writeSingleCompoundMember(): write failure: ", ex);
1347            throw new Exception("failed to write compound member: " + ex.getMessage(), ex);
1348        }
1349        finally {
1350            dsDatatype.close(compTid);
1351        }
1352    }
1353
1354    /**
1355     * Converts the data values of this data object to appropriate Java integers if
1356     * they are unsigned integers.
1357     *
1358     * @see hdf.object.Dataset#convertToUnsignedC(Object)
1359     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
1360     *
1361     * @return the converted data buffer.
1362     */
1363    @Override
1364    public Object convertFromUnsignedC()
1365    {
1366        throw new UnsupportedOperationException("H5CompoundDS:convertFromUnsignedC Unsupported operation.");
1367    }
1368
1369    /**
1370     * Converts Java integer data values of this data object back to unsigned C-type
1371     * integer data if they are unsigned integers.
1372     *
1373     * @see hdf.object.Dataset#convertToUnsignedC(Object)
1374     * @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
1375     *
1376     * @return the converted data buffer.
1377     */
1378    @Override
1379    public Object convertToUnsignedC()
1380    {
1381        throw new UnsupportedOperationException("H5CompoundDS:convertToUnsignedC Unsupported operation.");
1382    }
1383
1384    /**
1385     * Retrieves the object's metadata, such as attributes, from the file.
1386     *
1387     * Metadata, such as attributes, is stored in a List.
1388     *
1389     * @return the list of metadata objects.
1390     *
1391     * @throws HDF5Exception
1392     *             if the metadata can not be retrieved
1393     */
1394    @Override
1395    public List<Attribute> getMetadata() throws HDF5Exception
1396    {
1397        int gmIndexType  = 0;
1398        int gmIndexOrder = 0;
1399
1400        try {
1401            gmIndexType = fileFormat.getIndexType(null);
1402        }
1403        catch (Exception ex) {
1404            log.debug("getMetadata(): getIndexType failed: ", ex);
1405        }
1406        try {
1407            gmIndexOrder = fileFormat.getIndexOrder(null);
1408        }
1409        catch (Exception ex) {
1410            log.debug("getMetadata(): getIndexOrder failed: ", ex);
1411        }
1412        return this.getMetadata(gmIndexType, gmIndexOrder);
1413    }
1414
1415    /**
1416     * Retrieves the object's metadata, such as attributes, from the file.
1417     *
1418     * Metadata, such as attributes, is stored in a List.
1419     *
1420     * @param attrPropList
1421     *             the list of properties to get
1422     *
1423     * @return the list of metadata objects.
1424     *
1425     * @throws HDF5Exception
1426     *             if the metadata can not be retrieved
1427     */
1428    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception
1429    {
1430        if (!isInited())
1431            init();
1432
1433        try {
1434            this.linkTargetObjName = H5File.getLinkTargetName(this);
1435        }
1436        catch (Exception ex) {
1437            log.debug("getMetadata(): getLinkTargetName failed: ", ex);
1438        }
1439
1440        if (objMetadata.getAttributeList() == null) {
1441            long did  = HDF5Constants.H5I_INVALID_HID;
1442            long pcid = HDF5Constants.H5I_INVALID_HID;
1443            long paid = HDF5Constants.H5I_INVALID_HID;
1444
1445            did = open();
1446            if (did >= 0) {
1447                try {
1448                    // get the compression and chunk information
1449                    pcid             = H5.H5Dget_create_plist(did);
1450                    paid             = H5.H5Dget_access_plist(did);
1451                    long storageSize = H5.H5Dget_storage_size(did);
1452                    int nfilt        = H5.H5Pget_nfilters(pcid);
1453                    int layoutType   = H5.H5Pget_layout(pcid);
1454
1455                    storageLayout.setLength(0);
1456                    compression.setLength(0);
1457
1458                    if (layoutType == HDF5Constants.H5D_CHUNKED) {
1459                        chunkSize = new long[rank];
1460                        H5.H5Pget_chunk(pcid, rank, chunkSize);
1461                        int n = chunkSize.length;
1462                        storageLayout.append("CHUNKED: ").append(chunkSize[0]);
1463                        for (int i = 1; i < n; i++)
1464                            storageLayout.append(" X ").append(chunkSize[i]);
1465
1466                        if (nfilt > 0) {
1467                            long nelmts = 1;
1468                            long uncompSize;
1469                            long datumSize = getDatatype().getDatatypeSize();
1470
1471                            if (datumSize < 0) {
1472                                long tmptid = HDF5Constants.H5I_INVALID_HID;
1473                                try {
1474                                    tmptid    = H5.H5Dget_type(did);
1475                                    datumSize = H5.H5Tget_size(tmptid);
1476                                }
1477                                finally {
1478                                    try {
1479                                        H5.H5Tclose(tmptid);
1480                                    }
1481                                    catch (Exception ex2) {
1482                                        log.debug("getMetadata(): H5Tclose(tmptid {}) failure: ", tmptid,
1483                                                  ex2);
1484                                    }
1485                                }
1486                            }
1487
1488                            for (int i = 0; i < rank; i++)
1489                                nelmts *= dims[i];
1490                            uncompSize = nelmts * datumSize;
1491
1492                            /* compression ratio = uncompressed size / compressed size */
1493
1494                            if (storageSize != 0) {
1495                                double ratio     = (double)uncompSize / (double)storageSize;
1496                                DecimalFormat df = new DecimalFormat();
1497                                df.setMinimumFractionDigits(3);
1498                                df.setMaximumFractionDigits(3);
1499                                compression.append(df.format(ratio)).append(":1");
1500                            }
1501                        }
1502                    }
1503                    else if (layoutType == HDF5Constants.H5D_COMPACT) {
1504                        storageLayout.append("COMPACT");
1505                    }
1506                    else if (layoutType == HDF5Constants.H5D_CONTIGUOUS) {
1507                        storageLayout.append("CONTIGUOUS");
1508                        if (H5.H5Pget_external_count(pcid) > 0)
1509                            storageLayout.append(" - EXTERNAL ");
1510                    }
1511                    else if (layoutType == HDF5Constants.H5D_VIRTUAL) {
1512                        storageLayout.append("VIRTUAL - ");
1513                        try {
1514                            long vmaps = H5.H5Pget_virtual_count(pcid);
1515                            try {
1516                                int virtView = H5.H5Pget_virtual_view(paid);
1517                                long virtGap = H5.H5Pget_virtual_printf_gap(paid);
1518                                if (virtView == HDF5Constants.H5D_VDS_FIRST_MISSING)
1519                                    storageLayout.append("First Missing");
1520                                else
1521                                    storageLayout.append("Last Available");
1522                                storageLayout.append("\nGAP : ").append(virtGap);
1523                            }
1524                            catch (Exception err) {
1525                                log.debug("getMetadata(): vds error: ", err);
1526                                storageLayout.append("ERROR");
1527                            }
1528                            storageLayout.append("\nMAPS : ").append(vmaps);
1529                            if (vmaps > 0) {
1530                                for (long next = 0; next < vmaps; next++) {
1531                                    try {
1532                                        H5.H5Pget_virtual_vspace(pcid, next);
1533                                        H5.H5Pget_virtual_srcspace(pcid, next);
1534                                        String fname    = H5.H5Pget_virtual_filename(pcid, next);
1535                                        String dsetname = H5.H5Pget_virtual_dsetname(pcid, next);
1536                                        storageLayout.append("\n").append(fname).append(" : ").append(
1537                                            dsetname);
1538                                    }
1539                                    catch (Exception err) {
1540                                        log.debug("getMetadata(): vds space[{}] error: ", next, err);
1541                                        storageLayout.append("ERROR");
1542                                    }
1543                                }
1544                            }
1545                        }
1546                        catch (Exception err) {
1547                            log.debug("getMetadata(): vds count error: ", err);
1548                            storageLayout.append("ERROR");
1549                        }
1550                    }
1551                    else {
1552                        chunkSize = null;
1553                        storageLayout.append("NONE");
1554                    }
1555
1556                    int[] flags     = {0, 0};
1557                    long[] cdNelmts = {20};
1558                    int[] cdValues  = new int[(int)cdNelmts[0]];
1559                    String[] cdName = {"", ""};
1560                    log.trace("getMetadata(): {} filters in pipeline", nfilt);
1561                    int filter         = -1;
1562                    int[] filterConfig = {1};
1563
1564                    filters.setLength(0);
1565
1566                    if (nfilt == 0) {
1567                        filters.append("NONE");
1568                    }
1569                    else {
1570                        for (int i = 0, k = 0; i < nfilt; i++) {
1571                            log.trace("getMetadata(): filter[{}]", i);
1572                            if (i > 0)
1573                                filters.append(", ");
1574                            if (k > 0)
1575                                compression.append(", ");
1576
1577                            try {
1578                                cdNelmts[0] = 20;
1579                                cdValues    = new int[(int)cdNelmts[0]];
1580                                cdValues    = new int[(int)cdNelmts[0]];
1581                                filter = H5.H5Pget_filter(pcid, i, flags, cdNelmts, cdValues, 120, cdName,
1582                                                          filterConfig);
1583                                log.trace("getMetadata(): filter[{}] is {} has {} elements ", i, cdName[0],
1584                                          cdNelmts[0]);
1585                                for (int j = 0; j < cdNelmts[0]; j++)
1586                                    log.trace("getMetadata(): filter[{}] element {} = {}", i, j, cdValues[j]);
1587                            }
1588                            catch (Exception err) {
1589                                log.debug("getMetadata(): filter[{}] error: ", i, err);
1590                                filters.append("ERROR");
1591                                continue;
1592                            }
1593
1594                            if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1595                                filters.append("NONE");
1596                            }
1597                            else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1598                                filters.append("GZIP");
1599                                compression.append(COMPRESSION_GZIP_TXT).append(cdValues[0]);
1600                                k++;
1601                            }
1602                            else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1603                                filters.append("Error detection filter");
1604                            }
1605                            else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1606                                filters.append("SHUFFLE: Nbytes = ").append(cdValues[0]);
1607                            }
1608                            else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1609                                filters.append("NBIT");
1610                            }
1611                            else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1612                                filters.append("SCALEOFFSET: MIN BITS = ").append(cdValues[0]);
1613                            }
1614                            else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1615                                filters.append("SZIP");
1616                                compression.append("SZIP: Pixels per block = ").append(cdValues[1]);
1617                                k++;
1618                                int flag = -1;
1619                                try {
1620                                    flag = H5.H5Zget_filter_info(filter);
1621                                }
1622                                catch (Exception ex) {
1623                                    log.debug("getMetadata(): H5Zget_filter_info failure: ", ex);
1624                                    flag = -1;
1625                                }
1626                                if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)
1627                                    compression.append(": H5Z_FILTER_CONFIG_DECODE_ENABLED");
1628                                else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) ||
1629                                         (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED +
1630                                                   HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED)))
1631                                    compression.append(": H5Z_FILTER_CONFIG_ENCODE_ENABLED");
1632                            }
1633                            else {
1634                                filters.append("USERDEFINED ")
1635                                    .append(cdName[0])
1636                                    .append("(")
1637                                    .append(filter)
1638                                    .append("): ");
1639                                for (int j = 0; j < cdNelmts[0]; j++) {
1640                                    if (j > 0)
1641                                        filters.append(", ");
1642                                    filters.append(cdValues[j]);
1643                                }
1644                                log.debug("getMetadata(): filter[{}] is user defined compression", i);
1645                            }
1646                        } //  (int i=0; i<nfilt; i++)
1647                    }
1648
1649                    if (compression.length() == 0)
1650                        compression.append("NONE");
1651                    log.trace("getMetadata(): filter compression={}", compression);
1652                    log.trace("getMetadata(): filter information={}", filters);
1653
1654                    storage.setLength(0);
1655                    storage.append("SIZE: ").append(storageSize);
1656
1657                    try {
1658                        int[] at = {0};
1659                        H5.H5Pget_alloc_time(pcid, at);
1660                        storage.append(", allocation time: ");
1661                        if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY)
1662                            storage.append("Early");
1663                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR)
1664                            storage.append("Incremental");
1665                        else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE)
1666                            storage.append("Late");
1667                        else
1668                            storage.append("Default");
1669                    }
1670                    catch (Exception ex) {
1671                        log.debug("getMetadata(): Storage allocation time:", ex);
1672                    }
1673                    log.trace("getMetadata(): storage={}", storage);
1674                }
1675                finally {
1676                    try {
1677                        H5.H5Pclose(paid);
1678                    }
1679                    catch (Exception ex) {
1680                        log.debug("getMetadata(): H5Pclose(paid {}) failure: ", paid, ex);
1681                    }
1682                    try {
1683                        H5.H5Pclose(pcid);
1684                    }
1685                    catch (Exception ex) {
1686                        log.debug("getMetadata(): H5Pclose(pcid {}) failure: ", pcid, ex);
1687                    }
1688                    close(did);
1689                }
1690            }
1691        }
1692
1693        List<Attribute> attrlist = null;
1694        try {
1695            attrlist = objMetadata.getMetadata(attrPropList);
1696        }
1697        catch (Exception ex) {
1698            log.debug("getMetadata(): getMetadata failed: ", ex);
1699        }
1700        return attrlist;
1701    }
1702
1703    /**
1704     * Writes a specific piece of metadata (such as an attribute) into the file.
1705     *
1706     * If an HDF(4&amp;5) attribute exists in the file, this method updates its
1707     * value. If the attribute does not exist in the file, it creates the
1708     * attribute in the file and attaches it to the object. It will fail to
1709     * write a new attribute to the object where an attribute with the same name
1710     * already exists. To update the value of an existing attribute in the file,
1711     * one needs to get the instance of the attribute by getMetadata(), change
1712     * its values, then use writeMetadata() to write the value.
1713     *
1714     * @param info
1715     *            the metadata to write.
1716     *
1717     * @throws Exception
1718     *             if the metadata can not be written
1719     */
1720    @Override
1721    public void writeMetadata(Object info) throws Exception
1722    {
1723        try {
1724            objMetadata.writeMetadata(info);
1725        }
1726        catch (Exception ex) {
1727            log.debug("writeMetadata(): Object not an Attribute");
1728            return;
1729        }
1730    }
1731
1732    /**
1733     * Deletes an existing piece of metadata from this object.
1734     *
1735     * @param info
1736     *            the metadata to delete.
1737     *
1738     * @throws HDF5Exception
1739     *             if the metadata can not be removed
1740     */
1741    @Override
1742    public void removeMetadata(Object info) throws HDF5Exception
1743    {
1744        try {
1745            objMetadata.removeMetadata(info);
1746        }
1747        catch (Exception ex) {
1748            log.debug("removeMetadata(): Object not an Attribute");
1749            return;
1750        }
1751
1752        Attribute attr = (Attribute)info;
1753        log.trace("removeMetadata(): {}", attr.getAttributeName());
1754        long did = open();
1755        if (did >= 0) {
1756            try {
1757                H5.H5Adelete(did, attr.getAttributeName());
1758            }
1759            finally {
1760                close(did);
1761            }
1762        }
1763        else {
1764            log.debug("removeMetadata(): failed to open compound dataset");
1765        }
1766    }
1767
1768    /**
1769     * Updates an existing piece of metadata attached to this object.
1770     *
1771     * @param info
1772     *            the metadata to update.
1773     *
1774     * @throws HDF5Exception
1775     *             if the metadata can not be updated
1776     */
1777    @Override
1778    public void updateMetadata(Object info) throws HDF5Exception
1779    {
1780        try {
1781            objMetadata.updateMetadata(info);
1782        }
1783        catch (Exception ex) {
1784            log.debug("updateMetadata(): Object not an Attribute");
1785            return;
1786        }
1787    }
1788
1789    /*
1790     * (non-Javadoc)
1791     *
1792     * @see hdf.object.HObject#setName(java.lang.String)
1793     */
1794    @Override
1795    public void setName(String newName) throws Exception
1796    {
1797        if (newName == null)
1798            throw new IllegalArgumentException("The new name is NULL");
1799
1800        H5File.renameObject(this, newName);
1801        super.setName(newName);
1802    }
1803
1804    /**
1805     * @deprecated Not for public use in the future. <br>
1806     *             Using
1807     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[],
1808     * long[][], Object)}
1809     *
1810     * @param name
1811     *            the name of the dataset to create.
1812     * @param pgroup
1813     *            parent group where the new dataset is created.
1814     * @param dims
1815     *            the dimension size of the dataset.
1816     * @param memberNames
1817     *            the names of compound datatype
1818     * @param memberDatatypes
1819     *            the datatypes of the compound datatype
1820     * @param memberSizes
1821     *            the dim sizes of the members
1822     * @param data
1823     *            list of data arrays written to the new dataset, null if no data is written to the new
1824     *            dataset.
1825     *
1826     * @return the new compound dataset if successful; otherwise returns null.
1827     *
1828     * @throws Exception
1829     *             if there is a failure.
1830     */
1831    @Deprecated
1832    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1833                                 Datatype[] memberDatatypes, int[] memberSizes, Object data) throws Exception
1834    {
1835        if ((pgroup == null) || (name == null) || (dims == null) || (memberNames == null) ||
1836            (memberDatatypes == null) || (memberSizes == null)) {
1837            return null;
1838        }
1839
1840        int nMembers        = memberNames.length;
1841        int memberRanks[]   = new int[nMembers];
1842        long memberDims[][] = new long[nMembers][1];
1843        for (int i = 0; i < nMembers; i++) {
1844            memberRanks[i]   = 1;
1845            memberDims[i][0] = memberSizes[i];
1846        }
1847
1848        return H5CompoundDS.create(name, pgroup, dims, memberNames, memberDatatypes, memberRanks, memberDims,
1849                                   data);
1850    }
1851
1852    /**
1853     * @deprecated Not for public use in the future. <br>
1854     *             Using
1855     *             {@link #create(String, Group, long[], long[], long[], int, String[], Datatype[], int[],
1856     * long[][], Object)}
1857     *
1858     * @param name
1859     *            the name of the dataset to create.
1860     * @param pgroup
1861     *            parent group where the new dataset is created.
1862     * @param dims
1863     *            the dimension size of the dataset.
1864     * @param memberNames
1865     *            the names of compound datatype
1866     * @param memberDatatypes
1867     *            the datatypes of the compound datatype
1868     * @param memberRanks
1869     *            the ranks of the members
1870     * @param memberDims
1871     *            the dim sizes of the members
1872     * @param data
1873     *            list of data arrays written to the new dataset, null if no data is written to the new
1874     *            dataset.
1875     *
1876     * @return the new compound dataset if successful; otherwise returns null.
1877     *
1878     * @throws Exception
1879     *             if the dataset can not be created.
1880     */
1881    @Deprecated
1882    public static Dataset create(String name, Group pgroup, long[] dims, String[] memberNames,
1883                                 Datatype[] memberDatatypes, int[] memberRanks, long[][] memberDims,
1884                                 Object data) throws Exception
1885    {
1886        return H5CompoundDS.create(name, pgroup, dims, null, null, -1, memberNames, memberDatatypes,
1887                                   memberRanks, memberDims, data);
1888    }
1889
1890    /**
1891     * Creates a simple compound dataset in a file with/without chunking and compression.
1892     *
1893     * This function provides an easy way to create a simple compound dataset in file by hiding tedious
1894     * details of creating a compound dataset from users.
1895     *
1896     * This function calls H5.H5Dcreate() to create a simple compound dataset in file. Nested compound
1897     * dataset is not supported. The required information to create a compound dataset includes the
1898     * name, the parent group and data space of the dataset, the names, datatypes and data spaces of the
1899     * compound fields. Other information such as chunks, compression and the data buffer is optional.
1900     *
1901     * The following example shows how to use this function to create a compound dataset in file.
1902     *
1903     * <pre>
1904     * H5File file = null;
1905     * String message = &quot;&quot;;
1906     * Group pgroup = null;
1907     * int[] DATA_INT = new int[DIM_SIZE];
1908     * float[] DATA_FLOAT = new float[DIM_SIZE];
1909     * String[] DATA_STR = new String[DIM_SIZE];
1910     * long[] DIMs = { 50, 10 };
1911     * long[] CHUNKs = { 25, 5 };
1912     *
1913     * try {
1914     *     file = (H5File) H5FILE.open(fname, H5File.CREATE);
1915     *     file.open();
1916     *     pgroup = (Group) file.get(&quot;/&quot;);
1917     * }
1918     * catch (Exception ex) {
1919     * }
1920     *
1921     * Vector data = new Vector();
1922     * data.add(0, DATA_INT);
1923     * data.add(1, DATA_FLOAT);
1924     * data.add(2, DATA_STR);
1925     *
1926     * // create groups
1927     * Datatype[] mdtypes = new H5Datatype[3];
1928     * String[] mnames = { &quot;int&quot;, &quot;float&quot;, &quot;string&quot; };
1929     * Dataset dset = null;
1930     * try {
1931     *     mdtypes[0] = new H5Datatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
1932     *     mdtypes[1] = new H5Datatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, Datatype.NATIVE);
1933     *     mdtypes[2] = new H5Datatype(Datatype.CLASS_STRING, STR_LEN, Datatype.NATIVE, Datatype.NATIVE);
1934     *     dset = file.createCompoundDS(&quot;/CompoundDS&quot;, pgroup, DIMs, null, CHUNKs, 9, mnames,
1935     * mdtypes, null, data);
1936     * }
1937     * catch (Exception ex) {
1938     *     failed(message, ex, file);
1939     *     return 1;
1940     * }
1941     * </pre>
1942     *
1943     * @param name
1944     *            the name of the dataset to create.
1945     * @param pgroup
1946     *            parent group where the new dataset is created.
1947     * @param dims
1948     *            the dimension size of the dataset.
1949     * @param maxdims
1950     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1951     * @param chunks
1952     *            the chunk size of the dataset. No chunking if chunk = null.
1953     * @param gzip
1954     *            GZIP compression level (1 to 9). 0 or negative values if no compression.
1955     * @param memberNames
1956     *            the names of compound datatype
1957     * @param memberDatatypes
1958     *            the datatypes of the compound datatype
1959     * @param memberRanks
1960     *            the ranks of the members
1961     * @param memberDims
1962     *            the dim sizes of the members
1963     * @param data
1964     *            list of data arrays written to the new dataset, null if no data is written to the new
1965     *            dataset.
1966     *
1967     * @return the new compound dataset if successful; otherwise returns null.
1968     *
1969     * @throws Exception
1970     *             if there is a failure.
1971     */
1972    public static Dataset create(String name, Group pgroup, long[] dims, long[] maxdims, long[] chunks,
1973                                 int gzip, String[] memberNames, Datatype[] memberDatatypes,
1974                                 int[] memberRanks, long[][] memberDims, Object data) throws Exception
1975    {
1976        H5CompoundDS dataset = null;
1977        String fullPath      = null;
1978        long did             = HDF5Constants.H5I_INVALID_HID;
1979        long plist           = HDF5Constants.H5I_INVALID_HID;
1980        long sid             = HDF5Constants.H5I_INVALID_HID;
1981        long tid             = HDF5Constants.H5I_INVALID_HID;
1982
1983        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null)) ||
1984            (memberNames == null) || (memberDatatypes == null) || (memberRanks == null) ||
1985            (memberDims == null)) {
1986            log.debug("create(): one or more parameters are null");
1987            return null;
1988        }
1989
1990        H5File file = (H5File)pgroup.getFileFormat();
1991        if (file == null) {
1992            log.debug("create(): parent group FileFormat is null");
1993            return null;
1994        }
1995
1996        String path = HObject.SEPARATOR;
1997        if (!pgroup.isRoot()) {
1998            path = pgroup.getPath() + pgroup.getName() + HObject.SEPARATOR;
1999            if (name.endsWith("/"))
2000                name = name.substring(0, name.length() - 1);
2001            int idx = name.lastIndexOf('/');
2002            if (idx >= 0)
2003                name = name.substring(idx + 1);
2004        }
2005
2006        fullPath = path + name;
2007
2008        int typeSize   = 0;
2009        int nMembers   = memberNames.length;
2010        long[] mTypes  = new long[nMembers];
2011        int memberSize = 1;
2012        for (int i = 0; i < nMembers; i++) {
2013            memberSize = 1;
2014            for (int j = 0; j < memberRanks[i]; j++)
2015                memberSize *= memberDims[i][j];
2016
2017            mTypes[i] = -1;
2018            // the member is an array
2019            if ((memberSize > 1) && (!memberDatatypes[i].isString())) {
2020                long tmptid = -1;
2021                if ((tmptid = memberDatatypes[i].createNative()) >= 0) {
2022                    try {
2023                        mTypes[i] = H5.H5Tarray_create(tmptid, memberRanks[i], memberDims[i]);
2024                    }
2025                    finally {
2026                        try {
2027                            H5.H5Tclose(tmptid);
2028                        }
2029                        catch (Exception ex) {
2030                            log.debug("create(): H5Tclose(tmptid {}) failure: ", tmptid, ex);
2031                        }
2032                    }
2033                }
2034            }
2035            else {
2036                mTypes[i] = memberDatatypes[i].createNative();
2037            }
2038            try {
2039                typeSize += H5.H5Tget_size(mTypes[i]);
2040            }
2041            catch (Exception ex) {
2042                log.debug("create(): array create H5Tget_size:", ex);
2043
2044                while (i > 0) {
2045                    try {
2046                        H5.H5Tclose(mTypes[i]);
2047                    }
2048                    catch (HDF5Exception ex2) {
2049                        log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex2);
2050                    }
2051                    i--;
2052                }
2053                throw ex;
2054            }
2055        } //  (int i = 0; i < nMembers; i++) {
2056
2057        // setup chunking and compression
2058        boolean isExtentable = false;
2059        if (maxdims != null) {
2060            for (int i = 0; i < maxdims.length; i++) {
2061                if (maxdims[i] == 0)
2062                    maxdims[i] = dims[i];
2063                else if (maxdims[i] < 0)
2064                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
2065
2066                if (maxdims[i] != dims[i])
2067                    isExtentable = true;
2068            }
2069        }
2070
2071        // HDF5 requires you to use chunking in order to define extendible
2072        // datasets. Chunking makes it possible to extend datasets efficiently,
2073        // without having to reorganize storage excessively. Using default size
2074        // of 64x...which has good performance
2075        if ((chunks == null) && isExtentable) {
2076            chunks = new long[dims.length];
2077            for (int i = 0; i < dims.length; i++)
2078                chunks[i] = Math.min(dims[i], 64);
2079        }
2080
2081        // prepare the dataspace and datatype
2082        int rank = dims.length;
2083
2084        try {
2085            sid = H5.H5Screate_simple(rank, dims, maxdims);
2086
2087            // figure out creation properties
2088            plist = HDF5Constants.H5P_DEFAULT;
2089
2090            tid        = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, typeSize);
2091            int offset = 0;
2092            for (int i = 0; i < nMembers; i++) {
2093                H5.H5Tinsert(tid, memberNames[i], offset, mTypes[i]);
2094                offset += H5.H5Tget_size(mTypes[i]);
2095            }
2096
2097            if (chunks != null) {
2098                plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
2099
2100                H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
2101                H5.H5Pset_chunk(plist, rank, chunks);
2102
2103                // compression requires chunking
2104                if (gzip > 0) {
2105                    H5.H5Pset_deflate(plist, gzip);
2106                }
2107            }
2108
2109            long fid = file.getFID();
2110
2111            did     = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
2112                                   HDF5Constants.H5P_DEFAULT);
2113            dataset = new H5CompoundDS(file, name, path);
2114        }
2115        finally {
2116            try {
2117                H5.H5Pclose(plist);
2118            }
2119            catch (HDF5Exception ex) {
2120                log.debug("create(): H5Pclose(plist {}) failure: ", plist, ex);
2121            }
2122            try {
2123                H5.H5Sclose(sid);
2124            }
2125            catch (HDF5Exception ex) {
2126                log.debug("create(): H5Sclose(sid {}) failure: ", sid, ex);
2127            }
2128            try {
2129                H5.H5Tclose(tid);
2130            }
2131            catch (HDF5Exception ex) {
2132                log.debug("create(): H5Tclose(tid {}) failure: ", tid, ex);
2133            }
2134            try {
2135                H5.H5Dclose(did);
2136            }
2137            catch (HDF5Exception ex) {
2138                log.debug("create(): H5Dclose(did {}) failure: ", did, ex);
2139            }
2140
2141            for (int i = 0; i < nMembers; i++) {
2142                try {
2143                    H5.H5Tclose(mTypes[i]);
2144                }
2145                catch (HDF5Exception ex) {
2146                    log.debug("create(): H5Tclose(mTypes[{}] {}) failure: ", i, mTypes[i], ex);
2147                }
2148            }
2149        }
2150
2151        if (dataset != null) {
2152            pgroup.addToMemberList(dataset);
2153            if (data != null) {
2154                dataset.init();
2155                long selected[] = dataset.getSelectedDims();
2156                for (int i = 0; i < rank; i++)
2157                    selected[i] = dims[i];
2158                dataset.write(data);
2159            }
2160        }
2161
2162        return dataset;
2163    }
2164
2165    /*
2166     * (non-Javadoc)
2167     *
2168     * @see hdf.object.Dataset#isString(long)
2169     */
2170    @Override
2171    public boolean isString(long tid)
2172    {
2173        boolean b = false;
2174        try {
2175            b = (HDF5Constants.H5T_STRING == H5.H5Tget_class(tid));
2176        }
2177        catch (Exception ex) {
2178            b = false;
2179        }
2180
2181        return b;
2182    }
2183
2184    /*
2185     * (non-Javadoc)
2186     *
2187     * @see hdf.object.Dataset#getSize(long)
2188     */
2189    @Override
2190    public long getSize(long tid)
2191    {
2192        return H5Datatype.getDatatypeSize(tid);
2193    }
2194
2195    /*
2196     * (non-Javadoc)
2197     *
2198     * @see hdf.object.Dataset#isVirtual()
2199     */
2200    @Override
2201    public boolean isVirtual()
2202    {
2203        return isVirtual;
2204    }
2205
2206    /*
2207     * (non-Javadoc)
2208     *
2209     * @see hdf.object.Dataset#getVirtualFilename(int)
2210     */
2211    @Override
2212    public String getVirtualFilename(int index)
2213    {
2214        if (isVirtual)
2215            return virtualNameList.get(index);
2216        else
2217            return null;
2218    }
2219
2220    /*
2221     * (non-Javadoc)
2222     *
2223     * @see hdf.object.Dataset#getVirtualMaps()
2224     */
2225    @Override
2226    public int getVirtualMaps()
2227    {
2228        if (isVirtual)
2229            return virtualNameList.size();
2230        else
2231            return -1;
2232    }
2233
2234    /*
2235     * (non-Javadoc)
2236     *
2237     * @see hdf.object.Dataset#toString(String delimiter, int maxItems)
2238     */
2239    @Override
2240    public String toString(String delimiter, int maxItems)
2241    {
2242        Object theData = originalBuf;
2243        if (theData == null) {
2244            log.debug("toString: value is null");
2245            return null;
2246        }
2247
2248        if (theData instanceof List<?>) {
2249            log.trace("toString: value is list");
2250            return null;
2251        }
2252
2253        Class<? extends Object> valClass = theData.getClass();
2254
2255        if (!valClass.isArray()) {
2256            log.trace("toString: finish - not array");
2257            String strValue = theData.toString();
2258            if (maxItems > 0 && strValue.length() > maxItems)
2259                // truncate the extra characters
2260                strValue = strValue.substring(0, maxItems);
2261            return strValue;
2262        }
2263
2264        // value is an array
2265        StringBuilder sb = new StringBuilder();
2266        int n            = Array.getLength(theData);
2267        if ((maxItems > 0) && (n > maxItems))
2268            n = maxItems;
2269
2270        log.trace("toString: isStdRef={} Array.getLength={}", ((H5Datatype)getDatatype()).isStdRef(), n);
2271        if (((H5Datatype)getDatatype()).isStdRef()) {
2272            String cname = valClass.getName();
2273            char dname   = cname.charAt(cname.lastIndexOf('[') + 1);
2274            log.trace("toString: isStdRef with cname={} dname={}", cname, dname);
2275            String ref_str = ((H5ReferenceType)getDatatype()).getObjectReferenceName((byte[])theData);
2276            log.trace("toString: ref_str={}", ref_str);
2277            return ref_str;
2278        }
2279        else {
2280            return super.toString(delimiter, maxItems);
2281        }
2282    }
2283}