001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the COPYING file, which can be found  *
009 * at the root of the source code distribution tree,                         *
010 * or in https://www.hdfgroup.org/licenses.                                  *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.math.BigDecimal;
019import java.math.BigInteger;
020import java.nio.ByteBuffer;
021import java.nio.ByteOrder;
022import java.text.DecimalFormat;
023import java.util.ArrayList;
024import java.util.Arrays;
025import java.util.Collection;
026import java.util.HashMap;
027import java.util.Iterator;
028import java.util.List;
029import java.util.Map;
030import java.util.Vector;
031
032import hdf.object.Attribute;
033import hdf.object.DataFormat;
034import hdf.object.Dataset;
035import hdf.object.Datatype;
036import hdf.object.FileFormat;
037import hdf.object.Group;
038import hdf.object.HObject;
039import hdf.object.MetaDataContainer;
040import hdf.object.ScalarDS;
041import hdf.object.h5.H5Attribute;
042import hdf.object.h5.H5Datatype;
043import hdf.object.h5.H5ReferenceType;
044
045import hdf.hdf5lib.H5;
046import hdf.hdf5lib.HDF5Constants;
047import hdf.hdf5lib.HDFNativeData;
048import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
049import hdf.hdf5lib.exceptions.HDF5Exception;
050import hdf.hdf5lib.exceptions.HDF5LibraryException;
051
052import org.slf4j.Logger;
053import org.slf4j.LoggerFactory;
054
055/**
056 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group
057 * or named datatype.
058 *
059 * Like a dataset, an attribute has a name, datatype and dataspace.
060 *
061 * For more details on attributes, <a
062 * href="https://support.hdfgroup.org/releases/hdf5/v1_14/v1_14_5/documentation/doxygen/_h5_a__u_g.html#sec_attribute">HDF5
063 * Attributes in HDF5 User Guide</a>
064 *
065 * The following code is an example of an attribute with 1D integer array of two elements.
066 *
067 * <pre>
068 * // Example of creating a new attribute
069 * // The name of the new attribute
070 * String name = "Data range";
071 * // Creating an unsigned 1-byte integer datatype
072 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
073 *                              1,                      // size in bytes
074 *                              Datatype.ORDER_LE,      // byte order
075 *                              Datatype.SIGN_NONE);    // unsigned
076 * // 1-D array of size two
077 * long[] dims = {2};
078 * // The value of the attribute
079 * int[] value = {0, 255};
080 * // Create a new attribute
 * Attribute dataRange = new H5ScalarAttr(parentObj, name, type, dims);
082 * // Set the attribute value
083 * dataRange.setValue(value);
084 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
085 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
086 * </pre>
087 *
088 *
089 * For an atomic datatype, the value of an Attribute will be a 1D array of integers, floats and strings.
090 *
091 * @see hdf.object.Datatype
092 *
093 * @version 1.0 6/15/2021
094 * @author Allen Byrne
095 */
096public class H5ScalarAttr extends ScalarDS implements H5Attribute {
097    private static final long serialVersionUID = 2072473407027648309L;
098
099    private static final Logger log = LoggerFactory.getLogger(H5ScalarAttr.class);
100
    /** The HObject to which this H5ScalarAttr is attached, Attribute interface */
102    protected HObject parentObject;
103
104    /** additional information and properties for the attribute, Attribute interface */
105    private transient Map<String, Object> properties;
106
107    /**
     * flag to indicate if the datatype in file is the same as datatype in memory
109     */
110    protected boolean isNativeDatatype = false;
111
112    /**
113     * Create an attribute with specified name, data type and dimension sizes.
114     *
115     * For scalar attribute, the dimension size can be either an array of size one
116     * or null, and the rank can be either 1 or zero. Attribute is a general class
117     * and is independent of file format, e.g., the implementation of attribute
118     * applies to both HDF4 and HDF5.
119     *
120     * The following example creates a string attribute with the name "CLASS" and
121     * value "IMAGE".
122     *
123     * <pre>
124     * long[] attrDims = { 1 };
125     * String attrName = &quot;CLASS&quot;;
126     * String[] classValue = { &quot;IMAGE&quot; };
127     * Datatype attrType = null;
128     * try {
129     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
130     * Datatype.NATIVE);
131     * }
132     * catch (Exception ex) {}
133     * Attribute attr = new H5ScalarAttr(attrName, attrType, attrDims);
134     * attr.setValue(classValue);
135     * </pre>
136     *
137     * @param parentObj
138     *            the HObject to which this H5ScalarAttr is attached.
139     * @param attrName
140     *            the name of the attribute.
141     * @param attrType
142     *            the datatype of the attribute.
143     * @param attrDims
144     *            the dimension sizes of the attribute, null for scalar attribute
145     *
146     * @see hdf.object.Datatype
147     */
148    public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims)
149    {
150        this(parentObj, attrName, attrType, attrDims, null);
151    }
152
153    /**
154     * Create an attribute with specific name and value.
155     *
156     * For scalar attribute, the dimension size can be either an array of size one
157     * or null, and the rank can be either 1 or zero. Attribute is a general class
158     * and is independent of file format, e.g., the implementation of attribute
159     * applies to both HDF4 and HDF5.
160     *
161     * The following example creates a string attribute with the name "CLASS" and
162     * value "IMAGE".
163     *
164     * <pre>
165     * long[] attrDims = { 1 };
166     * String attrName = &quot;CLASS&quot;;
167     * String[] classValue = { &quot;IMAGE&quot; };
168     * Datatype attrType = null;
169     * try {
170     *     attrType = new H5Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
171     * Datatype.NATIVE);
172     * }
173     * catch (Exception ex) {}
174     * Attribute attr = new H5ScalarAttr(attrName, attrType, attrDims, classValue);
175     * </pre>
176     *
177     * @param parentObj
178     *            the HObject to which this H5ScalarAttr is attached.
179     * @param attrName
180     *            the name of the attribute.
181     * @param attrType
182     *            the datatype of the attribute.
183     * @param attrDims
184     *            the dimension sizes of the attribute, null for scalar attribute
185     * @param attrValue
186     *            the value of the attribute, null if no value
187     *
188     * @see hdf.object.Datatype
189     */
190    @SuppressWarnings({"rawtypes", "unchecked", "deprecation"})
191    public H5ScalarAttr(HObject parentObj, String attrName, Datatype attrType, long[] attrDims,
192                        Object attrValue)
193    {
194        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
195              (parentObj == null) ? null : parentObj.getFullName(), null);
196
197        log.trace("H5ScalarAttr: start {}", parentObj);
198        this.parentObject = parentObj;
199
200        datatype = attrType;
201
202        if (attrValue != null) {
203            data         = attrValue;
204            originalBuf  = attrValue;
205            isDataLoaded = true;
206        }
207        properties = new HashMap();
208
209        if (attrDims == null) {
210            rank     = 1;
211            dims     = new long[] {1};
212            isScalar = true;
213        }
214        else {
215            dims     = attrDims;
216            rank     = dims.length;
217            isScalar = false;
218        }
219
220        selectedDims   = new long[rank];
221        startDims      = new long[rank];
222        selectedStride = new long[rank];
223
224        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}", attrName,
225                  attrType.getDescription(), data, rank, getDatatype().isUnsigned());
226
227        resetSelection();
228    }
229
230    /*
231     * (non-Javadoc)
232     *
233     * @see hdf.object.HObject#open()
234     */
235    @Override
236    public long open()
237    {
238        if (parentObject == null) {
239            log.debug("open(): attribute's parent object is null");
240            return HDF5Constants.H5I_INVALID_HID;
241        }
242
243        long aid    = HDF5Constants.H5I_INVALID_HID;
244        long pObjID = HDF5Constants.H5I_INVALID_HID;
245
246        try {
247            pObjID = parentObject.open();
248            if (pObjID >= 0) {
249                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
250                    log.trace("open(): FILE_TYPE_HDF5");
251                    if (H5.H5Aexists(pObjID, getName()))
252                        aid = H5.H5Aopen(pObjID, getName(), HDF5Constants.H5P_DEFAULT);
253                }
254            }
255
256            log.trace("open(): aid={}", aid);
257        }
258        catch (Exception ex) {
259            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
260            aid = HDF5Constants.H5I_INVALID_HID;
261        }
262        finally {
263            parentObject.close(pObjID);
264        }
265
266        return aid;
267    }
268
269    /*
270     * (non-Javadoc)
271     *
272     * @see hdf.object.HObject#close(int)
273     */
274    @Override
275    public void close(long aid)
276    {
277        if (aid >= 0) {
278            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5))) {
279                log.trace("close(): FILE_TYPE_HDF5");
280                try {
281                    H5.H5Aclose(aid);
282                }
283                catch (HDF5Exception ex) {
284                    log.debug("close(): H5Aclose({}) failure: ", aid, ex);
285                }
286            }
287        }
288    }
289
290    /**
291     * Retrieves datatype and dataspace information from file and sets the attribute
292     * in memory.
293     *
294     * The init() is designed to support lazy operation in a attribute object. When a
295     * data object is retrieved from file, the datatype, dataspace and raw data are
296     * not loaded into memory. When it is asked to read the raw data from file,
297     * init() is first called to get the datatype and dataspace information, then
298     * load the raw data from file.
299     */
300    @Override
301    public void init()
302    {
303        if (inited) {
304            // already called. Initialize only once
305            resetSelection();
306            log.trace("init(): H5ScalarAttr already initialized");
307            return;
308        }
309
310        long aid       = HDF5Constants.H5I_INVALID_HID;
311        long tid       = HDF5Constants.H5I_INVALID_HID;
312        long sid       = HDF5Constants.H5I_INVALID_HID;
313        long nativeTID = HDF5Constants.H5I_INVALID_HID;
314
315        aid = open();
316        if (aid >= 0) {
317            try {
318                sid        = H5.H5Aget_space(aid);
319                rank       = H5.H5Sget_simple_extent_ndims(sid);
320                space_type = H5.H5Sget_simple_extent_type(sid);
321                if (space_type == HDF5Constants.H5S_NULL)
322                    isNULL = true;
323                else
324                    isNULL = false;
325                tid = H5.H5Aget_type(aid);
326                log.trace("init(): tid={} sid={} rank={} space_type={}", tid, sid, rank, space_type);
327
328                if (rank == 0) {
329                    // a scalar data point
330                    isScalar = true;
331                    rank     = 1;
332                    dims     = new long[] {1};
333                    log.trace("init(): rank is a scalar data point");
334                }
335                else {
336                    isScalar = false;
337                    dims     = new long[rank];
338                    maxDims  = new long[rank];
339                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
340                    log.trace("init(): rank={}, dims={}, maxDims={}", rank, dims, maxDims);
341                }
342
343                if (datatype == null) {
344                    try {
345                        int nativeClass = H5.H5Tget_class(tid);
346                        if (nativeClass == HDF5Constants.H5T_REFERENCE) {
347                            long lsize = 1;
348                            if (rank > 0) {
349                                log.trace("init(): rank={}, dims={}", rank, dims);
350                                for (int j = 0; j < dims.length; j++) {
351                                    lsize *= dims[j];
352                                }
353                            }
354                            datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
355                        }
356                        else
357                            datatype = new H5Datatype(getFileFormat(), tid);
358
359                        log.trace(
360                            "init(): tid={} is tclass={} has isText={} : isNamed={} :  isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}",
361                            tid, datatype.getDatatypeClass(), ((H5Datatype)datatype).isText(),
362                            datatype.isNamed(), datatype.isVLEN(), datatype.isEnum(), datatype.isUnsigned(),
363                            ((H5Datatype)datatype).isRegRef());
364                    }
365                    catch (Exception ex) {
366                        log.debug("init(): failed to create datatype for attribute: ", ex);
367                        datatype = null;
368                    }
369                }
370
371                // Check if the datatype in the file is the native datatype
372                try {
373                    nativeTID        = H5.H5Tget_native_type(tid);
374                    isNativeDatatype = H5.H5Tequal(tid, nativeTID);
375                    log.trace("init(): isNativeDatatype={}", isNativeDatatype);
376                }
377                catch (Exception ex) {
378                    log.debug("init(): check if native type failure: ", ex);
379                }
380
381                inited = true;
382            }
383            catch (HDF5Exception ex) {
384                log.debug("init(): ", ex);
385            }
386            finally {
387                try {
388                    H5.H5Tclose(nativeTID);
389                }
390                catch (Exception ex2) {
391                    log.debug("init(): H5Tclose(nativeTID {}) failure: ", nativeTID, ex2);
392                }
393                try {
394                    H5.H5Tclose(tid);
395                }
396                catch (HDF5Exception ex2) {
397                    log.debug("init(): H5Tclose(tid {}) failure: ", tid, ex2);
398                }
399                try {
400                    H5.H5Sclose(sid);
401                }
402                catch (HDF5Exception ex2) {
403                    log.debug("init(): H5Sclose(sid {}) failure: ", sid, ex2);
404                }
405            }
406
407            close(aid);
408
409            startDims    = new long[rank];
410            selectedDims = new long[rank];
411
412            resetSelection();
413        }
414        else {
415            log.debug("init(): failed to open attribute");
416        }
417    }
418
419    /**
420     * Returns the datatype of the data object.
421     *
422     * @return the datatype of the data object.
423     */
424    @Override
425    public Datatype getDatatype()
426    {
427        if (!inited)
428            init();
429
430        if (datatype == null) {
431            long aid = HDF5Constants.H5I_INVALID_HID;
432            long tid = HDF5Constants.H5I_INVALID_HID;
433
434            aid = open();
435            if (aid >= 0) {
436                try {
437                    tid = H5.H5Aget_type(aid);
438                    log.trace("getDatatype(): isNativeDatatype={}", isNativeDatatype);
439                    if (!isNativeDatatype) {
440                        long tmptid = -1;
441                        try {
442                            tmptid = H5Datatype.toNative(tid);
443                            if (tmptid >= 0) {
444                                try {
445                                    H5.H5Tclose(tid);
446                                }
447                                catch (Exception ex2) {
448                                    log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex2);
449                                }
450                                tid = tmptid;
451                            }
452                        }
453                        catch (Exception ex) {
454                            log.debug("getDatatype(): toNative: ", ex);
455                        }
456                    }
457                    int nativeClass = H5.H5Tget_class(tid);
458                    if (nativeClass == HDF5Constants.H5T_REFERENCE) {
459                        long lsize = 1;
460                        long sid   = H5.H5Aget_space(aid);
461                        int rank   = H5.H5Sget_simple_extent_ndims(sid);
462                        if (rank > 0) {
463                            long dims[] = new long[rank];
464                            H5.H5Sget_simple_extent_dims(sid, dims, null);
465                            log.trace("getDatatype(): rank={}, dims={}", rank, dims);
466                            for (int j = 0; j < dims.length; j++) {
467                                lsize *= dims[j];
468                            }
469                        }
470                        datatype = new H5ReferenceType(getFileFormat(), lsize, tid);
471                    }
472                    else
473                        datatype = new H5Datatype(getFileFormat(), tid);
474                }
475                catch (Exception ex) {
476                    log.debug("getDatatype(): ", ex);
477                }
478                finally {
479                    try {
480                        H5.H5Tclose(tid);
481                    }
482                    catch (HDF5Exception ex) {
483                        log.debug("getDatatype(): H5Tclose(tid {}) failure: ", tid, ex);
484                    }
485                    try {
486                        H5.H5Aclose(aid);
487                    }
488                    catch (HDF5Exception ex) {
489                        log.debug("getDatatype(): H5Aclose(aid {}) failure: ", aid, ex);
490                    }
491                }
492            }
493        }
494
495        return datatype;
496    }
497
498    /**
499     * Returns the data buffer of the attribute in memory.
500     *
501     * If data is already loaded into memory, returns the data; otherwise, calls
502     * read() to read data from file into a memory buffer and returns the memory
503     * buffer.
504     *
505     * The whole attribute is read into memory. Users can also select
506     * a subset from the whole data. Subsetting is done in an implicit way.
507     *
508     * <b>How to Select a Subset</b>
509     *
510     * A selection is specified by three arrays: start, stride and count.
511     * <ol>
512     * <li>start: offset of a selection
513     * <li>stride: determines how many elements to move in each dimension
514     * <li>count: number of elements to select in each dimension
515     * </ol>
516     * getStartDims(), getStride() and getSelectedDims() returns the start,
517     * stride and count arrays respectively. Applications can make a selection
518     * by changing the values of the arrays.
519     *
520     * The following example shows how to make a subset. In the example, the
521     * attribute is a 4-dimensional array of [200][100][50][10], i.e. dims[0]=200;
522     * dims[1]=100; dims[2]=50; dims[3]=10; <br>
523     * We want to select every other data point in dims[1] and dims[2]
524     *
525     * <pre>
526     * int rank = attribute.getRank(); // number of dimensions of the attribute
527     * long[] dims = attribute.getDims(); // the dimension sizes of the attribute
528     * long[] selected = attribute.getSelectedDims(); // the selected size of the attribute
529     * long[] start = attribute.getStartDims(); // the offset of the selection
530     * long[] stride = attribute.getStride(); // the stride of the attribute
531     * int[] selectedIndex = attribute.getSelectedIndex(); // the selected dimensions for display
532     *
533     * // select dim1 and dim2 as 2D data for display,and slice through dim0
534     * selectedIndex[0] = 1;
535     * selectedIndex[1] = 2;
536     * selectedIndex[2] = 0;
537     *
538     * // reset the selection arrays
539     * for (int i = 0; i &lt; rank; i++) {
540     *     start[i] = 0;
541     *     selected[i] = 1;
542     *     stride[i] = 1;
543     * }
544     *
545     * // set stride to 2 on dim1 and dim2 so that every other data point is
546     * // selected.
547     * stride[1] = 2;
548     * stride[2] = 2;
549     *
550     * // set the selection size of dim1 and dim2
551     * selected[1] = dims[1] / stride[1];
552     * selected[2] = dims[1] / stride[2];
553     *
554     * // when H5ScalarAttr.getData() is called, the selection above will be used since
555     * // the dimension arrays are passed by reference. Changes of these arrays
556     * // outside the attribute object directly change the values of these array
557     * // in the attribute object.
558     * </pre>
559     *
560     * For H5ScalarAttr, the memory data buffer is a one-dimensional array of byte,
561     * short, int, float, double or String type based on the datatype of the
562     * attribute.
563     *
564     * @return the memory buffer of the attribute.
565     *
566     * @throws Exception if object can not be read
567     * @throws OutOfMemoryError if memory is exhausted
568     */
569    @Override
570    public Object getData() throws Exception, OutOfMemoryError
571    {
572        log.trace("getData(): isDataLoaded={}", isDataLoaded);
573        if (!isDataLoaded)
574            data = read(); // load the data, attributes read all data
575
576        nPoints = 1;
577        log.trace("getData(): selectedDims length={}", selectedDims.length);
578        int point_len = selectedDims.length;
579        // Partial data for 3 or more dimensions
580        if (rank > 2)
581            point_len = 3;
582        for (int j = 0; j < point_len; j++) {
583            log.trace("getData(): selectedDims[{}]={}", j, selectedDims[j]);
584            nPoints *= selectedDims[j];
585        }
586        log.trace("getData: read {}", nPoints);
587
588        // apply the selection for 3 or more dimensions
589        // selection only expects to use 3 selectedDims
590        //     where selectedIndex[0] is the row dimension
591        //     where selectedIndex[1] is the col dimension
592        //     where selectedIndex[2] is the frame dimension
593        if (rank > 2)
594            data = AttributeSelection();
595
596        return data;
597    }
598
599    /*
600     * (non-Javadoc)
601     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
602     */
603    @Override
604    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception
605    {
606        // not supported
607        throw new UnsupportedOperationException("copy operation unsupported for H5.");
608    }
609
610    /*
611     * (non-Javadoc)
612     *
613     * @see hdf.object.Attribute#readBytes()
614     */
615    @Override
616    public byte[] readBytes() throws HDF5Exception
617    {
618        byte[] theData = null;
619
620        if (!isInited())
621            init();
622
623        long aid = open();
624        if (aid >= 0) {
625            long tid = HDF5Constants.H5I_INVALID_HID;
626
627            try {
628                long[] lsize = {1};
629                for (int j = 0; j < selectedDims.length; j++)
630                    lsize[0] *= selectedDims[j];
631
632                tid       = H5.H5Aget_type(aid);
633                long size = H5.H5Tget_size(tid) * lsize[0];
634                log.trace("readBytes(): size = {}", size);
635
636                if (size < Integer.MIN_VALUE || size > Integer.MAX_VALUE)
637                    throw new Exception("Invalid int size");
638
639                theData = new byte[(int)size];
640
641                log.trace("readBytes(): read attribute id {} of size={}", tid, lsize);
642                H5.H5Aread(aid, tid, theData);
643            }
644            catch (Exception ex) {
645                log.debug("readBytes(): failed to read data: ", ex);
646            }
647            finally {
648                try {
649                    H5.H5Tclose(tid);
650                }
651                catch (HDF5Exception ex2) {
652                    log.debug("readBytes(): H5Tclose(tid {}) failure: ", tid, ex2);
653                }
654                close(aid);
655            }
656        }
657
658        return theData;
659    }
660
661    /**
662     * Reads the data from file.
663     *
664     * read() reads the data from file to a memory buffer and returns the memory
665     * buffer. The attribute object does not hold the memory buffer. To store the
666     * memory buffer in the attribute object, one must call getData().
667     *
668     * By default, the whole attribute is read into memory.
669     *
670     * For ScalarAttr, the memory data buffer is a one-dimensional array of byte,
671     * short, int, float, double or String type based on the datatype of the
672     * attribute.
673     *
674     * @return the data read from file.
675     *
676     * @see #getData()
677     * @see hdf.object.DataFormat#read()
678     *
679     * @throws Exception
680     *             if object can not be read
681     */
682    @Override
683    public Object read() throws Exception
684    {
685        Object readData = null;
686
687        if (!isInited())
688            init();
689
690        try {
691            readData = scalarAttributeCommonIO(H5File.IO_TYPE.READ, null);
692        }
693        catch (Exception ex) {
694            log.debug("read(): failed to read scalar attribute: ", ex);
695            throw new Exception("failed to read scalar attribute: " + ex.getMessage(), ex);
696        }
697
698        return readData;
699    }
700
701    /**
702     * Writes the given data buffer into this attribute in a file.
703     *
704     * @param buf
705     *            The buffer that contains the data values.
706     *
707     * @throws Exception
708     *             If there is an error at the HDF5 library level.
709     */
710    @Override
711    public void write(Object buf) throws Exception
712    {
713        if (this.getFileFormat().isReadOnly())
714            throw new Exception("cannot write to scalar attribute in file opened as read-only");
715
716        if (!buf.equals(data))
717            setData(buf);
718
719        if (parentObject == null) {
720            log.debug("write(Object): parent object is null; nowhere to write attribute to");
721            return;
722        }
723
724        ((MetaDataContainer)getParentObject()).writeMetadata(this);
725
726        try {
727            scalarAttributeCommonIO(H5File.IO_TYPE.WRITE, buf);
728        }
729        catch (Exception ex) {
730            log.debug("write(Object): failed to write to scalar attribute: ", ex);
731            throw new Exception("failed to write to scalar attribute: " + ex.getMessage(), ex);
732        }
733        resetSelection();
734    }
735
736    private Object scalarAttributeCommonIO(H5File.IO_TYPE ioType, Object writeBuf) throws Exception
737    {
738        H5Datatype dsDatatype = (H5Datatype)getDatatype();
739        Object theData        = null;
740
741        /*
742         * I/O type-specific pre-initialization.
743         */
744        if (ioType == H5File.IO_TYPE.WRITE) {
745            if (writeBuf == null) {
746                log.debug("scalarAttributeCommonIO(): writeBuf is null");
747                throw new Exception("write buffer is null");
748            }
749        }
750
751        long aid = open();
752        if (aid >= 0) {
753            log.trace("scalarAttributeCommonIO(): isDataLoaded={}", isDataLoaded);
754            try {
755                theData = AttributeCommonIO(aid, ioType, writeBuf);
756            }
757            finally {
758                close(aid);
759            }
760        }
761        else
762            log.debug("scalarAttributeCommonIO(): failed to open attribute");
763
764        return theData;
765    }
766
767    /* Implement interface Attribute */
768
769    /**
770     * Returns the HObject to which this Attribute is currently "attached".
771     *
772     * @return the HObject to which this Attribute is currently "attached".
773     */
774    @Override
775    public HObject getParentObject()
776    {
777        return parentObject;
778    }
779
780    /**
781     * Sets the HObject to which this Attribute is "attached".
782     *
783     * @param pObj
784     *            the new HObject to which this Attribute is "attached".
785     */
786    @Override
787    public void setParentObject(HObject pObj)
788    {
789        parentObject = pObj;
790    }
791
792    /**
793     * set a property for the attribute.
794     *
795     * @param key the attribute Map key
796     * @param value the attribute Map value
797     */
798    @Override
799    public void setProperty(String key, Object value)
800    {
801        properties.put(key, value);
802    }
803
804    /**
805     * get a property for a given key.
806     *
807     * @param key the attribute Map key
808     *
809     * @return the property
810     */
811    @Override
812    public Object getProperty(String key)
813    {
814        return properties.get(key);
815    }
816
817    /**
818     * get all property keys.
819     *
820     * @return the Collection of property keys
821     */
822    @Override
823    public Collection<String> getPropertyKeys()
824    {
825        return properties.keySet();
826    }
827
828    /**
829     * Returns the name of the object. For example, "Raster Image #2".
830     *
831     * @return The name of the object.
832     */
833    @Override
834    public final String getAttributeName()
835    {
836        return getName();
837    }
838
839    /**
840     * Retrieves the attribute data from the file.
841     *
842     * @return the attribute data.
843     *
844     * @throws Exception
845     *             if the data can not be retrieved
846     */
847    @Override
848    public final Object getAttributeData() throws Exception, OutOfMemoryError
849    {
850        return getData();
851    }
852
853    /**
854     * Returns the datatype of the attribute.
855     *
856     * @return the datatype of the attribute.
857     */
858    @Override
859    public final Datatype getAttributeDatatype()
860    {
861        return getDatatype();
862    }
863
864    /**
865     * Returns the space type for the attribute. It returns a
866     * negative number if it failed to retrieve the type information from
867     * the file.
868     *
869     * @return the space type for the attribute.
870     */
871    @Override
872    public final int getAttributeSpaceType()
873    {
874        return getSpaceType();
875    }
876
877    /**
878     * Returns the rank (number of dimensions) of the attribute. It returns a
879     * negative number if it failed to retrieve the dimension information from
880     * the file.
881     *
882     * @return the number of dimensions of the attribute.
883     */
884    @Override
885    public final int getAttributeRank()
886    {
887        return getRank();
888    }
889
890    /**
891     * Returns the selected size of the rows and columns of the attribute. It returns a
892     * negative number if it failed to retrieve the size information from
893     * the file.
894     *
895     * @return the selected size of the rows and colums of the attribute.
896     */
897    @Override
898    public final int getAttributePlane()
899    {
900        return (int)getWidth() * (int)getHeight();
901    }
902
903    /**
904     * Returns the array that contains the dimension sizes of the data value of
905     * the attribute. It returns null if it failed to retrieve the dimension
906     * information from the file.
907     *
908     * @return the dimension sizes of the attribute.
909     */
910    @Override
911    public final long[] getAttributeDims()
912    {
913        return getDims();
914    }
915
916    /**
917     * @return true if the dataspace is a NULL; otherwise, returns false.
918     */
919    @Override
920    public boolean isAttributeNULL()
921    {
922        return isNULL();
923    }
924
925    /**
926     * @return true if the data is a single scalar point; otherwise, returns false.
927     */
928    @Override
929    public boolean isAttributeScalar()
930    {
931        return isScalar();
932    }
933
934    /**
935     * Not for public use in the future.
936     *
937     * setData() is not safe to use because it changes memory buffer
938     * of the dataset object. Dataset operations such as write/read
939     * will fail if the buffer type or size is changed.
940     *
941     * @param d  the object data -must be an array of Objects
942     */
943    @Override
944    public void setAttributeData(Object d)
945    {
946        setData(d);
947    }
948
949    /**
950     * Writes the memory buffer of this dataset to file.
951     *
952     * @throws Exception if buffer can not be written
953     */
954    @Override
955    public void writeAttribute() throws Exception
956    {
957        write();
958    }
959
960    /**
961     * Writes the given data buffer into this attribute in a file.
962     *
963     * The data buffer is a vector that contains the data values of compound fields. The data is written
964     * into file as one data blob.
965     *
966     * @param buf
967     *            The vector that contains the data values of compound fields.
968     *
969     * @throws Exception
970     *             If there is an error at the library level.
971     */
972    @Override
973    public void writeAttribute(Object buf) throws Exception
974    {
975        write(buf);
976    }
977
978    /**
979     * Returns a string representation of the data value. For
980     * example, "0, 255".
981     *
982     * For a compound datatype, it will be a 1D array of strings with field
983     * members separated by the delimiter. For example,
984     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
985     * float} of three data points.
986     *
987     * @param delimiter
988     *            The delimiter used to separate individual data points. It
989     *            can be a comma, semicolon, tab or space. For example,
990     *            toString(",") will separate data by commas.
991     *
992     * @return the string representation of the data values.
993     */
994    @Override
995    public String toAttributeString(String delimiter)
996    {
997        return toString(delimiter, -1);
998    }
999
1000    /**
1001     * Returns a string representation of the data value. For
1002     * example, "0, 255".
1003     *
1004     * For a compound datatype, it will be a 1D array of strings with field
1005     * members separated by the delimiter. For example,
1006     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
1007     * float} of three data points.
1008     *
1009     * @param delimiter
1010     *            The delimiter used to separate individual data points. It
1011     *            can be a comma, semicolon, tab or space. For example,
1012     *            toString(",") will separate data by commas.
1013     * @param maxItems
1014     *            The maximum number of Array values to return
1015     *
1016     * @return the string representation of the data values.
1017     */
1018    @Override
1019    public String toAttributeString(String delimiter, int maxItems)
1020    {
1021        Object theData = originalBuf;
1022        if (theData == null) {
1023            log.debug("toAttributeString: value is null");
1024            return null;
1025        }
1026
1027        Class<? extends Object> valClass = theData.getClass();
1028        if (!valClass.isArray() && !getDatatype().isRef()) {
1029            log.trace("toAttributeString: finish - not array");
1030            String strValue = theData.toString();
1031            if (maxItems > 0 && strValue.length() > maxItems)
1032                // truncate the extra characters
1033                strValue = strValue.substring(0, maxItems);
1034            return strValue;
1035        }
1036
1037        int n          = 0;
1038        Datatype dtype = getDatatype();
1039        // value is an array
1040        if (valClass.isArray()) {
1041            n = Array.getLength(theData);
1042            if (dtype.isRef())
1043                n /= (int)dtype.getDatatypeSize();
1044        }
1045        else
1046            n = ((ArrayList<Object[]>)theData).size();
1047        if ((maxItems > 0) && (n > maxItems))
1048            n = maxItems;
1049
1050        return toString(theData, dtype, delimiter, n);
1051    }
1052
1053    @Override
1054    protected String toString(Object theData, Datatype theType, String delimiter, int count)
1055    {
1056        log.trace("toString: is_enum={} is_unsigned={} count={}", theType.isEnum(), theType.isUnsigned(),
1057                  count);
1058        StringBuilder sb                 = new StringBuilder();
1059        Class<? extends Object> valClass = theData.getClass();
1060        log.trace("toString:valClass={}", valClass);
1061
1062        H5Datatype dtype = (H5Datatype)theType;
1063        log.trace("toString: count={} isStdRef={}", count, dtype.isStdRef());
1064        if (dtype.isStdRef()) {
1065            return ((H5ReferenceType)dtype).toString(delimiter, count);
1066        }
1067        else if (dtype.isVLEN() && !dtype.isVarStr()) {
1068            log.trace("toString: vlen");
1069            String strValue;
1070
1071            for (int k = 0; k < count; k++) {
1072                Object value = Array.get(theData, k);
1073                if (value == null)
1074                    strValue = "null";
1075                else {
1076                    if (dtype.getDatatypeBase().isRef()) {
1077                        ArrayList<byte[]> ref_value = (ArrayList<byte[]>)value;
1078                        log.trace("toString: vlen value={}", ref_value);
1079                        strValue = "{";
1080                        for (int m = 0; m < ref_value.size(); m++) {
1081                            byte[] curBytes = ref_value.get(m);
1082                            if (m > 0)
1083                                strValue += ", ";
1084                            if (H5ReferenceType.zeroArrayCheck(curBytes))
1085                                strValue += "NULL";
1086                            else {
1087                                if (((H5Datatype)dtype.getDatatypeBase()).isStdRef()) {
1088                                    strValue += H5.H5Rget_obj_name(curBytes, HDF5Constants.H5P_DEFAULT);
1089                                }
1090                                else if (dtype.getDatatypeBase().getDatatypeSize() ==
1091                                         HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) {
1092                                    try {
1093                                        strValue += H5Datatype.descRegionDataset(
1094                                            parentObject.getFileFormat().getFID(), curBytes);
1095                                    }
1096                                    catch (Exception ex) {
1097                                        ex.printStackTrace();
1098                                    }
1099                                }
1100                                else if (dtype.getDatatypeBase().getDatatypeSize() ==
1101                                         HDF5Constants.H5R_OBJ_REF_BUF_SIZE) {
1102                                    try {
1103                                        strValue += H5Datatype.descReferenceObject(
1104                                            parentObject.getFileFormat().getFID(), curBytes);
1105                                    }
1106                                    catch (Exception ex) {
1107                                        ex.printStackTrace();
1108                                    }
1109                                }
1110                            }
1111                        }
1112                        strValue += "}";
1113                    }
1114                    else
1115                        strValue = value.toString();
1116                }
1117                if (k > 0)
1118                    sb.append(", ");
1119                sb.append(strValue);
1120            }
1121        }
1122        else if (dtype.isRef()) {
1123            log.trace("toString: ref");
1124            String strValue  = "NULL";
1125            byte[] rElements = null;
1126
1127            for (int k = 0; k < count; k++) {
1128                // need to iterate if type is ArrayList
1129                if (theData instanceof ArrayList)
1130                    rElements = (byte[])((ArrayList)theData).get(k);
1131                else
1132                    rElements = (byte[])Array.get(theData, k);
1133
1134                if (H5ReferenceType.zeroArrayCheck(rElements))
1135                    strValue = "NULL";
1136                else {
1137                    if (dtype.isStdRef()) {
1138                        strValue = H5.H5Rget_obj_name(rElements, HDF5Constants.H5P_DEFAULT);
1139                    }
1140                    else if (dtype.getDatatypeSize() == HDF5Constants.H5R_DSET_REG_REF_BUF_SIZE) {
1141                        try {
1142                            strValue = H5Datatype.descRegionDataset(parentObject.getFileFormat().getFID(),
1143                                                                    rElements);
1144                        }
1145                        catch (Exception ex) {
1146                            ex.printStackTrace();
1147                        }
1148                    }
1149                    else if (dtype.getDatatypeSize() == HDF5Constants.H5R_OBJ_REF_BUF_SIZE) {
1150                        try {
1151                            strValue = H5Datatype.descReferenceObject(parentObject.getFileFormat().getFID(),
1152                                                                      rElements);
1153                        }
1154                        catch (Exception ex) {
1155                            ex.printStackTrace();
1156                        }
1157                    }
1158                }
1159                if (k > 0)
1160                    sb.append(", ");
1161                sb.append(strValue);
1162            }
1163        }
1164        else {
1165            return super.toString(theData, theType, delimiter, count);
1166        }
1167
1168        return sb.toString();
1169    }
1170
1171    /* Implement interface H5Attribute */
1172
1173    /**
1174     * The general read and write attribute operations for hdf5 object data.
1175     *
1176     * @param attr_id
1177     *        the attribute to access
1178     * @param ioType
1179     *        the type of IO operation
1180     * @param objBuf
1181     *        the data buffer to use for write operation
1182     *
1183     * @return the attribute data
1184     *
1185     * @throws Exception
1186     *             if the data can not be retrieved
1187     */
1188    @Override
1189    public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objBuf) throws Exception
1190    {
1191        H5Datatype dsDatatype = (H5Datatype)getDatatype();
1192        Object theData        = null;
1193
1194        long dt_size = dsDatatype.getDatatypeSize();
1195        log.trace("AttributeCommonIO(): create native");
1196        long tid = dsDatatype.createNative();
1197
1198        if (ioType == H5File.IO_TYPE.READ) {
1199            log.trace("AttributeCommonIO():read ioType isNamed={} isEnum={} isText={} isRefObj={}",
1200                      dsDatatype.isNamed(), dsDatatype.isEnum(), dsDatatype.isText(), dsDatatype.isRefObj());
1201            log.trace("AttributeCommonIO():read ioType isVLEN={}", dsDatatype.isVLEN());
1202
1203            long lsize = 1;
1204            for (int j = 0; j < dims.length; j++)
1205                lsize *= dims[j];
1206            log.trace("AttributeCommonIO():read ioType dt_size={} lsize={}", dt_size, lsize);
1207
1208            try {
1209                if (dsDatatype.isVarStr()) {
1210                    String[] strs = new String[(int)lsize];
1211                    for (int j = 0; j < lsize; j++)
1212                        strs[j] = "";
1213                    try {
1214                        log.trace("AttributeCommonIO():read ioType H5Aread_VLStrings");
1215                        H5.H5Aread_VLStrings(attr_id, tid, strs);
1216                    }
1217                    catch (Exception ex) {
1218                        log.debug("AttributeCommonIO():read ioType H5Aread_VLStrings failure: ", ex);
1219                        ex.printStackTrace();
1220                    }
1221                    theData = strs;
1222                }
1223                else if (dsDatatype.isCompound()) {
1224                    String[] strs = new String[(int)lsize];
1225                    for (int j = 0; j < lsize; j++)
1226                        strs[j] = "";
1227                    try {
1228                        log.trace("AttributeCommonIO():read ioType H5AreadComplex");
1229                        H5.H5AreadComplex(attr_id, tid, strs);
1230                    }
1231                    catch (Exception ex) {
1232                        ex.printStackTrace();
1233                    }
1234                    theData = strs;
1235                }
1236                else if (dsDatatype.isVLEN()) {
1237                    log.trace("AttributeCommonIO():read ioType:VLEN-REF H5Aread isArray()={}",
1238                              dsDatatype.isArray());
1239                    theData = new ArrayList[(int)lsize];
1240                    for (int j = 0; j < lsize; j++)
1241                        ((ArrayList[])theData)[j] = new ArrayList<byte[]>();
1242
1243                    try {
1244                        H5.H5AreadVL(attr_id, tid, (Object[])theData);
1245                    }
1246                    catch (Exception ex) {
1247                        log.debug("AttributeCommonIO():read ioType:VLEN-REF H5Aread failure: ", ex);
1248                        ex.printStackTrace();
1249                    }
1250                }
1251                else {
1252                    Object attr_data = null;
1253                    try {
1254                        attr_data = H5Datatype.allocateArray(dsDatatype, (int)lsize);
1255                    }
1256                    catch (OutOfMemoryError e) {
1257                        log.debug("AttributeCommonIO():read ioType out of memory", e);
1258                        theData = null;
1259                    }
1260                    if (attr_data == null)
1261                        log.debug("AttributeCommonIO():read ioType allocateArray returned null");
1262
1263                    log.trace("AttributeCommonIO():read ioType H5Aread isArray()={}", dsDatatype.isArray());
1264                    try {
1265                        H5.H5Aread(attr_id, tid, attr_data);
1266                    }
1267                    catch (Exception ex) {
1268                        log.debug("AttributeCommonIO():read ioType H5Aread failure: ", ex);
1269                        ex.printStackTrace();
1270                    }
1271
1272                    /*
1273                     * Perform any necessary data conversions.
1274                     */
1275                    if (dsDatatype.isText() && convertByteToString && (attr_data instanceof byte[])) {
1276                        log.trace(
1277                            "AttributeCommonIO():read ioType isText: converting byte array to string array");
1278                        theData = byteToString((byte[])attr_data, (int)dsDatatype.getDatatypeSize());
1279                    }
1280                    else if (dsDatatype.isFloat() && dt_size == 16) {
1281                        log.trace(
1282                            "AttributeCommonIO():read ioType isFloat: converting byte array to BigDecimal array");
1283                        theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[])attr_data);
1284                    }
1285                    else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() &&
1286                             dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
1287                        log.trace(
1288                            "AttributeCommonIO():read ioType isArray and isFloat: converting byte array to BigDecimal array");
1289                        long[] arrayDims = dsDatatype.getArrayDims();
1290                        int asize        = (int)nPoints;
1291                        for (int j = 0; j < arrayDims.length; j++) {
1292                            asize *= arrayDims[j];
1293                        }
1294                        theData = ((H5Datatype)dsDatatype.getDatatypeBase())
1295                                      .byteToBigDecimal(0, asize, (byte[])attr_data);
1296                    }
1297                    else if (dsDatatype.isRef() && (attr_data instanceof byte[])) {
1298                        log.trace(
1299                            "AttributeCommonIO():read ioType isRef: converting byte array to List of bytes");
1300                        theData = new ArrayList<byte[]>((int)lsize);
1301                        for (int m = 0; m < (int)lsize; m++) {
1302                            byte[] curBytes = new byte[(int)dsDatatype.getDatatypeSize()];
1303                            try {
1304                                System.arraycopy(attr_data, m * (int)dt_size, curBytes, 0,
1305                                                 (int)dsDatatype.getDatatypeSize());
1306                                ((ArrayList<byte[]>)theData).add(curBytes);
1307                            }
1308                            catch (Exception err) {
1309                                log.trace("AttributeCommonIO(): arraycopy failure: ", err);
1310                            }
1311                        }
1312                    }
1313                    else
1314                        theData = attr_data;
1315                }
1316            }
1317            catch (HDF5DataFiltersException exfltr) {
1318                log.debug("AttributeCommonIO():read ioType read failure: ", exfltr);
1319                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
1320            }
1321            catch (Exception ex) {
1322                log.debug("AttributeCommonIO():read ioType read failure: ", ex);
1323                throw new Exception(ex.getMessage(), ex);
1324            }
1325            finally {
1326                dsDatatype.close(tid);
1327            }
1328            log.trace("AttributeCommonIO():read ioType data: {}", theData);
1329            originalBuf  = theData;
1330            isDataLoaded = true;
1331        } // H5File.IO_TYPE.READ
1332        else {
1333            /*
1334             * Perform any necessary data conversions before writing the data.
1335             *
1336             * Note that v-len strings do not get converted, regardless of
1337             * conversion request type.
1338             */
1339            Object tmpData = objBuf;
1340            try {
1341                // Check if we need to convert integer data
1342                String cname = objBuf.getClass().getName();
1343                char dname   = cname.charAt(cname.lastIndexOf("[") + 1);
1344                boolean doIntConversion =
1345                    (((dt_size == 1) && (dname == 'S')) || ((dt_size == 2) && (dname == 'I')) ||
1346                     ((dt_size == 4) && (dname == 'J')) || (dsDatatype.isUnsigned() && unsignedConverted));
1347
1348                if (doIntConversion) {
1349                    log.trace("AttributeCommonIO(): converting integer data to unsigned C-type integers");
1350                    tmpData = convertToUnsignedC(objBuf, null);
1351                }
1352                else if (dsDatatype.isText() && !dsDatatype.isVarStr() && convertByteToString &&
1353                         !(objBuf instanceof byte[])) {
1354                    log.trace("AttributeCommonIO(): converting string array to byte array");
1355                    tmpData = stringToByte((String[])objBuf, (int)dt_size);
1356                }
1357                else if (dsDatatype.isEnum() && (Array.get(objBuf, 0) instanceof String)) {
1358                    log.trace("AttributeCommonIO(): converting enum names to values");
1359                    tmpData = dsDatatype.convertEnumNameToValue((String[])objBuf);
1360                }
1361                else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
1362                    log.trace("AttributeCommonIO(): isFloat: converting BigDecimal array to byte array");
1363                    throw new Exception("data conversion failure: cannot write BigDecimal values");
1364                    // tmpData = dsDatatype.bigDecimalToByte(0, (int)nPoints, (BigDecimal[]) objBuf);
1365                }
1366            }
1367            catch (Exception ex) {
1368                log.debug("AttributeCommonIO(): data conversion failure: ", ex);
1369                throw new Exception("data conversion failure: " + ex.getMessage());
1370            }
1371
1372            /*
1373             * Actually write the data now that everything has been setup.
1374             */
1375            try {
1376                if (dsDatatype.isVarStr()) {
1377                    log.trace("AttributeCommonIO(): H5Awrite_VLStrings aid={} tid={}", attr_id, tid);
1378
1379                    H5.H5Awrite_VLStrings(attr_id, tid, (Object[])tmpData);
1380                }
1381                else if (dsDatatype.isVLEN() ||
1382                         (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isVLEN())) {
1383                    log.trace("AttributeCommonIO(): H5AwriteVL aid={} tid={}", attr_id, tid);
1384
1385                    H5.H5AwriteVL(attr_id, tid, (Object[])tmpData);
1386                }
1387                else {
1388                    log.trace("AttributeCommonIO(): dsDatatype.isRef()={} data is String={}",
1389                              dsDatatype.isRef(), tmpData instanceof String);
1390                    if (dsDatatype.isRef() && tmpData instanceof String) {
1391                        // reference is a path+name to the object
1392                        log.trace("AttributeCommonIO(): Attribute class is CLASS_REFERENCE");
1393                        log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid);
1394                        byte[] refBuf =
1395                            H5.H5Rcreate_object(getFID(), (String)tmpData, HDF5Constants.H5P_DEFAULT);
1396                        if (refBuf != null) {
1397                            H5.H5Awrite(attr_id, tid, refBuf);
1398                            H5.H5Rdestroy(refBuf);
1399                        }
1400                    }
1401                    else if (Array.get(tmpData, 0) instanceof String) {
1402                        int len     = ((String[])tmpData).length;
1403                        byte[] bval = Dataset.stringToByte((String[])tmpData, (int)dt_size);
1404                        if (bval != null && bval.length == dt_size * len) {
1405                            bval[bval.length - 1] = 0;
1406                            tmpData               = bval;
1407                        }
1408                        log.trace("AttributeCommonIO(): String={}: {}", tmpData);
1409                        log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid);
1410                        H5.H5Awrite(attr_id, tid, tmpData);
1411                    }
1412                    else {
1413                        log.trace("AttributeCommonIO(): H5Awrite aid={} tid={}", attr_id, tid);
1414                        H5.H5Awrite(attr_id, tid, tmpData);
1415                    }
1416                }
1417            }
1418            catch (Exception ex) {
1419                log.debug("AttributeCommonIO(): write failure: ", ex);
1420                throw new Exception(ex.getMessage());
1421            }
1422            finally {
1423                dsDatatype.close(tid);
1424            }
1425        } // H5File.IO_TYPE.WRITE
1426
1427        return theData;
1428    }
1429
    /**
     * Read a subset of an attribute for hdf5 object data.
     *
     * Copies the 2D selection (rows x columns at the selected frame) from
     * the cached buffer {@code originalBuf} into a freshly allocated buffer,
     * applying the same byte-to-string / BigDecimal conversions as a full
     * read.
     *
     * @return the selected attribute data
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    @Override
    public Object AttributeSelection() throws Exception
    {
        H5Datatype dsDatatype = (H5Datatype)getDatatype();
        // For array datatypes the element size is the base (member) type size.
        int dsSize            = (int)dsDatatype.getDatatypeSize();
        if (dsDatatype.isArray())
            dsSize = (int)dsDatatype.getDatatypeBase().getDatatypeSize();
        Object theData = H5Datatype.allocateArray(dsDatatype, (int)nPoints);
        if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
            log.trace("scalarAttributeSelection(): isText: converting byte array to string array");
            theData = byteToString((byte[])theData, dsSize);
        }
        else if (dsDatatype.isFloat() && dsSize == 16) {
            // 16-byte floats have no Java primitive; represent them as BigDecimal.
            log.trace("scalarAttributeSelection(): isFloat: converting byte array to BigDecimal array");
            theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[])theData);
        }
        else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() && dsSize == 16) {
            log.trace(
                "scalarAttributeSelection(): isArray and isFloat: converting byte array to BigDecimal array");
            long[] arrayDims = dsDatatype.getArrayDims();
            // Total element count = nPoints times the product of the array dims.
            int asize        = (int)nPoints;
            for (int j = 0; j < arrayDims.length; j++) {
                asize *= arrayDims[j];
            }
            theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[])theData);
        }
        Object theOrig = originalBuf;
        log.trace("scalarAttributeSelection(): originalBuf={} with datatype size={}", originalBuf, dsSize);

        // Copy the selection from originalBuf to theData
        // Only three dims are involved and selected data is 2 dimensions
        //     getHeight() is the row dimension
        //     getWidth() is the col dimension
        //     getDepth() is the frame dimension
        long[] start  = getStartDims();
        long curFrame = start[selectedIndex[2]];
        // NOTE(review): k is only used in the trace log below; it is never
        // advanced inside the loops — confirm whether that is intended.
        int k         = (int)startDims[selectedIndex[2]] * (int)getDepth();
        for (int col = 0; col < (int)getWidth(); col++) {
            for (int row = 0; row < (int)getHeight(); row++) {
                // index is computed but only implicitly via fromIndex/toIndex below.
                int index = row * (int)getWidth() + col;
                log.trace("scalarAttributeSelection(): point[{}] row:col:k={}:{}:{}", curFrame, row, col, k);
                // Source position: frame offset plus column-major (col,row) offset.
                int fromIndex =
                    ((int)curFrame * (int)getWidth() * (int)getHeight() + col * (int)getHeight() + row);
                int toIndex = (col * (int)getHeight() + row);
                int objSize = 1;
                if (dsDatatype.isArray()) {
                    long[] arrayDims = dsDatatype.getArrayDims();
                    // NOTE(review): objSize is set to the NUMBER of array dimensions,
                    // not the product of their sizes — confirm this is intended.
                    objSize          = arrayDims.length;
                    try {
                        System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize);
                    }
                    catch (Exception err) {
                        log.debug("scalarAttributeSelection(): arraycopy failure: ", err);
                    }
                }
                else if (dsDatatype.isStdRef()) {
                    // Standard references are fixed-size byte blocks; scale indices to bytes.
                    objSize   = (int)HDF5Constants.H5R_REF_BUF_SIZE;
                    fromIndex = fromIndex * HDF5Constants.H5R_REF_BUF_SIZE;
                    toIndex   = toIndex * HDF5Constants.H5R_REF_BUF_SIZE;
                    try {
                        System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize);
                    }
                    catch (Exception err) {
                        log.debug("scalarAttributeSelection(): arraycopy failure: ", err);
                    }
                }
                else {
                    if (theOrig instanceof ArrayList) {
                        if (dsDatatype.isRef()) {
                            // Each reference is stored as its own byte[] in the list.
                            byte[] rElements = (byte[])((ArrayList)theOrig).get(fromIndex);
                            try {
                                System.arraycopy(rElements, 0, theData, toIndex * dsSize, dsSize);
                            }
                            catch (Exception err) {
                                log.trace("scalarAttributeSelection(): refarraycopy failure: ", err);
                            }
                        }
                        else {
                            Object value = Array.get(theOrig, fromIndex);
                            log.trace("scalarAttributeSelection(): value={}", value);
                            ((ArrayList<Object>)theData).add(toIndex, value);
                        }
                    }
                    else
                        // Non-list buffers are returned whole — no per-point copy.
                        theData = theOrig;
                }
            }
        }

        log.trace("scalarAttributeSelection(): theData={}", theData);
        return theData;
    }
1530}