/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.fits;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.ScalarDS;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group
 * or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see <a
 * href="https://support.hdfgroup.org/releases/hdf5/v1_14/v1_14_5/documentation/doxygen/_h5_a__u_g.html#sec_attribute">Attributes
 * in the HDF5 User Guide</a>.
 *
 * The following code is an example of creating an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Create an unsigned 1-byte integer datatype
 * Datatype type = null;
 * try {
 *     type = new FitsDatatype(Datatype.CLASS_INTEGER, // class
 *                             1,                      // size in bytes
 *                             Datatype.ORDER_LE,      // byte order
 *                             Datatype.SIGN_NONE);    // unsigned
 * }
 * catch (Exception ex) {}
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute attached to the parent object parentObj
 * FitsAttribute dataRange = new FitsAttribute(parentObj, name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object.
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of a FitsAttribute will be a 1D array of integers, floats or strings.
 * For a compound datatype, it will be a 1D array of strings with field members separated by a comma. For
 * example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, float} of three data
 * points.
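 *
 * A minimal sketch of reading an existing attribute's value back into memory (the variable
 * {@code attr} below is only an illustrative FitsAttribute obtained from an open file):
 *
 * <pre>
 * try {
 *     Object value = attr.getAttributeData();
 *     if (value instanceof int[]) {
 *         int[] ints = (int[]) value;
 *         // process the integer values ...
 *     }
 * }
 * catch (Exception ex) {}
 * </pre>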
 *
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class FitsAttribute extends ScalarDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(FitsAttribute.class);

    /** The HObject to which this FitsAttribute is attached, Attribute interface */
    protected HObject parentObject;

    /** Additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Creates an attribute with the specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format; for example, the implementation of
     * Attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new FitsDatatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
     *                                 Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * FitsAttribute attr = new FitsAttribute(parentObj, attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this FitsAttribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public FitsAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims)
    {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Creates an attribute with the specified name and value.
     *
     * For a scalar attribute, the dimension size can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general class
     * and is independent of the file format; for example, the implementation of
     * Attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = &quot;CLASS&quot;;
     * String[] classValue = { &quot;IMAGE&quot; };
     * Datatype attrType = null;
     * try {
     *     attrType = new FitsDatatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
     *                                 Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * FitsAttribute attr = new FitsAttribute(parentObj, attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this FitsAttribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({"rawtypes", "unchecked", "deprecation"})
    public FitsAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims,
                         Object attrValue)
    {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("FitsAttribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data         = attrValue;
            originalBuf  = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] {1};
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims   = new long[rank];
        startDims      = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}", attrName,
                  getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open()
    {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        return -1;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid)
    {
    }

    @Override
    public void init()
    {
        if (inited) {
            resetSelection();
            log.trace("init(): FitsAttribute already inited");
            return;
        }
    }

    /**
     * Reads the data from the file.
     *
     * read() reads the data from the file into a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
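     *
     * A minimal usage sketch (the variable {@code attr} is only an illustrative FitsAttribute):
     *
     * <pre>
     * Object buf = attr.read(); // the attribute values as a 1D array in memory
     * </pre>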
     *
     * @return the data read from the file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if the object cannot be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError
    {
        if (!inited)
            init();

        return data;
    }

    /* Implement abstract Dataset */

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for FITS.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for FITS.");
    }

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            the data to write
     *
     * @throws Exception
     *             if the data cannot be written
     */
    @Override
    public void write(Object buf) throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("write operation unsupported for FITS.");
    }

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() { return parentObject; }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) { parentObject = pObj; }

    /**
     * Sets a property for the attribute.
     *
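     * A minimal usage sketch (the key name "units" below is only an illustrative example):
     *
     * <pre>
     * attr.setProperty("units", "counts");
     * Object units = attr.getProperty("units");
     * </pre>
     *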
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) { properties.put(key, value); }

    /**
     * Gets the property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) { return properties.get(key); }

    /**
     * Gets all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() { return properties.keySet(); }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() { return getName(); }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data cannot be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError { return getData(); }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() { return getDatatype(); }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() { return getSpaceType(); }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() { return getRank(); }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() { return (int)getWidth() * (int)getHeight(); }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() { return getDims(); }

    /**
     * @return true if the dataspace is NULL; otherwise, returns false.
     */
    @Override
    public boolean isAttributeNULL()
    {
        return isNULL();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns false.
     */
    public boolean isAttributeScalar() { return isScalar(); }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d  the object data, must be an array of Objects
     */
    public void setAttributeData(Object d) { setData(d); }

    /**
     * Writes the memory buffer of this dataset to the file.
     *
     * @throws Exception if the buffer cannot be written
     */
    public void writeAttribute() throws Exception { write(); }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into the file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception { write(buf); }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
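     *
     * A minimal usage sketch (the variable {@code attr} is only an illustrative FitsAttribute);
     * it prints all of the attribute's values separated by commas:
     *
     * <pre>
     * System.out.println(attr.toAttributeString(","));
     * </pre>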
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) { return toString(delimiter, -1); }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) { return toString(delimiter, maxItems); }
}