/*****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;
import hdf.object.ScalarDS;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset,
 * group or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see <a
 * href="https://support.hdfgroup.org/releases/hdf5/v1_14/v1_14_5/documentation/doxygen/_h5_a__u_g.html#sec_attribute">HDF5
 * Attributes in the HDF5 User Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute
 * Attribute dataRange = new Attribute(name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object,
 * @see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For an atomic datatype, the value of an H4ScalarAttribute will be a 1D array of integers, floats, or
 * strings.
 *
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class H4ScalarAttribute extends ScalarDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4ScalarAttribute.class);

    /** The HObject to which this H4ScalarAttribute is attached, Attribute interface */
    protected HObject parentObject;

    /** Additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;
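    /*
     * Usage sketch (illustrative only; file and object names are hypothetical and error
     * handling is omitted): attributes of an existing HDF4 object are normally obtained
     * through the parent object's metadata list rather than constructed directly.
     *
     *   FileFormat h4file = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4)
     *                                 .createInstance("example.hdf", FileFormat.READ);
     *   h4file.open();
     *   HObject dset = h4file.get("/GridData"); // e.g., an H4SDS
     *   for (Object meta : ((MetaDataContainer) dset).getMetadata()) {
     *       if (meta instanceof H4ScalarAttribute) {
     *           H4ScalarAttribute attr = (H4ScalarAttribute) meta;
     *           System.out.println(attr.getAttributeName() + " = " + attr.toAttributeString(", "));
     *       }
     *   }
     *   h4file.close();
     */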
    /**
     * Create an attribute with specified name, data type and dimension sizes.
     *
     * For a scalar attribute, the dimension sizes can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general interface
     * and is independent of the file format; for example, the implementation of
     * attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
     *                               Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new Attribute(attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H4ScalarAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims)
    {
        this(parentObj, attrName, attrType, attrDims, null);
    }
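    /*
     * Construction sketch (names are hypothetical and checked exceptions are omitted):
     * unlike the generic Attribute example above, both constructors of this class take the
     * parent HObject as their first argument; the five-argument form below also attaches a
     * value immediately.
     *
     *   Datatype attrType = new H4Datatype(Datatype.CLASS_INTEGER, 1, Datatype.ORDER_LE,
     *                                      Datatype.SIGN_NONE);
     *   long[] attrDims = { 2 };
     *   int[] attrValue = { 0, 255 };
     *   H4ScalarAttribute attr =
     *       new H4ScalarAttribute(parentDset, "Data range", attrType, attrDims, attrValue);
     */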
    /**
     * Create an attribute with a specific name and value.
     *
     * For a scalar attribute, the dimension sizes can be either an array of size one
     * or null, and the rank can be either 1 or zero. Attribute is a general interface
     * and is independent of the file format; for example, the implementation of
     * attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and
     * value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
     *                               Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new Attribute(attrName, attrType, attrDims, classValue);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({"rawtypes", "unchecked", "deprecation"})
    public H4ScalarAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims,
                             Object attrValue)
    {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H4ScalarAttribute: start {}", parentObj);
        this.parentObject = parentObj;

        unsignedConverted = false;

        datatype = attrType;

        if (attrValue != null) {
            data         = attrValue;
            originalBuf  = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] {1};
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims   = new long[rank];
        startDims      = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}", attrName,
                  getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open()
    {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        long aid    = -1;
        long pObjID = -1;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                    log.trace("open(): FILE_TYPE_HDF4");
                    /*
                     * TODO: Get type of HDF4 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid)
    {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                log.trace("close(): FILE_TYPE_HDF4");
                /*
                 * TODO: Get type of HDF4 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init()
    {
        if (inited) {
            resetSelection();
            log.trace("init(): Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
            log.trace("init(): FILE_TYPE_HDF4");
            /*
             * TODO: If HDF4 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from file.
     *
     * read() reads the data from file to a memory buffer and returns the memory
     * buffer. The dataset object does not hold the memory buffer. To store the
     * memory buffer in the dataset object, one must call getData().
     *
     * By default, the whole dataset is read into memory. Users can also select
     * a subset to read. Subsetting is done in an implicit way.
     *
     * @return the data read from file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if the object can not be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError
    {
        if (!inited)
            init();

        return data;
    }
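    /*
     * Read sketch (illustrative; "attr" is hypothetical): for an HDF4 attribute the value
     * buffer is normally populated when the attribute object is created, so read() simply
     * returns the in-memory data. getAttributeData() is the Attribute-interface equivalent.
     *
     *   int[] range = (int[]) attr.getAttributeData();
     */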
    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             if data can not be written
     */
    @Override
    public void write(Object buf) throws Exception
    {
        log.trace("function of dataset: write(Object) start");
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer)getParentObject()).writeMetadata(this);
    }
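    /*
     * Write-back sketch (illustrative; "attr" is hypothetical): write(Object) replaces the
     * data buffer and then delegates to the parent object's writeMetadata(), so the
     * attribute must be attached to a parent object and the file must be writable.
     *
     *   int[] newRange = { 0, 1023 };
     *   attr.write(newRange); // roughly setAttributeData(newRange) followed by writeAttribute()
     */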
    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for H4.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for H4.");
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() { return parentObject; }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) { parentObject = pObj; }

    /**
     * Set a property for the attribute.
     *
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) { properties.put(key, value); }

    /**
     * Get a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) { return properties.get(key); }

    /**
     * Get all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() { return properties.keySet(); }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() { return getName(); }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data can not be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError { return getData(); }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() { return getDatatype(); }

    /**
     * Returns the space type for the attribute. It returns a
     * negative number if it failed to retrieve the type information from
     * the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() { return getSpaceType(); }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a
     * negative number if it failed to retrieve the dimension information from
     * the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() { return getRank(); }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a
     * negative number if it failed to retrieve the size information from
     * the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() { return (int)getWidth() * (int)getHeight(); }

    /**
     * Returns the array that contains the dimension sizes of the data value of
     * the attribute. It returns null if it failed to retrieve the dimension
     * information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() { return getDims(); }

    /**
     * @return true if the dataspace is a NULL; otherwise, returns false.
     */
    @Override
    public boolean isAttributeNULL()
    {
        return isNULL();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns false.
     */
    public boolean isAttributeScalar() { return isScalar(); }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer
     * of the dataset object. Dataset operations such as write/read
     * will fail if the buffer type or size is changed.
     *
     * @param d the object data - must be an array of Objects
     */
    public void setAttributeData(Object d) { setData(d); }

    /**
     * Writes the memory buffer of this dataset to file.
     *
     * @throws Exception if buffer can not be written
     */
    public void writeAttribute() throws Exception { write(); }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception { write(buf); }

    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) { return toString(delimiter, -1); }
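    /*
     * Formatting sketch (illustrative; "attr" is hypothetical and holds {0, 255}):
     *
     *   attr.toAttributeString(", ");    // "0, 255"
     *   attr.toAttributeString(", ", 1); // limits the output to one value (see overload below)
     */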
    /**
     * Returns a string representation of the data value. For
     * example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field
     * members separated by the delimiter. For example,
     * "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int,
     * float} of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It
     *            can be a comma, semicolon, tab or space. For example,
     *            toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) { return toString(delimiter, maxItems); }
}