/****************************************************************************
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of the HDF Java Products distribution.                  *
 * The full copyright notice, including terms governing use, modification,   *
 * and redistribution, is contained in the COPYING file, which can be found  *
 * at the root of the source code distribution tree,                         *
 * or in https://www.hdfgroup.org/licenses.                                  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 ****************************************************************************/

package hdf.object.h4;

import java.lang.reflect.Array;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import hdf.object.Attribute;
import hdf.object.CompoundDS;
import hdf.object.CompoundDataFormat;
import hdf.object.DataFormat;
import hdf.object.Dataset;
import hdf.object.Datatype;
import hdf.object.FileFormat;
import hdf.object.Group;
import hdf.object.HObject;
import hdf.object.MetaDataContainer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An attribute is a (name, value) pair of metadata attached to a primary data object such as a dataset, group
 * or named datatype.
 *
 * Like a dataset, an attribute has a name, datatype and dataspace.
 *
 * For more details on attributes, see <a
 * href="https://support.hdfgroup.org/releases/hdf5/v1_14/v1_14_5/documentation/doxygen/_h5_a__u_g.html#sec_attribute">HDF5
 * Attributes in the HDF5 User Guide</a>.
 *
 * The following code is an example of an attribute with a 1D integer array of two elements.
 *
 * <pre>
 * // Example of creating a new attribute
 * // The name of the new attribute
 * String name = "Data range";
 * // Creating an unsigned 1-byte integer datatype
 * Datatype type = new Datatype(Datatype.CLASS_INTEGER, // class
 *                              1,                      // size in bytes
 *                              Datatype.ORDER_LE,      // byte order
 *                              Datatype.SIGN_NONE);    // unsigned
 * // 1-D array of size two
 * long[] dims = {2};
 * // The value of the attribute
 * int[] value = {0, 255};
 * // Create a new attribute
 * Attribute dataRange = new Attribute(name, type, dims);
 * // Set the attribute value
 * dataRange.setValue(value);
 * // See FileFormat.writeAttribute() for how to attach an attribute to an object.
 * &#64;see hdf.object.FileFormat#writeAttribute(HObject, Attribute, boolean)
 * </pre>
 *
 * For a compound datatype, the value of an H4CompoundAttribute will be a 1D array of strings with field
 * members separated by a comma. For example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of
 * {int, float} of three data points.
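 *
 * The following sketch is an illustration only (not part of this class); it shows one way such a string
 * representation could be split back into its field members with plain string handling. The name
 * "compoundAttr" is a hypothetical H4CompoundAttribute instance.
 *
 * <pre>
 * // Hypothetical sketch: parse the "{int, float}" entries of a compound attribute's string form.
 * String repr = compoundAttr.toAttributeString(", "); // e.g. "{0, 10.5}, {255, 20.0}, {512, 30.0}"
 * for (String point : repr.split("\\}\\s*,\\s*\\{")) {
 *     String[] fields = point.replace("{", "").replace("}", "").split(",");
 *     int first    = Integer.parseInt(fields[0].trim());  // e.g. 0, 255, 512
 *     float second = Float.parseFloat(fields[1].trim());  // e.g. 10.5f, 20.0f, 30.0f
 * }
 * </pre>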
 *
 * @see hdf.object.Datatype
 *
 * @version 2.0 4/2/2018
 * @author Peter X. Cao, Jordan T. Henderson
 */
public class H4CompoundAttribute extends CompoundDS implements Attribute {

    private static final long serialVersionUID = 2072473407027648309L;

    private static final Logger log = LoggerFactory.getLogger(H4CompoundAttribute.class);

    /** The HObject to which this H4CompoundAttribute is attached, Attribute interface */
    protected HObject parentObject;

    /** Additional information and properties for the attribute, Attribute interface */
    private transient Map<String, Object> properties;

    /**
     * Create an attribute with the specified name, datatype and dimension sizes.
     *
     * For a scalar attribute, the dimension sizes can be either an array of size one or null, and the rank
     * can be either 1 or 0. Attribute is a general class and is independent of the file format, e.g., the
     * implementation of Attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
     *                               Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new Attribute(attrName, attrType, attrDims);
     * attr.setValue(classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     *
     * @see hdf.object.Datatype
     */
    public H4CompoundAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims)
    {
        this(parentObj, attrName, attrType, attrDims, null);
    }

    /**
     * Create an attribute with a specific name and value.
     *
     * For a scalar attribute, the dimension sizes can be either an array of size one or null, and the rank
     * can be either 1 or 0. Attribute is a general class and is independent of the file format, e.g., the
     * implementation of Attribute applies to both HDF4 and HDF5.
     *
     * The following example creates a string attribute with the name "CLASS" and value "IMAGE".
     *
     * <pre>
     * long[] attrDims = { 1 };
     * String attrName = "CLASS";
     * String[] classValue = { "IMAGE" };
     * Datatype attrType = null;
     * try {
     *     attrType = new H4Datatype(Datatype.CLASS_STRING, classValue[0].length() + 1, Datatype.NATIVE,
     *                               Datatype.NATIVE);
     * }
     * catch (Exception ex) {}
     * Attribute attr = new Attribute(attrName, attrType, attrDims, classValue);
     * </pre>
     *
     * @param parentObj
     *            the HObject to which this Attribute is attached.
     * @param attrName
     *            the name of the attribute.
     * @param attrType
     *            the datatype of the attribute.
     * @param attrDims
     *            the dimension sizes of the attribute, null for a scalar attribute
     * @param attrValue
     *            the value of the attribute, null if no value
     *
     * @see hdf.object.Datatype
     */
    @SuppressWarnings({"rawtypes", "unchecked", "deprecation"})
    public H4CompoundAttribute(HObject parentObj, String attrName, Datatype attrType, long[] attrDims,
                               Object attrValue)
    {
        super((parentObj == null) ? null : parentObj.getFileFormat(), attrName,
              (parentObj == null) ? null : parentObj.getFullName(), null);

        log.trace("H4CompoundAttribute: start {}", parentObj);

        this.parentObject = parentObj;

        datatype = attrType;

        if (attrValue != null) {
            data         = attrValue;
            originalBuf  = attrValue;
            isDataLoaded = true;
        }
        properties = new HashMap();

        if (attrDims == null) {
            rank = 1;
            dims = new long[] {1};
        }
        else {
            dims = attrDims;
            rank = dims.length;
        }

        selectedDims   = new long[rank];
        startDims      = new long[rank];
        selectedStride = new long[rank];

        log.trace("attrName={}, attrType={}, attrValue={}, rank={}, isUnsigned={}", attrName,
                  getDatatype().getDescription(), data, rank, getDatatype().isUnsigned());

        resetSelection();
    }
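
    /*
     * Illustration only (not part of this class): a minimal sketch of creating a compound attribute and
     * attaching it to an existing HDF4 object. The names "sds" (an HObject already opened from an HDF4
     * file) and "compoundType" (a compound Datatype describing the fields) are assumed to exist; the
     * string form of the values follows the "{field1, field2}" convention documented for this class.
     *
     * H4CompoundAttribute calib =
     *     new H4CompoundAttribute(sds, "calibration", compoundType, new long[] {3},
     *                             new String[] {"{0, 10.5}", "{255, 20.0}", "{512, 30.0}"});
     * calib.writeAttribute(); // delegates to the parent object's writeMetadata()
     */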

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#open()
     */
    @Override
    public long open()
    {
        if (parentObject == null) {
            log.debug("open(): attribute's parent object is null");
            return -1;
        }

        long aid    = -1;
        long pObjID = -1;

        try {
            pObjID = parentObject.open();
            if (pObjID >= 0) {
                if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                    log.trace("open(): FILE_TYPE_HDF4");
                    /*
                     * TODO: Get type of HDF4 object this is attached to and retrieve attribute info.
                     */
                }
            }

            log.trace("open(): aid={}", aid);
        }
        catch (Exception ex) {
            log.debug("open(): Failed to open attribute {}: ", getName(), ex);
            aid = -1;
        }
        finally {
            parentObject.close(pObjID);
        }

        return aid;
    }

    /*
     * (non-Javadoc)
     *
     * @see hdf.object.HObject#close(int)
     */
    @Override
    public void close(long aid)
    {
        if (aid >= 0) {
            if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
                log.trace("close(): FILE_TYPE_HDF4");
                /*
                 * TODO: Get type of HDF4 object this is attached to and close attribute.
                 */
            }
        }
    }

    @Override
    public void init()
    {
        if (inited) {
            resetSelection();
            log.trace("init(): Attribute already inited");
            return;
        }

        if (this.getFileFormat().isThisType(FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF4))) {
            log.trace("init(): FILE_TYPE_HDF4");
            /*
             * TODO: If HDF4 attribute object needs to init dependent objects.
             */
            inited = true;
        }

        resetSelection();
    }

    /**
     * Reads the data from the file.
     *
     * read() reads the data from the file into a memory buffer and returns the memory buffer. The attribute
     * object does not hold the memory buffer. To store the memory buffer in the attribute object, one must
     * call getData().
     *
     * By default, the whole attribute is read into memory. Users can also select a subset to read.
     * Subsetting is done in an implicit way.
     *
     * @return the data read from the file.
     *
     * @see #getData()
     *
     * @throws Exception
     *             if the object cannot be read
     * @throws OutOfMemoryError
     *             if memory is exhausted
     */
    @Override
    public Object read() throws Exception, OutOfMemoryError
    {
        if (!inited)
            init();

        /*
         * TODO: For now, convert a compound Attribute's data (String[]) into a List for
         * convenient processing.
         */
        if (getDatatype().isCompound() && !(data instanceof List)) {
            List<String> valueList = Arrays.asList((String[])data);

            data = valueList;
        }

        return data;
    }
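
    /*
     * Illustration only: a minimal sketch of reading the attribute value through the Attribute interface.
     * "attr" is an assumed H4CompoundAttribute obtained from a parent object's metadata. Per read() above,
     * a compound attribute's value is handed back as a list of strings, one entry per data point.
     *
     * Object value = attr.getAttributeData();
     * if (value instanceof List) {
     *     for (Object point : (List<?>) value)
     *         System.out.println(attr.getAttributeName() + ": " + point); // e.g. "{0, 10.5}"
     * }
     */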

    /* Implement abstract Dataset */

    /**
     * Writes a memory buffer to the object in the file.
     *
     * @param buf
     *            The buffer that contains the data values.
     *
     * @throws Exception
     *             if the data cannot be written
     */
    @Override
    public void write(Object buf) throws Exception
    {
        log.trace("write(Object): start");
        if (!buf.equals(data))
            setData(buf);

        init();

        if (parentObject == null) {
            log.debug("write(Object): parent object is null; nowhere to write attribute to");
            return;
        }

        ((MetaDataContainer)getParentObject()).writeMetadata(this);
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
     */
    @Override
    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("copy operation unsupported for H4.");
    }

    /*
     * (non-Javadoc)
     * @see hdf.object.Dataset#readBytes()
     */
    @Override
    public byte[] readBytes() throws Exception
    {
        // not supported
        throw new UnsupportedOperationException("readBytes operation unsupported for H4.");
    }

    /**
     * Given an array of bytes representing a compound Datatype, a start index and a length, converts len
     * bytes into the correct Object type and returns it.
     *
     * @param data
     *            The byte array representing the data of the compound Datatype
     * @param data_type
     *            The type of data to convert the bytes to
     * @param start
     *            The start index of the bytes to get
     * @param len
     *            The number of bytes to convert
     * @return The converted type of the bytes
     */
    protected Object convertCompoundByteMember(byte[] data, long data_type, long start, long len)
    {
        return null;
    }

    /**
     * Converts the data values of this data object to appropriate Java integers if they are unsigned
     * integers.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertFromUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    @Override
    public Object convertFromUnsignedC()
    {
        throw new UnsupportedOperationException("H4CompoundAttribute:convertFromUnsignedC Unsupported operation.");
    }

    /**
     * Converts Java integer data values of this data object back to unsigned C-type integer data if they
     * are unsigned integers.
     *
     * @see hdf.object.Dataset#convertToUnsignedC(Object)
     * @see hdf.object.Dataset#convertToUnsignedC(Object, Object)
     *
     * @return the converted data buffer.
     */
    @Override
    public Object convertToUnsignedC()
    {
        throw new UnsupportedOperationException("H4CompoundAttribute:convertToUnsignedC Unsupported operation.");
    }

    /* Implement interface Attribute */

    /**
     * Returns the HObject to which this Attribute is currently "attached".
     *
     * @return the HObject to which this Attribute is currently "attached".
     */
    public HObject getParentObject() { return parentObject; }

    /**
     * Sets the HObject to which this Attribute is "attached".
     *
     * @param pObj
     *            the new HObject to which this Attribute is "attached".
     */
    public void setParentObject(HObject pObj) { parentObject = pObj; }
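
    /*
     * Illustration only: how the parent object relates to persistence. write()/writeAttribute() above hand
     * the attribute to the parent's MetaDataContainer.writeMetadata(), so an attribute can be re-attached
     * to a different HDF4 object before writing. "attr" and "otherSds" are assumed to exist.
     *
     * attr.setParentObject(otherSds);
     * attr.writeAttribute(); // persisted via ((MetaDataContainer) otherSds).writeMetadata(attr)
     */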

    /**
     * Set a property for the attribute.
     *
     * @param key the attribute Map key
     * @param value the attribute Map value
     */
    public void setProperty(String key, Object value) { properties.put(key, value); }

    /**
     * Get a property for a given key.
     *
     * @param key the attribute Map key
     *
     * @return the property
     */
    public Object getProperty(String key) { return properties.get(key); }

    /**
     * Get all property keys.
     *
     * @return the Collection of property keys
     */
    public Collection<String> getPropertyKeys() { return properties.keySet(); }

    /**
     * Returns the name of the object. For example, "Raster Image #2".
     *
     * @return The name of the object.
     */
    public final String getAttributeName() { return getName(); }

    /**
     * Retrieves the attribute data from the file.
     *
     * @return the attribute data.
     *
     * @throws Exception
     *             if the data cannot be retrieved
     */
    public final Object getAttributeData() throws Exception, OutOfMemoryError { return getData(); }

    /**
     * Returns the datatype of the attribute.
     *
     * @return the datatype of the attribute.
     */
    public final Datatype getAttributeDatatype() { return getDatatype(); }

    /**
     * Returns the space type for the attribute. It returns a negative number if it failed to retrieve the
     * type information from the file.
     *
     * @return the space type for the attribute.
     */
    public final int getAttributeSpaceType() { return getSpaceType(); }

    /**
     * Returns the rank (number of dimensions) of the attribute. It returns a negative number if it failed
     * to retrieve the dimension information from the file.
     *
     * @return the number of dimensions of the attribute.
     */
    public final int getAttributeRank() { return getRank(); }

    /**
     * Returns the selected size of the rows and columns of the attribute. It returns a negative number if
     * it failed to retrieve the size information from the file.
     *
     * @return the selected size of the rows and columns of the attribute.
     */
    public final int getAttributePlane() { return (int)getWidth() * (int)getHeight(); }

    /**
     * Returns the array that contains the dimension sizes of the data value of the attribute. It returns
     * null if it failed to retrieve the dimension information from the file.
     *
     * @return the dimension sizes of the attribute.
     */
    public final long[] getAttributeDims() { return getDims(); }

    /**
     * @return true if the dataspace is a NULL; otherwise, returns false.
     */
    @Override
    public boolean isAttributeNULL()
    {
        return isNULL();
    }

    /**
     * @return true if the data is a single scalar point; otherwise, returns false.
     */
    public boolean isAttributeScalar() { return isScalar(); }

    /**
     * Not for public use in the future.
     *
     * setData() is not safe to use because it changes the memory buffer of the dataset object. Dataset
     * operations such as write/read will fail if the buffer type or size is changed.
     *
     * @param d the object data, which must be an array of Objects
     */
    public void setAttributeData(Object d) { setData(d); }

    /**
     * Writes the memory buffer of this attribute to the file.
     *
     * @throws Exception if the buffer cannot be written
     */
    public void writeAttribute() throws Exception { write(); }

    /**
     * Writes the given data buffer into this attribute in a file.
     *
     * The data buffer is a vector that contains the data values of compound fields. The data is written
     * into the file as one data blob.
     *
     * @param buf
     *            The vector that contains the data values of compound fields.
     *
     * @throws Exception
     *             If there is an error at the library level.
     */
    public void writeAttribute(Object buf) throws Exception { write(buf); }

    /**
     * Returns a string representation of the data value. For example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field members separated by the
     * delimiter. For example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, float}
     * of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It can be a comma, semicolon, tab
     *            or space. For example, toString(",") will separate data by commas.
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter) { return toString(delimiter, -1); }
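
    /*
     * Illustration only: the delimiter controls how individual data points are joined, and the maxItems
     * overload below caps how many points are rendered. "attr" is an assumed H4CompoundAttribute holding
     * the three points documented above.
     *
     * String all  = attr.toAttributeString(", ");    // e.g. "{0, 10.5}, {255, 20.0}, {512, 30.0}"
     * String some = attr.toAttributeString(", ", 2); // at most two data points are rendered
     */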

    /**
     * Returns a string representation of the data value. For example, "0, 255".
     *
     * For a compound datatype, it will be a 1D array of strings with field members separated by the
     * delimiter. For example, "{0, 10.5}, {255, 20.0}, {512, 30.0}" is a compound attribute of {int, float}
     * of three data points.
     *
     * @param delimiter
     *            The delimiter used to separate individual data points. It can be a comma, semicolon, tab
     *            or space. For example, toString(",") will separate data by commas.
     * @param maxItems
     *            The maximum number of Array values to return
     *
     * @return the string representation of the data values.
     */
    public String toAttributeString(String delimiter, int maxItems) { return toString(delimiter, maxItems); }
}