#include "nagg.h" #define WriteGranules_Product_list_max NAGG_Product_list_max+1 #define NAGG_Dataset_Path_max 160 #define NAGG_Group_Path_max 80 #define NAGG_Granule_index_string_max 4 /*------------------------------------------------------------------------ * Structure: product_info_t * * Purpose: store IDs for input and output files, output file name, index * of last granule written to ouput file, and pointer to the * previous granule written. * * Programmer: Larry Knox * December 15, 2011 * *------------------------------------------------------------------------ */ typedef struct { char dpid[DPID_size+1]; const char *product_name; hid_t infile; hid_t outfile; const char * outfilename; int last_i_granule; int granules_written; } product_info_t; static product_info_t products[WriteGranules_Product_list_max]; /* granules per ouput file and granules per input file should be constant for an aggregation when bucket boundaries and fill granules are implemented. */ static int g_gransperoutfile; static int g_gransperinfile; static int g_numproducts; static int print_links(hid_t group_id, const char *name, const H5L_info_t *info, void *op_data); static int create_resized_duplicate_dataset(hid_t ingroup, const char *name, const H5L_info_t *info, void *op_data); static int add_string_attribute(hid_t loc_id, const char *attr_name, const char *value); static int update_string_attribute(hid_t loc_id, const char *attr_name, const char *value); static int attrit_op_func(hid_t loc_id, const char *attr_name, const H5A_info_t *ainfo, void *op_data); static int update_aggregate_ending_attributes(int product_index); static int copy_attribute_value(hid_t in_loc_id, const char *attr_name, const char *attr_name_out, hid_t out_loc_id); static int write_missing_granules(hid_t output_file, hid_t input_file, const char *outputfile_name, granule_p_t granule, int last_i_granule, int i_granule); static int write_fill_granule_attrs(hid_t loc_id, const char *attr_name, const H5A_info_t 
*ainfo, void *op_data); static int write_fillgr_time_attrs(hid_t dset_id, granule_p_t granule); static int write_dataset_fill_values(hid_t output_file, const char *dsetpath, const hsize_t *block_coord, int rank); static int create_aggregate_dataset(hid_t output_file, hid_t input_file, granule_p_t granule); static int copy_granule_hyperslabs(hid_t output_file, hid_t input_file, char *src_dest, char *dest_dset, const char * outputfilename, int gran_index); static int make_aggregate_dataset_path(char *buffer, const char *product_name); static int make_granule_dataset_path(char *buffer, const char *product_name, int granidx); static int make_All_Data_product_group_path(char *buffer, const char *product_name); static int make_Data_Products_product_group_path(char *buffer, const char *product_name); static char *iet2utc(iet_t iet_time); /*------------------------------------------------------------------------- * Function: create_resized_duplicate_dataset * * Purpose: Create a dataset in the output file with similar characteristics * to a dataset in the input file sized for the new aggregation * number. * * Parameters: ingroup: id of the group containing the input dataset * name: dataset name * info: H5L_info_t struct required for callback functions used * by H5Literate * op_data address of variable containing id of the group containing * the ouput dataset * * Notes: Creates a dataset without any data, which will be added by calls * to the function copy_granule_hyperslabs. * Whether ouput datasets are fixed or unlimited will be determined * by the input dataset characteristics. 
* * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * April 27, 2012 * *------------------------------------------------------------------------- */ static int create_resized_duplicate_dataset(hid_t ingroup, const char *name, const H5L_info_t *info, void *op_data) { int status; hid_t indset_id, indspace_id, indtype_id, inpl_id; hid_t outdset_id = -1; hid_t outdspace_id, outdtype_id, outpl_id; H5O_info_t infobuf; int i, ndims; hsize_t origdim; hsize_t *dims, *maxdims; if ((status = H5Oget_info_by_name (ingroup, name, &infobuf, H5P_DEFAULT)) < 0) { fprintf(stderr, "create_resized_duplicate_dataset(): Failed to get info for product group.\n"); goto done; } if (infobuf.type == H5O_TYPE_DATASET) { if((indset_id = H5Dopen(ingroup, name, H5P_DEFAULT))<0) { fprintf(stderr, "create_resized_duplicate_dataset(): Failed to open dataset %s.\n", name); status = FAIL; H5Gclose(ingroup); goto done; } if ((indspace_id = H5Dget_space(indset_id)) < 0) { fprintf(stderr, "create_resized_duplicate_dataset(): Failed to open dataspace for dataset %s.\n", name); status = FAIL; H5Dclose(indset_id); goto done; } if ((indtype_id = H5Dget_type(indset_id)) < 0) { fprintf(stderr, "create_resized_duplicate_dataset(): Failed to open datatype for dataset %s.\n", name); status = FAIL; H5Dclose(indset_id); goto done; } if ((inpl_id = H5Dget_create_plist(indset_id)) < 0) { fprintf(stderr, "create_resized_duplicate_dataset(): Failed to open dataset property list for dataset %s.\n", name); status = FAIL; H5Dclose(indset_id); goto done; } if ((ndims = H5Sget_simple_extent_ndims(indspace_id)) < 0){ fprintf(stderr, "Failed to get number of dataset dimensions.\n"); status = FAIL; goto done; } /* The first dimension is resized for the size of the new aggregation. The others will keep the same size. 
*/ dims = (hsize_t *) HDmalloc (ndims * sizeof(hsize_t)); maxdims = (hsize_t *) HDmalloc (ndims * sizeof(hsize_t)); if ((H5Sget_simple_extent_dims(indspace_id, dims, maxdims)) < 0 ) { fprintf(stderr, "Failed to get dataset dimensions.\n"); status = FAIL; goto done; } origdim = dims[0]; /* g_gransperinfile and g_gransperoutfile are the old and new aggregation numbers (from the AggregateNumberGranules attribute). */ dims[0] = (origdim / g_gransperinfile * g_gransperoutfile); if (maxdims[0] != H5S_UNLIMITED) maxdims[0] = dims[0]; /* set all maxdims to H5S_UNLIMITED */ /* maybe we don't need to do this either? */ /* for (i=0; i -1) H5Dclose(outdset_id); return status; } /*------------------------------------------------------------------------- * Function: get_product_idx_by_id * * Purpose: * * Note: * * Return: Success: the index of a product in the array of products. * Failure: -1 * * Programmer: Larry Knox * December 15, 2011 * *------------------------------------------------------------------------- */ static int get_product_idx_by_id(const char *product_id) { int i; for (i=0; iinfile) H5Fclose(products[product_index]->infile); products[product_index]->inwritsofar = 0; if ((products[product_index]->infile = H5Fopen(granule->file_in, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open file %s.\n", granule->file_in); status = FAIL; } return status; } */ /*------------------------------------------------------------------------- * Function: first_granule_file_init * * Purpose: Initialize empty output file when first granule is written. * * Note: Datasets under the /All_Data group are copied without data from * the input file to the empty output file, and their storage size * set for the specified number of granules. Attributes belonging * to groups or to the ..._Aggr dataset are copied from the input * file to the new output file, except for attributes previously * written in the start_write function when the file was created. 
* When the aggregation size differs from that of the input files, * beginning and ending time attributes for the output file will be * different from those for the input file. * * Parameters: output_file: handle to the empty output file. * granule_p_t: pointer to an entry in the granule table * i_granule: index that the granule should have in the new output * file. * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * December 15, 2011 * *------------------------------------------------------------------------- */ static int first_granule_file_init( hid_t output_file, hid_t input_file, granule_p_t granule, int i_granule) { int status; hid_t gcpl, ingroup, adingroup, dpingroup; hid_t outgroup, adoutgroup, dpoutgroup; hid_t dset_id = -1; hid_t attr; char adgrpname[NAGG_Group_Path_max]; char dpgrpname[NAGG_Group_Path_max]; H5L_info_t *info; char dest_dset[NAGG_Dataset_Path_max]; int product_index; #ifdef DEBUG printf("Initialize file with granule %d for product %s.\n", i_granule, granule->product_id); #endif product_index = get_product_idx_by_id(granule->product_id); reset_product_granules_written(product_index); /* Create /Data_Products//product_name); status = make_Data_Products_product_group_path(dpgrpname, granule->product_name); status = make_aggregate_dataset_path(dest_dset, granule->product_name); if((dset_id = H5Dopen(input_file, dest_dset, H5P_DEFAULT)) < 0 ) { fprintf(stderr, "first_granule_file_init(): Failed to open dataset %s in %s.\n", dest_dset, granule->file_in); status = FAIL; } if ((attr = H5Aopen(dset_id, "AggregateNumberGranules", H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open attribute AggregateNumberGranules.\n"); status = FAIL; goto done; } /* this should be the same for all products. Currently it's looked up for each. 
*/ if ((status = H5Aread(attr, H5T_NATIVE_INT, &g_gransperinfile)) < 0) { fprintf(stderr, "Failed to read attribute AggregateNumberGranules.\n"); status = FAIL; H5Aclose(attr); goto done; } if (dset_id > -1) H5Dclose(dset_id); /* Copy the root group attributes. */ if ((ingroup = H5Gopen(input_file, "/", H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open input file \"/\"group.\n"); status = FAIL; goto done; } if ((outgroup = H5Gopen(output_file, "/", H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open output file \"/\"group.\n"); status = FAIL; goto done; } status = H5Aiterate2(ingroup, H5_INDEX_NAME, H5_ITER_NATIVE, 0, attrit_op_func, &outgroup); if ((gcpl = H5Pcreate (H5P_LINK_CREATE)) < 0) { fprintf(stderr, "Failed to create propery list for output group %s.\n", adgrpname); status = FAIL; goto done; } if ((status = H5Pset_create_intermediate_group (gcpl, 1)) < 0) { fprintf(stderr, "Failed to set intermediate group creation property for output group %s.\n", adgrpname); status = FAIL; goto done; } if ((adoutgroup = H5Gcreate (output_file, adgrpname, gcpl, H5P_DEFAULT, H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to create output group %s again.\n", adgrpname); status = FAIL; goto done; } if ((adingroup = H5Gopen(input_file, adgrpname, H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open input group %s.\n", adgrpname); status = FAIL; goto done; } /* Call a function to copy the datasets from /All_Data in the file with the first granule to /All_Data in the new file. The new datasets will have no data, which will be added by the write_granules function. 
*/ status = H5Literate(adingroup, H5_INDEX_NAME, H5_ITER_NATIVE, 0, create_resized_duplicate_dataset, &adoutgroup); status = H5Pclose (gcpl); status = H5Gclose (adoutgroup); gcpl = H5Pcreate (H5P_LINK_CREATE); status = H5Pset_create_intermediate_group (gcpl, 1); dpoutgroup = H5Gcreate (output_file, dpgrpname, gcpl, H5P_DEFAULT, H5P_DEFAULT); /* Copy the /Data_Products/ group attributes */ dpingroup = H5Gopen(input_file, dpgrpname, H5P_DEFAULT); status = H5Aiterate2(dpingroup, H5_INDEX_NAME, H5_ITER_NATIVE, 0, attrit_op_func, &dpoutgroup); status = create_aggregate_dataset(output_file, input_file, granule); status = H5Pclose (gcpl); status = H5Gclose (dpoutgroup); status = H5Gclose (dpingroup); status = H5Gclose (adingroup); done: return status; } /*------------------------------------------------------------------------- * Function: make_granule_dataset_path * * Purpose: assemble path to a granule dataset from product name and index * * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * February 9, 2012 * *------------------------------------------------------------------------- */ static int make_granule_dataset_path(char *buffer, const char *product_name, int granidx) { int status = SUCCEED; char index_str[NAGG_Granule_index_string_max]; if (granidx > 999) { NAGG_ERROR("make granule_dataset_path(): Granule index too large.\n"); status= FAIL; goto done; } sprintf(index_str,"%d", granidx); snprintf(buffer, 16, "/Data_Products/"); strncat(buffer, product_name, strlen(product_name)); strncat(buffer, "/", 1); strncat(buffer, product_name, strlen(product_name)); strncat(buffer, "_Gran_", 6); strncat(buffer, index_str, strlen(index_str)); done: return status; } /*------------------------------------------------------------------------- * Function: make_aggregate_dataset_path * * Purpose: assemble path to an aggregate dataset from product name * * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * February 9, 2012 * 
*------------------------------------------------------------------------- */ static int make_aggregate_dataset_path(char *buffer, const char *product_name) { int status = 0; snprintf(buffer, 16, "/Data_Products/"); strncat(buffer, product_name, strlen(product_name)); strncat(buffer, "/", 1); strncat(buffer, product_name, strlen(product_name)); strncat(buffer, "_Aggr", 5); return status; } /*------------------------------------------------------------------------- * Function: make_All_Data_product_group_path * * Purpose: assemble path to an aggregate dataset from product name * * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * February 9, 2012 * *------------------------------------------------------------------------- */ static int make_All_Data_product_group_path(char *buffer, const char *product_name) { int status = 0; snprintf(buffer, 11, "/All_Data/"); strncat(buffer, product_name, strlen(product_name)); strncat(buffer, "_All", 4); return status; } /*------------------------------------------------------------------------- * Function: make_Data_Products_product_group_path * * Purpose: assemble path to an aggregate dataset from product name * * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * February 9, 2012 * *------------------------------------------------------------------------- */ static int make_Data_Products_product_group_path(char *buffer, const char *product_name) { int status = 0; snprintf(buffer, 16, "/Data_Products/"); strncat(buffer, product_name, strlen(product_name)); return status; } /*------------------------------------------------------------------------- * Function: write_missing_granules * * Purpose: write a granule with fill values for data when a granule is * missing * * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * December 15, 2011 * *------------------------------------------------------------------------- */ static int write_missing_granules(hid_t output_file, 
hid_t input_file, const char *outputfile_name, granule_p_t granule, int last_i_granule, int i_granule) { int i, j, k; int product_index; hdset_reg_ref_t *ref = NULL; hdset_reg_ref_t *ref_new = NULL; char *reg_name; char src_group[NAGG_Group_Path_max]; char dest_group[NAGG_Group_Path_max]; char dest_dset[NAGG_Dataset_Path_max]; char src_dset[NAGG_Dataset_Path_max]; hsize_t *region_buf; size_t rnlength; int rank_out; size_t numelem; size_t numrefs; H5S_sel_type sel_type; size_t size_ref; hid_t dtype, dcpl; hid_t group_id; hid_t dset_id = -1; hid_t ref_dset_id = -1; hid_t dspace, outdspace, attr; hsize_t delta; htri_t exists; int array_size; hsize_t *start; hsize_t chunk[1] = {32}; char **dataset_paths; int status = SUCCEED; granule_pattern_p_t gran_pattern; /* last_i_granule was previously used to calculate the hyperslab position, but now we are writing only the granule sent, so i_granule is more appropriate. The parameter last_i_granule (not the product structure's member) can possibly be eliminated or used for error checking. 
*/ last_i_granule++; product_index = get_product_idx_by_id(granule->product_id); gran_pattern = get_granule_pattern(get_product_id_by_idx(product_index)); #ifdef DEBUG printf("Write fill granule for ..._Gran_%d for product %s\n", i_granule, granule->product_name); #endif if ((products[product_index].infile = H5Fopen(gran_pattern->file_in, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open file %s.\n", gran_pattern->file_in); status = FAIL; } input_file = products[product_index].infile; if ( get_product_granules_written(product_index) < 0) { if ((status = first_granule_file_init(output_file, input_file, granule, i_granule)) < 0) { goto done; } } status = make_granule_dataset_path(dest_dset, granule->product_name, i_granule); status = make_granule_dataset_path(src_dset, granule->product_name, granule->granule_input_index); if((dset_id = H5Dopen(input_file, src_dset, H5P_DEFAULT)) < 0 ) { fprintf(stderr, "Failed to open dataset %s.\n", src_dset); status = FAIL; goto done; } if ((dspace= H5Dget_space(dset_id)) < 0) { fprintf(stderr, "Failed to get dataspace for dataset %s", src_dset); status = FAIL; goto done; } if ((dtype = H5Dget_type(dset_id)) < 0) { fprintf(stderr, "Failed to open datatype for dataset.\n"); status = FAIL; goto done; } if ((outdspace = H5Scopy(dspace)) < 0) { fprintf(stderr, "Failed to copy dataspace.\n"); status = FAIL; } dcpl = H5Pcreate (H5P_DATASET_CREATE); status = H5Pset_chunk (dcpl, 1, chunk); if((ref_dset_id = H5Dcreate2(output_file, dest_dset, H5T_STD_REF_DSETREG, outdspace, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0 ) { NAGG_ERROR("(Line 550)"); fprintf(stderr, "Failed to create dataset %s in output file.\n", dest_dset); status = FAIL; goto done; } if ((numrefs = H5Sget_simple_extent_npoints(dspace)) < 1) { fprintf(stderr, "Failed to get number of region references in dataset %s", src_dset); } /* Allocate memory for arrays of references. 
*/ ref = (hdset_reg_ref_t *) HDmalloc (numrefs * sizeof(hdset_reg_ref_t)); ref_new = (hdset_reg_ref_t *) HDmalloc (numrefs * sizeof(hdset_reg_ref_t)); status = H5Dread(dset_id, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref); if (status < 0) { fprintf(stderr, "Failed to read dataset %s.\n", src_dset); } array_size = 0; /* In order to call H5LRcreate_region_references, we need an hsize_t* array the size of 2 * rank * numrefs. We will need to call H5LRget_region_info for each ref, summing the hsize_ts, allocate the hsize_t array, then call it again for each ref to fill in the arrays. */ for (j=0; j<(int)numrefs; ++j) { status = H5LRget_region_info(input_file, (const hdset_reg_ref_t*)ref[j], &rnlength, NULL, &rank_out, NULL, &sel_type, &numelem, NULL); if (status < 0) fprintf(stderr, "Failed to get rank and number of region references.\n"); array_size += (int)numelem * rank_out * 2; } start = (hsize_t *) HDmalloc(array_size * sizeof(hsize_t)); dataset_paths = (char **) HDmalloc((int)numrefs * sizeof(char**)); region_buf = start; for (j=0; j<(int)numrefs; ++j) { status = H5LRget_region_info(input_file, (const hdset_reg_ref_t*)ref[j], &rnlength, NULL, &rank_out, NULL, &sel_type, &numelem, NULL); if (status < 0) fprintf(stderr, "Failed to get region references.\n"); reg_name = (char *) HDmalloc (rnlength); dataset_paths[j] = reg_name; status = H5LRget_region_info(input_file, (const hdset_reg_ref_t*)ref[j], &rnlength, reg_name, &rank_out, &dtype, &sel_type, &numelem, region_buf); if (status < 0) fprintf(stderr, "Failed in line 515.\n"); /* region_buf contains an array of numbers, 2 for each rank of the dataset. The first half of them represent the smallest coordinate in each dimension of a hyperslab and the second half the highest dimension of the hyperslab (in the same order). The aggregation boundary should be in the first dimension, so only region_buf[0] and region_buf[rank] will change for the hyperslab of each granule. 
Therefore the coordinates of the hyperslab for any granule (returned from H5LRget_region_info as region_buf) can be used to compute the coordinates of the hyperslab of any other granule as follows: */ delta = region_buf[rank_out] - region_buf[0]; region_buf[0] = (delta +1) * i_granule; region_buf[rank_out] = region_buf[0] + delta; /* Here we need to create the reference(s) that H5LRcopy_reference would return in the write_granule after copying the data to the new file in the output file. There is no data to copy, so the correct location will be calculated and a region reference created. h5dump will report the unwritten data as fill values. */ /* now we are going to write fill values to the hyperslabs for the missing granules. They could be written at various stages, but maybe this is a good place while we have a new set of hyperslab coordinates? */ write_dataset_fill_values(output_file, reg_name, region_buf, rank_out); /* advance to the coordinates for the next dataset's hyperslab */ region_buf += 2*rank_out; } if((status = H5LRcreate_region_references(output_file, numrefs, dataset_paths, start, ref_new)) < 0) { NAGG_ERROR("write_missing_granules(): Failed to create region references\n"); goto done; } status = H5Dwrite(ref_dset_id, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref_new); status = add_string_attribute(ref_dset_id, "N_Granule_Status", "Missing at delivery time"); status = H5Aiterate2(dset_id, H5_INDEX_NAME, H5_ITER_NATIVE, 0, write_fill_granule_attrs, &ref_dset_id); status = write_fillgr_time_attrs(ref_dset_id, granule); H5Tclose(dtype); H5Sclose(dspace); H5Sclose(outdspace); H5Dclose(dset_id); dset_id = -1; H5Dclose(ref_dset_id); ref_dset_id = -1; for (j=0; j<(int)numrefs; ++j) { free(dataset_paths[j]); } free(dataset_paths); dataset_paths = 0; free(ref); ref = 0; free(ref_new); ref_new = 0; free(start); start = 0; if (get_product_granules_written(product_index) < 0) { /* Create /Data_Products//product_name); status = 
make_Data_Products_product_group_path(dest_group, granule->product_name); status = make_aggregate_dataset_path(dest_dset, granule->product_name); status = H5LRcreate_ref_to_all(output_file, src_group, dest_dset, H5_INDEX_NAME, H5_ITER_NATIVE, H5R_OBJECT); if (status < 0) { NAGG_ERROR("write_missing_granules(): Failed to create dataset\n"); fprintf(stderr, "Failed to create %s dataset.\n", dest_dset); } if((dset_id = H5Dopen(input_file, dest_dset, H5P_DEFAULT)) < 0 ) { fprintf(stderr, "Failed to open dataset %s.\n", dest_dset); status = FAIL; goto done; } if((ref_dset_id = H5Dopen(output_file, dest_dset, H5P_DEFAULT)) < 0 ) { fprintf(stderr, "Failed to open dataset %s in output file.\n", dest_dset); status = FAIL; goto done; } if ((H5Aiterate2(dset_id, H5_INDEX_NAME, H5_ITER_NATIVE, 0, attrit_op_func, &ref_dset_id)) < 0) { fprintf(stderr, "Failed to copy attributes for dataset %s.\n", dest_dset); status = FAIL; } H5Dclose(ref_dset_id); H5Dclose(dset_id); status = create_aggregate_dataset(output_file, input_file, granule); H5Fflush(output_file, H5F_SCOPE_GLOBAL); } done: if (ref) free(ref); return status; } /*------------------------------------------------------------------------- * Function: start_write * * Purpose: Prepare file for writing an aggregation of granules. * * Parameters: outfiles: list of output files for NPP products. * noutfiles: number of output files in outfiles. * outgeofile: Geolocation output file; NULL if not needed. * products_list: list of products requested. * nproducts: number of products in products_list. * creationtime: Creation Time stamp of the output files * ngranulesperfile: number of granules per file requested. 
* * Notes: Whether the output is packaged or unpackaged is dependent * on the combination of values for noutfiles, nproducts, * outgeofile and geoproduct: * * noutfiles nproducts outgeofile geoproduct output * * 0 0 yes yes geoproduct in * outgeofile * * 1 1 to n NULL yes packaged file * including geoproduct * * 1 1 to n NULL NULL packaged file not * including geoproduct * * n n yes yes unpackaged * * n n NULL NULL unpackaged with * no geoproduct * * Other combinations are unsupported. * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * December 15, 2011 * *------------------------------------------------------------------------- */ int start_write(NPPFileName_t *outfiles, int noutfiles, const char *outgeofile, char *geoproduct, char **products_list, int nproducts, const char *creationdate, const char *creationtime, int ngranulesperfile) { int i, product_index; int status = SUCCEED; hid_t outfile_ids[WriteGranules_Product_list_max]; g_gransperoutfile = ngranulesperfile; int prodfileidx; static char geo_product_id_backup[DPID_size+1]; char geo_product_id[DPID_size+1]; #if defined(DEBUG) printf("start_write with ngranulesperfile=%d\n", ngranulesperfile); printf("First outfile is %s.\n", outfiles[0]); #endif g_numproducts = nproducts; if (nproducts > NAGG_Product_list_max) { NAGG_ERROR("start_write(): Illegal number of products in product list."); status = FAIL; goto done; } for (i=0;iproduct_name); #endif real_granule = HDstrneq(granule->granule_id, "N/A"); product_index = get_product_idx_by_id(granule->product_id); output_file = products[product_index].outfile; if (real_granule) { if ((products[product_index].infile = H5Fopen(granule->file_in, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open input file %s.\n", granule->file_in); status = FAIL; goto done; } input_file = products[product_index].infile; if ( get_product_granules_written(product_index) < 0) { if ((status = first_granule_file_init(output_file, input_file, granule, 
i_granule)) < 0) { goto done; } } if ((status = make_granule_dataset_path(dest_dset, granule->product_name, i_granule)) <0) { goto done; } if ((status = make_granule_dataset_path(src_dset, granule->product_name, granule->granule_input_index)) < 0) { goto done; } /* Provided the granule doesn't exist in the output file, copy it from input to output */ if((exists = H5Lexists(output_file, dest_dset, H5P_DEFAULT)) == 0) { #ifdef DEBUG printf("write_granules(): Copy %s from input file to %s in output file.\n", src_dset, dest_dset); #endif if ((H5Ocopy(input_file, src_dset, output_file, dest_dset, H5P_DEFAULT, H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to copy %s to %s.\n", src_dset, products[product_index].outfilename); } } else if (exists > 0) { fprintf(stderr, "%s in %s already exists.\n", src_dset, products[product_index].outfilename); } else { fprintf(stderr, "Call to H5Lexists failed for %s in %s.\n", src_dset, products[product_index].outfilename); } if ((status = copy_granule_hyperslabs(output_file, input_file, src_dset, dest_dset, products[product_index].outfilename, i_granule)) < 0) { goto done; } if (get_product_granules_written(product_index) < 0) { if((status = create_aggregate_dataset(output_file, input_file, granule)) < 0) { goto done; } } /* For fill granules, call write_missing_granule */ } else { outgran_index = get_last_i_granule(product_index); if ((status = write_missing_granules(output_file, input_file, products[product_index].outfilename, granule, outgran_index, i_granule)) < 0) goto done; } products[product_index].last_i_granule = i_granule; outgran_index = increment_granules_written(product_index); done: if (products[product_index].infile > 0) { H5Fclose(products[product_index].infile); products[product_index].infile = -1; } return status; } /*------------------------------------------------------------------------- * Function: copy_granule_hyperslabs * * Purpose: Copy the hyperslab of raw data for a granule to the correct * location in the 
output file. * * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * February 10, 2012 * *------------------------------------------------------------------------- */ static int copy_granule_hyperslabs(hid_t output_file, hid_t input_file, char *src_dset, char *dest_dset, const char *outfilename, int gran_index) { hdset_reg_ref_t *ref = NULL; hdset_reg_ref_t *ref_new = NULL; char *reg_name; hsize_t *region_buf; size_t rnlength; int rank_out; size_t numelem; hssize_t numrefs; H5S_sel_type sel_type; size_t size_ref; hsize_t delta; hid_t dtype; hid_t dset_id = -1; hid_t ref_dset_id = -1; hid_t dspace, attr; int j, status = 0; /* Open input and output granule datasets for copying data to new file and writing reference in the new granule dataset for the new data location. */ if((dset_id = H5Dopen(input_file, src_dset, H5P_DEFAULT)) < 0 ) { fprintf(stderr, "Failed to open dataset %s in %s.\n", src_dset, outfilename); status =FAIL; goto done; } if((ref_dset_id = H5Dopen(output_file, dest_dset, H5P_DEFAULT)) < 0 ) { fprintf(stderr, "Failed to open dataset %s.\n", dest_dset); status = FAIL; goto done; } if ((dspace= H5Dget_space(dset_id)) < 0) { fprintf(stderr, "Failed to get dataspace for dataset %s", src_dset); status = FAIL; goto done; } if ((dtype = H5Dget_type(dset_id)) < 0) { fprintf(stderr, "Failed to open datatype for dataset.\n"); status = FAIL; goto done; } if ((numrefs = H5Sget_simple_extent_npoints(dspace)) < 1) { fprintf(stderr, "Failed to get number of region references in dataset %s.", src_dset); } /* Allocate memory for arrays of references. 
*/ ref = (hdset_reg_ref_t *) HDmalloc (numrefs * sizeof(hdset_reg_ref_t)); ref_new = (hdset_reg_ref_t *) HDmalloc (numrefs * sizeof(hdset_reg_ref_t)); status = H5Dread(dset_id, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref); if (status < 0) { fprintf(stderr, "Failed to read dataset %s.\n", src_dset); } for (j=0; j<(int)numrefs; ++j) { /* The first call gets the rank and dimensions for allocating storage space in the buffers for the dataset name and the corners of the region. The second call gets the dataset name (reg_name) and coordinates. */ status = H5LRget_region_info(input_file, (const hdset_reg_ref_t*)ref[j], &rnlength, NULL, &rank_out, NULL, &sel_type, &numelem, NULL); if (status < 0) fprintf(stderr, "copy_granule_hyperslabs(): H5LRget_region_info() failed to get rank and dimensions.\n"); reg_name = (char *) HDmalloc (rnlength); region_buf = (hsize_t *)HDmalloc((int)numelem * rank_out * sizeof(hsize_t) * 2); status = H5LRget_region_info(input_file, (const hdset_reg_ref_t*)ref[j], &rnlength, reg_name, &rank_out, &dtype, &sel_type, &numelem, region_buf); if (status < 0) fprintf(stderr, "copy_granule_hyperslabs(): failed to get dataset name and coordinates.\n"); /* region_buf is array of numbers, 2 for each rank of the dataset. The first half of them represent the smallest coordinate in each dimension of a hyperslab and the second half the highest. The aggregation boundary should be in the first dimension, so only the region_buf[0] and region_buf[rank] will change for the hyperslab when a granule is moved to a different position. */ delta = region_buf[rank_out] - region_buf[0]; region_buf[0] = (delta +1) * gran_index; region_buf[rank_out] = region_buf[0] + delta; /* Call H5LRcopy_reference to copy the data pointed to by the reference in the input file to the calculated location in the output file. 
The function also returns a region reference to the location of the data in the new file, which is stored in ref_new[j] and written to the new _Gran_n dataset in the output file after the data for all region references has been copied. */ status = H5LRcopy_reference(input_file, (const hdset_reg_ref_t*)ref[j], outfilename, reg_name, region_buf, (hdset_reg_ref_t*)ref_new[j]); if (status < 0) { fprintf(stderr, "Copy reference failed on the %d dataset, %s.\n", j, reg_name); goto done; } free(reg_name); free(region_buf); } /* Write all the region references for the granule to the new _Gran_n dataset in the output file. */ status = H5Dwrite(ref_dset_id, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref_new); H5Tclose(dtype); H5Sclose(dspace); H5Dclose(dset_id); H5Dclose(ref_dset_id); H5Fflush(output_file, H5F_SCOPE_GLOBAL); done: if (ref) free(ref); ref = NULL; if (ref) free(ref_new); ref_new = NULL; return status; } /*------------------------------------------------------------------------- * Function: create_aggregate_dataset * * Purpose: * * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * February 10, 2012 * *------------------------------------------------------------------------- */ static int create_aggregate_dataset(hid_t output_file, hid_t input_file, granule_p_t granule) { char src_group[NAGG_Group_Path_max]; char dest_group[NAGG_Group_Path_max]; char dest_dset[NAGG_Dataset_Path_max]; char src_dset[NAGG_Dataset_Path_max]; hid_t dset_id = -1; hid_t ref_dset_id = -1; hid_t attr; int status = SUCCEED; /* Create /Data_Products//product_name); status = make_Data_Products_product_group_path(dest_group, granule->product_name); status = make_aggregate_dataset_path(dest_dset, granule->product_name); status = make_granule_dataset_path(src_dset, granule->product_name, granule->granule_input_index); status = H5LRcreate_ref_to_all(output_file, src_group, dest_dset, H5_INDEX_NAME, H5_ITER_NATIVE, H5R_OBJECT); if (status < 0) { 
NAGG_ERROR("create_aggregate_dataset(): Failed to create dataset\n"); fprintf(stderr, "Failed to create %s dataset.\n", dest_dset); status = FAIL; goto done; } if((dset_id = H5Dopen(input_file, dest_dset, H5P_DEFAULT)) < 0 ) { fprintf(stderr, "Failed to open dataset %s.\n", dest_dset); status = FAIL; goto done; } if((ref_dset_id = H5Dopen(output_file, dest_dset, H5P_DEFAULT)) < 0 ) { fprintf(stderr, "Failed to open dataset %s in output file.\n", dest_dset); status = FAIL; goto done; } if ((H5Aiterate2(dset_id, H5_INDEX_NAME, H5_ITER_NATIVE, 0, attrit_op_func, &ref_dset_id)) < 0) { fprintf(stderr, "Failed to copy attributes for dataset %s.\n", dest_dset); status = FAIL; goto done; } /* nagg needs to know how many granules are in the input file. The attribute is checked here because the input file aggregation dataset is already open, create_aggregate_dataset, and the number of granules in the input dataset and the output dataset are used for sizing the /All_Data/<*> datasets in the new output file. This should be refactored to get the number in the first_granule_file_init function and then pass it to */ if ((attr = H5Aopen(ref_dset_id, "AggregateNumberGranules", H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open attribute AggregateNumberGranules.\n"); status = FAIL; goto done; } if ((status = H5Aread(attr, H5T_NATIVE_INT, &g_gransperinfile)) < 0) { fprintf(stderr, "Failed to read attribute AggregateNumberGranules.\n"); status = FAIL; H5Aclose(attr); goto done; } H5Aclose(attr); H5Dclose(ref_dset_id); ref_dset_id = -1; H5Dclose(dset_id); dset_id = -1; /* Four beginning attribute values should be written from the first granule to be written to the new output file (or the first fill granule if there is one): AggregateBeginningDate from Beginning_Date, AggregateBeginningGranuleID from N_Granule_ID, AggregateBeginningOrbitNumber from N_Beginning_Orbit_Number, and AggregateBeginningTime from Beginning_Time. 
*/ if((dset_id = H5Dopen(output_file, dest_dset, H5P_DEFAULT)) < 0 ) { fprintf(stderr, "Failed to open dataset %s in output file.\n", dest_dset); status = FAIL; goto done; } if ((status = update_string_attribute(dset_id, "AggregateBeginningDate", granule->beginning_date)) < 0) { NAGG_ERROR("create_aggregate_dataset(): Failed to write value to AggregateBeginningDate\n"); goto done; } if ((status = update_string_attribute(dset_id, "AggregateBeginningGranuleID", granule->granule_id)) < 0) { NAGG_ERROR("create_aggregate_dataset(): Failed to write value to AggregateBeginningGranuleID\n"); goto done; } if ((attr = H5Aopen(dset_id, "AggregateBeginningOrbitNumber", H5P_DEFAULT)) < 0) { NAGG_ERROR("write_fillgr_time_attrs(): Failed to open attribute AggregateBeginningOrbitNumber for writing\n"); status = FAIL; goto done; } if ((status = H5Awrite(attr, H5T_NATIVE_ULLONG, &(granule->orbit_number))) < 0) { fprintf(stderr, "Failed to write attribute AggregateBeginningOrbitNumber.\n"); H5Aclose(attr); } if ((status = update_string_attribute(dset_id, "AggregateBeginningTime", granule->beginning_time )) < 0) { NAGG_ERROR("write_granules(): Failed to write value to AggregateBeginningTime\n"); goto done; } done: if (ref_dset_id > -1) H5Dclose(ref_dset_id); if (dset_id > -1) H5Dclose(dset_id); return status; } /*------------------------------------------------------------------------- * Function: end_write * * Purpose: provide notification that an aggregation is finished and the * files can be closed. * * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * December 15, 2011 * *------------------------------------------------------------------------- */ int end_write() { int status = SUCCEED; int i; int files_to_close; /* check the number of granules written and update the ending attributes for all products. 
*/ for (i=0;i -1) { if((H5Fclose(products[i].outfile)) < 0) { fprintf(stderr, "Error closing output data file.\n"); status = FAIL; goto done; } products[i].outfile = -1; } } done: return status; } /*------------------------------------------------------------------------- * Function: add_string_attribute * * Purpose: Add an attribute of type string to the object identified by the * first parameter. * * Note: The attribute will be added as a 1x1 array of strings to match * the files that IDPS delivers. * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * December 15, 2011 * *------------------------------------------------------------------------- */ static int add_string_attribute(hid_t loc_id, const char *attr_name, const char *value) { herr_t status; hid_t dataset, datatype, dataspace, dsid; hid_t grpid, spaceid, typeid, attid; hsize_t dims[2] = {1,1}; int rank = 2; if (H5Aexists_by_name(loc_id, ".", attr_name, H5P_DEFAULT)>0) { /* fprintf(stderr, "Note: %s already exists, value will not be written.\n", attr_name); */ status = 0; } else { spaceid = H5Screate_simple(rank, dims, dims); typeid = H5Tcopy(H5T_C_S1); H5Tset_size(typeid, strlen(value) + 1); attid = H5Acreate2(loc_id, attr_name, typeid, spaceid, H5P_DEFAULT, H5P_DEFAULT); status = H5Awrite(attid, typeid, value); H5Sclose(spaceid); H5Tclose(typeid); H5Aclose(attid); } return status; } /*------------------------------------------------------------------------- * Function: get_string_attribute_value * * Purpose: Get the value of a string type attribute. 
* * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * December 15, 2011 * *------------------------------------------------------------------------- */ static int get_string_attribute_value(hid_t loc_id, const char * name, char *buf) { hid_t attr; hid_t atype, atype_mem; /* Attribute type */ H5T_class_t type_class; int status = SUCCEED; if((attr = H5Aopen(loc_id, name, H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open attribute %s.\n", name); status = FAIL; goto done; } else if(( atype = H5Aget_type(attr))< 0) { fprintf(stderr, "Failed to get type for attribute %s.\n", name); status = FAIL; } else if((type_class = H5Tget_class(atype)) < 0) { fprintf(stderr, "Failed to get type class for attribute %s.\n", name); status = FAIL; } else if ((atype_mem = H5Tget_native_type(atype, H5T_DIR_ASCEND)) < 0) { fprintf(stderr, "Failed to get type class for attribute %s.\n", name); status = FAIL; } else if ((status = H5Aread(attr, atype_mem, buf)) < 0) { fprintf(stderr, "Failed to read attribute %s.\n", name); } H5Aclose(attr); done: return status; } /*------------------------------------------------------------------------- * Function: update_string_attribute * * Purpose: . 
* * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * December 15, 2011 * *------------------------------------------------------------------------- */ static int update_string_attribute(hid_t loc_id, const char *attr_name, const char *value) { hid_t attr, atype; int status = SUCCEED; if((attr = H5Aopen(loc_id, attr_name, H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open attribute %s.\n", attr_name); status = FAIL; goto done; } else if(( atype = H5Aget_type(attr))< 0) { fprintf(stderr, "Failed to get type for attribute %s.\n", attr_name); status = FAIL; } else if ((status = H5Awrite(attr, atype, value)) < 0) { fprintf(stderr, "Failed to write attribute %s.\n", attr_name); } H5Tclose(atype); done: H5Aclose(attr); return status; } /*------------------------------------------------------------------------- * Function: update_aggregate_ending_attributes * * Purpose: Write attributes of the Aggregate dataset that are determined by * the last granule in the aggregation. * * Note: The attributes "AggregateEndingDate", "AggregateEndingGranuleID", * "AggregateEndingOrbitNumber", and "AggregateEndingTime" can be * copied from the last granule in the aggregation. The last granule * and the value of the "AggregateNumberGranules" attribute are * determined by the product_gran_idx for the product being aggregated. 
 *
 * Return: Success: 0
 *         Failure: -1
 *
 * Programmer: Larry Knox
 *             December 15, 2011
 *
 *-------------------------------------------------------------------------
 */
static int update_aggregate_ending_attributes(int product_index)
{
    int status;
    char aggr_dset[NAGG_Dataset_Path_max];
    char gran_dset[NAGG_Dataset_Path_max];
    int aggr_gran_num;
    hid_t agdset_id, grdset_id, attr, atype;

    /* NOTE(review): aggr_gran_num and atype appear unused below;
       agdset_id/grdset_id/attr are never closed — presumed leaks. */
    aggr_gran_num = g_gransperoutfile;
    /* Paths to the Aggregate dataset and the last granule's dataset. */
    status = make_aggregate_dataset_path(aggr_dset, get_product_name_by_idx(product_index));
    status = make_granule_dataset_path(gran_dset, get_product_name_by_idx(product_index), get_last_i_granule(product_index));
    if((agdset_id = H5Dopen(products[product_index].outfile, aggr_dset, H5P_DEFAULT)) < 0 ) {
        NAGG_ERROR("update_aggregate_ending_attributes(): Failed to open output dataset\n");
        fprintf(stderr, "Failed to open dataset %s in output file %s for updating ending attributes.\n", aggr_dset, products[product_index].outfilename);
        status = FAIL;
        goto done;
    }
    if((grdset_id = H5Dopen(products[product_index].outfile, gran_dset, H5P_DEFAULT)) < 0 ) {
        NAGG_ERROR("update_aggregate_ending_attributes(): Failed to open input dataset\n");
        /* NOTE(review): '.outfile' (an hid_t) is passed for the %s below —
           should presumably be '.outfilename'; undefined behavior. */
        fprintf(stderr, "Failed to open dataset %s in output file %s for updating ending attributes.\n", gran_dset, products[product_index].outfile);
        status = FAIL;
        goto done;
    }
    /* Copy the four ending attributes from the last granule dataset onto
       the Aggregate dataset under their Aggregate* names. */
    if ((status = copy_attribute_value(grdset_id, "Ending_Date", "AggregateEndingDate", agdset_id)) < 0) {
        NAGG_ERROR("update_aggregate_ending_attributes(): Failed to copy value to AggregateEndingDate\n");
        goto done;
    }
    if ((status = copy_attribute_value(grdset_id, "N_Granule_ID", "AggregateEndingGranuleID", agdset_id)) < 0) {
        NAGG_ERROR("update_aggregate_ending_attributes(): Failed to copy value to AggregateEndingGranuleID\n");
        goto done;
    }
    if ((status = copy_attribute_value(grdset_id, "N_Beginning_Orbit_Number", "AggregateEndingOrbitNumber", agdset_id)) < 0) {
        NAGG_ERROR("update_aggregate_ending_attributes(): Failed to copy value to AggregateEndingOrbitNumber\n");
        goto done;
    }
    if ((status = copy_attribute_value(grdset_id, "Ending_Time", "AggregateEndingTime", agdset_id)) < 0) {
        NAGG_ERROR("update_aggregate_ending_attributes(): Failed to copy value to AggregateEndingTime\n");
        goto done;
    }
    /* Record the final granule count for the aggregation. */
    if((attr = H5Aopen(agdset_id, "AggregateNumberGranules", H5P_DEFAULT)) < 0) {
        /* NOTE(review): malformed message text (stray backtick, unbalanced
           parenthesis) — reproduced unchanged. */
        NAGG_ERROR("update_aggregate_ending_attributes(Failed to open attribute AggregateNumberGranules for writing`\n");
        status = FAIL;
    }
    else if ((status = H5Awrite(attr, H5T_NATIVE_INT, &g_gransperoutfile)) < 0) {
        NAGG_ERROR("update_aggregate_ending_attributes(): Failed to write attribute AggregateNumberGranules.\n");
    }
done:
    return status;
}

/*-------------------------------------------------------------------------
 * Function: write_dataset_fill_values
 *
 * Purpose: write fill values to the specified hyperslab of the dataset.
 *
 * Note:
 *
 * Return: Success: 0
 *         Failure: -1
 *
 * Programmer: Larry Knox
 *             December 15, 2011
 *
 *-------------------------------------------------------------------------
 */
static int write_dataset_fill_values(hid_t output_file, const char *dsetpath, const hsize_t *block_coord, int rank)
{
    int i, status = SUCCEED;
    int skip = 0;
    hsize_t numelems;
    hsize_t dims[1];
    hid_t dset_id = -1;
    hid_t dtype, dtype_mem, dspace, dspace_mem;
    size_t dtype_size;
    hsize_t *start = NULL, *stride = NULL, *count = NULL , *block_size = NULL;
    void *databuf = NULL;
    /* One candidate buffer pointer per supported element type. */
    int8_t *i8buf = NULL;
    unsigned char *ui8buf = NULL;
    short int *i16buf = NULL;
    unsigned short *ui16buf = NULL;
    int *i32buf = NULL;
    unsigned int *ui32buf = NULL;
    float *f32buf = NULL;
    long long *i64buf = NULL;
    double *f64buf = NULL;
    /* fill values from Common Data Format Control Book - Volume I -
       Overview, Table 3.5.6-1, p 104 */
    int8_t i8val = 126;
    unsigned char ui8val = 254;
    short int i16val = -998;
    unsigned short ui16val = 65534;
    int i32val = -998;
    unsigned long ui32val = 4294967294UL;
    float f32val = -999.8;
    long long i64val = -998;
    double f64val = -999.8;

    /* Hyperslab description arrays, one entry per dataset dimension. */
    start = (hsize_t *) HDmalloc(rank * sizeof(hsize_t));
    stride = (hsize_t *) HDmalloc(rank * sizeof(hsize_t));
    count = (hsize_t *) HDmalloc(rank * sizeof(hsize_t));
    block_size = (hsize_t *) HDmalloc(rank * sizeof(hsize_t));
    numelems = 1;
    /* NOTE(review): the body of this function from this loop onward was
       corrupted in the text under review (a large span appears to have been
       dropped); the fragment below is reproduced exactly as received and is
       not valid C — recover the original from version control. */
    for (i=0; i -1) H5Dclose(dset_id);
    return status;
}

/*-------------------------------------------------------------------------
 * Function: attrit_op_func
 *
 * Purpose: Functor for H5Aiterate2: copies one attribute (type, dataspace,
 *          creation properties and value) from the iterated object to the
 *          object whose id is passed through op_data, unless the attribute
 *          already exists there or is named "N_GEO_Ref".
 *
 * Note:
 *
 * Return: Success: 0
 *         Failure: -1
 *
 * Programmer: Larry Knox
 *             December 15, 2011
 *
 *-------------------------------------------------------------------------
 */
static int attrit_op_func(hid_t loc_id, const char *attr_name, const H5A_info_t *ainfo, void *op_data)
{
    hid_t attr_in, attr_out, atype, aspace, acpl;
    void *buf;
    hsize_t size;
    int status = SUCCEED;

    if (HDstreq(attr_name, "N_GEO_Ref")) {
        /* skip N_GEO_Ref: will be written later if needed */
        goto done;
    }
    if (H5Aexists_by_name(*(hid_t *)op_data, ".", attr_name, H5P_DEFAULT)>0) {
        /* attribute already exists on the target; value will not be written */
        status = 0;
        goto done;
    }
    else if((attr_in = H5Aopen(loc_id, attr_name, H5P_DEFAULT)) < 0) {
        fprintf(stderr, "Failed to open attribute %s.\n", attr_name);
        status = FAIL;
        goto done;
    }
    else if(( atype = H5Aget_type(attr_in))< 0) {
        fprintf(stderr, "Failed to get type for attribute %s.\n", attr_name);
        status = FAIL;
    }
    else if(( aspace = H5Aget_space(attr_in))< 0) {
        fprintf(stderr, "Failed to get dataspace for attribute %s.\n", attr_name);
        status = FAIL;
    }
    else if((acpl = H5Aget_create_plist(attr_in)) < 0) {
        fprintf(stderr, "Failed to get creation property list for attribute %s.\n", attr_name);
        status = FAIL;
    }
    else if((size = H5Aget_storage_size(attr_in)) == 0) {
        /* NOTE(review): a legitimately empty attribute also returns 0 and
           is treated as an error here. */
        fprintf(stderr, "Failed to get storage size for attribute %s.\n", attr_name);
        status = FAIL;
    }
    else {
        /* compute and allocate memory for copy buffer */
        /* NOTE(review): buf is never freed — leaks on every path,
           including success. */
        buf = HDmalloc (size);
        if ((status = H5Aread(attr_in, atype, buf)) < 0) {
            fprintf(stderr, "Failed to read attribute %s.\n", attr_name);
            status = FAIL;
            goto done;
        }
        else if ((attr_out = H5Acreate2(*(hid_t *)op_data, attr_name, atype, aspace, acpl, H5P_DEFAULT)) < 0) {
            fprintf(stderr, "Failed to open attribute %s.\n", attr_name);
            status = FAIL;
            goto done;
        }
        else if ((status = H5Awrite(attr_out, atype, buf)) < 0) {
            fprintf(stderr, "Failed to write attribute %s.\n", attr_name);
            status = FAIL;
        }
        H5Pclose(acpl);
        H5Sclose(aspace);
        H5Tclose(atype);
        H5Aclose(attr_out);
    }
    H5Aclose(attr_in);
done:
    return status;
}

/*-------------------------------------------------------------------------
 * Function: write_fillgr_time_attrs
 *
 * Purpose: Write time attributes to a fill granule.
 *
 * Note:
 *
 * Return: Success: 0
 *         Failure: -1
 *
 * Programmer: Larry Knox
 *             February 14, 2012
 *
 *-------------------------------------------------------------------------
 */
static int write_fillgr_time_attrs(hid_t dset_id, granule_p_t granule)
{
    hid_t attr, atype, aspace, acpl;   /* NOTE(review): atype/aspace/acpl/size unused */
    hsize_t size;
    int status = SUCCEED;
    unsigned long long ietimestamp;
    char begin_hour[3], end_hour[3];
    char ending_date[NAGG_DATE_size+1];
    char *UTC;

    /* NOTE(review): 'attr' is closed only on write failure below — it leaks
       on the success paths. */
    ietimestamp = granule->granule_start_time_IET;
    if ((attr = H5Aopen(dset_id, "N_Beginning_Time_IET", H5P_DEFAULT)) < 0) {
        NAGG_ERROR("write_fillgr_time_attrs(): Failed to open attribute N_Beginning_Time_IET for writing\n");
        status = FAIL;
        goto done;
    }
    else if ((status = H5Awrite(attr, H5T_NATIVE_ULLONG, &ietimestamp)) < 0) {
        fprintf(stderr, "Failed to write attribute N_Beginning_Time_IET.\n");
        H5Aclose(attr);
        status = FAIL;
    }
    ietimestamp = granule->granule_end_time_IET;
    if ((attr = H5Aopen(dset_id, "N_Ending_Time_IET", H5P_DEFAULT)) < 0) {
        NAGG_ERROR("write_fillgr_time_attrs(): Failed to open attribute N_Ending_Time_IET for writing\n");
        status = FAIL;
        goto done;
    }
    else if ((status = H5Awrite(attr, H5T_NATIVE_ULLONG, &ietimestamp)) < 0) {
        /* NOTE(review): message names N_Beginning_Time_IET but this write
           is for N_Ending_Time_IET. */
        fprintf(stderr, "Failed to write attribute N_Beginning_Time_IET.\n");
        H5Aclose(attr);
        status = FAIL;
    }
    if ((status = update_string_attribute(dset_id, "Beginning_Date", granule->beginning_date)) < 0) {
        NAGG_ERROR("write_fillgr_time_attrs(): Failed to add attribute Beginning_Date.\n");
        goto done;
    }
    if ((status = update_string_attribute(dset_id, "Beginning_Time", granule->beginning_time)) < 0) {
        /* NOTE(review): message says Beginning_Date for this and the next
           attribute — reproduced unchanged. */
        NAGG_ERROR("write_fillgr_time_attrs(): Failed to add attribute Beginning_Date.\n");
        goto done;
    }
    if ((status = update_string_attribute(dset_id, "Ending_Time", granule->ending_time)) < 0) {
        NAGG_ERROR("write_fillgr_time_attrs(): Failed to add attribute Beginning_Date.\n");
        goto done;
    }
    /* write "Ending_Date" attribute.  The ending date will be the same as
       the beginning date unless the granule straddles midnight, in which
       case the Ending_Time (00) hour will be less than the Beginning_Time
       hour (23).  Beginning_Time and Ending_Time are strings with the
       format HHMMSS.mmmmmmZ. */
    HDstrncpy(begin_hour, granule->beginning_time, 2);
    begin_hour[2] = '\0';
    HDstrncpy(end_hour, granule->ending_time, 2);
    end_hour[2] = '\0';
    if(atoi(begin_hour) > atoi(end_hour)) {
        /* Get Ending_Date from end_time_IET */
        UTC = iet2utc(granule->granule_end_time_IET);
        if (NULL==UTC){
            NAGG_ERROR("write_fillgr_time_attrs(): Failed to convert end_time to ending time");
            status = FAIL;
            goto done;
        }
        /* first 8 chars of the UTC string are YYYYMMDD */
        sprintf(ending_date, "%8.8s", UTC);
        if ((status = update_string_attribute(dset_id, "Ending_Date", ending_date)) < 0) {
            NAGG_ERROR("write_fillgr_time_attrs(): Failed to write attribute Ending_Date.\n");
            goto done;
        }
    }
    else {
        /* Ending_Date is the same as Beginning_Date */
        if ((status = update_string_attribute(dset_id, "Ending_Date", granule->beginning_date)) < 0) {
            NAGG_ERROR("write_fillgr_time_attrs(): Failed to write attribute Beginning_Date.\n");
            goto done;
        }
    }
done:
    return status;
}

/*-------------------------------------------------------------------------
 * Function: write_fill_granule_attrs
 *
 * Purpose: Functor to write attributes of an object found by H5Aiterate2.
 *
 * Note:
 *
 * Return: Success: 0
 *         Failure: -1
 *
 * Programmer: Larry Knox
 *             December 15, 2011
 *
 *-------------------------------------------------------------------------
 */
static int write_fill_granule_attrs(hid_t loc_id, const char *attr_name, const H5A_info_t *ainfo, void *op_data)
{
    hid_t attr_in, attr_out, atype, atype_mem, aspace, acpl;
    void *buf;
    void *sbuf = NULL;
    hsize_t size;
    hssize_t attrsize;     /* NOTE(review): computed below but never used */
    int status = SUCCEED;
    int rank = 2;
    hsize_t dims[2] = {1,1};
    /* fill values from Common Data Format Control Book - External Volume V -
       Metadata, Table 4.4.1-1, p 75 */
    /* NOTE(review): these fill values are written through atype_mem (the
       attribute's native type), so a narrower attribute type reads only the
       first bytes of these wider locals — correct only on little-endian
       hosts; also the variable names do not all match their widths (e.g.
       ucval is an unsigned short used for 8-bit types). Verify. */
    unsigned short ucval = 249;
    unsigned int uival = 65529;
    unsigned long long ullval = 993;
    int ival = -993;
    float fval = -999.3;

    if (H5Aexists_by_name(*(hid_t *)op_data, ".", attr_name, H5P_DEFAULT)>0) {
        /* attribute already exists on the target; value will not be written */
        status = 0;
        goto done;
    }
    else if((attr_in = H5Aopen(loc_id, attr_name, H5P_DEFAULT)) < 0) {
        fprintf(stderr, "Failed to open attribute %s.\n", attr_name);
        status = FAIL;
        goto done;
    }
    else if(( atype = H5Aget_type(attr_in))< 0) {
        fprintf(stderr, "Failed to get type for attribute %s.\n", attr_name);
        status = FAIL;
    }
    else if ((atype_mem = H5Tget_native_type(atype, H5T_DIR_ASCEND)) < 0) {
        fprintf(stderr, "Failed to get type class for attribute %s.\n", attr_name);
        status = FAIL;
    }
    else if(( aspace = H5Screate_simple(rank, dims, NULL))< 0) {
        fprintf(stderr, "Failed to get dataspace for attribute %s.\n", attr_name);
        status = FAIL;
    }
    else if((acpl = H5Aget_create_plist(attr_in)) < 0) {
        fprintf(stderr, "Failed to get creation property list for attribute %s.\n", attr_name);
        status = FAIL;
    }
    else if((size = H5Aget_storage_size(attr_in)) == 0) {
        fprintf(stderr, "Failed to get storage size for attribute %s.\n", attr_name);
        status = FAIL;
    }
    else {
        attrsize = H5Sget_simple_extent_npoints(aspace);
        /* Create the attribute on the target, then pick a fill value buffer
           that matches the attribute's type class. */
        if ((attr_out = H5Acreate2(*(hid_t *)op_data, attr_name, atype, aspace, acpl, H5P_DEFAULT)) < 0) {
            fprintf(stderr, "Failed to create attribute %s.\n", attr_name);
            status = FAIL;
            goto done;
        }
        switch(H5Tget_class(atype)) {
            case H5T_INTEGER:
                if(H5Tequal(atype, H5T_STD_U8BE) == TRUE || H5Tequal(atype, H5T_STD_U8LE) == TRUE) {
                    buf = &ucval;
                }
                else if(H5Tequal(atype, H5T_STD_U64BE) == TRUE || H5Tequal(atype, H5T_STD_U64LE) == TRUE) {
                    buf = &ullval;
                }
                else if(H5Tequal(atype, H5T_STD_U32BE) == TRUE || H5Tequal(atype, H5T_STD_U32LE) == TRUE) {
                    buf = &uival;
                }
                else {
                    buf = &ival;
                }
                break;
            case H5T_FLOAT:
                buf = &fval;
                break;
            case H5T_STRING:
                /* compute and allocate memory for copy buffer */
                /* NOTE(review): size is the source attribute's storage size;
                   if it is smaller than 4 the sprintf below overflows sbuf.
                   Verify. */
                sbuf = HDmalloc (size);
                sprintf(sbuf, "%s", "N/A");
                buf = sbuf;
                break;
            default:
                fprintf(stderr, "Unknown type for attribute %s\n", attr_name);
                status = FAIL;
                goto done;
                break;
        }
        if ((status = H5Awrite(attr_out, atype_mem, buf)) < 0) {
            fprintf(stderr, "Failed to write attribute %s.\n", attr_name);
            status = FAIL;
        }
        H5Pclose(acpl);
        H5Sclose(aspace);
        H5Tclose(atype);
        H5Aclose(attr_out);
    }
    H5Aclose(attr_in);
done:
    if (sbuf)
        free(sbuf);
    return status;
}

/*-------------------------------------------------------------------------
 * Function: copy_attribute_value
 *
 * Purpose: Functor to copy attributes of an object found by H5Aiterate2.
* * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * December 15, 2011 * *------------------------------------------------------------------------- */ static int copy_attribute_value(hid_t in_loc_id, const char *attr_name_in, const char *attr_name_out, hid_t out_loc_id) { hid_t attr_in, attr_out, atype; void *buf; hsize_t size; int status; if((attr_in = H5Aopen(in_loc_id, attr_name_in, H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open attribute %s.\n", attr_name_in); status = FAIL; goto done; } else if(( atype = H5Aget_type(attr_in))< 0) { fprintf(stderr, "Failed to get type for attribute %s.\n", attr_name_in); status = FAIL; } else if((size = H5Aget_storage_size(attr_in)) == 0) { fprintf(stderr, "Failed to get storage size for attribute %s.\n", attr_name_in); status = FAIL; } else { /* compute and allocate memory for copy buffer */ buf = HDmalloc (size); if ((status = H5Aread(attr_in, atype, buf)) < 0) { fprintf(stderr, "Failed to read attribute %s.\n", attr_name_in); status = FAIL; goto done; } else if ((attr_out = H5Aopen(out_loc_id, attr_name_out, H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to open attribute %s.\n", attr_name_out); status = FAIL; goto done; } else if ((status = H5Awrite(attr_out, atype, buf)) < 0) { fprintf(stderr, "Failed to write attribute %s.\n", attr_name_out); } H5Tclose(atype); H5Aclose(attr_out); free(buf); } H5Aclose(attr_in); done: return status; } /*------------------------------------------------------------------------- * Function: print_links * * Purpose: Debug function to print names of objects in a group. 
* * Note: * * Return: Success: 0 * Failure: -1 * * Programmer: Larry Knox * December 15, 2011 * *------------------------------------------------------------------------- */ static int print_links(hid_t group_id, const char *name, const H5L_info_t *info, void *op_data) { int status; H5O_info_t infobuf; if((status = H5Oget_info_by_name (group_id, name, &infobuf, H5P_DEFAULT)) < 0) { fprintf(stderr, "Failed to get info for %s.\n", name); goto done; } switch (infobuf.type) { case H5O_TYPE_GROUP: printf (" Group: %s\n", name); break; case H5O_TYPE_DATASET: printf (" Dataset: %s\n", name); break; case H5O_TYPE_NAMED_DATATYPE: printf (" Datatype: %s\n", name); break; default: printf ( " Unknown: %s\n", name); } done: return status; } /* This function is copied from nagg_select_granules.c. If we don't add ending_date to the granule_t structure, the function should probably be made public there or moved to a common utility functions file. */ /* Convert IET time (in microsecons) to UTC format. * Return value: * If succeeded, return a char string that points to the UTC format; * otherwise, return NULL. * Note that the return value points to a statically allocated string which * might be overwritten by subsequent calls to iet2utc. */ #define IET_UNIX_diff 378691234L; /* Difference in seconds between IET */ /* and Unix time */ #define UTC_SIZE 22 /* YYYYMMDD.HHMMSS.ssssss */ #define million_microsec 1000000 /* one million microsec = 1 sec */ static char utc_string[UTC_SIZE+1]; static char *iet2utc(iet_t iet_time) { time_t unixtime; struct tm *t; unixtime = iet_time/million_microsec - IET_UNIX_diff; t = gmtime(&unixtime); sprintf(utc_string, "%4d%02d%02d.%02d%02d%02d.%06d", t->tm_year+1900, t->tm_mon+1, t->tm_mday, t->tm_hour, t->tm_min, t->tm_sec, iet_time-(iet_time/million_microsec)*million_microsec); #ifdef DEBUG printf("utc_string=%s\n", utc_string); #endif return(utc_string); }