bes Updated for version 3.20.13
hdfdesc.cc
Go to the documentation of this file.
1
5// This file is part of the hdf4 data handler for the OPeNDAP data server.
6// The code includes the support of HDF-EOS2 and NASA HDF4 files that follow CF.
7// Copyright (c) 2008-2012 The HDF Group.
8// Author: MuQun Yang <myang6@hdfgroup.org>
9// Author: Hyo-Kyung Lee <hyoklee@hdfgroup.org>
10//
11// Copyright (c) 2005 OPeNDAP, Inc.
12// Author: James Gallagher <jgallagher@opendap.org>
13//
14// This is free software; you can redistribute it and/or modify it under the
15// terms of the GNU Lesser General Public License as published by the Free
16// Software Foundation; either version 2.1 of the License, or (at your
17// option) any later version.
18//
19// This software is distributed in the hope that it will be useful, but
20// WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
21// or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
22// License for more details.
23//
24// You should have received a copy of the GNU Lesser General Public License
25// along with this software; if not, write to the Free Software Foundation,
26// Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
27//
28// You can contact OPeNDAP, Inc. at PO Box 112, Saunderstown, RI. 02874-0112.
29
31// Copyright 1996, by the California Institute of Technology.
32// ALL RIGHTS RESERVED. United States Government Sponsorship
33// acknowledged. Any commercial use must be negotiated with the
34// Office of Technology Transfer at the California Institute of
35// Technology. This software may be subject to U.S. export control
36// laws and regulations. By accepting this software, the user
37// agrees to comply with all applicable U.S. export laws and
38// regulations. User has the responsibility to obtain export
39// licenses, or other export authority as may be required before
40// exporting such information to foreign countries or providing
41// access to foreign persons.
42
43// Author: Todd Karakashian, NASA/Jet Propulsion Laboratory
44// Todd.K.Karakashian@jpl.nasa.gov
45//
46//
48
49#include "config.h"
50#include "config_hdf.h"
51
52#include <cstdio>
53#include <cassert>
54#include <libgen.h>
55
56// STL includes
57#include <string>
58#include <fstream>
59#include <iostream>
60#include <sstream>
61#include <algorithm>
62#include <numeric>
63#include <functional>
64
65
66// Include this on linux to suppress an annoying warning about multiple
67// definitions of MIN and MAX.
68#ifdef HAVE_SYS_PARAM_H
69#include <sys/param.h>
70#endif
71
72#include <unistd.h>
73#include <sys/types.h>
74#include <dirent.h>
75#include <iomanip>
76#include <cerrno>
77
78
79// HDF and HDFClass includes
80#include <mfhdf.h>
81
82// DODS includes
83#include <libdap/DDS.h>
84#include <libdap/DAS.h>
85#include <libdap/escaping.h>
86#include <libdap/parser.h>
87#include <libdap/InternalErr.h>
88#include <libdap/debug.h>
89
90#include <BESDebug.h>
91#include <BESLog.h>
92
93#include "HDF4RequestHandler.h"
94// DODS/HDF includes for the default option only
95#include "hcstream.h"
96#include "hdfclass.h"
97#include "hcerr.h"
98#include "dhdferr.h"
99#include "HDFArray.h"
100#include "HDFSequence.h"
101#include "HDFTypeFactory.h"
102#include "HDFGrid.h"
103#include "dodsutil.h"
104#include "hdf-dods.h"
105#include "hdf-maps.h"
106
107// DAP2 doesn't have signed char type, the signed char will be converted to int32 with this macro.
108#define SIGNED_BYTE_TO_INT32 1
109
110// HDF datatype headers for both the default and the CF options
111#include "HDFByte.h"
112#include "HDFInt16.h"
113#include "HDFUInt16.h"
114#include "HDFInt32.h"
115#include "HDFUInt32.h"
116#include "HDFFloat32.h"
117#include "HDFFloat64.h"
118#include "HDFStr.h"
119
120// Routines that handle SDS and Vdata attributes for the HDF-EOS2 objects in a hybrid HDF-EOS2 file for the CF option
121#include "HE2CF.h"
122
123// HDF4 for the CF option(EOS2 will treat as pure HDF4 objects if the HDF-EOS2 library is not configured in)
124#include "HDFSP.h"
125#include "HDFSPArray_RealField.h"
126#include "HDFSPArrayGeoField.h"
127#include "HDFSPArrayMissField.h"
128#include "HDFSPArrayAddCVField.h"
129#include "HDFSPArray_VDField.h"
130#include "HDFCFStrField.h"
131#include "HDFCFStr.h"
132#include "HDFCFUtil.h"
133
134// HDF-EOS2 (including the hybrid) will be handled as HDF-EOS2 objects if the HDF-EOS2 library is configured in
135#ifdef USE_HDFEOS2_LIB
136#include "HDFEOS2.h"
137#include "HDFEOS2Array_RealField.h"
138#include "HDFEOS2ArrayGridGeoField.h"
139#include "HDFEOS2ArraySwathGeoField.h"
140#include "HDFEOS2ArrayMissField.h"
141#include "HDFEOS2ArraySwathDimMapField.h"
142#include "HDFEOS2ArraySwathGeoMultiDimMapField.h"
143#include "HDFEOS2ArraySwathGeoDimMapExtraField.h"
144#include "HDFEOS2CFStr.h"
145#include "HDFEOS2CFStrField.h"
146#include "HDFEOS2HandleType.h"
147#endif
148
149
150using namespace std;
151
152// Added 5/7/09; This bug (#1163) was fixed in July 2008 except for this
153// handler. jhrg
154#define ATTR_STRING_QUOTE_FIX
155
// Render any streamable value as its text representation.
// Used throughout this handler to build names/attributes from numbers.
template<class T>
std::string num2string(T value)
{
    std::ostringstream text;
    text << value;
    return text.str();
}
162
163// Glue routines declared in hdfeos.lex
164void hdfeos_switch_to_buffer(void *new_buffer);
165void hdfeos_delete_buffer(void * buffer);
166void *hdfeos_string(const char *yy_str);
167
168struct yy_buffer_state;
169yy_buffer_state *hdfeos_scan_string(const char *str);
170extern int hdfeosparse(libdap::parser_arg *arg); // defined in hdfeos.tab.c
171
172// Functions for the default option
173void AddHDFAttr(DAS & das, const string & varname,
174 const vector < hdf_attr > &hav);
175void AddHDFAttr(DAS & das, const string & varname,
176 const vector < string > &anv);
177
178static void build_descriptions(DDS & dds, DAS & das,
179 const string & filename);
180static void SDS_descriptions(sds_map & map, DAS & das,
181 const string & filename);
182static void Vdata_descriptions(vd_map & map, DAS & das,
183 const string & filename);
184static void Vgroup_descriptions(DDS & dds, DAS & das,
185 const string & filename, sds_map & sdmap,
186 vd_map & vdmap, gr_map & grmap);
187static void GR_descriptions(gr_map & map, DAS & das,
188 const string & filename);
189static void FileAnnot_descriptions(DAS & das, const string & filename);
190static vector < hdf_attr > Pals2Attrs(const vector < hdf_palette > palv);
191static vector < hdf_attr > Dims2Attrs(const hdf_dim dim);
192
193void read_das(DAS & das, const string & filename);
194void read_dds(DDS & dds, const string & filename);
195
196// For the CF option
197// read_dds for HDF4 files. Some NASA non-eos2 HDF4 products are handled specially to follow the CF conventions.
198bool read_dds_hdfsp(DDS & dds, const string & filename,int32 sdfd, int32 fileid,const HDFSP::File*h4file);
199bool read_das_hdfsp(DAS & das, const string & filename,int32 sdfd, int32 fileid,HDFSP::File**h4filepptr);
200
201// read_dds for special NASA HDF-EOS2 hybrid(non-EOS2) objects
202bool read_dds_hdfhybrid(DDS & dds, const string & filename,int32 sdfd, int32 fileid,const HDFSP::File*h4file);
203bool read_das_hdfhybrid(DAS & das, const string & filename,int32 sdfd, int32 fileid,HDFSP::File**h4filepptr);
204
205// Functions to read special 1-d HDF-EOS2 grid. This grid can be built up quickly.
206#if 0
207//bool read_dds_special_1d_grid(DDS &dds, HDFSP::File *spf, const string & filename,int32 sdfd, int32 fileid);
208#endif
209
210bool read_dds_special_1d_grid(DDS &dds, const HDFSP::File *spf, const string & filename,int32 sdfd,bool can_cache);
211bool read_das_special_eos2(DAS &das,const string & filename,int32 sdid, int32 fileid,bool ecs_metadata,HDFSP::File**h4filepptr);
212bool read_das_special_eos2_core(DAS &das, const HDFSP::File *spf, const string & filename,bool ecs_metadata);
213
214void read_das_sds(DAS & das, const string & filename,int32 sdfd, bool ecs_metadata,HDFSP::File**h4fileptr);
215void read_dds_sds(DDS &dds, const string & filename,int32 sdfd, HDFSP::File*h4file,bool dds_set_cache);
216
217void change_das_mod08_scale_offset(DAS & das, const HDFSP::File *spf);
218
219// Functions to handle SDS fields for the CF option.
220void read_dds_spfields(DDS &dds,const string& filename,const int sdfd,const HDFSP::SDField *spsds, SPType sptype);
221
222// Functions to handle Vdata fields for the CF option.
223void read_dds_spvdfields(DDS &dds,const string& filename, const int fileid,int32 vdref, int32 numrec,HDFSP::VDField *spvd);
224
225// Check if this is a special HDF-EOS2 file that can be handled by HDF4 directly. Using HDF4 only can improve performance.
226int check_special_eosfile(const string&filename,string&grid_name,int32 sdfd);
227
228
229// The following blocks only handle HDF-EOS2 objects based on HDF-EOS2 libraries.
230#ifdef USE_HDFEOS2_LIB
231
232// Parse HDF-EOS2's ECS metadata(coremetadata etc.)
233void parse_ecs_metadata(DAS &das,const string & metaname, const string &metadata);
234
235// read_dds for HDF-EOS2
236#if 0
238#endif
239
240// We find some special HDF-EOS2(MOD08_M3) products that provide coordinate variables
241// without using the dimension scales. We will handle this in a special way.
242// So change the return value of read_dds_hdfeos2 to represent different cases
243// 0: general non-EOS2 pure HDF4
244// 1: HDF-EOS2 hybrid
245// 2: MOD08_M3
246// HDF-EOS2 but no need to use HDF-EOS2 lib: no real dimension scales but have CVs for every dimension, treat differently
247// 3: AIRS version 6
248// HDF-EOS2 but no need to use HDF-EOS2 lib:
249// have dimension scales but don’t have CVs for every dimension, also need to condense dimensions, treat differently
250// 4. Expected AIRS level 3 or level 2
251// HDF-EOS2 but no need to use HDF-EOS2 lib: Have dimension scales for all dimensions
252// 5. MERRA
253// Special handling for MERRA file
254int read_dds_hdfeos2(DDS & dds, const string & filename,int32 sdfd, int32 gridfd, int32 swathfd,const HDFSP::File*h4file,HDFEOS2::File*eosfile);
255
256// read das for HDF-EOS2
257int read_das_hdfeos2(DAS & das, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,bool ecs_metadata,HDFSP::File**h4filepptr,HDFEOS2::File**eosfilepptr);
258
259
260// read_dds for one grid or swath
// Map one HDF-EOS2 Grid or Swath dataset into DAP DDS variables.
//
// Parameters:
//   dds            - the DDS the variables are appended to
//   filename       - path of the HDF-EOS2 file being served
//   dataset        - the grid or swath dataset to map
//   grid_or_swath  - 0: grid, 1: swath (any other value throws InternalErr)
//   ownll          - grid case: this grid provides its own lat/lon
//   sotype         - scale/offset rule type (MODIS non-CF rules)
//   multi_dmap     - swath case: the multi-dimension-map handling is used
//   sdfd, gridfd, swathfd - already-opened SD, grid and swath interface IDs
//
// For every field a DAP scalar prototype (bt) and a handler-specific array
// object are allocated, dimensions are appended, the variable is added to
// the DDS (dds.add_var copies its argument), and the temporaries deleted.
261void read_dds_hdfeos2_grid_swath(DDS &dds, const string&filename, HDFEOS2::Dataset *dataset, int grid_or_swath,bool ownll, SOType sotype,bool multi_dmap,
 262 int32 sdfd, int32 gridfd,int32 swathfd)
263{
 264
 265 BESDEBUG("h4","Coming to read_dds_hdfeos2_grid_swath "<<endl);
 266 // grid_or_swath - 0: grid, 1: swath
 267 if(grid_or_swath < 0 || grid_or_swath > 1)
 268 throw InternalErr(__FILE__, __LINE__, "The current type should be either grid or swath");
 269
 271
 272 // Declare dim. map entry. The definition of dimmap_entry can be found at HDFCFUtil.h.
 273 vector<struct dimmap_entry> dimmaps;
 275 //The extra dim map file name (lat/lon of swath with dim. map) can be found in a separate HDF file.
 276 string modis_geofilename="";
 277 bool geofile_has_dimmap = false;
 279 // 1. Obtain dimension map info and stores the info. to dimmaps.
 280 // 2. Check if MODIS swath geo-location HDF-EOS2 file exists for the dimension map case of MODIS Swath
 281 if (grid_or_swath == 1)
 282 HDFCFUtil::obtain_dimmap_info(filename,dataset,dimmaps,modis_geofilename,geofile_has_dimmap);
 284
 285
// Data fields plus (for swaths) the geolocation fields are mapped below.
 287 const vector<HDFEOS2::Field*>& fields = dataset->getDataFields();
 288 vector<HDFEOS2::Field*> all_fields = fields;
 289 vector<HDFEOS2::Field*>::const_iterator it_f;
 291 if(1 == grid_or_swath) {
 292 auto sw = static_cast<HDFEOS2::SwathDataset *>(dataset);
 293 const vector<HDFEOS2::Field*>geofields = sw->getGeoFields();
 294 for (it_f = geofields.begin(); it_f != geofields.end(); it_f++)
 295 all_fields.push_back(*it_f);
 296 }
 300 for(it_f = all_fields.begin(); it_f != all_fields.end(); it_f++)
 301 {
 302 BESDEBUG("h4","New field Name " <<(*it_f)->getNewName()<<endl);
 304 BaseType *bt=nullptr;
 306 // Whether the field is real field,lat/lon field or missing Z-dimension field
// fieldtype meanings used by the branches below: 0 = general data field,
// 1 = latitude, 2 = longitude, 3/5 = existing non-lat/lon coordinate
// variables, 4 = missing Z-dimension field.
// NOTE(review): names inferred from the branches in this function; confirm against HDFEOS2.h.
 307 int fieldtype = (*it_f)->getFieldType();
 309 // Check if the datatype needs to be changed.This is for MODIS data that needs to apply scale and offset.
 310 // ctype_field_namelist is assigned in the function read_das_hdfeos2.
 311 bool changedtype = false;
 312 for (auto const &cf:ctype_field_namelist){
 313 if (cf == (*it_f)->getNewName()){
 314 changedtype = true;
 315 break;
 316 }
 317 }
// Map the HDF4 datatype to a DAP scalar prototype (bt).
// HANDLE_CASE2 additionally honors 'changedtype': real data fields
// (fieldtype==0) whose type was changed by scale/offset handling
// are promoted to Float32.
 319 switch((*it_f)->getType())
 320 {
 322#define HANDLE_CASE2(tid, type) \
 323 case tid: \
 324 if(true == changedtype && fieldtype==0) \
 325 bt = new (HDFFloat32) ((*it_f)->getNewName(), (dataset)->getName()); \
 326 else \
 327 bt = new (type)((*it_f)->getNewName(), (dataset)->getName()); \
 328 break;
 330#define HANDLE_CASE(tid, type)\
 331 case tid: \
 332 bt = new (type)((*it_f)->getNewName(), (dataset)->getName()); \
 333 break;
 334 HANDLE_CASE(DFNT_FLOAT32, HDFFloat32)
 335 HANDLE_CASE(DFNT_FLOAT64, HDFFloat64)
 336 HANDLE_CASE(DFNT_CHAR8,HDFStr)
 337#ifndef SIGNED_BYTE_TO_INT32
 338 HANDLE_CASE2(DFNT_INT8, HDFByte)
 339#else
 340 HANDLE_CASE2(DFNT_INT8,HDFInt32)
 341#endif
 342 HANDLE_CASE2(DFNT_UINT8, HDFByte)
 343 HANDLE_CASE2(DFNT_INT16, HDFInt16)
 344 HANDLE_CASE2(DFNT_UINT16,HDFUInt16)
 345 HANDLE_CASE2(DFNT_INT32, HDFInt32)
 346 HANDLE_CASE2(DFNT_UINT32, HDFUInt32)
 347 HANDLE_CASE2(DFNT_UCHAR8, HDFByte)
 348 default:
 349 throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
 350#undef HANDLE_CASE
 351#undef HANDLE_CASE2
 352 }
// Every case above either assigns bt or throws, so bt is non-null
// here; the check is defensive.
 354 if(bt)
 355 {
 356 const vector<HDFEOS2::Dimension*>& dims= (*it_f)->getCorrectedDimensions();
 357 vector<HDFEOS2::Dimension*>::const_iterator it_d;
 359 // Char array maps to DAP string.
 360 if(DFNT_CHAR == (*it_f)->getType()) {
// A rank-N char array maps to a rank-(N-1) DAP String variable:
// the last (fastest-varying) dimension is collapsed into the string.
 362 if((*it_f)->getRank() >1) {
 364 HDFEOS2CFStrField * ar = nullptr;
 366 try {
 368 ar = new HDFEOS2CFStrField(
 369 (*it_f)->getRank() -1,
 370 (grid_or_swath ==0)?gridfd:swathfd,
 371 filename,
 372 dataset->getName(),
 373 (*it_f)->getName(),
 374 grid_or_swath,
 375 (*it_f)->getNewName(),
 376 bt);
 377 }
 378 catch(...) {
 379 delete bt;
 380 throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
 381 }
 382 for(it_d = dims.begin(); it_d != dims.begin()+dims.size()-1; it_d++){
 383 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 384 }
 386 dds.add_var(ar);
 387 delete bt;
 388 if(ar != nullptr)
 389 delete ar;
 391 }
 392
 393 else {
 394 HDFEOS2CFStr * sca_str = nullptr;
 395 try {
 397 sca_str = new HDFEOS2CFStr(
 398 (grid_or_swath ==0)?gridfd:swathfd,
 399 filename,
 400 dataset->getName(),
 401 (*it_f)->getName(),
 402 (*it_f)->getNewName(),
 403 grid_or_swath);
 404 }
 405 catch(...) {
 406 delete bt;
 407 throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
 408 }
 409 dds.add_var(sca_str);
 410 delete bt;
 411 delete sca_str;
 412 }
 414 }
 416 // For general variables and non-lat/lon existing coordinate variables
 417 else if(fieldtype == 0 || fieldtype == 3 || fieldtype == 5) {
 419 // grid
 420 if(grid_or_swath==0){
 421 HDFEOS2Array_RealField *ar = nullptr;
 422 ar = new HDFEOS2Array_RealField(
 423 (*it_f)->getRank(),
 424 filename,false,sdfd,gridfd,
 425 dataset->getName(), "", (*it_f)->getName(),
 426 sotype,
 427 (*it_f)->getNewName(), bt);
 428 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 429 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 430 dds.add_var(ar);
 431 delete bt;
 432 delete ar;
 433 }
 434 // swath
 435 else if(grid_or_swath==1){
 437 string tempfieldname = (*it_f)->getName();
 439 // This swath uses the dimension map,but not the multi-dim. map we can handle.
 440 if((*it_f)->UseDimMap() && false == multi_dmap) {
 441 // We also find that a separate geolocation file exists
 443 if (!modis_geofilename.empty()) {
 445 // This field can be found in the geo-location file. The field name may be corrected.
 446 if (true == HDFCFUtil::is_modis_dimmap_nonll_field(tempfieldname)) {
 448 if(false == geofile_has_dimmap) {
 450 // Here we have to use HDFEOS2Array_RealField since the field may
 451 // need to apply scale and offset equation.
 452 // MODIS geolocation swath name is always MODIS_Swath_Type_GEO.
 453 // We can improve the handling of this by not hard-coding the swath name
 454 // in the future. KY 2012-08-16
 455 HDFEOS2Array_RealField *ar = nullptr;
 456 ar = new HDFEOS2Array_RealField(
 457 (*it_f)->getRank(),
 458 modis_geofilename,
 459 true,
 460 sdfd,
 461 swathfd,
 462 "",
 463 "MODIS_Swath_Type_GEO",
 464 tempfieldname,
 465 sotype,
 466 (*it_f)->getNewName(),
 467 bt);
 468
 469 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 470 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 471 dds.add_var(ar);
 472 delete bt;
 473 delete ar;
 474 }
 475 else {// Use dimension maps in the dimension map file
 477 HDFEOS2ArraySwathDimMapField * ar = nullptr;
 479 // SET dimmaps to empty.
 480 // This is very key since we are using the geolocation file for the new information.
 481 // The dimension map info. will be obtained when the data is reading. KY 2013-03-13
 483 dimmaps.clear();
 484 ar = new HDFEOS2ArraySwathDimMapField(
 485 (*it_f)->getRank(),
 486 modis_geofilename,
 487 true,
 488 sdfd,
 489 swathfd,
 490 "",
 491 "MODIS_Swath_Type_GEO",
 492 tempfieldname,
 493 dimmaps,
 494 sotype,
 495 (*it_f)->getNewName(),
 496 bt);
 497 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 498 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 499 dds.add_var(ar);
 500 delete bt;
 501 delete ar;
 502 }
 503 }
 504 else { // This field cannot be found in the dimension map file.
 506 HDFEOS2ArraySwathDimMapField * ar = nullptr;
 508 // Even if the dimension map file exists, it only applies to some
 509 // specific data fields, if this field doesn't belong to these fields,
 510 // we should still apply the dimension map rule to these fields.
 512 ar = new HDFEOS2ArraySwathDimMapField(
 513 (*it_f)->getRank(),
 514 filename,
 515 false,
 516 sdfd,
 517 swathfd,
 518 "",
 519 dataset->getName(),
 520 tempfieldname,
 521 dimmaps,
 522 sotype,
 523 (*it_f)->getNewName(),
 524 bt);
 525 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 526 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 527 dds.add_var(ar);
 528 delete bt;
 529 delete ar;
 530
 531
 532 }
 533 }
 534 else {// No dimension map file
 535 HDFEOS2ArraySwathDimMapField * ar = nullptr;
 536 ar = new HDFEOS2ArraySwathDimMapField(
 537 (*it_f)->getRank(),
 538 filename,
 539 false,
 540 sdfd,
 541 swathfd,
 542 "",
 543 dataset->getName(),
 544 tempfieldname,
 545 dimmaps,
 546 sotype,
 547 (*it_f)->getNewName(),
 548 bt);
 549 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 550 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 551 dds.add_var(ar);
 552 delete bt;
 553 delete ar;
 554 }
 555 }
 556 else { // No dimension map
 558 HDFEOS2Array_RealField * ar = nullptr;
 559 ar = new HDFEOS2Array_RealField(
 560 (*it_f)->getRank(),
 561 filename,
 562 false,
 563 sdfd,
 564 swathfd,
 565 "",
 566 dataset->getName(),
 567 tempfieldname,
 568 sotype,
 569 (*it_f)->getNewName(),
 570 bt);
 571 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 572 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 573 dds.add_var(ar);
 574 delete bt;
 575 delete ar;
 576 }
 577 }
 578 else {
 579 delete bt;
 580 throw InternalErr(__FILE__, __LINE__, "The current type should be either grid or swath");
 581 }
 582 }
 583
 584 // For latitude and longitude
 585 else if(fieldtype == 1 || fieldtype == 2) {
 587 // For grid
 588 if(grid_or_swath==0) {
 590 HDFEOS2ArrayGridGeoField *ar = nullptr;
 591 //int fieldtype = (*it_f)->getFieldType();
 592 bool ydimmajor = (*it_f)->getYDimMajor();
 593 bool condenseddim = (*it_f)->getCondensedDim();
 594 bool speciallon = (*it_f)->getSpecialLon();
 595 int specialformat = (*it_f)->getSpecialLLFormat();
 597 ar = new HDFEOS2ArrayGridGeoField(
 598 (*it_f)->getRank(),
 599 fieldtype,
 600 ownll,
 601 ydimmajor,
 602 condenseddim,
 603 speciallon,
 604 specialformat,
 605 /*fieldcache,*/
 606 filename,
 607 gridfd,
 608 dataset->getName(),
 609 (*it_f)->getName(),
 610 (*it_f)->getNewName(),
 611 bt);
 612
 613 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 614 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 615 dds.add_var(ar);
 616 delete bt;
 617 delete ar;
 618 }
 619
 620 // We encounter a very special MODIS case (MOD/MYD ATML2 files),
 621 // Latitude and longitude fields are located under data fields.
 622 // So include this case. KY 2010-7-12
 623 // We also encounter another special case(MOD11_L2.A2012248.2355.041.2012249083024.hdf),
 624 // the latitude/longitude with dimension map is under the "data fields".
 625 // So we have to consider this. KY 2012-09-24
 627 else if(grid_or_swath ==1) {
 629 if(true == multi_dmap) {
 630 if((*it_f)->getRank() !=2)
 631 throw InternalErr(__FILE__, __LINE__, "For the multi-dimmap case, the field rank must be 2.");
 632 int dim0size = (dims[0])->getSize();
 633 int dim1size = (dims[1])->getSize();
 634 int dim0offset = (*it_f)->getLLDim0Offset();
 635 int dim1offset = (*it_f)->getLLDim1Offset();
 636 int dim0inc = (*it_f)->getLLDim0Inc();
 637 int dim1inc = (*it_f)->getLLDim1Inc();
 638 string fieldname;
 639 if(fieldtype == 1)
 640 fieldname = "Latitude";
 641 else
 642 fieldname = "Longitude";
 643#if 0
 644cerr<<"hdfdesc: newfieldname is "<<(*it_f)->getNewName() <<endl;
 645cerr<<"hdfdesc: dim0size "<<dim0size <<endl;
 646cerr<<"hdfdesc: dim1size "<<dim1size <<endl;
 647cerr<<"hdfdesc: dim0offset "<<dim0offset <<endl;
 648cerr<<"hdfdesc: dim1offset "<<dim1offset <<endl;
 649cerr<<"hdfdesc: dim0inc "<<dim0inc <<endl;
 650cerr<<"hdfdesc: dim1inc "<<dim1inc <<endl;
 651#endif
 652
 653 HDFEOS2ArraySwathGeoMultiDimMapField * ar = nullptr;
 655 ar = new HDFEOS2ArraySwathGeoMultiDimMapField(
 656 (*it_f)->getRank(),
 657 filename,
 658 swathfd,
 659 dataset->getName(),
 660 fieldname,
 661 dim0size,
 662 dim0offset,
 663 dim0inc,
 664 dim1size,
 665 dim1offset,
 666 dim1inc,
 667 (*it_f)->getNewName(),
 668 bt);
 669
 670 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 671 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 672
 673 dds.add_var(ar);
 674 delete bt;
 675 delete ar;
 676 }
 677 else {
 678
 679 // Use Swath dimension map
 680 if((*it_f)->UseDimMap()) {
 682 // Have an extra HDF-EOS file for latitude and longitude
 683 if(!modis_geofilename.empty()) {
 685 if (false == geofile_has_dimmap) {
 686 HDFEOS2ArraySwathGeoDimMapExtraField *ar = nullptr;
 687 ar = new HDFEOS2ArraySwathGeoDimMapExtraField(
 688 (*it_f)->getRank(),
 689 modis_geofilename,
 690 (*it_f)->getName(),
 691 (*it_f)->getNewName(),
 692 bt);
 693 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 694 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 695 dds.add_var(ar);
 696 delete bt;
 697 delete ar;
 698 }
 699 else {
 700
 701 HDFEOS2ArraySwathDimMapField * ar = nullptr;
 703 // SET dimmaps to empty.
 704 // This is essential since we are using the geolocation file for the new information.
 705 // The dimension map info. will be obtained when the data is read. KY 2013-03-13
 706 dimmaps.clear();
 707 ar = new HDFEOS2ArraySwathDimMapField(
 708 (*it_f)->getRank(),
 709 modis_geofilename,
 710 true,
 711 sdfd,
 712 swathfd,
 713 "",
 714 "MODIS_Swath_Type_GEO",
 715 (*it_f)->getName(),
 716 dimmaps,
 717 sotype,
 718 (*it_f)->getNewName(),
 719 bt);
 720 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 721 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 722
 723 dds.add_var(ar);
 724 delete bt;
 725 delete ar;
 726 }
 727 }
 728 // Will interpolate by the handler
 729 else {
 730
 731 HDFEOS2ArraySwathDimMapField * ar = nullptr;
 732 ar = new HDFEOS2ArraySwathDimMapField(
 733 (*it_f)->getRank(),
 734 filename,
 735 false,
 736 sdfd,
 737 swathfd,
 738 "",
 739 dataset->getName(),
 740 (*it_f)->getName(),
 741 dimmaps,
 742 sotype,
 743 (*it_f)->getNewName(),
 744 bt);
 745 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 746 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 747
 748 dds.add_var(ar);
 749 delete bt;
 750 delete ar;
 751 }
 752 }
 753 else {// No Dimension map
 755 HDFEOS2ArraySwathGeoField * ar = nullptr;
 756 ar = new HDFEOS2ArraySwathGeoField(
 757 (*it_f)->getRank(),
 758 filename,
 759 swathfd,
 760 dataset->getName(),
 761 (*it_f)->getName(),
 762 (*it_f)->getNewName(),
 763 bt);
 764
 765 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 766 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 767 dds.add_var(ar);
 768 delete bt;
 769 delete ar;
 770 }
 771 }
 772 }
 773 else {
 774 delete bt;
 775 throw InternalErr(__FILE__, __LINE__, "The current type should be either grid or swath");
 776 }
 777
 778 }
 779
 780 //Missing Z dimensional field
 781 else if(fieldtype == 4) {
 783 if((*it_f)->getRank()!=1){
 784 delete bt;
 785 throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
 786 }
 787
 788 int nelem = ((*it_f)->getCorrectedDimensions()[0])->getSize();
 789 HDFEOS2ArrayMissGeoField *ar = nullptr;
 790 ar = new HDFEOS2ArrayMissGeoField(
 791 (*it_f)->getRank(),
 792 nelem,
 793 (*it_f)->getNewName(),
 794 bt);
 795
 796 for(it_d = dims.begin(); it_d != dims.end(); it_d++)
 797 ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
 798
 799 dds.add_var(ar);
 800 delete bt;
 801 delete ar;
 802 }
 803 else {
 804 delete bt;
 805 throw InternalErr(__FILE__, __LINE__, "Encounter unsupported datatype or The field type should be between 0 and 5. ");
 806 }
 807
 808 }
 809 }
 810
811}
812
813// Build DDS for HDF-EOS2 only.
814//bool read_dds_hdfeos2(DDS & dds, const string & filename)
815int read_dds_hdfeos2(DDS & dds, const string & filename,int32 sdfd, int32 gridfd, int32 swathfd,const HDFSP::File*spf,HDFEOS2::File*f)
816{
817
818 BESDEBUG("h4","Coming to read_dds_hdfeos2 "<<endl);
819
820 // Set DDS dataset.
821 dds.set_dataset_name(basename(filename));
822
823 // There are some HDF-EOS2 files(MERRA) that should be treated
824 // exactly like HDF4 SDS files. We don't need to use HDF-EOS2 APIs to
825 // retrieve any information. In fact, treating them as HDF-EOS2 files
826 // will cause confusions and we may get wrong information.
827 // A quick fix is to check if the file name contains MERRA. KY 2011-3-4
828 // Find MERRA data, return 5, then just use HDF4 SDS code.
829 if((basename(filename).size() >=5) && ((basename(filename)).compare(0,5,"MERRA")==0))
830 return 5;
831
832 if(true == HDF4RequestHandler::get_enable_special_eos()) {
833
834 string grid_name;
835 int ret_val = check_special_eosfile(filename,grid_name,sdfd);
836
837 // These are AIRS-like products that use HDF4 SDS dimension scale perfectly.
838 // We originally thought that the AIRS version 6 products fall into this category, so we added this case.
839 // However, the current AIRS version 6 products still miss some dimension scales. So currently we don't
840 // find any products that support this case. Leave it for future use. KY 2015-06-03
841 if(4== ret_val)
842 return ret_val;
843
844
845 // Case 2 or 3 are MOD08M3 or AIRS version 6
846 if(2 == ret_val || 3 == ret_val) {
847
848 try {
849 read_dds_special_1d_grid(dds,spf,filename,sdfd,false);
850 } catch (...)
851 {
852 throw;
853 }
854 return ret_val;
855 }
856
857 }
858
859 // Special HDF-EOS2 file, doesn't use HDF-EOS2 file structure. so
860 // the file pointer passed from DAS is Null. return 0.
861 if( f == nullptr)
862 return 0;
863
864 //Some grids have one shared lat/lon pair. For this case,"onelatlon" is true.
865 // Other grids have their individual grids. We have to handle them differently.
866 // ownll is the flag to distinguish "one lat/lon pair" and multiple lat/lon pairs.
867 const vector<HDFEOS2::GridDataset *>& grids = f->getGrids();
868 bool ownll = false;
869 bool onelatlon = f->getOneLatLon();
870
871 // Set scale and offset type to the DEFAULT one.
872 SOType sotype = DEFAULT_CF_EQU;
873
874 // Iterate all the grids of this file and map them to DAP DDS.
875 for (const auto &gd:grids){
876
877 // Check if this grid provides its own lat/lon.
878 ownll = onelatlon?onelatlon:gd->getLatLonFlag();
879
880 // Obtain Scale and offset type. This is for MODIS products who use non-CF scale/offset rules.
881 sotype = gd->getScaleType();
882 try {
883 read_dds_hdfeos2_grid_swath(
884 dds, filename, static_cast<HDFEOS2::Dataset*>(gd), 0,ownll,sotype,false,sdfd,gridfd,swathfd);
885 // Add 1-D CF grid projection required coordinate variables.
886 // Currently only supports sinusoidal projection.
887 HDFCFUtil::add_cf_grid_cvs(dds,gd);
888 }
889 catch(...) {
890 throw;
891 }
892 }
893
894 // Obtain the multi dimmap flag.
895 bool multi_dmap = f->getMultiDimMaps();
896
897
898 // Iterate all the swaths of this file and map them to DAP DDS.
899 const vector<HDFEOS2::SwathDataset *>& swaths= f->getSwaths();
900 for (const auto &swath:swaths) {
901
902 // Obtain Scale and offset type. This is for MODIS products who use non-CF scale/offset rules.
903 sotype = swath->getScaleType();
904 try {
905 //No global lat/lon for multiple swaths
906 read_dds_hdfeos2_grid_swath(
907 dds, filename, static_cast<HDFEOS2::Dataset*>(swath), 1,false,sotype,multi_dmap,sdfd,gridfd,swathfd);
908 }
909 catch(...) {
910 throw;
911 }
912 }
913
914 // Clear the field name list of which the datatype is changed. KY 2012-8-1
915 // ctype_field_namelist is a global vector(see HDFEOS2HandleType.h for more description)
916 // Since the handler program is a continuously running service, this values of this global vector may
917 // change from one file to another. So clearing this vector each time when mapping DDS is over.
918 ctype_field_namelist.clear();
919
920 return 1;
921}
922
923
924// The wrapper of building DDS of non-EOS fields and attributes in a hybrid HDF-EOS2 file.
925//bool read_dds_hdfhybrid(DDS & dds, const string & filename,int32 sdfd, int32 fileid,int32 gridfd,int32 swathfd)
926bool read_dds_hdfhybrid(DDS & dds, const string & filename,int32 sdfd, int32 fileid,const HDFSP::File*f)
927
928{
929
930 BESDEBUG("h4","Coming to read_dds_hdfhybrid "<<endl);
931
932 // Set DDS dataset.
933 dds.set_dataset_name(basename(filename));
934
935 // Obtain non-EOS SDS fields.
936 const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
937
938 // Read SDS
939 for(const auto &sdfield:spsds){
940 try {
941 read_dds_spfields(dds,filename,sdfd,sdfield,f->getSPType());
942 }
943 catch(...) {
944 throw;
945 }
946 }
947
948 // Read Vdata fields.
949 // To speed up the performance for CERES data, we turn off some CERES vdata fields.
950
951 // Many MODIS and MISR products use Vdata intensively. To make the output CF compliant, we map
952 // each vdata field to a DAP array. However, this may cause the generation of many DAP fields. So
953 // we use the BES keys for users to turn on/off as they choose. By default, the key is turned on. KY 2012-6-26
954
955 if( true == HDF4RequestHandler::get_enable_hybrid_vdata()) {
956
957 for(const auto &vd:f->getVDATAs()) {
958 if(false == vd->getTreatAsAttrFlag()){
959 for(const auto &vdf:vd->getFields()) {
960 try {
961 read_dds_spvdfields(dds,filename,fileid, vd->getObjRef(),vdf->getNumRec(),vdf);
962 }
963 catch(...) {
964 throw;
965 }
966 }
967 }
968 }
969 }
970
971 return true;
972}
973
974
975// Build DAS for non-EOS objects in a hybrid HDF-EOS2 file.
976bool read_das_hdfhybrid(DAS & das, const string & filename,int32 sdfd, int32 fileid,HDFSP::File**fpptr)
977{
978
979 BESDEBUG("h4","Coming to read_das_hdfhybrid "<<endl);
980 // Declare a non-EOS file pointer
981 HDFSP::File *f = nullptr;
982 try {
983 // Read non-EOS objects in a hybrid HDF-EOS2 file.
984 f = HDFSP::File::Read_Hybrid(filename.c_str(), sdfd,fileid);
985 }
986 catch (HDFSP::Exception &e)
987 {
988 if(f!=nullptr)
989 delete f;
990 throw InternalErr(e.what());
991 }
992
993 // Remember the file pointer
994 *fpptr = f;
995
996 // First Added non-HDFEOS2 SDS attributes.
997 const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
998
999 for (const auto &spfield:spsds) {
1000
1001 // Use CF field name as the DAS table name.
1002 AttrTable *at = das.get_table(spfield->getNewName());
1003 if (!at)
1004 at = das.add_table(spfield->getNewName(), new AttrTable);
1005
1006 // Some fields have "long_name" attributes,so we have to use this attribute rather than creating our own "long_name"
1007 bool long_name_flag = false;
1008
1009 for (const auto &attr:spfield->getAttributes()) {
1010
1011 if(attr->getName() == "long_name") {
1012 long_name_flag = true;
1013 break;
1014 }
1015 }
1016
1017 if(false == long_name_flag)
1018 at->append_attr("long_name", "String", spfield->getName());
1019
1020 // Map all attributes to DAP DAS.
1021 for (const auto& attr:spfield->getAttributes()) {
1022
1023 // Handle string first.
1024 if(attr->getType()==DFNT_UCHAR || attr->getType() == DFNT_CHAR){
1025
1026 // Questionable use of string. KY 2014-02-12
1027 string tempstring2(attr->getValue().begin(),attr->getValue().end());
1028 string tempfinalstr= string(tempstring2.c_str());
1029
1030 // We want to escape the possible special characters except the fullpath attribute.
1031 // The fullpath is only added for some CERES and MERRA data. People use fullpath to keep their
1032 // original names even their original name includes special characters. KY 2014-02-12
1033 at->append_attr(attr->getNewName(), "String" , (attr->getNewName()=="fullpath")?tempfinalstr:HDFCFUtil::escattr(tempfinalstr));
1034 }
1035 else {
1036 for (int loc=0; loc < attr->getCount() ; loc++) {
1037 string print_rep = HDFCFUtil::print_attr(attr->getType(), loc, (void*) &(attr->getValue()[0]));
1038 at->append_attr(attr->getNewName(), HDFCFUtil::print_type(attr->getType()), print_rep);
1039 }
1040 }
1041 }
1042
1043 // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
1044 // with the variable datatype. Correct the fillvalue datatype if necessary.
1045 if(at != nullptr) {
1046 int32 var_type = spfield->getType();
1047 try {
1048 HDFCFUtil::correct_fvalue_type(at,var_type);
1049 }
1050 catch(...) {
1051 throw;
1052 }
1053 }
1054
1055 // If H4.EnableCheckScaleOffsetType BES key is true,
1056 // if yes, check if having scale_factor and add_offset attributes;
1057 // if yes, check if scale_factor and add_offset attribute types are the same;
1058 // if no, make add_offset's datatype be the same as the datatype of scale_factor.
1059 // (CF requires the type of scale_factor and add_offset the same).
1060 if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at !=nullptr)
1062
1063 }
1064
1065 // Handle vdata attributes.
1066 try {
1067 HDFCFUtil::handle_vdata_attrs_with_desc_key(f,das);
1068 }
1069 catch(...) {
1070 throw;
1071 }
1072
1073 return true;
1074
1075}
1076
1079void read_dds_use_eos2lib(DDS & dds, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,HDFSP::File*h4file,HDFEOS2::File*eosfile)
1080{
1081
1082 BESDEBUG("h4","Coming to read_dds_use_eos2lib" <<endl);
1083
1084 int ret_value = read_dds_hdfeos2(dds,filename,sdfd,gridfd,swathfd,h4file,eosfile);
1085
1086 BESDEBUG("h4","ret_value of read_dds_hdfeos2 is "<<ret_value<<endl);
1087
1088 // read_dds_hdfeos2 return value description:
1089 // 0: general non-EOS2 pure HDF4
1090 // 1: HDF-EOS2 hybrid
1091 // 2: MOD08_M3
1092 // HDF-EOS2 but no need to use HDF-EOS2 lib: no real dimension scales but have CVs for every dimension, treat differently
1093 // 3: AIRS version 6
1094 // HDF-EOS2 but no need to use HDF-EOS2 lib:
1095 // have dimension scales but don’t have CVs for every dimension, also need to condense dimensions, treat differently
1096 // 4. Ideal(Expected) AIRS version 6(No real products yet)
1097 // HDF-EOS2 but no need to use HDF-EOS2 lib: Have dimension scales for all dimensions
1098 // 5. MERRA
1099 // Special handling for MERRA file
1100
1101
1102 // Treat MERRA and non-HDFEOS2 HDF4 products as pure HDF4 objects
1103 // For Ideal AIRS version 6 products, we temporarily handle them in a generic HDF4 way.
1104 if (0 == ret_value || 5 == ret_value || 4 == ret_value ) {
1105 if(true == read_dds_hdfsp(dds, filename,sdfd,fileid,h4file))
1106 return;
1107 }
1108 // Special handling
1109 else if ( 1 == ret_value ) {
1110
1111 // Map non-EOS2 objects to DDS
1112 if(true ==read_dds_hdfhybrid(dds,filename,sdfd,fileid,h4file))
1113 return;
1114 }
1115 else {// ret_value = 2 and 3 are handled already in the read_dds_hdfeos2 calls. Just return.
1116 return;
1117 }
1118
1119// leave this code block for performance comparison.
1120#if 0
1121 // first map HDF-EOS2 objects to DDS
1122 if(true == read_dds_hdfeos2(dds, filename)){
1123
1124 // Map non-EOS2 objects to DDS
1125 if(true == read_dds_hdfhybrid(dds,filename))
1126 return;
1127 }
1128
1129 // Map HDF4 objects in pure HDF4 files to DDS
1130 if(read_dds_hdfsp(dds, filename)){
1131 return;
1132 }
1133#endif
1134
1135 // Call the default mapping of HDF4 to DDS. It should never reach here.
1136 // We add this line to ensure the HDF4 objects mapped to DDS even if the above routines return false.
1137 read_dds(dds, filename);
1138
1139}
1140
1141// Map other HDF global attributes, this routine must be called after all ECS metadata are handled.
1142void write_non_ecsmetadata_attrs(HE2CF& cf) {
1143
1144 cf.set_non_ecsmetadata_attrs();
1145
1146}
1147
1148// Map HDF-EOS2's ECS attributes to DAS. ECS attributes include coremetadata, structmetadata etc.
1149void write_ecsmetadata(DAS& das, HE2CF& cf, const string& _meta)
1150{
1151
1152 // There is a maximum length for each ECS metadata if one uses ECS toolkit to add the metadata.
1153 // For some products of which the metadata size is huge, one metadata may be stored in several
1154 // ECS attributes such as coremetadata.0, coremetadata.1 etc.
1155 // When mapping the ECS metadata, we need to merge such metadata attributes into one attribute(coremetadata)
1156 // so that end users can easily understand this metadata.
1157 // ECS toolkit recommends data producers to use the format shown in the following coremetadata example:
1158 // coremetadata.0, coremetadata.1, etc.
1159 // Most NASA HDF-EOS2 products follow this naming convention.
1160 // However, the toolkit also allows data producers to freely name its metadata.
1161 // So we also find the following slightly different format:
1162 // (1) No suffix: coremetadata
1163 // (2) only have one such ECS attribute: coremetadata.0
1164 // (3) have several ECS attributes with two dots after the name: coremetadata.0, coremetadata.0.1 etc.
1165 // (4) Have non-number suffix: productmetadata.s, productmetadata.t etc.
1166 // We handle the above case in the function set_metadata defined in HE2CF.cc. KY 2013-07-06
1167
1168 bool suffix_is_number = true;
1169 vector<string> meta_nonum_names;
1170 vector<string> meta_nonum_data;
1171
1172 string meta = cf.get_metadata(_meta,suffix_is_number,meta_nonum_names, meta_nonum_data);
1173
1174 if(""==meta && true == suffix_is_number){
1175 return; // No _meta metadata exists.
1176 }
1177
1178 BESDEBUG("h4",meta << endl);
1179
1180 if (false == suffix_is_number) {
1181 // For the case when the suffix is like productmetadata.s, productmetadata.t,
1182 // we will not merge the metadata since we are not sure about the order.
1183 // We just parse each attribute individually.
1184 for (unsigned int i = 0; i <meta_nonum_names.size(); i++)
1185 parse_ecs_metadata(das,meta_nonum_names[i],meta_nonum_data[i]);
1186 }
1187 else
1188 parse_ecs_metadata(das,_meta,meta);
1189
1190}
1191
1192void parse_ecs_metadata(DAS &das,const string & metaname, const string &metadata) {
1193
1194
1195 AttrTable *at = das.get_table(metaname);
1196 if (!at)
1197 at = das.add_table(metaname, new AttrTable);
1198
1199 // tell lexer to scan attribute string
1200 void *buf = hdfeos_string(metadata.c_str());
1201 parser_arg arg(at);
1202
1203 if (hdfeosparse(&arg) != 0) {
1204 hdfeos_delete_buffer(buf);
1205 throw Error("HDF-EOS parse error while processing a " + metadata + " HDFEOS attribute.");
1206 }
1207
1208 if (arg.status() == false) {
1209 (*BESLog::TheLog())<< "HDF-EOS parse error while processing a "
1210 << metadata << " HDFEOS attribute. (2) " << endl;
1211#if 0
1212// for debugging
1213 << arg.error()->get_error_message() << endl;
1214#endif
1215 }
1216
1217 hdfeos_delete_buffer(buf);
1218}
1219
1220// Build DAS for HDFEOS2 files.
1221int read_das_hdfeos2(DAS & das, const string & filename,int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,
1222 bool ecs_metadata,HDFSP::File**spfpptr,HDFEOS2::File **fpptr)
1223{
1224
1225 BESDEBUG("h4","Coming to read_das_hdfeos2 " << endl);
1226
1227 // There are some HDF-EOS2 files(MERRA) that should be treated
1228 // exactly like HDF4 SDS files. We don't need to use HDF-EOS2 APIs to
1229 // retrieve any information. In fact, treating them as HDF-EOS2 files
1230 // will cause confusions and retrieve wrong information, though may not be essential.
1231 // So far, we've only found that the MERRA product has this problem.
1232 // A quick fix is to check if the file name contains MERRA. KY 2011-3-4
1233 // Actually, AIRS version 6 and MODO8M3 also fall into this category,
1234 // they are also specially handled, check read_das_special_eos2_core. KY 2015-06-04
1235
1236 // Find MERRA data, return 5.
1237 if((basename(filename).size() >=5) && ((basename(filename)).compare(0,5,"MERRA")==0)) {
1238 return 5;
1239 }
1240
1241 // We will check if the handler wants to turn on the special EOS key checking
1242#if 0
1243 string check_enable_spec_eos_key="H4.EnableSpecialEOS";
1244 bool turn_on_enable_spec_eos_key= false;
1245 turn_on_enable_spec_eos_key = HDFCFUtil::check_beskeys(check_enable_spec_eos_key);
1246#endif
1247 if(true == HDF4RequestHandler::get_enable_special_eos()) {
1248
1249 string grid_name;
1250 int ret_val = check_special_eosfile(filename,grid_name,sdfd);
1251
1252 // Expected AIRS level 2 or 3
1253 if(4== ret_val)
1254 return ret_val;
1255
1256 bool airs_l2_l3_v6 = false;
1257 bool special_1d_grid = false;
1258
1259 // AIRS level 2,3 version 6 or MOD08_M3-like products
1260 if(2 == ret_val || 3 == ret_val) {
1261
1262 HDFSP::File *spf = nullptr;
1263 try {
1264 spf = HDFSP::File::Read(filename.c_str(),sdfd,fileid);
1265 }
1266 catch (HDFSP::Exception &e)
1267 {
1268 if (spf != nullptr)
1269 delete spf;
1270 throw InternalErr(e.what());
1271 }
1272
1273 try {
1274 if( 2 == ret_val) {
1275
1276 // More check and build the relations if this is a special MOD08_M3-like file
1277 if(spf->Check_update_special(grid_name)== true){
1278
1279 special_1d_grid = true;
1280
1281 // Building the normal HDF4 DAS here.
1282 read_das_special_eos2_core(das,spf,filename,ecs_metadata);
1283
1284 // Need to handle MOD08M3 product
1285 if(grid_name =="mod08") {
1286 change_das_mod08_scale_offset(das,spf);
1287 }
1288 }
1289 }
1290 else {
1291
1292 airs_l2_l3_v6 = true;
1293 spf->Handle_AIRS_L23();
1294 read_das_special_eos2_core(das,spf,filename,ecs_metadata);
1295 }
1296
1297 }
1298 catch (...)
1299 {
1300 delete spf;
1301 throw;
1302 }
1303
1304 // If this is MOD08M3 or AIRS version 6,we just need to return the file pointer.
1305 if (true == special_1d_grid || true == airs_l2_l3_v6) {
1306 *spfpptr = spf;
1307 return ret_val;
1308 }
1309
1310 }
1311 }
1312
1313 HDFEOS2::File *f = nullptr;
1314
1315 try {
1316 // Read all the information of EOS objects from an HDF-EOS2 file
1317 f= HDFEOS2::File::Read(filename.c_str(),gridfd,swathfd);
1318 }
1319 catch (HDFEOS2::Exception &e){
1320
1321 if(f != nullptr)
1322 delete f;
1323
1324 // If this file is not an HDF-EOS2 file, return 0.
1325 if (!e.getFileType()){
1326 return 0;
1327 }
1328 else
1329 {
1330 throw InternalErr(e.what());
1331 }
1332 }
1333
1334 try {
1335 // Generate CF coordinate variables(including auxiliary coordinate variables) and dimensions
1336 // All the names follow CF.
1337 f->Prepare(filename.c_str());
1338 }
1339
1340 catch (HDFEOS2:: Exception &e) {
1341 if(f!=nullptr)
1342 delete f;
1343 throw InternalErr(e.what());
1344 }
1345
1346 *fpptr = f;
1347
1348 // HE2CF cf is used to handle hybrid SDS and SD attributes.
1349 HE2CF cf;
1350
1351 try {
1352 cf.open(filename,sdfd,fileid);
1353 }
1354 catch(...) {
1355 throw;
1356 }
1357 cf.set_DAS(&das);
1358
1359 SOType sotype = DEFAULT_CF_EQU;
1360
1361 // A flag not to generate structMetadata for the MOD13C2 file.
1362 // MOD13C2's structMetadata has wrong values. It couldn't pass the parser.
1363 // So we want to turn it off. KY 2010-8-10
1364 bool tempstrflag = false;
1365
1366#if 0
1367 // AMSR_E may stop using "SCALE_FACTOR", so the following "if block" is empty. Still leave it here for future reference. KY 2022-06-16
1368 // Product name(AMSR_E) that needs to change attribute from "SCALE FACTOR" to scale_factor etc. to follow the CF conventions
1369 if (f->getSwaths().empty() == false) {
1370 string temp_fname = basename(filename);
1371 string temp_prod_prefix = "AMSR_E";
1372 if ((temp_fname.size() > temp_prod_prefix.size()) &&
1373 (0 == (temp_fname.compare(0,temp_prod_prefix.size(),temp_prod_prefix)))) {
1374 }
1375
1376 }
1377#endif
1378
1379 // Obtain information to identify MEaSURES VIP. This product needs to be handled properly.
1380 bool gridname_change_valid_range = false;
1381 if(1 == f->getGrids().size()) {
1382 string gridname = f->getGrids()[0]->getName();
1383 if ("VIP_CMG_GRID" == gridname)
1384 gridname_change_valid_range = true;
1385 }
1386
1387 // Obtain information to identify MODIS_SWATH_Type_L1B product. This product's scale and offset need to be handled properly.
1388 bool is_modis_l1b = false;
1389
1390 // Since this is a swath product, we check swath only.
1391 for (int i = 0; i<(int) f->getSwaths().size(); i++) {
1392 const HDFEOS2::SwathDataset* swath = f->getSwaths()[i];
1393 string sname = swath->getName();
1394 if("MODIS_SWATH_Type_L1B" == sname){
1395 is_modis_l1b = true;
1396 break;
1397 }
1398 }
1399
1400 try {
1401
1402 // MAP grids to DAS.
1403 for (int i = 0; i < (int) f->getGrids().size(); i++) {
1404
1405 const HDFEOS2::GridDataset* grid = f->getGrids()[i];
1406 string gname = grid->getName();
1407 sotype = grid->getScaleType();
1408
1409 const vector<HDFEOS2::Field*>gfields = grid->getDataFields();
1410
1411 for (const auto &gf:gfields) {
1412
1413 bool change_fvtype = false;
1414
1415 // original field name
1416 string fname = gf->getName();
1417
1418 // new field name that follows CF
1419 string newfname = gf->getNewName();
1420
1421 BESDEBUG("h4","Original field name: " << fname << endl);
1422 BESDEBUG("h4","Corrected field name: " << newfname << endl);
1423
1424 // whether coordinate variable or data variables
1425 int fieldtype = gf->getFieldType();
1426
1427 // 0 means that the data field is NOT a coordinate variable.
1428 if (fieldtype == 0){
1429
1430 // If you don't find any _FillValue through generic API.
1431 if(gf->haveAddedFillValue()) {
1432 BESDEBUG("h4","Has an added fill value." << endl);
1433 float addedfillvalue =
1434 gf->getAddedFillValue();
1435 int type =
1436 gf->getType();
1437 BESDEBUG("h4","Added fill value = "<<addedfillvalue);
1438 cf.write_attribute_FillValue(newfname,
1439 type, addedfillvalue);
1440 }
1441 string coordinate = gf->getCoordinate();
1442 BESDEBUG("h4","Coordinate attribute: " << coordinate <<endl);
1443 if (coordinate != "")
1444 cf.write_attribute_coordinates(newfname, coordinate);
1445 }
1446
1447 // This will override _FillValue if it's defined on the field.
1448 cf.write_attribute(gname, fname, newfname,
1449 (int)(f->getGrids().size()), fieldtype);
1450
1451 // For fieldtype values:
1452 // 0 is general fields
1453 // 1 is latitude.
1454 // 2 is longtitude.
1455 // 3 is the existing 3rd-dimension coordinate variable
1456 // 4 is the dimension that misses the coordinate variable,use natural number
1457 // 5 is time
1458 if (fieldtype > 0){
1459
1460 // MOD13C2 is treated specially.
1461 if(fieldtype == 1 && (gf->getSpecialLLFormat())==3)
1462 tempstrflag = true;
1463
1464 // Don't change the units if the 3-rd dimension field exists.(fieldtype =3)
1465 // KY 2013-02-15
1466 if (fieldtype !=3) {
1467 string tempunits = gf->getUnits();
1468 BESDEBUG("h4",
1469 "fieldtype " << fieldtype
1470 << " units" << tempunits
1471 << endl);
1472 cf.write_attribute_units(newfname, tempunits);
1473 }
1474 }
1475
1476 //Rename attributes of MODIS products.
1477 AttrTable *at = das.get_table(newfname);
1478
1479 // No need for the case that follows the CF scale and offset .
1480 if(sotype!=DEFAULT_CF_EQU && at!=nullptr)
1481 {
1482 bool has_Key_attr = false;
1483 AttrTable::Attr_iter it = at->attr_begin();
1484 while (it!=at->attr_end())
1485 {
1486 if(at->get_name(it)=="Key")
1487 {
1488 has_Key_attr = true;
1489 break;
1490 }
1491 it++;
1492 }
1493
1494 if((false == is_modis_l1b) && (false == gridname_change_valid_range)&&(false == has_Key_attr) &&
1495 (true == HDF4RequestHandler::get_disable_scaleoffset_comp()))
1496 HDFCFUtil::handle_modis_special_attrs_disable_scale_comp(at,basename(filename), true, newfname,sotype);
1497 else {
1498
1499 // Check if the datatype of this field needs to be changed.
1500 bool changedtype = HDFCFUtil::change_data_type(das,sotype,newfname);
1501
1502 // Build up the field name list if the datatype of the field needs to be changed.
1503 if (true == changedtype)
1504 ctype_field_namelist.push_back(newfname);
1505
1506 HDFCFUtil::handle_modis_special_attrs(at,basename(filename),true, newfname,sotype,gridname_change_valid_range,changedtype,change_fvtype);
1507
1508 }
1509 }
1510
1511 // Handle AMSR-E attributes.
1512 HDFCFUtil::handle_amsr_attrs(at);
1513
1514 // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
1515 // with the variable datatype. Correct the fillvalue datatype if necessary.
1516 if((false == change_fvtype) && at != nullptr) {
1517 int32 var_type = gf->getType();
1518 HDFCFUtil::correct_fvalue_type(at,var_type);
1519 }
1520
1521 // if h4.enablecheckscaleoffsettype bes key is true,
1522 // if yes, check if having scale_factor and add_offset attributes;
1523 // if yes, check if scale_factor and add_offset attribute types are the same;
1524 // if no, make add_offset's datatype be the same as the datatype of scale_factor.
1525 // (cf requires the type of scale_factor and add_offset the same).
1526 if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at!=nullptr)
1528
1529 }
1530
1531 // Add possible 1-D CV CF attributes to identify projection info. for CF.
1532 // Currently only the Sinusoidal projection is supported.
1533 HDFCFUtil::add_cf_grid_cv_attrs(das,grid);
1534
1535 }
1536 }
1537 catch(...) {
1538 //delete f;
1539 throw;
1540 }
1541
1542 try {
1543 // MAP Swath attributes to DAS.
1544 for (int i = 0; i < (int) f->getSwaths().size(); i++) {
1545
1546 const HDFEOS2::SwathDataset* swath = f->getSwaths()[i];
1547
1548 // Swath includes two parts: "Geolocation Fields" and "Data Fields".
1549 // The all_fields vector includes both.
1550 const vector<HDFEOS2::Field*> geofields = swath->getGeoFields();
1551 vector<HDFEOS2::Field*> all_fields = geofields;
1552
1553 const vector<HDFEOS2::Field*> datafields = swath->getDataFields();
1554 for (const auto &df:datafields)
1555 all_fields.push_back(df);
1556
1557 auto total_geofields = (int)(geofields.size());
1558
1559 string gname = swath->getName();
1560 BESDEBUG("h4","Swath name: " << gname << endl);
1561
1562 sotype = swath->getScaleType();
1563
1564 // field_counter is only used to separate the geo field from the data field.
1565 int field_counter = 0;
1566
1567 for (const auto &af:all_fields)
1568 {
1569 bool change_fvtype = false;
1570 string fname = af->getName();
1571 string newfname = af->getNewName();
1572 BESDEBUG("h4","Original Field name: " << fname << endl);
1573 BESDEBUG("h4","Corrected Field name: " << newfname << endl);
1574
1575 int fieldtype = af->getFieldType();
1576 if (fieldtype == 0){
1577 string coordinate = af->getCoordinate();
1578 BESDEBUG("h4","Coordinate attribute: " << coordinate <<endl);
1579 if (coordinate != "")
1580 cf.write_attribute_coordinates(newfname, coordinate);
1581 }
1582
1583 // 1 is latitude.
1584 // 2 is longitude.
1585 // Don't change "units" if a non-latlon coordinate variable exists.
1586 if(fieldtype >0 && fieldtype !=3){
1587 string tempunits = af->getUnits();
1588 BESDEBUG("h4",
1589 "fieldtype " << fieldtype
1590 << " units" << tempunits << endl);
1591 cf.write_attribute_units(newfname, tempunits);
1592
1593 }
1594 BESDEBUG("h4","Field Name: " << fname << endl);
1595
1596 // coordinate "fillvalue" attribute
1597 // This operation should only apply to data fields.
1598 if (field_counter >=total_geofields) {
1599 if(af->haveAddedFillValue()){
1600 float addedfillvalue =
1601 af->getAddedFillValue();
1602 int type =
1603 af->getType();
1604 BESDEBUG("h4","Added fill value = "<<addedfillvalue);
1605 cf.write_attribute_FillValue(newfname, type, addedfillvalue);
1606 }
1607 }
1608 cf.write_attribute(gname, fname, newfname,
1609 (int)(f->getSwaths().size()), fieldtype);
1610
1611 AttrTable *at = das.get_table(newfname);
1612
1613 // No need for CF scale and offset equation.
1614 if(sotype!=DEFAULT_CF_EQU && at!=nullptr)
1615 {
1616
1617 bool has_Key_attr = false;
1618 AttrTable::Attr_iter it = at->attr_begin();
1619 while (it!=at->attr_end())
1620 {
1621 if(at->get_name(it)=="Key")
1622 {
1623 has_Key_attr = true;
1624 break;
1625 }
1626 it++;
1627 }
1628
1629 if((false == is_modis_l1b) && (false == gridname_change_valid_range) &&(false == has_Key_attr) &&
1630 (true == HDF4RequestHandler::get_disable_scaleoffset_comp()))
1631 HDFCFUtil::handle_modis_special_attrs_disable_scale_comp(at,basename(filename),false,newfname,sotype);
1632 else {
1633
1634 // Check if the datatype of this field needs to be changed.
1635 bool changedtype = HDFCFUtil::change_data_type(das,sotype,newfname);
1636
1637 // Build up the field name list if the datatype of the field needs to be changed.
1638 if (true == changedtype)
1639
1640 ctype_field_namelist.push_back(newfname);
1641
1642 // Handle MODIS special attributes such as valid_range, scale_factor and add_offset etc.
1643 // Need to catch the exception since this function calls handle_modis_vip_special_attrs that may
1644 // throw an exception.
1645 HDFCFUtil::handle_modis_special_attrs(at,basename(filename), false,newfname,sotype,gridname_change_valid_range,changedtype,change_fvtype);
1646 }
1647 }
1648
1649 // Handle AMSR-E attributes
1650 if(at !=nullptr)
1651 HDFCFUtil::handle_amsr_attrs(at);
1652
1653 // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
1654 // with the variable datatype. Correct the fillvalue datatype if necessary.
1655 if((false == change_fvtype) && at != nullptr) {
1656 int32 var_type = af->getType();
1657 HDFCFUtil::correct_fvalue_type(at,var_type);
1658 }
1659
1660 // If H4.EnableCheckScaleOffsetType BES key is true,
1661 // if yes, check if having scale_factor and add_offset attributes;
1662 // if yes, check if scale_factor and add_offset attribute types are the same;
1663 // if no, make add_offset's datatype be the same as the datatype of scale_factor.
1664 // (CF requires the type of scale_factor and add_offset the same).
1665 //if (true == turn_on_enable_check_scale_offset_key && at !=nullptr)
1666 if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at !=nullptr)
1668
1669 field_counter++;
1670 }
1671 }
1672 }
1673 catch(...) {
1674 throw;
1675 }
1676
1677
1678 try {
1679
1680 if(ecs_metadata == true) {
1681
1682 // Handle ECS metadata. The following metadata are what we found so far.
1683 write_ecsmetadata(das,cf, "CoreMetadata");
1684
1685 write_ecsmetadata(das,cf, "coremetadata");
1686
1687 write_ecsmetadata(das,cf,"ArchiveMetadata");
1688
1689 write_ecsmetadata(das,cf,"archivemetadata");
1690
1691 write_ecsmetadata(das,cf,"ProductMetadata");
1692
1693 write_ecsmetadata(das,cf,"productmetadata");
1694 }
1695
1696 // This cause a problem for a MOD13C2 file, So turn it off temporarily. KY 2010-6-29
1697 if(false == tempstrflag) {
1698
1699#if 0
1700 string check_disable_smetadata_key ="H4.DisableStructMetaAttr";
1701 bool is_check_disable_smetadata = false;
1702 is_check_disable_smetadata = HDFCFUtil::check_beskeys(check_disable_smetadata_key);
1703#endif
1704
1705 if (false == HDF4RequestHandler::get_disable_structmeta() ) {
1706 write_ecsmetadata(das, cf, "StructMetadata");
1707 }
1708 }
1709
1710 // Write other HDF global attributes, this routine must be called after all ECS metadata are handled.
1711 write_non_ecsmetadata_attrs(cf);
1712
1713 cf.close();
1714 }
1715 catch(...) {
1716 throw;
1717 }
1718
1719 try {
1720
1721 // Check if swath or grid object (like vgroup) attributes should be mapped to DAP2. If yes, start mapping.
1722#if 0
1723 string check_enable_sg_attr_key="H4.EnableSwathGridAttr";
1724 bool turn_on_enable_sg_attr_key= false;
1725 turn_on_enable_sg_attr_key = HDFCFUtil::check_beskeys(check_enable_sg_attr_key);
1726#endif
1727
1728 if(true == HDF4RequestHandler::get_enable_swath_grid_attr()) {
1729
1730 // MAP grid attributes to DAS.
1731 for (int i = 0; i < (int) f->getGrids().size(); i++) {
1732
1733
1734 HDFEOS2::GridDataset* grid = f->getGrids()[i];
1735
1736 string gname = HDFCFUtil::get_CF_string(grid->getName());
1737
1738 AttrTable*at = nullptr;
1739
1740 // Create a "grid" DAS table if this grid has attributes.
1741 if(grid->getAttributes().empty() == false){
1742 at = das.get_table(gname);
1743 if (!at)
1744 at = das.add_table(gname, new AttrTable);
1745 }
1746 if(at!= nullptr) {
1747
1748 // Process grid attributes
1749 const vector<HDFEOS2::Attribute *> grid_attrs = grid->getAttributes();
1750 for (const auto &attr:grid_attrs) {
1751
1752 int attr_type = attr->getType();
1753
1754 // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
1755 if(attr_type==DFNT_UCHAR || attr_type == DFNT_CHAR){
1756 string tempstring2(attr->getValue().begin(),attr->getValue().end());
1757 auto tempfinalstr= string(tempstring2.c_str());
1758
1759 // Using the customized escattr function to escape special characters except
1760 // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
1761 at->append_attr(attr->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
1762 }
1763
1764
1765 else {
1766 for (int loc=0; loc < attr->getCount() ; loc++) {
1767 string print_rep = HDFCFUtil::print_attr(attr->getType(), loc, (void*) &(attr->getValue()[0]));
1768 at->append_attr(attr->getNewName(), HDFCFUtil::print_type(attr->getType()), print_rep);
1769 }
1770 }
1771 }
1772 }
1773 }
1774
1775 //
1776 // MAP swath attributes to DAS.
1777 for (int i = 0; i < (int) f->getSwaths().size(); i++) {
1778
1779 const HDFEOS2::SwathDataset* swath = f->getSwaths()[i];
1780 string sname = swath->getName();
1781 AttrTable*at = nullptr;
1782
1783 // Create a "swath" DAS table if this swath has attributes.
1784 if(swath->getAttributes().empty() == false) {
1785 at = das.get_table(sname);
1786 if (!at)
1787 at = das.add_table(sname, new AttrTable);
1788 }
1789
1790 if(at != nullptr) {
1791 const vector<HDFEOS2::Attribute *> swath_attrs = swath->getAttributes();
1792 for (const auto &attr:swath_attrs) {
1793
1794 int attr_type = attr->getType();
1795
1796 // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
1797 if(attr_type==DFNT_UCHAR || attr_type == DFNT_CHAR){
1798 string tempstring2(attr->getValue().begin(),attr->getValue().end());
1799 string tempfinalstr= string(tempstring2.c_str());
1800
1801 // Using the customized escattr function to escape special characters except
1802 // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
1803 at->append_attr(attr->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
1804 }
1805 else {
1806 for (int loc=0; loc < attr->getCount() ; loc++) {
1807 string print_rep = HDFCFUtil::print_attr(attr->getType(), loc, (void*) &(attr->getValue()[0]));
1808 at->append_attr(attr->getNewName(), HDFCFUtil::print_type(attr->getType()), print_rep);
1809 }
1810
1811 }
1812 }
1813 }
1814 }
1815 }// end of mapping swath and grid object attributes to DAP2
1816 }
1817 catch(...) {
1818 throw;
1819 }
1820
1821 return 1;
1822}
1823
1824//The wrapper of building HDF-EOS2 and special HDF4 files.
1825void read_das_use_eos2lib(DAS & das, const string & filename,
1826 int32 sdfd,int32 fileid, int32 gridfd, int32 swathfd,bool ecs_metadata,
1827 HDFSP::File**h4filepptr,HDFEOS2::File**eosfilepptr)
1828{
1829
1830 BESDEBUG("h4","Coming to read_das_use_eos2lib" << endl);
1831
1832 int ret_value = read_das_hdfeos2(das,filename,sdfd,fileid, gridfd, swathfd,ecs_metadata,h4filepptr,eosfilepptr);
1833
1834 BESDEBUG("h4","ret_value of read_das_hdfeos2 is "<<ret_value <<endl);
1835
1836 // read_das_hdfeos2 return value description:
1837 // 0: general non-EOS2 pure HDF4
1838 // 1: HDF-EOS2 hybrid
1839 // 2: MOD08_M3
1840 // HDF-EOS2 but no need to use HDF-EOS2 lib: no real dimension scales but have CVs for every dimension, treat differently
1841 // 3: AIRS version 6 level 3 and level 2
1842 // HDF-EOS2 but no need to use HDF-EOS2 lib:
1843 // have dimension scales but don’t have CVs for every dimension, also need to condense dimensions, treat differently
1844 // 4. Expected AIRS version 6 level 3 and level 2
1845 // HDF-EOS2 but no need to use HDF-EOS2 lib: Have dimension scales for all dimensions
1846 // 5. MERRA
1847 // Special handling for MERRA products.
1848
1849 // Treat as pure HDF4 objects
1850 if (ret_value == 4) {
1851 if(true == read_das_special_eos2(das, filename,sdfd,fileid,ecs_metadata,h4filepptr))
1852 return;
1853 }
1854 // Special handling, already handled
1855 else if (ret_value == 2 || ret_value == 3) {
1856 return;
1857 }
1858 else if (ret_value == 1) {
1859
1860 // Map non-EOS2 objects to DDS
1861 if(true == read_das_hdfhybrid(das,filename,sdfd,fileid,h4filepptr))
1862 return;
1863 }
1864 else {// ret_value is 0(pure HDF4) or 5(Merra)
1865 if(true == read_das_hdfsp(das, filename,sdfd, fileid,h4filepptr))
1866 return;
1867 }
1868
1869
1870// Leave the original code that don't pass the file pointers.
1871#if 0
1872 // First map HDF-EOS2 attributes to DAS
1873 if(true == read_das_hdfeos2(das, filename)){
1874
1875 // Map non-EOS2 attributes to DAS
1876 if (true == read_das_hdfhybrid(das,filename))
1877 return;
1878 }
1879
1880 // Map HDF4 attributes in pure HDF4 files to DAS
1881 if(true == read_das_hdfsp(das, filename)){
1882 return;
1883 }
1884#endif
1885
1886 // Call the default mapping of HDF4 to DAS. It should never reach here.
1887 // We add this line to ensure the HDF4 attributes mapped to DAS even if the above routines return false.
1888 read_das(das, filename);
1889}
1890
1891#endif // #ifdef USE_HDFEOS2_LIB
1892
1893// The wrapper of building DDS function.
1894//bool read_dds_hdfsp(DDS & dds, const string & filename,int32 sdfd, int32 fileid,int32 gridfd, int32 swathfd)
1895bool read_dds_hdfsp(DDS & dds, const string & filename,int32 sdfd, int32 fileid,const HDFSP::File*f)
1896{
1897
1898 BESDEBUG("h4","Coming to read_dds_sp "<<endl);
1899 dds.set_dataset_name(basename(filename));
1900
1901 // Obtain SDS fields
1902 const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
1903
1904 // Read SDS
1905 for (const auto& spf:spsds){
1906
1907 // Although the following line's logic needs to improve, it is right.
1908 // When Has_Dim_NoScale_Field is false, it only happens to the OTHERHDF case.
1909 // For the OTHERHDF case, we will not map the dimension_no_dim_scale (empty) field. This is equivalent to
1910 if (false == f->Has_Dim_NoScale_Field() || (0 == spf->getFieldType()) || (true == spf->IsDimScale())){
1911 try {
1912 read_dds_spfields(dds,filename,sdfd,spf,f->getSPType());
1913 }
1914 catch(...) {
1915 throw;
1916 }
1917 }
1918 }
1919
1920 // Read Vdata fields.
1921 // To speed up the performance for handling CERES data, we turn off some CERES vdata fields, this should be resumed in the future version with BESKeys.
1922#if 0
1923 string check_ceres_vdata_key="H4.EnableCERESVdata";
1924 bool turn_on_ceres_vdata_key= false;
1925 turn_on_ceres_vdata_key = HDFCFUtil::check_beskeys(check_ceres_vdata_key);
1926#endif
1927
1928 bool output_vdata_flag = true;
1929 if (false == HDF4RequestHandler::get_enable_ceres_vdata() &&
1930 (CER_AVG == f->getSPType() ||
1931 CER_ES4 == f->getSPType() ||
1932 CER_SRB == f->getSPType() ||
1933 CER_ZAVG == f->getSPType()))
1934 output_vdata_flag = false;
1935
1936 if(true == output_vdata_flag) {
1937 for (const auto &vd:f->getVDATAs()) {
1938 if(!vd->getTreatAsAttrFlag()){
1939 for(const auto &vdf:vd->getFields()) {
1940 try {
1941 read_dds_spvdfields(dds,filename,fileid,vd->getObjRef(),vdf->getNumRec(),vdf);
1942 }
1943 catch(...) {
1944 throw;
1945 }
1946 }
1947 }
1948 }
1949 }
1950
1951 return true;
1952}
1953
1954// Follow CF to build DAS for non-HDFEOS2 HDF4 products. This routine also applies
1955// to all HDF4 products when HDF-EOS2 library is not configured in.
1956//bool read_das_hdfsp(DAS & das, const string & filename, int32 sdfd, int32 fileid,int32 gridfd, int32 swathfd)
1957bool read_das_hdfsp(DAS & das, const string & filename, int32 sdfd, int32 fileid,HDFSP::File**fpptr)
1958{
1959
1960 BESDEBUG("h4","Coming to read_das_sp "<<endl);
1961
1962 // Define a file pointer
1963 HDFSP::File *f = nullptr;
1964 try {
1965 // Obtain all the necesary information from HDF4 files.
1966 f = HDFSP::File::Read(filename.c_str(), sdfd,fileid);
1967 }
1968 catch (HDFSP::Exception &e)
1969 {
1970 if (f != nullptr)
1971 delete f;
1972 throw InternalErr(e.what());
1973 }
1974
1975 try {
1976 // Generate CF coordinate variables(including auxiliary coordinate variables) and dimensions
1977 // All the names follow CF.
1978 f->Prepare();
1979 }
1980 catch (HDFSP::Exception &e) {
1981 delete f;
1982 throw InternalErr(e.what());
1983 }
1984
1985 *fpptr = f;
1986
1987 // Check if mapping vgroup attribute key is turned on, if yes, mapping vgroup attributes.
1988#if 0
1989 string check_enable_vg_attr_key="H4.EnableVgroupAttr";
1990 bool turn_on_enable_vg_attr_key= false;
1991 turn_on_enable_vg_attr_key = HDFCFUtil::check_beskeys(check_enable_vg_attr_key);
1992#endif
1993
1994
1995 if(true == HDF4RequestHandler::get_enable_vgroup_attr()) {
1996
1997 // Obtain vgroup attributes if having vgroup attributes.
1998 vector<HDFSP::AttrContainer *>vg_container = f->getVgattrs();
1999 for (const auto &vgattr_c:f->getVgattrs()) {
2000 AttrTable *vgattr_at = das.get_table(vgattr_c->getName());
2001 if (!vgattr_at)
2002 vgattr_at = das.add_table(vgattr_c->getName(), new AttrTable);
2003
2004 for (const auto &attr:vgattr_c->getAttributes()) {
2005
2006 // Handle string first.
2007 if(attr->getType()==DFNT_UCHAR || attr->getType() == DFNT_CHAR){
2008 string tempstring2(attr->getValue().begin(),attr->getValue().end());
2009 string tempfinalstr= string(tempstring2.c_str());
2010
2011 //escaping the special characters in string attributes when mapping to DAP
2012 vgattr_at->append_attr(attr->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
2013 }
2014 else {
2015 for (int loc=0; loc < attr->getCount() ; loc++) {
2016
2017 string print_rep = HDFCFUtil::print_attr(attr->getType(), loc, (void*) &(attr->getValue()[0]));
2018 vgattr_at->append_attr(attr->getNewName(), HDFCFUtil::print_type(attr->getType()), print_rep);
2019 }
2020 }
2021 }
2022 }
2023 }// end of mapping vgroup attributes.
2024
2025 // Initialize ECS metadata
2026 string core_metadata = "";
2027 string archive_metadata = "";
2028 string struct_metadata = "";
2029
2030 // Obtain SD pointer, this is used to retrieve the file attributes associated with the SD interface
2031 const HDFSP::SD* spsd = f->getSD();
2032
2033 // Except TRMM, we don't find ECS metadata in other non-EOS products. For the option to treat EOS2 as pure HDF4, we
2034 // kind of relax the support of merging metadata as we do for the EOS2 case(read_das_hdfeos2). We will see if we have the user
2035 // request to make them consistent in the future. KY 2013-07-08
2036 for (const auto &sp_attr:spsd->getAttributes()) {
2037
2038 // Here we try to combine ECS metadata into a string.
2039 if((sp_attr->getName().compare(0, 12, "CoreMetadata" )== 0) ||
2040 (sp_attr->getName().compare(0, 12, "coremetadata" )== 0)){
2041
2042 // We assume that CoreMetadata.0, CoreMetadata.1, ..., CoreMetadata.n attribures
2043 // are processed in the right order during HDFSP::Attribute vector iteration.
2044 // Otherwise, this won't work.
2045 string tempstring(sp_attr->getValue().begin(),sp_attr->getValue().end());
2046
2047 // Temporarily turn off CERES data since there are so many fields in CERES. It will choke clients KY 2010-7-9
2048 if(f->getSPType() != CER_AVG &&
2049 f->getSPType() != CER_ES4 &&
2050 f->getSPType() !=CER_SRB &&
2051 f->getSPType() != CER_ZAVG)
2052 core_metadata.append(tempstring);
2053 }
2054 else if((sp_attr->getName().compare(0, 15, "ArchiveMetadata" )== 0) ||
2055 (sp_attr->getName().compare(0, 16, "ArchivedMetadata")==0) ||
2056 (sp_attr->getName().compare(0, 15, "archivemetadata" )== 0)){
2057 string tempstring(sp_attr->getValue().begin(),sp_attr->getValue().end());
2058 // Currently some TRMM "swath" archivemetadata includes special characters that cannot be handled by OPeNDAP
2059 // So turn it off.
2060 // Turn off CERES data since it may choke JAVA clients KY 2010-7-9
2061 if(f->getSPType() != TRMML2_V6 && f->getSPType() != CER_AVG && f->getSPType() != CER_ES4 && f->getSPType() !=CER_SRB && f->getSPType() != CER_ZAVG)
2062 archive_metadata.append(tempstring);
2063 }
2064 else if((sp_attr->getName().compare(0, 14, "StructMetadata" )== 0) ||
2065 (sp_attr->getName().compare(0, 14, "structmetadata" )== 0)){
2066
2067 if (false == HDF4RequestHandler::get_disable_structmeta()) {
2068
2069 string tempstring(sp_attr->getValue().begin(),sp_attr->getValue().end());
2070
2071 // Turn off TRMM "swath" verison 6 level 2 productsCERES data since it may choke JAVA clients KY 2010-7-9
2072 if(f->getSPType() != TRMML2_V6 &&
2073 f->getSPType() != CER_AVG &&
2074 f->getSPType() != CER_ES4 &&
2075 f->getSPType() !=CER_SRB &&
2076 f->getSPType() != CER_ZAVG)
2077 struct_metadata.append(tempstring);
2078
2079 }
2080 }
2081 else {
2082 // Process gloabal attributes
2083 AttrTable *at = das.get_table("HDF_GLOBAL");
2084 if (!at)
2085 at = das.add_table("HDF_GLOBAL", new AttrTable);
2086
2087 // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
2088 if(sp_attr->getType()==DFNT_UCHAR || sp_attr->getType() == DFNT_CHAR){
2089 string tempstring2(sp_attr->getValue().begin(),sp_attr->getValue().end());
2090 auto tempfinalstr= string(tempstring2.c_str());
2091
2092 // Using the customized escattr function to escape special characters except
2093 // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
2094 at->append_attr(sp_attr->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
2095 }
2096
2097 else {
2098 for (int loc=0; loc < sp_attr->getCount() ; loc++) {
2099 string print_rep = HDFCFUtil::print_attr(sp_attr->getType(), loc, (void*) &(sp_attr->getValue()[0]));
2100 at->append_attr(sp_attr->getNewName(), HDFCFUtil::print_type(sp_attr->getType()), print_rep);
2101 }
2102
2103 }
2104 }
2105
2106 }
2107
2108 // The following code may be condensed in the future. KY 2012-09-19
2109 // Coremetadata, structmetadata and archive metadata need special parsers.
2110
2111 // Write coremetadata.
2112 if(core_metadata.size() > 0){
2113 AttrTable *at = das.get_table("CoreMetadata");
2114 if (!at)
2115 at = das.add_table("CoreMetadata", new AttrTable);
2116 // tell lexer to scan attribute string
2117 void *buf = hdfeos_string(core_metadata.c_str());
2118 parser_arg arg(at);
2119
2120 if (hdfeosparse(&arg) != 0) {
2121 hdfeos_delete_buffer(buf);
2122 throw Error("Parse error while processing a CoreMetadata attribute.");
2123 }
2124
2125 // Errors returned from here are ignored.
2126 if (arg.status() == false) {
2127 ERROR_LOG("Parse error while processing a CoreMetadata attribute. (2) " << endl);
2128#if 0
2129 // << arg.error()->get_error_message() << endl;
2130#endif
2131 }
2132
2133 hdfeos_delete_buffer(buf);
2134 }
2135
2136 // Write archive metadata.
2137 if(archive_metadata.size() > 0){
2138 AttrTable *at = das.get_table("ArchiveMetadata");
2139 if (!at)
2140 at = das.add_table("ArchiveMetadata", new AttrTable);
2141 // tell lexer to scan attribute string
2142 void *buf = hdfeos_string(archive_metadata.c_str());
2143 parser_arg arg(at);
2144 if (hdfeosparse(&arg) != 0){
2145 hdfeos_delete_buffer(buf);
2146 throw Error("Parse error while processing an ArchiveMetadata attribute.");
2147 }
2148
2149 // Errors returned from here are ignored.
2150 if (arg.status() == false) {
2151 ERROR_LOG("Parse error while processing an ArchiveMetadata attribute. (2) " << endl);
2152#if 0
2153 // << arg.error()->get_error_message() << endl;
2154#endif
2155 }
2156
2157 hdfeos_delete_buffer(buf);
2158 }
2159
2160 // Write struct metadata.
2161 if(struct_metadata.size() > 0){
2162 AttrTable *at = das.get_table("StructMetadata");
2163 if (!at)
2164 at = das.add_table("StructMetadata", new AttrTable);
2165 // tell lexer to scan attribute string
2166 void *buf = hdfeos_string(struct_metadata.c_str());
2167 parser_arg arg(at);
2168 if (hdfeosparse(&arg) != 0){
2169 hdfeos_delete_buffer(buf);
2170 throw Error("Parse error while processing a StructMetadata attribute.");
2171 }
2172
2173 if (arg.status() == false) {
2174 ERROR_LOG("Parse error while processing a StructMetadata attribute. (2)" << endl);
2175 }
2176
2177
2178 // Errors returned from here are ignored.
2179#if 0
2180 if (arg.status() == false) {
2181 (*BESLog::TheLog())<< "Parse error while processing a StructMetadata attribute. (2)" << endl
2182 << arg.error()->get_error_message() << endl;
2183 }
2184#endif
2185
2186 hdfeos_delete_buffer(buf);
2187 }
2188
2189 // The following code checks the special handling of scale and offset of the OBPG products.
2190 //Store value of "Scaling" attribute.
2191 string scaling;
2192
2193 //Store value of "Slope" attribute.
2194 float slope = 0.;
2195 bool global_slope_flag = false;
2196 float intercept = 0.;
2197 bool global_intercept_flag = false;
2198
2199 // Check OBPG attributes. Specifically, check if slope and intercept can be obtained from the file level.
2200 // If having global slope and intercept, obtain OBPG scaling, slope and intercept values.
2201 HDFCFUtil::check_obpg_global_attrs(f,scaling,slope,global_slope_flag,intercept,global_intercept_flag);
2202
2203 // Handle individual fields
2204 const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
2205 vector<HDFSP::SDField *>::const_iterator it_g;
2206 for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
2207
2208 // The following two if-statements are double secure checks. It will
2209 // make sure no-dimension-scale dimension variables and the associated coordinate variables(if any) are ignored.
2210 // Ignore ALL coordinate variables if this is "OTHERHDF" case and some dimensions
2211 // don't have dimension scale data.
2212 if ( true == f->Has_Dim_NoScale_Field() &&
2213 ((*it_g)->getFieldType() !=0)&&
2214 ((*it_g)->IsDimScale() == false))
2215 continue;
2216
2217 // Ignore the empty(no data) dimension variable.
2218 if (OTHERHDF == f->getSPType() && true == (*it_g)->IsDimNoScale())
2219 continue;
2220
2221 AttrTable *at = das.get_table((*it_g)->getNewName());
2222 if (!at)
2223 at = das.add_table((*it_g)->getNewName(), new AttrTable);
2224
2225 // Some fields have "long_name" attributes,so we have to use this attribute rather than creating our own
2226 bool long_name_flag = false;
2227
2228 for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();
2229 i!=(*it_g)->getAttributes().end();i++) {
2230 if((*i)->getName() == "long_name") {
2231 long_name_flag = true;
2232 break;
2233 }
2234 }
2235
2236 if(false == long_name_flag) {
2237 if (f->getSPType() == TRMML2_V7) {
2238 if((*it_g)->getFieldType() == 1)
2239 at->append_attr("standard_name","String","latitude");
2240 else if ((*it_g)->getFieldType() == 2) {
2241 at->append_attr("standard_name","String","longitude");
2242
2243 }
2244
2245 }
2246 else if (f->getSPType() == TRMML3S_V7 || f->getSPType() == TRMML3M_V7) {
2247 if((*it_g)->getFieldType() == 1) {
2248 at->append_attr("long_name","String","latitude");
2249 at->append_attr("standard_name","String","latitude");
2250
2251 }
2252 else if ((*it_g)->getFieldType() == 2) {
2253 at->append_attr("long_name","String","longitude");
2254 at->append_attr("standard_name","String","longitude");
2255 }
2256
2257 }
2258 else
2259 at->append_attr("long_name", "String", (*it_g)->getName());
2260 }
2261
2262 // For some OBPG files that only provide slope and intercept at the file level,
2263 // we need to add the global slope and intercept to all fields and change their names to scale_factor and add_offset.
2264 // For OBPG files that provide slope and intercept at the field level, we need to rename those attribute names to scale_factor and add_offset.
2265 HDFCFUtil::add_obpg_special_attrs(f,das,*it_g,scaling,slope,global_slope_flag,intercept,global_intercept_flag);
2266
2267 // MAP individual SDS field to DAP DAS
2268 for(vector<HDFSP::Attribute *>::const_iterator i=(*it_g)->getAttributes().begin();i!=(*it_g)->getAttributes().end();i++) {
2269
2270 // Handle string first.
2271 if((*i)->getType()==DFNT_UCHAR || (*i)->getType() == DFNT_CHAR){
2272 string tempstring2((*i)->getValue().begin(),(*i)->getValue().end());
2273 string tempfinalstr= string(tempstring2.c_str());
2274
2275 // We want to escape the possible special characters except the fullpath attribute. This may be overkilled since
2276 // fullpath is only added for some CERES and MERRA data. We think people use fullpath really mean to keep their
2277 // original names. So escaping them for the time being. KY 2013-10-14
2278
2279 at->append_attr((*i)->getNewName(), "String" ,((*i)->getNewName()=="fullpath")?tempfinalstr:HDFCFUtil::escattr(tempfinalstr));
2280 }
2281 else {
2282 for (int loc=0; loc < (*i)->getCount() ; loc++) {
2283 string print_rep = HDFCFUtil::print_attr((*i)->getType(), loc, (void*) &((*i)->getValue()[0]));
2284 at->append_attr((*i)->getNewName(), HDFCFUtil::print_type((*i)->getType()), print_rep);
2285 }
2286 }
2287
2288 }
2289
2290 // MAP dimension info. to DAS(Currently this should only affect the OTHERHDF case when no dimension scale for some dimensions)
2291 // KY 2012-09-19
2292 // For the type DFNT_CHAR, one dimensional char array is mapped to a scalar DAP string,
2293 // N dimensional char array is mapped to N-1 dimensional DAP string,
2294 // So the number of dimension info stored in the attribute container should be reduced by 1.
2295 // KY 2014-04-11
2296
2297 bool has_dim_info = true;
2298 vector<HDFSP::AttrContainer *>::const_iterator it_end = (*it_g)->getDimInfo().end();
2299 if((*it_g)->getType() == DFNT_CHAR) {
2300 if((*it_g)->getRank() >1 && (*it_g)->getDimInfo().size() >1)
2301 it_end = (*it_g)->getDimInfo().begin()+(*it_g)->getDimInfo().size() -1;
2302 else
2303 has_dim_info = false;
2304 }
2305
2306 if( true == has_dim_info) {
2307
2308 for(vector<HDFSP::AttrContainer *>::const_iterator i=(*it_g)->getDimInfo().begin();i!=it_end;i++) {
2309
2310 // Here a little surgory to add the field path(including) name before dim0, dim1, etc.
2311 string attr_container_name = (*it_g)->getNewName() + (*i)->getName();
2312 AttrTable *dim_at = das.get_table(attr_container_name);
2313 if (!dim_at)
2314 dim_at = das.add_table(attr_container_name, new AttrTable);
2315
2316 for (const auto &attr:(*i)->getAttributes()) {
2317
2318 // Handle string first.
2319 if(attr->getType()==DFNT_UCHAR || attr->getType() == DFNT_CHAR){
2320 string tempstring2(attr->getValue().begin(),attr->getValue().end());
2321 string tempfinalstr= string(tempstring2.c_str());
2322
2323 //escaping the special characters in string attributes when mapping to DAP
2324 dim_at->append_attr(attr->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
2325 }
2326 else {
2327 for (int loc=0; loc < attr->getCount() ; loc++) {
2328
2329 string print_rep = HDFCFUtil::print_attr(attr->getType(), loc, (void*) &(attr->getValue()[0]));
2330 dim_at->append_attr(attr->getNewName(), HDFCFUtil::print_type(attr->getType()), print_rep);
2331 }
2332 }
2333 }
2334
2335 }
2336 }
2337
2338 // Handle special CF attributes such as units, valid_range and coordinates
2339 // Overwrite units if fieldtype is latitude.
2340 if((*it_g)->getFieldType() == 1){
2341
2342 at->del_attr("units"); // Override any existing units attribute.
2343 at->append_attr("units", "String",(*it_g)->getUnits());
2344 if (f->getSPType() == CER_ES4) // Drop the valid_range attribute since the value will be interpreted wrongly by CF tools
2345 at->del_attr("valid_range");
2346
2347
2348 }
2349 // Overwrite units if fieldtype is longitude
2350 if((*it_g)->getFieldType() == 2){
2351 at->del_attr("units"); // Override any existing units attribute.
2352 at->append_attr("units", "String",(*it_g)->getUnits());
2353 if (f->getSPType() == CER_ES4) // Drop the valid_range attribute since the value will be interpreted wrongly by CF tools
2354 at->del_attr("valid_range");
2355
2356 }
2357
2358 // The following if-statement may not be necessary since fieldtype=4 is the missing CV.
2359 // This missing CV is added by the handler and the units is always level.
2360 if((*it_g)->getFieldType() == 4){
2361 at->del_attr("units"); // Override any existing units attribute.
2362 at->append_attr("units", "String",(*it_g)->getUnits());
2363 }
2364
2365 // Overwrite coordinates if fieldtype is neither lat nor lon.
2366 if((*it_g)->getFieldType() == 0){
2367 at->del_attr("coordinates"); // Override any existing units attribute.
2368
2369 // If no "dimension scale" dimension exists, delete the "coordinates" attributes
2370 if (false == f->Has_Dim_NoScale_Field()) {
2371 string coordinate = (*it_g)->getCoordinate();
2372 if (coordinate !="")
2373 at->append_attr("coordinates", "String", coordinate);
2374 }
2375 }
2376 }
2377
2378
2379 // For OTHERHDF products, add units for latitude and longitude; also change unit to units.
2380 HDFCFUtil::handle_otherhdf_special_attrs(f,das);
2381
2382 // For NASA products, add missing CF attributes if possible
2383 HDFCFUtil::add_missing_cf_attrs(f,das);
2384
2385 // Check if having _FillValue. If having _FillValue, compare the datatype of _FillValue
2386 // with the variable datatype. Correct the fillvalue datatype if necessary.
2387 for(it_g = spsds.begin(); it_g != spsds.end(); it_g++){
2388
2389 AttrTable *at = das.get_table((*it_g)->getNewName());
2390 if (at != nullptr) {
2391 int32 var_type = (*it_g)->getType();
2392 try {
2393 HDFCFUtil::correct_fvalue_type(at,var_type);
2394 }
2395 catch(...) {
2396 throw;
2397 }
2398 }
2399
2400 // If H4.EnableCheckScaleOffsetType BES key is true,
2401 // if yes, check if having scale_factor and add_offset attributes;
2402 // if yes, check if scale_factor and add_offset attribute types are the same;
2403 // if no, make add_offset's datatype be the same as the datatype of scale_factor.
2404 // (CF requires the type of scale_factor and add_offset the same).
2405 if (true == HDF4RequestHandler::get_enable_check_scale_offset_type() && at !=nullptr)
2407 }
2408
2409 // Optimization for users to tune the DAS output.
2410 HDFCFUtil::handle_merra_ceres_attrs_with_bes_keys(f,das,filename);
2411
2412 // Check the EnableVdataDescAttr key. If this key is turned on, the handler-added attribute VDdescname and
2413 // the attributes of vdata and vdata fields will be outputed to DAS. Otherwise, these attributes will
2414 // not output to DAS. The key will be turned off by default to shorten the DAP output. KY 2012-09-18
2415 try {
2416 HDFCFUtil::handle_vdata_attrs_with_desc_key(f,das);
2417 }
2418 catch(...) {
2419 throw;
2420 }
2421
2422 return true;
2423}
2424
2425// This routine is for case 4 of the cases returned by read_das_hdfeos2.
2426// Creating this routine is for performance reasons. Structmetadata is
2427// turned off because the information has been retrieved and presented
2428// by DDS and DAS.
2429// Currently we don't have a user case for this routine and also
2430// this code is not used. We still keep it for the future usage.
2431// KY 2014-01-29
2432
2433bool read_das_special_eos2(DAS &das,const string& filename,int32 sdfd,int32 fileid,bool ecs_metadata,HDFSP::File**fpptr) {
2434
2435 BESDEBUG("h4","Coming to read_das_special_eos2 " << endl);
2436
2437 // Define a file pointer
2438 HDFSP::File *f = nullptr;
2439
2440 try {
2441 // Obtain all the necesary information from HDF4 files.
2442 f = HDFSP::File::Read(filename.c_str(), sdfd,fileid);
2443 }
2444 catch (HDFSP::Exception &e)
2445 {
2446 if (f!= nullptr)
2447 delete f;
2448 throw InternalErr(e.what());
2449 }
2450
2451 try {
2452 // Generate CF coordinate variables(including auxiliary coordinate variables) and dimensions
2453 // All the names follow CF.
2454 f->Prepare();
2455 }
2456 catch (HDFSP::Exception &e) {
2457 delete f;
2458 throw InternalErr(e.what());
2459 }
2460
2461 *fpptr = f;
2462
2463 try {
2464 read_das_special_eos2_core(das, f, filename,ecs_metadata);
2465 }
2466 catch(...) {
2467 throw;
2468 }
2469
2470 // The return value is a dummy value, not used.
2471 return true;
2472}
2473
2474// This routine is for special EOS2 that can be tuned to build up DAS and DDS quickly.
2475// We also turn off the generation of StructMetadata for the performance reason.
// Build DAS for the special (fast-path) EOS2 case directly from the HDFSP::File object.
// ECS CoreMetadata/ArchiveMetadata are parsed only when ecs_metadata is true;
// StructMetadata is always skipped for performance.
bool read_das_special_eos2_core(DAS &das,const HDFSP::File* f,const string& filename,bool ecs_metadata) {

    BESDEBUG("h4","Coming to read_das_special_eos2_core "<<endl);
    // Initialize ECS metadata
    string core_metadata = "";
    string archive_metadata = "";
    string struct_metadata = "";

    // Obtain SD pointer, this is used to retrieve the file attributes associated with the SD interface
    const HDFSP::SD* spsd = f->getSD();

    //Ignore StructMetadata to improve performance
    for (const auto &attr:spsd->getAttributes()) {

        // Here we try to combine ECS metadata into a string.
        if((attr->getName().compare(0, 12, "CoreMetadata" )== 0) ||
            (attr->getName().compare(0, 12, "coremetadata" )== 0)){

            if(ecs_metadata == true) {
                // We assume that CoreMetadata.0, CoreMetadata.1, ..., CoreMetadata.n attributes
                // are processed in the right order during HDFSP::Attribute vector iteration.
                // Otherwise, this won't work.
                string tempstring(attr->getValue().begin(),attr->getValue().end());
                core_metadata.append(tempstring);
            }
        }
        else if((attr->getName().compare(0, 15, "ArchiveMetadata" )== 0) ||
                (attr->getName().compare(0, 16, "ArchivedMetadata")==0) ||
                (attr->getName().compare(0, 15, "archivemetadata" )== 0)){
            if(ecs_metadata == true) {
                string tempstring(attr->getValue().begin(),attr->getValue().end());
                archive_metadata.append(tempstring);
            }
        }
        else if((attr->getName().compare(0, 14, "StructMetadata" )== 0) ||
                (attr->getName().compare(0, 14, "structmetadata" )== 0))
            ; // Ignore StructMetadata for performance
        else {
            // Process global attributes: anything that is not ECS metadata goes to HDF_GLOBAL.
            AttrTable *at = das.get_table("HDF_GLOBAL");
            if (!at)
                at = das.add_table("HDF_GLOBAL", new AttrTable);

            // We treat string differently. DFNT_UCHAR and DFNT_CHAR are treated as strings.
            if(attr->getType()==DFNT_UCHAR || attr->getType() == DFNT_CHAR){
                string tempstring2(attr->getValue().begin(),attr->getValue().end());
                auto tempfinalstr= string(tempstring2.c_str());

                // Using the customized escattr function to escape special characters except
                // \n,\r,\t since escaping them may make the attributes hard to read. KY 2013-10-14
                at->append_attr(attr->getNewName(), "String" , HDFCFUtil::escattr(tempfinalstr));
            }

            else {
                for (int loc=0; loc < attr->getCount() ; loc++) {
                    string print_rep = HDFCFUtil::print_attr(attr->getType(), loc, (void*) &(attr->getValue()[0]));
                    at->append_attr(attr->getNewName(), HDFCFUtil::print_type(attr->getType()), print_rep);
                }

            }
        }

    }

    // The following code may be condensed in the future. KY 2012-09-19
    // Coremetadata, structmetadata and archive metadata need special parsers.

    if(ecs_metadata == true) {
        // Write coremetadata.
        if(core_metadata.size() > 0){
            AttrTable *at = das.get_table("CoreMetadata");
            if (!at)
                at = das.add_table("CoreMetadata", new AttrTable);
            // tell lexer to scan attribute string
            void *buf = hdfeos_string(core_metadata.c_str());
            parser_arg arg(at);

            // A non-zero return is a hard parser failure; free the lexer buffer before throwing.
            if (hdfeosparse(&arg) != 0) {
                hdfeos_delete_buffer(buf);
                throw Error("Parse error while processing a CoreMetadata attribute.");
            }

            // Errors returned from here are ignored.
            if (arg.status() == false) {
                ERROR_LOG("Parse error while processing a CoreMetadata attribute. (2)" << endl);
#if 0
                //for debugging
                << arg.error()->get_error_message() << endl;
#endif
            }

            hdfeos_delete_buffer(buf);

        }

        // Write archive metadata.
        if(archive_metadata.size() > 0){
            AttrTable *at = das.get_table("ArchiveMetadata");
            if (!at)
                at = das.add_table("ArchiveMetadata", new AttrTable);
            // tell lexer to scan attribute string
            void *buf = hdfeos_string(archive_metadata.c_str());
            parser_arg arg(at);
            if (hdfeosparse(&arg) != 0) {
                hdfeos_delete_buffer(buf);
                throw Error("Parse error while processing an ArchiveMetadata attribute.");
            }

            // Errors returned from here are ignored.
            if (arg.status() == false) {
                ERROR_LOG("Parse error while processing an ArchiveMetadata attribute. (2)" << endl);
                // << arg.error()->get_error_message() << endl;
            }

            hdfeos_delete_buffer(buf);
        }
    }

    // Handle individual fields
    const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();

    for (const auto &sdf:spsds){

        // Add units for CV variables
#if 0
// if(sdf->getFieldType() != 0 && sdf->IsDimScale() == false)
#endif
        // Non-zero field type marks a coordinate variable (1=lat, 2=lon, others handler-created CVs).
        if(sdf->getFieldType() != 0){

            AttrTable *at = das.get_table(sdf->getNewName());
            if (!at)
                at = das.add_table(sdf->getNewName(), new AttrTable);

            // Only add units/long_name when the file itself did not supply them.
            string tempunits = sdf->getUnits();
            if(at->simple_find("units")== at->attr_end() && tempunits!="")
                at->append_attr("units", "String" ,tempunits);
            if(sdf->getFieldType() == 1){
                if(at->simple_find("long_name")== at->attr_end())
                    at->append_attr("long_name","String","Latitude");
            }
            else if(sdf->getFieldType() == 2) {
                if(at->simple_find("long_name")== at->attr_end())
                    at->append_attr("long_name","String","Longitude");
            }
        }
        else {// We will check if having the coordinates attribute.
            AttrTable *at = das.get_table(sdf->getNewName());
            if (!at)
                at = das.add_table(sdf->getNewName(), new AttrTable);
            string tempcoors = sdf->getCoordinate();
            // If we add the coordinates attribute, any existing coordinates attribute will be removed.
            if(tempcoors!=""){
                at->del_attr("coordinates");
                at->append_attr("coordinates","String",tempcoors);
            }
        }

        // Ignore variables that don't have attributes.
        if(sdf->getAttributes().empty())
            continue;

        AttrTable *at = das.get_table(sdf->getNewName());
        if (!at)
            at = das.add_table(sdf->getNewName(), new AttrTable);

        // MAP individual SDS field to DAP DAS
        for (const auto &attr:sdf->getAttributes()) {

            // Handle string first.
            if(attr->getType()==DFNT_UCHAR || attr->getType() == DFNT_CHAR){
                string tempstring2(attr->getValue().begin(),attr->getValue().end());
                string tempfinalstr= string(tempstring2.c_str());

                // We want to escape the possible special characters for attributes except the fullpath attribute. This may be overkilled since
                // fullpath is only added for some CERES and MERRA data. However, we think people use fullpath really mean to keep their
                // original names. So we don't escape the fullpath attribute. KY 2013-10-14

                at->append_attr(attr->getNewName(), "String" ,(attr->getNewName()=="fullpath")?tempfinalstr:HDFCFUtil::escattr(tempfinalstr));
            }
            else {
                for (int loc=0; loc < attr->getCount() ; loc++) {
                    string print_rep = HDFCFUtil::print_attr(attr->getType(), loc, (void*) &(attr->getValue()[0]));
                    at->append_attr(attr->getNewName(), HDFCFUtil::print_type(attr->getType()), print_rep);
                }
            }
        }

    }

    // Handle HDF-EOS2 object attributes. These are found in AIRS version 6.
    HDFCFUtil::map_eos2_objects_attrs(das,filename);

    return true;
}
2670
2671
// MOD/MYD08M3 follows the no-CF scale/offset rule; we need to change the add_offset value when add_offset is not 0.
2673void change_das_mod08_scale_offset(DAS &das, const HDFSP::File *f) {
2674
2675 // Handle individual fields
2676 // Check HDFCFUtil::handle_modis_special_attrs_disable_scale_comp
2677
2678 const vector<HDFSP::SDField *>& spsds = f->getSD()->getFields();
2679 for (const auto &sdf:spsds) {
2680 if(sdf->getFieldType() == 0){
2681 AttrTable *at = das.get_table(sdf->getNewName());
2682 if (!at)
2683 at = das.add_table(sdf->getNewName(), new AttrTable);
2684
2685 // Declare add_offset type in string format.
2686 string add_offset_type;
2687
2688 // add_offset values
2689 string add_offset_value="0";
2690 double orig_offset_value = 0;
2691 bool add_offset_modify = false;
2692
2693
2694 // Go through all attributes to find add_offset
2695 // If add_offset is 0 or add_offset is not found, we don't need
2696 // to modify the add_offset value.
2697 AttrTable::Attr_iter it = at->attr_begin();
2698 while (it!=at->attr_end())
2699 {
2700 if(at->get_name(it)=="add_offset")
2701 {
2702 add_offset_value = (*at->get_attr_vector(it)->begin());
2703 orig_offset_value = atof(add_offset_value.c_str());
2704 add_offset_type = at->get_type(it);
2705 if(add_offset_value == "0.0" || orig_offset_value == 0)
2706 add_offset_modify = false;
2707 else
2708 add_offset_modify = true;
2709 break;
2710 }
2711 it++;
2712
2713 }
2714
2715 // We need to modify the add_offset value if the add_offset exists.
2716 if( true == add_offset_modify) {
2717
2718 // Declare scale_factor type in string format.
2719 string scale_factor_type;
2720
2721 // Scale values
2722 string scale_factor_value="";
2723 double orig_scale_value = 1;
2724
2725 it = at->attr_begin();
2726 while (it!=at->attr_end())
2727 {
2728 if(at->get_name(it)=="scale_factor")
2729 {
2730 scale_factor_value = (*at->get_attr_vector(it)->begin());
2731 orig_scale_value = atof(scale_factor_value.c_str());
2732 scale_factor_type = at->get_type(it);
2733 }
2734 it++;
2735 }
2736
2737 if(scale_factor_value.length() !=0) {
2738 double new_offset_value = -1 * orig_scale_value*orig_offset_value;
2739 string print_rep = HDFCFUtil::print_attr(DFNT_FLOAT64,0,(void*)(&new_offset_value));
2740 at->del_attr("add_offset");
2741 at->append_attr("add_offset", HDFCFUtil::print_type(DFNT_FLOAT64), print_rep);
2742 }
2743 }
2744
2745 }
2746
2747 }
2748
2749}
2750
2751// Function to build special AIRS version 6 and MOD08_M3 DDS. Doing this way is for improving performance.
// Build the DDS for the special AIRS version 6 and MOD08_M3-like products
// using only the HDF4 SDS APIs (no HDF-EOS2 library calls), which improves
// performance for these products.
//
// Parameters:
//   dds         - the DDS to fill in
//   spf         - the parsed HDFSP::File description of this HDF4 file
//   filename    - the HDF4 file path
//   sdid        - the SD interface identifier of the already-opened file
//   check_cache - when true, also (re)generate the binary DDS cache file
//
// Returns true on success; throws InternalErr on any failure.
bool read_dds_special_1d_grid(DDS &dds,const HDFSP::File* spf,const string& filename, int32 sdid,bool check_cache) {


    BESDEBUG("h4","Coming to read_dds_special_1d_grid "<<endl);
    bool dds_cache = false;
    size_t total_bytes_dds_cache = 0;

    // Only support AIRS version 6 level 2 or level 3 KY 2015-06-07
    if(true == check_cache) {

        // A cache size of 0 means this file does not qualify for DDS caching.
        total_bytes_dds_cache = HDFCFUtil::obtain_dds_cache_size(spf);
        BESDEBUG("h4","Total DDS cache file size is "<< total_bytes_dds_cache<<endl);
        if(total_bytes_dds_cache !=0)
            dds_cache = true;

    }

    SPType sptype = OTHERHDF;
    const vector<HDFSP::SDField *>& spsds = spf->getSD()->getFields();

    // Read SDS
    for (const auto &spsdsf:spsds) {

        // Map the HDF4 data type to the matching DAP scalar type; the scalar
        // object becomes the template variable for the DAP arrays built below.
        BaseType *bt=nullptr;
        switch(spsdsf->getType()) {
#define HANDLE_CASE(tid, type) \
    case tid: \
        bt = new (type)(spsdsf->getNewName(),filename); \
        break;
        HANDLE_CASE(DFNT_FLOAT32, HDFFloat32)
        HANDLE_CASE(DFNT_FLOAT64, HDFFloat64)
        HANDLE_CASE(DFNT_CHAR, HDFStr)
#ifndef SIGNED_BYTE_TO_INT32
        HANDLE_CASE(DFNT_INT8, HDFByte)
#else
        HANDLE_CASE(DFNT_INT8,HDFInt32)
#endif
        HANDLE_CASE(DFNT_UINT8, HDFByte)
        HANDLE_CASE(DFNT_INT16, HDFInt16)
        HANDLE_CASE(DFNT_UINT16, HDFUInt16)
        HANDLE_CASE(DFNT_INT32, HDFInt32)
        HANDLE_CASE(DFNT_UINT32, HDFUInt32)
        HANDLE_CASE(DFNT_UCHAR8, HDFByte)
      default:
        throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
#undef HANDLE_CASE
        }

        if(bt)
        {

            const vector<HDFSP::Dimension*>& dims= spsdsf->getDimensions();

            vector<HDFSP::Dimension*>::const_iterator it_d;

            // Char will be mapped to DAP string.
            if(DFNT_CHAR == spsdsf->getType()) {
                // Rank-1 char array becomes a scalar DAP String.
                if(1 == spsdsf->getRank()) {
                    HDFCFStr * sca_str = nullptr;
                    try {
                        sca_str = new HDFCFStr(
                                              sdid,
                                              spsdsf->getFieldRef(),
                                              filename,
                                              spsdsf->getName(),
                                              spsdsf->getNewName(),
                                              false
                                              );
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
                    }
                    // add_var copies the variable, so both locals are freed here.
                    dds.add_var(sca_str);
                    delete bt;
                    delete sca_str;
                }

                // Rank-N char array becomes an (N-1)-D array of DAP Strings;
                // the fastest-varying dimension supplies the string length.
                else {
                    HDFCFStrField *ar = nullptr;
                    try {

                        ar = new HDFCFStrField(
                                               spsdsf->getRank() -1 ,
                                               filename,
                                               false,
                                               sdid,
                                               spsdsf->getFieldRef(),
                                               0,
                                               spsdsf->getName(),
                                               spsdsf->getNewName(),
                                               bt);

                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStrField instance.");
                    }

                    // Append all dimensions except the last (string-length) one.
                    for(it_d = dims.begin(); it_d != dims.begin()+dims.size()-1; it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;
                }

            }

            else {// Other datatypes

                // Non missing fields
                if(spsdsf->getFieldType()!= 4) {
                    HDFSPArray_RealField *ar = nullptr;

                    try {

                        vector<int32>dimsizes;

                        dimsizes.resize(spsdsf->getRank());
                        for(int i = 0; i <spsdsf->getRank();i++)
                            dimsizes[i] = (dims[i])->getSize();
                        ar = new HDFSPArray_RealField(
                                                      spsdsf->getRank(),
                                                      filename,
                                                      sdid,
                                                      spsdsf->getFieldRef(),
                                                      spsdsf->getType(),
                                                      sptype,
                                                      spsdsf->getName(),
                                                      dimsizes,
                                                      spsdsf->getNewName(),
                                                      bt);
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFSPArray_RealField instance.");
                    }
                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;
                }
                else {
                    // Field type 4: missing Z-dimension field; must be 1-D and is
                    // filled with generated values rather than read from the file.
                    if(spsdsf->getRank()!=1){
                        delete bt;
                        throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
                    }
                    int nelem = (spsdsf->getDimensions()[0])->getSize();

                    HDFSPArrayMissGeoField *ar = nullptr;

                    try {
                        ar = new HDFSPArrayMissGeoField(
                                                        spsdsf->getRank(),
                                                        nelem,
                                                        spsdsf->getNewName(),
                                                        bt);
                    }
                    catch(...) {
                        delete bt;
                        throw InternalErr(__FILE__,__LINE__,
                                          "Unable to allocate the HDFSPArrayMissGeoField instance.");
                    }


                    for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                        ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                    dds.add_var(ar);
                    delete bt;
                    delete ar;

                }
            }
        }
    }

    // If we need to generate a DDS cache file,
    if(true == dds_cache) {

        // Check the file path
        string md_cache_dir;
        string key = "H4.Cache.metadata.path";
        bool found = false;
        TheBESKeys::TheKeys()->get_value(key,md_cache_dir,found);

        if(true == found) {

            // Create the DDS cache file name.
            string base_file_name = basename(filename);
            string dds_filename = md_cache_dir + "/"+base_file_name +"_dds";

            // DDS cache file is a binary file, this makes the file size smaller.
            FILE* dds_file =fopen(dds_filename.c_str(),"wb");
            if(nullptr == dds_file) {
                string msg = "Cannot create the cache file. " + dds_filename + get_errno();
                throw InternalErr(__FILE__,__LINE__,msg);
            }
            // Take an advisory write lock so concurrent BES processes don't
            // write the same cache file at the same time.
            int fd = fileno(dds_file);
            struct flock *l= lock(F_WRLCK);
            if (fcntl(fd, F_SETLKW, l) == -1) {
                fclose(dds_file);
                string msg = "Cannot hold the write lock for dds cached file "+ dds_filename;
                throw InternalErr (__FILE__, __LINE__,msg);
            }
            // TRY CATCH to close fclose.
            try {
                HDFCFUtil::write_sp_sds_dds_cache(spf,dds_file,total_bytes_dds_cache,dds_filename);
            }
            catch(...) {
                // Release the lock before closing; report whichever failure
                // happens first.
                if (fcntl(fd, F_SETLK, lock(F_UNLCK)) == -1) {
                    fclose(dds_file);
                    string msg = "Cannot release the write lock for dds cached file "+ dds_filename;
                    throw InternalErr (__FILE__, __LINE__,msg);
                }

                fclose(dds_file);
                throw InternalErr(__FILE__,__LINE__,"Fail to generate a dds cache file.");
            }
            if (fcntl(fd, F_SETLK, lock(F_UNLCK)) == -1) {
                fclose(dds_file);
                string msg = "Cannot release the write lock for dds cached file "+ dds_filename;
                throw InternalErr (__FILE__, __LINE__,msg);
            }
            fclose(dds_file);

        }

        else {
            throw InternalErr (__FILE__, __LINE__,
                               "DDS/DAS metadata cache path cannot be found when 'H4.EnableMetaDataCacheFile' key is set to be true.");
        }
    }

    return true;

}
2989
2990// Read SDS fields
// Read one SDS field description and add the corresponding variable to the DDS.
//
// Parameters:
//   dds      - the DDS to fill in
//   filename - the HDF4 file path
//   sdfd     - the SD interface identifier of the already-opened file
//   spsds    - the SDS field description object
//   sptype   - the special NASA product type this file was recognized as
//
// The field type returned by spsds->getFieldType() selects the handling:
//   0/3 - regular field or existing non-lat/lon coordinate variable
//   1/2 - latitude/longitude
//   4   - missing Z-dimension field (values are generated, not read)
//   6   - coordinate variable added from the product specification
void read_dds_spfields(DDS &dds,const string& filename,const int sdfd,const HDFSP::SDField *spsds, SPType sptype) {

    BESDEBUG("h4","Coming to read_dds_spfields "<<endl);

    // Ignore the dimension variable that is empty for non-special handling NASA HDF products
    if(OTHERHDF == sptype && (true == spsds->IsDimNoScale()))
        return;

    // Map the HDF4 data type to the matching DAP scalar type; the scalar
    // object becomes the template variable for the DAP arrays built below.
    BaseType *bt=nullptr;
    switch(spsds->getType()) {

#define HANDLE_CASE(tid, type) \
    case tid: \
        bt = new (type)(spsds->getNewName(),filename); \
        break;
        HANDLE_CASE(DFNT_FLOAT32, HDFFloat32)
        HANDLE_CASE(DFNT_FLOAT64, HDFFloat64)
        HANDLE_CASE(DFNT_CHAR, HDFStr)
#ifndef SIGNED_BYTE_TO_INT32
        HANDLE_CASE(DFNT_INT8, HDFByte)
        //HANDLE_CASE(DFNT_CHAR, HDFByte);
#else
        HANDLE_CASE(DFNT_INT8,HDFInt32)
        //HANDLE_CASE(DFNT_CHAR, HDFInt32);
#endif
        HANDLE_CASE(DFNT_UINT8, HDFByte);
        HANDLE_CASE(DFNT_INT16, HDFInt16);
        HANDLE_CASE(DFNT_UINT16, HDFUInt16);
        HANDLE_CASE(DFNT_INT32, HDFInt32);
        HANDLE_CASE(DFNT_UINT32, HDFUInt32);
        HANDLE_CASE(DFNT_UCHAR, HDFByte);
      default:
        throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
#undef HANDLE_CASE
    }
    int fieldtype = spsds->getFieldType();// Whether the field is real field,lat/lon field or missing Z-dimension field

    if(bt)
    {

        const vector<HDFSP::Dimension*>& dims= spsds->getCorrectedDimensions();
        vector<HDFSP::Dimension*>::const_iterator it_d;

        // Char is mapped to DAP String; rank 1 becomes a scalar String and
        // rank N becomes an (N-1)-D array of Strings whose fastest-varying
        // dimension supplies the string length.
        if(DFNT_CHAR == spsds->getType()) {

            if(1 == spsds->getRank()) {

                HDFCFStr * sca_str = nullptr;

                try {

                    sca_str = new HDFCFStr(
                                          sdfd,
                                          spsds->getFieldRef(),
                                          filename,
                                          spsds->getName(),
                                          spsds->getNewName(),
                                          false
                                          );
                }
                catch(...) {
                    delete bt;
                    throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
                }
                // add_var copies the variable, so both locals are freed here.
                dds.add_var(sca_str);
                delete bt;
                delete sca_str;
            }
            else {
                HDFCFStrField *ar = nullptr;
                try {

                    ar = new HDFCFStrField(
                                           spsds->getRank() -1 ,
                                           filename,
                                           false,
                                           sdfd,
                                           spsds->getFieldRef(),
                                           0,
                                           spsds->getName(),
                                           spsds->getNewName(),
                                           bt);

                }
                catch(...) {
                    delete bt;
                    throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStrField instance.");
                }

                // Append all dimensions except the last (string-length) one.
                for(it_d = dims.begin(); it_d != dims.begin()+dims.size()-1; it_d++)
                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                dds.add_var(ar);
                delete bt;
                delete ar;
            }

        }

        // For non-CV variables and the existing non-lat/lon CV variables
        else if(fieldtype == 0 || fieldtype == 3 ) {

            HDFSPArray_RealField *ar = nullptr;

            try {
                vector<int32>dimsizes;
                dimsizes.resize(spsds->getRank());
                for(int i = 0; i <spsds->getRank();i++)
                    dimsizes[i] = (dims[i])->getSize();

                ar = new HDFSPArray_RealField(
                                              spsds->getRank(),
                                              filename,
                                              sdfd,
                                              spsds->getFieldRef(),
                                              spsds->getType(),
                                              sptype,
                                              spsds->getName(),
                                              dimsizes,
                                              spsds->getNewName(),
                                              bt);
            }
            catch(...) {
                delete bt;
                throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFSPArray_RealField instance.");
            }

            for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
            dds.add_var(ar);
            delete bt;
            delete ar;
        }

        // For latitude and longitude
        else if(fieldtype == 1 || fieldtype == 2) {

            // These two product families store lat/lon as regular fields, so
            // no special geolocation handling is needed for them.
            if(sptype == MODISARNSS || sptype == TRMML2_V7) {

                HDFSPArray_RealField *ar = nullptr;

                try {

                    vector<int32>dimsizes;

                    dimsizes.resize(spsds->getRank());
                    for(int i = 0; i <spsds->getRank();i++)
                        dimsizes[i] = (dims[i])->getSize();

                    ar = new HDFSPArray_RealField(
                                                  spsds->getRank(),
                                                  filename,
                                                  sdfd,
                                                  spsds->getFieldRef(),
                                                  spsds->getType(),
                                                  sptype,
                                                  spsds->getName(),
                                                  dimsizes,
                                                  spsds->getNewName(),
                                                  bt);
                }
                catch(...) {
                    delete bt;
                    throw InternalErr(__FILE__,__LINE__,
                                      "Unable to allocate the HDFSPArray_RealField instance.");
                }


                for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                dds.add_var(ar);
                delete bt;
                delete ar;

            }
            else {

                HDFSPArrayGeoField *ar = nullptr;

                try {
                    ar = new HDFSPArrayGeoField(
                                                spsds->getRank(),
                                                filename,
                                                sdfd,
                                                spsds->getFieldRef(),
                                                spsds->getType(),
                                                sptype,
                                                fieldtype,
                                                spsds->getName(),
                                                spsds->getNewName(),
                                                bt);
                }
                catch(...) {
                    delete bt;
                    throw InternalErr(__FILE__,__LINE__,
                                      "Unable to allocate the HDFSPArray_RealField instance.");
                }

                for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                    ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
                dds.add_var(ar);
                delete bt;
                delete ar;
            }
        }


        else if(fieldtype == 4) { //missing Z dimensional field(or coordinate variables with missing values)
            if(spsds->getRank()!=1){
                delete bt;
                throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
            }
            int nelem = (spsds->getDimensions()[0])->getSize();

            HDFSPArrayMissGeoField *ar = nullptr;

            try {
                ar = new HDFSPArrayMissGeoField(
                                                spsds->getRank(),
                                                nelem,
                                                spsds->getNewName(),
                                                bt);
            }
            catch(...) {
                delete bt;
                throw InternalErr(__FILE__,__LINE__,
                                  "Unable to allocate the HDFSPArrayMissGeoField instance.");
            }


            for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
            dds.add_var(ar);
            delete bt;
            delete ar;
        }
        // fieldtype =5 originally keeps for time. Still keep it for a while.

        else if(fieldtype == 6) { //Coordinate variables added from the product specification

            if(spsds->getRank()!=1){
                delete bt;
                throw InternalErr(__FILE__, __LINE__, "The rank of added coordinate variable must be 1");
            }
            int nelem = (spsds->getDimensions()[0])->getSize();

            HDFSPArrayAddCVField *ar = nullptr;
            try {
                ar = new HDFSPArrayAddCVField(
                                              spsds->getType(),
                                              sptype,
                                              spsds->getName(),
                                              nelem,
                                              spsds->getNewName(),
                                              bt);
            }
            catch(...) {
                delete bt;
                throw InternalErr(__FILE__,__LINE__,
                                  "Unable to allocate the HDFSPArrayAddCVField instance.");
            }


            for(it_d = dims.begin(); it_d != dims.end(); it_d++)
                ar->append_dim((*it_d)->getSize(), (*it_d)->getName());
            dds.add_var(ar);
            delete bt;
            delete ar;
        }
        else {
            delete bt;
            throw InternalErr(__FILE__, __LINE__, "The field type should be one of 0,1,2,3,4 or 6.");

        }
    }

}
3267
3268// Read Vdata fields.
3269void read_dds_spvdfields(DDS &dds,const string & filename, const int fileid,int32 objref,int32 numrec,HDFSP::VDField *spvd) {
3270
3271 BESDEBUG("h4","Coming to read_dds_spvdfields "<<endl);
3272
3273 // First map the HDF4 datatype to DAP2
3274 BaseType *bt=nullptr;
3275 switch(spvd->getType()) {
3276#define HANDLE_CASE(tid, type) \
3277 case tid: \
3278 bt = new (type)(spvd->getNewName(),filename); \
3279 break;
3280 HANDLE_CASE(DFNT_FLOAT32, HDFFloat32)
3281 HANDLE_CASE(DFNT_FLOAT64, HDFFloat64)
3282 HANDLE_CASE(DFNT_CHAR8,HDFStr)
3283#ifndef SIGNED_BYTE_TO_INT32
3284 HANDLE_CASE(DFNT_INT8, HDFByte)
3285#else
3286 HANDLE_CASE(DFNT_INT8,HDFInt32)
3287#endif
3288 HANDLE_CASE(DFNT_UINT8, HDFByte)
3289 HANDLE_CASE(DFNT_INT16, HDFInt16)
3290 HANDLE_CASE(DFNT_UINT16, HDFUInt16)
3291 HANDLE_CASE(DFNT_INT32, HDFInt32)
3292 HANDLE_CASE(DFNT_UINT32, HDFUInt32)
3293 HANDLE_CASE(DFNT_UCHAR8, HDFByte)
3294 //HANDLE_CASE(DFNT_CHAR8, HDFByte)
3295 //HANDLE_CASE(DFNT_CHAR8, HDFByte)
3296 default:
3297 throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
3298#undef HANDLE_CASE
3299 }
3300
3301 if(bt)
3302 {
3303
3304 if(DFNT_CHAR == spvd->getType()) {
3305
3306 // If the field order is >1, the vdata field will be 2-D array
3307 // with the number of elements along the fastest changing dimension
3308 // as the field order.
3309 int vdrank = ((spvd->getFieldOrder())>1)?2:1;
3310 if (1 == vdrank) {
3311
3312 HDFCFStr * sca_str = nullptr;
3313 try {
3314 sca_str = new HDFCFStr(
3315 fileid,
3316 objref,
3317 filename,
3318 spvd->getName(),
3319 spvd->getNewName(),
3320 true
3321 );
3322 }
3323 catch(...) {
3324 delete bt;
3325 throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStr instance.");
3326 }
3327 dds.add_var(sca_str);
3328 delete bt;
3329 delete sca_str;
3330 }
3331
3332 else {
3333
3334 HDFCFStrField *ar = nullptr;
3335 try {
3336
3337 ar = new HDFCFStrField(
3338 vdrank -1 ,
3339 filename,
3340 true,
3341 fileid,
3342 objref,
3343 spvd->getFieldOrder(),
3344 spvd->getName(),
3345 spvd->getNewName(),
3346 bt);
3347
3348 }
3349 catch(...) {
3350 delete bt;
3351 throw InternalErr(__FILE__,__LINE__,"Unable to allocate the HDFCFStrField instance.");
3352 }
3353
3354 string dimname0 = "VDFDim0_"+spvd->getNewName();
3355 ar->append_dim(numrec, dimname0);
3356 dds.add_var(ar);
3357 delete bt;
3358 delete ar;
3359
3360 }
3361 }
3362 else {
3363 HDFSPArray_VDField *ar = nullptr;
3364
3365 // If the field order is >1, the vdata field will be 2-D array
3366 // with the number of elements along the fastest changing dimension
3367 // as the field order.
3368 int vdrank = ((spvd->getFieldOrder())>1)?2:1;
3369 ar = new HDFSPArray_VDField(
3370 vdrank,
3371 filename,
3372 fileid,
3373 objref,
3374 spvd->getType(),
3375 spvd->getFieldOrder(),
3376 spvd->getName(),
3377 spvd->getNewName(),
3378 bt);
3379
3380 string dimname1 = "VDFDim0_"+spvd->getNewName();
3381
3382 string dimname2 = "VDFDim1_"+spvd->getNewName();
3383 if(spvd->getFieldOrder() >1) {
3384 ar->append_dim(numrec,dimname1);
3385 ar->append_dim(spvd->getFieldOrder(),dimname2);
3386 }
3387 else
3388 ar->append_dim(numrec,dimname1);
3389
3390 dds.add_var(ar);
3391 delete bt;
3392 delete ar;
3393 }
3394 }
3395
3396}
3397
3398// This routine will check if this is a special EOS2 file that we can improve the performance
3399// Currently AIRS level 2 and 3 version 6 and MOD08_M3-like products are what we can serve. KY 2014-01-29
3400int check_special_eosfile(const string & filename, string& grid_name,int32 sdfd) {
3401
3402 int32 sds_id = 0;
3403 int32 n_sds = 0;
3404 int32 n_sd_attrs = 0;
3405 bool is_eos = false;
3406 int ret_val = 1;
3407
3408 // Obtain number of SDS objects and number of SD(file) attributes
3409 if (SDfileinfo (sdfd, &n_sds, &n_sd_attrs) == FAIL){
3410 throw InternalErr (__FILE__,__LINE__,"SDfileinfo failed ");
3411 }
3412
3413 char attr_name[H4_MAX_NC_NAME];
3414 int32 attr_type = -1;
3415 int32 attr_count = -1;
3416 char structmdname[] = "StructMetadata.0";
3417
3418 // Is this an HDF-EOS2 file?
3419 for (int attr_index = 0; attr_index < n_sd_attrs;attr_index++) {
3420 if(SDattrinfo(sdfd,attr_index,attr_name,&attr_type,&attr_count) == FAIL) {
3421 throw InternalErr (__FILE__,__LINE__,"SDattrinfo failed ");
3422 }
3423
3424 if(strcmp(attr_name,structmdname)==0) {
3425 is_eos = true;
3426 break;
3427 }
3428 }
3429
3430 if(true == is_eos) {
3431
3432 int sds_index = 0;
3433 int32 sds_rank = 0;
3434 int32 dim_sizes[H4_MAX_VAR_DIMS];
3435 int32 sds_dtype = 0;
3436 int32 n_sds_attrs = 0;
3437 char sds_name[H4_MAX_NC_NAME];
3438 char xdim_name[] ="XDim";
3439 char ydim_name[] ="YDim";
3440
3441 string temp_grid_name1;
3442 string temp_grid_name2;
3443 bool xdim_is_cv_flag = false;
3444 bool ydim_is_cv_flag = false;
3445
3446
3447 // The following for-loop checks if this is a MOD08_M3-like HDF-EOS2 product.
3448 for (sds_index = 0; sds_index < n_sds; sds_index++) {
3449
3450 sds_id = SDselect (sdfd, sds_index);
3451 if (sds_id == FAIL) {
3452 throw InternalErr (__FILE__,__LINE__,"SDselect failed ");
3453 }
3454
3455 // Obtain object name, rank, size, field type and number of SDS attributes
3456 int status = SDgetinfo (sds_id, sds_name, &sds_rank, dim_sizes,
3457 &sds_dtype, &n_sds_attrs);
3458 if (status == FAIL) {
3459 SDendaccess(sds_id);
3460 throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3461 }
3462
3463 if(1 == sds_rank) {
3464
3465 // This variable "XDim" exists
3466 if(strcmp(sds_name,xdim_name) == 0) {
3467 int32 sds_dimid = SDgetdimid(sds_id,0);
3468 if(sds_dimid == FAIL) {
3469 SDendaccess(sds_id);
3470 throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3471 }
3472 char dim_name[H4_MAX_NC_NAME];
3473 int32 dim_size = 0;
3474 int32 dim_type = 0;
3475 int32 num_dim_attrs = 0;
3476 if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3477 SDendaccess(sds_id);
3478 throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3479 }
3480
3481 // No dimension scale and XDim exists
3482 if(0 == dim_type) {
3483 string tempdimname(dim_name);
3484 if(tempdimname.size() >=5) {
3485 if(tempdimname.compare(0,5,"XDim:") == 0) {
3486
3487 // Obtain the grid name.
3488 temp_grid_name1 = tempdimname.substr(5);
3489 xdim_is_cv_flag = true;
3490
3491 }
3492 }
3493 else if("XDim" == tempdimname)
3494 xdim_is_cv_flag = true;
3495 }
3496 }
3497
3498 // The variable "YDim" exists
3499 if(strcmp(sds_name,ydim_name) == 0) {
3500
3501 int32 sds_dimid = SDgetdimid(sds_id,0);
3502 if(sds_dimid == FAIL) {
3503 SDendaccess (sds_id);
3504 throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3505 }
3506 char dim_name[H4_MAX_NC_NAME];
3507 int32 dim_size = 0;
3508 int32 dim_type = 0;
3509 int32 num_dim_attrs = 0;
3510 if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3511 SDendaccess(sds_id);
3512 throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3513 }
3514
3515 // For this case, the dimension should not have dimension scales.
3516 if(0 == dim_type) {
3517 string tempdimname(dim_name);
3518 if(tempdimname.size() >=5) {
3519 if(tempdimname.compare(0,5,"YDim:") == 0) {
3520 // Obtain the grid name.
3521 temp_grid_name2 = tempdimname.substr(5);
3522 ydim_is_cv_flag = true;
3523 }
3524 }
3525 else if ("YDim" == tempdimname)
3526 ydim_is_cv_flag = true;
3527 }
3528 }
3529 }
3530
3531 SDendaccess(sds_id);
3532 if((true == xdim_is_cv_flag) && (true == ydim_is_cv_flag ))
3533 break;
3534
3535 }
3536
3537 // If one-grid and variable XDim/YDim exist and also they don't have dim. scales,we treat this as MOD08-M3-like products
3538 if ((temp_grid_name1 == temp_grid_name2) && (true == xdim_is_cv_flag) && (true == ydim_is_cv_flag)) {
3539 grid_name = temp_grid_name1;
3540 ret_val = 2;
3541 }
3542
3543 // Check if this is a new AIRS level 2 and 3 product. Since new AIRS level 2 and 3 version 6 products still have dimensions that don't have
3544 // dimension scales and the old way to handle level 2 and 3 dimensions makes the performance suffer. We will see if we can improve
3545 // performance by handling the data with just the HDF4 interfaces.
3546 // At least the file name should have string AIRS.L3. or AIRS.L2..
3547 else if((basename(filename).size() >8) && (basename(filename).compare(0,4,"AIRS") == 0)
3548 && ((basename(filename).find(".L3.")!=string::npos) || (basename(filename).find(".L2.")!=string::npos))){
3549
3550 bool has_dimscale = false;
3551
3552 // Go through the SDS object and check if this file has dimension scales.
3553 for (sds_index = 0; sds_index < n_sds; sds_index++) {
3554
3555 sds_id = SDselect (sdfd, sds_index);
3556 if (sds_id == FAIL) {
3557 throw InternalErr (__FILE__,__LINE__,"SDselect failed ");
3558 }
3559
3560 // Obtain object name, rank, size, field type and number of SDS attributes
3561 int status = SDgetinfo (sds_id, sds_name, &sds_rank, dim_sizes,
3562 &sds_dtype, &n_sds_attrs);
3563 if (status == FAIL) {
3564 SDendaccess(sds_id);
3565 throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3566 }
3567
3568 for (int dim_index = 0; dim_index<sds_rank; dim_index++) {
3569
3570 int32 sds_dimid = SDgetdimid(sds_id,dim_index);
3571 if(sds_dimid == FAIL) {
3572 SDendaccess(sds_id);
3573 throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3574 }
3575
3576 char dim_name[H4_MAX_NC_NAME];
3577 int32 dim_size = 0;
3578 int32 dim_type = 0;
3579 int32 num_dim_attrs = 0;
3580 if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3581 SDendaccess(sds_id);
3582 throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3583 }
3584
3585 if(dim_type !=0) {
3586 has_dimscale = true;
3587 break;
3588 }
3589
3590 }
3591 SDendaccess(sds_id);
3592 if( true == has_dimscale)
3593 break;
3594 }
3595
3596 // If having dimension scales, this is an AIRS level 2 or 3 version 6. Treat it differently. Otherwise, this is an old AIRS level 3 product.
3597 if (true == has_dimscale)
3598 ret_val = 3;
3599 }
3600 else {// Check if this is an HDF-EOS2 file but not using HDF-EOS2 at all.
3601 // We turn off this for the time being because
3602 // 1) We need to make sure this is a grid file not swath or point file.
3603 // It will be time consuming to identify grids or swaths and hurts the performance for general case.
3604 // 2) No real NASA files exist. We will handle them later.
3605 // KY 2014-01-29
3606 ;
3607#if 0
3608 bool has_dimscale = true;
3609 bool is_grid = false;
3610
3611 // Go through the SDS object
3612 for (sds_index = 0; sds_index < n_sds; sds_index++) {
3613
3614 sds_id = SDselect (sdid, sds_index);
3615 if (sds_id == FAIL) {
3616 SDend(sdid);
3617 throw InternalErr (__FILE__,__LINE__,"SDselect failed ");
3618 }
3619
3620 // Obtain object name, rank, size, field type and number of SDS attributes
3621 int status = SDgetinfo (sds_id, sds_name, &sds_rank, dim_sizes,
3622 &sds_dtype, &n_sds_attrs);
3623 if (status == FAIL) {
3624 SDendaccess(sds_id);
3625 SDend(sdid);
3626 throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3627 }
3628
3629
3630 for (int dim_index = 0; dim_index<sds_rank; dim_index++) {
3631
3632 int32 sds_dimid = SDgetdimid(sds_id,dim_index);
3633 if(sds_dimid == FAIL) {
3634 SDendaccess(sds_id);
3635 SDend(sdid);
3636 throw InternalErr (__FILE__,__LINE__,"SDgetinfo failed ");
3637 }
3638 char dim_name[H4_MAX_NC_NAME];
3639 int32 dim_size = 0;
3640 int32 dim_type = 0;
3641 int32 num_dim_attrs = 0;
3642 if(SDdiminfo(sds_dimid,dim_name,&dim_size,&dim_type,&num_dim_attrs) == FAIL) {
3643 SDendaccess(sds_id);
3644 SDend(sdid);
3645 throw InternalErr(__FILE__,__LINE__,"SDdiminfo failed ");
3646 }
3647
3648 if(0 == dim_type) {
3649 has_dimscale = false;
3650 }
3651
3652 }
3653 SDendaccess(sds_id);
3654 }
3655 if (true == has_dimscale)
3656 ret_val = 4;
3657#endif
3658 }
3659 }
3660
3661 return ret_val;
3662}
3663
3664// Generate DAS for the file that only use SDS APIs. Currently this routine only applies to AIRS version 6
3665// that can take advantage of the handler's metadata cache feature.
3666void read_das_sds(DAS & das, const string & filename,int32 sdfd, bool ecs_metadata,HDFSP::File**h4fileptr) {
3667
3668 HDFSP::File *spf = nullptr;
3669 try {
3670 spf = HDFSP::File::Read(filename.c_str(),sdfd,-1);
3671 spf->Handle_AIRS_L23();
3672 read_das_special_eos2_core(das,spf,filename,ecs_metadata);
3673 }
3674 catch (HDFSP::Exception &e)
3675 {
3676 if (spf != nullptr)
3677 delete spf;
3678 throw InternalErr(e.what());
3679 }
3680
3681 *h4fileptr = spf;
3682 return;
3683}
3684
3685// Generate DDS for the file that only use SDS APIs. Currently this routine only applies to AIRS version 6
3686// that can take advantage of the handler's metadata cache feature.
3687void read_dds_sds(DDS &dds, const string & filename,int32 sdfd, HDFSP::File*h4file,bool dds_setcache) {
3688
3689 // Set DDS dataset.
3690 dds.set_dataset_name(basename(filename));
3691 read_dds_special_1d_grid(dds,h4file,filename,sdfd,dds_setcache);
3692 return;
3693
3694}
3695// Default option
3696void read_dds(DDS & dds, const string & filename)
3697{
3698 // generate DDS, DAS
3699 DAS das;
3700 dds.set_dataset_name(basename(filename));
3701 build_descriptions(dds, das, filename);
3702
3703 if (!dds.check_semantics()) { // DDS didn't get built right
3704 THROW(dhdferr_ddssem);
3705 }
3706 return;
3707}
3708
3709void read_das(DAS & das, const string & filename)
3710{
3711 // generate DDS, DAS
3712 DDS dds(nullptr);
3713 dds.set_dataset_name(basename(filename));
3714
3715 build_descriptions(dds, das, filename);
3716
3717 if (!dds.check_semantics()) { // DDS didn't get built right
3718 dds.print(cout);
3719 THROW(dhdferr_ddssem);
3720 }
3721 return;
3722}
3723
3724// Scan the HDF file and build the DDS and DAS
// Scan the HDF file and build the DDS and DAS.
//
// The per-object-type description maps are filled in first; the final
// Vgroup pass must run last because it stitches the SDS/Vdata/GR entries
// into the DDS in the correct (vgroup) order.
static void build_descriptions(DDS & dds, DAS & das,
                               const string & filename)
{
    sds_map sdsmap;
    vd_map vdatamap;
    gr_map grmap;

    // Build descriptions of SDS items
    // If CF option is enabled, StructMetadata will be parsed here.
    SDS_descriptions(sdsmap, das, filename);

    // Build descriptions of file annotations
    FileAnnot_descriptions(das, filename);

    // Build descriptions of Vdatas
    Vdata_descriptions(vdatamap, das, filename);

    // Build descriptions of General Rasters
    GR_descriptions(grmap, das, filename);

    // Build descriptions of Vgroups and add SDS/Vdata/GR in the correct order
    Vgroup_descriptions(dds, das, filename, sdsmap, vdatamap, grmap);
    return;
}
3749
3750// These two Functor classes are used to look for EOS attributes with certain
3751// base names (is_named) and to accumulate values in in different hdf_attr
3752// objects with the same base names (accum_attr). These are used by
3753// merge_split_eos_attributes() to do just that. Some HDF EOS attributes are
3754// longer than HDF 4's 32,000 character limit. Those attributes are split up
3755// in the HDF 4 files and named `StructMetadata.0', `StructMetadata.1', et
3756// cetera. This code merges those attributes so that they can be processed
3757// correctly by the hdf eos attribute parser (see AddHDFAttr() further down
3758// in this file). 10/29/2001 jhrg
3759
3760struct accum_attr
3761 :public binary_function < hdf_genvec &, hdf_attr, hdf_genvec & > {
3762
3763 string d_named;
3764
3765 accum_attr(const string & named):d_named(named) {
3766 }
3767
3768 hdf_genvec & operator() (hdf_genvec & accum, const hdf_attr & attr) {
3769 // Assume that all fields with the same base name should be combined,
3770 // and assume that they are in order.
3771 BESDEBUG("h4", "attr.name: " << attr.name << endl);
3772 if (attr.name.find(d_named) != string::npos) {
3773#if 0
3774 string stuff;
3775 stuff.assign(attr.values.data(), attr.values.size());
3776 cerr << "Attribute chunk: " << attr.name << endl;
3777 cerr << stuff << endl;
3778#endif
3779 accum.append(attr.values.number_type(), attr.values.data(),
3780 attr.values.size());
3781 return accum;
3782 }
3783 else {
3784 return accum;
3785 }
3786 }
3787};
3788
3789struct is_named:public unary_function < hdf_attr, bool > {
3790 string d_named;
3791
3792 is_named(const string & named):d_named(named) {
3793 }
3794
3795 bool operator() (const hdf_attr & attr) {
3796 return (attr.name.find(d_named) != string::npos);
3797 }
3798};
3799
// Merge every part of the split HDF-EOS attribute `attr_name' found in
// attr_vec (e.g. StructMetadata.0, StructMetadata.1, ...) into one hdf_attr,
// remove the parts, and append the merged attribute to attr_vec.
// If fewer than two parts are present, attr_vec is left untouched.
static void
merge_split_eos_attributes(vector < hdf_attr > &attr_vec,
                           const string & attr_name)
{
    // Only do this if there's more than one part.
    if (count_if(attr_vec.begin(), attr_vec.end(), is_named(attr_name)) > 1) {
        // Merge all split up parts named `attr_name.' Assume they are in
        // order in `attr_vec.'
        hdf_genvec attributes;
        attributes = accumulate(attr_vec.begin(), attr_vec.end(),
                                attributes, accum_attr(attr_name));

        // When things go south, check out the hdf_genvec...
        // BEDEBUG seems not providing a way to handle the following debugging info.
        // I can define a vector and call attributes.print(s_m), then use
        // BESDEBUG to output the debugging info. The downside is that whether BESDEBUG
        // is called, a vector of s_m will always be generated and a chunk of memory is
        // always used. So don't change this for the time being. KY 2012-09-13
        DBG(vector < string > s_m;
            attributes.print(s_m);
            cerr << "Accum struct MD: (" << s_m.size() << ") "
            << s_m[0] << endl);

        // Remove all the parts that have been merged
        attr_vec.erase(remove_if(attr_vec.begin(), attr_vec.end(),
                                 is_named(attr_name)), attr_vec.end());

        // Make a new hdf_attr and assign it the newly merged attributes...
        hdf_attr merged_attr;
        merged_attr.name = attr_name;
        merged_attr.values = attributes;

        // And add it to the vector of attributes.
        attr_vec.push_back(merged_attr);
    }
}
3836
3837// Read SDS's out of filename, build descriptions and put them into dds, das.
// Read SDS's out of filename, build descriptions and put them into dds, das.
//
// Parameters:
//   map      - output: sds_info entries keyed by SDS object reference
//   das      - the DAS to which file and per-SDS attributes are added
//   filename - the HDF4 file path
static void SDS_descriptions(sds_map & map, DAS & das,
                             const string & filename)
{

    hdfistream_sds sdsin(filename);
    sdsin.setmeta(true);

    // Read SDS file attributes attr_iter i = ;

    vector < hdf_attr > fileattrs;
    sdsin >> fileattrs;

    // Read SDS's
    sdsin.rewind();
    while (!sdsin.eos()) {
        sds_info sdi;           // add the next sds_info to map
        sdsin >> sdi.sds;
        sdi.in_vgroup = false;  // assume we're not part of a vgroup
        map[sdi.sds.ref] = sdi; // assign to map by ref
    }

    sdsin.close();

    // This is the call to combine SDS attributes that have been split up
    // into N 32,000 character strings. 10/24/2001 jhrg
    merge_split_eos_attributes(fileattrs, "StructMetadata");
    merge_split_eos_attributes(fileattrs, "CoreMetadata");
    merge_split_eos_attributes(fileattrs, "ProductMetadata");
    merge_split_eos_attributes(fileattrs, "ArchiveMetadata");
    merge_split_eos_attributes(fileattrs, "coremetadata");
    merge_split_eos_attributes(fileattrs, "productmetadata");

    // Build DAS, add SDS file attributes
    AddHDFAttr(das, string("HDF_GLOBAL"), fileattrs);
    // add each SDS's attrs
    vector < hdf_attr > dattrs;

    // TODO Remove these attributes (name and dimension)? jhrg 8/17/11
    // ***
    for (SDSI s = map.begin(); s != map.end(); ++s) {
        const hdf_sds *sds = &s->second.sds;
        AddHDFAttr(das, sds->name, sds->attrs);
        // Also attach each dimension's attributes under "<sds>_dim_<k>".
        for (int k = 0; k < (int) sds->dims.size(); ++k) {
            dattrs = Dims2Attrs(sds->dims[k]);
            AddHDFAttr(das, sds->name + "_dim_" + num2string(k), dattrs);
        }

    }

    return;
}
3889
3890// Read Vdata's out of filename, build descriptions and put them into dds.
3891static void Vdata_descriptions(vd_map & map, DAS & das,
3892 const string & filename)
3893{
3894 hdfistream_vdata vdin(filename);
3895 vdin.setmeta(true);
3896
3897 // Read Vdata's
3898 while (!vdin.eos()) {
3899 vd_info vdi; // add the next vd_info to map
3900 vdin >> vdi.vdata;
3901 vdi.in_vgroup = false; // assume we're not part of a vgroup
3902 map[vdi.vdata.ref] = vdi; // assign to map by ref
3903 }
3904 vdin.close();
3905
3906 // Build DAS
3907 vector < hdf_attr > dattrs;
3908 for (VDI s = map.begin(); s != map.end(); ++s) {
3909 const hdf_vdata *vd = &s->second.vdata;
3910 AddHDFAttr(das, vd->name, vd->attrs);
3911 }
3912
3913 return;
3914}
3915
// Read the Vgroups out of filename, add their attributes to the DAS and
// build the DDS. Every toplevel Vgroup becomes a Structure; SDSs, Vdatas
// and GRs that belong to a Vgroup are flagged (in sdmap/vdmap/grmap) so
// that only the 'lone' ones are added to the DDS at the top level.
static void Vgroup_descriptions(DDS & dds, DAS & das,
                                const string & filename, sds_map & sdmap,
                                vd_map & vdmap, gr_map & grmap)
{

    hdfistream_vgroup vgin(filename);

    // Read Vgroup's
    vg_map vgmap;
    while (!vgin.eos()) {
        vg_info vgi;            // add the next vg_info to map
        vgin >> vgi.vgroup;     // read vgroup itself
        vgi.toplevel = true;    // assume toplevel until we prove otherwise
        vgmap[vgi.vgroup.ref] = vgi;    // assign to map by vgroup ref
    }
    vgin.close();
    // for each Vgroup
    for (VGI v = vgmap.begin(); v != vgmap.end(); ++v) {
        const hdf_vgroup *vg = &v->second.vgroup;

        // Add Vgroup attributes
        AddHDFAttr(das, vg->name, vg->attrs);

        // Now assign children: walk the parallel tags/refs vectors and mark
        // each referenced object as being contained in a vgroup.
        for (uint32 i = 0; i < vg->tags.size(); i++) {
            int32 tag = vg->tags[i];
            int32 ref = vg->refs[i];
            switch (tag) {
            case DFTAG_VG:
                // A DFTAG_VG ref could be a GRI or a nested Vgroup; if the
                // ref is in grmap it is a GRI, otherwise the nested Vgroup
                // loses its toplevel status.
                if (grmap.find(ref) != grmap.end())
                    grmap[ref].in_vgroup = true;
                else
                    vgmap[ref].toplevel = false;
                break;
            case DFTAG_VH:
                vdmap[ref].in_vgroup = true;
                break;
            case DFTAG_NDG:
                sdmap[ref].in_vgroup = true;
                break;
            default:
                ERROR_LOG("unknown tag: " << tag << " ref: " << ref << endl);
                // TODO: Make this an exception? jhrg 8/19/11
                // Don't make an exception. Possibly you will meet other valid tags. Need to know if it
                // is worth to tackle this. KY 09/13/12
                break;
            }// switch (tag)
        } // for (uint32 i = 0; i < vg->tags.size(); i++)
    } // for (VGI v = vgmap.begin(); v != vgmap.end(); ++v)
    // Build DDS for all toplevel vgroups: each becomes a Structure. The
    // DDS copies the variable, so the local pointer is deleted afterwards.
    BaseType *pbt = 0;
    for (VGI v = vgmap.begin(); v != vgmap.end(); ++v) {
        if (!v->second.toplevel)
            continue;           // skip over non-toplevel vgroups
        pbt = NewStructureFromVgroup(v->second.vgroup,
                                     vgmap, sdmap, vdmap,
                                     grmap, filename);
        if (pbt != 0) {
            dds.add_var(pbt);
            delete pbt;
        }

    } // for (VGI v = vgmap.begin(); v != vgmap.end(); ++v)

    // Add lone SDS's (those not contained in any vgroup).
    for (SDSI s = sdmap.begin(); s != sdmap.end(); ++s) {
        if (s->second.in_vgroup)
            continue;           // skip over SDS's in vgroups
        if (s->second.sds.has_scale()) // make a grid
            pbt = NewGridFromSDS(s->second.sds, filename);
        else
            pbt = NewArrayFromSDS(s->second.sds, filename);
        if (pbt != 0) {
            dds.add_var(pbt);
            delete pbt;
        }
    }

    // Add lone Vdata's.
    for (VDI v = vdmap.begin(); v != vdmap.end(); ++v) {
        if (v->second.in_vgroup)
            continue;           // skip over Vdata in vgroups
        pbt = NewSequenceFromVdata(v->second.vdata, filename);
        if (pbt != 0) {
            dds.add_var(pbt);
            delete pbt;
        }
    }
    // Add lone GR's.
    for (GRI g = grmap.begin(); g != grmap.end(); ++g) {
        if (g->second.in_vgroup)
            continue;           // skip over GRs in vgroups
        pbt = NewArrayFromGR(g->second.gri, filename);
        if (pbt != 0) {
            dds.add_var(pbt);
            delete pbt ;
        }
    }
}
4018
4019static void GR_descriptions(gr_map & map, DAS & das,
4020 const string & filename)
4021{
4022
4023 hdfistream_gri grin(filename);
4024 grin.setmeta(true);
4025
4026 // Read GR file attributes
4027 vector < hdf_attr > fileattrs;
4028 grin >> fileattrs;
4029
4030 // Read general rasters
4031 grin.rewind();
4032 while (!grin.eos()) {
4033 gr_info gri; // add the next gr_info to map
4034 grin >> gri.gri;
4035 gri.in_vgroup = false; // assume we're not part of a vgroup
4036 map[gri.gri.ref] = gri; // assign to map by ref
4037 }
4038
4039 grin.close();
4040
4041 // Build DAS
4042 AddHDFAttr(das, string("HDF_GLOBAL"), fileattrs); // add GR file attributes
4043
4044 // add each GR's attrs
4045 vector < hdf_attr > pattrs;
4046 for (GRI g = map.begin(); g != map.end(); ++g) {
4047 const hdf_gri *gri = &g->second.gri;
4048 // add GR attributes
4049 AddHDFAttr(das, gri->name, gri->attrs);
4050
4051 // add palettes as attributes
4052 pattrs = Pals2Attrs(gri->palettes);
4053 AddHDFAttr(das, gri->name, pattrs);
4054
4055 }
4056
4057 return;
4058}
4059
4060// Read file annotations out of filename, put in attribute structure
4061static void FileAnnot_descriptions(DAS & das, const string & filename)
4062{
4063
4064 hdfistream_annot annotin(filename);
4065 vector < string > fileannots;
4066
4067 annotin >> fileannots;
4068 AddHDFAttr(das, string("HDF_GLOBAL"), fileannots);
4069
4070 annotin.close();
4071 return;
4072}
4073
// Add a vector of hdf_attr to a DAS. Ordinary attributes are appended to
// the AttrTable for 'varname' (created on demand). HDF-EOS metadata
// attributes (StructMetadata, CoreMetadata, ... and their lowercase
// variants) are instead fed through the hdfeos lexer/parser so their ODL
// text becomes a nested attribute hierarchy in its own container.
// Throws Error on a fatal HDF-EOS parse failure and dhdferr_addattr when
// appending an ordinary attribute fails.
void AddHDFAttr(DAS & das, const string & varname,
                const vector < hdf_attr > &hav)
{
    if (hav.size() == 0)        // nothing to add
        return;
    // get pointer to the AttrTable for the variable varname (create one if
    // necessary)
    string tempname = varname;
    AttrTable *atp = das.get_table(tempname);
    if (atp == 0) {
        atp = new AttrTable;
        atp = das.add_table(tempname, atp);
    }
    // add the attributes to the DAS
    vector < string > attv;     // vector of attribute strings
    string attrtype;            // name of type of attribute
    for (int i = 0; i < (int) hav.size(); ++i) {        // for each attribute

        attrtype = DAPTypeName(hav[i].values.number_type());
        // get a vector of strings representing the values of the attribute
        attv = vector < string > ();    // clear attv
        hav[i].values.print(attv);

        // add the attribute and its values to the DAS
        for (int j = 0; j < (int) attv.size(); ++j) {
            // handle HDF-EOS metadata with separate parser
            string container_name = hav[i].name;
            if (container_name.find("StructMetadata") == 0
                || container_name.find("CoreMetadata") == 0
                || container_name.find("ProductMetadata") == 0
                || container_name.find("ArchiveMetadata") == 0
                || container_name.find("coremetadata") == 0
                || container_name.find("productmetadata") == 0) {
                // Strip any ".<n>" suffix so every split piece of the
                // metadata lands in the same container (e.g.
                // "CoreMetadata.0" -> "CoreMetadata").
                string::size_type dotzero = container_name.find('.');
                if (dotzero != container_name.npos)
                    container_name.erase(dotzero);      // erase .0


                AttrTable *at = das.get_table(container_name);
                if (!at)
                    at = das.add_table(container_name, new AttrTable);

                // tell lexer to scan attribute string
                void *buf = hdfeos_string(attv[j].c_str());

                parser_arg arg(at);
                // HDF-EOS attribute parsing is complex and some errors are
                // tolerated. Thus, if the parser proper returns an error,
                // that results in an exception that is fatal. However, if
                // the status returned by an otherwise successful parse shows
                // an error was encountered but successful parsing continued,
                // that's OK, but it should be logged.
                //
                // Also, HDF-EOS files should be read using the new HDF-EOS
                // features and not this older parser. jhrg 8/18/11
                if (hdfeosparse(&arg) != 0){
                    // Free the lexer buffer before throwing so it is not
                    // leaked on the fatal-error path.
                    hdfeos_delete_buffer(buf);
                    throw Error("HDF-EOS parse error while processing a " + container_name + " HDFEOS attribute.");
                }

                // We don't use the parse_error for this case since it generates memory leaking. KY 2014-02-25
                if (arg.status() == false) {
                    ERROR_LOG("HDF-EOS parse error while processing a "
                              << container_name << " HDFEOS attribute. (2)" << endl);
                }

                hdfeos_delete_buffer(buf);
            }
            else {
                // Ordinary attribute: escape (and, without the
                // ATTR_STRING_QUOTE_FIX build option, also quote) String
                // values before appending them under the attribute's name.
                if (attrtype == "String")
#ifdef ATTR_STRING_QUOTE_FIX
                    attv[j] = escattr(attv[j]);
#else
                    attv[j] = "\"" + escattr(attv[j]) + "\"";
#endif

                if (atp->append_attr(hav[i].name, attrtype, attv[j]) == 0)
                    THROW(dhdferr_addattr);
            }
        }
    }

    return;
}
4166
// Add a vector of annotations to a DAS. They are stored as attributes. They
// are encoded as string values of an attribute named "HDF_ANNOT" in the
// AttrTable for 'varname' (created on demand).
// Throws dhdferr_addattr if appending an annotation fails.
void AddHDFAttr(DAS & das, const string & varname,
                const vector < string > &anv)
{
    if (anv.size() == 0)        // nothing to add
        return;

    // get pointer to the AttrTable for the variable varname (create one if
    // necessary)
    AttrTable *atp = das.get_table(varname);
    if (atp == 0) {
        atp = new AttrTable;
        atp = das.add_table(varname, atp);
    }
    // add the annotations to the DAS
    string an;
    for (int i = 0; i < (int) anv.size(); ++i) { // for each annotation
        // Escape the annotation text; without the ATTR_STRING_QUOTE_FIX
        // build option it is also wrapped in double quotes.
#ifdef ATTR_STRING_QUOTE_FIX
        an = escattr(anv[i]);   // quote strings
#else
        an = "\"" + escattr(anv[i]) + "\"";     // quote strings
#endif
        if (atp->append_attr(string("HDF_ANNOT"), "String", an) == 0)
            THROW(dhdferr_addattr);
    }

    return;
}
4196
4197// Add a vector of palettes as attributes to a GR. Each palette is added as
4198// two attributes: the first contains the palette data; the second contains
4199// the number of components in the palette.
4200static vector < hdf_attr > Pals2Attrs(const vector < hdf_palette > palv)
4201{
4202 vector < hdf_attr > pattrs;
4203
4204 if (palv.size() != 0) {
4205 // for each palette create an attribute with the palette inside, and an
4206 // attribute containing the number of components
4207 hdf_attr pattr;
4208 string palname;
4209 for (int i = 0; i < (int) palv.size(); ++i) {
4210 palname = "hdf_palette_" + num2string(i);
4211 pattr.name = palname;
4212 pattr.values = palv[i].table;
4213 pattrs.push_back(pattr);
4214 pattr.name = palname + "_ncomps";
4215 pattr.values = hdf_genvec(DFNT_INT32,
4216 const_cast <
4217 int32 * >(&palv[i].ncomp), 1);
4218 pattrs.push_back(pattr);
4219 if (palv[i].name.length() != 0) {
4220 pattr.name = palname + "_name";
4221 pattr.values = hdf_genvec(DFNT_CHAR,
4222 const_cast <
4223 char *>(palv[i].name.c_str()),
4224 palv[i].name.length());
4225 pattrs.push_back(pattr);
4226 }
4227 }
4228 }
4229 return pattrs;
4230}
4231
4232// Convert the meta information in a hdf_dim into a vector of
4233// hdf_attr.
4234static vector < hdf_attr > Dims2Attrs(const hdf_dim dim)
4235{
4236 vector < hdf_attr > dattrs;
4237 hdf_attr dattr;
4238 if (dim.name.length() != 0) {
4239 dattr.name = "name";
4240 dattr.values =
4241 hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.name.c_str()),
4242 dim.name.length());
4243 dattrs.push_back(dattr);
4244 }
4245 if (dim.label.length() != 0) {
4246 dattr.name = "long_name";
4247 dattr.values =
4248 hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.label.c_str()),
4249 dim.label.length());
4250 dattrs.push_back(dattr);
4251 }
4252 if (dim.unit.length() != 0) {
4253 dattr.name = "units";
4254 dattr.values =
4255 hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.unit.c_str()),
4256 dim.unit.length());
4257 dattrs.push_back(dattr);
4258 }
4259 if (dim.format.length() != 0) {
4260 dattr.name = "format";
4261 dattr.values =
4262 hdf_genvec(DFNT_CHAR, const_cast < char *>(dim.format.c_str()),
4263 dim.format.length());
4264 dattrs.push_back(dattr);
4265 }
4266 return dattrs;
4267}
4268
This class provides a way to map HDF4 1-D character array to DAP Str for the CF option.
This class provides a way to map HDFEOS2 character >1D array to DAP Str array for the CF option.
This class provides a way to map HDFEOS2 1-D character array to DAP Str for the CF option.
const char * what() const override
Return exception message.
Definition: HDFSP.h:107
int32 getType() const
Get the data type of this field.
Definition: HDFSP.h:300
const std::string & getNewName() const
Get the CF name(special characters replaced by underscores) of this field.
Definition: HDFSP.h:288
int32 getRank() const
Get the dimension rank of this field.
Definition: HDFSP.h:294
const std::string & getName() const
Get the name of this field.
Definition: HDFSP.h:282
static File * Read(const char *path, int32 sdid, int32 fileid)
Retrieve SDS and Vdata information from the HDF4 file.
Definition: HDFSP.cc:208
void Prepare()
Definition: HDFSP.cc:4089
const std::vector< VDATA * > & getVDATAs() const
Public interface to Obtain Vdata.
Definition: HDFSP.h:769
const std::vector< AttrContainer * > & getVgattrs() const
Get attributes for all vgroups.
Definition: HDFSP.h:775
bool Has_Dim_NoScale_Field() const
This file has a field that is a SDS dimension but no dimension scale.
Definition: HDFSP.h:748
SD * getSD() const
Public interface to Obtain SD.
Definition: HDFSP.h:763
static File * Read_Hybrid(const char *path, int32 sdid, int32 fileid)
Definition: HDFSP.cc:263
SPType getSPType() const
Obtain special HDF4 product type.
Definition: HDFSP.h:741
One instance of this class represents one SDS object.
Definition: HDFSP.h:337
const std::vector< Dimension * > & getCorrectedDimensions() const
Get the list of the corrected dimensions.
Definition: HDFSP.h:344
const std::vector< Dimension * > & getDimensions() const
Get the list of dimensions.
Definition: HDFSP.h:398
bool IsDimNoScale() const
Is this field a dimension without dimension scale(or empty[no data]dimension variable)
Definition: HDFSP.h:411
This class retrieves all SDS objects and SD file attributes.
Definition: HDFSP.h:542
const std::vector< Attribute * > & getAttributes() const
Public interface to obtain the SD(file) attributes.
Definition: HDFSP.h:567
const std::vector< SDField * > & getFields() const
Redundant member function.
Definition: HDFSP.h:561
One instance of this class represents one Vdata field.
Definition: HDFSP.h:490
int32 getFieldOrder() const
Get the order of this field.
Definition: HDFSP.h:496
Definition: HDFStr.h:51
Definition: HE2CF.h:54
bool open(const std::string &filename, const int sd_id, const int file_id)
opens \a filename HDF4 file.
Definition: HE2CF.cc:955
string get_metadata(const std::string &metadataname, bool &suffix_is_num, std::vector< std::string > &non_num_names, std::vector< std::string > &non_num_data)
retrieves the merged metadata.
Definition: HE2CF.cc:948
bool write_attribute(const std::string &gname, const std::string &fname, const std::string &newfname, int n_groups, int fieldtype)
Definition: HE2CF.cc:985
void set_DAS(libdap::DAS *das)
sets the DAS pointer so that we can build attribute tables.
Definition: HE2CF.cc:181
bool close()
closes the opened file.
Definition: HE2CF.cc:932
bool write_attribute_FillValue(const std::string &varname, int type, float val)
Definition: HE2CF.cc:1052
bool write_attribute_coordinates(const std::string &varname, std::string coord)
Definition: HE2CF.cc:1146
bool write_attribute_units(const std::string &varname, std::string units)
Definition: HE2CF.cc:1159
void get_value(const std::string &s, std::string &val, bool &found)
Retrieve the value of a given key, if set.
Definition: TheBESKeys.cc:340
static TheBESKeys * TheKeys()
Definition: TheBESKeys.cc:71
static std::string print_attr(int32, int, void *)
Print attribute values in string.
Definition: HDFCFUtil.cc:268
static std::string print_type(int32)
Print datatype in string.
Definition: HDFCFUtil.cc:389
static void correct_scale_offset_type(libdap::AttrTable *at)
Definition: HDFCFUtil.cc:616
static std::string get_CF_string(std::string s)
Change special characters to "_".
Definition: HDFCFUtil.cc:166
static std::string escattr(std::string s)
Definition: HDFCFUtil.cc:3309
static void correct_fvalue_type(libdap::AttrTable *at, int32 dtype)
Definition: HDFCFUtil.cc:549