HDFEOS2ArraySwathDimMapField.cc
1
2
3// Retrieves the latitude and longitude of the HDF-EOS2 Swath with dimension map
4// Authors: MuQun Yang <myang6@hdfgroup.org>
5// Copyright (c) 2010-2012 The HDF Group
7
8// Currently the handling of swath data fields with dimension maps is the same as
9// that of other data fields (HDFEOS2Array_RealField.cc etc.).
10// The reason to keep it separate is that, in theory, data fields with dimension
11// maps may need special handling.
12// So we will leave it this way for now. This class may be removed in the future
13// and HDFEOS2Array_RealField.cc may be used instead.
14// KY 2014-02-19
15
16#ifdef USE_HDFEOS2_LIB
17#include "config.h"
18#include "config_hdf.h"
19
20#include <iostream>
21#include <sstream>
22#include <cassert>
23#include <libdap/debug.h>
24#include <libdap/InternalErr.h>
25#include "BESDebug.h"
26#include <BESLog.h>
27#include "HDFEOS2ArraySwathDimMapField.h"
28#include "HDF4RequestHandler.h"
29#define SIGNED_BYTE_TO_INT32 1
30
31using namespace std;
32bool
33HDFEOS2ArraySwathDimMapField::read ()
34{
35
36 BESDEBUG("h4","Coming to HDFEOS2ArraySwathDimMapField read "<<endl);
37 if(length() == 0)
38 return true;
39
40 bool check_pass_fileid_key = HDF4RequestHandler::get_pass_fileid();
41
42 // Declare offset, count and step
43 vector<int>offset;
44 offset.resize(rank);
45
46 vector<int>count;
47 count.resize(rank);
48
49 vector<int>step;
50 step.resize(rank);
51
52 // Obtain offset,step and count from the client expression constraint
53 int nelms = format_constraint(offset.data(),step.data(),count.data());
54
55 // Just declare offset,count and step in the int32 type.
56 vector<int32>offset32;
57 offset32.resize(rank);
58
59 vector<int32>count32;
60 count32.resize(rank);
61
62 vector<int32>step32;
63 step32.resize(rank);
64
65 // Just obtain the offset,count and step in the datatype of int32.
66 for (int i = 0; i < rank; i++) {
67 offset32[i] = (int32) offset[i];
68 count32[i] = (int32) count[i];
69 step32[i] = (int32) step[i];
70 }
71
72 // Define function pointers to handle both grid and swath
73 int32 (*openfunc) (char *, intn);
74 intn (*closefunc) (int32);
75 int32 (*attachfunc) (int32, char *);
76 intn (*detachfunc) (int32);
77
78 string datasetname;
79
80 if (swathname == "") {
81 throw InternalErr (__FILE__, __LINE__, "It should be either grid or swath.");
82 }
83 else if (gridname == "") {
84 openfunc = SWopen;
85 closefunc = SWclose;
86 attachfunc = SWattach;
87 detachfunc = SWdetach;
88 datasetname = swathname;
89 }
90 else {
91 throw InternalErr (__FILE__, __LINE__, "It should be either grid or swath.");
92 }
93
94 // Swath ID, swathid is actually in this case only the id of latitude and longitude.
95 int32 sfid = -1;
96 int32 swathid = -1;
97
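    // If the BES pass-file-ID key is on and this field comes from the data file
    // itself (not the extra geolocation file), reuse the already opened swath
    // file ID (swfd); otherwise open the file here.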
98 if (true == isgeofile || false == check_pass_fileid_key) {
99
100 // Open, attach and obtain datatype information based on HDF-EOS2 APIs.
101 sfid = openfunc (const_cast < char *>(filename.c_str ()), DFACC_READ);
102
103 if (sfid < 0) {
104 ostringstream eherr;
105             eherr << "File " << filename.c_str () << " cannot be opened.";
106 throw InternalErr (__FILE__, __LINE__, eherr.str ());
107 }
108 }
109 else
110 sfid = swfd;
111
112 swathid = attachfunc (sfid, const_cast < char *>(datasetname.c_str ()));
113 if (swathid < 0) {
114 close_fileid (sfid,-1);
115 ostringstream eherr;
116 eherr << "Grid/Swath " << datasetname.c_str () << " cannot be attached.";
117 throw InternalErr (__FILE__, __LINE__, eherr.str ());
118 }
119
120    // dimmaps was set to empty in hdfdesc.cc if the extra geolocation file also
121    // uses dimension maps. This is because the dimension maps in the MODIS
122    // geolocation file may differ from those in the data file, so we cannot
123    // simply pass the data file's dimmaps to this class.
124    // Instead, the dimension map information is obtained from the geolocation
125    // file here.
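    // For example (illustrative names and values), SWinqmaps may return a map
    // name such as "Cell_Along_Swath_5km/Cell_Along_Swath_1km" with offset 2 and
    // increment 5, meaning data-dimension index j corresponds to geo-dimension
    // index i via j = i*increment + offset.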
126 if(true == dimmaps.empty()) {
127
128 int32 nummaps = 0;
129 int32 bufsize = 0;
130
131 // Obtain number of dimension maps and the buffer size.
132 if ((nummaps = SWnentries(swathid, HDFE_NENTMAP, &bufsize)) == -1){
133 detachfunc(swathid);
134 close_fileid(sfid,-1);
135 throw InternalErr (__FILE__, __LINE__, "cannot obtain the number of dimmaps");
136 }
137
138 if (nummaps <= 0){
139 detachfunc(swathid);
140 close_fileid(sfid,-1);
141 throw InternalErr (__FILE__,__LINE__,
142 "Number of dimension maps should be greater than 0");
143 }
144
145 vector<char> namelist;
146 vector<int32> map_offset;
147 vector<int32> increment;
148
149 namelist.resize(bufsize + 1);
150 map_offset.resize(nummaps);
151 increment.resize(nummaps);
152 if (SWinqmaps(swathid, namelist.data(), map_offset.data(), increment.data())
153 == -1) {
154 detachfunc(swathid);
155 close_fileid(sfid,-1);
156            throw InternalErr (__FILE__,__LINE__,"failed to inquire the dimension maps");
157 }
158
159 vector<string> mapnames;
160 HDFCFUtil::Split(namelist.data(), bufsize, ',', mapnames);
161 int map_count = 0;
162#if 0
163 for (vector<string>::const_iterator i = mapnames.begin();
164 i != mapnames.end(); ++i) {
165#endif
166 for (auto const &mapname:mapnames) {
167 vector<string> parts;
168 HDFCFUtil::Split(mapname.c_str(), '/', parts);
169 if (parts.size() != 2){
170 detachfunc(swathid);
171 close_fileid(sfid,-1);
172                throw InternalErr (__FILE__,__LINE__,"a dimension map name should contain exactly two parts (geodim/datadim)");
173 }
174
175 struct dimmap_entry tempdimmap;
176 tempdimmap.geodim = parts[0];
177 tempdimmap.datadim = parts[1];
178 tempdimmap.offset = map_offset[map_count];
179 tempdimmap.inc = increment[map_count];
180#if 0
181//cerr<<"map_count is: "<<map_count <<endl;
182//cerr<<"dimmap geodim is: "<<tempdimmap.geodim <<endl;
183//cerr<<"dimmap datadim is: "<<tempdimmap.datadim <<endl;
184//cerr<<"offset is: "<<tempdimmap.offset <<endl;
185//cerr<<"inc is: "<<tempdimmap.inc <<endl;
186#endif
187 dimmaps.push_back(tempdimmap);
188 ++map_count;
189 }
190 }
191#if 0
192else {
193for(int i = 0; i <dimmaps.size();i++) {
194cerr<<"dimmap geodim is: "<<dimmaps[i].geodim <<endl;
195cerr<<"dimmap datadim is: "<<dimmaps[i].datadim <<endl;
196cerr<<"offset is: "<<dimmaps[i].offset <<endl;
197cerr<<"inc is: "<<dimmaps[i].inc <<endl;
198
199
200}
201
202}
203#endif
204
205 if (sotype!=DEFAULT_CF_EQU) {
206
207 if("MODIS_SWATH_Type_L1B" == swathname) {
208
209 string emissive_str = "Emissive";
210 string RefSB_str = "RefSB";
211 bool is_emissive_field = false;
212 bool is_refsb_field = false;
213
214 if(fieldname.find(emissive_str)!=string::npos) {
215 if(0 == fieldname.compare(fieldname.size()-emissive_str.size(),
216 emissive_str.size(),emissive_str))
217 is_emissive_field = true;
218 }
219
220 if(fieldname.find(RefSB_str)!=string::npos) {
221 if(0 == fieldname.compare(fieldname.size()-RefSB_str.size(),
222 RefSB_str.size(),RefSB_str))
223 is_refsb_field = true;
224 }
225
226 if ((true == is_emissive_field) || (true == is_refsb_field)) {
227 detachfunc(swathid);
228 close_fileid(sfid,-1);
229 throw InternalErr (__FILE__, __LINE__,
230 "Currently don't support MODIS Level 1B swath dim. map for data ");
231 }
232 }
233 }
234
235 bool is_modis1b = false;
236 if("MODIS_SWATH_Type_L1B" == swathname)
237 is_modis1b = true;
238
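    // Pick the write path: if the BES key that disables scale/offset computation
    // is on (and this is not MODIS Level 1B), write the raw values; otherwise
    // apply the MODIS scale/offset rules while writing the data.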
239 try {
240 //if(true == turn_on_disable_scale_comp_key && false== is_modis1b)
241 if(true == HDF4RequestHandler::get_disable_scaleoffset_comp() && false== is_modis1b)
242 write_dap_data_disable_scale_comp(swathid,nelms,offset32,count32,step32);
243 else
244 write_dap_data_scale_comp(swathid,nelms,offset32,count32,step32);
245 }
246 catch(...) {
247 detachfunc(swathid);
248 close_fileid(sfid,-1);
249 throw;
250 }
251
252 intn r = 0;
253 r = detachfunc (swathid);
254 if (r != 0) {
255 close_fileid(sfid,-1);
256 ostringstream eherr;
257
258 eherr << "Grid/Swath " << datasetname.c_str () << " cannot be detached.";
259 throw InternalErr (__FILE__, __LINE__, eherr.str ());
260 }
261
262
263 if(true == isgeofile || false == check_pass_fileid_key) {
264 r = closefunc (sfid);
265 if (r != 0) {
266 ostringstream eherr;
267 eherr << "Grid/Swath " << filename.c_str () << " cannot be closed.";
268 throw InternalErr (__FILE__, __LINE__, eherr.str ());
269 }
270 }
271
272
273 return false;
274}
275
276// Standard way for DAP handlers to obtain the coordinates of the subsetted region from the constraint expression.
277// Return the number of elements to read.
278int
279HDFEOS2ArraySwathDimMapField::format_constraint (int *offset, int *step, int *count)
280{
281 int nels = 1;
282 int id = 0;
283
284 Dim_iter p = dim_begin ();
285 while (p != dim_end ()) {
286
287 int start = dimension_start (p, true);
288 int stride = dimension_stride (p, true);
289 int stop = dimension_stop (p, true);
290
291 // Check for illegal constraint
292 if (start > stop) {
293 ostringstream oss;
294 oss << "Array/Grid hyperslab start point "<< start <<
295 " is greater than stop point " << stop <<".";
296 throw Error(malformed_expr, oss.str());
297 }
298
299 offset[id] = start;
300 step[id] = stride;
301 count[id] = ((stop - start) / stride) + 1; // count of elements
302 nels *= count[id]; // total number of values for variable
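        // e.g., start=0, stop=9, stride=3 selects indices 0, 3, 6, 9, so count is 4.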
303
304 BESDEBUG ("h4",
305 "=format_constraint():"
306 << "id=" << id << " offset=" << offset[id]
307 << " step=" << step[id]
308 << " count=" << count[id]
309 << endl);
310
311 id++;
312 p++;
313 }// while (p != dim_end ())
314
315 return nels;
316}
317
318// Get the values of a field (originally latitude and longitude).
319// It calls _expand_dimmap_field to interpolate the values according to the dimension maps.
320template < class T > int
321HDFEOS2ArraySwathDimMapField::
322GetFieldValue (int32 swathid, const string & geofieldname,
323 vector < struct dimmap_entry >&sw_dimmaps,
324 vector < T > &vals, vector<int32>&newdims)
325{
326
327 int32 ret = -1;
328 int32 size = -1;
329 int32 sw_rank = -1;
330 int32 dims[130];
331 int32 type = -1;
332
333    // Two dimensions for lat/lon; each dimension name is < 64 characters and
334    // the dimension names are separated by commas.
335 char dimlist[130];
336 ret = SWfieldinfo (swathid, const_cast < char *>(geofieldname.c_str ()),
337 &sw_rank, dims, &type, dimlist);
338 if (ret != 0)
339 return -1;
340
341 size = 1;
342 for (int i = 0; i <sw_rank; i++)
343 size *= dims[i];
344
345 vals.resize (size);
346
347 ret = SWreadfield (swathid, const_cast < char *>(geofieldname.c_str ()),
348 NULL, NULL, NULL, (void *) vals.data());
349 if (ret != 0)
350 return -1;
351
352 vector < string > dimname;
353 HDFCFUtil::Split (dimlist, ',', dimname);
354
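    // For every dimension of this field that appears as the geo-dimension of a
    // dimension map, expand the values along that dimension to the size of the
    // mapped data dimension via _expand_dimmap_field.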
355 for (int i = 0; i < sw_rank; i++) {
356 vector < struct dimmap_entry >::iterator it;
357
358 for (it = sw_dimmaps.begin (); it != sw_dimmaps.end (); it++) {
359 if (it->geodim == dimname[i]) {
360#if 0
361//cerr<<"dimnames["<<i<<"]: " <<dimname[i]<<endl;
362//cerr<<"offset is "<<it->offset<<endl;
363//cerr<<"inc is "<<it->inc<<endl;
364#endif
365 int32 ddimsize = SWdiminfo (swathid, (char *) it->datadim.c_str ());
366 if (ddimsize == -1)
367 return -1;
368 int r;
369
370 r = _expand_dimmap_field (&vals, sw_rank, dims, i, ddimsize, it->offset, it->inc);
371 if (r != 0)
372 return -1;
373 }
374 }
375 }
376
377 // dims[] are expanded already.
378 for (int i = 0; i < sw_rank; i++) {
379 if (dims[i] < 0)
380 return -1;
381 newdims[i] = dims[i];
382 }
383
384 return 0;
385}
386
387// expand the dimension map field.
388template < class T > int
389HDFEOS2ArraySwathDimMapField::_expand_dimmap_field (vector < T >
390 *pvals, int32 sw_rank,
391 int32 dimsa[],
392 int dimindex,
393 int32 ddimsize,
394 int32 offset,
395 int32 inc) const
396{
397 vector < T > orig = *pvals;
398 vector < int32 > pos;
399 vector < int32 > dims;
400 vector < int32 > newdims;
401 pos.resize (sw_rank);
402 dims.resize (sw_rank);
403
404 for (int i = 0; i < sw_rank; i++) {
405 pos[i] = 0;
406 dims[i] = dimsa[i];
407 }
408 newdims = dims;
409 newdims[dimindex] = ddimsize;
410 dimsa[dimindex] = ddimsize;
411
412 int newsize = 1;
413
414 for (int i = 0; i < sw_rank; i++) {
415 newsize *= newdims[i];
416 }
417 pvals->clear ();
418 pvals->resize (newsize);
419
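    // Walk through all n-D positions in odometer order. INDEX_nD_TO_1D is assumed
    // to map an n-D position (pos) within the given dimension sizes to its
    // row-major 1-D offset.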
420 for (;;) {
421 // if end
422 if (pos[0] == dims[0]) {
423            // we are past the end
424 break;
425 }
426 else if (pos[dimindex] == 0) {
427 // extract 1D values
428 vector < T > v;
429 for (int i = 0; i < dims[dimindex]; i++) {
430 pos[dimindex] = i;
431 v.push_back (orig[INDEX_nD_TO_1D (dims, pos)]);
432 }
433 // expand them
434
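            // Map each data-dimension index j back to a (possibly fractional)
            // geo index i = (j - offset)/inc. If j falls exactly on a geo sample,
            // use v[i]; otherwise linearly interpolate between the two nearest
            // samples: f = ((j - j1)*v[i2] + (j2 - j)*v[i1]) / (j2 - j1),
            // where j1 = i1*inc + offset and j2 = i2*inc + offset. Indices are
            // clamped at the ends, so values outside the geo samples are
            // linearly extrapolated.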
435 vector < T > w;
436 for (int32 j = 0; j < ddimsize; j++) {
437 int32 i = (j - offset) / inc;
438 T f;
439
440 if (i * inc + offset == j) // perfect match
441 {
442 f = (v[i]);
443 }
444 else {
445 int32 i1 = 0;
446 int32 i2 = (i<=0)?1:0;
447 int32 j1 = 0;
448 int32 j2 = 0;
449
450#if 0
451 if (i <= 0) {
452 //i1 = 0;
453 i2 = 1;
454 }
455#endif
456 if ((unsigned int) i + 1 >= v.size ()) {
457 i1 = v.size () - 2;
458 i2 = v.size () - 1;
459 }
460 else {
461 i1 = i;
462 i2 = i + 1;
463 }
464 j1 = i1 * inc + offset;
465 j2 = i2 * inc + offset;
466 f = (((j - j1) * v[i2] + (j2 - j) * v[i1]) / (j2 - j1));
467 }
468 w.push_back (f);
469 pos[dimindex] = j;
470 (*pvals)[INDEX_nD_TO_1D (newdims, pos)] = f;
471 }
472 pos[dimindex] = 0;
473 }
474 // next pos
475 pos[sw_rank - 1]++;
476 for (int i = sw_rank - 1; i > 0; i--) {
477 if (pos[i] == dims[i]) {
478 pos[i] = 0;
479 pos[i - 1]++;
480 }
481 }
482 }
483
484 return 0;
485}
486
487template < class T >
488bool HDFEOS2ArraySwathDimMapField::FieldSubset (T * outlatlon,
489 const vector<int32>&newdims,
490 T * latlon,
491 const int32 * offset,
492 const int32 * count,
493 const int32 * step)
494{
495
496 if (newdims.size() == 1)
497 Field1DSubset(outlatlon,newdims[0],latlon,offset,count,step);
498 else if (newdims.size() == 2)
499 Field2DSubset(outlatlon,newdims[0],newdims[1],latlon,offset,count,step);
500 else if (newdims.size() == 3)
501 Field3DSubset(outlatlon,newdims,latlon,offset,count,step);
502 else
503 throw InternalErr(__FILE__, __LINE__,
504 "Currently doesn't support rank >3 when interpolating with dimension map");
505
506 return true;
507}
508
509// Subset of 1-D field to follow the parameters from the DAP expression constraint
510template < class T >
511bool HDFEOS2ArraySwathDimMapField::Field1DSubset (T * outlatlon,
512 const int majordim,
513 T * latlon,
514 const int32 * offset,
515 const int32 * count,
516 const int32 * step)
517{
518 if (majordim < count[0])
519 throw InternalErr(__FILE__, __LINE__,
520 "The number of elements is greater than the total dimensional size");
521
522 for (int i = 0; i < count[0]; i++)
523 outlatlon[i] = latlon[offset[0]+i*step[0]];
524 return true;
525
526}
527// Subset of latitude and longitude to follow the parameters
528// from the DAP expression constraint
529template < class T >
530bool HDFEOS2ArraySwathDimMapField::Field2DSubset (T * outlatlon,
531 const int ,
532 const int minordim,
533 T * latlon,
534 const int32 * offset,
535 const int32 * count,
536 const int32 * step)
537{
538#if 0
539 T (*templatlonptr)[majordim][minordim] = (T *[][]) latlon;
540#endif
541 int i = 0;
542 int j = 0;
543
544 // do subsetting
545 // Find the correct index
546 int dim0count = count[0];
547 int dim1count = count[1];
548
549 int dim0index[dim0count];
550 int dim1index[dim1count];
551
552 for (i = 0; i < count[0]; i++) // count[0] is the least changing dimension
553 dim0index[i] = offset[0] + i * step[0];
554
555
556 for (j = 0; j < count[1]; j++)
557 dim1index[j] = offset[1] + j * step[1];
558
559 // Now assign the subsetting data
560 int k = 0;
561
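    // latlon is stored in row-major order, so element (row,col) lives at
    // latlon[row*minordim + col]; copy the selected rows and columns into outlatlon.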
562 for (i = 0; i < count[0]; i++) {
563 for (j = 0; j < count[1]; j++) {
564#if 0
565 outlatlon[k] = (*templatlonptr)[dim0index[i]][dim1index[j]];
566#endif
567 outlatlon[k] = *(latlon + (dim0index[i] * minordim) + dim1index[j]);
568 k++;
569 }
570 }
571 return true;
572}
573
574// Subsetting the field to follow the parameters from the DAP expression constraint
575template < class T >
576bool HDFEOS2ArraySwathDimMapField::Field3DSubset (T * outlatlon,
577 const vector<int32>& newdims,
578 T * latlon,
579 const int32 * offset,
580 const int32 * count,
581 const int32 * step)
582{
583 if (newdims.size() !=3)
584 throw InternalErr(__FILE__, __LINE__,
585 "the rank must be 3 to call this function");
586#if 0
587 T (*templatlonptr)[newdims[0]][newdims[1]][newdims[2]] = (T *[][][]) latlon;
588#endif
589 int i = 0;
590 int j = 0;
591 int k = 0;
592
593 // do subsetting
594 // Find the correct index
595 int dim0count = count[0];
596 int dim1count = count[1];
597 int dim2count = count[2];
598
599 int dim0index[dim0count];
600 int dim1index[dim1count];
601 int dim2index[dim2count];
602
603 for (i = 0; i < count[0]; i++) // count[0] is the least changing dimension
604 dim0index[i] = offset[0] + i * step[0];
605
606
607 for (j = 0; j < count[1]; j++)
608 dim1index[j] = offset[1] + j * step[1];
609
610 for (k = 0; k < count[2]; k++)
611 dim2index[k] = offset[2] + k * step[2];
612
613 // Now assign the subsetting data
614 int l = 0;
615
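    // Row-major layout: element (i,j,k) of the expanded field lives at
    // latlon[i*newdims[1]*newdims[2] + j*newdims[2] + k].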
616 for (i = 0; i < count[0]; i++) {
617 for (j = 0; j < count[1]; j++) {
618 for (k =0; k < count[2]; k++) {
619#if 0
620 outlatlon[l] = (*templatlonptr)[dim0index[i]][dim1index[j]][dim2index[k]];
621#endif
622 outlatlon[l] = *(latlon + (dim0index[i] * newdims[1] * newdims[2]) + (dim1index[j] * newdims[2])+ dim2index[k]);
623 l++;
624 }
625 }
626 }
627 return true;
628}
629
630int
631HDFEOS2ArraySwathDimMapField::write_dap_data_scale_comp(int32 swathid,
632 int nelms,
633 vector<int32>& offset32,
634 vector<int32>& count32,
635 vector<int32>& step32) {
636
637#if 0
638 string check_pass_fileid_key_str="H4.EnablePassFileID";
639 bool check_pass_fileid_key = false;
640 check_pass_fileid_key = HDFCFUtil::check_beskeys(check_pass_fileid_key_str);
641#endif
642
643 bool check_pass_fileid_key = HDF4RequestHandler::get_pass_fileid();
644
645 // Define function pointers to handle both grid and swath
646 intn (*fieldinfofunc) (int32, char *, int32 *, int32 *, int32 *, char *);
647
648
649 fieldinfofunc = SWfieldinfo;
650
651 int32 attrtype = -1;
652 int32 attrcount = -1;
653 int32 attrindex = -1;
654
655 int32 scale_factor_attr_index = -1;
656 int32 add_offset_attr_index =-1;
657
658 float scale=1;
659 float offset2=0;
660 float fillvalue = 0.;
661
662 if (sotype!=DEFAULT_CF_EQU) {
663
664 // Obtain attribute values.
665 int32 sdfileid = -1;
666
667 if (true == isgeofile || false == check_pass_fileid_key) {
668 sdfileid = SDstart(filename.c_str (), DFACC_READ);
669 if (FAIL == sdfileid) {
670 ostringstream eherr;
671 eherr << "Cannot Start the SD interface for the file " << filename <<endl;
672 throw InternalErr (__FILE__, __LINE__, eherr.str ());
673 }
674 }
675 else
676 sdfileid = sdfd;
677
678 int32 sdsindex = -1;
679 int32 sdsid = -1;
680
681 sdsindex = SDnametoindex(sdfileid, fieldname.c_str());
682 if (FAIL == sdsindex) {
683 if(true == isgeofile || false == check_pass_fileid_key)
684 SDend(sdfileid);
685 ostringstream eherr;
686 eherr << "Cannot obtain the index of " << fieldname;
687 throw InternalErr (__FILE__, __LINE__, eherr.str ());
688 }
689
690 sdsid = SDselect(sdfileid, sdsindex);
691 if (FAIL == sdsid) {
692 if(true == isgeofile || false == check_pass_fileid_key)
693 SDend(sdfileid);
694 ostringstream eherr;
695 eherr << "Cannot obtain the SDS ID of " << fieldname;
696 throw InternalErr (__FILE__, __LINE__, eherr.str ());
697 }
698
699 char attrname[H4_MAX_NC_NAME + 1];
700 vector<char> attrbuf;
701 vector<char> attrbuf2;
702
703 scale_factor_attr_index = SDfindattr(sdsid, "scale_factor");
704 if(scale_factor_attr_index!=FAIL)
705 {
706 intn ret = 0;
707 ret = SDattrinfo(sdsid, scale_factor_attr_index, attrname, &attrtype, &attrcount);
708 if (ret==FAIL)
709 {
710 SDendaccess(sdsid);
711 if(true == isgeofile || false == check_pass_fileid_key)
712 SDend(sdfileid);
713 ostringstream eherr;
714 eherr << "Attribute 'scale_factor' in "
715 << fieldname.c_str () << " cannot be obtained.";
716 throw InternalErr (__FILE__, __LINE__, eherr.str ());
717 }
718
719 attrbuf.clear();
720 attrbuf.resize(DFKNTsize(attrtype)*attrcount);
721 ret = SDreadattr(sdsid, scale_factor_attr_index, (VOIDP)attrbuf.data());
722 if (ret==FAIL)
723 {
724 SDendaccess(sdsid);
725 if(true == isgeofile || false == check_pass_fileid_key)
726 SDend(sdfileid);
727 ostringstream eherr;
728 eherr << "Attribute 'scale_factor' in "
729 << fieldname.c_str () << " cannot be obtained.";
730 throw InternalErr (__FILE__, __LINE__, eherr.str ());
731 }
732
732            // It appears that the datatype of scale_factor is assumed to be
733            // either float or double for this type of MODIS file. So far we
734            // haven't found any problems with this assumption.
735            // Maybe this is okay.
736            // KY 2013-12-19
738 switch(attrtype)
739 {
740#define GET_SCALE_FACTOR_ATTR_VALUE(TYPE, CAST) \
741 case DFNT_##TYPE: \
742 { \
743 CAST tmpvalue = *(CAST*)attrbuf.data(); \
744 scale = (float)tmpvalue; \
745 } \
746 break;
747 GET_SCALE_FACTOR_ATTR_VALUE(FLOAT32, float)
748 GET_SCALE_FACTOR_ATTR_VALUE(FLOAT64, double)
749 default:
750 throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
751
752 }
753
754#undef GET_SCALE_FACTOR_ATTR_VALUE
755 }
756
757 add_offset_attr_index = SDfindattr(sdsid, "add_offset");
758 if(add_offset_attr_index!=FAIL)
759 {
760 intn ret = 0;
761 ret = SDattrinfo(sdsid, add_offset_attr_index, attrname, &attrtype, &attrcount);
762 if (ret==FAIL)
763 {
764 SDendaccess(sdsid);
765 if(true == isgeofile || false == check_pass_fileid_key)
766 SDend(sdfileid);
767 ostringstream eherr;
768 eherr << "Attribute 'add_offset' in "
769 << fieldname.c_str () << " cannot be obtained.";
770 throw InternalErr (__FILE__, __LINE__, eherr.str ());
771 }
772 attrbuf.clear();
773 attrbuf.resize(DFKNTsize(attrtype)*attrcount);
774 ret = SDreadattr(sdsid, add_offset_attr_index, (VOIDP)attrbuf.data());
775 if (ret==FAIL)
776 {
777 SDendaccess(sdsid);
778 if(true == isgeofile || false == check_pass_fileid_key)
779 SDend(sdfileid);
780 ostringstream eherr;
781 eherr << "Attribute 'add_offset' in "
782 << fieldname.c_str () << " cannot be obtained.";
783 throw InternalErr (__FILE__, __LINE__, eherr.str ());
784 }
785 switch(attrtype)
786 {
787#define GET_ADD_OFFSET_ATTR_VALUE(TYPE, CAST) \
788 case DFNT_##TYPE: \
789 { \
790 CAST tmpvalue = *(CAST*)attrbuf.data(); \
791 offset2 = (float)tmpvalue; \
792 } \
793 break;
794 GET_ADD_OFFSET_ATTR_VALUE(FLOAT32, float)
795 GET_ADD_OFFSET_ATTR_VALUE(FLOAT64, double)
796 default:
797 throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
798 }
799#undef GET_ADD_OFFSET_ATTR_VALUE
800 }
801
802 attrindex = SDfindattr(sdsid, "_FillValue");
803 if(sotype!=DEFAULT_CF_EQU && attrindex!=FAIL)
804 {
805 intn ret = 0;
806 ret = SDattrinfo(sdsid, attrindex, attrname, &attrtype, &attrcount);
807 if (ret==FAIL)
808 {
809 SDendaccess(sdsid);
810 if(true == isgeofile || false == check_pass_fileid_key)
811 SDend(sdfileid);
812 ostringstream eherr;
813 eherr << "Attribute '_FillValue' in "
814 << fieldname.c_str () << " cannot be obtained.";
815 throw InternalErr (__FILE__, __LINE__, eherr.str ());
816 }
817 attrbuf.clear();
818 attrbuf.resize(DFKNTsize(attrtype)*attrcount);
819 ret = SDreadattr(sdsid, attrindex, (VOIDP)attrbuf.data());
820 if (ret==FAIL)
821 {
822 SDendaccess(sdsid);
823 if(true == isgeofile || false == check_pass_fileid_key)
824 SDend(sdfileid);
825 ostringstream eherr;
826 eherr << "Attribute '_FillValue' in "
827 << fieldname.c_str () << " cannot be obtained.";
828 throw InternalErr (__FILE__, __LINE__, eherr.str ());
829 }
830
831 switch(attrtype)
832 {
833#define GET_FILLVALUE_ATTR_VALUE(TYPE, CAST) \
834 case DFNT_##TYPE: \
835 { \
836 CAST tmpvalue = *(CAST*)attrbuf.data(); \
837 fillvalue = (float)tmpvalue; \
838 } \
839 break;
840 GET_FILLVALUE_ATTR_VALUE(INT8, int8)
841 GET_FILLVALUE_ATTR_VALUE(INT16, int16)
842 GET_FILLVALUE_ATTR_VALUE(INT32, int32)
843 GET_FILLVALUE_ATTR_VALUE(UINT8, uint8)
844 GET_FILLVALUE_ATTR_VALUE(UINT16, uint16)
845 GET_FILLVALUE_ATTR_VALUE(UINT32, uint32)
846 // Float and double are not considered. Handle them later.
847 default:
848 ;
849#if 0
850 // throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
851#endif
852
853 }
854#undef GET_FILLVALUE_ATTR_VALUE
855 }
856
857#if 0
858
859 // There is a controversy if we need to apply the valid_range to the data, for the time being comment this out.
860 // KY 2013-12-19
861 float orig_valid_min = 0.;
862 float orig_valid_max = 0.;
863
864
865 // Retrieve valid_range,valid_range is normally represented as (valid_min,valid_max)
866 // for non-CF scale and offset rules, the data is always float. So we only
867 // need to change the data type to float.
868 attrindex = SDfindattr(sdsid, "valid_range");
869 if(attrindex!=FAIL)
870 {
871 intn ret;
872 ret = SDattrinfo(sdsid, attrindex, attrname, &attrtype, &attrcount);
873 if (ret==FAIL)
874 {
875 detachfunc(gridid);
876 closefunc(gfid);
877 SDendaccess(sdsid);
878 SDend(sdfileid);
879 ostringstream eherr;
880 eherr << "Attribute '_FillValue' in " << fieldname.c_str () << " cannot be obtained.";
881 throw InternalErr (__FILE__, __LINE__, eherr.str ());
882 }
883 attrbuf.clear();
884 attrbuf.resize(DFKNTsize(attrtype)*attrcount);
885 ret = SDreadattr(sdsid, attrindex, (VOIDP)attrbuf.data());
886 if (ret==FAIL)
887 {
888 detachfunc(gridid);
889 closefunc(gfid);
890 SDendaccess(sdsid);
891 SDend(sdfileid);
892 ostringstream eherr;
893 eherr << "Attribute '_FillValue' in " << fieldname.c_str () << " cannot be obtained.";
894 throw InternalErr (__FILE__, __LINE__, eherr.str ());
895 }
896
897 string attrbuf_str(attrbuf.begin(),attrbuf.end());
898
899 switch(attrtype) {
900
901 case DFNT_CHAR:
902 {
903 // We need to treat the attribute data as characters or string.
904 // So find the separator.
905 size_t found = attrbuf_str.find_first_of(",");
906 size_t found_from_end = attrbuf_str.find_last_of(",");
907
908 if (string::npos == found)
909 throw InternalErr(__FILE__,__LINE__,"should find the separator ,");
910 if (found != found_from_end)
911 throw InternalErr(__FILE__,__LINE__,"Only one separator , should be available.");
912
913 //istringstream(attrbuf_str.substr(0,found))>> orig_valid_min;
914 //istringstream(attrbuf_str.substr(found+1))>> orig_valid_max;
915
916 orig_valid_min = atof((attrbuf_str.substr(0,found)).c_str());
917 orig_valid_max = atof((attrbuf_str.substr(found+1)).c_str());
918
919 }
920 break;
921
922 case DFNT_INT8:
923 {
924 if (2 == temp_attrcount) {
925 orig_valid_min = (float)attrbuf[0];
926 orig_valid_max = (float)attrbuf[1];
927 }
928 else
929 throw InternalErr(__FILE__,__LINE__,"The number of attribute count should be greater than 1.");
930
931 }
932 break;
933
934 case DFNT_UINT8:
935 case DFNT_UCHAR:
936 {
937 if (temp_attrcount != 2)
938 throw InternalErr(__FILE__,__LINE__,"The number of attribute count should be 2 for the DFNT_UINT8 type.");
939
940 unsigned char* temp_valid_range = (unsigned char *)attrbuf.data();
941 orig_valid_min = (float)(temp_valid_range[0]);
942 orig_valid_max = (float)(temp_valid_range[1]);
943 }
944 break;
945
946 case DFNT_INT16:
947 {
948 if (temp_attrcount != 2)
949 throw InternalErr(__FILE__,__LINE__,"The number of attribute count should be 2 for the DFNT_INT16 type.");
950
951 short* temp_valid_range = (short *)attrbuf.data();
952 orig_valid_min = (float)(temp_valid_range[0]);
953 orig_valid_max = (float)(temp_valid_range[1]);
954 }
955 break;
956
957 case DFNT_UINT16:
958 {
959 if (temp_attrcount != 2)
960 throw InternalErr(__FILE__,__LINE__,"The number of attribute count should be 2 for the DFNT_UINT16 type.");
961
962 unsigned short* temp_valid_range = (unsigned short *)attrbuf.data();
963 orig_valid_min = (float)(temp_valid_range[0]);
964 orig_valid_max = (float)(temp_valid_range[1]);
965 }
966 break;
967
968 case DFNT_INT32:
969 {
970 if (temp_attrcount != 2)
971 throw InternalErr(__FILE__,__LINE__,"The number of attribute count should be 2 for the DFNT_INT32 type.");
972
973 int* temp_valid_range = (int *)attrbuf.data();
974 orig_valid_min = (float)(temp_valid_range[0]);
975 orig_valid_max = (float)(temp_valid_range[1]);
976 }
977 break;
978
979 case DFNT_UINT32:
980 {
981 if (temp_attrcount != 2)
982 throw InternalErr(__FILE__,__LINE__,"The number of attribute count should be 2 for the DFNT_UINT32 type.");
983
984 unsigned int* temp_valid_range = (unsigned int *)attrbuf.data();
985 orig_valid_min = (float)(temp_valid_range[0]);
986 orig_valid_max = (float)(temp_valid_range[1]);
987 }
988 break;
989
990 case DFNT_FLOAT32:
991 {
992 if (temp_attrcount != 2)
993 throw InternalErr(__FILE__,__LINE__,"The number of attribute count should be 2 for the DFNT_FLOAT32 type.");
994
995 float* temp_valid_range = (float *)attrbuf.data();
996 orig_valid_min = temp_valid_range[0];
997 orig_valid_max = temp_valid_range[1];
998 }
999 break;
1000
1001 case DFNT_FLOAT64:
1002 {
1003 if (temp_attrcount != 2)
1004 throw InternalErr(__FILE__,__LINE__,"The number of attribute count should be 2 for the DFNT_FLOAT32 type.");
1005 double* temp_valid_range = (double *)attrbuf.data();
1006
1007 // Notice: this approach will lose precision and possibly overflow. Fortunately it is not a problem for MODIS data.
1008 // This part of code may not be called. If it is called, mostly the value is within the floating-point range.
1009 // KY 2013-01-29
1010 orig_valid_min = temp_valid_range[0];
1011 orig_valid_max = temp_valid_range[1];
1012 }
1013 break;
1014 default:
1015 throw InternalErr(__FILE__,__LINE__,"Unsupported data type.");
1016 }
1017 }
1018
1019#endif
1020
1021
1022#if 0
1023 // For testing only.
1024 //cerr << "scale=" << scale << endl;
1025 //cerr << "offset=" << offset2 << endl;
1026 //cerr << "fillvalue=" << fillvalue << endl;
1027#endif
1028
1029 SDendaccess(sdsid);
1030 if(true == isgeofile || false == check_pass_fileid_key)
1031 SDend(sdfileid);
1032 }
1033
1034    // According to our observations, MODIS products ALWAYS use the "scale"
1035    // factor to make bigger values smaller. So for a MODIS_MUL_SCALE product,
1036    // if the scale of some variable is greater than 1, the scale/offset type
1037    // of that variable may really be MODIS_DIV_SCALE.
1038    // By the same logic, we may need to change MODIS_DIV_SCALE to
1039    // MODIS_MUL_SCALE and MODIS_EQ_SCALE to MODIS_DIV_SCALE.
1040    // We have indeed found such a case: the HDF-EOS2 grid MODIS_Grid_1km_2D of
1041    // MOD(or MYD)09GA is MODIS_EQ_SCALE; however, the scale_factor of the
1042    // variable Range_1 in the MOD09GA product is 25.
1043    // According to our observation, this variable should be MODIS_DIV_SCALE,
1044    // and we verified this against the MODIS 09 product document
1045    // http://modis-sr.ltdri.org/products/MOD09_UserGuide_v1_3.pdf.
1046    // Since this conclusion is based on our observation, we add a BESLog
1047    // message so that similar cases can be detected and verified against the
1048    // corresponding product documents.
1049    //
1050    // More information:
1051    // We verified with the MOD09 data producer that the scale_factor for
1052    // Range_1 and Range_c is 25 but the equation is still multiplication
1053    // instead of division. So that case has to be treated as a special case
1054    // in which the scale and offset settings are not changed.
1055    // KY 2014-01-13
1056    // However, since this function only handles swaths and we haven't found
1057    // such an outlier for a swath case, we keep the old behavior here.
1060
1061
1062 if (MODIS_EQ_SCALE == sotype || MODIS_MUL_SCALE == sotype) {
1063 if (scale > 1) {
1064 sotype = MODIS_DIV_SCALE;
1065 (*BESLog::TheLog())<< "The field " << fieldname << " scale factor is "
1066 << scale << endl
1067 << " But the original scale factor type is MODIS_MUL_SCALE or MODIS_EQ_SCALE. "
1068 << endl
1069 << " Now change it to MODIS_DIV_SCALE. "<<endl;
1070 }
1071 }
1072
1073 if (MODIS_DIV_SCALE == sotype) {
1074 if (scale < 1) {
1075 sotype = MODIS_MUL_SCALE;
1076 (*BESLog::TheLog())<< "The field " << fieldname << " scale factor is "
1077 << scale << endl
1078 << " But the original scale factor type is MODIS_DIV_SCALE. "
1079 << endl
1080 << " Now change it to MODIS_MUL_SCALE. "<<endl;
1081 }
1082 }
1083
1084 // RECALCULATE formula in the following #if block
1085#if 0
1087/* if(sotype==MODIS_MUL_SCALE) \
1088// tmpval[l] = (tmptr[l]-field_offset)*scale; \
1089// else if(sotype==MODIS_EQ_SCALE) \
1090// tmpval[l] = tmptr[l]*scale + field_offset; \
1091// else if(sotype==MODIS_DIV_SCALE) \
1092// tmpval[l] = (tmptr[l]-field_offset)/scale; \
1093*/
1094#endif
1095
1096
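// The RECALCULATE macro below converts raw counts to physical values when a
// non-CF scale/offset convention applies. temp_scale and temp_offset are chosen
// so that all three MODIS conventions reduce to value = raw*temp_scale + temp_offset:
// MODIS_MUL_SCALE: (raw - offset)*scale, MODIS_EQ_SCALE: raw*scale + offset,
// MODIS_DIV_SCALE: (raw - offset)/scale. Values equal to _FillValue are left as is.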
1097#define RECALCULATE(CAST, DODS_CAST, VAL) \
1098{ \
1099 bool change_data_value = false; \
1100 if(sotype!=DEFAULT_CF_EQU) \
1101 { \
1102 if(scale_factor_attr_index!=FAIL && !(scale==1 && offset2==0)) \
1103 { \
1104 vector<float>tmpval; \
1105 tmpval.resize(nelms); \
1106 CAST tmptr = (CAST)VAL; \
1107 for(int l=0; l<nelms; l++) \
1108 tmpval[l] = (float)tmptr[l]; \
1109 float temp_scale = scale; \
1110 float temp_offset = offset2; \
1111 if(sotype==MODIS_MUL_SCALE) \
1112 temp_offset = -1. *offset2*temp_scale;\
1113 else if (sotype==MODIS_DIV_SCALE) \
1114 {\
1115 temp_scale = 1/scale; \
1116 temp_offset = -1. *temp_scale *offset2;\
1117 }\
1118 for(int l=0; l<nelms; l++) \
1119 if(attrindex!=FAIL && ((float)tmptr[l])!=fillvalue) \
1120 tmpval[l] = tmptr[l]*temp_scale + temp_offset; \
1121 change_data_value = true; \
1122 set_value((dods_float32 *)tmpval.data(), nelms); \
1123 } \
1124 } \
1125 if(!change_data_value) \
1126 { \
1127 set_value ((DODS_CAST)VAL, nelms); \
1128 } \
1129}
1130
1131 // tmp_rank and tmp_dimlist are two dummy variables that are
1132 // only used when calling fieldinfo.
1133 int32 tmp_rank = 0;
1134 char tmp_dimlist[1024];
1135
1136 // field dimension sizes
1137 int32 tmp_dims[rank];
1138
1139 // field data type
1140 int32 field_dtype = 0;
1141
1142 // returned value of HDF4 and HDF-EOS2 APIs
1143 intn r = 0;
1144
1145 // Obtain the field info. We mainly need the datatype information
1146 // to allocate the buffer to store the data
1147 r = fieldinfofunc (swathid, const_cast < char *>(fieldname.c_str ()),
1148 &tmp_rank, tmp_dims, &field_dtype, tmp_dimlist);
1149 if (r != 0) {
1150 ostringstream eherr;
1151 eherr << "Field " << fieldname.c_str ()
1152 << " information cannot be obtained.";
1153 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1154 }
1155
1156
1157 // int32 majordimsize, minordimsize;
1158 vector<int32> newdims;
1159 newdims.resize(rank);
1160
1161 // Loop through the data type.
1162 switch (field_dtype) {
1163
1164 case DFNT_INT8:
1165 {
1166 // Obtaining the total value and interpolating the data
1167 // according to dimension map
1168 vector < int8 > total_val8;
1169 r = GetFieldValue (swathid, fieldname, dimmaps, total_val8, newdims);
1170 if (r != 0) {
1171 ostringstream eherr;
1172 eherr << "field " << fieldname.c_str ()
1173 << "cannot be read.";
1174 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1175 }
1176
1177 check_num_elems_constraint(nelms,newdims);
1178
1179 vector<int8>val8;
1180 val8.resize(nelms);
1181
1182 FieldSubset (val8.data(), newdims, total_val8.data(),
1183 offset32.data(), count32.data(), step32.data());
1184
1185#ifndef SIGNED_BYTE_TO_INT32
1186 RECALCULATE(int8*, dods_byte*, val8.data());
1187#else
1188 vector<int32>newval;
1189 newval.resize(nelms);
1190
1191 for (int counter = 0; counter < nelms; counter++)
1192 newval[counter] = (int32) (val8[counter]);
1193
1194 RECALCULATE(int32*, dods_int32*, newval.data());
1195#endif
1196 }
1197 break;
1198 case DFNT_UINT8:
1199 case DFNT_UCHAR8:
1200 {
1201 // Obtaining the total value and interpolating the data
1202 // according to dimension map
1203 vector < uint8 > total_val_u8;
1204 r = GetFieldValue (swathid, fieldname, dimmaps, total_val_u8, newdims);
1205 if (r != 0) {
1206 ostringstream eherr;
1207 eherr << "field " << fieldname.c_str () << "cannot be read.";
1208 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1209 }
1210
1211 check_num_elems_constraint(nelms,newdims);
1212 vector<uint8>val_u8;
1213 val_u8.resize(nelms);
1214
1215 FieldSubset (val_u8.data(), newdims, total_val_u8.data(),
1216 offset32.data(), count32.data(), step32.data());
1217 RECALCULATE(uint8*, dods_byte*, val_u8.data());
1218 }
1219 break;
1220 case DFNT_INT16:
1221 {
1222 // Obtaining the total value and interpolating the data
1223 // according to dimension map
1224 vector < int16 > total_val16;
1225 r = GetFieldValue (swathid, fieldname, dimmaps, total_val16, newdims);
1226 if (r != 0) {
1227 ostringstream eherr;
1228 eherr << "field " << fieldname.c_str () << "cannot be read.";
1229 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1230 }
1231
1232 check_num_elems_constraint(nelms,newdims);
1233 vector<int16>val16;
1234 val16.resize(nelms);
1235
1236 FieldSubset (val16.data(), newdims, total_val16.data(),
1237 offset32.data(), count32.data(), step32.data());
1238
1239 RECALCULATE(int16*, dods_int16*, val16.data());
1240 }
1241 break;
1242 case DFNT_UINT16:
1243 {
1244 // Obtaining the total value and interpolating the data
1245 // according to dimension map
1246 vector < uint16 > total_val_u16;
1247 r = GetFieldValue (swathid, fieldname, dimmaps, total_val_u16, newdims);
1248 if (r != 0) {
1249 ostringstream eherr;
1250
1251 eherr << "field " << fieldname.c_str () << "cannot be read.";
1252 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1253 }
1254
1255 check_num_elems_constraint(nelms,newdims);
1256 vector<uint16>val_u16;
1257 val_u16.resize(nelms);
1258
1259 FieldSubset (val_u16.data(), newdims, total_val_u16.data(),
1260 offset32.data(), count32.data(), step32.data());
1261 RECALCULATE(uint16*, dods_uint16*, val_u16.data());
1262
1263 }
1264 break;
1265 case DFNT_INT32:
1266 {
1267 // Obtaining the total value and interpolating the data
1268 // according to dimension map
1269 vector < int32 > total_val32;
1270 r = GetFieldValue (swathid, fieldname, dimmaps, total_val32, newdims);
1271 if (r != 0) {
1272 ostringstream eherr;
1273
1274 eherr << "field " << fieldname.c_str () << "cannot be read.";
1275 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1276 }
1277
1278 check_num_elems_constraint(nelms,newdims);
1279 vector<int32> val32;
1280 val32.resize(nelms);
1281
1282 FieldSubset (val32.data(), newdims, total_val32.data(),
1283 offset32.data(), count32.data(), step32.data());
1284
1285 RECALCULATE(int32*, dods_int32*, val32.data());
1286 }
1287 break;
1288 case DFNT_UINT32:
1289 {
1290 // Obtaining the total value and interpolating the data
1291 // according to dimension map
1292 // Notice the total_val_u32 is allocated inside the GetFieldValue.
1293 vector < uint32 > total_val_u32;
1294 r = GetFieldValue (swathid, fieldname, dimmaps, total_val_u32, newdims);
1295 if (r != 0) {
1296 ostringstream eherr;
1297 eherr << "field " << fieldname.c_str () << "cannot be read.";
1298 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1299 }
1300
1301 check_num_elems_constraint(nelms,newdims);
1302 vector<uint32>val_u32;
1303 val_u32.resize(nelms);
1304
1305 FieldSubset (val_u32.data(), newdims, total_val_u32.data(),
1306 offset32.data(), count32.data(), step32.data());
1307 RECALCULATE(uint32*, dods_uint32*, val_u32.data());
1308
1309 }
1310 break;
1311 case DFNT_FLOAT32:
1312 {
1313 // Obtaining the total value and interpolating the data
1314 // according to dimension map
1315 vector < float32 > total_val_f32;
1316 r = GetFieldValue (swathid, fieldname, dimmaps, total_val_f32, newdims);
1317 if (r != 0) {
1318 ostringstream eherr;
1319 eherr << "field " << fieldname.c_str () << "cannot be read.";
1320 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1321 }
1322
1323 check_num_elems_constraint(nelms,newdims);
1324 vector<float32>val_f32;
1325 val_f32.resize(nelms);
1326
1327 FieldSubset (val_f32.data(), newdims, total_val_f32.data(),
1328 offset32.data(), count32.data(), step32.data());
1329 RECALCULATE(float32*, dods_float32*, val_f32.data());
1330 }
1331 break;
1332 case DFNT_FLOAT64:
1333 {
1334 // Obtaining the total value and interpolating the data according to dimension map
1335 vector < float64 > total_val_f64;
1336 r = GetFieldValue (swathid, fieldname, dimmaps, total_val_f64, newdims);
1337 if (r != 0) {
1338 ostringstream eherr;
1339 eherr << "field " << fieldname.c_str () << "cannot be read.";
1340 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1341 }
1342
1343 check_num_elems_constraint(nelms,newdims);
1344 vector<float64>val_f64;
1345 val_f64.resize(nelms);
1346 FieldSubset (val_f64.data(), newdims, total_val_f64.data(),
1347 offset32.data(), count32.data(), step32.data());
1348 RECALCULATE(float64*, dods_float64*, val_f64.data());
1349
1350 }
1351 break;
1352 default:
1353 {
1354 throw InternalErr (__FILE__, __LINE__, "unsupported data type.");
1355 }
1356 }
1357
1358 return 0;
1359}
1360
1361int
1362HDFEOS2ArraySwathDimMapField::write_dap_data_disable_scale_comp(int32 swathid,
1363 int nelms,
1364 vector<int32>& offset32,
1365 vector<int32>& count32,
1366 vector<int32>& step32) {
1367
1368 // Define function pointers to handle both grid and swath
1369 intn (*fieldinfofunc) (int32, char *, int32 *, int32 *, int32 *, char *);
1370
1371 fieldinfofunc = SWfieldinfo;
1372
1373
1374 // tmp_rank and tmp_dimlist are two dummy variables
1375 // that are only used when calling fieldinfo.
1376 int32 tmp_rank = 0;
1377 char tmp_dimlist[1024];
1378
1379 // field dimension sizes
1380 int32 tmp_dims[rank];
1381
1382 // field data type
1383 int32 field_dtype = 0;
1384
1385 // returned value of HDF4 and HDF-EOS2 APIs
1386 intn r = 0;
1387
1388 // Obtain the field info. We mainly need the datatype information
1389 // to allocate the buffer to store the data
1390 r = fieldinfofunc (swathid, const_cast < char *>(fieldname.c_str ()),
1391 &tmp_rank, tmp_dims, &field_dtype, tmp_dimlist);
1392 if (r != 0) {
1393 ostringstream eherr;
1394 eherr << "Field " << fieldname.c_str () << " information cannot be obtained.";
1395 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1396 }
1397
1398
1399 // int32 majordimsize, minordimsize;
1400 vector<int32> newdims;
1401 newdims.resize(rank);
1402
1403 // Loop through the data type.
1404 switch (field_dtype) {
1405
1406 case DFNT_INT8:
1407 {
1408 // Obtaining the total value and interpolating the data according to dimension map
1409 vector < int8 > total_val8;
1410 r = GetFieldValue (swathid, fieldname, dimmaps, total_val8, newdims);
1411 if (r != 0) {
1412 ostringstream eherr;
1413 eherr << "field " << fieldname.c_str () << "cannot be read.";
1414 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1415 }
1416
1417 check_num_elems_constraint(nelms,newdims);
1418
1419 vector<int8>val8;
1420 val8.resize(nelms);
1421 FieldSubset (val8.data(), newdims, total_val8.data(),
1422 offset32.data(), count32.data(), step32.data());
1423
1424
1425#ifndef SIGNED_BYTE_TO_INT32
1426 set_value((dods_byte*)val8.data(),nelms);
1427#else
1428 vector<int32>newval;
1429 newval.resize(nelms);
1430
1431 for (int counter = 0; counter < nelms; counter++)
1432 newval[counter] = (int32) (val8[counter]);
1433
1434 set_value ((dods_int32 *) newval.data(), nelms);
1435#endif
1436 }
1437 break;
1438 case DFNT_UINT8:
1439 case DFNT_UCHAR8:
1440 {
1441 // Obtaining the total value and interpolating the data according to dimension map
1442 vector < uint8 > total_val_u8;
1443 r = GetFieldValue (swathid, fieldname, dimmaps, total_val_u8, newdims);
1444 if (r != 0) {
1445 ostringstream eherr;
1446 eherr << "field " << fieldname.c_str () << "cannot be read.";
1447 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1448 }
1449
1450 check_num_elems_constraint(nelms,newdims);
1451 vector<uint8>val_u8;
1452 val_u8.resize(nelms);
1453
1454 FieldSubset (val_u8.data(), newdims, total_val_u8.data(),
1455 offset32.data(), count32.data(), step32.data());
1456 set_value ((dods_byte *) val_u8.data(), nelms);
1457 }
1458 break;
1459 case DFNT_INT16:
1460 {
1461 // Obtaining the total value and interpolating the data according to dimension map
1462 vector < int16 > total_val16;
1463 r = GetFieldValue (swathid, fieldname, dimmaps, total_val16, newdims);
1464 if (r != 0) {
1465 ostringstream eherr;
1466 eherr << "field " << fieldname.c_str () << "cannot be read.";
1467 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1468 }
1469
1470 check_num_elems_constraint(nelms,newdims);
1471 vector<int16>val16;
1472 val16.resize(nelms);
1473
1474 FieldSubset (val16.data(), newdims, total_val16.data(),
1475 offset32.data(), count32.data(), step32.data());
1476
1477 set_value ((dods_int16 *) val16.data(), nelms);
1478 }
1479 break;
1480 case DFNT_UINT16:
1481 {
1482 // Obtaining the total value and interpolating the data according to dimension map
1483 vector < uint16 > total_val_u16;
1484 r = GetFieldValue (swathid, fieldname, dimmaps, total_val_u16, newdims);
1485 if (r != 0) {
1486 ostringstream eherr;
1487 eherr << "field " << fieldname.c_str () << "cannot be read.";
1488 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1489 }
1490
1491 check_num_elems_constraint(nelms,newdims);
1492 vector<uint16>val_u16;
1493 val_u16.resize(nelms);
1494
1495 FieldSubset (val_u16.data(), newdims, total_val_u16.data(),
1496 offset32.data(), count32.data(), step32.data());
1497 set_value ((dods_uint16 *) val_u16.data(), nelms);
1498
1499 }
1500 break;
1501 case DFNT_INT32:
1502 {
1503 // Obtaining the total value and interpolating the data according to dimension map
1504 vector < int32 > total_val32;
1505 r = GetFieldValue (swathid, fieldname, dimmaps, total_val32, newdims);
1506 if (r != 0) {
1507 ostringstream eherr;
1508
1509 eherr << "field " << fieldname.c_str () << "cannot be read.";
1510 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1511 }
1512
1513 check_num_elems_constraint(nelms,newdims);
1514 vector<int32> val32;
1515 val32.resize(nelms);
1516
1517 FieldSubset (val32.data(), newdims, total_val32.data(),
1518 offset32.data(), count32.data(), step32.data());
1519 set_value ((dods_int32 *) val32.data(), nelms);
1520 }
1521 break;
1522 case DFNT_UINT32:
1523 {
1524 // Obtaining the total value and interpolating the data according to dimension map
1525 // Notice the total_val_u32 is allocated inside the GetFieldValue.
1526 vector < uint32 > total_val_u32;
1527 r = GetFieldValue (swathid, fieldname, dimmaps, total_val_u32, newdims);
1528 if (r != 0) {
1529 ostringstream eherr;
1530
1531 eherr << "field " << fieldname.c_str () << "cannot be read.";
1532 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1533 }
1534
1535 check_num_elems_constraint(nelms,newdims);
1536 vector<uint32>val_u32;
1537 val_u32.resize(nelms);
1538
1539 FieldSubset (val_u32.data(), newdims, total_val_u32.data(),
1540 offset32.data(), count32.data(), step32.data());
1541 set_value ((dods_uint32 *) val_u32.data(), nelms);
1542
1543 }
1544 break;
1545 case DFNT_FLOAT32:
1546 {
1547 // Obtaining the total value and interpolating the data according to dimension map
1548 vector < float32 > total_val_f32;
1549 r = GetFieldValue (swathid, fieldname, dimmaps, total_val_f32, newdims);
1550 if (r != 0) {
1551 ostringstream eherr;
1552
1553 eherr << "field " << fieldname.c_str () << "cannot be read.";
1554 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1555 }
1556
1557 check_num_elems_constraint(nelms,newdims);
1558 vector<float32>val_f32;
1559 val_f32.resize(nelms);
1560
1561 FieldSubset (val_f32.data(), newdims, total_val_f32.data(),
1562 offset32.data(), count32.data(), step32.data());
1563
1564 set_value ((dods_float32 *) val_f32.data(), nelms);
1565 }
1566 break;
1567 case DFNT_FLOAT64:
1568 {
1569 // Obtaining the total value and interpolating the data according to dimension map
1570 vector < float64 > total_val_f64;
1571 r = GetFieldValue (swathid, fieldname, dimmaps, total_val_f64, newdims);
1572 if (r != 0) {
1573 ostringstream eherr;
1574
1575 eherr << "field " << fieldname.c_str () << "cannot be read.";
1576 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1577 }
1578
1579 check_num_elems_constraint(nelms,newdims);
1580 vector<float64>val_f64;
1581 val_f64.resize(nelms);
1582 FieldSubset (val_f64.data(), newdims, total_val_f64.data(),
1583 offset32.data(), count32.data(), step32.data());
1584 set_value ((dods_float64 *) val_f64.data(), nelms);
1585
1586 }
1587 break;
1588 default:
1589 {
1590 throw InternalErr (__FILE__, __LINE__, "unsupported data type.");
1591 }
1592 }
1593
1594 return 0;
1595}
1596
1597void HDFEOS2ArraySwathDimMapField::close_fileid(const int32 swfileid, const int32 sdfileid) {
1598
1599
1600 if(true == isgeofile || false == HDF4RequestHandler::get_pass_fileid()) {
1601
1602 if(sdfileid != -1)
1603 SDend(sdfileid);
1604
1605 if(swfileid != -1)
1606 SWclose(swfileid);
1607
1608 }
1609
1610}
1611
1612bool HDFEOS2ArraySwathDimMapField::check_num_elems_constraint(const int num_elems,
1613 const vector<int32>&newdims) const {
1614
1615 int total_dim_size = 1;
1616 for (int i =0;i<rank;i++)
1617 total_dim_size*=newdims[i];
1618
1619 if(total_dim_size < num_elems) {
1620 ostringstream eherr;
1621 eherr << "The total number of elements for the array " << total_dim_size
1622 << "is less than the user-requested number of elements " << num_elems;
1623 throw InternalErr (__FILE__, __LINE__, eherr.str ());
1624 }
1625
1626 return false;
1627
1628}
1629#endif