001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html.         *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h5;
016
017import java.lang.reflect.Array;
018import java.text.DecimalFormat;
019import java.util.List;
020import java.util.Vector;
021
022import hdf.hdf5lib.H5;
023import hdf.hdf5lib.HDF5Constants;
024import hdf.hdf5lib.HDFNativeData;
025import hdf.hdf5lib.exceptions.HDF5DataFiltersException;
026import hdf.hdf5lib.exceptions.HDF5Exception;
027import hdf.hdf5lib.exceptions.HDF5LibraryException;
028import hdf.hdf5lib.structs.H5O_info_t;
029import hdf.object.Attribute;
030import hdf.object.Dataset;
031import hdf.object.Datatype;
032import hdf.object.FileFormat;
033import hdf.object.Group;
034import hdf.object.HObject;
035import hdf.object.ScalarDS;
036
037/**
038 * H5ScalarDS describes a multi-dimensional array of HDF5 scalar or atomic data types, such as byte, int, short, long,
039 * float, double and string, and the operations performed on the scalar dataset.
040 * <p>
041 * The library predefines a modest number of datatypes. For details,
042 * read <a href="http://hdfgroup.org/HDF5/doc/Datatypes.html">The Datatype Interface (H5T).</a>
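 * <p>
 * A minimal read sketch (the file name, dataset path and open mode are illustrative):
 *
 * <pre>
 * H5File file = new H5File(&quot;test.h5&quot;, H5File.READ);
 * file.open();
 * H5ScalarDS dset = (H5ScalarDS) file.get(&quot;/arrays/dset&quot;);
 * dset.init();                  // load rank, dimensions and the default selection
 * Object data = dset.getData(); // read the selected subset into memory
 * file.close();
 * </pre>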
043 *
044 * @version 1.1 9/4/2007
045 * @author Peter X. Cao
046 */
047public class H5ScalarDS extends ScalarDS {
048    /**
049     * Serialization version identifier.
050     */
051    private static final long serialVersionUID = 2887517608230611642L;
052
053    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5ScalarDS.class);
054
055    /**
056     * The list of attributes of this data object. Members of the list are instances of Attribute.
057     */
058    private List<Attribute> attributeList;
059
060    private int nAttributes = -1;
061
062    private H5O_info_t obj_info;
063
064    /**
065     * The byte array containing references of palettes. Each reference requires eight bytes storage. Therefore, the
066     * array length is 8*numberOfPalettes.
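     * For example, a dataset that references two palettes stores their references in a 16-byte array.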
067     */
068    private byte[] paletteRefs;
069
070    /** flag to indicate if the dataset is of variable length */
071    private boolean isVLEN = false;
072
073    /** flag to indicate if the dataset is an enum */
074    private boolean isEnum = false;
075
076    /** flag to indicate if the dataset is an external dataset */
077    private boolean isExternal = false;
078
079    private boolean isArrayOfCompound = false;
080
081    private boolean isArrayOfVLEN = false;
082    /**
083     * flag to indicate if the datatype in the file is the same as the datatype in memory
084     */
085    private boolean isNativeDatatype = false;
086
087    /** flag to indicate if the datatype is a region reference */
088    private boolean isRegRef = false;
089
090    /**
091     * Constructs an instance of an H5 scalar dataset with a given file, dataset name and path.
092     * <p>
093     * For example, in H5ScalarDS(h5file, "dset", "/arrays/"), "dset" is the name of the dataset and "/arrays/" is the
094     * group path of the dataset.
095     *
096     * @param theFile
097     *            the file that contains the data object.
098     * @param theName
099     *            the name of the data object, e.g. "dset".
100     * @param thePath
101     *            the full path of the data object, e.g. "/arrays/".
102     */
103    public H5ScalarDS(FileFormat theFile, String theName, String thePath) {
104        this(theFile, theName, thePath, null);
105    }
106
107    /**
108     * @deprecated Not for public use in the future.<br>
109     *             Use {@link #H5ScalarDS(FileFormat, String, String)} instead.
110     *
111     * @param theFile
112     *            the file that contains the data object.
113     * @param theName
114     *            the name of the data object, e.g. "dset".
115     * @param thePath
116     *            the full path of the data object, e.g. "/arrays/".
117     * @param oid
118     *            the oid of the data object.
119     */
120    @Deprecated
121    public H5ScalarDS(FileFormat theFile, String theName, String thePath, long[] oid) {
122        super(theFile, theName, thePath, oid);
123        unsignedConverted = false;
124        paletteRefs = null;
125        obj_info = new H5O_info_t(-1L, -1L, 0, 0, -1L, 0L, 0L, 0L, 0L, null, null, null);
126
127        if ((oid == null) && (theFile != null)) {
128            // retrieve the object ID
129            try {
130                byte[] ref_buf = H5.H5Rcreate(theFile.getFID(), this.getFullName(), HDF5Constants.H5R_OBJECT, -1);
131                this.oid = new long[1];
132                this.oid[0] = HDFNativeData.byteToLong(ref_buf, 0);
133            }
134            catch (Exception ex) {
135                log.debug("constructor ID {} for {} failed H5Rcreate", theFile.getFID(), this.getFullName());
136            }
137        }
138    }
139
140    /*
141     * (non-Javadoc)
142     *
143     * @see hdf.object.HObject#open()
144     */
145    @Override
146    public int open() {
147        int did = -1;
148
149        try {
150            did = H5.H5Dopen(getFID(), getPath() + getName(), HDF5Constants.H5P_DEFAULT);
151        }
152        catch (HDF5Exception ex) {
153            log.debug("Failed to open dataset {}", getPath() + getName());
154            did = -1;
155        }
156
157        return did;
158    }
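
    // Typical pairing of open() and close() (a sketch; "dset" stands for an
    // H5ScalarDS instance):
    //
    //   int did = dset.open();
    //   if (did >= 0) {
    //       try {
    //           // ... native H5D* calls using did ...
    //       }
    //       finally {
    //           dset.close(did);
    //       }
    //   }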
159
160    /*
161     * (non-Javadoc)
162     *
163     * @see hdf.object.HObject#close(int)
164     */
165    @Override
166    public void close(int did) {
167        if (did >= 0) {
168            try {
169                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_LOCAL);
170            }
171            catch (Exception ex) {
172                log.debug("close.H5Fflush:", ex);
173            }
174            try {
175                H5.H5Dclose(did);
176            }
177            catch (HDF5Exception ex) {
178                log.debug("close.H5Dclose:", ex);
179            }
180        }
181    }
182
183    /*
184     * (non-Javadoc)
185     *
186     * @see hdf.object.Dataset#init()
187     */
188    @Override
189    public void init() {
190        if (rank > 0) {
191            resetSelection();
192            return; // already called. Initialize only once
193        }
194        log.trace("init() start");
195
196        int did = -1, sid = -1, tid = -1, tclass = -1;
197
198        did = open();
199        if (did >= 0) {
200            // check if it is an external dataset
201            int pid = -1;
202            try {
203                log.trace("init() check if it is an external dataset");
204                pid = H5.H5Dget_create_plist(did);
205                int nfiles = H5.H5Pget_external_count(pid);
206                log.trace("init() external dataset nfiles={}",nfiles);
207                isExternal = (nfiles > 0);
208            }
209            catch (Exception ex) {
210                log.debug("check if it is an external dataset:", ex);
211            }
212            finally {
213                try {
214                    H5.H5Pclose(pid);
215                }
216                catch (Exception ex) {
217                    log.debug("finally close:", ex);
218                }
219            }
220
221            paletteRefs = getPaletteRefs(did);
222
223            try {
224                sid = H5.H5Dget_space(did);
225                rank = H5.H5Sget_simple_extent_ndims(sid);
226                tid = H5.H5Dget_type(did);
227                tclass = H5.H5Tget_class(tid);
228                log.debug("H5Tget_class: {} is Array {}", tclass, HDF5Constants.H5T_ARRAY);
229
230                int tmptid = 0;
231                if (tclass == HDF5Constants.H5T_ARRAY) {
232                    // use the base datatype to define the array
233                    int basetid = H5.H5Tget_super(tid);
234                    int baseclass = H5.H5Tget_class(basetid);
235                    isArrayOfCompound = (baseclass == HDF5Constants.H5T_COMPOUND);
236                    isArrayOfVLEN = (baseclass == HDF5Constants.H5T_VLEN);
237                    isVLEN = isVLEN || ((baseclass == HDF5Constants.H5T_VLEN) || H5.H5Tis_variable_str(basetid));
238                    isVLEN = isVLEN || H5.H5Tdetect_class(basetid, HDF5Constants.H5T_VLEN);
239                }
240
241                isText = (tclass == HDF5Constants.H5T_STRING);
242                isVLEN = isVLEN || ((tclass == HDF5Constants.H5T_VLEN) || H5.H5Tis_variable_str(tid));
243                isEnum = (tclass == HDF5Constants.H5T_ENUM);
244                isUnsigned = H5Datatype.isUnsigned(tid);
245                isRegRef = H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_DSETREG);
246                log.debug(
247                        "init() tid={} is tclass={} has isText={} : isVLEN={} : isEnum={} : isUnsigned={} : isRegRef={}",
248                        tid, tclass, isText, isVLEN, isEnum, isUnsigned, isRegRef);
249
250                // check if datatype in file is native datatype
251                try {
252                    tmptid = H5.H5Tget_native_type(tid);
253                    isNativeDatatype = H5.H5Tequal(tid, tmptid);
254                    log.trace("init() isNativeDatatype={}", isNativeDatatype);
255
256                    /* see if fill value is defined */
257                    pid = H5.H5Dget_create_plist(did);
258                    int[] fillStatus = { 0 };
259                    if (H5.H5Pfill_value_defined(pid, fillStatus) >= 0) {
260                        if (fillStatus[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED) {
261                            fillValue = H5Datatype.allocateArray(tmptid, 1);
262                            log.trace("init() fillValue={}", fillValue);
263                            try {
264                                H5.H5Pget_fill_value(pid, tmptid, fillValue);
265                                log.trace("init() H5Pget_fill_value={}", fillValue);
266                                if (fillValue != null) {
267                                    if (isFillValueConverted)
268                                        fillValue = ScalarDS.convertToUnsignedC(fillValue, null);
269
270                                    int n = Array.getLength(fillValue);
271                                    for (int i = 0; i < n; i++)
272                                        addFilteredImageValue((Number) Array.get(fillValue, i));
273                                }
274                            }
275                            catch (Exception ex2) {
276                                log.debug("fill value was defined :", ex2);
277                                fillValue = null;
278                            }
279                        }
280                    }
281                }
282                catch (HDF5Exception ex) {
283                    log.debug("check if datatype in file is native datatype :", ex);
284                }
285                finally {
286                    try {
287                        H5.H5Tclose(tmptid);
288                    }
289                    catch (HDF5Exception ex) {
290                        log.debug("finally close:", ex);
291                    }
292                    try {
293                        H5.H5Pclose(pid);
294                    }
295                    catch (Exception ex) {
296                        log.debug("finally close:", ex);
297                    }
298                }
299
300                if (rank == 0) {
301                    // a scalar data point
302                    rank = 1;
303                    dims = new long[1];
304                    dims[0] = 1;
305                    log.trace("init() rank is a scalar data point");
306                }
307                else {
308                    dims = new long[rank];
309                    maxDims = new long[rank];
310                    H5.H5Sget_simple_extent_dims(sid, dims, maxDims);
311                    log.trace("init() rank={}, dims={}, maxDims={}", rank, dims, maxDims);
312                }
313            }
314            catch (HDF5Exception ex) {
315                log.debug("init():", ex);
316            }
317            finally {
318                try {
319                    H5.H5Tclose(tid);
320                }
321                catch (HDF5Exception ex2) {
322                    log.debug("finally close:", ex2);
323                }
324                try {
325                    H5.H5Sclose(sid);
326                }
327                catch (HDF5Exception ex2) {
328                    log.debug("finally close:", ex2);
329                }
330            }
331
332            // check for the type of image and interlace mode
333            // it is a true color image in one of three cases:
334            // 1) IMAGE_SUBCLASS = IMAGE_TRUECOLOR,
335            // 2) INTERLACE_MODE = INTERLACE_PIXEL,
336            // 3) INTERLACE_MODE = INTERLACE_PLANE
337            if ((rank >= 3) && isImage) {
338                interlace = -1;
339                isTrueColor = isStringAttributeOf(did, "IMAGE_SUBCLASS", "IMAGE_TRUECOLOR");
340
341                if (isTrueColor) {
342                    interlace = INTERLACE_PIXEL;
343                    if (isStringAttributeOf(did, "INTERLACE_MODE", "INTERLACE_PLANE")) {
344                        interlace = INTERLACE_PLANE;
345                    }
346                }
347            }
348
349            close(did);
350        }
351        else {
352            log.debug("init() failed to open dataset");
353        }
354
355        startDims = new long[rank];
356        selectedDims = new long[rank];
357        resetSelection();
358        log.trace("init() rank={}, startDims={}, selectedDims={}", rank, startDims, selectedDims);
359        log.trace("init() finish");
360    }
361
362    /*
363     * (non-Javadoc)
364     *
365     * @see hdf.object.DataFormat#hasAttribute()
366     */
367    public boolean hasAttribute() {
368        obj_info.num_attrs = nAttributes;
369
370        log.trace("hasAttribute start: nAttributes = {}", nAttributes);
371        if (obj_info.num_attrs < 0) {
372            int did = open();
373            if (did >= 0) {
374                int tid = -1;
375                obj_info.num_attrs = 0;
376
377                try {
378                    obj_info = H5.H5Oget_info(did);
379                    nAttributes = (int) obj_info.num_attrs;
380
381                    tid = H5.H5Dget_type(did);
382
383                    int tclass = H5.H5Tget_class(tid);
384                    isText = (tclass == HDF5Constants.H5T_STRING);
385                    isVLEN = ((tclass == HDF5Constants.H5T_VLEN) || H5.H5Tis_variable_str(tid));
386                    isEnum = (tclass == HDF5Constants.H5T_ENUM);
387                    log.trace("hasAttribute: obj_info.num_attrs={} with tclass type: isText={},isVLEN={},isEnum={}", nAttributes, isText, isVLEN, isEnum);
388                }
389                catch (Exception ex) {
390                    obj_info.num_attrs = 0;
391                    log.debug("hasAttribute: get object info:", ex);
392                }
393                finally {
394                    try {H5.H5Tclose(tid);} catch (HDF5Exception ex) {}
395                }
396
397                if(nAttributes > 0) {
398                    // check whether the dataset is an image
400                    Object avalue = getAttrValue(did, "CLASS");
401                    if (avalue != null) {
402                        try {
403                            isImageDisplay = isImage = "IMAGE".equalsIgnoreCase(new String((byte[]) avalue).trim());
404                            log.trace("hasAttribute: isImageDisplay dataset: {} with value = {}", isImageDisplay, avalue);
405                        }
406                        catch (Throwable err) {
407                            log.debug("check image:", err);
408                        }
409                    }
410
411                    // retrieve the IMAGE_MINMAXRANGE
412                    avalue = getAttrValue(did, "IMAGE_MINMAXRANGE");
413                    if (avalue != null) {
414                        double x0 = 0, x1 = 0;
415                        try {
416                            x0 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
417                            x1 = Double.valueOf(java.lang.reflect.Array.get(avalue, 1).toString()).doubleValue();
418                        }
419                        catch (Exception ex2) {
420                            x0 = x1 = 0;
421                        }
422                        if (x1 > x0) {
423                            imageDataRange = new double[2];
424                            imageDataRange[0] = x0;
425                            imageDataRange[1] = x1;
426                        }
427                    }
428
429                    try {
430                        checkCFconvention(did);
431                    }
432                    catch (Exception ex) {
433                        log.debug("checkCFconvention({}):", did, ex);
434                    }
435                }
436                close(did);
437            }
438            else {
439                log.debug("could not open dataset");
440            }
441        }
442        log.trace("hasAttribute exit");
443
444        return (obj_info.num_attrs > 0);
445    }
446
447    /*
448     * (non-Javadoc)
449     *
450     * @see hdf.object.Dataset#getDatatype()
451     */
452    @Override
453    public Datatype getDatatype() {
454        if (datatype == null) {
455            log.trace("H5ScalarDS getDatatype: datatype == null");
456            int did = -1, tid = -1;
457
458            did = open();
459            if (did >= 0) {
460                try {
461                    tid = H5.H5Dget_type(did);
462
463                    log.trace("H5ScalarDS getDatatype: isNativeDatatype={}", isNativeDatatype);
464                    if (!isNativeDatatype) {
465                        int tmptid = -1;
466                        try {
467                            tmptid = tid;
468                            tid = H5.H5Tget_native_type(tmptid);
469                        }
470                        finally {
471                            try {
472                                H5.H5Tclose(tmptid);
473                            }
474                            catch (Exception ex2) {
475                                log.debug("finally close:", ex2);
476                            }
477                        }
478                    }
479                    datatype = new H5Datatype(tid);
480                }
481                catch (Exception ex) {
482                    log.debug("new H5Datatype:", ex);
483                }
484                finally {
485                    try {
486                        H5.H5Tclose(tid);
487                    }
488                    catch (HDF5Exception ex) {
489                        log.debug("finally close:", ex);
490                    }
491                    try {
492                        H5.H5Dclose(did);
493                    }
494                    catch (HDF5Exception ex) {
495                        log.debug("finally close:", ex);
496                    }
497                }
498            }
499        }
500
501        return datatype;
502    }
503
504    /*
505     * (non-Javadoc)
506     *
507     * @see hdf.object.Dataset#clear()
508     */
509    @Override
510    public void clear() {
511        super.clear();
512
513        if (attributeList != null) {
514            ((Vector<Attribute>) attributeList).setSize(0);
515        }
516    }
517
518    /*
519     * (non-Javadoc)
520     *
521     * @see hdf.object.Dataset#readBytes()
522     */
523    @Override
524    public byte[] readBytes() throws HDF5Exception {
525        byte[] theData = null;
526
527        log.trace("H5ScalarDS readBytes: start");
528        if (rank <= 0) {
529            init();
530        }
531
532        int did = open();
533        if (did >= 0) {
534            int fspace = -1, mspace = -1, tid = -1;
535
536            try {
537                long[] lsize = { 1 };
538                for (int j = 0; j < selectedDims.length; j++) {
539                    lsize[0] *= selectedDims[j];
540                }
541
542                fspace = H5.H5Dget_space(did);
543                mspace = H5.H5Screate_simple(rank, selectedDims, null);
544
545                // set the rectangle selection
546                // HDF5 bug: for scalar dataset, H5Sselect_hyperslab gives core dump
547                if (rank * dims[0] > 1) {
548                    H5.H5Sselect_hyperslab(fspace, HDF5Constants.H5S_SELECT_SET, startDims, selectedStride,
549                            selectedDims, null); // set block size to 1
552                }
553
554                tid = H5.H5Dget_type(did);
555                int size = H5.H5Tget_size(tid) * (int) lsize[0];
556                log.trace("H5ScalarDS readBytes: size = {}", size);
557                theData = new byte[size];
558                H5.H5Dread(did, tid, mspace, fspace, HDF5Constants.H5P_DEFAULT, theData);
559            }
560            finally {
561                try {
562                    H5.H5Sclose(fspace);
563                }
564                catch (Exception ex2) {
565                    log.debug("finally close:", ex2);
566                }
567                try {
568                    H5.H5Sclose(mspace);
569                }
570                catch (Exception ex2) {
571                    log.debug("finally close:", ex2);
572                }
573                try {
574                    H5.H5Tclose(tid);
575                }
576                catch (HDF5Exception ex2) {
577                    log.debug("finally close:", ex2);
578                }
579                close(did);
580            }
581        }
582        log.trace("H5ScalarDS readBytes: finish");
583
584        return theData;
585    }
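
    // Example (sketch): read the raw bytes of the current selection; the length of
    // the returned array is the number of selected points times the datatype size.
    //
    //   byte[] raw = dset.readBytes();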
586
587    /*
588     * (non-Javadoc)
589     *
590     * @see hdf.object.Dataset#read()
591     */
592    @Override
593    public Object read() throws Exception {
594        Object theData = null;
595        int did = -1;
596        int tid = -1;
597        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
598
599        log.trace("H5ScalarDS read: start");
600        if (rank <= 0) {
601            init(); // read data information into memory
602        }
603
604        if (isArrayOfCompound)
605            throw new HDF5Exception("Cannot show data with datatype of ARRAY of COMPOUND.");
606        if (isArrayOfVLEN)
607            throw new HDF5Exception("Cannot show data with datatype of ARRAY of VL.");
608
609        if (isExternal) {
610            String pdir = this.getFileFormat().getAbsoluteFile().getParent();
611
612            if (pdir == null) {
613                pdir = ".";
614            }
615            System.setProperty("user.dir", pdir);//H5.H5Dchdir_ext(pdir);
616        }
617
618        boolean isREF = false;
619        long[] lsize = { 1 };
620        log.trace("H5ScalarDS read: open dataset");
621        did = open();
622        if (did >= 0) {
623            try {
624                lsize[0] = selectHyperslab(did, spaceIDs);
625                log.trace("H5ScalarDS read: opened dataset size {} for {}", lsize[0], nPoints);
626
627                if (lsize[0] == 0) {
628                    throw new HDF5Exception("No data to read.\nEither the dataset or the selected subset is empty.");
629                }
630
631                if (log.isDebugEnabled()) {
632                    // check if storage space is allocated
633                    try {
634                        long ssize = H5.H5Dget_storage_size(did);
635                        log.trace("Storage space allocated = {}.", ssize);
636                    }
637                    catch (Exception ex) {
638                        log.debug("check if storage space is allocated:", ex);
639                    }
640                }
641
642                tid = H5.H5Dget_type(did);
643                log.trace("H5ScalarDS read: H5Tget_native_type:");
644                log.trace("H5ScalarDS read: isNativeDatatype={}", isNativeDatatype);
645                if (!isNativeDatatype) {
646                    int tmptid = -1;
647                    try {
648                        tmptid = tid;
649                        tid = H5.H5Tget_native_type(tmptid);
650                    }
651                    finally {
652                        try {H5.H5Tclose(tmptid);}
653                        catch (Exception ex2) {log.debug("finally close:", ex2);}
654                    }
655                }
656
657                isREF = (H5.H5Tequal(tid, HDF5Constants.H5T_STD_REF_OBJ));
658
659                log.trace("H5ScalarDS read: originalBuf={} isText={} isREF={} lsize[0]={} nPoints={}", originalBuf, isText, isREF, lsize[0], nPoints);
660                if ((originalBuf == null) || isEnum || isText || isREF || ((originalBuf != null) && (lsize[0] != nPoints))) {
661                    try {
662                        theData = H5Datatype.allocateArray(tid, (int) lsize[0]);
663                    }
664                    catch (OutOfMemoryError err) {
665                        throw new HDF5Exception("Out Of Memory.");
666                    }
667                }
668                else {
669                    theData = originalBuf; // reuse the buffer if the size is the same
671                }
672
673                if (theData != null) {
674                    if (isVLEN) {
675                        log.trace("H5ScalarDS read: H5DreadVL");
676                        H5.H5DreadVL(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, (Object[]) theData);
677                    }
678                    else {
679                        log.trace("H5ScalarDS read: H5Dread did={} spaceIDs[0]={} spaceIDs[1]={}", did, spaceIDs[0], spaceIDs[1]);
680                        H5.H5Dread(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, theData);
681                    }
682                } // if (theData != null)
683            }
684            catch (HDF5DataFiltersException exfltr) {
685                log.debug("H5ScalarDS read: read filter failure:", exfltr);
686                throw new Exception("Filter not available exception: " + exfltr.getMessage(), exfltr);
687            }
688            catch (HDF5Exception h5ex) {
689                log.debug("H5ScalarDS read: read failure", h5ex);
690                throw new HDF5Exception(h5ex.toString());
691            }
692            finally {
693                try {
694                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
695                        H5.H5Sclose(spaceIDs[0]);
696                }
697                catch (Exception ex2) {
698                    log.debug("read: finally close:", ex2);
699                }
700                try {
701                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
702                        H5.H5Sclose(spaceIDs[1]);
703                }
704                catch (Exception ex2) {
705                    log.debug("read: finally close:", ex2);
706                }
707                try {
708                    if (isText && convertByteToString) {
709                        log.trace("H5ScalarDS read: H5Dread convertByteToString");
710                        theData = byteToString((byte[]) theData, H5.H5Tget_size(tid));
711                    }
712                    else if (isREF) {
713                        log.trace("H5ScalarDS read: H5Dread isREF");
714                        theData = HDFNativeData.byteToLong((byte[]) theData);
715                    }
716                    else if (isEnum && isEnumConverted()) {
717                        log.trace("H5ScalarDS read: H5Dread isEnum theData={}", theData);
718                        theData = H5Datatype.convertEnumValueToName(tid, theData, null);
719                    }
720                }
721                catch (Exception ex) {
722                    log.debug("H5ScalarDS read: convert data:", ex);
723                }
724                try {H5.H5Tclose(tid);}
725                catch (Exception ex2) {log.debug("finally close:", ex2);}
726
727                close(did);
728            }
729        }
730
731        log.trace("H5ScalarDS read: finish");
732        return theData;
733    }
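
    // Example (sketch): read a subset of a 2D dataset. Assumes the getStartDims()
    // and getSelectedDims() accessors from hdf.object.Dataset; the offsets and
    // sizes below are illustrative.
    //
    //   dset.init();
    //   long[] start    = dset.getStartDims();
    //   long[] selected = dset.getSelectedDims();
    //   start[0] = 10;       start[1] = 0;     // offset of the hyperslab
    //   selected[0] = 100;   selected[1] = 50; // size of the hyperslab
    //   Object data = dset.read();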
734
735
736    /**
737     * Writes the given data buffer into this dataset in a file.
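     * <p>
     * A minimal sketch (the element type and buffer length must match the dataset's
     * datatype and current selection; names here are illustrative):
     *
     * <pre>
     * int[] values = new int[100];
     * // ... fill values ...
     * dset.write(values);
     * </pre>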
738     *
739     * @param buf
740     *            The buffer that contains the data values.
741     *
742     * @throws HDF5Exception
743     *             If there is an error at the HDF5 library level.
744     */
745    @Override
746    public void write(Object buf) throws HDF5Exception {
747        log.trace("H5ScalarDS write: start");
748        int did = -1;
749        int tid = -1;
750        int spaceIDs[] = { -1, -1 }; // spaceIDs[0]=mspace, spaceIDs[1]=fspace
751        Object tmpData = null;
752
753        if (buf == null) {
754            return;
755        }
756
757        if (isVLEN && !isText) {
758            log.trace("H5ScalarDS write: VL data={}", buf);
759            throw (new HDF5Exception("Writing non-string variable-length data is not supported"));
760        }
761        else if (isRegRef) {
762            throw (new HDF5Exception("Writing region references data is not supported"));
763        }
764
765        long[] lsize = { 1 };
766        did = open();
767        log.trace("H5ScalarDS write: dataset opened");
768        if (did >= 0) {
769            try {
770                lsize[0] = selectHyperslab(did, spaceIDs);
771                tid = H5.H5Dget_type(did);
772
773                log.trace("H5ScalarDS write: isNativeDatatype={}", isNativeDatatype);
774                if (!isNativeDatatype) {
775                    int tmptid = -1;
776                    try {
777                        tmptid = tid;
778                        tid = H5.H5Tget_native_type(tmptid);
779                    }
780                    finally {
781                        try {H5.H5Tclose(tmptid);}
782                        catch (Exception ex2) {log.debug("finally close:", ex2);}
783                    }
784                }
785
786                isText = (H5.H5Tget_class(tid) == HDF5Constants.H5T_STRING);
787
788                // check if need to convert integer data
789                int tsize = H5.H5Tget_size(tid);
790                String cname = buf.getClass().getName();
791                char dname = cname.charAt(cname.lastIndexOf("[") + 1);
792                boolean doConversion = (((tsize == 1) && (dname == 'S')) || ((tsize == 2) && (dname == 'I'))
793                        || ((tsize == 4) && (dname == 'J')) || (isUnsigned && unsignedConverted));
794                log.trace("H5ScalarDS write: tsize={} cname={} dname={} doConversion={}", tsize, cname, dname,
795                        doConversion);
796
797                tmpData = buf;
798                if (doConversion) {
799                    tmpData = convertToUnsignedC(buf, null);
800                }
801                // do not convert v-len strings, regardless of the conversion request type
803                else if (isText && convertByteToString && !H5.H5Tis_variable_str(tid)) {
804                    tmpData = stringToByte((String[]) buf, H5.H5Tget_size(tid));
805                }
806                else if (isEnum && (Array.get(buf, 0) instanceof String)) {
807                    tmpData = H5Datatype.convertEnumNameToValue(tid, (String[]) buf, null);
808                }
809
810                H5.H5Dwrite(did, tid, spaceIDs[0], spaceIDs[1], HDF5Constants.H5P_DEFAULT, tmpData);
811
812            }
813            finally {
814                tmpData = null;
815                try {
816                    if (HDF5Constants.H5S_ALL != spaceIDs[0])
817                        H5.H5Sclose(spaceIDs[0]);
818                }
819                catch (Exception ex2) {
820                    log.debug("write: finally close:", ex2);
821                }
822                try {
823                    if (HDF5Constants.H5S_ALL != spaceIDs[1])
824                        H5.H5Sclose(spaceIDs[1]);
825                }
826                catch (Exception ex2) {
827                    log.debug("write: finally close:", ex2);
828                }
829                try {
830                    H5.H5Tclose(tid);
831                }
832                catch (Exception ex2) {
833                    log.debug("write: finally close:", ex2);
834                }
835            }
836            close(did);
837        }
838        log.trace("H5ScalarDS write: finish");
839    }
840
841    /**
842     * Sets up the hyperslab selection.
843     *
844     * @param did
845     *            IN dataset ID
846     * @param spaceIDs
847     *            IN/OUT memory and file space IDs -- spaceIDs[0]=mspace, spaceIDs[1]=fspace
848     *
849     * @return the total number of data points selected
850     *
851     * @throws HDF5Exception
852     *             If there is an error at the HDF5 library level.
853     */
854    private long selectHyperslab(int did, int[] spaceIDs) throws HDF5Exception {
855        long lsize = 1;
856
857        boolean isAllSelected = true;
858        for (int i = 0; i < rank; i++) {
859            lsize *= selectedDims[i];
860            if (selectedDims[i] < dims[i]) {
861                isAllSelected = false;
862            }
863        }
864
865        if (isAllSelected) {
866            spaceIDs[0] = HDF5Constants.H5S_ALL;
867            spaceIDs[1] = HDF5Constants.H5S_ALL;
868        }
869        else {
870            spaceIDs[1] = H5.H5Dget_space(did);
871
872            // Using a 1D dataspace with a chunked dataset makes reading very slow.
873            // This is a known problem in the HDF5 library for chunked datasets.
875            // mspace = H5.H5Screate_simple(1, lsize, null);
876            spaceIDs[0] = H5.H5Screate_simple(rank, selectedDims, null);
877            H5.H5Sselect_hyperslab(spaceIDs[1], HDF5Constants.H5S_SELECT_SET, startDims, selectedStride, selectedDims,
878                    null);
879        }
880
881        if ((rank > 1) && (selectedIndex[0] > selectedIndex[1]))
882            isDefaultImageOrder = false;
883        else
884            isDefaultImageOrder = true;
885
886        return lsize;
887    }
888
889    /*
890     * (non-Javadoc)
891     *
892     * @see hdf.object.DataFormat#getMetadata()
893     */
894    public List<Attribute> getMetadata() throws HDF5Exception {
895        return this.getMetadata(fileFormat.getIndexType(null), fileFormat.getIndexOrder(null));
896    }
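
    // Example (sketch): list the names of this dataset's attributes.
    //
    //   for (Attribute attr : dset.getMetadata())
    //       System.out.println(attr.getName());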
897
898    /*
899     * (non-Javadoc)
900     *
901     * @see hdf.object.DataFormat#getMetadata(int...)
902     */
903    public List<Attribute> getMetadata(int... attrPropList) throws HDF5Exception {
904        if (rank <= 0) {
905            init();
906        }
907        log.trace("getMetadata: inited");
908
909        try {
910            this.linkTargetObjName = H5File.getLinkTargetName(this);
911        }
912        catch (Exception ex) {
913            log.debug("getLinkTargetName failed: ", ex);
914        }
915
916        if (attributeList != null) {
917            log.trace("getMetadata: attributeList != null");
918            return attributeList;
919        }
920
921        // load attributes first
922        int did = -1, pid = -1;
923        int indxType = fileFormat.getIndexType(null);
924        int order = fileFormat.getIndexOrder(null);
925
926        if (attrPropList.length > 0) {
927            indxType = attrPropList[0];
928            if (attrPropList.length > 1) {
929                order = attrPropList[1];
930            }
931        }
932        log.trace("getMetadata: open dataset");
933        did = open();
934        if (did >= 0) {
935            log.trace("getMetadata: dataset opened");
936            try {
937                compression = "";
938                attributeList = H5File.getAttribute(did, indxType, order);
939                log.trace("getMetadata: attributeList loaded");
940
941                // get the compression and chunk information
942                pid = H5.H5Dget_create_plist(did);
943                long storage_size = H5.H5Dget_storage_size(did);
944                int nfilt = H5.H5Pget_nfilters(pid);
945                if (H5.H5Pget_layout(pid) == HDF5Constants.H5D_CHUNKED) {
946                    chunkSize = new long[rank];
947                    H5.H5Pget_chunk(pid, rank, chunkSize);
948                    if(nfilt > 0) {
949                        long    nelmts = 1;
950                        long    uncomp_size;
951                        long    datum_size = getDatatype().getDatatypeSize();
952                        if (datum_size < 0) {
953                            int tmptid = -1;
954                            try {
955                                tmptid = H5.H5Dget_type(did);
956                                datum_size = H5.H5Tget_size(tmptid);
957                            }
958                            finally {
959                                try {H5.H5Tclose(tmptid);}
960                                catch (Exception ex2) {log.debug("finally close:", ex2);}
961                            }
962                        }
963
964
965                        for(int i = 0; i < rank; i++) {
966                            nelmts *= dims[i];
967                        }
968                        uncomp_size = nelmts * datum_size;
969
970                        /* compression ratio = uncompressed size /  compressed size */
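                        /* e.g. 1,000,000 bytes of raw data stored in 250,000 bytes gives "4.000:1" */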
971
972                        if(storage_size != 0) {
973                            double ratio = (double) uncomp_size / (double) storage_size;
974                            DecimalFormat df = new DecimalFormat();
975                            df.setMinimumFractionDigits(3);
976                            df.setMaximumFractionDigits(3);
977                            compression +=  df.format(ratio) + ":1";
978                        }
979                    }
980                }
981                else {
982                    chunkSize = null;
983                }
984
985                int[] flags = { 0, 0 };
986                long[] cd_nelmts = { 20 };
987                int[] cd_values = new int[(int) cd_nelmts[0]];
988                String[] cd_name = { "", "" };
989                log.trace("getMetadata: {} filters in pipeline", nfilt);
990                int filter = -1;
991                int[] filter_config = { 1 };
992                filters = "";
993
994                for (int i = 0, k = 0; i < nfilt; i++) {
995                    log.trace("getMetadata: filter[{}]", i);
996                    if (i > 0) {
997                        filters += ", ";
998                    }
999                    if (k > 0) {
1000                        compression += ", ";
1001                    }
1002
1003                    try {
1004                        cd_nelmts[0] = 20;
1005                        cd_values = new int[(int) cd_nelmts[0]];
1007                        filter = H5.H5Pget_filter(pid, i, flags, cd_nelmts, cd_values, 120, cd_name, filter_config);
1008                        log.trace("getMetadata: filter[{}] is {} has {} elements ", i, cd_name[0], cd_nelmts[0]);
1009                        for (int j = 0; j < cd_nelmts[0]; j++) {
1010                            log.trace("getMetadata: filter[{}] element {} = {}", i, j, cd_values[j]);
1011                        }
1012                    }
1013                    catch (Throwable err) {
1014                        filters += "ERROR";
1015                        continue;
1016                    }
1017
1018                    if (filter == HDF5Constants.H5Z_FILTER_NONE) {
1019                        filters += "NONE";
1020                    }
1021                    else if (filter == HDF5Constants.H5Z_FILTER_DEFLATE) {
1022                        filters += "GZIP";
1023                        compression += compression_gzip_txt + cd_values[0];
1024                        k++;
1025                    }
1026                    else if (filter == HDF5Constants.H5Z_FILTER_FLETCHER32) {
1027                        filters += "Error detection filter";
1028                    }
1029                    else if (filter == HDF5Constants.H5Z_FILTER_SHUFFLE) {
1030                        filters += "SHUFFLE: Nbytes = " + cd_values[0];
1031                    }
1032                    else if (filter == HDF5Constants.H5Z_FILTER_NBIT) {
1033                        filters += "NBIT";
1034                    }
1035                    else if (filter == HDF5Constants.H5Z_FILTER_SCALEOFFSET) {
1036                        filters += "SCALEOFFSET: MIN BITS = " + cd_values[0];
1037                    }
1038                    else if (filter == HDF5Constants.H5Z_FILTER_SZIP) {
1039                        filters += "SZIP";
1040                        compression += "SZIP: Pixels per block = " + cd_values[1];
1041                        k++;
1042                        int flag = -1;
1043                        try {
1044                            flag = H5.H5Zget_filter_info(filter);
1045                        }
1046                        catch (Exception ex) {
1047                            flag = -1;
1048                        }
1049                        if (flag == HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) {
1050                            compression += ": H5Z_FILTER_CONFIG_DECODE_ENABLED";
1051                        }
1052                        else if ((flag == HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED)
1053                                || (flag >= (HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED + HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED))) {
1054                            compression += ": H5Z_FILTER_CONFIG_ENCODE_ENABLED";
1055                        }
1056                    }
1057                    else {
1058                        filters += "USERDEFINED " + cd_name[0] + "(" + filter + "): ";
1059                        for (int j = 0; j < cd_nelmts[0]; j++) {
1060                            if (j > 0)
1061                                filters += ", ";
1062                            filters += cd_values[j];
1063                        }
1064                        log.debug("getMetadata: filter[{}] is user defined compression", i);
1065                    }
1066                } // for (int i=0; i<nfilt; i++)
1067
1068                if (compression.length() == 0) {
1069                    compression = "NONE";
1070                }
1071                log.trace("getMetadata: filter compression={}", compression);
1072
1073                if (filters.length() == 0) {
1074                    filters = "NONE";
1075                }
1076                log.trace("getMetadata: filter information={}", filters);
1077
1078                storage = "" + storage_size;
1079                try {
1080                    int[] at = { 0 };
1081                    H5.H5Pget_alloc_time(pid, at);
1082                    storage += ", allocation time: ";
1083                    if (at[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY) {
1084                        storage += "Early";
1085                    }
1086                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_INCR) {
1087                        storage += "Incremental";
1088                    }
1089                    else if (at[0] == HDF5Constants.H5D_ALLOC_TIME_LATE) {
1090                        storage += "Late";
1091                    }
1092                }
1093                catch (Exception ex) {
1094                    log.debug("Storage allocation time:", ex);
1095                }
1096                if (storage.length() == 0) {
1097                    storage = "NONE";
1098                }
1099                log.trace("getMetadata: storage={}", storage);
1100            }
1101            finally {
1102                try {
1103                    H5.H5Pclose(pid);
1104                }
1105                catch (Exception ex) {
1106                    log.debug("finally close:", ex);
1107                }
1108                close(did);
1109            }
1110        }
1111
1112        log.trace("getMetadata: finish");
1113        return attributeList;
1114    }
1115
1116    /*
1117     * (non-Javadoc)
1118     *
1119     * @see hdf.object.DataFormat#writeMetadata(java.lang.Object)
1120     */
1121    public void writeMetadata(Object info) throws Exception {
1122        // only attribute metadata is supported.
1123        if (!(info instanceof Attribute)) {
1124            return;
1125        }
1126
1127        boolean attrExisted = false;
1128        Attribute attr = (Attribute) info;
1129        log.trace("writeMetadata: {}", attr.getName());
1130
1131        if (attributeList == null) {
1132            this.getMetadata();
1133        }
1134
1135        if (attributeList != null)
1136            attrExisted = attributeList.contains(attr);
1137
1138        getFileFormat().writeAttribute(this, attr, attrExisted);
1139        // add the new attribute into attribute list
1140        if (!attrExisted) {
1141            attributeList.add(attr);
1142            nAttributes = attributeList.size();
1143        }
1144    }
1145
1146    /*
1147     * (non-Javadoc)
1148     *
1149     * @see hdf.object.DataFormat#removeMetadata(java.lang.Object)
1150     */
1151    public void removeMetadata(Object info) throws HDF5Exception {
1152        // only attribute metadata is supported.
1153        if (!(info instanceof Attribute)) {
1154            return;
1155        }
1156
1157        Attribute attr = (Attribute) info;
1158        log.trace("removeMetadata: {}", attr.getName());
1159        int did = open();
1160        if (did >= 0) {
1161            try {
1162                H5.H5Adelete(did, attr.getName());
1163                List<Attribute> attrList = getMetadata();
1164                attrList.remove(attr);
1165                nAttributes = attrList.size();
1166            }
1167            finally {
1168                close(did);
1169            }
1170        }
1171    }
1172
1173    /*
1174     * (non-Javadoc)
1175     *
1176     * @see hdf.object.DataFormat#updateMetadata(java.lang.Object)
1177     */
1178    public void updateMetadata(Object info) throws HDF5Exception {
1179        // only attribute metadata is supported.
1180        if (!(info instanceof Attribute)) {
1181            return;
1182        }
1183        log.trace("updateMetadata");
1184
1185        Attribute attr = (Attribute) info;
1186        log.trace("updateMetadata: {}", attr.getName());
1187        nAttributes = -1;
1188    }
1189
1190    /*
1191     * (non-Javadoc)
1192     *
1193     * @see hdf.object.HObject#setName(java.lang.String)
1194     */
1195    @Override
1196    public void setName(String newName) throws Exception {
1197        H5File.renameObject(this, newName);
1198        super.setName(newName);
1199    }
1200
1201    /**
1202     * Resets the dataspace selection to the default.
1203     */
1204    private void resetSelection() {
1205        log.trace("resetSelection: start");
1206
1207        for (int i = 0; i < rank; i++) {
1208            startDims[i] = 0;
1209            selectedDims[i] = 1;
1210            if (selectedStride != null) {
1211                selectedStride[i] = 1;
1212            }
1213        }
1214
1215        if (interlace == INTERLACE_PIXEL) {
1216            // 24-bit TRUE color image
1217            // [height][width][pixel components]
1218            selectedDims[2] = 3;
1219            selectedDims[0] = dims[0];
1220            selectedDims[1] = dims[1];
1221            selectedIndex[0] = 0; // index for height
1222            selectedIndex[1] = 1; // index for width
1223            selectedIndex[2] = 2; // index for depth
1224        }
1225        else if (interlace == INTERLACE_PLANE) {
1226            // 24-bit TRUE color image
1227            // [pixel components][height][width]
1228            selectedDims[0] = 3;
1229            selectedDims[1] = dims[1];
1230            selectedDims[2] = dims[2];
1231            selectedIndex[0] = 1; // index for height
1232            selectedIndex[1] = 2; // index for width
1233            selectedIndex[2] = 0; // index for depth
1234        }
1235        else if (rank == 1) {
1236            selectedIndex[0] = 0;
1237            selectedDims[0] = dims[0];
1238        }
1239        else if (rank == 2) {
1240            selectedIndex[0] = 0;
1241            selectedIndex[1] = 1;
1242            selectedDims[0] = dims[0];
1243            selectedDims[1] = dims[1];
1244        }
1245        else if (rank > 2) {
1246            // hdf-java 2.5 version: 3D dataset is arranged in the order of
1247            // [frame][height][width] by default
1248            // selectedIndex[1] = rank-1; // width, the fastest dimension
1249            // selectedIndex[0] = rank-2; // height
1250            // selectedIndex[2] = rank-3; // frames
1251
1252            //
1253            // (5/4/09) Modified the default dimension order. See bug #1379.
1254            // We changed the default order to the following. In most situations,
1255            // users want to use the natural order of
1256            // selectedIndex[0] = 0
1257            // selectedIndex[1] = 1
1258            // selectedIndex[2] = 2
1259            // Most NPOESS data is in the order above.
1260
1261            if (isImage) {
1262                // 3D dataset is arranged in the order of [frame][height][width]
1263                selectedIndex[1] = rank - 1; // width, the fastest dimension
1264                selectedIndex[0] = rank - 2; // height
1265                selectedIndex[2] = rank - 3; // frames
1266            }
1267            else {
1268                selectedIndex[0] = 0; // width, the fastest dimension
1269                selectedIndex[1] = 1; // height
1270                selectedIndex[2] = 2; // frames
1271            }
1272
1273            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1274            selectedDims[selectedIndex[1]] = dims[selectedIndex[1]];
1275        }
1276
1277        // by default, only a 1-D slice is selected for text data
1278        if ((rank > 1) && isText) {
1279            selectedIndex[0] = rank - 1;
1280            selectedIndex[1] = 0;
1281            selectedDims[0] = 1;
1282            selectedDims[selectedIndex[0]] = dims[selectedIndex[0]];
1283        }
1284
1285        isDataLoaded = false;
1286        isDefaultImageOrder = true;
1287        log.trace("resetSelection: finish");
1288    }
1289
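    /**
     * Creates a scalar dataset in a file with or without chunking and compression.
     * <p>
     * This convenience overload delegates to
     * {@link #create(String, Group, Datatype, long[], long[], long[], int, Object, Object)}
     * with a null fill value.
     *
     * @param name
     *            the name of the dataset to create.
     * @param pgroup
     *            parent group where the new dataset is created.
     * @param type
     *            the datatype of the dataset.
     * @param dims
     *            the dimension size of the dataset.
     * @param maxdims
     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
     * @param chunks
     *            the chunk size of the dataset. No chunking if chunk = null.
     * @param gzip
     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
     * @param data
     *            the array of data values.
     *
     * @return the new scalar dataset if successful; otherwise returns null.
     *
     * @throws Exception if there is a failure.
     */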
1290    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1291            long[] chunks, int gzip, Object data) throws Exception {
1292        return create(name, pgroup, type, dims, maxdims, chunks, gzip, null, data);
1293    }
1294
1295    /**
1296     * Creates a scalar dataset in a file with/without chunking and compression.
1297     * <p>
1298     * The following example shows how to create a string dataset using this function.
1299     *
1300     * <pre>
1301     * H5File file = new H5File(&quot;test.h5&quot;, H5File.CREATE);
1302     * int max_str_len = 120;
1303     * Datatype strType = new H5Datatype(Datatype.CLASS_STRING, max_str_len, -1, -1);
1304     * int size = 10000;
1305     * long dims[] = { size };
1306     * long chunks[] = { 1000 };
1307     * int gzip = 9;
1308     * String strs[] = new String[size];
1309     *
1310     * for (int i = 0; i &lt; size; i++)
1311     *     strs[i] = String.valueOf(i);
1312     *
1313     * file.open();
1314     * file.createScalarDS(&quot;/1D scalar strings&quot;, null, strType, dims, null, chunks, gzip, strs);
1315     *
1316     * try {
1317     *     file.close();
1318     * }
1319     * catch (Exception ex) {
1320     * }
1321     * </pre>
1322     *
1323     * @param name
1324     *            the name of the dataset to create.
1325     * @param pgroup
1326     *            parent group where the new dataset is created.
1327     * @param type
1328     *            the datatype of the dataset.
1329     * @param dims
1330     *            the dimension size of the dataset.
1331     * @param maxdims
1332     *            the max dimension size of the dataset. maxdims is set to dims if maxdims = null.
1333     * @param chunks
1334     *            the chunk size of the dataset. No chunking if chunk = null.
1335     * @param gzip
1336     *            GZIP compression level (1 to 9). No compression if gzip&lt;=0.
1337     * @param fillValue
1338     *            the default data value.
1339     * @param data
1340     *            the array of data values.
1341     *
1342     * @return the new scalar dataset if successful; otherwise returns null.
1343     *
1344     * @throws Exception if there is a failure.
1345     */
1346    public static Dataset create(String name, Group pgroup, Datatype type, long[] dims, long[] maxdims,
1347            long[] chunks, int gzip, Object fillValue, Object data) throws Exception {
1348        H5ScalarDS dataset = null;
1349        String fullPath = null;
1350        int did = -1, sid = -1, tid = -1, plist = -1;
1351
1352        log.trace("H5ScalarDS create start");
1353        if ((pgroup == null) || (name == null) || (dims == null) || ((gzip > 0) && (chunks == null))) {
1354            return null;
1355        }
1356
1357        H5File file = (H5File) pgroup.getFileFormat();
1358        if (file == null) {
1359            return null;
1360        }
1361
1362        String path = HObject.separator;
1363        if (!pgroup.isRoot()) {
1364            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
1365            if (name.endsWith("/")) {
1366                name = name.substring(0, name.length() - 1);
1367            }
1368            int idx = name.lastIndexOf("/");
1369            if (idx >= 0) {
1370                name = name.substring(idx + 1);
1371            }
1372        }
1373
1374        fullPath = path + name;
1375
1376        // setup chunking and compression
1377        boolean isExtentable = false;
1378        if (maxdims != null) {
1379            for (int i = 0; i < maxdims.length; i++) {
1380                if (maxdims[i] == 0) {
1381                    maxdims[i] = dims[i];
1382                }
1383                else if (maxdims[i] < 0) {
1384                    maxdims[i] = HDF5Constants.H5S_UNLIMITED;
1385                }
1386
1387                if (maxdims[i] != dims[i]) {
1388                    isExtentable = true;
1389                }
1390            }
1391        }
1392
1393        // HDF5 requires you to use chunking in order to define extendible
1394        // datasets. Chunking makes it possible to extend datasets efficiently,
1395        // without having to reorganize storage excessively. A default chunk size of
1396        // 64 in each dimension (capped at the dimension size) generally gives good performance.
1397        if ((chunks == null) && isExtentable) {
1398            chunks = new long[dims.length];
1399            for (int i = 0; i < dims.length; i++)
1400                chunks[i] = Math.min(dims[i], 64);
1401        }
1402
1403        // prepare the dataspace and datatype
1404        int rank = dims.length;
1405
1406        if ((tid = type.toNative()) >= 0) {
1407            try {
1408                sid = H5.H5Screate_simple(rank, dims, maxdims);
1409
1410                // figure out creation properties
1411                plist = HDF5Constants.H5P_DEFAULT;
1412
1413                byte[] val_fill = null;
1414                try {
1415                    val_fill = parseFillValue(type, fillValue);
1416                }
1417                catch (Exception ex) {
1418                    log.debug("fill value:", ex);
1419                }
1420
1421                if (chunks != null || val_fill != null) {
1422                    plist = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
1423
1424                    if (chunks != null) {
1425                        H5.H5Pset_layout(plist, HDF5Constants.H5D_CHUNKED);
1426                        H5.H5Pset_chunk(plist, rank, chunks);
1427
1428                        // compression requires chunking
1429                        if (gzip > 0) {
1430                            H5.H5Pset_deflate(plist, gzip);
1431                        }
1432                    }
1433
1434                    if (val_fill != null) {
1435                        H5.H5Pset_fill_value(plist, tid, val_fill);
1436                    }
1437                }
1438
1439                int fid = file.getFID();
1440
1441                log.trace("H5ScalarDS create dataset");
1442                did = H5.H5Dcreate(fid, fullPath, tid, sid, HDF5Constants.H5P_DEFAULT, plist, HDF5Constants.H5P_DEFAULT);
1443                log.trace("H5ScalarDS create H5ScalarDS");
1444                dataset = new H5ScalarDS(file, name, path);
1445            }
1446            finally {
1447                try {
1448                    H5.H5Pclose(plist);
1449                }
1450                catch (HDF5Exception ex) {
1451                    log.debug("create finally close:", ex);
1452                }
1453                try {
1454                    H5.H5Sclose(sid);
1455                }
1456                catch (HDF5Exception ex) {
1457                    log.debug("create finally close:", ex);
1458                }
1459                try {
1460                    H5.H5Tclose(tid);
1461                }
1462                catch (HDF5Exception ex) {
1463                    log.debug("create finally close:", ex);
1464                }
1465                try {
1466                    H5.H5Dclose(did);
1467                }
1468                catch (HDF5Exception ex) {
1469                    log.debug("create finally close:", ex);
1470                }
1471            }
1472        }
1473
1474        if (dataset != null) {
1475            pgroup.addToMemberList(dataset);
1476            if (data != null) {
1477                dataset.init();
1478                long selected[] = dataset.getSelectedDims();
1479                for (int i = 0; i < rank; i++) {
1480                    selected[i] = dims[i];
1481                }
1482                dataset.write(data);
1483            }
1484        }
1485        log.trace("H5ScalarDS create finish");
1486
1487        return dataset;
1488    }
1489
1490    // check _FillValue, valid_min, valid_max, and valid_range
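    // For example (hypothetical attribute values): valid_range = {0, 255} sets imageDataRange to
    // [0, 255]; if valid_range is absent, valid_min and valid_max are consulted individually.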
1491    private void checkCFconvention(int oid) throws Exception {
1492        Object avalue = getAttrValue(oid, "_FillValue");
1493
1494        if (avalue != null) {
1495            int n = Array.getLength(avalue);
1496            for (int i = 0; i < n; i++)
1497                addFilteredImageValue((Number) Array.get(avalue, i));
1498        }
1499
1500        if (imageDataRange == null || imageDataRange[1] <= imageDataRange[0]) {
1501            double x0 = 0, x1 = 0;
1502            avalue = getAttrValue(oid, "valid_range");
1503            if (avalue != null) {
1504                try {
1505                    x0 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
1506                    x1 = Double.valueOf(java.lang.reflect.Array.get(avalue, 1).toString()).doubleValue();
1507                    imageDataRange = new double[2];
1508                    imageDataRange[0] = x0;
1509                    imageDataRange[1] = x1;
1510                    return;
1511                }
1512                catch (Exception ex) {
1513                    log.debug("valid_range:", ex);
1514                }
1515            }
1516
1517            avalue = getAttrValue(oid, "valid_min");
1518            if (avalue != null) {
1519                try {
1520                    x0 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
1521                }
1522                catch (Exception ex) {
1523                    log.debug("valid_min:", ex);
1524                }
1525                avalue = getAttrValue(oid, "valid_max");
1526                if (avalue != null) {
1527                    try {
1528                        x1 = Double.valueOf(java.lang.reflect.Array.get(avalue, 0).toString()).doubleValue();
1529                        imageDataRange = new double[2];
1530                        imageDataRange[0] = x0;
1531                        imageDataRange[1] = x1;
1532                    }
1533                    catch (Exception ex) {
1534                        log.debug("valid_max:", ex);
1535                    }
1536                }
1537            }
1538        } // if (imageDataRange==null || imageDataRange[1]<=imageDataRange[0])
1539    }
1540
1541    private Object getAttrValue(int oid, String aname) {
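        // Look up the named attribute on the given object identifier; if found, read its value into
        // a Java array (converting unsigned C types when necessary), otherwise return null.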
1542        int aid = -1, atid = -1, asid = -1;
1543        Object avalue = null;
1544        log.trace("getAttrValue: start name={}", aname);
1545
1546        try {
1547            // try to find attribute name
1548            aid = H5.H5Aopen_by_name(oid, ".", aname, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1549        }
1550        catch (HDF5LibraryException ex5) {
1551            log.debug("Failed to find attribute {} : Expected", aname);
1552        }
1553        catch (Exception ex) {
1554            log.debug("try to find attribute {}:", aname, ex);
1555        }
1556        if (aid > 0) {
1557            try {
1558                atid = H5.H5Aget_type(aid);
1559                int tmptid = atid;
1560                atid = H5.H5Tget_native_type(tmptid);
1561                try {
1562                    H5.H5Tclose(tmptid);
1563                }
1564                catch (Exception ex) {
1565                    log.debug("close H5Aget_type after getting H5Tget_native_type:", ex);
1566                }
1567
1568                asid = H5.H5Aget_space(aid);
1569                long adims[] = null;
1570
1571                int arank = H5.H5Sget_simple_extent_ndims(asid);
1572                if (arank > 0) {
1573                    adims = new long[arank];
1574                    H5.H5Sget_simple_extent_dims(asid, adims, null);
1575                }
1576                log.trace("getAttrValue: adims={}", adims);
1577
1578                // retrieve the attribute value
1579                long lsize = 1;
1580                if (adims != null) {
1581                    for (int j = 0; j < adims.length; j++) {
1582                        lsize *= adims[j];
1583                    }
1584                }
1585                log.trace("getAttrValue: lsize={}", lsize);
1586                avalue = H5Datatype.allocateArray(atid, (int) lsize);
1587
1588                if (avalue != null) {
1589                    log.trace("read attribute id {} of size={}", atid, lsize);
1590                    H5.H5Aread(aid, atid, avalue);
1591
1592                    if (H5Datatype.isUnsigned(atid)) {
1593                        log.trace("id {} is unsigned", atid);
1594                        avalue = convertFromUnsignedC(avalue, null);
1595                    }
1596                }
1597            }
1598            catch (Exception ex) {
1599                log.debug("try to get value for attribute {}:", aname, ex);
1600            }
1601            finally {
1602                try {
1603                    H5.H5Tclose(atid);
1604                }
1605                catch (HDF5Exception ex) {
1606                    log.debug("finally close:", ex);
1607                }
1608                try {
1609                    H5.H5Sclose(asid);
1610                }
1611                catch (HDF5Exception ex) {
1612                    log.debug("finally close:", ex);
1613                }
1614                try {
1615                    H5.H5Aclose(aid);
1616                }
1617                catch (HDF5Exception ex) {
1618                    log.debug("finally close:", ex);
1619                }
1620            }
1621        } // if (aid > 0)
1622
1623        log.trace("getAttrValue: finish");
1624        return avalue;
1625    }
1626
1627    private boolean isStringAttributeOf(int objID, String name, String value) {
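        // check whether the named string attribute exists on the given object and equals the given
        // value (ignoring case)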
1628        boolean retValue = false;
1629        int aid = -1, atid = -1;
1630
1631        try {
            // open the named string attribute and compare its value with the expected value
1633            aid = H5.H5Aopen_by_name(objID, ".", name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
1634            atid = H5.H5Aget_type(aid);
1635            int size = H5.H5Tget_size(atid);
1636            byte[] attrValue = new byte[size];
1637            H5.H5Aread(aid, atid, attrValue);
1638            String strValue = new String(attrValue).trim();
1639            retValue = strValue.equalsIgnoreCase(value);
1640        }
1641        catch (Exception ex) {
            log.debug("check string attribute {}:", name, ex);
1643        }
1644        finally {
1645            try {
1646                H5.H5Tclose(atid);
1647            }
1648            catch (HDF5Exception ex) {
1649                log.debug("finally close:", ex);
1650            }
1651            try {
1652                H5.H5Aclose(aid);
1653            }
1654            catch (HDF5Exception ex) {
1655                log.debug("finally close:", ex);
1656            }
1657        }
1658
1659        return retValue;
1660    }
1661
1662    /*
1663     * (non-Javadoc)
1664     *
1665     * @see hdf.object.Dataset#copy(hdf.object.Group, java.lang.String, long[], java.lang.Object)
1666     */
1667    @Override
1668    public Dataset copy(Group pgroup, String dstName, long[] dims, Object buff) throws Exception {
1669        // must give a location to copy
1670        if (pgroup == null) {
1671            return null;
1672        }
1673
1674        Dataset dataset = null;
1675        int srcdid = -1, dstdid = -1, tid = -1, sid = -1, plist = -1;
1676        String dname = null, path = null;
1677
1678        if (pgroup.isRoot()) {
1679            path = HObject.separator;
1680        }
1681        else {
1682            path = pgroup.getPath() + pgroup.getName() + HObject.separator;
1683        }
1684        dname = path + dstName;
1685
1686        srcdid = open();
1687        if (srcdid >= 0) {
1688            try {
1689                tid = H5.H5Dget_type(srcdid);
1690                sid = H5.H5Screate_simple(dims.length, dims, null);
1691                plist = H5.H5Dget_create_plist(srcdid);
1692
1693                long[] chunks = new long[dims.length];
1694                boolean setChunkFlag = false;
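                // If the destination extent is smaller than the source chunk size in any dimension,
                // shrink the chunk to fit. For example (hypothetical sizes): source chunks {64, 64}
                // copied with dims {20, 1} become {10, 1}.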
1695                try {
1696                    H5.H5Pget_chunk(plist, dims.length, chunks);
1697                    for (int i = 0; i < dims.length; i++) {
1698                        if (dims[i] < chunks[i]) {
1699                            setChunkFlag = true;
1700                            if (dims[i] == 1)
1701                                chunks[i] = 1;
1702                            else
1703                                chunks[i] = dims[i] / 2;
1704                        }
1705                    }
1706                }
1707                catch (Exception ex) {
1708                    log.debug("copy chunk:", ex);
1709                }
1710
1711                if (setChunkFlag)
1712                    H5.H5Pset_chunk(plist, dims.length, chunks);
1713
1714                try {
1715                    dstdid = H5.H5Dcreate(pgroup.getFID(), dname, tid, sid, HDF5Constants.H5P_DEFAULT, plist,
1716                            HDF5Constants.H5P_DEFAULT);
1717                }
1718                catch (Exception e) {
1719                    log.debug("copy create:", e);
1720                }
1721                finally {
1722                    try {
1723                        H5.H5Dclose(dstdid);
1724                    }
1725                    catch (Exception ex2) {
1726                        log.debug("finally close:", ex2);
1727                    }
1728                }
1729
1730                dataset = new H5ScalarDS(pgroup.getFileFormat(), dstName, path);
1731                if (buff != null) {
1732                    dataset.init();
1733                    dataset.write(buff);
1734                }
1735
1736                dstdid = dataset.open();
1737                if (dstdid >= 0) {
1738                    try {
1739                        H5File.copyAttributes(srcdid, dstdid);
1740                    }
1741                    finally {
1742                        try {
1743                            H5.H5Dclose(dstdid);
1744                        }
1745                        catch (Exception ex) {
1746                            log.debug("finally close:", ex);
1747                        }
1748                    }
1749                }
1750            }
1751            finally {
1752                try {
1753                    H5.H5Pclose(plist);
1754                }
1755                catch (Exception ex) {
1756                    log.debug("finally close:", ex);
1757                }
1758                try {
1759                    H5.H5Sclose(sid);
1760                }
1761                catch (Exception ex) {
1762                    log.debug("finally close:", ex);
1763                }
1764                try {
1765                    H5.H5Tclose(tid);
1766                }
1767                catch (Exception ex) {
1768                    log.debug("finally close:", ex);
1769                }
1770                try {
1771                    H5.H5Dclose(srcdid);
1772                }
1773                catch (Exception ex) {
1774                    log.debug("finally close:", ex);
1775                }
1776            }
1777        }
1778
1779        pgroup.addToMemberList(dataset);
1780
1781        ((ScalarDS) dataset).setIsImage(isImage);
1782
1783        return dataset;
1784    }
1785
1786    /*
1787     * (non-Javadoc)
1788     *
1789     * @see hdf.object.ScalarDS#getPalette()
1790     */
1791    @Override
1792    public byte[][] getPalette() {
1793        if (palette == null) {
1794            palette = readPalette(0);
1795        }
1796
1797        return palette;
1798    }
1799
1800    /*
1801     * (non-Javadoc)
1802     *
1803     * @see hdf.object.ScalarDS#getPaletteName(int)
1804     */
1805    public String getPaletteName(int idx) {
1806
1807        byte[] refs = getPaletteRefs();
1808        int did = -1, pal_id = -1;
1809        String[] paletteName = { "" };
1810        long size = 100L;
1811
1812        if (refs == null) {
1813            return null;
1814        }
1815
1816        byte[] ref_buf = new byte[8];
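        // each palette reference is stored in eight bytes; the idx-th reference starts at offset idx * 8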
1817
1818        try {
1819            System.arraycopy(refs, idx * 8, ref_buf, 0, 8);
1820        }
1821        catch (Throwable err) {
1822            return null;
1823        }
1824
1825        did = open();
1826        if (did >= 0) {
1827            try {
1828                pal_id = H5.H5Rdereference(getFID(), HDF5Constants.H5R_OBJECT, ref_buf);
1829                H5.H5Iget_name(pal_id, paletteName, size);
1830            }
1831            catch (Exception ex) {
                log.debug("getPaletteName:", ex);
1833            }
1834            finally {
1835                close(pal_id);
1836                close(did);
1837            }
1838        }
1839
1840        return paletteName[0];
1841    }
1842
1843    /*
1844     * (non-Javadoc)
1845     *
1846     * @see hdf.object.ScalarDS#readPalette(int)
1847     */
1848    @Override
1849    public byte[][] readPalette(int idx) {
1850        byte[][] thePalette = null;
1851        byte[] refs = getPaletteRefs();
1852        int did = -1, pal_id = -1, tid = -1;
1853
1854        if (refs == null) {
1855            return null;
1856        }
1857
1858        byte[] p = null;
1859        byte[] ref_buf = new byte[8];
1860
1861        try {
1862            System.arraycopy(refs, idx * 8, ref_buf, 0, 8);
1863        }
1864        catch (Throwable err) {
1865            return null;
1866        }
1867
1868        did = open();
1869        if (did >= 0) {
1870            try {
1871                pal_id = H5.H5Rdereference(getFID(), HDF5Constants.H5R_OBJECT, ref_buf);
1872                tid = H5.H5Dget_type(pal_id);
1873
1874                // support only 3*256 byte palette data
1875                if (H5.H5Dget_storage_size(pal_id) <= 768) {
1876                    p = new byte[3 * 256];
1877                    H5.H5Dread(pal_id, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, p);
1878                }
1879            }
1880            catch (HDF5Exception ex) {
1881                p = null;
1882            }
1883            finally {
1884                try {
1885                    H5.H5Tclose(tid);
1886                }
                catch (HDF5Exception ex2) {
                    log.debug("finally close:", ex2);
                }
1889                close(pal_id);
1890                close(did);
1891            }
1892        }
1893
1894        if (p != null) {
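            // the raw palette bytes are interleaved RGB triples {r0, g0, b0, r1, g1, b1, ...};
            // split them into three 256-entry planes (reds, greens, blues)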
1895            thePalette = new byte[3][256];
1896            for (int i = 0; i < 256; i++) {
1897                thePalette[0][i] = p[i * 3];
1898                thePalette[1][i] = p[i * 3 + 1];
1899                thePalette[2][i] = p[i * 3 + 2];
1900            }
1901        }
1902
1903        return thePalette;
1904    }
1905
1906    private static byte[] parseFillValue(Datatype type, Object fillValue) throws Exception {
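        // Encode the fill value as a byte array matching the given datatype, for use with
        // H5Pset_fill_value; return null if the value cannot be parsed for that datatype.
        // For example (hypothetical values): a 4-byte CLASS_INTEGER type with fillValue "0"
        // is encoded via HDFNativeData.intToByte(0).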
1907        byte[] data = null;
1908
1909        if (type == null || fillValue == null)
1910            return null;
1911
1912        int datatypeClass = type.getDatatypeClass();
1913        int datatypeSize = type.getDatatypeSize();
1914
1915        double val_dbl = 0;
1916        String val_str = null;
1917
        if (fillValue instanceof String) {
            val_str = (String) fillValue;
        }
        else if (fillValue.getClass().isArray()) {
            val_str = Array.get(fillValue, 0).toString();
        }
        else {
            // fall back to toString() so a scalar Number fill value does not leave val_str null
            val_str = fillValue.toString();
        }
1924
1925        if (datatypeClass != Datatype.CLASS_STRING) {
1926            try {
1927                val_dbl = Double.parseDouble(val_str);
1928            }
1929            catch (NumberFormatException ex) {
1930                return null;
1931            }
1932        }
1933
1934        try {
1935            switch (datatypeClass) {
1936            case Datatype.CLASS_INTEGER:
1937            case Datatype.CLASS_ENUM:
1938            case Datatype.CLASS_CHAR:
1939                if (datatypeSize == 1) {
1940                    data = new byte[] { (byte) val_dbl };
1941                }
1942                else if (datatypeSize == 2) {
1943                    data = HDFNativeData.shortToByte((short) val_dbl);
1944                }
1945                else if (datatypeSize == 8) {
1946                    data = HDFNativeData.longToByte((long) val_dbl);
1947                }
1948                else {
1949                    data = HDFNativeData.intToByte((int) val_dbl);
1950                }
1951                break;
1952            case Datatype.CLASS_FLOAT:
1953                if (datatypeSize == 8) {
1954                    data = HDFNativeData.doubleToByte(val_dbl);
1955                }
                else {
                    data = HDFNativeData.floatToByte((float) val_dbl);
                }
1960                break;
1961            case Datatype.CLASS_STRING:
1962                data = val_str.getBytes();
1963                break;
1964            case Datatype.CLASS_REFERENCE:
1965                data = HDFNativeData.longToByte((long) val_dbl);
1966                break;
1967            default:
1968                log.debug("parseFillValue datatypeClass unknown");
1969                break;
1970            } // switch (tclass)
1971        }
1972        catch (Exception ex) {
1973            data = null;
1974        }
1975
1976        return data;
1977    }
1978
1979    /*
1980     * (non-Javadoc)
1981     *
1982     * @see hdf.object.ScalarDS#getPaletteRefs()
1983     */
1984    @Override
1985    public byte[] getPaletteRefs() {
1986        if (rank <= 0) {
1987            init(); // init will be called to get refs
1988        }
1989
1990        return paletteRefs;
1991    }
1992
    /**
     * Reads the object references of palettes into a byte array. Each reference requires eight bytes of
     * storage; therefore, the array length is 8*numberOfPalettes.
     */
1997    private byte[] getPaletteRefs(int did) {
1998        int aid = -1, sid = -1, size = 0, rank = 0, atype = -1;
1999        byte[] ref_buf = null;
2000
2001        try {
2002            aid = H5.H5Aopen_by_name(did, ".", "PALETTE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
2003            sid = H5.H5Aget_space(aid);
2004            rank = H5.H5Sget_simple_extent_ndims(sid);
2005            size = 1;
2006            if (rank > 0) {
2007                long[] dims = new long[rank];
2008                H5.H5Sget_simple_extent_dims(sid, dims, null);
2009                log.trace("getPaletteRefs() rank={}, dims={}", rank, dims);
2010                for (int i = 0; i < rank; i++) {
2011                    size *= (int) dims[i];
2012                }
2013            }
2014
2015            ref_buf = new byte[size * 8];
2016            atype = H5.H5Aget_type(aid);
2017
2018            H5.H5Aread(aid, atype, ref_buf);
2019        }
2020        catch (HDF5Exception ex) {
2021            log.debug("Palette attribute search failed: Expected");
2022            ref_buf = null;
2023        }
2024        finally {
2025            try {
2026                H5.H5Tclose(atype);
2027            }
2028            catch (HDF5Exception ex2) {
2029                log.debug("finally close:", ex2);
2030            }
2031            try {
2032                H5.H5Sclose(sid);
2033            }
2034            catch (HDF5Exception ex2) {
2035                log.debug("finally close:", ex2);
2036            }
2037            try {
2038                H5.H5Aclose(aid);
2039            }
2040            catch (HDF5Exception ex2) {
2041                log.debug("finally close:", ex2);
2042            }
2043        }
2044
2045        return ref_buf;
2046    }
2047
    /**
     * Extends the dataset to the sizes given in newDims by calling H5Dset_extent, and verifies that the
     * new dimensions took effect. The dimensionality of newDims is the same as that of the dataspace of
     * the dataset being changed.
     *
     * This method can be applied to the following datasets: 1) any dataset with unlimited dimensions, and
     * 2) a dataset with fixed dimensions if the current dimension sizes are less than the maximum sizes
     * set with maxdims (see H5Screate_simple).
2055     *
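     * <p>
     * A minimal usage sketch (hypothetical names; assumes an open H5File "file" containing a chunked
     * dataset created with an unlimited first dimension):
     *
     * <pre>
     * H5ScalarDS ds = (H5ScalarDS) file.get("/ints2D");
     * ds.init();
     * long[] curDims = ds.getDims();
     * ds.extend(new long[] { curDims[0] + 10, curDims[1] });
     * </pre>
     *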
2056     * @param newDims the dimension target size
2057     *
2058     * @throws HDF5Exception
2059     *             If there is an error at the HDF5 library level.
2060     */
2061    public void extend(long[] newDims) throws HDF5Exception {
2062        int did = -1, sid = -1;
2063
2064        did = open();
2065        if (did >= 0) {
2066            try {
2067                H5.H5Dset_extent(did, newDims);
2068                H5.H5Fflush(did, HDF5Constants.H5F_SCOPE_GLOBAL);
2069                sid = H5.H5Dget_space(did);
2070                long[] checkDims = new long[rank];
2071                H5.H5Sget_simple_extent_dims(sid, checkDims, null);
2072                log.trace("extend() rank={}, checkDims={}", rank, checkDims);
2073                for (int i = 0; i < rank; i++) {
2074                    if (checkDims[i] != newDims[i]) {
2075                        throw new HDF5Exception("error extending dataset " + getName());
2076                    }
2077                }
2078                dims = checkDims;
2079            }
2080            catch (Exception e) {
2081                throw new HDF5Exception(e.getMessage());
2082            }
2083            finally {
2084                if (sid > 0)
2085                    H5.H5Sclose(sid);
2086
2087                close(did);
2088            }
2089        }
2090    }
2091
2092}