001/*****************************************************************************
002 * Copyright by The HDF Group.                                               *
003 * Copyright by the Board of Trustees of the University of Illinois.         *
004 * All rights reserved.                                                      *
005 *                                                                           *
006 * This file is part of the HDF Java Products distribution.                  *
007 * The full copyright notice, including terms governing use, modification,   *
008 * and redistribution, is contained in the files COPYING and Copyright.html. *
009 * COPYING can be found at the root of the source code distribution tree.    *
010 * Or, see http://hdfgroup.org/products/hdf-java/doc/Copyright.html.         *
011 * If you do not have access to either file, you may request a copy from     *
012 * help@hdfgroup.org.                                                        *
013 ****************************************************************************/
014
015package hdf.object.h4;
016
017import java.util.List;
018import java.util.Vector;
019
020import hdf.hdflib.HDFChunkInfo;
021import hdf.hdflib.HDFCompInfo;
022import hdf.hdflib.HDFConstants;
023import hdf.hdflib.HDFDeflateCompInfo;
024import hdf.hdflib.HDFException;
025import hdf.hdflib.HDFLibrary;
026import hdf.object.Attribute;
027import hdf.object.Dataset;
028import hdf.object.Datatype;
029import hdf.object.FileFormat;
030import hdf.object.Group;
031import hdf.object.HObject;
032import hdf.object.ScalarDS;
033
034/**
035 * H4GRImage describes HDF4 general raster(GR) image and operations performed on
036 * the GR image. An HDF4 raster image is a two-dimension array of pixel values.
037 * <p>
038 * Every GR data set must contain the following components: image array, name,
039 * pixel type, and dimensions. The name, dimensions, and pixel type must be
040 * supplied by the user at the time the GR data set is defined.
041 * <p>
042 * An image array is a two-dimensional array of pixels. Each element in an image
043 * array corresponds to one pixel and each pixel can consist of a number of
044 * color component values or pixel components, e.g., Red-Green-Blue or RGB,
045 * Cyan-Magenta-Yellow-Black or CMYK, etc. Pixel components can be represented
046 * by different methods (8-bit lookup table or 24-bit direct representation) and
047 * may have different data types. The data type of pixel components and the number
048 * of components in each pixel are collectively known as the pixel type.
049 * <p>
050 * <b>How to Select a Subset</b>
051 * <p>
 * Dataset defines APIs for reading, writing and subsetting a dataset. No function is defined
053 * to select a subset of a data array. The selection is done in an implicit way.
054 * Function calls to dimension information such as getSelectedDims() return an array
055 * of dimension values, which is a reference to the array in the dataset object.
056 * Changes of the array outside the dataset object directly change the values of
057 * the array in the dataset object. It is like pointers in C.
058 * <p>
059 *
060 * The following is an example of how to make a subset. In the example, the dataset
061 * is a 4-dimension with size of [200][100][50][10], i.e.
062 * dims[0]=200; dims[1]=100; dims[2]=50; dims[3]=10; <br>
063 * We want to select every other data points in dims[1] and dims[2]
064 * <pre>
065     int rank = dataset.getRank();   // number of dimension of the dataset
066     long[] dims = dataset.getDims(); // the dimension sizes of the dataset
     long[] selected = dataset.getSelectedDims(); // the selected size of the dataset
     long[] start = dataset.getStartDims(); // the offset of the selection
069     long[] stride = dataset.getStride(); // the stride of the dataset
070     int[]  selectedIndex = dataset.getSelectedIndex(); // the selected dimensions for display
071
     // select dim1 and dim2 as 2D data for display, and slice through dim0
     selectedIndex[0] = 1;
     selectedIndex[1] = 2;
     selectedIndex[2] = 0;
076
077     // reset the selection arrays
078     for (int i=0; i&lt;rank; i++) {
079         start[i] = 0;
080         selected[i] = 1;
081         stride[i] = 1;
082    }
083
084    // set stride to 2 on dim1 and dim2 so that every other data points are selected.
085    stride[1] = 2;
086    stride[2] = 2;
087
088    // set the selection size of dim1 and dim2
    selected[1] = dims[1]/stride[1];
    selected[2] = dims[2]/stride[2];
091
    // when dataset.read() is called, the selection above will be used since
    // the dimension arrays are passed by reference. Changes of these arrays
    // outside the dataset object directly change the values of these arrays
095    // in the dataset object.
096
097 * </pre>
098 *
099 * @version 1.1 9/4/2007
100 * @author Peter X. Cao
101 */
102public class H4GRImage extends ScalarDS
103{
    /** Serialization version identifier. */
    private static final long serialVersionUID = 1029672744963360976L;

    private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H4GRImage.class);

    /**
     * The list of attributes of this data object. Members of the list are
     * instance of Attribute. Lazily populated by getMetadata().
     */
    private List attributeList;

    /**
     * The GR interface identifier obtained from GRstart(fid)
     */
    private int grid;

    /**
     * The number of components in the raster image
     */
    private int ncomp;

    /** the datatype identifier */
    private int datatypeID = -1;

    /** Cached attribute count; -1 means "not yet queried from the file". */
    private int nAttributes = -1;
132
    /**
     * Creates a H4GRImage object with specific name and path and a null
     * object identifier.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4GRImage.
     * @param path the full path of this H4GRImage.
     */
    public H4GRImage(FileFormat theFile, String name, String path)
    {
        this(theFile, name, path, null);
    }
137
    /**
     * Creates a H4GRImage object with specific name, path, and object ID.
     *
     * @param theFile the HDF file.
     * @param name the name of this H4GRImage.
     * @param path the full path of this H4GRImage.
     * @param oid the unique identifier of this data object.
     */
    public H4GRImage(
        FileFormat theFile,
        String name,
        String path,
        long[] oid)
    {
        super (theFile, name, path, oid);
        palette = null;
        // A GR object is always presented as an image.
        isImage = isImageDisplay = true;
        unsignedConverted = false;
        // Cache the GR interface id (from GRstart) held by the owning file.
        grid = ((H4File)getFileFormat()).getGRAccessID();
    }
158
159    /*
160     * (non-Javadoc)
161     * @see hdf.object.DataFormat#hasAttribute()
162     */
163    public boolean hasAttribute ()
164    {
165        if (nAttributes < 0) {
166            grid = ((H4File)getFileFormat()).getGRAccessID();
167
168            int id = open();
169            String[] objName = {""};
170            int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
171            int[] idims = new int[2];
172            try {
173                HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
174                nAttributes = grInfo[3];
175            }
176            catch (Exception ex) {
177                nAttributes = 0;
178            }
179            close(id);
180        }
181
182        return (nAttributes>0);
183    }
184
185    // To do: Implementing Dataset
186    @Override
187    public Dataset copy(Group pgroup, String dname, long[] dims, Object buff) throws Exception
188    {
189        Dataset dataset = null;
190        int srcdid=-1, dstdid=-1;
191        String path=null;
192        int[] count=null;
193
194        if (pgroup == null) {
195            return null;
196        }
197
198        if (pgroup.isRoot()) {
199            path = HObject.separator;
200        }
201        else {
202            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
203        }
204
205        srcdid = open();
206        if (srcdid < 0) {
207            return null;
208        }
209
210        if (dims != null) {
211            count = new int[2];
212            count[0] = (int)dims[0];
213            count[1] = (int)dims[1];
214        }
215
216        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
217        try {
218            String[] tmpName = {""};
219            int[] tmpDims = new int[2];
220            HDFLibrary.GRgetiminfo(srcdid, tmpName, grInfo, tmpDims);
221            if (count == null) {
222                count = tmpDims;
223            }
224        }
225        catch (HDFException ex) {
226            log.debug("copy.GRgetiminfo:", ex);
227        }
228
229        int ncomp = grInfo[0];
230        int tid = grInfo[1];
231        int interlace = grInfo[2];
232        int numberOfAttributes = grInfo[3];
233        dstdid = HDFLibrary.GRcreate(
234            ((H4File)pgroup.getFileFormat()).getGRAccessID(),
235            dname, ncomp, tid, interlace, count);
236        if (dstdid < 0) {
237            return null;
238        }
239
240        int ref = HDFLibrary.GRidtoref(dstdid);
241        if (!pgroup.isRoot()) {
242            int vgid = pgroup.open();
243            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RIG, ref);
244            pgroup.close(vgid);
245        }
246
247        // read data from the source dataset
248        int[] start = {0, 0};
249        if (buff == null) {
250            buff = new byte[count[0]*count[1] * HDFLibrary.DFKNTsize(tid)];
251            HDFLibrary.GRreadimage(srcdid, start, null, count, buff);
252        }
253
254        // write the data into the destination dataset
255        HDFLibrary.GRwriteimage(dstdid, start, null, count, buff);
256
257        // copy palette
258        int pid = HDFLibrary.GRgetlutid(srcdid, 0);
259        int[] palInfo = new int[4];
260
261        HDFLibrary.GRgetlutinfo(pid, palInfo);
262        palInfo[1] = HDFConstants.DFNT_UINT8; // support unsigned byte only. Other type does not work
263        int palSize = palInfo[0]*palInfo[3];
264        byte[] palBuff = new byte[palSize];
265        HDFLibrary.GRreadlut(pid, palBuff);
266        pid = HDFLibrary.GRgetlutid(dstdid, 0);
267        HDFLibrary.GRwritelut(pid, palInfo[0], palInfo[1], palInfo[2], palInfo[3], palBuff);
268
269        // copy attributes from one object to the new object
270        copyAttribute(srcdid, dstdid, numberOfAttributes);
271
272        long[] oid = {HDFConstants.DFTAG_RIG, ref};
273        dataset = new H4GRImage(pgroup.getFileFormat(), dname, path, oid);
274
275        pgroup.addToMemberList(dataset);
276
277        close(srcdid);
278        try {
279            HDFLibrary.GRendaccess(dstdid);
280        }
281        catch (HDFException ex) {
282            log.debug("copy.GRendaccess:", ex);
283        }
284
285        return dataset;
286    }
287
288    // ***** need to implement from ScalarDS *****
289    @Override
290    public byte[][] readPalette(int idx) { return null;}
291
292    // ***** need to implement from ScalarDS *****
293    @Override
294    public byte[] getPaletteRefs() { return null;}
295
296    // implementing ScalarDS
297    @Override
298    public Datatype getDatatype()
299    {
300        if (datatype == null) {
301            datatype = new H4Datatype(datatypeID);
302        }
303
304        return datatype;
305    }
306
    // Implementing Dataset
    /**
     * Reads the currently selected subset of the image as a raw byte array,
     * honoring startDims/selectedDims/selectedStride.
     *
     * @return the raw image bytes, or null if the image cannot be opened.
     *
     * @throws HDFException if the HDF library fails to read the data.
     */
    @Override
    public byte[] readBytes() throws HDFException
    {
        byte[] theData = null;

        // Make sure rank/dims/selection have been initialized from the file.
        if (rank <=0 ) {
            init();
        }

        int id = open();
        if (id < 0) {
            return null;
        }

        try {
            // set the interlacing scheme for reading image data
            HDFLibrary.GRreqimageil(id, interlace);
            // buffer size = pixels * components * bytes-per-element
            int datasize = (int)(getWidth()*getHeight()*ncomp);
            int size = HDFLibrary.DFKNTsize(datatypeID)*datasize;
            theData = new byte[size];
            int[] start = {(int)startDims[0], (int)startDims[1]};
            int[] select = {(int)selectedDims[0], (int)selectedDims[1]};

            // stride is null (contiguous) unless a stride has been selected
            int[] stride = null;
            if (selectedStride != null) {
                stride = new int[rank];
                for (int i=0; i<rank; i++) {
                    stride[i] = (int)selectedStride[i];
                }
            }

            HDFLibrary.GRreadimage(id, start, stride, select, theData);
        }
        finally {
            // always release the raster image access id
            close(id);
        }

        return theData;
    }
347
    // ***** need to implement from DataFormat *****
    /**
     * Reads the currently selected subset of the image into a typed array
     * allocated to match the image datatype.
     *
     * @return the data array, or null if the image cannot be opened or the
     *         array cannot be allocated.
     *
     * @throws HDFException if the HDF library fails to read the data.
     */
    @Override
    public Object read() throws HDFException
    {
        Object theData = null;

        // Make sure rank/dims/selection have been initialized from the file.
        if (rank <=0 ) {
            init();
        }

        int id = open();
        if (id < 0) {
            return null;
        }

        try {
            // set the interlacing scheme for reading image data
            HDFLibrary.GRreqimageil(id, interlace);
            int datasize = (int)(getWidth()*getHeight()*ncomp);

            theData = H4Datatype.allocateArray(datatypeID, datasize);

            if (theData != null) {
                // assume external data files are located in the same directory as the main file.
                HDFLibrary.HXsetdir(getFileFormat().getParent());

                int[] start = {(int)startDims[0], (int)startDims[1]};
                int[] select = {(int)selectedDims[0], (int)selectedDims[1]};

                // stride is null (contiguous) unless a stride has been selected
                int[] stride = null;
                if (selectedStride != null) {
                    stride = new int[rank];
                    for (int i=0; i<rank; i++) {
                        stride[i] = (int)selectedStride[i];
                    }
                }

                HDFLibrary.GRreadimage(id, start, stride, select, theData);
            }
        }
        finally {
            // always release the raster image access id
            close(id);
        }

        // GR data is stored as dim[0]=width, dim[1]=height; record whether the
        // current selection follows the default (height-major) display order.
        if ( (rank >1) && (selectedIndex[1]>selectedIndex[0]))
            isDefaultImageOrder = false;
        else
            isDefaultImageOrder = true;

        return theData;
    }
399
    // Implementing DataFormat
    /**
     * Writes the given buffer into the currently selected subset of the image.
     * A null buffer or a failed open() is silently ignored.
     *
     * @param buf the data to write.
     *
     * @throws HDFException if the HDF library fails to write the data.
     */
    @Override
    public void write(Object buf) throws HDFException
    {
        if (buf == null) {
            return;
        }

        int id = open();
        if (id < 0) {
            return;
        }

        int[] select = new int[rank];
        int[] start = new int[rank];
        for (int i=0; i<rank; i++) {
            select[i] = (int)selectedDims[i];
            start[i] = (int)startDims[i];
        }

        // stride is null (contiguous) unless a stride has been selected
        int[] stride = null;
        if (selectedStride != null) {
            stride = new int[rank];
            for (int i=0; i<rank; i++) {
                stride[i] = (int)selectedStride[i];
            }
        }

        Object tmpData = buf;
        try {
            // Data previously converted from unsigned C types must be
            // converted back before writing.
            if ( isUnsigned && unsignedConverted) {
                tmpData = convertToUnsignedC(buf);
            }
            // assume external data files are located in the same directory as the main file.
            HDFLibrary.HXsetdir(getFileFormat().getParent());

            HDFLibrary.GRwriteimage(id, start, stride, select, tmpData);
        }
        finally {
            tmpData = null;
            close(id);
        }
    }
443
444    // ***** need to implement from DataFormat *****
445    public List getMetadata() throws HDFException
446    {
447        if (attributeList != null) {
448            return attributeList;
449        }
450
451        int id = open();
452        String[] objName = {""};
453        int[] grInfo = new int[4]; //ncomp, data_type, interlace, and num_attrs
454        int[] idims = new int[2];
455        try {
456            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
457            // mask off the litend bit
458            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
459            int n = grInfo[3];
460
461            if ((attributeList == null) && (n>0)) {
462                attributeList = new Vector(n, 5);
463            }
464
465            boolean b = false;
466            String[] attrName = new String[1];
467            int[] attrInfo = {0, 0}; // data_type, length
468            for (int i=0; i<n; i++) {
469                attrName[0] = "";
470                try {
471                    b = HDFLibrary.GRattrinfo(id, i, attrName, attrInfo);
472                    // mask off the litend bit
473                    attrInfo[0] = attrInfo[0] & (~HDFConstants.DFNT_LITEND);
474                }
475                catch (HDFException ex) {
476                    b = false;
477                }
478
479                if (!b) {
480                    continue;
481                }
482
483                long[] attrDims = {attrInfo[1]};
484                Attribute attr = new Attribute(attrName[0], new H4Datatype(attrInfo[0]), attrDims);;
485                attributeList.add(attr);
486
487                Object buf = H4Datatype.allocateArray(attrInfo[0], attrInfo[1]);
488                try {
489                    HDFLibrary.GRgetattr(id, i, buf);
490                }
491                catch (HDFException ex) {
492                    buf = null;
493                }
494
495                if (buf != null) {
496                    if ((attrInfo[0] == HDFConstants.DFNT_CHAR) ||
497                        (attrInfo[0] ==  HDFConstants.DFNT_UCHAR8)) {
498                        buf = Dataset.byteToString((byte[])buf, attrInfo[1]);
499                    }
500
501                    attr.setValue(buf);
502                }
503            } // for (int i=0; i<n; i++)
504        }
505        finally {
506            close(id);
507        }
508
509        return attributeList;
510    }
511
512    // ***** need to implement from DataFormat *****
513    public void writeMetadata(Object info) throws Exception
514    {
515        // only attribute metadata is supported.
516        if (!(info instanceof Attribute)) {
517            return;
518        }
519
520        getFileFormat().writeAttribute(this, (Attribute)info, true);
521
522        if (attributeList == null) {
523            attributeList = new Vector();
524        }
525
526        attributeList.add(info);
527        nAttributes = attributeList.size();
528    }
529
530    // ***** need to implement from DataFormat *****
531    public void removeMetadata(Object info) throws HDFException {;}
532
    // implementing DataFormat
    /** Updating attributes in place is not supported for HDF4 GR images; this is a no-op. */
    public void updateMetadata(Object info) throws Exception {
        log.trace("updateMetadata(): disabled");
    }
537
538    // Implementing HObject.
539    @Override
540    public int open()
541    {
542
543        int id = -1;
544        try {
545            int index = HDFLibrary.GRreftoindex(grid, (short)oid[1]);
546            id = HDFLibrary.GRselect(grid, index);
547        }
548        catch (HDFException ex) {
549            id = -1;
550        }
551
552        return id;
553    }
554
555    // Implementing HObject.
556    @Override
557    public void close(int grid)
558    {
559        try { HDFLibrary.GRendaccess(grid); }
560        catch (HDFException ex) {;}
561    }
562
563    // Implementing Dataset.
564    @Override
565    public void init()
566    {
567        if (rank>0) {
568            return; // already called. Initialize only once
569        }
570
571        int id = open();
572        String[] objName = {""};
573        int[] grInfo = new int[4]; //ncomp, data_type, interlace and num_attrs
574        int[] idims = new int[2];
575        try {
576            HDFLibrary.GRgetiminfo(id, objName, grInfo, idims);
577            // mask off the litend bit
578            grInfo[1] = grInfo[1] & (~HDFConstants.DFNT_LITEND);
579            datatypeID = grInfo[1];
580
581            // get compression information
582            try {
583                HDFCompInfo compInfo = new HDFCompInfo();
584                boolean status = HDFLibrary.GRgetcompress(id, compInfo);
585                if (compInfo.ctype == HDFConstants.COMP_CODE_DEFLATE) {
586                    compression = "GZIP";
587                }
588                else if (compInfo.ctype == HDFConstants.COMP_CODE_SZIP) {
589                    compression = "SZIP";
590                }
591                else if (compInfo.ctype == HDFConstants.COMP_CODE_JPEG) {
592                    compression = "JPEG";
593                }
594                else if (compInfo.ctype == HDFConstants.COMP_CODE_SKPHUFF) {
595                    compression = "SKPHUFF";
596                }
597                else if (compInfo.ctype == HDFConstants.COMP_CODE_RLE) {
598                    compression = "RLE";
599                }
600                else if (compInfo.ctype == HDFConstants.COMP_CODE_NBIT) {
601                    compression = "NBIT";
602                }
603            }
604            catch (Exception ex) {
605                log.debug("get compression information:", ex);
606            }
607
608            // get chunk information
609            try {
610                HDFChunkInfo chunkInfo = new HDFChunkInfo();
611                int[] cflag = {HDFConstants.HDF_NONE};
612                boolean status = HDFLibrary.GRgetchunkinfo(id, chunkInfo, cflag);
613                if (cflag[0] == HDFConstants.HDF_NONE) {
614                    chunkSize = null;
615                }
616                else {
617                    chunkSize = new long[rank];
618                    for (int i=0; i<rank; i++) {
619                        chunkSize[i] = chunkInfo.chunk_lengths[i];
620                    }
621                }
622            }
623            catch (Exception ex) {
624                log.debug("get chunk information:", ex);
625            }
626
627        }
628        catch (HDFException ex) {
629            log.debug("H4GRImage.init():", ex);
630        }
631        finally {
632            close(id);
633        }
634
635        isUnsigned = H4Datatype.isUnsigned(datatypeID);
636
637        if (idims == null) {
638            return;
639        }
640
641        ncomp = grInfo[0];
642        isTrueColor = (ncomp >= 3);
643        interlace = grInfo[2];
644        rank = 2; // support only two dimensional raster image
645
646        // data in HDF4 GR image is arranged as dim[0]=width, dim[1]=height.
647        // other image data is arranged as dim[0]=height, dim[1]=width.
648        selectedIndex[0] = 1;
649        selectedIndex[1] = 0;
650
651        dims = new long[rank];
652        startDims = new long[rank];
653        selectedDims = new long[rank];
654        for (int i=0; i<rank; i++) {
655            startDims[i] = 0;
656            selectedDims[i] = idims[i];
657            dims[i] = idims[i];
658        }
659
660    }
661
662    // ***** need to implement from ScalarDS *****
663    @Override
664    public byte[][] getPalette()
665    {
666        if (palette != null) {
667            return palette;
668        }
669
670        int id = open();
671        if (id < 0) {
672            return null;
673        }
674
675        // get palette info.
676        int lutid  = -1;
677        int[] lutInfo = new int[4]; //ncomp, datatype, interlace, num_entries
678        try {
679            // find the first palette.
680            // Todo: get all the palettes
681            lutid = HDFLibrary.GRgetlutid(id, 0);
682            HDFLibrary.GRgetlutinfo(lutid, lutInfo);
683        }
684        catch (HDFException ex) {
685            close(id);
686            return null;
687        }
688
689        // check if there is palette data. HDFLibrary.GRgetlutinfo() sometimes
690        // return true even if there is no palette data, and check if it is a
691        // RGB with 256 colors
692        if ((lutInfo[0] != 3) || (lutInfo[2] < 0) | (lutInfo[3] != 256)) {
693            close(id);
694            return null;
695        }
696
697        // read palette data
698        boolean b = false;
699        byte[] pal = new byte[3*256];
700        try
701        {
702            HDFLibrary.GRreqlutil(id, lutInfo[2]);
703            b = HDFLibrary.GRreadlut(lutid, pal);
704        }
705        catch (HDFException ex) {
706            b = false;
707        }
708
709        if (!b) {
710            close(id);
711            return null;
712        }
713
714        palette = new byte[3][256];
715        if (lutInfo[2] == HDFConstants.MFGR_INTERLACE_PIXEL) {
716            // color conponents are arranged in RGB, RGB, RGB, ...
717            for (int i=0; i<256; i++) {
718                palette[0][i] = pal[i*3];
719                palette[1][i] = pal[i*3+1];
720                palette[2][i] = pal[i*3+2];
721            }
722        }
723        else {
724            for (int i=0; i<256; i++) {
725                palette[0][i] = pal[i];
726                palette[1][i] = pal[256+i];
727                palette[2][i] = pal[512+i];
728            }
729        }
730
731        close(id);
732        return palette;
733    }
734
735    /**
736     * Returns the number of components of this image data.
737     *
738     * @return the number of components
739     */
740    public int getComponentCount()
741    {
742        return ncomp;
743    }
744
745    /**
746     * Creates a new image.
747     *
748     * @param name the name of the dataset to create.
749     * @param pgroup the parent group of the new dataset.
750     * @param type the datatype of the dataset.
751     * @param dims the dimension size of the dataset.
752     * @param maxdims the max dimension size of the dataset.
753     * @param chunks the chunk size of the dataset.
754     * @param gzip the level of the gzip compression.
755     * @param ncomp number of components of the image data.
756     * @param interlace the interlace mode.
757     * @param data the array of data values.
758     *
759     * @return the new image if successful. Otherwise returns null.
760     *
761     * @throws Exception if the image can not be created
762     */
763    public static H4GRImage create(
764        String name,
765        Group pgroup,
766        Datatype type,
767        long[] dims,
768        long[] maxdims,
769        long[] chunks,
770        int gzip,
771        int ncomp,
772        int interlace,
773        Object data) throws Exception
774    {
775        H4GRImage dataset = null;
776        if ((name == null) ||
777            (pgroup == null) ||
778            (dims == null) ||
779            ((gzip>0) && (chunks==null))) {
780            return null;
781        }
782
783        H4File file = (H4File)pgroup.getFileFormat();
784        if (file == null) {
785            return null;
786        }
787
788        String path = HObject.separator;
789        if (!pgroup.isRoot()) {
790            path = pgroup.getPath()+pgroup.getName()+HObject.separator;
791        }
792        if (interlace == ScalarDS.INTERLACE_PLANE) {
793            interlace = HDFConstants.MFGR_INTERLACE_COMPONENT;
794        }
795        else {
796            interlace = HDFConstants.MFGR_INTERLACE_PIXEL;
797        }
798
799        int rank = 2;
800        int idims[] = new int[rank];
801        int imaxdims[] = new int[rank];
802        int start[] = new int [rank];
803        for (int i=0; i<rank; i++) {
804            idims[i] = (int)dims[i];
805            if (maxdims != null) {
806                imaxdims[i] = (int)maxdims[i];
807            }
808            else {
809                imaxdims[i] = idims[i];
810            }
811            start[i] = 0;
812        }
813
814        int ichunks[] = null;
815        if (chunks != null) {
816            ichunks = new int[rank];
817            for (int i=0; i<rank; i++) {
818                ichunks[i] = (int)chunks[i];
819            }
820        }
821
822        int grid = -1;
823        int vgid = -1;
824        int gid = (file).getGRAccessID();
825        int tid = type.toNative();
826
827        if(tid >= 0) {
828            try {
829                grid = HDFLibrary.GRcreate(gid, name, ncomp, tid, interlace, idims);
830            }
831            catch (Exception ex) {
832                throw (ex);
833            }
834        }
835
836        if (grid < 0) {
837            throw (new HDFException("Unable to create the new dataset."));
838        }
839
840        if ((grid > 0) && (data != null)) {
841            HDFLibrary.GRwriteimage(grid, start, null, idims, data);
842        }
843
844        if (chunks != null) {
845            // set chunk
846            HDFChunkInfo chunkInfo = new HDFChunkInfo(ichunks);
847            HDFLibrary.GRsetchunk(grid, chunkInfo, HDFConstants.HDF_CHUNK);
848        }
849
850        if (gzip > 0) {
851            // set compression
852            int compType = HDFConstants.COMP_CODE_DEFLATE;
853            HDFDeflateCompInfo compInfo = new HDFDeflateCompInfo();
854            compInfo.level = gzip;
855            HDFLibrary.GRsetcompress(grid, compType, compInfo);
856        }
857
858        int ref = HDFLibrary.GRidtoref(grid);
859
860        if (!pgroup.isRoot()) {
861            // add the dataset to the parent group
862            vgid = pgroup.open();
863            if (vgid < 0) {
864                if (grid > 0) {
865                    HDFLibrary.GRendaccess(grid);
866                }
867                throw (new HDFException("Unable to open the parent group."));
868            }
869
870            HDFLibrary.Vaddtagref(vgid, HDFConstants.DFTAG_RI, ref);
871
872            pgroup.close(vgid);
873        }
874
875        try {
876            if (grid > 0) {
877                HDFLibrary.GRendaccess(grid);
878            }
879        }
880        catch (Exception ex) {
881            log.debug("create.GRendaccess:", ex);
882        }
883
884        long[] oid = {HDFConstants.DFTAG_NDG, ref};
885        dataset = new H4GRImage(file, name, path, oid);
886
887        if (dataset != null) {
888            pgroup.addToMemberList(dataset);
889        }
890
891        return dataset;
892    }
893
894    /**
895     * copy attributes from one GR image to another GR image
896     */
897    private void copyAttribute(int srcdid, int dstdid, int numberOfAttributes)
898    {
899        if (numberOfAttributes <=0 ) {
900            return;
901        }
902
903        try {
904            boolean b = false;
905            String[] attrName = new String[1];
906            int[] attrInfo = {0, 0};
907            for (int i=0; i<numberOfAttributes; i++) {
908                attrName[0] = "";
909                try {
910                    b = HDFLibrary.GRattrinfo(srcdid, i, attrName, attrInfo);
911                }
912                catch (HDFException ex) {
913                    b = false;
914                }
915
916                if (!b) {
917                    continue;
918                }
919
920                // read attribute data from source dataset
921                byte[] attrBuff = new byte[attrInfo[1] * HDFLibrary.DFKNTsize(attrInfo[0])];
922                try {
923                    HDFLibrary.GRgetattr(srcdid, i, attrBuff);
924                }
925                catch (Exception ex) {
926                    attrBuff = null;
927                }
928
929                if (attrBuff == null) {
930                    continue;
931                }
932
933                // attach attribute to the destination dataset
934                HDFLibrary.GRsetattr(dstdid, attrName[0], attrInfo[0], attrInfo[1], attrBuff);
935            } // for (int i=0; i<numberOfAttributes; i++)
936        }
937        catch (Exception ex) {
938            log.debug("copyAttribute:", ex);
939        }
940    }
941
942    //Implementing DataFormat
943    public List getMetadata(int... attrPropList) throws Exception {
944        throw new UnsupportedOperationException("getMetadata(int... attrPropList) is not supported");
945    }
946
947}