Attached is an example of writing a PDF file to an HDF5 dataset using an opaque datatype.

On 3/6/2012 2:54 AM, kuki wrote:
Hi people,
I am very new to HDF5.
I have an object ("message" is the name of the object in the given code) which
carries serialized text.
I want to write it to an HDF5 file using the opaque data type specifically.
How do I go about doing it?

// Sequence number used to give each recorded message a unique file name.
private static int count = 0;

/**
 * Serializes the given message and writes the resulting bytes to a new
 * file "E:/proj/<count>.bin", then increments the counter.
 *
 * @param message the message to serialize and persist
 */
public static void record(Message message) {
    BufferedOutputStream buf = null;
    try {
        buf = new BufferedOutputStream(
                new FileOutputStream("E:/proj/" + count + ".bin"));
        byte[] b = serializer.serialize(message);
        buf.write(b);
        buf.flush();
        count++;
    } catch (Exception e) {
        // Was silently swallowed — at least surface the failure.
        e.printStackTrace();
    } finally {
        // The original leaked the stream: close it on every path.
        if (buf != null) {
            try {
                buf.close();
            } catch (IOException ignored) {
                // best effort on close
            }
        }
    }
}

--
View this message in context: 
http://hdf-forum.184993.n3.nabble.com/Creating-opaque-data-types-from-Java-tp2714926p3803053.html
Sent from the hdf-forum mailing list archive at Nabble.com.

_______________________________________________
Hdf-forum is for HDF software users discussion.
[email protected]
http://mail.hdfgroup.org/mailman/listinfo/hdf-forum_hdfgroup.org


--
Thank you!
--pc

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.StringTokenizer;

import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;


/**
 * Stores a PDF file inside an HDF5 file as a one-dimensional dataset of
 * the HDF5 opaque datatype, tagged "Content-Type: application/pdf".
 *
 * <p>Usage: {@code java PDF2H5 [--dset_name name] [--output h5_filename] pdf_filename}
 */
public class PDF2H5 {
    // Parent group path of the target dataset; stays "/" unless the
    // dataset name is nested (e.g. "/g1/dset").
    private String gName = "/";

    /**
     * Creates (truncates) the HDF5 file and writes the PDF into it.
     *
     * @param fname  path of the source PDF file
     * @param h5Name path of the HDF5 file to create
     * @param dname  dataset path inside the file, e.g. "dset" or "/g1/dset"
     * @return the open HDF5 file identifier; the caller must close it
     * @throws IOException if the PDF cannot be read
     */
    private int pdfToH5(String fname, String h5Name, String dname) throws IOException {
        int fid = -1;

        // Create (or truncate) the output HDF5 file.
        try {
            fid = H5.H5Fcreate(h5Name, HDF5Constants.H5F_ACC_TRUNC,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        } catch (HDF5Exception ex) {
            ex.printStackTrace();
            System.exit(1); // was exit(0): a failure must not report success
        }

        // If the dataset path is nested, make sure the intermediate
        // groups exist before creating the dataset.
        int idx = dname.lastIndexOf('/');
        if (idx > 0) {
            gName = dname.substring(0, idx);
            try {
                createH5Group(fid, gName);
            } catch (Exception ex) {
                ex.printStackTrace(); // was silently swallowed
            }
        }

        try {
            createH5Dataset(fid, dname, fname);
        } catch (Exception ex) {
            ex.printStackTrace();
        }

        return fid;
    }

    /**
     * Creates every group along the given absolute path ("/a/b/c") that
     * does not already exist.
     *
     * @param fid   open HDF5 file identifier
     * @param gname absolute group path, components separated by '/'
     * @throws Exception if a missing group cannot be created
     */
    private void createH5Group(int fid, String gname) throws Exception {
        if (fid < 0 || gname == null || gname.length() <= 0)
            return;

        StringTokenizer st = new StringTokenizer(gname, "/");
        String path = "";
        while (st.hasMoreTokens()) {
            path += "/" + st.nextToken();

            int gid = -1;
            // Probe for the group; H5Gopen throws when it is absent.
            try {
                gid = H5.H5Gopen(fid, path);
            } catch (Exception ex) {
                gid = -1; // group does not exist yet
            }

            if (gid < 0)
                gid = H5.H5Gcreate(fid, path, 0);

            try {
                H5.H5Gclose(gid);
            } catch (Exception ex) {
                // not fatal: continue with the next path component
            }
        }
    }

    /**
     * Creates a 1-D dataset of 1-byte opaque elements named {@code dsetName}
     * and fills it with the raw bytes of the file {@code fname}.
     *
     * @param fid      open HDF5 file identifier
     * @param dsetName dataset path inside the file
     * @param fname    source file whose bytes are stored
     * @return true if the dataset was created and written
     * @throws Exception on any HDF5 or I/O failure
     */
    private boolean createH5Dataset(int fid, String dsetName, String fname)
            throws Exception {
        int did = -1, tid = -1, sid = -1;
        long size = (new File(fname)).length();
        long dims[] = { size };

        if (size <= 0)
            return false; // nothing to write

        BufferedInputStream bufferedInput = null;
        try {
            // One opaque element per byte of the source file.
            tid = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, 1);
            H5.H5Tset_tag(tid, "Content-Type: application/pdf");
            sid = H5.H5Screate_simple(1, dims, null);

            did = H5.H5Dcreate(fid, dsetName, tid, sid, HDF5Constants.H5P_DEFAULT);

            // Read the whole file. A single read() may return fewer bytes
            // than requested, so loop until the buffer is full or EOF.
            byte[] buffer = new byte[(int) size];
            bufferedInput = new BufferedInputStream(new FileInputStream(fname));
            int off = 0;
            while (off < buffer.length) {
                int n = bufferedInput.read(buffer, off, buffer.length - off);
                if (n < 0)
                    throw new IOException("Unexpected EOF in " + fname
                            + " after " + off + " bytes");
                off += n;
            }

            H5.H5Dwrite(did, tid, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                    HDF5Constants.H5P_DEFAULT, buffer);
        } finally {
            // Release every handle even if creation or the write failed
            // (the original leaked them on any exception).
            if (bufferedInput != null) {
                try {
                    bufferedInput.close();
                } catch (IOException ex) {
                }
            }
            try {
                H5.H5Tclose(tid);
            } catch (HDF5Exception ex) {
            }
            try {
                H5.H5Sclose(sid);
            } catch (HDF5Exception ex) {
            }
            try {
                H5.H5Dclose(did);
            } catch (HDF5Exception ex) {
            }
        }

        return (did > 0);
    }

    /**
     * Flushes and closes the HDF5 file, first closing every object
     * (dataset, group, datatype, attribute) still open in it.
     *
     * @param fid HDF5 file identifier; negative values are ignored
     * @throws HDF5Exception if the open-object query fails
     * @see ncsa.hdf.object.FileFormat#close()
     */
    private void closeH5File(int fid) throws HDF5Exception {
        if (fid < 0)
            return;

        // Close all open objects associated with this file.
        int n = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL);

        if (n > 0) {
            int[] oids = new int[n];
            H5.H5Fget_obj_ids(fid, HDF5Constants.H5F_OBJ_ALL, n, oids);

            for (int i = 0; i < n; i++) {
                int type = H5.H5Iget_type(oids[i]);

                try {
                    if (HDF5Constants.H5I_DATASET == type) {
                        H5.H5Dclose(oids[i]);
                    } else if (HDF5Constants.H5I_GROUP == type) {
                        H5.H5Gclose(oids[i]);
                    } else if (HDF5Constants.H5I_DATATYPE == type) {
                        H5.H5Tclose(oids[i]);
                    } else if (HDF5Constants.H5I_ATTR == type) {
                        H5.H5Aclose(oids[i]);
                    }
                } catch (Exception ex2) {
                    // best effort: keep closing the remaining objects
                }
            } // for (int i=0; i<n; i++)
        } // if ( n>0)

        try {
            H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_GLOBAL);
        } catch (Exception ex) {
        }

        try {
            H5.H5Fclose(fid);
        } catch (Exception ex) {
        }
    }

    /**
     * Command-line entry point: parses the options, converts the PDF,
     * and closes the resulting HDF5 file.
     *
     * @param args [--dset_name name] [--output h5_filename] pdf_filename
     */
    public static void main(String[] args) {
        String dsetName = null, fname = null, h5Name = null;

        // Validate input.
        if (args.length < 1) {
            System.out.println("Usage: java PDF2H5 [OPTIONS] pdf_filename.");
            System.out.println("  OPTIONS:");
            System.out
            .println("  \t--dset_name name\tName of the HDF5 dataset, e.g. \"dset\" or \"/g1/dset\".");
            System.out
            .println("  \t--output h5_filename\tName of the output HDF5 file.");
            System.exit(-1);
        }

        int idx = 0;
        for (int i = 0; i < args.length; i++) {
            if ("--dset_name".equalsIgnoreCase(args[i])) {
                dsetName = args[i + 1];
                idx = i + 2;
            } else if ("--output".equalsIgnoreCase(args[i])) {
                h5Name = args[i + 1];
                idx = i + 2;
            }
        }

        // Guard against options given without a trailing pdf filename
        // (the original indexed past the end of args here).
        if (idx >= args.length) {
            System.out.println("Usage: java PDF2H5 [OPTIONS] pdf_filename.");
            System.exit(-1);
        }
        fname = args[idx];

        PDF2H5 t2h5 = new PDF2H5();
        try {
            int fid = t2h5.pdfToH5(fname, h5Name, dsetName);
            t2h5.closeH5File(fid);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

}
_______________________________________________
Hdf-forum is for HDF software users discussion.
[email protected]
http://mail.hdfgroup.org/mailman/listinfo/hdf-forum_hdfgroup.org

Reply via email to