I have been using the nujan library to create HDF5 files. That works for floats and ints, following the examples.

Now I would like to add an array of strings in order to insert some METADATA into the file.

I got the following error:
// the following does not work
      idtype = HdfGroup.DTYPE_STRING_FIX;
      int [] dimsstr = {3};
      String [] stra = {doi, esdt, long_esdt};
      trad[5] = rootGroup.addVariable(
                   "METADATA",     // variable name
                   idtype,         // data type
                   0,              // stgFieldLen
                   dimsstr,        // dimension lengths
                   null,           // chunk lengths
                   null,           // fill value or null
                   0);             // compression: 0 is none, 9 is max
// error appears on the line trad[5] =
// this is what the error looks like:
/*
edu.ucar.ral.nujan.hdf.HdfException: Invalid stgFieldLen for DTYPE_STRING_FIX: must be > 0
        at edu.ucar.ral.nujan.hdf.BaseBlk.throwerr(BaseBlk.java:155)
        at edu.ucar.ral.nujan.hdf.MsgDataType.<init>(MsgDataType.java:233)
        at edu.ucar.ral.nujan.hdf.HdfGroup.<init>(HdfGroup.java:560)
        at edu.ucar.ral.nujan.hdf.HdfGroup.addVariable(HdfGroup.java:704)
        at ArcHDFmeta.writeHdf(ArcHDFmeta.java:371)
        at ArcHDFmeta.doFile(ArcHDFmeta.java:183)
        at FileDialogN.doDir(FileDialogN.java:255)
        at ArcHDFmeta.doDir(ArcHDFmeta.java:34)
        at FileDialogN.doDir(FileDialogN.java:240)
        at ArcHDFmeta.doDir(ArcHDFmeta.java:34)
        at FileDialogN.doDir(FileDialogN.java:240)
        at ArcHDFmeta.doDir(ArcHDFmeta.java:34)
        at FileDialogN.doDir(FileDialogN.java:240)
        at ArcHDFmeta.doDir(ArcHDFmeta.java:34)
        at FileDialogN.proc(FileDialogN.java:84)
        at ArcHDFmeta.proc(ArcHDFmeta.java:34)
        at ArcHDFmeta.main(ArcHDFmeta.java:104)
*/
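
From the error message, I am guessing that for DTYPE_STRING_FIX the stgFieldLen
argument must give the fixed (maximum) length of each string, rather than 0 as
in the numeric cases. Is something like the following sketch the right approach?
(Untested; maxLen is just my guess at the meaning of stgFieldLen.)

      idtype = HdfGroup.DTYPE_STRING_FIX;
      int [] dimsstr = {3};
      String [] stra = {doi, esdt, long_esdt};
      int maxLen = 0; // length of the longest metadata string (my guess at stgFieldLen)
      for (String s : stra) maxLen = Math.max(maxLen, s.length());
      trad[5] = rootGroup.addVariable(
                   "METADATA",     // variable name
                   idtype,         // data type
                   maxLen,         // stgFieldLen: must be > 0 for DTYPE_STRING_FIX
                   dimsstr,        // dimension lengths
                   null,           // chunk lengths
                   null,           // fill value or null
                   0);             // compression: 0 is none, 9 is max

Or should I be using HdfGroup.DTYPE_STRING_VAR with stgFieldLen 0 for
variable-length strings? I have not found an example of either.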

(The full code is attached.)
Any help would be appreciated.

--
G. Garrett Campbell
NSIDC
303 492 5194

/*
Simple write-out of swath data.
Contains radiative temperature, lat, lon, and time.
Data is a rectangular array but not equally spaced in lat/lon.
classpath = %classpath%;C:/nsidc/netcdf/nujan/nujan-1.4.jar

Pirated from http://www.ral.ucar.edu/~steves/nujan.html

This outputs the swath-format data containing the maximum information.
A companion remapped data set provides convenient access to the data,
but one loses information about the view point.

G.G. Campbell 1/2012 NSIDC
*/


// package edu.ucar.ral.nujan.hdf;

import java.util.zip.*;
import java.util.*;
import java.io.*;
import edu.ucar.ral.nujan.hdf.HdfFileWriter;
import edu.ucar.ral.nujan.hdf.HdfGroup;
import edu.ucar.ral.nujan.hdf.HdfException;


/**
 * Simple example driver for HdfFileWriter.
 * set classpath=%classpath%;c:/jutil/netcdfAll-4.2.jar
 * Verify with: java ucar.nc2.NCdump c:\nsidc\nimbus3\dum.hdf -v {par}
 * Uses the nujan library to write HDF5 files.
 */

public class ArcHDFmeta extends FileDialogN {
//
// headless version
// could that be put into FileDialogN?
//

   long tlow = Long.MIN_VALUE;
   long thigh = Long.MAX_VALUE;
   ReadOrbCal rc = null;
   boolean doZip = false;
   boolean doComp = false;
   String outDir = "/projects/NIMBUS/netcdf/";
   boolean png = false;
   boolean visib = true;
   boolean equalize = false;

   public static void main( String[] args) {
      if(args.length < 1) {
         System.out.println("convert .arc files into .hdf5 or nc");
         System.out.println("gz will gzip the files to save space");
         System.out.println("comp will use hdf compression within the files (recommended)");
         System.out.println("seems readable by NCdump as netcdf and panoply");
         System.out.println("java ArcHDFN dir arc t<yymmdd t>yymmdd out= [comp,png,gz,eq]");
         System.out.println("java ArcHDFN /projects/NIMBUS/scanned/nimbus2/N2.INS.1037 .arc. png gz out=/disks/sidads_ftp/pub/incoming/nimbus/avcs");
         System.out.println("about 20% of raw size, good result");
         System.out.println("java ArcHDFN /projects/NIMBUS/scanned/nimbus2 .arc. \"t<660520\" \"t>660501\" comp png");
         System.out.println("java ArcHDFN /projects/NIMBUS/scanned/nimbus1/N1.INS.$1 .arc comp png");
         System.out.println("java ArcHDFN /projects/NIMBUS/scanned/nimbus1/N1.INS.$1 .arc comp"); // for sge
         System.exit(10);
      }
      ArcHDFmeta hg = new ArcHDFmeta();
      int i1966 = 1966;
      if(args[0].toLowerCase().contains("nimbus1")) i1966 = 1964;
      if(args.length > 2) {
         for (int m = 2; m < args.length; m++) {
            System.out.println(m + " " + args[m]);
            if(args[m].contains("out=")) {
               int k = args[m].indexOf("=")+1;
               hg.outDir = args[m].substring(k);
               if(!hg.outDir.endsWith(File.separator)) hg.outDir = hg.outDir + File.separator;
            }
            if(args[m].contains("<")) {
               int k = args[m].indexOf("<")+1;
               int iyymmdd = Integer.parseInt(args[m].substring(k));
               int iy = 1900 + iyymmdd/10000;
               int im = (iyymmdd/100)%100;
               int id = iyymmdd%100;
               hg.thigh = Tutil.tmillis(iy,im,id,0,0,0,1);
            }
            if(args[m].contains(">")) {
               int k = args[m].indexOf(">")+1;
               int iyymmdd = Integer.parseInt(args[m].substring(k));
               int iy = 1900 + iyymmdd/10000;
               int im = (iyymmdd/100)%100;
               int id = iyymmdd%100;
               hg.tlow = Tutil.tmillis(iy,im,id,0,0,0,1);
            }
            if(args[m].contains("gz")) hg.doZip = true;
            if(args[m].contains("zip")) hg.doZip = true;
            if(args[m].contains("png")) {
               hg.png = true;
               hg.visib = false;
            }
            if(args[m].contains("eq")) {
               hg.equalize = true;
            }
            if(args[m].contains("comp")) hg.doComp = true;
         }
      }
      hg.rc = new ReadOrbCal(i1966);
      hg.proc(args);
   }

   public void gzip(String sfin, String sfout, boolean delInput) {
      try {
         File fin = new File(sfin);
         BufferedInputStream din = new BufferedInputStream(
                              new FileInputStream(fin));
         BufferedOutputStream dout = new BufferedOutputStream(
                                     new GZIPOutputStream(
                                     new FileOutputStream(
                                     new File(sfout))));
         byte [] b = new byte[10240];
         int iw = din.read(b,0,b.length);
         while(iw > 0) {
            dout.write(b,0,iw);
            iw = din.read(b,0,b.length);
         }
         dout.close();
         din.close();
         if(delInput) {
            fin.delete();
         }
      } catch (IOException ee) {
         ee.printStackTrace(System.out);
         System.exit(3);
      }
   }

   int [] calcYMD(String name) {
      StringTokenizer st = new StringTokenizer(name,".");
      String first = st.nextToken();
      if(first.contains("N") || first.contains("n")) {
         first = st.nextToken();
      }
      int iorb = Integer.parseInt(first);
      int icam = Integer.parseInt(st.nextToken());
      int [] it = new int[6];
      for (int m = 0; m < it.length; m++) {
         it[m] = Integer.parseInt(st.nextToken());
      }
      long tm = Tutil.tmillis(it);
      it = Tutil.parseTime(tm);
      return it;
   }


   public void doFile(File fin, String filt) {
      String sf = fin.toString();
      int k = sf.lastIndexOf("/")+1;
      if(k < 1) k = sf.lastIndexOf("\\")+1;
      String nsat = "N1.";
      if(sf.contains("N2")) nsat = "N2.";
      String sout = sf.substring(k);
      if(!sout.startsWith("N")) {
         sout = nsat + sout;
      }
      int [] itm = calcYMD(sout);
      String sday = Integer.toString(itm[6]);
// /projects/NIMBUS/netcdf/
      String outD = outDir + sday + File.separator;
      File foutDir = new File(outD);
      if(!foutDir.exists()) foutDir.mkdirs();

      long time = calcTime(sf);
      if(time < tlow || time > thigh) return;
      String sfout = outD + sout.replace("arc","nc");
      String hout = sfout.replace(".gz","");
      System.out.println(" hout " + hout);
      File hdfout = new File(hout);
      if(hdfout.exists()) {
         System.out.println(" did " + hout);
         return;
      }

      SaveAVCS2 sv = new SaveAVCS2(fin);
   // first make unzipped nc file
      try {
         writeHdf(hout, sv);
         writeXML(hout, sv);
         if(doZip) gzip(hout, hout+".gz", true);
         if(png) {
            String label = sout.substring(k);
            label = label.replace(".arc","").replace(".gz","");
            byte [] c = sv.getByteCalib(); // hope this makes a better picture
            ShowBig sb = new ShowBig(sv.nx, sv.ny, c, label, 3, 3, visib);
            if(equalize) sb.di.equalize();
            String pout = hout.replace(".nc",".png");
            sb.di.savePNG(pout, label);
            sb.close();
         }
      } catch (Exception ehdf) {
         ehdf.printStackTrace(System.out);
      }
   // then zip it
   }

   public void writeXML(String hout, SaveAVCS2 sv) {
// need some help here
      // find corners
      // get doi
      // write out in xml format
      WriteXML w = new WriteXML();
      float [] latCorner = new float[4];
      float [] lonCorner = new float[4];
      int j = 10;
      int i = 10;
      int ij = i + j*sv.nx;
      latCorner[0] = sv.lat[ij];
      lonCorner[0] = sv.lon[ij];
      ij = ij + sv.nx-20;
      latCorner[1] = sv.lat[ij];
      lonCorner[1] = sv.lon[ij];
      ij = sv.nx-10 + (sv.ny-10)*sv.nx;
      latCorner[2] = sv.lat[ij];
      lonCorner[2] = sv.lon[ij];
      ij = 10 + (sv.ny-10)*sv.nx;
      latCorner[3] = sv.lat[ij];
      lonCorner[3] = sv.lon[ij];
      w.test(hout, latCorner, lonCorner, 0);
//              String doi = NimbusNames.getDOI(0);
//      String esdt = NimbusNames.getShort(0);
//      String long_esdt = NimbusNames.getLong(0);
   }

   long calcTime(String sf) {
      int k = sf.lastIndexOf("/")+1;
      if(k < 1) k = sf.lastIndexOf("\\")+1;
      StringTokenizer st = new StringTokenizer(sf.substring(k),".");
      String n3 = st.nextToken();
      int norb = Integer.parseInt(st.nextToken());
      int [] it = new int[6];
      for (int m = 0; m < 6; m++) {
         it[m] = Integer.parseInt(st.nextToken());
      }
      long time = Tutil.tmillis(it);
      return time;
   }

   String [] lab = {"brightness","qual","lat","lon","bcal"};
   String [] long_name = {"brightness","quality","latitude","longitude","byte_calibration"};
   String [] units = {"1","1","degrees_north","degrees_east","1"};

   boolean writeHdf(String fileName, SaveAVCS2 sv) throws edu.ucar.ral.nujan.hdf.HdfException {
      int option = HdfFileWriter.OPT_ALLOW_OVERWRITE;
      HdfFileWriter hdfFile = new HdfFileWriter( fileName, option);
      prtln("hdfFile: " + hdfFile);
      Float fzindef = new Float(Float.MAX_VALUE);
/*
   byte [] b = null;
   byte [] qual = null;
   float [] lat = null;
   float [] lon = null;
   byte [] bcal = null;
*/
      HdfGroup rootGroup = hdfFile.getRootGroup();
      HdfGroup [] trad = new HdfGroup[6];
      int ncomp = 0; // compression (0 = none); compressed data must be chunked
      int numx = sv.nx;
      int numy = sv.ny;
   // assume lat,lon are the same size as the data
      int[] dims = {numy, numx};      // dimension lengths
      // corresponds to arrays like da[numy][numx]

   // Add a variable with contiguous storage
      int[] specChunkDims = null;     // chunk lengths (contiguous, not chunked)
      int n2 = 2;
      if(doComp) {
         // pick a chunk count n2 that divides numy evenly
         if((numy/n2)*n2 != numy) {
            n2 = 1;
            if((numy/3)*3 == numy) n2 = 3;
            if((numy/5)*5 == numy) n2 = 5;
            System.out.println("numy not even " + numy + " new n2 " + n2);
         }
         specChunkDims = new int[2];
         specChunkDims[0] = numy/n2;
         specChunkDims[1] = numx;
         ncomp = 1;
      }
/** dtype value: 8 bit signed integer */
//public static final int DTYPE_SFIXED08      =  1;
/** dtype value: 8 bit unsigned integer */
//public static final int DTYPE_UFIXED08      =  2;
/** dtype value: 16 bit signed integer */
//public static final int DTYPE_FIXED16       =  3;
/** dtype value: 32 bit signed integer */
//public static final int DTYPE_FIXED32       =  4;
/** dtype value: 64 bit signed integer */
//public static final int DTYPE_FIXED64       =  5;
/** dtype value: 32 bit signed float */
//public static final int DTYPE_FLOAT32       =  6;
/** dtype value: 64 bit signed float */
//public static final int DTYPE_FLOAT64       =  7;
/*HdfGroup.DTYPE_STRING_VAR*/
           int idtype = 2;
      float zindef = Float.MAX_VALUE;
      byte bmiss = (byte)255;
      idtype = HdfGroup.DTYPE_SFIXED08;
      trad[0] = rootGroup.addVariable(
                   long_name[0],   // variable name
                   idtype,         // data type
                   0,              // stgFieldLen
                   dims,           // dimension lengths
                   specChunkDims,  // chunk lengths
                   bmiss,          // fill value or null
                   ncomp);         // compression: 0 is none, 9 is max
      trad[0].addAttribute("units", HdfGroup.DTYPE_STRING_FIX, 0, units[0], false);
// new stuff
      String doi = NimbusNames.getDOI(0);
      String esdt = NimbusNames.getShort(0);
      String long_esdt = NimbusNames.getLong(0);
      trad[0].addAttribute("DOI",  HdfGroup.DTYPE_STRING_FIX, 0, doi, false);
      trad[0].addAttribute("ESDT", HdfGroup.DTYPE_STRING_FIX, 0, esdt, false);
      trad[0].addAttribute("long_ESDT", HdfGroup.DTYPE_STRING_FIX, 0, long_esdt, false);
// end new stuff
      idtype = HdfGroup.DTYPE_UFIXED08;
      trad[1] = rootGroup.addVariable(
                   long_name[1],   // variable name
                   idtype,         // data type
                   0,              // stgFieldLen
                   dims,           // dimension lengths
                   specChunkDims,  // chunk lengths
                   bmiss,          // fill value or null
                   ncomp);         // compression: 0 is none, 9 is max
      idtype = HdfGroup.DTYPE_FLOAT32;
      trad[2] = rootGroup.addVariable(
                   long_name[2],   // variable name
                   idtype,         // data type
                   0,              // stgFieldLen
                   dims,           // dimension lengths
                   specChunkDims,  // chunk lengths
                   zindef,         // fill value or null
                   ncomp);         // compression: 0 is none, 9 is max
      trad[2].addAttribute("units", HdfGroup.DTYPE_STRING_FIX, 0, units[2], false);
      idtype = HdfGroup.DTYPE_FLOAT32;
      trad[3] = rootGroup.addVariable(
                   long_name[3],   // variable name
                   idtype,         // data type
                   0,              // stgFieldLen
                   dims,           // dimension lengths
                   specChunkDims,  // chunk lengths
                   zindef,         // fill value or null
                   ncomp);         // compression: 0 is none, 9 is max

      trad[3].addAttribute("units", HdfGroup.DTYPE_STRING_FIX, 0, units[3], false);
      int [] dimsone = {256};
      idtype = HdfGroup.DTYPE_UFIXED08;
      trad[4] = rootGroup.addVariable(
                   long_name[4],   // variable name
                   idtype,         // data type
                   0,              // stgFieldLen
                   dimsone,        // dimension lengths
                   null,           // chunk lengths
                   bmiss,          // fill value or null
                   0);             // compression: 0 is none, 9 is max
      trad[4].addAttribute("units", HdfGroup.DTYPE_STRING_FIX, 0, units[4], false);
// the following does not work
      idtype = HdfGroup.DTYPE_STRING_FIX;
      int [] dimsstr = {3};
      String [] stra = {doi, esdt, long_esdt};
      trad[5] = rootGroup.addVariable(
                   "METADATA",     // variable name
                   idtype,         // data type
                   0,              // stgFieldLen
                   dimsstr,        // dimension lengths
                   null,           // chunk lengths
                   null,           // fill value or null
                   0);             // compression: 0 is none, 9 is max
// error appears on the line trad[5] =
// this is what the error looks like:
/*
edu.ucar.ral.nujan.hdf.HdfException: Invalid stgFieldLen for DTYPE_STRING_FIX: must be > 0
        at edu.ucar.ral.nujan.hdf.BaseBlk.throwerr(BaseBlk.java:155)
        at edu.ucar.ral.nujan.hdf.MsgDataType.<init>(MsgDataType.java:233)
        at edu.ucar.ral.nujan.hdf.HdfGroup.<init>(HdfGroup.java:560)
        at edu.ucar.ral.nujan.hdf.HdfGroup.addVariable(HdfGroup.java:704)
        at ArcHDFmeta.writeHdf(ArcHDFmeta.java:371)
        at ArcHDFmeta.doFile(ArcHDFmeta.java:183)
        at FileDialogN.doDir(FileDialogN.java:255)
        at ArcHDFmeta.doDir(ArcHDFmeta.java:34)
        at FileDialogN.doDir(FileDialogN.java:240)
        at ArcHDFmeta.doDir(ArcHDFmeta.java:34)
        at FileDialogN.doDir(FileDialogN.java:240)
        at ArcHDFmeta.doDir(ArcHDFmeta.java:34)
        at FileDialogN.doDir(FileDialogN.java:240)
        at ArcHDFmeta.doDir(ArcHDFmeta.java:34)
        at FileDialogN.proc(FileDialogN.java:84)
        at ArcHDFmeta.proc(ArcHDFmeta.java:34)
        at ArcHDFmeta.main(ArcHDFmeta.java:104)
*/

      trad[5].addAttribute("DOI",  HdfGroup.DTYPE_STRING_FIX, 0, doi, false);
      trad[5].addAttribute("ESDT", HdfGroup.DTYPE_STRING_FIX, 0, esdt, false);
      trad[5].addAttribute("long_ESDT", HdfGroup.DTYPE_STRING_FIX, 0, 
long_esdt, false);
  // Print the variables created above.
      for (int k = 0; k < trad.length; k++) {
         prtln("trad: " + trad[k]);
      }
  // End the definition stage.
  // All groups, variables, and attributes are created before endDefine.
  // All calls to writeData occur after endDefine.
      hdfFile.endDefine();
      byte [] bcal = rc.bcalib(sv.time);
      byte [][] b2d = byte2d(sv.b, dims); // raw data (not calibrated)
      byte [][] q2d = byte2d(sv.qual, dims);
      float [][] lat2d = float2d(sv.lat, dims);
      float [][] lon2d = float2d(sv.lon, dims);
/*
      for (int m = 0; m < n2; m++) {
         int[] startIxs = null;
   // Write out the temperatureData array in two chunks.
         startIxs = new int[] {(m*numx)/n2, 0};
         double[][] temperatureDataChunk
            = new double [specChunkDims[0]] [specChunkDims[1]];
         for (int ix = 0; ix < specChunkDims[0]; ix++) {
            for (int iy = 0; iy < specChunkDims[1]; iy++) {
               temperatureDataChunk[ix][iy] = temperatureData[ix+startIxs[0]][iy];
            }
         }

   // Write out the humidityData array in one call.
         temperature.writeData( startIxs, temperatureDataChunk, false);
      }
*/
      if(doComp) {
         for (int i = 0; i < n2; i++) {
            int[] startIxs = {(i*numy)/n2, 0}; // output location
            byte [][] b2dPart = new byte[specChunkDims[0]][specChunkDims[1]];
            for (int ix = 0; ix < specChunkDims[0]; ix++) {
               for (int iy = 0; iy < specChunkDims[1]; iy++) {
                  b2dPart[ix][iy] = b2d[ix+startIxs[0]][iy];
               }
            }
            trad[0].writeData(startIxs, b2dPart, false);
            byte [][] q2dPart = new byte[specChunkDims[0]][specChunkDims[1]];
            for (int ix = 0; ix < specChunkDims[0]; ix++) {
               for (int iy = 0; iy < specChunkDims[1]; iy++) {
                  q2dPart[ix][iy] = q2d[ix+startIxs[0]][iy];
               }
            }
            trad[1].writeData(startIxs, q2dPart, false);
            float [][] lat2dPart = new float[specChunkDims[0]][specChunkDims[1]];
            for (int ix = 0; ix < specChunkDims[0]; ix++) {
               for (int iy = 0; iy < specChunkDims[1]; iy++) {
                  lat2dPart[ix][iy] = lat2d[ix+startIxs[0]][iy];
               }
            }
            trad[2].writeData(startIxs, lat2dPart, false);
            float [][] lon2dPart = new float[specChunkDims[0]][specChunkDims[1]];
            for (int ix = 0; ix < specChunkDims[0]; ix++) {
               for (int iy = 0; iy < specChunkDims[1]; iy++) {
                  lon2dPart[ix][iy] = lon2d[ix+startIxs[0]][iy];
               }
            }
            trad[3].writeData(startIxs, lon2dPart, false);
         }
      } else {
         int[] startIxs = null;
         trad[0].writeData(startIxs, b2d, false);
         trad[1].writeData(startIxs, q2d, false);
         trad[2].writeData(startIxs, lat2d, false);
         trad[3].writeData(startIxs, lon2d, false);
      }
   // Write the calibration and metadata arrays in one call each.
      int[] startCal = null;
      trad[4].writeData(startCal, bcal, false);
      trad[5].writeData(startCal, stra, false);
      sv.setCal(bcal);

      hdfFile.close();
      prtln("All done " + fileName);
      return true;

   } // end writeHdf

   // convert a 1-D array to 2-D [ny][nx]; nxny = {ny, nx}
   byte [][] byte2d(byte [] b, int [] nxny) {
      int ny = nxny[0];
      int nx = nxny[1];
      byte [][] b2 = new byte[ny][nx];
      for (int j = 0; j < ny; j++) {
         System.arraycopy(b, j*nx, b2[j], 0, nx);
      }
      return b2;
   }

   float [][] float2d(float [] f, int [] nxny) {
      int ny = nxny[0];
      int nx = nxny[1];
      float [][] f2 = new float[ny][nx];
      for (int j = 0; j < ny; j++) {
         System.arraycopy(f, j*nx, f2[j], 0, nx);
      }
      return f2;
   }

   short [][] short2d(float [] f, int [] nxny) {
      int ny = nxny[0];
      int nx = nxny[1];
      short [][] i2 = new short[ny][nx];
      for (int j = 0; j < ny; j++) {
         for (int i = 0; i < nx; i++) {
            i2[j][i] = (short)(f[i+j*nx]*100.f);
         }
      }
      return i2;
   }


   static void prtln( String msg) {
      System.out.println( msg);
   }

} // end class