I am having a problem using compound data types which include doubles (and .NET 
4.0/x64).  There appears to be an issue where doubles are aligned on a boundary 
when in a structure, and so structures which naturally do not have aligned 
doubles have padding so that H5.write does not succeed.  A simple example of 
the issue is below.  

Is this a known issue?  Is there any known workaround, other than padding the 
struct to align doubles on an 8 byte boundary (or only using 64bit types)?

Thanks,
Kevin

PS Correct behavior occurs If the  H5T.H5Type.STD_I16LE is replaced with 
H5T.H5Type.STD_I64LE, and a long is used in the structure.  Sorry this is not 
the simplest example.

namespace Test2
{
    using HDF5DotNet;

    class Program
    {
        public struct testStruct
        {
            public short int1; 
            public double double1;
            public double double2;
        }

        static void Main(string[] args)
        {
            testStruct[] test = new testStruct[1];
            test[0].int1 = 9; 
            test[0].double1 = 3.14;
            test[0].double2 = 10.01;
            
            
            if (System.IO.File.Exists("test.h5"))
            {
                System.IO.File.Delete("test.h5");
            }

            var fileId = H5F.create("test.h5", H5F.CreateMode.ACC_TRUNC);


            var dims = new long[1] { 0 };
            var maxDims = new long[1] { -1 };
            
            var typeId = H5T.create(H5T.CreateClass.COMPOUND, 18);
            H5T.insert(typeId, "int1", 0, H5T.H5Type.STD_I16LE); 
            H5T.insert(typeId, "double1", 2, H5T.H5Type.IEEE_F64LE);
            H5T.insert(typeId, "double2", 10, H5T.H5Type.IEEE_F64LE);
            
            var dataId = H5S.create_simple(1, dims, maxDims);
            var propertyListDefault = new 
H5PropertyListId(H5P.Template.DEFAULT);
            var plist = H5P.create(H5P.PropertyListClass.DATASET_CREATE);
            var dim = new long[1] { 8192 };
            H5P.setChunk(plist, dim);
            H5P.setDeflate(plist, 6);

            var dtype = H5T.copy(H5T.H5Type.IEEE_F64LE);
            var dataSetId = H5D.create(fileId, "test", typeId, dataId, 
propertyListDefault, plist, propertyListDefault);
            H5D.close(dataSetId);
            H5P.close(plist);
            H5S.close(dataId);

            var dsetId = H5D.open(fileId, "test");
            var dataspaceId = H5D.getSpace(dsetId);
            var currentSize = H5S.getSimpleExtentNPoints(dataspaceId);
            long[] currentSizeArray = new long[1];
            currentSizeArray[0] = currentSize;
            long[] rowsToAdd = new long[1] { (long)currentSize + 
(long)test.Length };
            rowsToAdd[0] = (long)currentSize + (long)test.Length;
            H5D.setExtent(dsetId, rowsToAdd);
            var filespaceId = H5D.getSpace(dsetId);
            H5S.selectHyperslab(filespaceId, H5S.SelectOperator.SET, 
currentSizeArray, rowsToAdd);
            var memoryspaceId = H5S.create_simple(1, rowsToAdd);
            var xferPropListId = new H5PropertyListId(H5P.Template.DEFAULT);
            H5D.write(dsetId, typeId, memoryspaceId, filespaceId, 
xferPropListId, new H5Array<testStruct>(test));
        }
    }
}



_______________________________________________
Hdf-forum is for HDF software users discussion.
[email protected]
http://mail.hdfgroup.org/mailman/listinfo/hdf-forum_hdfgroup.org

Reply via email to