Hi Quincey,
>
> Hmm, nothing that I can recall with the CORE driver... Can you
> duplicate something in C?
>
I can't duplicate the problem on my machine, but the user was able to
compile and run the attached C program. FYI, he has only 8 GB of
physical RAM, so the crash may be related to memory exhaustion.
His platform is:
Mac OS X 10.6, HDF5 1.8.7
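For scale: the repro writes NDATASETS (2000) datasets of BLOCKSIZE (1024*1024) doubles, and with the core driver the whole file image is held in memory until the flush, so that's roughly

    2000 * 1024*1024 * 8 bytes = ~16 GB

of data, about twice his physical RAM. That's just my arithmetic, not something he measured.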
Thanks,
Andrew
Program output:
HDF5-DIAG: Error detected in HDF5 (1.8.7) thread 140735089405120:
  #000: H5F.c line 1991 in H5Fclose(): decrementing file ID failed
    major: Object atom
    minor: Unable to close file
  #001: H5I.c line 1450 in H5I_dec_app_ref(): can't decrement ID ref count
    major: Object atom
    minor: Unable to decrement reference count
  #002: H5F.c line 1775 in H5F_close(): can't close file
    major: File accessability
    minor: Unable to close file
  #003: H5F.c line 1930 in H5F_try_close(): problems closing file
    major: File accessability
    minor: Unable to close file
  #004: H5F.c line 1088 in H5F_dest(): unable to close file
    major: File accessability
    minor: Unable to close file
  #005: H5FD.c line 1197 in H5FD_close(): close failed
    major: Virtual File Layer
    minor: Unable to close file
  #006: H5FDcore.c line 545 in H5FD_core_close(): unable to flush file
    major: File accessability
    minor: Unable to flush data from cache
  #007: H5FDcore.c line 1047 in H5FD_core_flush(): error writing backing store
    major: Low-level I/O
    minor: Write failed
  #008: H5F.c line 1006 in H5F_dest(): unable to flush cache
    major: Object cache
    minor: Unable to flush data from cache
  #009: H5F.c line 1710 in H5F_flush(): low level flush failed
    major: Low-level I/O
    minor: Write failed
  #010: H5FD.c line 1894 in H5FD_flush(): driver flush request failed
    major: Virtual File Layer
    minor: Unable to initialize object
  #011: H5FDcore.c line 1047 in H5FD_core_flush(): error writing backing store
    major: Low-level I/O
    minor: Write failed
Closing other objects...
Segmentation fault
GDB backtrace:
Program received signal EXC_BAD_ACCESS, Could not access memory.
Reason: KERN_INVALID_ADDRESS at address: 0x000000000000052c
0x00000001004de76e in H5F_close (f=0x1014d8e20) at H5F.c:1754
1754 if(f->shared->fc_degree == H5F_CLOSE_SEMI) {
(gdb) where
#0 0x00000001004de76e in H5F_close (f=0x1014d8e20) at H5F.c:1754
#1 0x0000000100538e6b in H5I_clear_type (type=<value temporarily
unavailable, due to optimizations>, force=0, app_ref=0) at H5I.c:599
(gdb) p f
$1 = (H5F_t *) 0x1014d8e20
(gdb) p f->shared
$2 = (H5F_file_t *) 0x0
(gdb)
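My reading of the backtrace, for what it's worth: f itself is still valid but f->shared is already NULL, so the dereference at H5F.c:1754 faults, and the fault address 0x52c is presumably just the offset of fc_degree within H5F_file_t on his build. A guard along the lines of

    if(f->shared && f->shared->fc_degree == H5F_CLOSE_SEMI) {

would avoid the crash during the error cleanup, though the underlying backing-store write failure would of course remain. That's only a sketch of where it blows up, not a proposed patch.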
/*
import h5py
import numpy as np

bs = 1024 * 1024
f = h5py.File('myfile.hdf5', driver='core', backing_store=True)
for ct in range(2000):
    print ct
    dset = f.create_dataset("MyDataset%d" % ct, (bs,), 'd')
    dset[...] = np.random.uniform(0, 1, (bs,))
*/
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "hdf5.h"

#define NDATASETS 2000
#define BLOCKSIZE (1024*1024)

int main(void)
{
    hid_t    fid, dsid, spaceid, fapl;
    hsize_t *dims   = NULL;
    char    *dsname = NULL;
    double  *data   = NULL;
    int      i;

    dsname = (char *)malloc(100);
    data   = (double *)malloc(sizeof(double) * BLOCKSIZE);
    dims   = (hsize_t *)malloc(sizeof(hsize_t));
    dims[0] = BLOCKSIZE;
    memset(data, 0, BLOCKSIZE * sizeof(double));

    /* Core (in-memory) driver, 64 KB increment, backing store enabled
     * (the same as driver='core', backing_store=True in the h5py script). */
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_core(fapl, 64 * 1024, 1);

    fid     = H5Fcreate("myfile.hdf5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    spaceid = H5Screate_simple(1, dims, dims);

    /* Write NDATASETS contiguous 1M-element double datasets; the whole
     * file image stays in memory until the file is flushed/closed. */
    for(i = 0; i < NDATASETS; i++) {
        fprintf(stderr, "Writing dataset %d...\n", i);
        sprintf(dsname, "MyDataset%d", i);
        dsid = H5Dcreate1(fid, dsname, H5T_IEEE_F64LE, spaceid, H5P_DEFAULT);
        H5Dwrite(dsid, H5T_NATIVE_DOUBLE, spaceid, spaceid, H5P_DEFAULT, data);
        H5Dclose(dsid);
    }

    /* On his machine the flush to the backing store fails here (error
     * stack above); the segfault comes later, during cleanup. */
    fprintf(stderr, "Closing file...\n");
    H5Fclose(fid);

    fprintf(stderr, "Closing other objects...\n");
    H5Pclose(fapl);
    H5Sclose(spaceid);

    free(data);
    free(dsname);
    free(dims);

    return 0;
}
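He presumably built it with the h5cc wrapper against 1.8.7, something like

    h5cc -o core_repro core_repro.c

(the file name there is just my placeholder).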