Re: [zfs-discuss] zpool core dumped
S10U4 SPARC on V890 + patches, StorageTek 6140 + CSM200 Generic_127111-09 The same issue, still not patched? If I set NOINUSE_CHECK=1 the pool is created successfully Regards -- Piotr (DrFugazi) Tarnowski This message posted from opensolaris.org ___ zfs-discuss mailing list zfs-discuss@opensolaris.org http://mail.opensolaris.org/mailman/listinfo/zfs-discuss
[zfs-discuss] zpool core dumped
Hello zfs-discuss, S10U2 SPARC + patches Generic_118833-20 LUNs from 3510 array. bash-3.00# zpool import no pools available to import bash-3.00# zpool create f3-1 mirror c5t600C0FF0098FD535C3D2B900d0 c5t600C0FF0098FD54CB01E1100d0 mirror c5t600C0FF0098FD5255DA45900d0 c5t600C0FF0098FD56E93570600d0 mirror c5t600C0FF0098FD502D3066400d0 c5t600C0FF0098FD566A4CD4D00d0 mirror c5t600C0FF0098FD52CA2866D00d0 c5t600C0FF0098FD5644CDA1B00d0 free(fe746738): invalid or corrupted buffer stack trace: libumem.so.1'?? (0xff24b460) libCrun.so.1'__1c2k6Fpv_v_+0x4 libCstd_isa.so.1'__1cDstdMbasic_string4Ccn0ALchar_traits4Cc__n0AJallocator4Cc___2G6Mrk1_r1_+0xb8 libCstd.so.1'__1cH__rwstdNlocale_vector4nDstdMbasic_string4Ccn0BLchar_traits4Cc__n0BJallocator4Cc_Gresize6MIn0E__p3_+0xc4 libCstd.so.1'__1cH__rwstdKlocale_imp2t5B6MII_v_+0xc4 libCstd.so.1'__1cDstdGlocaleEinit6F_v_+0x44 libCstd.so.1'__1cDstdNbasic_istream4Cwn0ALchar_traits4Cw___2t6Mn0AIios_baseJEmptyCtor__v_+0x84 libCstd.so.1'?? (0xfe57b164) libCstd.so.1'?? (0xfe57b8ac) libCstd.so.1'_init+0x1e0 ld.so.1'?? (0xff3bfea8) ld.so.1'?? (0xff3cca04) ld.so.1'_elf_rtbndr+0x10 libCrun.so.1'?? (0xfe46a950) libCrun.so.1'__1cH__CimplKcplus_init6F_v_+0x48 libCstd_isa.so.1'_init+0xc8 ld.so.1'?? (0xff3bfea8) ld.so.1'?? (0xff3c5318) ld.so.1'?? (0xff3c5474) ld.so.1'dlopen+0x64 libmeta.so.1'sdssc_bind_library+0x88 libdiskmgt.so.1'?? (0xff2b080c) libdiskmgt.so.1'?? (0xff2aa594) libdiskmgt.so.1'?? (0xff2aa30c) libdiskmgt.so.1'dm_get_stats+0x12c libdiskmgt.so.1'dm_get_slice_stats+0x44 libdiskmgt.so.1'dm_inuse+0x74 zpool'check_slice+0x20 zpool'check_disk+0x13c zpool'check_device+0x48 zpool'check_in_use+0x80 zpool'check_in_use+0xd8 zpool'check_in_use+0xd8 zpool'make_root_vdev+0x30 zpool'?? 
(0x12f50) zpool'main+0xe8 zpool'_start+0x108 Abort (core dumped) bash-3.00# bash-3.00# mdb /var/core/core.XXX.zpool.1515.1156281273 Loading modules: [ libumem.so.1 libnvpair.so.1 libuutil.so.1 libc.so.1 libsysevent.so.1 libavl.so.1 ld.so.1 ] ::status debugging core file of zpool (32-bit) from nfs-10-1.srv file: /sbin/zpool initial argv: zpool create f3-1 mirror c5t600C0FF0098FD535C3D2B900d0 c5t600C0FF00 threading model: multi-threaded status: process terminated by SIGABRT (Abort) ::stack libc.so.1`_lwp_kill+8(6, 0, 20f50, ff24b998, ff26a000, ff26abc4) libumem.so.1`umem_do_abort+0x1c(26, ffbf9a08, 6, 20e8c, ff256a34, 0) libumem.so.1`umem_err_recoverable+0x7c(ff257a78, a, 20d84, ff3b3904, ff26d0d0, ff257a83) libumem.so.1`process_free+0x114(fe746738, 1, 0, 3e3a1000, 1ecac, fe46a950) libCrun.so.1`__1c2k6Fpv_v_+4(fe746738, fffb, 4, , fde3be7c, fc00) libCstd_isa.so.1`__1cDstdMbasic_string4Ccn0ALchar_traits4Cc__n0AJallocator4Cc___2G6Mrk1_r1_+0xb8(167c1c, fe746760, 18, 1cbc0, 4, ffbf9db8) libCstd.so.1`__1cH__rwstdNlocale_vector4nDstdMbasic_string4Ccn0BLchar_traits4Cc__n0BJallocator4Cc_Gresize6MIn0E__p3_+0xc4(177a10 , 6, ffbf9db8, 0, 167c18, 167c18) libCstd.so.1`__1cH__rwstdKlocale_imp2t5B6MII_v_+0xc4(1779f0, a, 1, 177a18, 177a10, fe5f97a8) libCstd.so.1`__1cDstdGlocaleEinit6F_v_+0x44(0, fe6008ec, fe5f97a8, 10c, 6ec58, 0) libCstd.so.1`__1cDstdNbasic_istream4Cwn0ALchar_traits4Cw___2t6Mn0AIios_baseJEmptyCtor__v_+0x84(fe5ff058, fe5ff090, fe5fbb58, fe5f97a8 , fe5fbb4c, fe5ff068) libCstd.so.1`__SLIP.INIT_A+0x4c(0, fe5ff0c0, fe5f97a8, 418, 7e68c, 400) libCstd.so.1`__1cU__STATIC_CONSTRUCTOR6F_v_+4(fe576cf4, fe5ff028, fe5f97a8, 11c, 82b24, 0) libCstd.so.1`_init+0x1e0(0, 1, ff3a2000, ff3ee980, 32380, ff3ee268) ld.so.1`call_init+0x1a0(1, fe59af64, fe620018, ffdf, ff3ee980, ) ld.so.1`elf_bndr+0x420(fe5888e0, 861, fe456ea8, ff3ee0f8, ff3f06d0, 0) ld.so.1`elf_rtbndr+0x10(fe456ea8, fe46af48, ff1ecbc0, ff3a2000, fe46f188, 0) 0xfe46a950(fe46af48, 1000, fe46f178, 153bc, ff1edb88, fe46af48) 
libCrun.so.1`__1cH__CimplKcplus_init6F_v_+0x48(1, fe46b018, a7b54, 0, 139c0, ff3a03c0) libCstd_isa.so.1`_init+0xc8(0, 1, ff3a2000, ff3ee980, 32380, ff3ee268) ld.so.1`call_init+0x1a0(1, fde2920c, fdfc16c8, ffdf, ff3ee980, ) ld.so.1`dlmopen_intn+0x70(fe880370, fe9ac074, ff3ee0f8, fee51cf8, 8000, 0) ld.so.1`dlmopen_check+0x14c(ff3ee0f8, fe9ac074, c01, fee51cf8, ffbfa484, 0) ld.so.1`dlopen+0x64(fe9ac074, 1, 861, 294f4, 0, 0) libmeta.so.1`sdssc_bind_library+0x88(1, ff2c5a04, 1, ff2c59b8, ff2c59bc, 4000) libdiskmgt.so.1`inuse_svm+0x6c(189e88, cdc00, ffbfa9cc, ff2c5150, 13840, ff2b01b4) libdiskmgt.so.1`add_inuse+0x40(189e88, cdc00, ff2c5014, cdc60, 19aa4, 0) libdiskmgt.so.1`slice_get_stats+0x68(167bd8, 0, ffbfb424, 19d3c, ff146964, ff1eb3d4) libdiskmgt.so.1`dm_get_stats+0x12c(0, 167bd8, 0, ffbfb424, 1ff58, 120) libdiskmgt.so.1`dm_get_slice_stats+0x44(0, 167bd8, ffbfb424, ff2c46d0, 12224, ff1eb3d4) libdiskmgt.so.1`dm_inuse+0x74(189d88, ffbfb420, 1, ffbfb424, 2f, 189e08) check_slice+0x20(189e08, 0, 1, ffbfb48c, e8, dffe8) check_disk+0x13c(0, 0, 167bd8, 0, d7f50, 1) check_device+0x48(ebfb0, 0, d226c,
Re: [zfs-discuss] zpool core dumped
Robert Milkowski wrote: Hello zfs-discuss, S10U2 SPARC + patches Generic_118833-20 LUNs from 3510 array. bash-3.00# zpool import no pools available to import bash-3.00# zpool create f3-1 mirror c5t600C0FF0098FD535C3D2B900d0 c5t600C0FF0098FD54CB01E1100d0 mirror c5t600C0FF0098FD5255DA45900d0 c5t600C0FF0098FD56E93570600d0 mirror c5t600C0FF0098FD502D3066400d0 c5t600C0FF0098FD566A4CD4D00d0 mirror c5t600C0FF0098FD52CA2866D00d0 c5t600C0FF0098FD5644CDA1B00d0 free(fe746738): invalid or corrupted buffer stack trace: libumem.so.1'?? (0xff24b460) libCrun.so.1'__1c2k6Fpv_v_+0x4 libCstd_isa.so.1'__1cDstdMbasic_string4Ccn0ALchar_traits4Cc__n0AJallocator4Cc___2G6Mrk1_r1_+0xb8 libCstd.so.1'__1cH__rwstdNlocale_vector4nDstdMbasic_string4Ccn0BLchar_traits4Cc__n0BJallocator4Cc_Gresize6MIn0E__p3_+0xc4 libCstd.so.1'__1cH__rwstdKlocale_imp2t5B6MII_v_+0xc4 libCstd.so.1'__1cDstdGlocaleEinit6F_v_+0x44 libCstd.so.1'__1cDstdNbasic_istream4Cwn0ALchar_traits4Cw___2t6Mn0AIios_baseJEmptyCtor__v_+0x84 Hi Robert, which locale are you using? Has this problem happened before in the particular locale? If you're not using locale=C, can you reproduce this with locale=C ? cheers, James ___ zfs-discuss mailing list zfs-discuss@opensolaris.org http://mail.opensolaris.org/mailman/listinfo/zfs-discuss
Re: [zfs-discuss] zpool core dumped
Hello Robert, After server restart I got: bash-3.00# zpool create test c5t600C0FF0098FD535C3D2B900d0 warning: device in use checking failed: No such device bash-3.00# zpool list NAMESIZEUSED AVAILCAP HEALTH ALTROOT test204G 84.5K204G 0% ONLINE - bash-3.00# bash-3.00# zpool status pool: test state: ONLINE scrub: none requested config: NAME STATE READ WRITE CKSUM test ONLINE 0 0 0 c5t600C0FF0098FD535C3D2B900d0 ONLINE 0 0 0 errors: No known data errors bash-3.00# bash-3.00# zpool destroy test bash-3.00# zpool create test c5t600C0FF0098FD535C3D2B900d0 warning: device in use checking failed: No such device bash-3.00# bash-3.00# zpool list NAMESIZEUSED AVAILCAP HEALTH ALTROOT test204G 84.5K204G 0% ONLINE - bash-3.00# bash-3.00# zpool destroy test bash-3.00# zpool create f3-1 mirror c5t600C0FF0098FD535C3D2B900d0 c5t600C0FF0098FD54CB01E1100d0 mirror c5t600C0FF0098FD5255DA45900d0 c5t600C0FF0098FD56E93570600d0 mirror c5t600C0FF0098FD502D3066400d0 c5t600C0FF0098FD566A4CD4D00d0 mirror c5t600C0FF0098FD52CA2866D00d0 c5t600C0FF0098FD5644CDA1B00d0 warning: device in use checking failed: No such device warning: device in use checking failed: No such device warning: device in use checking failed: No such device warning: device in use checking failed: No such device warning: device in use checking failed: No such device warning: device in use checking failed: No such device warning: device in use checking failed: No such device warning: device in use checking failed: No such device bash-3.00# zpool list NAMESIZEUSED AVAILCAP HEALTH ALTROOT f3-1816G 61.5K816G 0% ONLINE - bash-3.00# -- Best regards, Robertmailto:[EMAIL PROTECTED] http://milek.blogspot.com ___ zfs-discuss mailing list zfs-discuss@opensolaris.org http://mail.opensolaris.org/mailman/listinfo/zfs-discuss
Re[2]: [zfs-discuss] zpool core dumped
Hello James, Tuesday, August 22, 2006, 11:52:37 PM, you wrote: JCM Robert Milkowski wrote: Hello zfs-discuss, S10U2 SPARC + patches Generic_118833-20 LUNs from 3510 array. bash-3.00# zpool import no pools available to import bash-3.00# zpool create f3-1 mirror c5t600C0FF0098FD535C3D2B900d0 c5t600C0FF0098FD54CB01E1100d0 mirror c5t600C0FF0098FD5255DA45900d0 c5t600C0FF0098FD56E93570600d0 mirror c5t600C0FF0098FD502D3066400d0 c5t600C0FF0098FD566A4CD4D00d0 mirror c5t600C0FF0098FD52CA2866D00d0 c5t600C0FF0098FD5644CDA1B00d0 free(fe746738): invalid or corrupted buffer stack trace: libumem.so.1'?? (0xff24b460) libCrun.so.1'__1c2k6Fpv_v_+0x4 libCstd_isa.so.1'__1cDstdMbasic_string4Ccn0ALchar_traits4Cc__n0AJallocator4Cc___2G6Mrk1_r1_+0xb8 libCstd.so.1'__1cH__rwstdNlocale_vector4nDstdMbasic_string4Ccn0BLchar_traits4Cc__n0BJallocator4Cc_Gresize6MIn0E__p3_+0xc4 libCstd.so.1'__1cH__rwstdKlocale_imp2t5B6MII_v_+0xc4 libCstd.so.1'__1cDstdGlocaleEinit6F_v_+0x44 libCstd.so.1'__1cDstdNbasic_istream4Cwn0ALchar_traits4Cw___2t6Mn0AIios_baseJEmptyCtor__v_+0x84 JCM Hi Robert, JCM which locale are you using? Has this problem happened before JCM in the particular locale? If you're not using locale=C, can JCM you reproduce this with locale=C ? It's locale=C I think. Right now after restart it started to work so... and I have LC_CTYPE=C -- Best regards, Robertmailto:[EMAIL PROTECTED] http://milek.blogspot.com ___ zfs-discuss mailing list zfs-discuss@opensolaris.org http://mail.opensolaris.org/mailman/listinfo/zfs-discuss
Re[2]: [zfs-discuss] zpool core dumped
Hello Eric, Tuesday, August 22, 2006, 11:51:55 PM, you wrote: ES This looks like a bug in the in-use checking for SVM (?). What build ES are you running? S10 update2 + patches, kernel Generic_118833-20 sparc ES In the meantime, you can work around this by setting 'NOINUSE_CHECK' in ES your environment to disable in-use checking. Just be careful that ES you're not specifying disks which are actually in use, of course ;-) After reboot it started almost to work. However: bash-3.00# zpool destroy f3-1 bash-3.00# export NOINUSE_CHECK=1 ;zpool create f3-1 c5t600C0FF0098FD54CB01E1100d0 c5t600C0FF0098FD56E93570600d0 c5t600C0FF0098FD566A4CD4D00d0 warning: device in use checking failed: No such device warning: device in use checking failed: No such device warning: device in use checking failed: No such device bash-3.00# -- Best regards, Robertmailto:[EMAIL PROTECTED] http://milek.blogspot.com ___ zfs-discuss mailing list zfs-discuss@opensolaris.org http://mail.opensolaris.org/mailman/listinfo/zfs-discuss
Re: [zfs-discuss] zpool core dumped
Hi Robert, Looks like you are using libumem? And it looks like there is a possible memory issue in the libmeta code when we are trying to dlopen it from libdiskmgt. I think we would have seen this more if it was happening every time with u2 bits. Doesn't mean its not a bug, but looks like it is a libmeta memory issue. sarah Robert Milkowski wrote: Hello zfs-discuss, S10U2 SPARC + patches Generic_118833-20 LUNs from 3510 array. bash-3.00# zpool import no pools available to import bash-3.00# zpool create f3-1 mirror c5t600C0FF0098FD535C3D2B900d0 c5t600C0FF0098FD54CB01E1100d0 mirror c5t600C0FF0098FD5255DA45900d0 c5t600C0FF0098FD56E93570600d0 mirror c5t600C0FF0098FD502D3066400d0 c5t600C0FF0098FD566A4CD4D00d0 mirror c5t600C0FF0098FD52CA2866D00d0 c5t600C0FF0098FD5644CDA1B00d0 free(fe746738): invalid or corrupted buffer stack trace: libumem.so.1'?? (0xff24b460) libCrun.so.1'__1c2k6Fpv_v_+0x4 libCstd_isa.so.1'__1cDstdMbasic_string4Ccn0ALchar_traits4Cc__n0AJallocator4Cc___2G6Mrk1_r1_+0xb8 libCstd.so.1'__1cH__rwstdNlocale_vector4nDstdMbasic_string4Ccn0BLchar_traits4Cc__n0BJallocator4Cc_Gresize6MIn0E__p3_+0xc4 libCstd.so.1'__1cH__rwstdKlocale_imp2t5B6MII_v_+0xc4 libCstd.so.1'__1cDstdGlocaleEinit6F_v_+0x44 libCstd.so.1'__1cDstdNbasic_istream4Cwn0ALchar_traits4Cw___2t6Mn0AIios_baseJEmptyCtor__v_+0x84 libCstd.so.1'?? (0xfe57b164) libCstd.so.1'?? (0xfe57b8ac) libCstd.so.1'_init+0x1e0 ld.so.1'?? (0xff3bfea8) ld.so.1'?? (0xff3cca04) ld.so.1'_elf_rtbndr+0x10 libCrun.so.1'?? (0xfe46a950) libCrun.so.1'__1cH__CimplKcplus_init6F_v_+0x48 libCstd_isa.so.1'_init+0xc8 ld.so.1'?? (0xff3bfea8) ld.so.1'?? (0xff3c5318) ld.so.1'?? (0xff3c5474) ld.so.1'dlopen+0x64 libmeta.so.1'sdssc_bind_library+0x88 libdiskmgt.so.1'?? (0xff2b080c) libdiskmgt.so.1'?? (0xff2aa594) libdiskmgt.so.1'?? 
(0xff2aa30c) libdiskmgt.so.1'dm_get_stats+0x12c libdiskmgt.so.1'dm_get_slice_stats+0x44 libdiskmgt.so.1'dm_inuse+0x74 zpool'check_slice+0x20 zpool'check_disk+0x13c zpool'check_device+0x48 zpool'check_in_use+0x80 zpool'check_in_use+0xd8 zpool'check_in_use+0xd8 zpool'make_root_vdev+0x30 zpool'?? (0x12f50) zpool'main+0xe8 zpool'_start+0x108 Abort (core dumped) bash-3.00# bash-3.00# mdb /var/core/core.XXX.zpool.1515.1156281273 Loading modules: [ libumem.so.1 libnvpair.so.1 libuutil.so.1 libc.so.1 libsysevent.so.1 libavl.so.1 ld.so.1 ] ::status debugging core file of zpool (32-bit) from nfs-10-1.srv file: /sbin/zpool initial argv: zpool create f3-1 mirror c5t600C0FF0098FD535C3D2B900d0 c5t600C0FF00 threading model: multi-threaded status: process terminated by SIGABRT (Abort) ::stack libc.so.1`_lwp_kill+8(6, 0, 20f50, ff24b998, ff26a000, ff26abc4) libumem.so.1`umem_do_abort+0x1c(26, ffbf9a08, 6, 20e8c, ff256a34, 0) libumem.so.1`umem_err_recoverable+0x7c(ff257a78, a, 20d84, ff3b3904, ff26d0d0, ff257a83) libumem.so.1`process_free+0x114(fe746738, 1, 0, 3e3a1000, 1ecac, fe46a950) libCrun.so.1`__1c2k6Fpv_v_+4(fe746738, fffb, 4, , fde3be7c, fc00) libCstd_isa.so.1`__1cDstdMbasic_string4Ccn0ALchar_traits4Cc__n0AJallocator4Cc___2G6Mrk1_r1_+0xb8(167c1c, fe746760, 18, 1cbc0, 4, ffbf9db8) libCstd.so.1`__1cH__rwstdNlocale_vector4nDstdMbasic_string4Ccn0BLchar_traits4Cc__n0BJallocator4Cc_Gresize6MIn0E__p3_+0xc4(177a10 , 6, ffbf9db8, 0, 167c18, 167c18) libCstd.so.1`__1cH__rwstdKlocale_imp2t5B6MII_v_+0xc4(1779f0, a, 1, 177a18, 177a10, fe5f97a8) libCstd.so.1`__1cDstdGlocaleEinit6F_v_+0x44(0, fe6008ec, fe5f97a8, 10c, 6ec58, 0) libCstd.so.1`__1cDstdNbasic_istream4Cwn0ALchar_traits4Cw___2t6Mn0AIios_baseJEmptyCtor__v_+0x84(fe5ff058, fe5ff090, fe5fbb58, fe5f97a8 , fe5fbb4c, fe5ff068) libCstd.so.1`__SLIP.INIT_A+0x4c(0, fe5ff0c0, fe5f97a8, 418, 7e68c, 400) libCstd.so.1`__1cU__STATIC_CONSTRUCTOR6F_v_+4(fe576cf4, fe5ff028, fe5f97a8, 11c, 82b24, 0) libCstd.so.1`_init+0x1e0(0, 1, ff3a2000, ff3ee980, 
32380, ff3ee268) ld.so.1`call_init+0x1a0(1, fe59af64, fe620018, ffdf, ff3ee980, ) ld.so.1`elf_bndr+0x420(fe5888e0, 861, fe456ea8, ff3ee0f8, ff3f06d0, 0) ld.so.1`elf_rtbndr+0x10(fe456ea8, fe46af48, ff1ecbc0, ff3a2000, fe46f188, 0) 0xfe46a950(fe46af48, 1000, fe46f178, 153bc, ff1edb88, fe46af48) libCrun.so.1`__1cH__CimplKcplus_init6F_v_+0x48(1, fe46b018, a7b54, 0, 139c0, ff3a03c0) libCstd_isa.so.1`_init+0xc8(0, 1, ff3a2000, ff3ee980, 32380, ff3ee268) ld.so.1`call_init+0x1a0(1, fde2920c, fdfc16c8, ffdf, ff3ee980, ) ld.so.1`dlmopen_intn+0x70(fe880370, fe9ac074, ff3ee0f8, fee51cf8, 8000, 0) ld.so.1`dlmopen_check+0x14c(ff3ee0f8, fe9ac074, c01, fee51cf8, ffbfa484, 0) ld.so.1`dlopen+0x64(fe9ac074, 1, 861, 294f4, 0, 0) libmeta.so.1`sdssc_bind_library+0x88(1, ff2c5a04, 1, ff2c59b8, ff2c59bc, 4000) libdiskmgt.so.1`inuse_svm+0x6c(189e88, cdc00, ffbfa9cc, ff2c5150, 13840, ff2b01b4) libdiskmgt.so.1`add_inuse+0x40(189e88, cdc00, ff2c5014, cdc60, 19aa4, 0) libdiskmgt.so.1`slice_get_stats+0x68(167bd8, 0, ffbfb424, 19d3c,