Update: as far as I can tell, the segmentation fault also arises when I use
the "lesmis" dataset from the graph-tool data collection, which should make
this easier for others to reproduce:

import graph_tool.all as gt
import numpy as np
import cPickle as pickle
import timeit

#g = gt.load_graph('graph_no_multi_1930.gt')
# Use the bundled "lesmis" co-appearance network instead of the private graph
# so the reported crash is reproducible by others.
g = gt.collection.data["lesmis"]

#with open('model_selection_results_1930.dat','a') as output:
with open('model_selection_results_les_mis.dat','a') as output:
    # Model-selection settings: non-degree-corrected, overlapping nested
    # blockmodel; hierarchy padded to nL levels below.
    deg_corr = False
    overlap = True
    nL = 10
    
    # Initialize the Markov chain from the "ground state"
    state = gt.minimize_nested_blockmodel_dl(g,
deg_corr=deg_corr,overlap=overlap)
    # Description length of the minimized state (negative log posterior term).
    dl = state.entropy()
    temp = "Description length: "
    temp+= str(dl)
    temp+='\n'
    output.write(temp)
    output.flush()
    print 'minimised state'
    bs = state.get_bs()                     # Get hierarchical partition.
    bs += [np.zeros(1)] * (nL - len(bs))    # Augment it to L = 10 with
                                            # single-group levels.

    # Re-create the state with the padded hierarchy and sampling enabled,
    # so mcmc_equilibrate below can move between models.
    state = state.copy(bs=bs, sampling=True)

    # Accumulators filled by the collect_marginals callback during sampling.
    dls = []                               # description length history
    vm = [None] * len(state.get_levels())  # vertex marginals
    em = None                              # edge marginals
    def collect_marginals(s):
        """mcmc_equilibrate callback: fold the current sample into the
        running vertex/edge marginals and record its description length."""
        global vm, em
        hierarchy = s.get_levels()
        updated = []
        for idx, level_state in enumerate(hierarchy):
            updated.append(level_state.collect_vertex_marginals(vm[idx]))
        vm = updated
        # Edge marginals are only meaningful at the bottom (observed) level.
        em = hierarchy[0].collect_edge_marginals(em)
        dls.append(s.entropy())

    # Now we collect the marginal distributions for exactly 200,000 sweeps
    # (force_niter=20000 callback rounds x niter=10 sweeps per round).
    print 'equilibrating'
    start=timeit.default_timer()
    gt.mcmc_equilibrate(state, force_niter=20000, mcmc_args=dict(niter=10),
                        callback=collect_marginals)
    duration=timeit.default_timer()-start
    print 'duration for equilibrating: ', duration

    # Mean-field entropy of the vertex marginals, one term per hierarchy level.
    S_mf = [gt.mf_entropy(sl.g, vm[l]) for l, sl in
enumerate(state.get_levels())]
    # Bethe entropy from the bottom-level edge marginals ([0] drops the
    # per-edge breakdown also returned by bethe_entropy).
    S_bethe = gt.bethe_entropy(g, em)[0]
    # Negative mean description length over the collected samples.
    L = -np.mean(dls)
    
    # Model-evidence estimates: mean-field approximation vs. Bethe
    # approximation (which replaces the bottom-level mean-field term).
    val1 = L + sum(S_mf)
    val2 = L + S_bethe + sum(S_mf[1:])
    summary = ("Model evidence for nested blockmodel, deg_corr = "
               + str(deg_corr) + ', overlap = ' + str(overlap)
               + ':' + str(val1) + "(mean field),"
               + str(val2) + "(Bethe)" + '\n')
    output.write(summary)
    output.flush()
    # Also dump the individual terms for later inspection.
    with open('raw_results.dat','w') as f:
        f.write('DL: ' + str(L) + '\n')
        f.write('S_mf: ' + str(sum(S_mf)) + '\n')
        f.write('S_bethe: ' + str(S_bethe) + '\n')
        f.write('S_bethe + sum(S_mf[1:]): ' + str(S_bethe + sum(S_mf[1:])))



--
Sent from: 
http://main-discussion-list-for-the-graph-tool-project.982480.n3.nabble.com/
_______________________________________________
graph-tool mailing list
[email protected]
https://lists.skewed.de/mailman/listinfo/graph-tool

Reply via email to