Joseph Rushton Wakeling:
http://braingam.es/2013/09/betweenness-centrality-in-dgraph/

Some small improvements:


// Needs std.array (Appender, minimallyInitializedArray) and std.traits
// (isFloatingPoint); isGraph and VertexQueue come from the linked dgraph code.
T[] betweenness(Graph, T = double)(ref Graph g, bool[] ignore = null)
    if (isGraph!Graph && isFloatingPoint!T)
{
    enum T T0 = 0;
    enum T T1 = 1;
    auto centrality = minimallyInitializedArray!(typeof(return))(g.vertexCount);
    centrality[] = T0;
    auto stack = new size_t[g.vertexCount];
    auto sigma = minimallyInitializedArray!(T[])(g.vertexCount);
    sigma[] = T0;
    auto delta = minimallyInitializedArray!(T[])(g.vertexCount);
    delta[] = T0;
    auto d = minimallyInitializedArray!(long[])(g.vertexCount);
    d[] = -1;
    auto q = VertexQueue(g.vertexCount);
    auto p = new Appender!(size_t[])[g.vertexCount];

    foreach (immutable s; 0 .. g.vertexCount)
    {
        if (ignore && ignore[s])
        {
            continue;
        }

        size_t stackLength = 0;
        assert(q.empty);
        sigma[s] = T1;
        d[s] = 0;
        q.push(s);

        while (!q.empty)
        {
            immutable size_t v = q.front;
            q.pop;
            stack[stackLength] = v;
            ++stackLength;
            foreach (immutable w; g.neighboursOut(v))
            {
                if (ignore && ignore[w])
                {
                    continue;
                }

                if (d[w] < 0)
                {
                    q.push(w);
                    d[w] = d[v] + 1;
                    assert(sigma[w] == T0);
                    sigma[w] = sigma[v];
                    p[w].clear;
                    p[w].put(v);
                }
                else if (d[w] > (d[v] + 1))
                {
                    /* w has already been encountered, but we've
                       found a shorter path to the source.  This
                       is probably only relevant to the weighted
                       case, but let's keep it here in order to
                       be ready for that update. */
                    d[w] = d[v] + 1L;
                    sigma[w] = sigma[v];
                    p[w].clear;
                    p[w].put(v);
                }
                else if (d[w] == (d[v] + 1))
                {
                    sigma[w] += sigma[v];
                    p[w].put(v);
                }
            }
        }

        while (stackLength > 0)
        {
            --stackLength;
            immutable w = stack[stackLength];
            foreach (immutable v; p[w].data)
            {
                delta[v] += ((sigma[v] / sigma[w]) * (T1 + delta[w]));
            }
            if (w != s)
            {
                centrality[w] += delta[w];
            }
            sigma[w] = T0;
            delta[w] = T0;
            d[w] = -1;
        }
    }

    static if (!g.directed)
    {
        centrality[] /= 2;
    }

    return centrality;
}


Just as a test, try allocating all those arrays in a different way:
- First try std.container.Array, using the LDC2 compiler (a small sketch is below);
- Another thing to try is to allocate them on the stack using core.stdc.stdlib.alloca:
auto p = cast(T*) alloca(T.sizeof * g.vertexCount);
if (p is null) onOutOfMemoryError(); // from core.exception
auto centrality = p[0 .. g.vertexCount];
This probably can't be used for very large graphs, so you have to add a bit more logic to allocate the arrays on the C heap when they are too large (a sketch of this is below too). It could be worthwhile if you call betweenness() many times.
- Try to optionally accept the buffers from the caller (see the last sketch below).
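
A small sketch of the std.container.Array idea (untested, and shown only for one buffer; the container's malloc-backed memory is freed deterministically when it goes out of scope):

import std.container : Array;

// Hypothetical helper, just to show the shape of the change for sigma;
// the other buffers would follow the same pattern.
void containerDemo(in size_t vertexCount)
{
    Array!double sigma;
    sigma.length = vertexCount; // allocates non-GC storage
    sigma[] = 0.0;              // slice assignment works as with a built-in array
}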
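
A rough sketch of the alloca idea combined with the C-heap fallback (the 100_000 threshold is just a guess, not a measured value, and the code would have to sit inside betweenness() itself, because alloca memory is released only when the enclosing function returns):

import core.exception : onOutOfMemoryError;
import core.stdc.stdlib : alloca, free, malloc;

void allocaDemo(T = double)(in size_t vertexCount)
{
    immutable bool onHeap = vertexCount > 100_000;
    T* ptr;
    if (onHeap)
        ptr = cast(T*) malloc(T.sizeof * vertexCount);
    else
        ptr = cast(T*) alloca(T.sizeof * vertexCount);
    if (ptr is null)
        onOutOfMemoryError();
    scope(exit) if (onHeap) free(ptr);

    auto centrality = ptr[0 .. vertexCount];
    centrality[] = 0; // then use the slice exactly like the GC-allocated one
}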
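
And a rough sketch of the last point; the BetweennessWorkspace name and layout are only an illustration, nothing like it exists in dgraph:

import std.array : Appender, minimallyInitializedArray;

// Scratch space allocated once by the caller and reused across many
// betweenness() calls.
struct BetweennessWorkspace(T)
{
    T[] centrality, sigma, delta;
    long[] d;
    size_t[] stack;
    Appender!(size_t[])[] p;

    this(in size_t vertexCount)
    {
        centrality = minimallyInitializedArray!(T[])(vertexCount);
        sigma = minimallyInitializedArray!(T[])(vertexCount);
        delta = minimallyInitializedArray!(T[])(vertexCount);
        d = minimallyInitializedArray!(long[])(vertexCount);
        stack = new size_t[vertexCount];
        p = new Appender!(size_t[])[vertexCount];
    }
}

// betweenness() would then take a "ref BetweennessWorkspace!T w" argument
// (or allocate one internally by default) and use w.sigma, w.delta, etc.
// in place of its locals, resetting them at the start of each call.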

Bye,
bearophile
