On Saturday, 9 January 2016 at 23:20:00 UTC, Jay Norwood wrote:
I'm playing around with win32, v2.069.2 dmd and "dip80-ndslice": "~>0.8.8". If I convert the 2D slice with .array(), should that first dimension then be compatible with parallel foreach?

I find that without using parallel, all the means get computed, but with parallel, only about half of them are computed in this example. The others remain NaN, as observed in the Visual D debugger.

import std.range : iota;
import std.array : array;
import std.algorithm;
import std.datetime;
import std.conv : to;
import std.stdio;
import std.experimental.ndslice;

// Number of benchmark iterations passed to std.datetime.benchmark.
enum testCount = 1;
// One mean per row. D default-initializes doubles to NaN, so any row the
// parallel loop fails to process is visible as a leftover NaN here.
double[1000] means;
// Flat backing buffer, filled in main() and viewed as a 1000 x 100_000 slice.
double[] data;

// Benchmark body: view `data` as 1000 rows of 100_000 doubles and compute
// each row's mean in a parallel foreach, storing the result in `means`.
void f1() {
 import std.parallelism;
 auto matrix = data.sliced(1000, 100_000);
 auto rows = matrix.array();
 foreach (rowIndex, row; parallel(rows)) {
  immutable double total = row.sum(0.0);
  means[rowIndex] = total / 100_000;
 }
}

// Entry point: build a 100M-element ramp in [0, 1), time f1, and print the
// per-run duration plus the first computed mean.
void main() {
 data = new double[100_000_000];
 foreach (i, ref element; data) {
  element = i / 100_000_000.0;
 }
 auto timings = benchmark!(f1)(testCount);
 auto perRun = to!Duration(timings[0] / testCount);
 perRun.writeln;
 writeln(means[0]);
}

This is a bug in std.parallelism :-)

Proof:

import std.range : iota;
import std.array : array;
import std.algorithm;
import std.datetime;
import std.conv : to;
import std.stdio;
import mir.ndslice;

import std.parallelism;

// Number of benchmark iterations passed to std.datetime.benchmark.
enum testCount = 1;

// One mean per row. D default-initializes doubles to NaN, so unprocessed
// rows stay NaN (this is what the truncated output below demonstrates).
double[1000] means;
// Flat input buffer, initialized with a linear ramp in main().
double[] data;

// Benchmark body: split `data` into 1000 equal contiguous rows by hand
// (instead of the ndslice sliced/.array path, which loses rows under
// parallel foreach) and compute each row's mean in parallel.
void f1() {
        enum rowCount = 1000;
        // Derive the row length once instead of hard-coding 100_000 in two
        // places; equals 100_000 for the benchmark input built in main().
        immutable size_t rowLen = data.length / rowCount;
        auto sla = new double[][rowCount];
        foreach (i, ref e; sla)
        {
                e = data[i * rowLen .. (i + 1) * rowLen];
        }
        // Each iteration owns a distinct index i, so writes to `means`
        // cannot race across workers.
        foreach (i, vec; parallel(sla))
        {
                double v = vec.sum;
                means[i] = v / vec.length;
        }
}

// Entry point: build a 100M-element ramp in [0, 1), time f1, and print the
// per-run duration followed by the full means array.
void main() {
        data = new double[100_000_000];
        // Fill with i / 1e8 so every element lies in [0, 1).
        foreach (i, ref sample; data)
        {
                sample = i / 100_000_000.0;
        }
        auto timings = benchmark!(f1)(testCount);
        auto perRun = to!Duration(timings[0] / testCount);
        perRun.writeln;
        writeln(means);
}

Prints:
[0.000499995, 0.0015, 0.0025, 0.0035, 0.00449999, 0.00549999, 0.00649999, 0.00749999, 0.00849999, 0.00949999, 0.0105, 0.0115, 0.0125, 0.0135, 0.0145, 0.0155, 0.0165, 0.0175, 0.0185, 0.0195, 0.0205, 0.0215, 0.0225, 0.0235, 0.0245, 0.0255, 0.0265, 0.0275, 0.0285, 0.0295, 0.0305, 0.0315, 0.0325, 0.0335, 0.0345, 0.0355, 0.0365, 0.0375, 0.0385, 0.0395, 0.0405, 0.0415, 0.0425, 0.0435, 0.0445, 0.0455, 0.0465, 0.0475, 0.0485, 0.0495, 0.0505, 0.0515, 0.0525, 0.0535, 0.0545, 0.0555, 0.0565, 0.0575, 0.0585, 0.0595, 0.0605, 0.0615, 0.0625, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan ....

Reply via email to