As @moigagoo requested, I am posting the whole program. It's pretty 
straightforward. As my first attempt at a Nim program, I'm happy that it 
works at all (even if only in a debug build).
    
    
    import odbc, csv, threadpool
    
    setMaxPoolSize(8)  # cap the thread pool at 8 worker threads for the spawned exports
    
    type DataFile = object
        name: string
        exportable: bool
        filename: string
    
    proc getConnection*(): ODBCConnection =
        var con = newODBCConnection()
        
        con.host = r"dbhostname"
        con.driver = "ODBC Driver 17 for SQL Server"
        con.userName = "user"
        con.password = "Passw0rd"
        con.database = "dev"
        
        
        con.integratedSecurity = false
        
        con.reporting.level = rlErrorsAndInfo  # report errors and informational messages
        con.reporting.destinations = {rdEcho}  # echo those reports to stdout
        
        return con
    
    proc getExportList*(): seq[DataFile] =
        var con = getConnection()
        
        if not con.connect():
            echo "Couldn't connect to db"
            return  # don't try to query over a failed connection
        
        let r = con.executeFetch("SELECT * FROM data_to_extract")
        for i in 0 ..< r.len:
            result.add(
                DataFile(
                    name: r[i][0].strVal,
                    exportable: r[i][1].boolVal,
                    filename: r[i][2].strVal
                )
            )
        
        con.disconnect()
    
    proc writeSqlDataSet*(data: DataFile) =
        var con = getConnection()
        
        if not con.connect():
            echo "Couldn't connect to db"
            return
        
        let r = con.executeFetch("SELECT * FROM " & data.name)
        var ds: seq[seq[string]] = @[newSeq[string](r.colFields.len)]  # csv writing needs this seq-of-rows structure
        
        #Write the header names
        var idx: int
        for field in r.fields:
            ds[0][idx] = field.fieldname
            idx.inc
        
        #Write the data rows
        for i in 0 ..< r.len:
            ds.add(newSeq[string](r[i].len))
            
            for j in 0 ..< r[i].len:
                ds[i + 1][j] = r[i][j].asString  # ds[0] holds the header, so data row i lands at ds[i + 1]
                #echo r[i][j].asString
        
        # For now, just load the entire dataset into memory rather than paging the SQL Server query; revisit if it becomes a performance problem
        if ds.len <= 4000:
            discard writeAll(data.filename & ".csv", ds)
        else:
            var i = 0
            var j = 1
            while ds.len - i > 4000:
                discard writeAll(data.filename & $j & ".csv", ds[i ..< i + 4000])  # half-open slice: exactly 4000 rows, no overlap with the next chunk
                i += 4000
                j.inc
            
            #write out remainder
            if i != ds.len:
                discard writeAll(data.filename & $j & ".csv", ds[i ..< ds.len])
    
    
    
    let list = getExportList()
    
    for extract in list:
        if extract.exportable: spawn writeSqlDataSet(extract)
    sync()  # block until every spawned export has finished
    
    
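For reference, I build it with the thread pool enabled, roughly like this 
(`extract.nim` is just a placeholder for the source file name):

    nim c --threads:on extract.nim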

All calls are blocking, so it shouldn't be possible for me to use something 
before it's there, right?
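
For instance, here's a minimal sketch of the blocking semantics as I 
understand them (the proc is just an illustration, not part of the program):

    import threadpool
    
    proc double(x: int): int =
        x * 2
    
    let fv = spawn double(21)  # returns a FlowVar[int] immediately
    echo ^fv                   # `^` blocks until the spawned task produces the value
    sync()                     # blocks until all spawned tasks have finished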
