zeroshade commented on code in PR #1267:
URL: https://github.com/apache/arrow-adbc/pull/1267#discussion_r1384825894
##########
go/adbc/driver/snowflake/record_reader.go:
##########
@@ -101,20 +101,36 @@ func getTransformer(sc *arrow.Schema, ld gosnowflake.ArrowStreamLoader, useHighP
 				}
 				f.Type = dt
 				transformers[i] = func(ctx context.Context, a arrow.Array) (arrow.Array, error) {
-					return compute.CastArray(ctx, a, compute.SafeCastOptions(dt))
+					return integerToDecimal128(ctx, a, dt)
 				}
 			} else {
 				if srcMeta.Scale != 0 {
 					f.Type = arrow.PrimitiveTypes.Float64
-					transformers[i] = func(ctx context.Context, a arrow.Array) (arrow.Array, error) {
-						result, err := compute.Divide(ctx, compute.ArithmeticOptions{NoCheckOverflow: true},
-							&compute.ArrayDatum{Value: a.Data()},
-							compute.NewDatum(math.Pow10(int(srcMeta.Scale))))
-						if err != nil {
-							return nil, err
+					// For precisions of 16, 17 and 18, a conversion from int64 to float64 fails with an error
+					// So for these precisions, we instead convert first to a decimal128 and then to a float64.
Review Comment:
   What's the error? Should those precisions work instead, and should we push a fix upstream to the Arrow lib?
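
For reference, a minimal sketch of the two-step conversion the new comment describes, assuming the Arrow Go v14 module paths and that the compute package's cast kernels can cast decimal128 to float64. The helper name `scaledInt64ToFloat64` and the way it builds the decimal column are illustrative guesses, not the PR's actual `integerToDecimal128`:

```go
// Hypothetical sketch, not the code in this PR: reinterpret the int64 values
// Snowflake returns as the unscaled digits of a decimal128(precision, scale)
// column, then cast that decimal column to float64 so the scale is applied by
// the decimal -> float cast rather than by dividing through math.Pow10(scale).
package example

import (
	"context"

	"github.com/apache/arrow/go/v14/arrow"
	"github.com/apache/arrow/go/v14/arrow/array"
	"github.com/apache/arrow/go/v14/arrow/compute"
	"github.com/apache/arrow/go/v14/arrow/decimal128"
	"github.com/apache/arrow/go/v14/arrow/memory"
)

func scaledInt64ToFloat64(ctx context.Context, a arrow.Array, precision, scale int32) (arrow.Array, error) {
	decType := &arrow.Decimal128Type{Precision: precision, Scale: scale}

	bldr := array.NewDecimal128Builder(memory.DefaultAllocator, decType)
	defer bldr.Release()

	src := a.(*array.Int64)
	for i := 0; i < src.Len(); i++ {
		if src.IsNull(i) {
			bldr.AppendNull()
			continue
		}
		// The int64 already holds the scaled digits, so it is used verbatim
		// as the unscaled decimal128 value.
		bldr.Append(decimal128.FromI64(src.Value(i)))
	}

	dec := bldr.NewArray()
	defer dec.Release()

	// Assumes the Go compute cast kernels support decimal128 -> float64.
	return compute.CastArray(ctx, dec, compute.SafeCastOptions(arrow.PrimitiveTypes.Float64))
}
```

If that reading is right, the appeal over the old `compute.Divide` path is that the value stays exact as a decimal128 until the single lossy decimal-to-float cast at the end.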