This is an automated email from the ASF dual-hosted git repository.

kou pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-js.git


The following commit(s) were added to refs/heads/main by this push:
     new 15b0f0b  fix: interval MonthDayNano precision loss when reading JSON-encoded int64s (#379)
15b0f0b is described below

commit 15b0f0b528238fc5dec18a0facd3e7454228f24f
Author: George <[email protected]>
AuthorDate: Thu Feb 19 01:32:59 2026 -0500

    fix: interval MonthDayNano precision loss when reading JSON-encoded int64s (#379)
    
    ## Summary
    
    This PR fixes precision handling for `IntervalMonthDayNano` nanoseconds
    when reading JSON, and aligns the tests with the actual behavior across
    build targets.
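
    For context, a minimal sketch of the precision hazard this change guards
    against (illustrative only; the sample value is taken from the new tests in
    this PR): once a 64-bit nanoseconds literal lands in a JavaScript `number`,
    it has been rounded to the nearest IEEE-754 double, and no later
    normalization step can recover the dropped digits.

    ```ts
    // Illustrative only: a nanoseconds literal outside Number's safe range.
    const text = '{"nanoseconds": 6684525287992311000}';

    // Plain JSON.parse maps every numeric literal to a double, so the
    // low-order digits are already rounded by the time parsing returns.
    const asNumber: number = JSON.parse(text).nanoseconds;
    console.log(Number.isSafeInteger(asNumber));             // false
    console.log(BigInt(asNumber) === 6684525287992311000n);  // false: the exact value is gone

    // Parsing the literal into a BigInt instead keeps the exact digits.
    console.log(BigInt('6684525287992311000'));              // 6684525287992311000n
    ```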
    
    ## Changes
    
    - Switched JSON parsing in `src/util/json.ts` from `json-bignum` to
    `json-with-bigint` with native `BigInt` output (see the sketch after this
    list).
    - Removed the post-parse unsafe-integer normalization logic, which could not
    recover precision once a value had been rounded.
    - Updated `test/unit/vector/interval-month-day-nano-tests.ts` to:
      - assert that parsed nanoseconds are `bigint`,
      - include the additional `expect(vec.get(0)).toStrictEqual(array)` assertion.
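
    A rough sketch of the parsing behavior the change relies on, assuming
    `json-with-bigint`'s `JSONParse` surfaces integer literals beyond
    `Number.MAX_SAFE_INTEGER` as native `BigInt` (the property the updated
    tests assert):

    ```ts
    import { JSONParse } from 'json-with-bigint';

    // Same entry point the new src/util/json.ts helper wraps (see the diff below).
    const parsed: any = JSONParse('{"months": 1, "days": 2, "nanoseconds": 6684525287992311000}');

    console.log(typeof parsed.nanoseconds);  // 'bigint' for out-of-range literals
    console.log(parsed.nanoseconds);         // 6684525287992311000n, digits preserved exactly
    ```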
    
    ## Validation
    
    Ran locally:
    
    - `yarn lint:ci`
    - `yarn test`
    
    Closes #15.
---
 bin/integration.ts                                |  5 ++-
 bin/json-to-arrow.ts                              |  5 ++-
 package.json                                      |  2 +-
 src/bin/arrow2csv.ts                              |  5 ++-
 src/util/json.ts                                  | 25 ++++++++++++++
 test/unit/vector/interval-month-day-nano-tests.ts | 42 +++++++++++++++++++++++
 yarn.lock                                         | 10 +++---
 7 files changed, 79 insertions(+), 15 deletions(-)

diff --git a/bin/integration.ts b/bin/integration.ts
index ee5182f..d1ba87c 100755
--- a/bin/integration.ts
+++ b/bin/integration.ts
@@ -22,8 +22,7 @@ import * as Path from 'node:path';
 import { glob } from 'glob';
 import { zip } from 'ix/iterable/zip';
 import commandLineArgs from 'command-line-args';
-// @ts-ignore
-import { parse as bignumJSONParse } from 'json-bignum';
+import { parseArrowJSON } from '../src/util/json.ts';
 
 import {
     Table,
@@ -138,7 +137,7 @@ async function validate(jsonPath: string, arrowPath: string) {
     ]);
 
     const arrowData = files[0];
-    const jsonData = bignumJSONParse(files[1]);
+    const jsonData = parseArrowJSON(files[1]);
 
     process.stdout.write(`\n`);
     process.stdout.write(` json: ${jsonPath}\n`);
diff --git a/bin/json-to-arrow.ts b/bin/json-to-arrow.ts
index f9b3952..2600964 100755
--- a/bin/json-to-arrow.ts
+++ b/bin/json-to-arrow.ts
@@ -21,8 +21,7 @@ import * as fs from 'node:fs';
 import * as Path from 'node:path';
 import commandLineArgs from 'command-line-args';
 import { finished as eos } from 'node:stream/promises';
-// @ts-ignore
-import { parse as bignumJSONParse } from 'json-bignum';
+import { parseArrowJSON } from '../src/util/json.ts';
 import { RecordBatchReader, RecordBatchFileWriter, RecordBatchStreamWriter } from '../index.ts';
 
 const argv = commandLineArgs(cliOpts(), { partial: true });
@@ -41,7 +40,7 @@ const arrowPaths = [...(argv.arrow || [])];
             ? RecordBatchFileWriter
             : RecordBatchStreamWriter;
 
-        const reader = RecordBatchReader.from(bignumJSONParse(
+        const reader = RecordBatchReader.from(parseArrowJSON(
             await fs.promises.readFile(Path.resolve(path), 'utf8')));
 
         const jsonToArrow = reader
diff --git a/package.json b/package.json
index 378ba9a..fdbe355 100644
--- a/package.json
+++ b/package.json
@@ -53,7 +53,7 @@
     "command-line-args": "^6.0.1",
     "command-line-usage": "^7.0.1",
     "flatbuffers": "^25.1.24",
-    "json-bignum": "^0.0.3",
+    "json-with-bigint": "^3.5.3",
     "tslib": "^2.6.2"
   },
   "devDependencies": {
diff --git a/src/bin/arrow2csv.ts b/src/bin/arrow2csv.ts
index 569e419..2835b25 100755
--- a/src/bin/arrow2csv.ts
+++ b/src/bin/arrow2csv.ts
@@ -25,8 +25,7 @@ import { Schema, RecordBatch, RecordBatchReader, AsyncByteQueue, util } from '..
 
 import * as commandLineUsage from 'command-line-usage';
 import * as commandLineArgs from 'command-line-args';
-// @ts-ignore
-import { parse as bignumJSONParse } from 'json-bignum';
+import { parseArrowJSON } from '../util/json.js';
 
 const argv = commandLineArgs(cliOpts(), { partial: true });
 const files = argv.help ? [] : [...(argv.file || []), ...(argv._unknown || [])].filter(Boolean);
@@ -114,7 +113,7 @@ async function* recordBatchReaders(createSourceStream: () => NodeJS.ReadableStre
         if (source instanceof fs.ReadStream) { source.close(); }
         // If the data in the `json` ByteQueue parses to JSON, then assume it's Arrow JSON from a file or stdin
         try {
-            for await (reader of RecordBatchReader.readAll(bignumJSONParse(await json.toString()))) {
+            for await (reader of RecordBatchReader.readAll(parseArrowJSON(await json.toString()))) {
                 reader && (yield reader);
             }
         } catch { readers = null; }
diff --git a/src/util/json.ts b/src/util/json.ts
new file mode 100644
index 0000000..4c01470
--- /dev/null
+++ b/src/util/json.ts
@@ -0,0 +1,25 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import { JSONParse } from 'json-with-bigint';
+
+/** @ignore */
+export function parseArrowJSON(source: string): any {
+    // Preserve exact integer literals beyond the IEEE-754 safe range (e.g.
+    // Int64 values and IntervalMonthDayNano nanoseconds) as BigInt.
+    return JSONParse(source);
+}
diff --git a/test/unit/vector/interval-month-day-nano-tests.ts b/test/unit/vector/interval-month-day-nano-tests.ts
index a385fa9..49dc023 100644
--- a/test/unit/vector/interval-month-day-nano-tests.ts
+++ b/test/unit/vector/interval-month-day-nano-tests.ts
@@ -16,6 +16,7 @@
 // under the License.
 
 import { IntervalMonthDayNano, IntervalMonthDayNanoObject, Vector, makeData, util } from 'apache-arrow';
+import { parseArrowJSON } from '../../../src/util/json.js';
 
 const { toIntervalMonthDayNanoInt32Array, toIntervalMonthDayNanoObjects } = util;
 
@@ -77,4 +78,45 @@ describe(`MonthDayNanoIntervalVector`, () => {
         expect(vec.get(0)).toStrictEqual(array);
         expect(toIntervalMonthDayNanoObjects(vec.get(0), false)).toStrictEqual([{ ...EMPTY_INTERVAL_MONTH_DAY_NANO_OBJECT, ...obj }]);
     });
+
+    test(`Unsafe integer nanoseconds represented as bigint roundtrip correctly`, () => {
+        const samples = [
+            '-390122861233460600',
+            '6684525287992311000'
+        ];
+        for (const sample of samples) {
+            const nanoseconds = BigInt(sample);
+            const obj: Partial<IntervalMonthDayNanoObject> = { nanoseconds };
+            const array = toIntervalMonthDayNanoInt32Array([obj]);
+            const vec = makeIntervalMonthDayNanoVector(array);
+            expect(vec.type).toBeInstanceOf(IntervalMonthDayNano);
+            expect(vec.get(0)).toStrictEqual(array);
+            expect(toIntervalMonthDayNanoObjects(vec.get(0), false)).toStrictEqual([{
+                ...EMPTY_INTERVAL_MONTH_DAY_NANO_OBJECT,
+                nanoseconds,
+            }]);
+        }
+    });
+
+    test(`Integer nanoseconds parsed from JSON preserve exact values`, () => {
+        const samples = [
+            '42',
+            '9007199254740991',
+            '9007199254740992',
+            '6684525287992311000',
+            '-9007199254740992',
+            '-390122861233460600'
+        ];
+        for (const sample of samples) {
+            const parsed = parseArrowJSON(`{"nanoseconds":${sample}}`);
+            const array = toIntervalMonthDayNanoInt32Array([parsed]);
+            const vec = makeIntervalMonthDayNanoVector(array);
+            expect(vec.type).toBeInstanceOf(IntervalMonthDayNano);
+            expect(vec.get(0)).toStrictEqual(array);
+            expect(toIntervalMonthDayNanoObjects(array, false)).toStrictEqual([{
+                ...EMPTY_INTERVAL_MONTH_DAY_NANO_OBJECT,
+                nanoseconds: BigInt(sample)
+            }]);
+        }
+    });
 });
diff --git a/yarn.lock b/yarn.lock
index 96caac9..ec241ae 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -4622,11 +4622,6 @@ jsesc@~3.0.2:
  resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-3.0.2.tgz#bb8b09a6597ba426425f2e4a07245c3d00b9343e"
  integrity sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==
 
-json-bignum@^0.0.3:
-  version "0.0.3"
-  resolved "https://registry.yarnpkg.com/json-bignum/-/json-bignum-0.0.3.tgz#41163b50436c773d82424dbc20ed70db7604b8d7"
-  integrity sha512-2WHyXj3OfHSgNyuzDbSxI1w2jgw5gkWSWhS7Qg4bWXx1nLk3jnbwfUeS0PSba3IzpTUWdHxBieELUzXRjQB2zg==
-
 [email protected]:
   version "3.0.1"
  resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13"
@@ -4652,6 +4647,11 @@ json-stable-stringify-without-jsonify@^1.0.1:
  resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
  integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==
 
+json-with-bigint@^3.5.3:
+  version "3.5.3"
+  resolved "https://registry.yarnpkg.com/json-with-bigint/-/json-with-bigint-3.5.3.tgz#292fe4d3fa941996a02294edf31fa59d8c9054ef"
+  integrity sha512-QObKu6nxy7NsxqR0VK4rkXnsNr5L9ElJaGEg+ucJ6J7/suoKZ0n+p76cu9aCqowytxEbwYNzvrMerfMkXneF5A==
+
 json2csv@^5.0.6:
   version "5.0.7"
  resolved "https://registry.yarnpkg.com/json2csv/-/json2csv-5.0.7.tgz#f3a583c25abd9804be873e495d1e65ad8d1b54ae"
