This is an automated email from the ASF dual-hosted git repository.
chenliang613 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git
The following commit(s) were added to refs/heads/master by this push:
new 1ed4e2634c Fix outdated packages (#4365)
1ed4e2634c is described below
commit 1ed4e2634cb26b3a1694588a756d2aa2ca546758
Author: Jacky Li <[email protected]>
AuthorDate: Tue Apr 1 07:27:30 2025 +0800
Fix outdated packages (#4365)
---
.../carbondata/spark/testsuite/index/CGIndexTestCase.scala | 7 +++----
.../carbondata/spark/testsuite/index/FGIndexTestCase.scala | 11 +++++------
pom.xml | 2 +-
3 files changed, 9 insertions(+), 11 deletions(-)
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/index/CGIndexTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/index/CGIndexTestCase.scala
index 35ff2d2218..6584cdb71a 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/index/CGIndexTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/index/CGIndexTestCase.scala
@@ -17,13 +17,12 @@
package org.apache.carbondata.spark.testsuite.index
-import java.io.{ByteArrayInputStream, DataOutputStream, ObjectInputStream, ObjectOutputStream}
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataOutputStream, ObjectInputStream, ObjectOutputStream}
import java.util.UUID
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
-import com.sun.xml.internal.messaging.saaj.util.ByteOutputStream
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.sql.test.util.QueryTest
@@ -353,11 +352,11 @@ class CGIndexWriter(
val file = indexPath + "/testcg.indexSchema"
val stream: DataOutputStream = FileFactory
.getDataOutputStream(file)
- val out = new ByteOutputStream()
+ val out = new ByteArrayOutputStream()
val outStream = new ObjectOutputStream(out)
outStream.writeObject(maxMin)
outStream.close()
- val bytes = compressor.compressByte(out.getBytes)
+ val bytes = compressor.compressByte(out.toByteArray)
stream.write(bytes.array(), 0, bytes.limit())
stream.writeInt(bytes.limit())
stream.close()
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/index/FGIndexTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/index/FGIndexTestCase.scala
index 5731c95728..b2b98caddd 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/index/FGIndexTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/index/FGIndexTestCase.scala
@@ -17,12 +17,11 @@
package org.apache.carbondata.spark.testsuite.index
-import java.io.{ByteArrayInputStream, DataOutputStream, ObjectInputStream, ObjectOutputStream}
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataOutputStream, ObjectInputStream, ObjectOutputStream}
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
-import com.sun.xml.internal.messaging.saaj.util.ByteOutputStream
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.sql.test.util.QueryTest
@@ -360,11 +359,11 @@ class FGIndexWriter(carbonTable: CarbonTable,
blockletListUpdated += oldValue
}
- val out = new ByteOutputStream()
+ val out = new ByteArrayOutputStream()
val outStream = new ObjectOutputStream(out)
outStream.writeObject(blockletListUpdated)
outStream.close()
- val bytes = compressor.compressByte(out.getBytes)
+ val bytes = compressor.compressByte(out.toByteArray)
stream.write(bytes.array(), 0, bytes.limit())
maxMin +=
((blockletId, (blockletListUpdated.head._1, blockletListUpdated.last
@@ -427,11 +426,11 @@ class FGIndexWriter(carbonTable: CarbonTable,
*/
override def finish(): Unit = {
FileFactory.mkdirs(fgwritepath)
- val out = new ByteOutputStream()
+ val out = new ByteArrayOutputStream()
val outStream = new ObjectOutputStream(out)
outStream.writeObject(maxMin)
outStream.close()
- val bytes = compressor.compressByte(out.getBytes)
+ val bytes = compressor.compressByte(out.toByteArray)
stream.write(bytes.array(), 0, bytes.limit())
stream.writeInt(bytes.limit())
stream.close()
diff --git a/pom.xml b/pom.xml
index 01130b9a37..ee18016e15 100644
--- a/pom.xml
+++ b/pom.xml
@@ -111,7 +111,7 @@
<module>integration/flink</module>
<module>integration/flink-build</module>
<module>integration/flink-proxy</module>
- <module>integration/presto</module>
+ <!--<module>integration/presto</module>-->
<module>sdk/sdk</module>
<module>tools/cli</module>
<module>examples/spark</module>