This is an automated email from the ASF dual-hosted git repository. richox pushed a commit to branch dev-add-asf-yml in repository https://gitbox.apache.org/repos/asf/auron.git
commit ea962d1d08cb49e73b8630df49bcbd59b15d5b6e Author: zhangli20 <[email protected]> AuthorDate: Thu Aug 28 17:47:37 2025 +0800 add .asf.yaml --- .asf.yaml | 26 ++++++++++++++++++++++ .../apache/spark/auron/FSDataInputWrapper.scala | 4 +--- 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/.asf.yaml b/.asf.yaml new file mode 100644 index 00000000..c78b446e --- /dev/null +++ b/.asf.yaml @@ -0,0 +1,26 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +github: + description: "The Auron accelerator for distributed computing framework (e.g., Spark) leverages native vectorized execution to accelerate query processing" + homepage: https://auron.apache.org/ + labels: + - rust-lang + - big-data + - spark + enabled_merge_buttons: + squash: true + merge: false + rebase: false diff --git a/hadoop-shim/src/main/scala/org/apache/spark/auron/FSDataInputWrapper.scala b/hadoop-shim/src/main/scala/org/apache/spark/auron/FSDataInputWrapper.scala index b96a31c4..353ef033 100644 --- a/hadoop-shim/src/main/scala/org/apache/spark/auron/FSDataInputWrapper.scala +++ b/hadoop-shim/src/main/scala/org/apache/spark/auron/FSDataInputWrapper.scala @@ -20,9 +20,7 @@ import java.io.EOFException import java.nio.ByteBuffer import java.nio.channels.Channels import java.util.concurrent.ConcurrentHashMap - -import org.apache.hadoop.fs.FSDataInputStream -import org.apache.hadoop.fs.StreamCapabilities +import org.apache.hadoop.fs.{FSDataInputStream, StreamCapabilities} trait FSDataInputWrapper extends AutoCloseable { def readFully(pos: Long, buf: ByteBuffer): Unit
