GitHub user marmbrus commented on a diff in the pull request:

    https://github.com/apache/spark/pull/4014#discussion_r23904318
  
    --- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/ScriptTransformation.scala ---
    @@ -53,28 +69,205 @@ case class ScriptTransformation(
           val inputStream = proc.getInputStream
           val outputStream = proc.getOutputStream
           val reader = new BufferedReader(new InputStreamReader(inputStream))
    + 
    +      val (outputSerde, outputSoi) = ioschema.initOutputSerDe(output)
    +
    +      val iterator: Iterator[Row] = new Iterator[Row] with HiveInspectors {
    +        var cacheRow: Row = null
    +        var curLine: String = null
    +        var eof: Boolean = false
    +
    +        override def hasNext: Boolean = {
    +          if (outputSerde == null) {
    +            if (curLine == null) {
    +              curLine = reader.readLine()
    +              curLine != null
    +            } else {
    +              true
    +            }
    +          } else {
    +            !eof
    +          }
    +        }
     
    -      // TODO: This should be exposed as an iterator instead of reading in all the data at once.
    -      val outputLines = collection.mutable.ArrayBuffer[Row]()
    -      val readerThread = new Thread("Transform OutputReader") {
    -        override def run() {
    -          var curLine = reader.readLine()
    -          while (curLine != null) {
    -            // TODO: Use SerDe
    -            outputLines += new GenericRow(curLine.split("\t").asInstanceOf[Array[Any]])
    +        def deserialize(): Row = {
    +          if (cacheRow != null) return cacheRow
    +
    +          val mutableRow = new SpecificMutableRow(output.map(_.dataType))
    +          try {
    +            val dataInputStream = new DataInputStream(inputStream)
    +            val writable = outputSerde.getSerializedClass().newInstance
    +            writable.readFields(dataInputStream)
    +
    +            val raw = outputSerde.deserialize(writable)
    +            val dataList = outputSoi.getStructFieldsDataAsList(raw)
    +            val fieldList = outputSoi.getAllStructFieldRefs()
    +            
    +            var i = 0
    +            dataList.foreach( element => {
    +              if (element == null) {
    +                mutableRow.setNullAt(i)
    +              } else {
    +                mutableRow(i) = unwrap(element, fieldList(i).getFieldObjectInspector)
    +              }
    +              i += 1
    +            })
    +            return mutableRow
    +          } catch {
    +            case e: EOFException =>
    +              eof = true
    +              return null
    +          }
    +        }
    +
    +        override def next(): Row = {
    +          if (!hasNext) {
    +            throw new NoSuchElementException
    +          }
    + 
    +          if (outputSerde == null) {
    +            val prevLine = curLine
                 curLine = reader.readLine()
    + 
    +            if (!ioschema.schemaLess) {
    +              new GenericRow(
    +                prevLine.split(ioschema.outputRowFormatMap("TOK_TABLEROWFORMATFIELD"))
    +                .asInstanceOf[Array[Any]])
    +            } else {
    +              new GenericRow(
    +                prevLine.split(ioschema.outputRowFormatMap("TOK_TABLEROWFORMATFIELD"), 2)
    +                .asInstanceOf[Array[Any]])
    +            }
    +          } else {
    +            val ret = deserialize()
    +            if (!eof) {
    +              cacheRow = null
    +              cacheRow = deserialize()
    +            }
    +            ret
               }
             }
           }
    -      readerThread.start()
    +
    +      val (inputSerde, inputSoi) = ioschema.initInputSerDe(input)
    +      val dataOutputStream = new DataOutputStream(outputStream)
           val outputProjection = new InterpretedProjection(input, child.output)
    +
           iter
             .map(outputProjection)
    -        // TODO: Use SerDe
    -        .map(_.mkString("", "\t", "\n").getBytes("utf-8")).foreach(outputStream.write)
    +        .foreach { row =>
    +          if (inputSerde == null) {
    +            val data = row.mkString("", ioschema.inputRowFormatMap("TOK_TABLEROWFORMATFIELD"),
    +              ioschema.inputRowFormatMap("TOK_TABLEROWFORMATLINES")).getBytes("utf-8")
    + 
    +            outputStream.write(data)
    +          } else {
    +            val writable = new ShimWritable(
    +              inputSerde.serialize(row.asInstanceOf[GenericRow].values, inputSoi))
    +            writable.write(dataOutputStream)
    +          }
    +        }
           outputStream.close()
    -      readerThread.join()
    -      outputLines.toIterator
    +      iterator
    +    }
    +  }
    +}
    +
    +/**
    + * The wrapper class of Hive input and output schema properties
    + *
    --- End diff --
    
    Remove this extra line.
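A note on the pattern in the diff above: the patch replaces the blocking
reader thread with a lazy iterator that pre-fetches one record (`cacheRow`)
so that `hasNext` can report end-of-stream without the caller blocking
inside `next()`. Here is a minimal, self-contained sketch of that
one-row-lookahead pattern; the names (`LookaheadIterator`, `LookaheadDemo`)
are illustrative, not from the PR:

    import java.io.{BufferedReader, StringReader}

    // Pre-fetch the next record so hasNext is a simple null check.
    class LookaheadIterator(reader: BufferedReader) extends Iterator[String] {
      private var cached: String = reader.readLine() // null means EOF

      override def hasNext: Boolean = cached != null

      override def next(): String = {
        if (!hasNext) throw new NoSuchElementException
        val current = cached
        cached = reader.readLine() // look ahead to the following record
        current
      }
    }

    object LookaheadDemo extends App {
      val it = new LookaheadIterator(new BufferedReader(new StringReader("a\nb\nc")))
      it.foreach(println) // prints a, b, c
    }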
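The schema-less branch passes a limit of 2 to `split`; this relies on
standard java.lang.String#split semantics, where the last element keeps the
untouched remainder of the line, so each record maps onto exactly two
columns (key plus the rest). A tiny demonstration, with an illustrative
object name:

    object SplitLimitDemo extends App {
      val line = "k1\tv1\tv2"
      println(line.split("\t").toList)    // List(k1, v1, v2)
      println(line.split("\t", 2).toList) // List(k1, v1<TAB>v2) -- two columns
    }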
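The SerDe branches hinge on the Hadoop Writable round-trip: the write side
serializes each row to the child process's stdin, and the read side creates
a fresh instance of the SerDe's serialized class and calls `readFields` on
it. A sketch of that round-trip, substituting org.apache.hadoop.io.Text for
a real SerDe's Writable (assumes hadoop-common on the classpath;
`WritableRoundTrip` is an illustrative name):

    import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}
    import org.apache.hadoop.io.Text

    object WritableRoundTrip extends App {
      val out = new ByteArrayOutputStream()
      new Text("hello").write(new DataOutputStream(out)) // write side

      val in = new DataInputStream(new ByteArrayInputStream(out.toByteArray))
      val writable = classOf[Text].newInstance() // mirrors getSerializedClass().newInstance
      writable.readFields(in) // read side
      println(writable) // hello
    }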

