This is an automated email from the ASF dual-hosted git repository.
mbeckerle pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/daffodil.git
The following commit(s) were added to refs/heads/main by this push:
new b48db8d Change layering to pass and use state.
b48db8d is described below
commit b48db8d7f3ad20e7df7a1452793ac49686f8e119
Author: Michael Beckerle <[email protected]>
AuthorDate: Wed Dec 15 09:11:14 2021 -0500
Change layering to pass and use state.
Removed state from LayerRuntimeInfo
This fixes layer unparsing as the passed state can be the one created for
use
by suspensions.
Eliminated LayerSerializedInfo, as it is the same as LayerRuntimeInfo now.
Eliminated VariableHandle (just use VariableRuntimeData)
Tested with PCAP and EthernetIP, both of which now work.
Add daffodil-runtime1-layers to sonarqube
DAFFODIL-2608
---
.sonar-project.properties | 2 +-
.../grammar/primitives/LayeredSequence.scala | 10 +-
.../apache/daffodil/layers/Base64Transformer.scala | 13 +--
.../daffodil/layers/ByteSwapTransformer.scala | 7 +-
.../apache/daffodil/layers/GZipTransformer.scala | 7 +-
.../daffodil/layers/LineFoldedTransformer.scala | 9 +-
.../unparsers/LayeredSequenceUnparser.scala | 10 +-
.../org/apache/daffodil/layers/LayerCompiler.scala | 38 ++++----
.../apache/daffodil/layers/LayerTransformer.scala | 102 ++++++++-------------
.../processors/parsers/LayeredSequenceParser.scala | 8 +-
.../apache/daffodil/layers/AISTransformer.scala | 5 +-
.../org/apache/daffodil/layers/CheckDigit.scala | 10 +-
.../daffodil/layers/CheckDigitLayerCompiler.scala | 7 +-
.../org/apache/daffodil/layers/IPv4Checksum.scala | 8 +-
.../layers/IPv4ChecksumLayerCompiler.scala | 5 +-
.../org/apache/daffodil/layers/TestIPv4.scala | 2 +-
16 files changed, 109 insertions(+), 134 deletions(-)
diff --git a/.sonar-project.properties b/.sonar-project.properties
index fa01312..c6cfd18 100644
--- a/.sonar-project.properties
+++ b/.sonar-project.properties
@@ -16,7 +16,7 @@
sonar.organization=apache
sonar.projectKey=apache-daffodil
-sonar.modules=daffodil-cli,daffodil-core,daffodil-io,daffodil-japi,daffodil-lib,daffodil-macro-lib,daffodil-propgen,daffodil-runtime1,daffodil-runtime1-unparser,daffodil-runtime2,daffodil-sapi,daffodil-tdml-lib,daffodil-tdml-processor,daffodil-test,daffodil-test-ibm1,daffodil-udf
+sonar.modules=daffodil-cli,daffodil-core,daffodil-io,daffodil-japi,daffodil-lib,daffodil-macro-lib,daffodil-propgen,daffodil-runtime1,daffodil-runtime1-layers,daffodil-runtime1-unparser,daffodil-runtime2,daffodil-sapi,daffodil-tdml-lib,daffodil-tdml-processor,daffodil-test,daffodil-test-ibm1,daffodil-udf
sonar.sources=src/main
sonar.tests=src/it,src/test
sonar.c.file.suffixes=-
diff --git
a/daffodil-core/src/main/scala/org/apache/daffodil/grammar/primitives/LayeredSequence.scala
b/daffodil-core/src/main/scala/org/apache/daffodil/grammar/primitives/LayeredSequence.scala
index b7ead31..785100f 100644
---
a/daffodil-core/src/main/scala/org/apache/daffodil/grammar/primitives/LayeredSequence.scala
+++
b/daffodil-core/src/main/scala/org/apache/daffodil/grammar/primitives/LayeredSequence.scala
@@ -20,7 +20,7 @@ package org.apache.daffodil.grammar.primitives
import org.apache.daffodil.grammar.Terminal
import org.apache.daffodil.dsom._
import org.apache.daffodil.layers.LayerCompileInfo
-import org.apache.daffodil.layers.LayerSerializedInfo
+import org.apache.daffodil.layers.LayerRuntimeInfo
import org.apache.daffodil.processors.parsers.{Parser => DaffodilParser}
import org.apache.daffodil.processors.unparsers.{Unparser => DaffodilUnparser}
import org.apache.daffodil.util.Misc
@@ -36,7 +36,7 @@ case class LayeredSequence(sq: SequenceGroupTermBase,
bodyTerm: SequenceChild)
val layerCompileInfo = new LayerCompileInfo(
sq,
- new LayerSerializedInfo(
+ new LayerRuntimeInfo(
sq.sequenceRuntimeData,
sq.maybeLayerCharsetEv,
Maybe.toMaybe(sq.optionLayerLengthKind),
@@ -62,12 +62,12 @@ case class LayeredSequence(sq: SequenceGroupTermBase,
bodyTerm: SequenceChild)
lazy val bodyParser = bodyTerm.parser
lazy val bodyUnparser = bodyTerm.unparser
- lazy val layerSerializedInfo = layerCompileInfo.layerSerializedInfo
+ lazy val layerRuntimeInfo = layerCompileInfo.layerRuntimeInfo
override lazy val parser: DaffodilParser =
- new LayeredSequenceParser(srd, layerTransformerFactory,
layerSerializedInfo, bodyParser)
+ new LayeredSequenceParser(srd, layerTransformerFactory, layerRuntimeInfo,
bodyParser)
override lazy val unparser: DaffodilUnparser = {
- new LayeredSequenceUnparser(srd, layerTransformerFactory,
layerSerializedInfo, bodyUnparser)
+ new LayeredSequenceUnparser(srd, layerTransformerFactory,
layerRuntimeInfo, bodyUnparser)
}
}
diff --git
a/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/Base64Transformer.scala
b/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/Base64Transformer.scala
index f096fd8..67c01d8 100644
---
a/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/Base64Transformer.scala
+++
b/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/Base64Transformer.scala
@@ -19,6 +19,7 @@ package org.apache.daffodil.layers
import org.apache.daffodil.io.BoundaryMarkLimitingStream
import org.apache.daffodil.io.LayerBoundaryMarkInsertingJavaOutputStream
+import org.apache.daffodil.processors.ParseOrUnparseState
import org.apache.daffodil.schema.annotation.props.gen.LayerLengthKind
final class Base64MIMELayerCompiler
@@ -67,9 +68,9 @@ final class Base64MIMETransformer(name: String,
layerRuntimeInfo: LayerRuntimeIn
b64
}
- override def wrapLimitingStream(jis: java.io.InputStream) = {
- val javaCharset = layerRuntimeInfo.optLayerCharset.get
- val layerBoundaryMark = layerRuntimeInfo.optLayerBoundaryMark.get
+ override def wrapLimitingStream(state: ParseOrUnparseState, jis:
java.io.InputStream) = {
+ val javaCharset = layerRuntimeInfo.optLayerCharset(state).get
+ val layerBoundaryMark = layerRuntimeInfo.optLayerBoundaryMark(state).get
val s = BoundaryMarkLimitingStream(jis, layerBoundaryMark, javaCharset)
s
}
@@ -79,9 +80,9 @@ final class Base64MIMETransformer(name: String,
layerRuntimeInfo: LayerRuntimeIn
b64
}
- override protected def wrapLimitingStream(jos: java.io.OutputStream) = {
- val javaCharset = layerRuntimeInfo.optLayerCharset.get
- val layerBoundaryMark = layerRuntimeInfo.optLayerBoundaryMark.get
+ override protected def wrapLimitingStream(state: ParseOrUnparseState, jos:
java.io.OutputStream) = {
+ val javaCharset = layerRuntimeInfo.optLayerCharset(state).get
+ val layerBoundaryMark = layerRuntimeInfo.optLayerBoundaryMark(state).get
val newJOS = new LayerBoundaryMarkInsertingJavaOutputStream(jos,
layerBoundaryMark, javaCharset)
newJOS
}
diff --git
a/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/ByteSwapTransformer.scala
b/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/ByteSwapTransformer.scala
index 8112e60..e48e3f8 100644
---
a/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/ByteSwapTransformer.scala
+++
b/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/ByteSwapTransformer.scala
@@ -24,6 +24,7 @@ import java.util.Deque
import org.apache.daffodil.exceptions.Assert
import org.apache.daffodil.schema.annotation.props.gen.LayerLengthKind
import org.apache.daffodil.io.ExplicitLengthLimitingStream
+import org.apache.daffodil.processors.ParseOrUnparseState
final class FourByteSwapLayerCompiler
extends LayerCompiler("fourbyteswap") {
@@ -189,8 +190,8 @@ class ByteSwapTransformer(wordsize: Int, name: String,
layerRuntimeInfo: LayerRu
s
}
- override def wrapLimitingStream(jis: java.io.InputStream) = {
- val layerLengthInBytes = layerRuntimeInfo.optLayerLength.get
+ override def wrapLimitingStream(state: ParseOrUnparseState, jis:
java.io.InputStream) = {
+ val layerLengthInBytes = layerRuntimeInfo.optLayerLength(state).get
val s = new ExplicitLengthLimitingStream(jis, layerLengthInBytes)
s
@@ -201,7 +202,7 @@ class ByteSwapTransformer(wordsize: Int, name: String,
layerRuntimeInfo: LayerRu
s
}
- override protected def wrapLimitingStream(jos: java.io.OutputStream) = {
+ override protected def wrapLimitingStream(state: ParseOrUnparseState, jos:
java.io.OutputStream) = {
jos // just return jos. The way the length will be used/stored is by way of
// taking the content length of the enclosing element. That will measure
the
// length relative to the "ultimate" data output stream.
diff --git
a/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/GZipTransformer.scala
b/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/GZipTransformer.scala
index 20a58f1..04a67c2 100644
---
a/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/GZipTransformer.scala
+++
b/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/GZipTransformer.scala
@@ -19,6 +19,7 @@ package org.apache.daffodil.layers
import org.apache.daffodil.schema.annotation.props.gen.LayerLengthKind
import org.apache.daffodil.io.ExplicitLengthLimitingStream
+import org.apache.daffodil.processors.ParseOrUnparseState
final class GZIPLayerCompiler
extends LayerCompiler("gzip") {
@@ -53,8 +54,8 @@ class GZIPTransformer(name: String, layerRuntimeInfo:
LayerRuntimeInfo)
s
}
- override def wrapLimitingStream(jis: java.io.InputStream) = {
- val layerLengthInBytes = layerRuntimeInfo.optLayerLength.get
+ override def wrapLimitingStream(state: ParseOrUnparseState, jis:
java.io.InputStream) = {
+ val layerLengthInBytes = layerRuntimeInfo.optLayerLength(state).get
val s = new ExplicitLengthLimitingStream(jis, layerLengthInBytes)
s
}
@@ -64,7 +65,7 @@ class GZIPTransformer(name: String, layerRuntimeInfo:
LayerRuntimeInfo)
s
}
- override protected def wrapLimitingStream(jis: java.io.OutputStream) = {
+ override protected def wrapLimitingStream(state: ParseOrUnparseState, jis:
java.io.OutputStream) = {
jis // just return jis. The way the length will be used/stored is by way of
// taking the content length of the enclosing element. That will measure
the
// length relative to the "ultimate" data output stream.
diff --git
a/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/LineFoldedTransformer.scala
b/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/LineFoldedTransformer.scala
index 927ead1..f182c45 100644
---
a/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/LineFoldedTransformer.scala
+++
b/daffodil-runtime1-layers/src/main/scala/org/apache/daffodil/layers/LineFoldedTransformer.scala
@@ -28,6 +28,7 @@ import java.io.InputStream
import org.apache.daffodil.exceptions.ThrowsSDE
import org.apache.daffodil.schema.annotation.props.Enum
import org.apache.daffodil.io.RegexLimitingStream
+import org.apache.daffodil.processors.ParseOrUnparseState
/*
* This and related classes implement so called "line folding" from
@@ -140,14 +141,14 @@ object LineFoldMode extends Enum[LineFoldMode] {
final class LineFoldedTransformerDelimited(mode: LineFoldMode,
layerRuntimeInfo: LayerRuntimeInfo)
extends LayerTransformer(mode.transformName, layerRuntimeInfo) {
- override protected def wrapLimitingStream(jis: java.io.InputStream) = {
+ override protected def wrapLimitingStream(state: ParseOrUnparseState, jis:
java.io.InputStream) = {
// regex means CRLF not followed by space or tab.
// NOTE: this regex cannot contain ANY capturing groups (per scaladoc on
RegexLimitingStream)
val s = new RegexLimitingStream(jis, "\\r\\n(?!(?:\\t|\\ ))", "\r\n",
StandardCharsets.ISO_8859_1)
s
}
- override protected def wrapLimitingStream(jos: java.io.OutputStream) = {
+ override protected def wrapLimitingStream(state: ParseOrUnparseState, jos:
java.io.OutputStream) = {
//
// Q: How do we insert a CRLF "not followed by tab or space" when we don't
// control what follows?
@@ -177,11 +178,11 @@ final class LineFoldedTransformerDelimited(mode:
LineFoldMode, layerRuntimeInfo:
class LineFoldedTransformerImplicit(mode: LineFoldMode, layerRuntimeInfo:
LayerRuntimeInfo)
extends LayerTransformer(mode.transformName, layerRuntimeInfo) {
- override protected def wrapLimitingStream(jis: java.io.InputStream) = {
+ override protected def wrapLimitingStream(state: ParseOrUnparseState, jis:
java.io.InputStream) = {
jis // no limiting - just pull input until EOF.
}
- override protected def wrapLimitingStream(jos: java.io.OutputStream) = {
+ override protected def wrapLimitingStream(state: ParseOrUnparseState, jos:
java.io.OutputStream) = {
jos // no limiting - just write output until EOF.
}
diff --git
a/daffodil-runtime1-unparser/src/main/scala/org/apache/daffodil/processors/unparsers/LayeredSequenceUnparser.scala
b/daffodil-runtime1-unparser/src/main/scala/org/apache/daffodil/processors/unparsers/LayeredSequenceUnparser.scala
index e77ed48..97f9829 100644
---
a/daffodil-runtime1-unparser/src/main/scala/org/apache/daffodil/processors/unparsers/LayeredSequenceUnparser.scala
+++
b/daffodil-runtime1-unparser/src/main/scala/org/apache/daffodil/processors/unparsers/LayeredSequenceUnparser.scala
@@ -17,27 +17,27 @@
package org.apache.daffodil.processors.unparsers
-import org.apache.daffodil.layers.LayerSerializedInfo
+import org.apache.daffodil.layers.LayerRuntimeInfo
import org.apache.daffodil.layers.LayerTransformerFactory
import org.apache.daffodil.processors.SequenceRuntimeData
class LayeredSequenceUnparser(ctxt: SequenceRuntimeData,
layerTransformerFactory: LayerTransformerFactory,
- layerSerializedInfo: LayerSerializedInfo,
+ layerRuntimeInfo: LayerRuntimeInfo,
childUnparser: SequenceChildUnparser)
extends OrderedUnseparatedSequenceUnparser(ctxt, Seq(childUnparser)) {
override lazy val runtimeDependencies =
- layerSerializedInfo.evaluatables.toVector
+ layerRuntimeInfo.evaluatables.toVector
override def nom = "LayeredSequence"
override def unparse(state: UState): Unit = {
- val layerTransformer =
layerTransformerFactory.newInstance(layerSerializedInfo.layerRuntimeInfo(state))
+ val layerTransformer =
layerTransformerFactory.newInstance(layerRuntimeInfo)
val originalDOS = state.dataOutputStream // layer will output to the
original, then finish it upon closing.
- val newDOS = originalDOS.addBuffered // newDOS is where unparsers after
this one returns will unparse into.
+ val newDOS = originalDOS.addBuffered() // newDOS is where unparsers after
this one returns will unparse into.
//
// New layerDOS is where the layer will unparse into. Ultimately anything
written
// to layerDOS ends up, post transform, in originalDOS.
diff --git
a/daffodil-runtime1/src/main/scala/org/apache/daffodil/layers/LayerCompiler.scala
b/daffodil-runtime1/src/main/scala/org/apache/daffodil/layers/LayerCompiler.scala
index 70fd929..0775b5c 100644
---
a/daffodil-runtime1/src/main/scala/org/apache/daffodil/layers/LayerCompiler.scala
+++
b/daffodil-runtime1/src/main/scala/org/apache/daffodil/layers/LayerCompiler.scala
@@ -43,7 +43,7 @@ abstract class LayerCompiler(nom: String) {
/**
* Performs all schema-compile-time checking for the layer parameters, and
constructs
- * a LayerTransfomrmerFactory which is the serialized runtime object that
becomes part of the
+ * a LayerTransformerFactory which is the serialized runtime object that
becomes part of the
* processor.
* @param layerCompileInfo Schema-compilation time information about the
layer properties.
* @return
@@ -52,24 +52,18 @@ abstract class LayerCompiler(nom: String) {
}
-sealed trait VariableHandle
-
-final class VariableHandleImpl(private[layers] val vrd: VariableRuntimeData)
// package private because its an internal thing.
-extends VariableHandle with Serializable
-
-
/**
* Provides access to DFDL schema compile-time information about the layer
properties.
*
* Allows reporting of schema definition errors and warnings at schema compile
time.
*/
final class LayerCompileInfo(sequence: ImplementsThrowsOrSavesSDE,
- val layerSerializedInfo: LayerSerializedInfo) {
+ val layerRuntimeInfo: LayerRuntimeInfo) {
- private def lsi = layerSerializedInfo
- private def srd: SequenceRuntimeData = lsi.srd
+ private def lri = layerRuntimeInfo
+ private def srd: SequenceRuntimeData = lri.runtimeData
- def getVariableHandle(prefix: String, namespace: String, localName: String,
primType: PrimType) : VariableHandle = {
+ def getVariableRuntimeData(prefix: String, namespace: String, localName:
String, primType: PrimType) : VariableRuntimeData = {
val varNamespace = NS(namespace)
val qName = RefQName(Some(prefix), localName, varNamespace).toGlobalQName
val vrd = srd.variableMap.getVariableRuntimeData(qName).getOrElse {
@@ -77,7 +71,7 @@ final class LayerCompileInfo(sequence:
ImplementsThrowsOrSavesSDE,
}
srd.schemaDefinitionUnless(vrd.primType == primType,
"Variable '%s' is not of type '%s'.", qName.toExtendedSyntax, primType)
- new VariableHandleImpl(vrd)
+ vrd
}
/**
@@ -86,32 +80,32 @@ final class LayerCompileInfo(sequence:
ImplementsThrowsOrSavesSDE,
* If undefined, the value is None
*/
def optLayerJavaCharsetOptConstantValue:
Option[Option[java.nio.charset.Charset]] = {
- if (lsi.maybeLayerCharsetEv.isEmpty) None
+ if (lri.maybeLayerCharsetEv.isEmpty) None
else
- lsi.maybeLayerCharsetEv.get.optConstant.map {
+ lri.maybeLayerCharsetEv.get.optConstant.map {
case java: BitsCharsetJava => Some(java.javaCharset)
case _: BitsCharsetNonByteSize => None
}
}
def optLayerLengthKind: Option[LayerLengthKind] = {
- lsi.maybeLayerLengthKind.toScalaOption
+ lri.maybeLayerLengthKind.toScalaOption
}
def optLayerLengthOptConstantValue: Option[Option[Long]] = {
- if (lsi.maybeLayerLengthEv.isEmpty) None
- else Some(lsi.maybeLayerLengthEv.get.optConstant.map {
+ if (lri.maybeLayerLengthEv.isEmpty) None
+ else Some(lri.maybeLayerLengthEv.get.optConstant.map {
_.toLong
})
}
def optLayerLengthUnits: Option[LayerLengthUnits] = {
- lsi.maybeLayerLengthUnits.toScalaOption
+ lri.maybeLayerLengthUnits.toScalaOption
}
def optLayerBoundaryMarkOptConstantValue: Option[Option[String]] = {
- if (lsi.maybeLayerBoundaryMarkEv.isEmpty) None
- else Some(lsi.maybeLayerBoundaryMarkEv.get.optConstant)
+ if (lri.maybeLayerBoundaryMarkEv.isEmpty) None
+ else Some(lri.maybeLayerBoundaryMarkEv.get.optConstant)
}
def schemaDefinitionError(message: String, args: Any*): Nothing = {
@@ -122,9 +116,9 @@ final class LayerCompileInfo(sequence:
ImplementsThrowsOrSavesSDE,
sequence.SDW(WarnID.LayerCompileWarning, message, args: _*)
}
- final def SDEUnless(test: Boolean, message: String, args: Any*) = if (!test)
SDE(message, args: _*)
+ def SDEUnless(test: Boolean, message: String, args: Any*): Unit = if (!test)
SDE(message, args: _*)
- final def SDE(message: String, args: Any*): Nothing =
schemaDefinitionError(message, args: _*)
+ def SDE(message: String, args: Any*): Nothing =
schemaDefinitionError(message, args: _*)
}
diff --git
a/daffodil-runtime1/src/main/scala/org/apache/daffodil/layers/LayerTransformer.scala
b/daffodil-runtime1/src/main/scala/org/apache/daffodil/layers/LayerTransformer.scala
index 50560bb..e4bc1a5 100644
---
a/daffodil-runtime1/src/main/scala/org/apache/daffodil/layers/LayerTransformer.scala
+++
b/daffodil-runtime1/src/main/scala/org/apache/daffodil/layers/LayerTransformer.scala
@@ -33,7 +33,6 @@ import org.apache.daffodil.util.Misc
import org.apache.daffodil.exceptions.Assert
import org.apache.daffodil.exceptions.SchemaFileLocation
import org.apache.daffodil.infoset.DataValue
-import org.apache.daffodil.infoset.DataValue.DataValuePrimitive
import org.apache.daffodil.util.Maybe
import org.apache.daffodil.util.Maybe.One
import org.apache.daffodil.util.Maybe.Nope
@@ -43,10 +42,10 @@ import org.apache.daffodil.processors.parsers.PState
import org.apache.daffodil.processors.unparsers.UState
import org.apache.daffodil.io.DirectOrBufferedDataOutputStream
import org.apache.daffodil.processors.Evaluatable
-import org.apache.daffodil.processors.RuntimeData
import passera.unsigned.ULong
import org.apache.daffodil.processors.SequenceRuntimeData
import org.apache.daffodil.processors.SuspendableOperation
+import org.apache.daffodil.processors.VariableRuntimeData
import org.apache.daffodil.processors.charset.BitsCharsetJava
import org.apache.daffodil.util.ByteBufferOutputStream
@@ -67,7 +66,7 @@ abstract class LayerTransformer(layerName: String,
layerRuntimeInfo: LayerRuntim
protected def wrapLayerDecoder(jis: InputStream): InputStream
- protected def wrapLimitingStream(jis: InputStream): InputStream
+ protected def wrapLimitingStream(state: ParseOrUnparseState, jis:
InputStream): InputStream
final def wrapJavaInputStream(s: InputSourceDataInputStream, fInfo:
FormatInfo): InputStream = {
new JavaIOInputStream(s, fInfo)
@@ -75,7 +74,7 @@ abstract class LayerTransformer(layerName: String,
layerRuntimeInfo: LayerRuntim
protected def wrapLayerEncoder(jos: OutputStream): OutputStream
- protected def wrapLimitingStream(jis: OutputStream): OutputStream
+ protected def wrapLimitingStream(state: ParseOrUnparseState, jis:
OutputStream): OutputStream
final def wrapJavaOutputStream(s: DataOutputStream, fInfo: FormatInfo):
OutputStream = {
new JavaIOOutputStream(s, fInfo)
@@ -89,7 +88,7 @@ abstract class LayerTransformer(layerName: String,
layerRuntimeInfo: LayerRuntim
final def addLayer(s: InputSourceDataInputStream, state: PState):
InputSourceDataInputStream = {
val jis = wrapJavaInputStream(s, state)
- val limitedJIS = wrapLimitingStream(jis)
+ val limitedJIS = wrapLimitingStream(state, jis)
val decodedInputStream = wrapLayerDecoder(limitedJIS)
val newDIS = InputSourceDataInputStream(decodedInputStream)
@@ -104,7 +103,7 @@ abstract class LayerTransformer(layerName: String,
layerRuntimeInfo: LayerRuntim
final def addLayer(s: DataOutputStream, state: UState):
DirectOrBufferedDataOutputStream = {
val jos = wrapJavaOutputStream(s, state)
- val limitedJOS = wrapLimitingStream(jos)
+ val limitedJOS = wrapLimitingStream(state, jos)
val encodedOutputStream = wrapLayerEncoder(limitedJOS)
val newDOS = DirectOrBufferedDataOutputStream(
encodedOutputStream,
@@ -224,54 +223,28 @@ case class LayerNotEnoughDataException(sfl:
SchemaFileLocation, dataLocation: Da
override def modeName = "Parse"
}
-final class LayerSerializedInfo(val srd: SequenceRuntimeData,
+/**
+ * Allows access to all the layer properties, if defined, including
+ * evaluating expressions if the properties values are defined as expressions.
+ */
+final class LayerRuntimeInfo(
+ srd: SequenceRuntimeData,
val maybeLayerCharsetEv: Maybe[LayerCharsetEv],
val maybeLayerLengthKind: Maybe[LayerLengthKind],
val maybeLayerLengthEv: Maybe[LayerLengthEv],
val maybeLayerLengthUnits: Maybe[LayerLengthUnits],
val maybeLayerBoundaryMarkEv: Maybe[LayerBoundaryMarkEv])
-extends Serializable {
+ extends Serializable
+{
def evaluatables: Seq[Evaluatable[AnyRef]] =
maybeLayerCharsetEv.toScalaOption.toSeq ++
maybeLayerLengthEv.toScalaOption.toSeq ++
maybeLayerBoundaryMarkEv.toScalaOption.toSeq
+ def runtimeData: SequenceRuntimeData = srd
- def layerRuntimeInfo(state: ParseOrUnparseState): LayerRuntimeInfo =
- new LayerRuntimeInfo(state, srd,
- maybeLayerCharsetEv,
- maybeLayerLengthKind,
- maybeLayerLengthEv,
- maybeLayerLengthUnits,
- maybeLayerBoundaryMarkEv)
-}
-
-/**
- * Allows access to all the layer properties, if defined, including
- * evaluating expressions if the properties values are defined as expressions.
- * Also provides access to variables.
- */
-
-final class LayerRuntimeInfo(state: ParseOrUnparseState,
- srd: SequenceRuntimeData,
- maybeLayerCharsetEv: Maybe[LayerCharsetEv],
- maybeLayerLengthKind: Maybe[LayerLengthKind],
- maybeLayerLengthEv: Maybe[LayerLengthEv],
- maybeLayerLengthUnits: Maybe[LayerLengthUnits],
- maybeLayerBoundaryMarkEv: Maybe[LayerBoundaryMarkEv])
-{
-
- def SDE(msg: String, args: Any*) =
- state.SDE(msg, args)
-
- /**
- * Only needed because unparser suspensions need one, and it is used in too
many places
- * @return
- */
- def runtimeData: RuntimeData = srd
-
- def optLayerCharset: Option[Charset] = maybeLayerCharsetEv.toScalaOption.map
{
+ def optLayerCharset(state: ParseOrUnparseState): Option[Charset] =
maybeLayerCharsetEv.toScalaOption.map {
_.evaluate(state)
} match {
case Some(bitsCharsetJava: BitsCharsetJava) =>
Some(bitsCharsetJava.javaCharset)
@@ -281,16 +254,11 @@ final class LayerRuntimeInfo(state: ParseOrUnparseState,
def optLayerLengthKind: Option[LayerLengthKind] =
maybeLayerLengthKind.toScalaOption
- def optLayerLength: Option[Long] = maybeLayerLengthEv.toScalaOption.map{
_.evaluate(state) }
+ def optLayerLength(state: ParseOrUnparseState): Option[Long] =
maybeLayerLengthEv.toScalaOption.map{ _.evaluate(state) }
def optLayerLengthUnits: Option[LayerLengthUnits] =
maybeLayerLengthUnits.toScalaOption
- def optLayerBoundaryMark: Option[String] =
maybeLayerBoundaryMarkEv.toScalaOption.map{ _.evaluate(state) }
-
- def getVariable(vh: VariableHandle): DataValuePrimitive =
state.getVariable(vh.asInstanceOf[VariableHandleImpl].vrd, srd)
-
- def setVariable(vh: VariableHandle, value: DataValuePrimitive) =
- state.setVariable(vh.asInstanceOf[VariableHandleImpl].vrd, value, srd)
+ def optLayerBoundaryMark(state: ParseOrUnparseState): Option[String] =
maybeLayerBoundaryMarkEv.toScalaOption.map{ _.evaluate(state) }
def schemaFileLocation = srd.schemaFileLocation
}
@@ -299,8 +267,8 @@ final class LayerRuntimeInfo(state: ParseOrUnparseState,
abstract class ByteBufferExplicitLengthLayerTransform[T](
layerRuntimeInfo: LayerRuntimeInfo,
layerName: String,
- inputVars: Seq[VariableHandle],
- outputVar: VariableHandle)
+ inputVars: Seq[VariableRuntimeData],
+ outputVar: VariableRuntimeData)
extends LayerTransformer(layerName, layerRuntimeInfo) {
/**
@@ -325,7 +293,7 @@ abstract class ByteBufferExplicitLengthLayerTransform[T](
private var limitingOutputStream: ByteBufferOutputStream = _
- protected def compute(isUnparse: Boolean, inputs: Seq[Any],
byteBuffer:ByteBuffer): T
+ protected def compute(s: ParseOrUnparseState, isUnparse: Boolean, inputs:
Seq[Any], byteBuffer:ByteBuffer): T
/**
* Assigned by wrapLimitingStream for parsing to capture the original source
@@ -348,20 +316,20 @@ abstract class ByteBufferExplicitLengthLayerTransform[T](
protected def wrapLayerDecoder(jis: InputStream) = jis
- private def setup(layerRuntimeInfo: LayerRuntimeInfo): Unit = {
- val olc = layerRuntimeInfo.optLayerCharset
+ private def setup(state: ParseOrUnparseState, layerRuntimeInfo:
LayerRuntimeInfo): Unit = {
+ val olc = layerRuntimeInfo.optLayerCharset(state)
optLayerCharset_ = olc
explicitLengthInBytes_ =
if (layerBuiltInConstantLength.isDefined) layerBuiltInConstantLength.get
- else layerRuntimeInfo.optLayerLength.getOrElse {
- layerRuntimeInfo.SDE("The layer does not have a built in length and
the dfdl:layerLengthKind is 'explicit' yet no dfdlx:layerLength was provided.")
+ else layerRuntimeInfo.optLayerLength(state).getOrElse {
+ state.SDE("The layer does not have a built in length and the
dfdl:layerLengthKind is 'explicit' yet no dfdlx:layerLength was provided.")
}
byteArr = new Array[Byte](explicitLengthInBytes_ .toInt)
byteBuf = ByteBuffer.wrap(byteArr)
}
- protected def wrapLimitingStream(jis: InputStream) = {
- setup(layerRuntimeInfo)
+ protected def wrapLimitingStream(state: ParseOrUnparseState, jis:
InputStream) = {
+ setup(state, layerRuntimeInfo)
optOriginalInputStream = One(jis)
val limitingInputStream = new ByteArrayInputStream(byteArr)
limitingInputStream
@@ -369,8 +337,8 @@ abstract class ByteBufferExplicitLengthLayerTransform[T](
protected def wrapLayerEncoder(jos: OutputStream) = jos
- protected def wrapLimitingStream(jos: OutputStream) = {
- setup(layerRuntimeInfo)
+ protected def wrapLimitingStream(state: ParseOrUnparseState, jos:
OutputStream) = {
+ setup(state, layerRuntimeInfo)
optOriginalOutputStream = One(jos)
limitingOutputStream = new ByteBufferOutputStream(byteBuf)
limitingOutputStream
@@ -389,9 +357,9 @@ abstract class ByteBufferExplicitLengthLayerTransform[T](
case eof: EOFException =>
throw new
LayerNotEnoughDataException(layerRuntimeInfo.schemaFileLocation,
s.currentLocation, eof, explicitLengthInBytes_ .toInt)
}
- val inputs = inputVars.map{ inputVar =>
layerRuntimeInfo.getVariable(inputVar).getAnyRef }
- val checksum: T = compute(isUnparse = false, inputs, byteBuf)
- layerRuntimeInfo.setVariable(outputVar,
DataValue.unsafeFromAnyRef(checksum.asInstanceOf[AnyRef])) // assign to result
variable.
+ val inputs = inputVars.map{ inputVar => s.getVariable(inputVar,
layerRuntimeInfo.runtimeData).getAnyRef }
+ val checksum: T = compute(s, isUnparse = false, inputs, byteBuf)
+ s.setVariable(outputVar,
DataValue.unsafeFromAnyRef(checksum.asInstanceOf[AnyRef]),
layerRuntimeInfo.runtimeData) // assign to result variable.
}
final class SuspendableChecksumLayerOperation()
@@ -417,9 +385,11 @@ abstract class ByteBufferExplicitLengthLayerTransform[T](
protected def continuation(ustate: UState): Unit = {
Assert.invariant(optOriginalOutputStream.isDefined)
byteBuf.position(0).limit(byteBuf.capacity())
- val inputs = inputVars.map{ inputVRD =>
layerRuntimeInfo.getVariable(inputVRD).getAnyRef }
- val finalChecksum = compute(isUnparse = true, inputs, byteBuf)
- layerRuntimeInfo.setVariable(outputVar,
DataValue.unsafeFromAnyRef(finalChecksum.asInstanceOf[AnyRef])) // assign to
the result variable.
+ val inputs = inputVars.map{ inputVRD => ustate.getVariable(inputVRD,
layerRuntimeInfo.runtimeData).getAnyRef }
+ val finalChecksum = compute(ustate, isUnparse = true, inputs, byteBuf)
+ ustate.setVariable(outputVar,
+ DataValue.unsafeFromAnyRef(finalChecksum.asInstanceOf[AnyRef]),
+ layerRuntimeInfo.runtimeData) // assign to the result variable.
// write out the layer data (which has recomputed checksum in it.
optOriginalOutputStream.get.write(byteArr)
optOriginalOutputStream.get.close()
diff --git
a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/parsers/LayeredSequenceParser.scala
b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/parsers/LayeredSequenceParser.scala
index ba55a7e..c0023d0 100644
---
a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/parsers/LayeredSequenceParser.scala
+++
b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/parsers/LayeredSequenceParser.scala
@@ -18,7 +18,7 @@
package org.apache.daffodil.processors.parsers
import org.apache.daffodil.layers.LayerNotEnoughDataException
-import org.apache.daffodil.layers.LayerSerializedInfo
+import org.apache.daffodil.layers.LayerRuntimeInfo
import org.apache.daffodil.layers.LayerTransformerFactory
import org.apache.daffodil.processors.SequenceRuntimeData
import org.apache.daffodil.util.MaybeULong
@@ -26,17 +26,17 @@ import org.apache.daffodil.util.MaybeULong
class LayeredSequenceParser(
rd: SequenceRuntimeData,
layerTransformerFactory: LayerTransformerFactory,
- layerSerializedInfo: LayerSerializedInfo,
+ layerRuntimeInfo: LayerRuntimeInfo,
bodyParser: SequenceChildParser)
extends OrderedUnseparatedSequenceParser(rd, Vector(bodyParser)) {
override def nom = "LayeredSequence"
override lazy val runtimeDependencies =
- layerSerializedInfo.evaluatables.toVector
+ layerRuntimeInfo.evaluatables.toVector
override def parse(state: PState): Unit = {
- val layerTransformer =
layerTransformerFactory.newInstance(layerSerializedInfo.layerRuntimeInfo(state))
+ val layerTransformer =
layerTransformerFactory.newInstance(layerRuntimeInfo)
val savedDIS = state.dataInputStream
val isAligned =
savedDIS.align(layerTransformer.mandatoryLayerAlignmentInBits, state)
diff --git
a/daffodil-test/src/test/scala/org/apache/daffodil/layers/AISTransformer.scala
b/daffodil-test/src/test/scala/org/apache/daffodil/layers/AISTransformer.scala
index 05c15c3..a371cee 100644
---
a/daffodil-test/src/test/scala/org/apache/daffodil/layers/AISTransformer.scala
+++
b/daffodil-test/src/test/scala/org/apache/daffodil/layers/AISTransformer.scala
@@ -31,6 +31,7 @@ import org.apache.daffodil.io.BoundaryMarkLimitingStream
import org.apache.daffodil.io.LayerBoundaryMarkInsertingJavaOutputStream
import org.apache.daffodil.io.InputSourceDataInputStream
import org.apache.daffodil.io.FormatInfo
+import org.apache.daffodil.processors.ParseOrUnparseState
import org.apache.daffodil.schema.annotation.props.gen.BinaryFloatRep
import org.apache.daffodil.schema.annotation.props.gen.BitOrder
import org.apache.daffodil.schema.annotation.props.gen.ByteOrder
@@ -97,7 +98,7 @@ class AISPayloadArmoringTransformer(name: String, layerRuntimeInfo: LayerRuntime
new AISPayloadArmoringInputStream(jis)
}
-  override def wrapLimitingStream(jis: java.io.InputStream) = {
+  override def wrapLimitingStream(state: ParseOrUnparseState, jis: java.io.InputStream) = {
val layerBoundaryMark = ","
val s = BoundaryMarkLimitingStream(jis, layerBoundaryMark, iso8859)
s
@@ -107,7 +108,7 @@ class AISPayloadArmoringTransformer(name: String, layerRuntimeInfo: LayerRuntime
new AISPayloadArmoringOutputStream(jos)
}
-  override protected def wrapLimitingStream(jos: java.io.OutputStream) = {
+  override protected def wrapLimitingStream(state: ParseOrUnparseState, jos: java.io.OutputStream) = {
     val layerBoundaryMark = ","
     val newJOS = new LayerBoundaryMarkInsertingJavaOutputStream(jos, layerBoundaryMark, iso8859)
     newJOS
diff --git a/daffodil-test/src/test/scala/org/apache/daffodil/layers/CheckDigit.scala b/daffodil-test/src/test/scala/org/apache/daffodil/layers/CheckDigit.scala
index b8b6c7a..846355c 100644
--- a/daffodil-test/src/test/scala/org/apache/daffodil/layers/CheckDigit.scala
+++ b/daffodil-test/src/test/scala/org/apache/daffodil/layers/CheckDigit.scala
@@ -17,6 +17,8 @@
package org.apache.daffodil.layers
+import org.apache.daffodil.processors.ParseOrUnparseState
+import org.apache.daffodil.processors.VariableRuntimeData
import org.apache.daffodil.util.Logger
import java.nio.ByteBuffer
@@ -25,8 +27,8 @@ import java.nio.ByteBuffer
final class CheckDigitExplicit(
name: String,
layerRuntimeInfo: LayerRuntimeInfo,
- outputVar: VariableHandle,
- inputVars: Seq[VariableHandle])
+ outputVar: VariableRuntimeData,
+ inputVars: Seq[VariableRuntimeData])
extends ByteBufferExplicitLengthLayerTransform[Int](
layerRuntimeInfo,
name,
@@ -61,9 +63,9 @@ extends ByteBufferExplicitLengthLayerTransform[Int](
*
   * The checkDigit is the total of all digits, viewed as a string, the last digit of that total.
*/
-  protected def compute(isUnparse: Boolean, inputs: Seq[Any], byteBuffer: ByteBuffer) = {
+  protected def compute(state: ParseOrUnparseState, isUnparse: Boolean, inputs: Seq[Any], byteBuffer: ByteBuffer) = {
assert(inputs.length == 1)
- val charset = layerRuntimeInfo.optLayerCharset.get
+ val charset = layerRuntimeInfo.optLayerCharset(state).get
assert(charset.newDecoder().maxCharsPerByte() == 1) // is a SBCS charset
val isVerbose = parseParams(inputs(0).asInstanceOf[String]).isVerbose
val s = new String(byteBuffer.array(), charset)
diff --git a/daffodil-test/src/test/scala/org/apache/daffodil/layers/CheckDigitLayerCompiler.scala b/daffodil-test/src/test/scala/org/apache/daffodil/layers/CheckDigitLayerCompiler.scala
index 5d88fd6..3707d4b 100644
--- a/daffodil-test/src/test/scala/org/apache/daffodil/layers/CheckDigitLayerCompiler.scala
+++ b/daffodil-test/src/test/scala/org/apache/daffodil/layers/CheckDigitLayerCompiler.scala
@@ -18,6 +18,7 @@
package org.apache.daffodil.layers
import org.apache.daffodil.dpath.NodeInfo.PrimType
+import org.apache.daffodil.processors.VariableRuntimeData
import org.apache.daffodil.schema.annotation.props.gen.LayerLengthKind
import org.apache.daffodil.schema.annotation.props.gen.LayerLengthUnits
@@ -32,7 +33,7 @@ extends LayerCompiler("checkDigit") {
override def compileLayer(layerCompileInfo: LayerCompileInfo) = {
val outputVar =
- layerCompileInfo.getVariableHandle(
+ layerCompileInfo.getVariableRuntimeData(
variablesPreferredNamespacePrefix,
variablesNamespace,
localNameOfVariableToWrite,
@@ -40,7 +41,7 @@ extends LayerCompiler("checkDigit") {
val inputVRDs = localNamesAndTypesOfVariablesToRead.map {
case (local, primType) =>
-        layerCompileInfo.getVariableHandle(variablesPreferredNamespacePrefix, variablesNamespace, local, primType)
+        layerCompileInfo.getVariableRuntimeData(variablesPreferredNamespacePrefix, variablesNamespace, local, primType)
}
layerCompileInfo.optLayerLengthKind match {
case Some(LayerLengthKind.Explicit) => // ok
@@ -63,7 +64,7 @@ extends LayerCompiler("checkDigit") {
}
}
-class CheckDigitLayerTransformerFactory(name: String, inputVars: Seq[VariableHandle], outputVar: VariableHandle)
+class CheckDigitLayerTransformerFactory(name: String, inputVars: Seq[VariableRuntimeData], outputVar: VariableRuntimeData)
extends LayerTransformerFactory("checkDigit") {
override def newInstance(layerRuntimeInfo: LayerRuntimeInfo)= {
diff --git a/daffodil-test/src/test/scala/org/apache/daffodil/layers/IPv4Checksum.scala b/daffodil-test/src/test/scala/org/apache/daffodil/layers/IPv4Checksum.scala
index 49c010a..5850823 100644
--- a/daffodil-test/src/test/scala/org/apache/daffodil/layers/IPv4Checksum.scala
+++ b/daffodil-test/src/test/scala/org/apache/daffodil/layers/IPv4Checksum.scala
@@ -18,6 +18,8 @@
package org.apache.daffodil.layers
import org.apache.daffodil.exceptions.Assert
+import org.apache.daffodil.processors.ParseOrUnparseState
+import org.apache.daffodil.processors.VariableRuntimeData
import passera.unsigned.UShort
import java.nio.ByteBuffer
@@ -31,12 +33,12 @@ import java.nio.ByteBuffer
final class IPv4Checksum(
name: String,
layerRuntimeInfo: LayerRuntimeInfo,
- outputVar: VariableHandle)
+ outputVar: VariableRuntimeData)
extends ByteBufferExplicitLengthLayerTransform[Int](
layerRuntimeInfo,
name,
inputVars = Seq(),
- outputVar: VariableHandle) {
+ outputVar: VariableRuntimeData) {
/**
* This layer is always exactly 20 bytes long.
@@ -45,7 +47,7 @@ extends ByteBufferExplicitLengthLayerTransform[Int](
private def chksumShortIndex = 5
-  override protected def compute(isUnparse: Boolean, inputs: Seq[Any], byteBuffer: ByteBuffer) = {
+  override protected def compute(state: ParseOrUnparseState, isUnparse: Boolean, inputs: Seq[Any], byteBuffer: ByteBuffer) = {
val shortBuf = byteBuffer.asShortBuffer()
var i = 0
diff --git a/daffodil-test/src/test/scala/org/apache/daffodil/layers/IPv4ChecksumLayerCompiler.scala b/daffodil-test/src/test/scala/org/apache/daffodil/layers/IPv4ChecksumLayerCompiler.scala
index 32c8297..412818c 100644
--- a/daffodil-test/src/test/scala/org/apache/daffodil/layers/IPv4ChecksumLayerCompiler.scala
+++ b/daffodil-test/src/test/scala/org/apache/daffodil/layers/IPv4ChecksumLayerCompiler.scala
@@ -18,6 +18,7 @@
package org.apache.daffodil.layers
import org.apache.daffodil.dpath.NodeInfo.PrimType
+import org.apache.daffodil.processors.VariableRuntimeData
import org.apache.daffodil.schema.annotation.props.gen.LayerLengthKind
import org.apache.daffodil.schema.annotation.props.gen.LayerLengthUnits
@@ -32,7 +33,7 @@ final class IPv4ChecksumLayerCompiler
override def compileLayer(layerCompileInfo: LayerCompileInfo) = {
- val outputVar = layerCompileInfo.getVariableHandle(
+ val outputVar = layerCompileInfo.getVariableRuntimeData(
variablesPreferredNamespacePrefix,
variablesNamespace,
localNameOfVariableToWrite,
@@ -57,7 +58,7 @@ final class IPv4ChecksumLayerCompiler
}
}
-class IPv4ChecksumLayerTransformerFactory(name: String, outputVar: VariableHandle)
+class IPv4ChecksumLayerTransformerFactory(name: String, outputVar: VariableRuntimeData)
extends LayerTransformerFactory(name) {
override def newInstance(layerRuntimeInfo: LayerRuntimeInfo)= {
diff --git a/daffodil-test/src/test/scala/org/apache/daffodil/layers/TestIPv4.scala b/daffodil-test/src/test/scala/org/apache/daffodil/layers/TestIPv4.scala
index d08d206..27e1c70 100644
--- a/daffodil-test/src/test/scala/org/apache/daffodil/layers/TestIPv4.scala
+++ b/daffodil-test/src/test/scala/org/apache/daffodil/layers/TestIPv4.scala
@@ -38,7 +38,7 @@ class TestIPv4 {
@Test def test_IPv4_1(): Unit = { runner.runOneTest("IPv4_1") }
// DAFFODIL-2608
- // @Test def test_IPv4_array(): Unit = { runner.runOneTest("IPv4_array") }
+ @Test def test_IPv4_array(): Unit = { runner.runOneTest("IPv4_array") }
@Test def test_IPv4_1e(): Unit = { runner.runOneTest("IPv4_1e") }