This is an automated email from the ASF dual-hosted git repository.
jinterrante pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-daffodil.git
The following commit(s) were added to refs/heads/master by this push:
new b6321ee Drop support for Scala 2.11
b6321ee is described below
commit b6321eec287d753fb6ac876f36d071c1199318d5
Author: John Interrante <[email protected]>
AuthorDate: Sat Oct 3 18:07:19 2020 -0400
Drop support for Scala 2.11
Scala 2.11 is end of life and there are no plans to make a new 2.11
release. The last time a new Scala 2.11 release came out was 2.11.12
at the end of 2017. We kept publishing Scala 2.11 builds because
Apache Spark did not start supporting Scala 2.12 until November 2018,
but the latest Spark 3.x releases support only 2.12 and have removed
support for 2.11.
A pull request in progress (#422) depends on an open source library
(os-lib) which has not published any new Scala 2.11 builds since March
2019. We may also need to stop building on Scala 2.11 before we can
update some of our dependencies to newer major versions
(DAFFODIL-2269).
In .github/workflows/main.yml, drop 2.11 compile checks. Also, no
need to install sbt since it's already installed on GitHub runners.
In build.sbt, publish only 2.12 builds.
In daffodil-lib/src/main/scala/org/apache/daffodil/util/MStack.scala,
and
daffodil-runtime1-unparser/src/main/scala/org/apache/daffodil/processors/unparsers/ChoiceAndOtherVariousUnparsers.scala,
delete obsolete comment.
Add a test to TestPropertyGenerator to increase total code coverage
(it dropped below 80%; this test helps, but it is not enough on its own).
Remove
daffodil-runtime1/src/main/scala-2.11/org/apache/daffodil/infoset/DataValue.scala
and move
daffodil-runtime1/src/main/scala-2.12/org/apache/daffodil/infoset/DataValue.scala
to main scala directory (also delete obsolete comment).
DAFFODIL-2404
---
.github/workflows/main.yml | 30 +--
build.sbt | 2 +-
.../scala/org/apache/daffodil/util/MStack.scala | 1 -
.../daffodil/propGen/TestPropertyGenerator.scala | 21 ++
.../unparsers/ChoiceAndOtherVariousUnparsers.scala | 8 -
.../org/apache/daffodil/infoset/DataValue.scala | 253 ---------------------
.../org/apache/daffodil/infoset/DataValue.scala | 6 -
7 files changed, 30 insertions(+), 291 deletions(-)
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index ce75e07..57c80c1 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -20,16 +20,21 @@ on: [push, pull_request]
jobs:
test:
name: Java ${{ matrix.java_version }}, Scala ${{ matrix.scala_version }},
${{ matrix.os }}
- runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
- java_version: [ '8.x', '9.x', '11.x' ]
- scala_version: [ '2.12.11', '2.11.12' ]
+ java_version: [ 8, 9, 11 ]
+ scala_version: [ 2.12.11 ]
os: [ 'ubuntu-latest', 'windows-latest' ]
env:
SBT: sbt -J-Xms1024m -J-Xmx5120m -J-XX:ReservedCodeCacheSize=512m
-J-XX:MaxMetaspaceSize=1024m ++${{ matrix.scala_version }} coverage
SBTNOCOV: sbt -J-Xms1024m -J-Xmx5120m -J-XX:ReservedCodeCacheSize=512m
-J-XX:MaxMetaspaceSize=1024m ++${{ matrix.scala_version }}
+
+ runs-on: ${{ matrix.os }}
+ defaults:
+ run:
+ shell: bash
+
steps:
############################################################
@@ -39,17 +44,6 @@ jobs:
- name: Checkout Repository
uses: actions/[email protected]
- - name: Install Dependencies (Windows)
- run: |
- choco install sbt
- REM choco doesn't update PATH, and SBT isn't in any of the default
- REM PATHs, and Github Actions doesn't have a built in way to modify
- REM PATH. So add a link to sbt in a directory that is in PATH and
that
- REM should always exist (bit of a hack).
- mklink "C:\ProgramData\Chocolatey\bin\sbt" "C:\Program Files
(x86)\sbt\bin\sbt"
- shell: cmd
- if: runner.os == 'Windows'
-
- name: Install Java
uses: actions/setup-java@v1
with:
@@ -61,20 +55,16 @@ jobs:
- name: Compile
run: $SBT compile test:compile it:compile
- shell: bash
- name: Build Documentation
run: $SBTNOCOV unidoc
- shell: bash
- name: Package Zip & Tar
run: $SBTNOCOV daffodil-cli/universal:packageBin
daffodil-cli/universal:packageZipTarball
- shell: bash
- name: Package RPM
run: $SBTNOCOV daffodil-cli/rpm:packageBin
if: runner.os == 'Linux'
- shell: bash
############################################################
# Test
@@ -82,19 +72,15 @@ jobs:
- name: Run Rat Check
run: $SBTNOCOV ratCheck || (cat target/rat.txt; exit 1)
- shell: bash
- name: Run Unit Tests
run: $SBT test
- shell: bash
- name: Run Integration Tests
run: $SBT it:test
- shell: bash
- name: Generate Coverage Report
run: $SBT coverageAggregate
- shell: bash
- name: Upload Coverage Report
uses: codecov/codecov-action@v1
diff --git a/build.sbt b/build.sbt
index 283a6ea..0f0b4ab 100644
--- a/build.sbt
+++ b/build.sbt
@@ -106,7 +106,7 @@ lazy val commonSettings = Seq(
organization := "org.apache.daffodil",
version := "3.0.0-SNAPSHOT",
scalaVersion := "2.12.11",
- crossScalaVersions := Seq("2.12.11", "2.11.12"),
+ crossScalaVersions := Seq("2.12.11"),
scalacOptions ++= Seq(
"-feature",
"-deprecation",
diff --git a/daffodil-lib/src/main/scala/org/apache/daffodil/util/MStack.scala
b/daffodil-lib/src/main/scala/org/apache/daffodil/util/MStack.scala
index bb49e64..2b09746 100644
--- a/daffodil-lib/src/main/scala/org/apache/daffodil/util/MStack.scala
+++ b/daffodil-lib/src/main/scala/org/apache/daffodil/util/MStack.scala
@@ -18,7 +18,6 @@
package org.apache.daffodil.util
import org.apache.daffodil.exceptions.Assert
-// import org.apache.daffodil.equality._ // TODO: Scala compiler bug - can't
use =#= in this file (scalac 2.11.7) because we get a spurious compile error
(unable to find ViewEquality in package equality.)
import Maybe._
object MStack {
diff --git
a/daffodil-propgen/src/test/scala/org/apache/daffodil/propGen/TestPropertyGenerator.scala
b/daffodil-propgen/src/test/scala/org/apache/daffodil/propGen/TestPropertyGenerator.scala
index 4865747..aa130db 100644
---
a/daffodil-propgen/src/test/scala/org/apache/daffodil/propGen/TestPropertyGenerator.scala
+++
b/daffodil-propgen/src/test/scala/org/apache/daffodil/propGen/TestPropertyGenerator.scala
@@ -17,8 +17,13 @@
package org.apache.daffodil.propGen
+import java.nio.file.Files
+import java.nio.file.Paths
+
import org.junit.Assert._
+import org.junit.Rule
import org.junit.Test
+import org.junit.rules.TemporaryFolder
class TestPropertyGenerator {
@@ -158,4 +163,20 @@ class TestPropertyGenerator {
assertTrue(mx.contains("""with DFDLDefineVariableTypeMixin"""))
}
+ /**
+ * Test the entire PropertyGenerator package and verify that it creates some
files.
+ * Ulterior motivation is to push codecov's total coverage above 80% too.
+ */
+ private val _folder = new TemporaryFolder()
+ @Rule def folder = _folder
+ @Test def testPropertyGeneratorMain(): Unit = {
+ val args = Array(folder.getRoot.getCanonicalPath)
+ PropertyGenerator.main(args)
+ val path1 = Paths.get(args(0),
"org/apache/daffodil/schema/annotation/props/gen/GeneratedCode.scala")
+ val path2 = Paths.get(args(0),
"org/apache/daffodil/api/DaffodilTunablesGen.scala")
+ val path3 = Paths.get(args(0), "org/apache/daffodil/api/WarnIdGen.scala")
+ assert(Files.exists(path1), "Expected PropertyGenerator to create a file")
+ assert(Files.exists(path2), "Expected PropertyGenerator to create a file")
+ assert(Files.exists(path3), "Expected PropertyGenerator to create a file")
+ }
}
diff --git
a/daffodil-runtime1-unparser/src/main/scala/org/apache/daffodil/processors/unparsers/ChoiceAndOtherVariousUnparsers.scala
b/daffodil-runtime1-unparser/src/main/scala/org/apache/daffodil/processors/unparsers/ChoiceAndOtherVariousUnparsers.scala
index c710c47..b2ec9f5 100644
---
a/daffodil-runtime1-unparser/src/main/scala/org/apache/daffodil/processors/unparsers/ChoiceAndOtherVariousUnparsers.scala
+++
b/daffodil-runtime1-unparser/src/main/scala/org/apache/daffodil/processors/unparsers/ChoiceAndOtherVariousUnparsers.scala
@@ -31,14 +31,6 @@ case class ChoiceBranchMap(
unmappedDefault: Option[Unparser])
extends PreSerialization {
- /**
- * On Scala 2.11 this declaration of writeObject seems to be required. Not
on 2.12.
- * @param out
- * @throws java.io.IOException
- */
- @throws(classOf[java.io.IOException])
- private def writeObject(out: java.io.ObjectOutputStream): Unit =
serializeObject(out)
-
def get(cbe: ChoiceBranchEvent): Maybe[Unparser] = {
val fromTable = lookupTable.get(cbe)
val res =
diff --git
a/daffodil-runtime1/src/main/scala-2.11/org/apache/daffodil/infoset/DataValue.scala
b/daffodil-runtime1/src/main/scala-2.11/org/apache/daffodil/infoset/DataValue.scala
deleted file mode 100644
index 45939d5..0000000
---
a/daffodil-runtime1/src/main/scala-2.11/org/apache/daffodil/infoset/DataValue.scala
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * This is a variant of DataValue specifically for scala 2.11
- * Due to a bug in the 2.11 compiler
(https://github.com/scala/bug/issues/7521), AnyVal classes
- * do not behave as expected when wrapping a primitive array.
- *
- * For our use-case, this presents a problem, as one of our DataValue types is
Array[Byte].
- *
- * To work around this, this implementation will explicitly box/unbox all
instances of Array[Byte].
- */
-package org.apache.daffodil.infoset
-
-import java.lang.{ Boolean => JBoolean, Number => JNumber, Long => JLong,
Double => JDouble, String => JString, Float => JFloat, Byte => JByte, Integer
=> JInt, Short => JShort }
-import java.math.{ BigDecimal => JBigDecimal, BigInteger => JBigInt }
-import org.apache.daffodil.calendar.DFDLCalendar
-import org.apache.daffodil.calendar.DFDLDateTime
-import org.apache.daffodil.calendar.DFDLDate
-import org.apache.daffodil.calendar.DFDLTime
-import org.apache.daffodil.util.Maybe
-import org.apache.daffodil.exceptions.Assert
-import org.apache.daffodil.util.Maybe.One
-import org.apache.daffodil.util.Maybe.Nope
-import java.net.URI
-
-/*
- * These traits are used for the phantom type X. When considering the type of a
- * DataValue. See the comment on the DataValue class below.
- */
-trait NonNullable extends Nullable;
-trait Nullable extends tUseNilForDefault;
-trait tUseNilForDefault;
-sealed trait DataValuePrimitiveType
-
-/**
- * A class to provide basic type safety around infoset and DPath data values.
- * At runtime, this class goes away, and objects are passed around unboxed as
if
- * we were refering to the underlying objects directly as an AnyRef. Note,
however,
- * that in some circumstances, Scala may still choice to box these objects,
resulting
- * in the same overhead of a normal wrapper class.
- */
-/*
- * The type variable X here is a phantom type. That is to say, it is not
actually
- * used to describe the type of anything within DataValue. The goal is to
provide us
- * with more ability to control the inheritense relationship between various
DataValue types.
- * In theory, this would be accomplished by establishing a type hierarchy on
the types
- * used for T. Indeed, if Scala had type-classes as a first-class feature that
is likely
- * the approach we would have taken. However, since most of the types we use
for T
- * are out of our control, we are unable to control what type structure they
have.
- * By carefully selecting X in our type aliases, it is possible for to define
types
- * that can represent any subset of types which we want. It is further
possible to
- * allow for automatic upcasting between these types where appropriate.
- *
- */
-final class DataValue[+T <: AnyRef, +X <: AnyRef] private (val v: T) extends
AnyVal with Serializable{
- @inline def isEmpty = DataValue.NoValue.v eq v
- @inline def isDefined = !isEmpty
- @inline def value = v
- @inline override def toString = if (isEmpty) "NoValue" else "DataValue(" +
v.toString + ")"
-
- @inline def getAnyRef = {
- if (v.isInstanceOf[BoxedByteArray]) {
- v.asInstanceOf[BoxedByteArray].v
- } else {
- v.asInstanceOf[AnyRef]
- }
- }
- @inline def getBigDecimal = v.asInstanceOf[JBigDecimal]
- @inline def getCalendar = v.asInstanceOf[DFDLCalendar]
- @inline def getDate = v.asInstanceOf[DFDLDate]
- @inline def getTime = v.asInstanceOf[DFDLTime]
- @inline def getDateTime = v.asInstanceOf[DFDLDateTime]
- @inline def getByteArray = v.asInstanceOf[BoxedByteArray].v
- @inline def getBoolean = v.asInstanceOf[JBoolean]
- @inline def getNumber = v.asInstanceOf[JNumber]
- @inline def getByte = v.asInstanceOf[JByte]
- @inline def getShort = v.asInstanceOf[JShort]
- @inline def getInt = v.asInstanceOf[JInt]
- @inline def getLong = v.asInstanceOf[JLong]
- @inline def getDouble = v.asInstanceOf[JDouble]
- @inline def getFloat = v.asInstanceOf[JFloat]
- @inline def getBigInt = v.asInstanceOf[JBigInt]
- @inline def getString = v.asInstanceOf[JString]
- @inline def getURI = v.asInstanceOf[URI]
-
- @inline def getNonNullable:DataValue[T, X with NonNullable] = new
DataValue(v)
- @inline def getNullablePrimitive:DataValue.DataValuePrimitiveNullable = new
DataValue(v)
-
- @inline def getOptionAnyRef = {
- if (isEmpty) {
- None
- } else {
- Some(getAnyRef)
- }
- }
-
- @inline def getMaybe[T <: AnyRef]: Maybe[T] = {
- if (isEmpty) {
- Nope
- } else {
- One(getAnyRef.asInstanceOf[T])
- }
- }
-}
-
-object DataValue {
- /**
- * All values which are legal for DPath and infoset data values. Note that
this incudes
- * DINodes, which is legal for DPath, but not infoset data values.
- * Also note that at any given time, the infoset may have no value, which
is not directly
- * representable by this type.
- */
- type DataValuePrimitive = DataValue[AnyRef, NonNullable with
DataValuePrimitiveType]
- /**
- * A (set-theoretic) extension of DataValuePrimitive adjoining a NULL
element refered to as NoValue.
- * Since this just adjoins NoValue, we can think of it as a nullable varient
of DataValuePrimitive.
- * See https://en.wikipedia.org/wiki/Nullable_type
- */
- type DataValuePrimitiveNullable = DataValue[AnyRef, Nullable with
DataValuePrimitiveType]
- /**
- * All values of DataValuePrimitiveNullable, plus a sentinal
UseNilForDefault value.
- * Used only by the default field of ElementRuntimeData.
- */
- type DataValuePrimitiveOrUseNilForDefaultOrNull = DataValue[AnyRef,
tUseNilForDefault]
-
- type DataValueEmpty = DataValue[Null, Nullable with DataValuePrimitiveType]
- type DataValueBigDecimal = DataValue[JBigDecimal, NonNullable with
DataValuePrimitiveType]
- type DataValueCalendar = DataValue[DFDLCalendar, NonNullable with
DataValuePrimitiveType]
- type DataValueDateTime = DataValue[DFDLDateTime, NonNullable with
DataValuePrimitiveType]
- type DataValueDate = DataValue[DFDLDate, NonNullable with
DataValuePrimitiveType]
- type DataValueTime = DataValue[DFDLTime, NonNullable with
DataValuePrimitiveType]
- type DataValueByteArray = DataValue[BoxedByteArray, NonNullable with
DataValuePrimitiveType]
- type DataValueBool = DataValue[JBoolean, NonNullable with
DataValuePrimitiveType]
- type DataValueNumber = DataValue[JNumber, NonNullable with
DataValuePrimitiveType]
- type DataValueLong = DataValue[JLong, NonNullable with
DataValuePrimitiveType]
- type DataValueDouble = DataValue[JDouble, NonNullable with
DataValuePrimitiveType]
- type DataValueBigInt = DataValue[JBigInt, NonNullable with
DataValuePrimitiveType]
- type DataValueString = DataValue[JString, NonNullable with
DataValuePrimitiveType]
- type DataValueURI = DataValue[URI, NonNullable with DataValuePrimitiveType]
- type DataValueFloat = DataValue[JFloat, NonNullable with
DataValuePrimitiveType]
- type DataValueByte = DataValue[JByte, NonNullable with
DataValuePrimitiveType]
- type DataValueInt = DataValue[JInt, NonNullable with DataValuePrimitiveType]
- type DataValueShort = DataValue[JShort, NonNullable with
DataValuePrimitiveType]
- type DataValueDINode = DataValue[DINode, NonNullable with
DataValuePrimitiveType]
- type DataValueUseNilForDefault = DataValue[UseNilForDefaultObj, NonNullable]
-
- import scala.language.implicitConversions
-
- @inline implicit def toDataValue(v: JBigDecimal): DataValueBigDecimal = new
DataValue(v)
- @inline implicit def toDataValue(v: DFDLCalendar): DataValueCalendar = new
DataValue(v)
- @inline implicit def toDataValue(v: DFDLDateTime): DataValueDateTime = new
DataValue(v)
- @inline implicit def toDataValue(v: DFDLDate): DataValueDate = new
DataValue(v)
- @inline implicit def toDataValue(v: DFDLTime): DataValueTime = new
DataValue(v)
- @inline implicit def toDataValue(v: Array[Byte]): DataValueByteArray = new
DataValue(new BoxedByteArray(v))
- @inline implicit def toDataValue(v: JBoolean): DataValueBool = new
DataValue(v)
- @inline implicit def toDataValue(v: JNumber): DataValueNumber = new
DataValue(v)
- @inline implicit def toDataValue(v: JLong): DataValueLong = new DataValue(v)
- @inline implicit def toDataValue(v: JDouble): DataValueDouble = new
DataValue(v)
- @inline implicit def toDataValue(v: JBigInt): DataValueBigInt = new
DataValue(v)
- @inline implicit def toDataValue(v: JString): DataValueString = new
DataValue(v)
- @inline implicit def toDataValue(v: URI): DataValueURI = new DataValue(v)
- @inline implicit def toDataValue(v: JFloat): DataValueFloat = new
DataValue(v)
- @inline implicit def toDataValue(v: JByte): DataValueByte = new DataValue(v)
- @inline implicit def toDataValue(v: JInt): DataValueInt = new DataValue(v)
- @inline implicit def toDataValue(v: JShort): DataValueShort = new
DataValue(v)
- @inline implicit def toDataValue(v: DINode): DataValueDINode = new
DataValue(v)
-
- @inline implicit def toDataValue(v: Long): DataValueLong = new DataValue(v:
JLong)
- @inline implicit def toDataValue(v: Double): DataValueDouble = new
DataValue(v: JDouble)
- @inline implicit def toDataValue(v: Boolean): DataValueBool = new
DataValue(v: JBoolean)
- @inline implicit def toDataValue(v: Float): DataValueFloat = new
DataValue(v: JFloat)
- @inline implicit def toDataValue(v: Byte): DataValueByte = new DataValue(v:
JByte)
- @inline implicit def toDataValue(v: Int): DataValueInt = new DataValue(v:
JInt)
- @inline implicit def toDataValue(v: Short): DataValueShort = new
DataValue(v: JShort)
-
- @inline def unsafeFromAnyRef(v: AnyRef) = new DataValue(v)
- @inline def unsafeFromMaybeAnyRef(v: Maybe[AnyRef]) = {
- if (v.isDefined) {
- new DataValue(v.get)
- } else {
- NoValue
- }
- }
-
- val NoValue: DataValueEmpty = new DataValue(null)
-
- /**
- * Used as a sentinal value for Element's defaultValue, when said element
- * is nillable and has dfdl:useNilForDefault set to true,
- */
- val UseNilForDefault: DataValueUseNilForDefault = new DataValue(new
UseNilForDefaultObj)
-
- final protected class UseNilForDefaultObj {
- override def toString = "UseNilForDefault"
- }
-
- @inline def assertValueIsNotDataValue(v: AnyRef): Unit = {
-
- /*
- *
- * In our CompileExpressions classes, we use type variables declared as T <:
AnyRef
- *
- * Ideally, we would have declared T <: DataValuePrimitive
- * However, we need to be able to refer to something of the form Maybe[T].
- * In theory, it should be possible to take a T <: DataValuePrimitive,
- * and construct a T' <: DataValuePrimitiveNullable, such that T <: T'
- * In practice, it does not appear to be easy to tell Scala's type system
what we want to do,
- * so we instead punt on this issue and require the caller to translate
to/from AnyRef at the boundary.
- *
- * In theory, if a caller forgets to do so, and instead passes in a
DataValue instead of AnyRef,
- * the compiler would issue a type error because DataValue is an AnyVal
type, and so does not inherit from AnyRef.
- *
- * In practice, Scala will "helpfully" box the DataValue for us, which
causes all sorts of problems. For instance,
- * x.asInstanceOf[String] does not work when x is a DataValueString (even
though an unboxed DataValueString is literally just a String at runtime).
- *
- * To make matters worse, the Scala compiler, does not seem to realize the
implications of this autoboxing,
- * and so believes that is impossible for an AnyRef to ever be an instance
of DataValue.
- * As such, is issues a warning on the naive typecheck since it can "never"
fail. We silence this warning,
- * by first casting to Any.
- *
- *
- *
- */
- Assert.invariant(!v.asInstanceOf[Any].isInstanceOf[DataValue[AnyRef,
AnyRef]])
-
- // Ideally, we could compare against our DataValueAny type alias. However,
Scala (correctly) points out,
- // that type erasure means that DataValueAny contains information that
Scala cannot actually verify at runtime.
- // To silence this warning, we explictly check against the underlying
DataValue type.
- //
- // The commented out assertion below shows what we are trying to
accomplish.
- // The actual assertion above is equivalent, but does not result in
warnings.
- // Assert.invariant(!v.isInstanceOf[DataValueAny])
- }
-}
-
-class BoxedByteArray(val v: Array[Byte]) extends Serializable {
-
-}
diff --git
a/daffodil-runtime1/src/main/scala-2.12/org/apache/daffodil/infoset/DataValue.scala
b/daffodil-runtime1/src/main/scala/org/apache/daffodil/infoset/DataValue.scala
similarity index 96%
rename from
daffodil-runtime1/src/main/scala-2.12/org/apache/daffodil/infoset/DataValue.scala
rename to
daffodil-runtime1/src/main/scala/org/apache/daffodil/infoset/DataValue.scala
index bf3ad8c..c42fe5a 100644
---
a/daffodil-runtime1/src/main/scala-2.12/org/apache/daffodil/infoset/DataValue.scala
+++
b/daffodil-runtime1/src/main/scala/org/apache/daffodil/infoset/DataValue.scala
@@ -16,12 +16,6 @@
*/
package org.apache.daffodil.infoset
-/*
- * This version of DataValue is specialized to 2.12 because 2.11 requires
having a different implementation.
- * When supporting future versions of scala, it should be safe to copy this
version of DataValue without modification.
- * When we drop support for 2.11, it should be safe to remove version-specific
implementations, and move this file
- * back to the main src/main/scala tree.
- */
import java.lang.{ Boolean => JBoolean, Number => JNumber, Long => JLong,
Double => JDouble, String => JString, Float => JFloat, Byte => JByte, Integer
=> JInt, Short => JShort }
import java.math.{ BigDecimal => JBigDecimal, BigInteger => JBigInt }
import org.apache.daffodil.calendar.DFDLCalendar