This is an automated email from the ASF dual-hosted git repository.

desruisseaux pushed a commit to branch geoapi-4.0
in repository https://gitbox.apache.org/repos/asf/sis.git


The following commit(s) were added to refs/heads/geoapi-4.0 by this push:
     new 55d0df0  Add support for Horizontal Differencing Predictor.
55d0df0 is described below

commit 55d0df03247d9e4c6c76197c5234f020e298b962
Author: Martin Desruisseaux <martin.desruisse...@geomatys.com>
AuthorDate: Mon Aug 16 18:42:30 2021 +0200

    Add support for Horizontal Differencing Predictor.
---
 .../java/org/apache/sis/internal/jdk9/JDK9.java    |  26 +++++
 .../apache/sis/internal/geotiff/CopyFromBytes.java |   6 +-
 .../sis/internal/geotiff/HorizontalPredictor.java  | 117 +++++++++++++++++++++
 .../org/apache/sis/internal/geotiff/Inflater.java  |  71 +++++++++----
 .../sis/internal/geotiff/InflaterChannel.java      |   6 +-
 .../sis/internal/geotiff/InflaterPredictor.java    |  80 ++++++++++++++
 .../sis/storage/geotiff/CompressedSubset.java      |  48 +++------
 .../org/apache/sis/storage/geotiff/DataCube.java   |   5 +-
 .../org/apache/sis/storage/geotiff/DataSubset.java |  15 +--
 .../sis/storage/UnsupportedEncodingException.java  |  62 +++++++++++
 10 files changed, 360 insertions(+), 76 deletions(-)

diff --git a/core/sis-utility/src/main/java/org/apache/sis/internal/jdk9/JDK9.java b/core/sis-utility/src/main/java/org/apache/sis/internal/jdk9/JDK9.java
index dc86420..ec1de1d 100644
--- a/core/sis-utility/src/main/java/org/apache/sis/internal/jdk9/JDK9.java
+++ b/core/sis-utility/src/main/java/org/apache/sis/internal/jdk9/JDK9.java
@@ -165,6 +165,32 @@ public final class JDK9 {
     }
 
     /**
+     * Place holder for {@code ByteBuffer.get(int, byte[])}.
+     *
+     * @param  b     the buffer from which to get bytes.
+     * @param  index index from which the first byte will be read.
+     * @param  dst   destination array.
+     */
+    public static void get(final ByteBuffer b, int index, final byte[] dst) {
+        JDK9.get(b, index, dst, 0, dst.length);
+    }
+
+    /**
+     * Place holder for {@code ByteBuffer.get(int, byte[], int, int)}.
+     *
+     * @param  b       the buffer from which to get bytes.
+     * @param  index   index from which the first byte will be read.
+     * @param  dst     destination array.
+     * @param  offset  offset in the array of the first byte to write.
+     * @param  length  number of bytes to write.
+     */
+    public static void get(final ByteBuffer b, final int index, final byte[] dst, final int offset, final int length) {
+        for (int i=0; i<length; i++) {
+            dst[offset + i] = b.get(index + i);
+        }
+    }
+
+    /**
      * Place holder for {@code Class.getPackageName()}.
      *
      * @param  c  the class for which to get the package name.
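
For illustration only (not part of the patch): the two JDK9.get(…) helpers above emulate the absolute bulk ByteBuffer.get methods available in recent JDKs, reading at an explicit index without touching the buffer position. A minimal sketch of that behaviour, using only standard JDK classes:

    import java.nio.ByteBuffer;
    import java.util.Arrays;

    public final class AbsoluteGetDemo {
        public static void main(String[] args) {
            ByteBuffer buffer = ByteBuffer.wrap(new byte[] {10, 20, 30, 40, 50});
            buffer.position(4);                          // arbitrary position, must stay untouched
            byte[] dst = new byte[3];
            for (int i = 0; i < dst.length; i++) {       // same loop as JDK9.get(buffer, 1, dst)
                dst[i] = buffer.get(1 + i);              // absolute get: does not move the position
            }
            System.out.println(Arrays.toString(dst));    // prints [20, 30, 40]
            System.out.println(buffer.position());       // prints 4
        }
    }
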
diff --git a/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/CopyFromBytes.java b/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/CopyFromBytes.java
index 0bc6e11..1ab6cca 100644
--- a/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/CopyFromBytes.java
+++ b/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/CopyFromBytes.java
@@ -25,6 +25,7 @@ import java.nio.LongBuffer;
 import java.nio.FloatBuffer;
 import java.nio.DoubleBuffer;
 import org.apache.sis.internal.storage.io.ChannelDataInput;
+import org.apache.sis.storage.UnsupportedEncodingException;
 import org.apache.sis.util.resources.Errors;
 import org.apache.sis.util.Classes;
 
@@ -103,10 +104,11 @@ abstract class CopyFromBytes extends Inflater {
      * @param  divisor  factor by which to divide sample size values. Always ≥ 1 and usually = 1.
      * @param  banks    where to store sample values.
      * @return the inflater for the given target type.
-     * @throws IllegalArgumentException if the buffer type is not recognized.
+     * @throws UnsupportedEncodingException if the buffer type is not recognized.
      */
     public static CopyFromBytes create(final ChannelDataInput input, final long start,
             final int count, final int size, final int[] skips, final int divisor, final Buffer banks)
+            throws UnsupportedEncodingException
     {
         if (banks instanceof   ByteBuffer) return new Bytes  (input, start, count, size, skips, divisor,   (ByteBuffer) banks);
         if (banks instanceof  ShortBuffer) return new Shorts (input, start, count, size, skips, divisor,  (ShortBuffer) banks);
@@ -114,7 +116,7 @@ abstract class CopyFromBytes extends Inflater {
         if (banks instanceof   LongBuffer) return new Longs  (input, start, count, size, skips, divisor,   (LongBuffer) banks);
         if (banks instanceof  FloatBuffer) return new Floats (input, start, count, size, skips, divisor,  (FloatBuffer) banks);
         if (banks instanceof DoubleBuffer) return new Doubles(input, start, count, size, skips, divisor, (DoubleBuffer) banks);
-        throw new IllegalArgumentException(Errors.format(Errors.Keys.UnsupportedType_1, Classes.getClass(banks)));
+        throw new UnsupportedEncodingException(Errors.format(Errors.Keys.UnsupportedType_1, Classes.getClass(banks)));
     }
 
     /**
diff --git a/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/HorizontalPredictor.java b/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/HorizontalPredictor.java
new file mode 100644
index 0000000..a303a8d
--- /dev/null
+++ b/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/HorizontalPredictor.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sis.internal.geotiff;
+
+import java.nio.ByteBuffer;
+import org.apache.sis.internal.jdk9.JDK9;
+
+
+/**
+ * Implementation of {@link Predictor#HORIZONTAL}.
+ * The current implementation works only on 8-bit samples.
+ *
+ * <p><b>Note:</b> if we want to support 16 bits, 32 bits <i>etc.</i> sample values,
+ * the main difficulty is that if the buffer ends in the middle of a sample value,
+ * we need to stop the processing before that last value and store it somewhere for
+ * processing in the next call to {@link InflaterChannel#read(ByteBuffer)}.</p>
+ *
+ * @author  Martin Desruisseaux (Geomatys)
+ * @version 1.1
+ * @since   1.1
+ * @module
+ */
+final class HorizontalPredictor extends InflaterPredictor {
+    /**
+     * Data on the previous column. The length of this array is the pixel stride.
+     */
+    private final byte[] previousColumns;
+
+    /**
+     * Number of sample values between a row and the next row.
+     */
+    private final int scanlineStride;
+
+    /**
+     * Column index (as a count of sample values, not a count of pixels).
+     * Used for detecting when the decoding process starts a new row.
+     */
+    private int column;
+
+    /**
+     * Creates a new predictor.
+     *
+     * @param  input        the channel that decompresses data.
+     * @param  pixelStride  number of sample values per pixel in the source image.
+     * @param  width        number of pixels in the source image.
+     */
+    HorizontalPredictor(final InflaterChannel input, final int pixelStride, final int width) {
+        super(input);
+        previousColumns = new byte[pixelStride];
+        scanlineStride  = Math.multiplyExact(width, pixelStride);
+    }
+
+    /**
+     * Applies the predictor on data in the given buffer,
+     * from the given start position until current buffer position.
+     *
+     * @param  buffer  the buffer on which to apply the predictor.
+     * @param  start   position of first sample value to process.
+     */
+    @Override
+    protected void uncompress(final ByteBuffer buffer, final int start) {
+        final int pixelStride = previousColumns.length;
+        final int limit = buffer.position();
+        int position = start;
+        while (position < limit) {
+            /*
+             * This loop body should be executed on a row-by-row basis. But the `startOfRow` and `endOfRow` indices
+             * may not be the real start/end of row if the previous call to this method finished before end of row,
+             * or if the current call to this method also finishes before end of row (because of buffer limit).
+             */
+            final int startOfRow = position;
+            final int endOfRow   = Math.min(position + (scanlineStride - column), limit);
+            final int head       = Math.min(position + pixelStride, endOfRow);
+            if (column < pixelStride) {
+                // Pixels in the first column are left unchanged.
+                position += Math.min(pixelStride, endOfRow - position);
+            }
+            while (position < head) {
+                buffer.put(position, (byte) (buffer.get(position) + previousColumns[position - startOfRow]));
+                position++;
+            }
+            while (position < endOfRow) {
+                buffer.put(position, (byte) (buffer.get(position) + buffer.get(position - pixelStride)));
+                position++;
+            }
+            column += position - startOfRow;
+            if (column >= scanlineStride) {
+                column = 0;
+            }
+        }
+        /*
+         * Save the last bytes for next invocation of this method.
+         */
+        final int capacity = limit - start;
+        if (capacity >= pixelStride) {
+            JDK9.get(buffer, limit - pixelStride, previousColumns);
+        } else {
+            final int keep = pixelStride - capacity;
+            System.arraycopy(previousColumns, keep, previousColumns, 0, capacity);
+            JDK9.get(buffer, start, previousColumns, keep, capacity);
+        }
+    }
+}
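
For readers unfamiliar with the TIFF horizontal differencing predictor, the arithmetic implemented above can be illustrated with a standalone sketch (not part of the patch): each stored byte is a difference which is added to the decoded sample one pixel (pixelStride samples) to the left, and the first pixel of each row is left unchanged.

    import java.util.Arrays;

    public final class HorizontalDifferencingDemo {
        public static void main(String[] args) {
            final int pixelStride = 3;                        // e.g. one RGB pixel = 3 samples
            // One row as stored in the stream: the first pixel, then differences.
            byte[] row = {100, 50, 25,  5, -3, 2,  1, 0, -4};
            for (int i = pixelStride; i < row.length; i++) {
                row[i] += row[i - pixelStride];               // add the sample of the previous pixel
            }
            System.out.println(Arrays.toString(row));         // [100, 50, 25, 105, 47, 27, 106, 47, 23]
        }
    }

The class added by this commit performs the same addition, but on a ByteBuffer that may stop in the middle of a row, which is why it keeps the last pixelStride bytes in previousColumns between calls.
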
diff --git a/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/Inflater.java b/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/Inflater.java
index 9c1e841..f8d3a46 100644
--- a/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/Inflater.java
+++ b/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/Inflater.java
@@ -16,13 +16,16 @@
  */
 package org.apache.sis.internal.geotiff;
 
-import java.nio.Buffer;
-import java.io.IOException;
 import java.util.Arrays;
+import java.io.IOException;
+import java.nio.Buffer;
+import java.nio.channels.ReadableByteChannel;
 import org.apache.sis.math.MathFunctions;
 import org.apache.sis.internal.util.Numerics;
 import org.apache.sis.internal.storage.io.ChannelDataInput;
+import org.apache.sis.storage.UnsupportedEncodingException;
 import org.apache.sis.util.ArgumentChecks;
+import org.apache.sis.util.Localized;
 
 import static org.apache.sis.internal.util.Numerics.ceilDiv;
 
@@ -160,42 +163,66 @@ public abstract class Inflater {
      * (e.g. 1 bit) if {@code pixelsPerElement} is greater than 1. In that case, the {@link #elementsPerChunk}
      * and {@link #skipAfterChunks} values will be divided by {@code pixelsPerElement}.
      *
-     * @param  compression       the compression method.
-     * @param  input             the source of data to decompress.
-     * @param  start             stream position where to start reading.
-     * @param  byteCount         number of bytes to read before decompression.
-     * @param  sourceWidth       number of pixels in a row of source image.
-     * @param  chunksPerRow      number of chunks (usually pixels) per row in target image. Must be strictly positive.
-     * @param  samplesPerChunk   number of sample values per chunk (sample or pixel). Must be strictly positive.
-     * @param  skipAfterChunks   number of sample values to skip between chunks. May be empty or null.
-     * @param  pixelsPerElement  number of pixels per primitive element. Always 1 except for multi-pixels packed images.
-     * @param  banks             where to store sample values.
-     * @return the inflater for the given targe type, or {@code null} if the compression method is unknown.
+     * @param  compression        the compression method.
+     * @param  predictor          the mathematical operator to apply after decompression.
+     * @param  input              the source of data to decompress.
+     * @param  start              stream position where to start reading.
+     * @param  byteCount          number of bytes to read before decompression.
+     * @param  sourcePixelStride  number of sample values per pixel in the source image.
+     * @param  sourceWidth        number of pixels in a row of source image.
+     * @param  chunksPerRow       number of chunks (usually pixels) per row in target image. Must be strictly positive.
+     * @param  samplesPerChunk    number of sample values per chunk (sample or pixel). Must be strictly positive.
+     * @param  skipAfterChunks    number of sample values to skip between chunks. May be empty or null.
+     * @param  pixelsPerElement   number of pixels per primitive element. Always 1 except for multi-pixels packed images.
+     * @param  banks              where to store sample values.
+     * @param  caller             provider of the locale to use if an error message must be produced.
+     * @return the inflater for the given target type.
      * @throws IOException if an I/O operation was required and failed.
+     * @throws UnsupportedEncodingException if the compression, predictor or data type is unsupported.
      */
-    public static Inflater create(final Compression compression,
-            final ChannelDataInput input, final long start, final long byteCount, final int sourceWidth,
+    public static Inflater create(final Compression compression, final Predictor predictor,
+            final ChannelDataInput input, final long start, final long byteCount,
+            final int sourcePixelStride, final int sourceWidth,
             final int chunksPerRow, final int samplesPerChunk, final int[] skipAfterChunks,
-            final int pixelsPerElement, final Buffer banks)
-            throws IOException
+            final int pixelsPerElement, final Buffer banks, final Localized caller)
+            throws IOException, UnsupportedEncodingException
     {
         ArgumentChecks.ensureNonNull("input", input);
         ArgumentChecks.ensureNonNull("banks", banks);
         final InflaterChannel inflated;
         switch (compression) {
-            case NONE: {
-                return CopyFromBytes.create(input, start, chunksPerRow, samplesPerChunk, skipAfterChunks, pixelsPerElement, banks);
-            }
             case LZW:      inflated = new LZW     (input, start, byteCount); break;
             case PACKBITS: inflated = new PackBits(input, start, byteCount); break;
             case CCITTRLE: inflated = new CCITTRLE(input, start, byteCount, sourceWidth); break;
-            default: return null;
+            case NONE: {
+                if (predictor == Predictor.NONE) {
+                    return CopyFromBytes.create(input, start,
+                            chunksPerRow, samplesPerChunk, skipAfterChunks, pixelsPerElement, banks);
+                }
+                throw unsupportedEncoding(Resources.Keys.UnsupportedPredictor_1, predictor, caller);
+            }
+            default: {
+                throw unsupportedEncoding(Resources.Keys.UnsupportedCompressionMethod_1, compression, caller);
+            }
         }
-        return CopyFromBytes.create(inflated.createDataInput(), 0,
+        final ReadableByteChannel channel;
+        switch (predictor) {
+            case NONE:       channel = inflated; break;
+            case HORIZONTAL: channel = new HorizontalPredictor(inflated, sourcePixelStride, sourceWidth); break;
+            default: throw unsupportedEncoding(Resources.Keys.UnsupportedPredictor_1, predictor, caller);
+        }
+        return CopyFromBytes.create(inflated.createDataInput(channel), 0,
                 chunksPerRow, samplesPerChunk, skipAfterChunks, pixelsPerElement, banks);
     }
 
     /**
+     * Returns the exception to throw for an unsupported compression or predictor.
+     */
+    private static UnsupportedEncodingException unsupportedEncoding(final short key, final Enum<?> value, final Localized caller) {
+        return new UnsupportedEncodingException(Resources.forLocale(caller.getLocale()).getString(key, value));
+    }
+
+    /**
      * Reads the given amount of sample values without storing them.
      * The given value is in units of sample values, not in bytes.
      *
diff --git a/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/InflaterChannel.java b/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/InflaterChannel.java
index c87a655..12bcfc2 100644
--- a/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/InflaterChannel.java
+++ b/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/InflaterChannel.java
@@ -64,9 +64,11 @@ abstract class InflaterChannel implements ReadableByteChannel {
     /**
      * Creates the data input stream to use for getting uncompressed data.
      * The {@linkplain #input} stream must be on the start position before invoking this method.
+     *
+     * @param  channel  the channel to wrap. This is {@code this} unless a {@link Predictor} is applied.
      */
-    final ChannelDataInput createDataInput() throws IOException {
-        return new ChannelDataInput(input.filename, this, ByteBuffer.allocate(BUFFER_SIZE), false);
+    final ChannelDataInput createDataInput(final ReadableByteChannel channel) throws IOException {
+        return new ChannelDataInput(input.filename, channel, ByteBuffer.allocate(BUFFER_SIZE), false);
     }
 
     /**
diff --git a/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/InflaterPredictor.java b/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/InflaterPredictor.java
new file mode 100644
index 0000000..1b816ab
--- /dev/null
+++ b/storage/sis-geotiff/src/main/java/org/apache/sis/internal/geotiff/InflaterPredictor.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sis.internal.geotiff;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.ReadableByteChannel;
+
+
+/**
+ * Implementation of a {@link Predictor} to be executed after decompression.
+ *
+ * @author  Martin Desruisseaux (Geomatys)
+ * @version 1.1
+ * @since   1.1
+ * @module
+ */
+abstract class InflaterPredictor implements ReadableByteChannel {
+    /**
+     * The channel from which to read data.
+     */
+    private final InflaterChannel input;
+
+    /**
+     * Creates a predictor.
+     */
+    protected InflaterPredictor(final InflaterChannel input) {
+        this.input = input;
+    }
+
+    /**
+     * Applies the predictor on data in the given buffer,
+     * from the given start position until current buffer position.
+     *
+     * @param  buffer  the buffer on which to apply the predictor.
+     * @param  start   position of first sample value to process.
+     */
+    protected abstract void uncompress(ByteBuffer buffer, int start);
+
+    /**
+     * Decompresses some bytes from the {@linkplain #input} into the given destination buffer.
+     */
+    @Override
+    public int read(final ByteBuffer target) throws IOException {
+        final int start = target.position();
+        final int n = input.read(target);
+        uncompress(target, start);
+        return n;
+    }
+
+    /**
+     * Tells whether this channel is still open.
+     */
+    @Override
+    public final boolean isOpen() {
+        return input.isOpen();
+    }
+
+    /**
+     * Do nothing. The {@linkplain #input} channel is not closed by this operation
+     * because it will typically be needed again for decompressing other tiles.
+     */
+    @Override
+    public final void close() {
+    }
+}
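
InflaterPredictor is essentially a decorator around the decompressor channel: read(…) first lets the wrapped channel fill the buffer, then applies the predictor to the newly written bytes only. A rough, self-contained sketch of that pattern using plain JDK types (class name and the "+1" post-processing are invented for illustration; this is not SIS code):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.Channels;
    import java.nio.channels.ReadableByteChannel;
    import java.util.Arrays;

    final class PostProcessingChannel implements ReadableByteChannel {
        private final ReadableByteChannel source;           // stands in for the decompressor channel

        PostProcessingChannel(final ReadableByteChannel source) {
            this.source = source;
        }

        @Override
        public int read(final ByteBuffer target) throws IOException {
            final int start = target.position();
            final int n = source.read(target);              // let the wrapped channel fill the buffer
            for (int i = start; i < target.position(); i++) {
                target.put(i, (byte) (target.get(i) + 1));  // toy post-processing on the new bytes only
            }
            return n;
        }

        @Override public boolean isOpen() { return source.isOpen(); }
        @Override public void close()     {}                // keep the source open, as in the commit

        public static void main(String[] args) throws IOException {
            ReadableByteChannel raw = Channels.newChannel(new ByteArrayInputStream(new byte[] {1, 2, 3}));
            ByteBuffer buffer = ByteBuffer.allocate(8);
            new PostProcessingChannel(raw).read(buffer);
            byte[] result = Arrays.copyOf(buffer.array(), buffer.position());
            System.out.println(Arrays.toString(result));    // prints [2, 3, 4]
        }
    }
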
diff --git a/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/CompressedSubset.java b/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/CompressedSubset.java
index 070aa26..79d66b1 100644
--- a/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/CompressedSubset.java
+++ b/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/CompressedSubset.java
@@ -26,9 +26,7 @@ import org.apache.sis.internal.coverage.j2d.RasterFactory;
 import org.apache.sis.internal.geotiff.Compression;
 import org.apache.sis.internal.geotiff.Predictor;
 import org.apache.sis.internal.geotiff.Inflater;
-import org.apache.sis.internal.geotiff.Resources;
 import org.apache.sis.storage.DataStoreException;
-import org.apache.sis.storage.DataStoreContentException;
 import org.apache.sis.image.DataType;
 
 import static java.lang.Math.toIntExact;
@@ -45,16 +43,6 @@ import static org.apache.sis.internal.jdk9.JDK9.multiplyFull;
  */
 final class CompressedSubset extends DataSubset {
     /**
-     * The compression method.
-     */
-    private final Compression compression;
-
-    /**
-     * The mathematical operator that is applied to the image data before an encoding scheme is applied.
-     */
-    private final Predictor predictor;
-
-    /**
      * Number of sample values to skip for moving to the next row of a tile in the GeoTIFF file.
      * This is not necessarily the same scanline stride as for the tiles created by this class.
      */
@@ -111,20 +99,15 @@ final class CompressedSubset extends DataSubset {
      * by {@link ImageFileDirectory#validateMandatoryTags()} before this call.
      * This constructor should be invoked inside a synchronized block.
      *
-     * @param  source       the resource which contain this {@code DataSubset}.
-     * @param  subset       description of the {@code owner} subset to cover.
-     * @param  rasters      potentially shared cache of rasters read by this {@code DataSubset}.
-     * @param  compression  the compression method.
-     * @param  predictor    the mathematical operator applied to image data before compression.
+     * @param  source   the resource which contains this {@code DataSubset}.
+     * @param  subset   description of the {@code owner} subset to cover.
+     * @param  rasters  potentially shared cache of rasters read by this {@code DataSubset}.
      * @throws ArithmeticException if the number of tiles overflows 32 bits integer arithmetic.
      */
-    CompressedSubset(final DataCube source, final TiledGridResource.Subset subset,
-                     final Compression compression, final Predictor predictor)
+    CompressedSubset(final DataCube source, final TiledGridResource.Subset subset)
             throws DataStoreException
     {
         super(source, subset);
-        this.compression  = compression;
-        this.predictor    = predictor;
         scanlineStride    = multiplyFull(getTileSize(0), sourcePixelStride);
         final int between = sourcePixelStride * (getSubsampling(0) - 1);
         int afterLastBand = sourcePixelStride * (getTileSize(0) - 1);
@@ -229,27 +212,20 @@ final class CompressedSubset extends DataSubset {
          */
         final int pixelsPerElement = getPixelsPerElement();                 // Always ≥ 1 and usually = 1.
         assert (head % pixelsPerElement) == 0 : head;
-        final int capacity = getBankCapacity(pixelsPerElement);
-        final Buffer[] banks = new Buffer[numBanks];
-        final ChannelDataInput input = input();
+        final int              capacity    = getBankCapacity(pixelsPerElement);
+        final Buffer[]         banks       = new Buffer[numBanks];
+        final ChannelDataInput input       = input();
+        final Compression      compression = source.getCompression();
+        final Predictor        predictor   = source.getPredictor();
         for (int b=0; b<numBanks; b++) {
             /*
             * Prepare the object which will perform the actual decompression row-by-row,
              * optionally skipping chunks if a subsampling is applied.
              */
             final Buffer bank = RasterFactory.createBuffer(type, capacity);
-            final Inflater algo = Inflater.create(compression, input, offsets[b], byteCounts[b],
-                                    getTileSize(0), chunksPerRow, samplesPerChunk, skipAfterChunks,
-                                    pixelsPerElement, bank);
-            if (algo == null) {
-                throw new DataStoreContentException(reader().resources().getString(
-                        Resources.Keys.UnsupportedCompressionMethod_1, compression));
-            }
-            // TODO: Add predictor handling here.
-            if (predictor != Predictor.NONE) {
-                throw new DataStoreContentException(reader().resources().getString(
-                        Resources.Keys.UnsupportedPredictor_1, predictor));
-            }
+            final Inflater algo = Inflater.create(compression, predictor, input, offsets[b], byteCounts[b],
+                    sourcePixelStride, getTileSize(0), chunksPerRow, samplesPerChunk, skipAfterChunks,
+                    pixelsPerElement, bank, this);
             for (long y = lower[1]; --y >= 0;) {
                 algo.skip(scanlineStride);          // `skip(…)` may round to next element boundary.
             }
diff --git a/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/DataCube.java b/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/DataCube.java
index 463d6ea..bcbaa7a 100644
--- a/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/DataCube.java
+++ b/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/DataCube.java
@@ -184,16 +184,15 @@ abstract class DataCube extends TiledGridResource implements ResourceOnFileSystem
                     throw new DataStoreContentException(reader.resources().getString(
                             Resources.Keys.MissingValue_2, Tags.name(Tags.Compression)));
                 }
-                final Predictor predictor = getPredictor();
                 /*
                 * The `DataSubset` parent class is the most efficient but has many limitations
                 * documented in the javadoc of its `readSlice(…)` method. If any pre-condition
                 * is not met, we need to fall back on the less direct `CompressedSubset` class.
                  */
-                if (compression == Compression.NONE && predictor == Predictor.NONE && canReadDirect(subset)) {
+                if (compression == Compression.NONE && getPredictor() == Predictor.NONE && canReadDirect(subset)) {
                     coverage = new DataSubset(this, subset);
                 } else {
-                    coverage = new CompressedSubset(this, subset, compression, predictor);
+                    coverage = new CompressedSubset(this, subset);
                 }
                 coverage = preload(coverage);
             }
diff --git a/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/DataSubset.java b/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/DataSubset.java
index 9454d55..82f15e2 100644
--- a/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/DataSubset.java
+++ b/storage/sis-geotiff/src/main/java/org/apache/sis/storage/geotiff/DataSubset.java
@@ -72,7 +72,7 @@ class DataSubset extends TiledGridCoverage implements Localized {
      * The resource which contains this {@code DataSubset}.
      * Used for fetching information like the input channel and where to report warnings.
      */
-    private final DataCube source;
+    final DataCube source;
 
     /**
      * For each tile, the byte offset of that tile as compressed and stored on disk.
@@ -208,18 +208,11 @@ class DataSubset extends TiledGridCoverage implements Localized {
     }
 
     /**
-     * Returns the GeoTIFF reader which contains this subset.
-     */
-    final Reader reader() {
-        return source.reader;
-    }
-
-    /**
      * Returns the input of bytes for compressed raster data. If the TIFF tag {@code FillOrder} is 2
      * (which should be very rare), the input stream reverses the order of all bits in each byte.
      */
     final ChannelDataInput input() throws IOException {
-        ChannelDataInput input = reader().input;
+        ChannelDataInput input = source.reader.input;
         if (source.isBitOrderReversed()) {
             input = ReversedBitsChannel.wrap(input);
         }
@@ -342,7 +335,7 @@ class DataSubset extends TiledGridCoverage implements Localized {
                     tile.copyTileInfo(tileOffsets,    offsets,    includedBanks, numTiles);
                     tile.copyTileInfo(tileByteCounts, byteCounts, includedBanks, numTiles);
                     for (int b=0; b<offsets.length; b++) {
-                        offsets[b] = addExact(offsets[b], reader().origin);
+                        offsets[b] = addExact(offsets[b], source.reader.origin);
                     }
                     WritableRaster r = readSlice(offsets, byteCounts, lower, upper, subsampling, origin);
                     result[tile.indexInResultArray] = tile.cache(r);
@@ -435,7 +428,7 @@ class DataSubset extends TiledGridCoverage implements Localized {
         final Buffer[] banks = new Buffer[numBanks];
         for (int b=0; b<numBanks; b++) {
             if (b < byteCounts.length && length > byteCounts[b]) {
-                throw new DataStoreContentException(reader().resources().getString(
+                throw new DataStoreContentException(source.reader.resources().getString(
                         Resources.Keys.UnexpectedTileLength_2, length, byteCounts[b]));
             }
             hr.setOrigin(offsets[b]);
diff --git a/storage/sis-storage/src/main/java/org/apache/sis/storage/UnsupportedEncodingException.java b/storage/sis-storage/src/main/java/org/apache/sis/storage/UnsupportedEncodingException.java
new file mode 100644
index 0000000..29f3f55
--- /dev/null
+++ b/storage/sis-storage/src/main/java/org/apache/sis/storage/UnsupportedEncodingException.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sis.storage;
+
+
+/**
+ * Thrown when a storage uses some encoding options not supported by the current implementation.
+ * For example, it may be a compression method not implemented by the reader.
+ *
+ * @author  Martin Desruisseaux (Geomatys)
+ * @version 1.1
+ * @since   1.1
+ * @module
+ */
+public class UnsupportedEncodingException extends DataStoreContentException {
+    /**
+     * For cross-version compatibility.
+     */
+    private static final long serialVersionUID = 4998668012290557156L;
+
+    /**
+     * Creates an exception with the specified detail message.
+     *
+     * @param message  the detail message.
+     */
+    public UnsupportedEncodingException(final String message) {
+        super(message);
+    }
+
+    /**
+     * Creates an exception with the specified cause and no detail message.
+     *
+     * @param cause  the cause for this exception.
+     */
+    public UnsupportedEncodingException(final Throwable cause) {
+        super(cause);
+    }
+
+    /**
+     * Creates an exception with the specified detail message and cause.
+     *
+     * @param message  the detail message.
+     * @param cause    the cause for this exception.
+     */
+    public UnsupportedEncodingException(final String message, final Throwable cause) {
+        super(message, cause);
+    }
+}
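
Since the new exception extends DataStoreContentException, existing callers keep working, but they can now distinguish "the reader does not support this encoding" from other content problems. A hypothetical usage sketch (the file name and the reading step are placeholders, not from this commit):

    import java.nio.file.Paths;
    import org.apache.sis.storage.DataStore;
    import org.apache.sis.storage.DataStores;
    import org.apache.sis.storage.DataStoreException;
    import org.apache.sis.storage.UnsupportedEncodingException;

    public final class ReadGeoTiffDemo {
        public static void main(String[] args) {
            try (DataStore store = DataStores.open(Paths.get("image.tif"))) {    // hypothetical file
                // … locate the desired GridCoverageResource in the store and read it here …
            } catch (UnsupportedEncodingException e) {
                // Compression or predictor not supported by the GeoTIFF reader.
                System.err.println("Unsupported encoding: " + e.getMessage());
            } catch (DataStoreException e) {
                System.err.println("Cannot read the file: " + e.getMessage());
            }
        }
    }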
