This is an automated email from the ASF dual-hosted git repository.
jiayu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/sedona.git
The following commit(s) were added to refs/heads/master by this push:
new 150c53178f [GH-2662] Implement a pure Java single-thread COG writer
(#2663)
150c53178f is described below
commit 150c53178f6b4783e51042df5644f588996a7cce
Author: Jia Yu <[email protected]>
AuthorDate: Fri Feb 20 00:17:40 2026 -0700
[GH-2662] Implement a pure Java single-thread COG writer (#2663)
---
.../apache/sedona/common/raster/RasterOutputs.java | 21 +
.../sedona/common/raster/cog/CogAssembler.java | 381 +++++++++++
.../sedona/common/raster/cog/CogOptions.java | 251 ++++++++
.../apache/sedona/common/raster/cog/CogWriter.java | 293 +++++++++
.../sedona/common/raster/cog/TiffIfdParser.java | 371 +++++++++++
.../sedona/common/raster/cog/CogWriterTest.java | 693 +++++++++++++++++++++
6 files changed, 2010 insertions(+)
diff --git
a/common/src/main/java/org/apache/sedona/common/raster/RasterOutputs.java
b/common/src/main/java/org/apache/sedona/common/raster/RasterOutputs.java
index c2dc22782f..da49f3201e 100644
--- a/common/src/main/java/org/apache/sedona/common/raster/RasterOutputs.java
+++ b/common/src/main/java/org/apache/sedona/common/raster/RasterOutputs.java
@@ -36,6 +36,8 @@ import javax.imageio.ImageWriteParam;
import javax.media.jai.InterpolationNearest;
import javax.media.jai.JAI;
import javax.media.jai.RenderedOp;
+import org.apache.sedona.common.raster.cog.CogOptions;
+import org.apache.sedona.common.raster.cog.CogWriter;
import org.apache.sedona.common.utils.RasterUtils;
import org.geotools.api.coverage.grid.GridCoverageWriter;
import org.geotools.api.metadata.spatial.PixelOrientation;
@@ -87,6 +89,25 @@ public class RasterOutputs {
return asGeoTiff(raster, null, -1);
}
+ /**
+ * Creates a Cloud Optimized GeoTIFF (COG) byte array from the given raster.
The COG format
+ * arranges tiles and overviews in an order optimized for HTTP range-request
based access,
+ * enabling efficient partial reads from cloud storage.
+ *
+ * @param raster The input raster
+ * @param options COG generation options (compression, tileSize, resampling,
overviewCount). Use
+ * {@link CogOptions#defaults()} for default settings or {@link
CogOptions#builder()} to
+ * customize.
+ * @return COG file as byte array
+ */
+ public static byte[] asCloudOptimizedGeoTiff(GridCoverage2D raster,
CogOptions options) {
+ try {
+ return CogWriter.write(raster, options);
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to write Cloud Optimized GeoTIFF", e);
+ }
+ }
+
/**
* Creates a GeoTiff file with the provided raster. Primarily used for
testing.
*
diff --git
a/common/src/main/java/org/apache/sedona/common/raster/cog/CogAssembler.java
b/common/src/main/java/org/apache/sedona/common/raster/cog/CogAssembler.java
new file mode 100644
index 0000000000..e1f9b6c275
--- /dev/null
+++ b/common/src/main/java/org/apache/sedona/common/raster/cog/CogAssembler.java
@@ -0,0 +1,381 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.sedona.common.raster.cog;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.List;
+
+/**
+ * Assembles multiple parsed TIFF IFDs into Cloud Optimized GeoTIFF (COG) byte
order.
+ *
+ * <p>COG layout (per the spec):
+ *
+ * <pre>
+ * [TIFF header - 8 bytes]
+ * [IFD 0: full-res tags + overflow data]
+ * [IFD 1: overview 2x tags + overflow data]
+ * ...
+ * [IFD N: smallest overview tags + overflow data]
+ * [smallest overview image data]
+ * ...
+ * [overview 2x image data]
+ * [full-res image data]
+ * </pre>
+ *
+ * <p>Key COG requirements:
+ *
+ * <ul>
+ * <li>All IFDs are contiguous at the start of the file
+ * <li>Image data follows all IFDs, ordered smallest overview first,
full-res last
+ * <li>TileOffsets/StripOffsets point forward to where image data will be
located
+ * <li>Overviews have NewSubfileType = 1 (ReducedImage)
+ * </ul>
+ *
+ * <p>Ported from GeoTrellis's {@code GeoTiffWriter.appendCloudOptimized()}.
+ */
+public class CogAssembler {
+
+ /** NewSubfileType value for reduced-resolution (overview) images */
+ private static final int REDUCED_IMAGE = 1;
+
+ /** Result of patching IFD entries: contains both the patched entries and
overflow data. */
+ private static class PatchedIfd {
+ final byte[] entries;
+ final byte[] overflow;
+
+ PatchedIfd(byte[] entries, byte[] overflow) {
+ this.entries = entries;
+ this.overflow = overflow;
+ }
+ }
+
+ /**
+ * Assemble parsed TIFF IFDs into COG byte order, returning a byte array.
+ *
+ * @param parsedTiffs List of parsed TIFFs, ordered: [full-res, overview-2x,
overview-4x, ...
+ * smallest]. The first element is the full resolution image, subsequent
elements are
+ * progressively smaller overviews.
+ * @return A byte array containing the complete COG file
+ * @throws IOException if writing fails
+ */
+ public static byte[] assemble(List<TiffIfdParser.ParsedTiff> parsedTiffs)
throws IOException {
+ // Compute total size for pre-allocated buffer
+ long totalSize = computeTotalSize(parsedTiffs);
+ ByteArrayOutputStream bos =
+ new ByteArrayOutputStream((int) Math.min(totalSize,
Integer.MAX_VALUE));
+ assemble(parsedTiffs, bos);
+ return bos.toByteArray();
+ }
+
+ /**
+ * Assemble parsed TIFF IFDs into COG byte order, writing directly to the
given output stream.
+ * This avoids allocating a final byte[] for the entire COG, making it
suitable for writing large
+ * COGs to disk or network streams.
+ *
+ * @param parsedTiffs List of parsed TIFFs, ordered: [full-res, overview-2x,
overview-4x, ...
+ * smallest].
+ * @param outputStream The stream to write the COG to. Not closed by this
method.
+ * @throws IOException if writing fails
+ */
+ public static void assemble(List<TiffIfdParser.ParsedTiff> parsedTiffs,
OutputStream outputStream)
+ throws IOException {
+ if (parsedTiffs.isEmpty()) {
+ throw new IllegalArgumentException("No TIFFs to assemble");
+ }
+
+ ByteOrder byteOrder = parsedTiffs.get(0).byteOrder;
+ int ifdCount = parsedTiffs.size();
+
+ // Determine which overview IFDs need NewSubfileType injection
+ boolean[] needsNewSubfileType = new boolean[ifdCount];
+ for (int i = 1; i < ifdCount; i++) {
+ needsNewSubfileType[i] = !parsedTiffs.get(i).hasNewSubfileType;
+ }
+
+ // Phase 1: Compute sizes of all IFD regions (IFD entries + overflow data)
+ // If we need to inject NewSubfileType, the IFD grows by 12 bytes (one tag
entry)
+ int[] ifdRegionSizes = new int[ifdCount];
+ int[] effectiveTagCounts = new int[ifdCount];
+ for (int i = 0; i < ifdCount; i++) {
+ TiffIfdParser.ParsedTiff pt = parsedTiffs.get(i);
+ int extraBytes = needsNewSubfileType[i] ? 12 : 0;
+ effectiveTagCounts[i] = pt.tagCount + (needsNewSubfileType[i] ? 1 : 0);
+ ifdRegionSizes[i] = pt.getIfdAndOverflowSize() + extraBytes;
+ }
+
+ // Phase 2: Compute absolute offsets for each IFD and its image data.
+ // Layout: [header=8] [IFD0+overflow] [IFD1+overflow] ... [IFDN+overflow]
+ // [imageN] ... [image1] [image0]
+ int[] ifdAbsoluteOffsets = new int[ifdCount];
+ int cursor = 8; // After TIFF header
+ for (int i = 0; i < ifdCount; i++) {
+ ifdAbsoluteOffsets[i] = cursor;
+ cursor += ifdRegionSizes[i];
+ }
+ int imageDataRegionStart = cursor;
+
+ // Image data is written in reverse order (smallest overview first,
full-res last)
+ // Compute absolute offset of each IFD's image data
+ int[] imageDataAbsoluteOffsets = new int[ifdCount];
+ int imageDataCursor = imageDataRegionStart;
+ for (int i = ifdCount - 1; i >= 0; i--) {
+ imageDataAbsoluteOffsets[i] = imageDataCursor;
+ imageDataCursor += parsedTiffs.get(i).imageDataLength;
+ }
+
+ // Phase 3: Write the COG
+ DataOutputStream dos = new DataOutputStream(outputStream);
+
+ // Write TIFF header
+ if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
+ dos.writeByte('I');
+ dos.writeByte('I');
+ } else {
+ dos.writeByte('M');
+ dos.writeByte('M');
+ }
+ writeShort(dos, byteOrder, 42); // TIFF magic
+ writeInt(dos, byteOrder, ifdAbsoluteOffsets[0]); // Offset to first IFD
+
+ // Write each IFD + its overflow data
+ for (int i = 0; i < ifdCount; i++) {
+ TiffIfdParser.ParsedTiff pt = parsedTiffs.get(i);
+ boolean isOverview = i > 0;
+ int ifdStart = ifdAbsoluteOffsets[i];
+ int nextIfdOffset = (i + 1 < ifdCount) ? ifdAbsoluteOffsets[i + 1] : 0;
+ int tagCountForIfd = effectiveTagCounts[i];
+
+ // Compute where this IFD's overflow data will be in the output
+ // Account for possible extra 12 bytes from injected tag
+ int overflowStartInOutput = ifdStart + 2 + tagCountForIfd * 12 + 4;
+
+ // Patch the IFD entries:
+ // - Rebase overflow pointers from original file offsets to new output
offsets
+ // - Rewrite TileOffsets/StripOffsets to point to the new image data
location
+ // - Inject NewSubfileType=1 for overview IFDs if missing
+ PatchedIfd patched =
+ patchIfdEntries(
+ pt,
+ overflowStartInOutput,
+ imageDataAbsoluteOffsets[i],
+ isOverview,
+ needsNewSubfileType[i],
+ byteOrder);
+
+ // Write: tag count (2 bytes) + entries (tagCountForIfd*12) + next IFD
offset (4 bytes)
+ writeShort(dos, byteOrder, tagCountForIfd);
+ dos.write(patched.entries);
+ writeInt(dos, byteOrder, nextIfdOffset);
+
+ // Write patched overflow data
+ dos.write(patched.overflow);
+ }
+
+ // Write image data in reverse order (smallest overview first)
+ // Zero-copy: write directly from the source TIFF byte arrays
+ for (int i = ifdCount - 1; i >= 0; i--) {
+ TiffIfdParser.ParsedTiff pt = parsedTiffs.get(i);
+ dos.write(pt.sourceData, pt.imageDataOffset, pt.imageDataLength);
+ }
+
+ dos.flush();
+ }
+
+ /**
+ * Compute the total output size of the COG file. Used to pre-allocate the
byte array in {@link
+ * #assemble(List)}.
+ */
+ private static long computeTotalSize(List<TiffIfdParser.ParsedTiff>
parsedTiffs) {
+ long size = 8; // TIFF header
+ for (int i = 0; i < parsedTiffs.size(); i++) {
+ TiffIfdParser.ParsedTiff pt = parsedTiffs.get(i);
+ boolean needsInject = i > 0 && !pt.hasNewSubfileType;
+ int extraBytes = needsInject ? 12 : 0;
+ size += pt.getIfdAndOverflowSize() + extraBytes;
+ size += pt.imageDataLength;
+ }
+ return size;
+ }
+
+ /**
+ * Patch IFD entries to update:
+ *
+ * <ol>
+ * <li>Overflow data pointers (rebase from original file offset to new
output offset)
+ * <li>TileOffsets/StripOffsets values (point to new image data location)
+ * <li>Set or inject NewSubfileType=1 for overview IFDs
+ * </ol>
+ */
+ private static PatchedIfd patchIfdEntries(
+ TiffIfdParser.ParsedTiff pt,
+ int newOverflowStart,
+ int newImageDataStart,
+ boolean isOverview,
+ boolean injectNewSubfileType,
+ ByteOrder byteOrder) {
+
+ byte[] entries = pt.ifdEntries.clone();
+ byte[] patchedOverflow = pt.overflowData.clone();
+ ByteBuffer buf = ByteBuffer.wrap(entries).order(byteOrder);
+
+ int overflowDelta = newOverflowStart - pt.overflowDataStart;
+
+ for (int i = 0; i < pt.tagCount; i++) {
+ int offset = i * 12;
+ int tag = buf.getShort(offset) & 0xFFFF;
+ int fieldType = buf.getShort(offset + 2) & 0xFFFF;
+ int count = buf.getInt(offset + 4);
+ int valueSize = count * getFieldTypeSize(fieldType);
+
+ // Handle NewSubfileType tag for overviews (when already present)
+ if (tag == TiffIfdParser.TAG_NEW_SUBFILE_TYPE && isOverview) {
+ buf.putInt(offset + 8, REDUCED_IMAGE);
+ continue;
+ }
+
+ // Handle TileOffsets/StripOffsets — rewrite to point to new image data
location
+ if (tag == TiffIfdParser.TAG_TILE_OFFSETS || tag ==
TiffIfdParser.TAG_STRIP_OFFSETS) {
+ if (count == 1 && valueSize <= 4) {
+ // Single segment: offset stored inline
+ buf.putInt(offset + 8, newImageDataStart + pt.segmentOffsets[0]);
+ } else {
+ // Multiple segments: the entry points to an overflow array.
+ // We need to rewrite the overflow array with new absolute offsets.
+ // First, rebase the pointer to the overflow data.
+ int origPointer = buf.getInt(offset + 8);
+ int newPointer = origPointer + overflowDelta;
+ buf.putInt(offset + 8, newPointer);
+
+ // Now patch the overflow data copy with new image data offsets
+ int overflowArrayOffset = origPointer - pt.overflowDataStart;
+ ByteBuffer overflowBuf =
ByteBuffer.wrap(patchedOverflow).order(byteOrder);
+ for (int j = 0; j < count; j++) {
+ int newSegmentOffset = newImageDataStart + pt.segmentOffsets[j];
+ overflowBuf.putInt(overflowArrayOffset + j * 4, newSegmentOffset);
+ }
+ }
+ continue;
+ }
+
+ // For all other tags with overflow data (value > 4 bytes), rebase the
pointer
+ if (valueSize > 4) {
+ int origPointer = buf.getInt(offset + 8);
+ buf.putInt(offset + 8, origPointer + overflowDelta);
+ }
+ }
+
+ // If we need to inject NewSubfileType, insert a 12-byte entry in sorted
tag order
+ if (injectNewSubfileType) {
+ return new PatchedIfd(
+ insertNewSubfileTypeTag(entries, pt.tagCount, byteOrder),
patchedOverflow);
+ }
+
+ return new PatchedIfd(entries, patchedOverflow);
+ }
+
+ /**
+ * Insert a NewSubfileType tag entry (tag 254) into a sorted IFD entry
array. The new entry is
+ * placed at the correct position to maintain tag sort order, and existing
entries after it are
+ * shifted right by 12 bytes.
+ *
+ * @param entries The original IFD entries (tagCount * 12 bytes)
+ * @param tagCount The original number of tags
+ * @param byteOrder The byte order for writing the new entry
+ * @return A new byte array that is 12 bytes longer, with the NewSubfileType
entry inserted
+ */
+ private static byte[] insertNewSubfileTypeTag(byte[] entries, int tagCount,
ByteOrder byteOrder) {
+ // NewSubfileType = tag 254, very low, typically goes near the start
+ byte[] newEntries = new byte[entries.length + 12];
+ ByteBuffer srcBuf = ByteBuffer.wrap(entries).order(byteOrder);
+
+ // Find insertion point: first tag with code > 254
+ int insertIdx = tagCount; // default: append at end
+ for (int i = 0; i < tagCount; i++) {
+ int tag = srcBuf.getShort(i * 12) & 0xFFFF;
+ if (tag > TiffIfdParser.TAG_NEW_SUBFILE_TYPE) {
+ insertIdx = i;
+ break;
+ }
+ }
+
+ // Copy entries before insertion point
+ int beforeBytes = insertIdx * 12;
+ if (beforeBytes > 0) {
+ System.arraycopy(entries, 0, newEntries, 0, beforeBytes);
+ }
+
+ // Write the NewSubfileType entry at insertion point
+ ByteBuffer newBuf = ByteBuffer.wrap(newEntries).order(byteOrder);
+ int insertOffset = insertIdx * 12;
+ newBuf.putShort(insertOffset, (short) TiffIfdParser.TAG_NEW_SUBFILE_TYPE);
// tag = 254
+ newBuf.putShort(insertOffset + 2, (short) 4); // type = LONG
+ newBuf.putInt(insertOffset + 4, 1); // count = 1
+ newBuf.putInt(insertOffset + 8, REDUCED_IMAGE); // value = 1
+
+ // Copy entries after insertion point
+ int afterBytes = entries.length - beforeBytes;
+ if (afterBytes > 0) {
+ System.arraycopy(entries, beforeBytes, newEntries, beforeBytes + 12,
afterBytes);
+ }
+
+ return newEntries;
+ }
+
+ /** Write a 16-bit value respecting byte order */
+ private static void writeShort(DataOutputStream dos, ByteOrder order, int
value)
+ throws IOException {
+ if (order == ByteOrder.LITTLE_ENDIAN) {
+ dos.writeByte(value & 0xFF);
+ dos.writeByte((value >>> 8) & 0xFF);
+ } else {
+ dos.writeByte((value >>> 8) & 0xFF);
+ dos.writeByte(value & 0xFF);
+ }
+ }
+
+ /** Write a 32-bit value respecting byte order */
+ private static void writeInt(DataOutputStream dos, ByteOrder order, int
value)
+ throws IOException {
+ if (order == ByteOrder.LITTLE_ENDIAN) {
+ dos.writeByte(value & 0xFF);
+ dos.writeByte((value >>> 8) & 0xFF);
+ dos.writeByte((value >>> 16) & 0xFF);
+ dos.writeByte((value >>> 24) & 0xFF);
+ } else {
+ dos.writeByte((value >>> 24) & 0xFF);
+ dos.writeByte((value >>> 16) & 0xFF);
+ dos.writeByte((value >>> 8) & 0xFF);
+ dos.writeByte(value & 0xFF);
+ }
+ }
+
+ /** Get the byte size of a TIFF field type */
+ private static int getFieldTypeSize(int fieldType) {
+ int[] sizes = {0, 1, 1, 2, 4, 8, 1, 1, 2, 4, 8, 4, 8};
+ if (fieldType >= 1 && fieldType < sizes.length) {
+ return sizes[fieldType];
+ }
+ return 1;
+ }
+}
diff --git
a/common/src/main/java/org/apache/sedona/common/raster/cog/CogOptions.java
b/common/src/main/java/org/apache/sedona/common/raster/cog/CogOptions.java
new file mode 100644
index 0000000000..a5fc1814fc
--- /dev/null
+++ b/common/src/main/java/org/apache/sedona/common/raster/cog/CogOptions.java
@@ -0,0 +1,251 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.sedona.common.raster.cog;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Options for Cloud Optimized GeoTIFF (COG) generation.
+ *
+ * <p>Use the {@link Builder} to construct instances:
+ *
+ * <pre>{@code
+ * CogOptions opts = CogOptions.builder()
+ * .compression("LZW")
+ * .compressionQuality(0.5)
+ * .tileSize(512)
+ * .resampling("Bilinear")
+ * .overviewCount(3)
+ * .build();
+ * }</pre>
+ *
+ * <p>All fields are immutable once constructed. Validation is performed in
{@link Builder#build()}.
+ */
public final class CogOptions {

  /** Compression algorithms accepted by {@link Builder#compression(String)}. */
  private static final List<String> VALID_COMPRESSION =
      Arrays.asList("Deflate", "LZW", "JPEG", "PackBits");

  /** Resampling algorithms accepted by {@link Builder#resampling(String)} (title-case form). */
  private static final List<String> VALID_RESAMPLING =
      Arrays.asList("Nearest", "Bilinear", "Bicubic");

  private final String compression;
  private final double compressionQuality;
  private final int tileSize;
  private final String resampling;
  private final int overviewCount;

  /** Instances are created only through {@link Builder#build()}, which validates every field. */
  private CogOptions(Builder source) {
    this.compression = source.compression;
    this.compressionQuality = source.compressionQuality;
    this.tileSize = source.tileSize;
    this.resampling = source.resampling;
    this.overviewCount = source.overviewCount;
  }

  /**
   * @return A new builder initialized with default values
   */
  public static Builder builder() {
    return new Builder();
  }

  /**
   * @return The default options (Deflate, quality 0.2, 256px tiles, Nearest, auto overviews)
   */
  public static CogOptions defaults() {
    return builder().build();
  }

  /**
   * @return Compression type: "Deflate", "LZW", "JPEG", "PackBits"
   */
  public String getCompression() {
    return compression;
  }

  /**
   * @return Compression quality from 0.0 (max compression) to 1.0 (no compression)
   */
  public double getCompressionQuality() {
    return compressionQuality;
  }

  /**
   * @return Tile width and height in pixels (always a power of 2)
   */
  public int getTileSize() {
    return tileSize;
  }

  /**
   * @return Resampling algorithm for overview generation: "Nearest", "Bilinear", or "Bicubic"
   */
  public String getResampling() {
    return resampling;
  }

  /**
   * @return Number of overview levels. -1 means auto-compute based on image dimensions, 0 means no
   *     overviews.
   */
  public int getOverviewCount() {
    return overviewCount;
  }

  @Override
  public String toString() {
    return String.format(
        "CogOptions{compression='%s', compressionQuality=%s, tileSize=%s, resampling='%s', overviewCount=%s}",
        compression, compressionQuality, tileSize, resampling, overviewCount);
  }

  /**
   * Builder for {@link CogOptions}. All setters return {@code this} for chaining; validation is
   * deferred to {@link #build()}.
   */
  public static final class Builder {
    private String compression = "Deflate";
    private double compressionQuality = 0.2;
    private int tileSize = 256;
    private String resampling = "Nearest";
    private int overviewCount = -1;

    private Builder() {}

    /**
     * Set the compression type. Default: "Deflate".
     *
     * @param compression One of "Deflate", "LZW", "JPEG", "PackBits"
     * @return this builder
     */
    public Builder compression(String compression) {
      this.compression = compression;
      return this;
    }

    /**
     * Set the compression quality. Default: 0.2.
     *
     * @param compressionQuality Value from 0.0 (max compression) to 1.0 (no compression)
     * @return this builder
     */
    public Builder compressionQuality(double compressionQuality) {
      this.compressionQuality = compressionQuality;
      return this;
    }

    /**
     * Set the tile size for both width and height. Default: 256.
     *
     * @param tileSize Must be a positive power of 2 (e.g. 128, 256, 512, 1024)
     * @return this builder
     */
    public Builder tileSize(int tileSize) {
      this.tileSize = tileSize;
      return this;
    }

    /**
     * Set the resampling algorithm for overview generation. Default: "Nearest". Matching is
     * case-insensitive; the value is normalized to title-case during {@link #build()}.
     *
     * @param resampling One of "Nearest", "Bilinear", "Bicubic"
     * @return this builder
     */
    public Builder resampling(String resampling) {
      this.resampling = resampling;
      return this;
    }

    /**
     * Set the number of overview levels. Default: -1 (auto-compute).
     *
     * @param overviewCount -1 for auto, 0 for no overviews, or a positive count
     * @return this builder
     */
    public Builder overviewCount(int overviewCount) {
      this.overviewCount = overviewCount;
      return this;
    }

    /**
     * Build and validate the options.
     *
     * @return A validated, immutable {@link CogOptions} instance
     * @throws IllegalArgumentException if any option is invalid
     */
    public CogOptions build() {
      if (compression == null || compression.isEmpty()) {
        throw new IllegalArgumentException("compression must not be null or empty");
      }
      if (!VALID_COMPRESSION.contains(compression)) {
        throw new IllegalArgumentException(
            "compression must be one of " + VALID_COMPRESSION + ", got: '" + compression + "'");
      }
      if (compressionQuality < 0 || compressionQuality > 1.0) {
        throw new IllegalArgumentException(
            "compressionQuality must be between 0.0 and 1.0, got: " + compressionQuality);
      }
      if (tileSize <= 0) {
        throw new IllegalArgumentException("tileSize must be positive, got: " + tileSize);
      }
      // A positive power of two has exactly one bit set.
      if (Integer.bitCount(tileSize) != 1) {
        throw new IllegalArgumentException("tileSize must be a power of 2, got: " + tileSize);
      }
      if (overviewCount < -1) {
        throw new IllegalArgumentException(
            "overviewCount must be -1 (auto), 0 (none), or positive, got: " + overviewCount);
      }

      // Normalize resampling to title-case; the error message reports the raw input value.
      String canonical = normalizeResampling(resampling);
      if (!VALID_RESAMPLING.contains(canonical)) {
        throw new IllegalArgumentException(
            "resampling must be one of " + VALID_RESAMPLING + ", got: '" + resampling + "'");
      }
      this.resampling = canonical;

      return new CogOptions(this);
    }

    /**
     * Normalize the resampling string to title-case (first letter uppercase, rest lowercase) so
     * callers can pass "nearest", "BILINEAR", etc. Null/empty falls back to the default.
     */
    private static String normalizeResampling(String value) {
      if (value == null || value.isEmpty()) {
        return "Nearest";
      }
      String lowered = value.toLowerCase(Locale.ROOT);
      char head = Character.toUpperCase(lowered.charAt(0));
      return head + lowered.substring(1);
    }
  }
}
diff --git
a/common/src/main/java/org/apache/sedona/common/raster/cog/CogWriter.java
b/common/src/main/java/org/apache/sedona/common/raster/cog/CogWriter.java
new file mode 100644
index 0000000000..1973575d75
--- /dev/null
+++ b/common/src/main/java/org/apache/sedona/common/raster/cog/CogWriter.java
@@ -0,0 +1,293 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.sedona.common.raster.cog;
+
+import java.awt.image.RenderedImage;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.List;
+import javax.imageio.ImageWriteParam;
+import javax.media.jai.Interpolation;
+import javax.media.jai.InterpolationBicubic;
+import javax.media.jai.InterpolationBilinear;
+import javax.media.jai.InterpolationNearest;
+import org.geotools.api.coverage.grid.GridCoverageWriter;
+import org.geotools.api.parameter.GeneralParameterValue;
+import org.geotools.api.parameter.ParameterValueGroup;
+import org.geotools.api.referencing.crs.CoordinateReferenceSystem;
+import org.geotools.api.referencing.datum.PixelInCell;
+import org.geotools.coverage.grid.GridCoverage2D;
+import org.geotools.coverage.grid.GridEnvelope2D;
+import org.geotools.coverage.grid.GridGeometry2D;
+import org.geotools.coverage.grid.io.AbstractGridFormat;
+import org.geotools.coverage.processing.Operations;
+import org.geotools.gce.geotiff.GeoTiffWriteParams;
+import org.geotools.gce.geotiff.GeoTiffWriter;
+import org.geotools.referencing.operation.transform.AffineTransform2D;
+
+/**
+ * Creates Cloud Optimized GeoTIFF (COG) files from GeoTools GridCoverage2D
rasters.
+ *
+ * <p>The COG generation process:
+ *
+ * <ol>
+ * <li>Compute overview decimation factors (power of 2: 2, 4, 8, ...)
+ * <li>Generate overview images by downsampling
+ * <li>Write each (full-res + overviews) as a separate tiled GeoTIFF via
GeoTools
+ * <li>Parse each TIFF's IFD structure
+ * <li>Reassemble into COG byte order using {@link CogAssembler}
+ * </ol>
+ *
+ * <p>Overview decimation algorithm ported from GeoTrellis's {@code
+ * GeoTiff.defaultOverviewDecimations}.
+ */
+public class CogWriter {
+
+ /** Default tile size for COG output, matching GDAL's default */
+ public static final int DEFAULT_TILE_SIZE = 256;
+
+ /** Minimum image dimension to create an overview for */
+ private static final int MIN_OVERVIEW_SIZE = 2;
+
+ /**
+ * Write a GridCoverage2D as a Cloud Optimized GeoTIFF byte array using the
given options.
+ *
+ * @param raster The input raster
+ * @param options COG generation options (compression, tileSize, resampling,
overviewCount)
+ * @return COG file as byte array
+ * @throws IOException if writing fails
+ */
+ public static byte[] write(GridCoverage2D raster, CogOptions options) throws
IOException {
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ write(raster, options, bos);
+ return bos.toByteArray();
+ }
+
+ /**
+ * Write a GridCoverage2D as a Cloud Optimized GeoTIFF directly to an output
stream. This avoids
+ * allocating a byte[] for the entire COG, making it suitable for writing
large rasters to disk or
+ * network streams.
+ *
+ * @param raster The input raster
+ * @param options COG generation options (compression, tileSize, resampling,
overviewCount)
+ * @param outputStream The stream to write the COG to. Not closed by this
method.
+ * @throws IOException if writing fails
+ */
+ public static void write(GridCoverage2D raster, CogOptions options,
OutputStream outputStream)
+ throws IOException {
+ List<TiffIfdParser.ParsedTiff> parsedTiffs = encodeAndParse(raster,
options);
+ CogAssembler.assemble(parsedTiffs, outputStream);
+ }
+
+ /**
+ * Internal: encode the raster and all overviews as tiled GeoTIFFs, then
parse each into a
+ * ParsedTiff. Each overview's tiled GeoTIFF bytes are parsed immediately,
allowing the overview
+ * GridCoverage2D to be released before the next level is generated.
+ *
+ * <p>With zero-copy parsing, each ParsedTiff holds a reference to the
source byte array (no
+ * separate imageData copy), so peak memory is: live raster + one overview +
all compressed TIFF
+ * byte arrays.
+ */
+ private static List<TiffIfdParser.ParsedTiff> encodeAndParse(
+ GridCoverage2D raster, CogOptions options) throws IOException {
+ String compressionType = options.getCompression();
+ double compressionQuality = options.getCompressionQuality();
+ int tileSize = options.getTileSize();
+ String resampling = options.getResampling();
+ int requestedOverviewCount = options.getOverviewCount();
+
+ RenderedImage image = raster.getRenderedImage();
+ int cols = image.getWidth();
+ int rows = image.getHeight();
+
+ // Step 1: Compute overview decimation factors
+ List<Integer> decimations;
+ if (requestedOverviewCount == 0) {
+ decimations = new ArrayList<>();
+ } else {
+ decimations = computeOverviewDecimations(cols, rows, tileSize);
+ if (requestedOverviewCount > 0 && requestedOverviewCount <
decimations.size()) {
+ decimations = decimations.subList(0, requestedOverviewCount);
+ }
+ }
+
+ Interpolation interpolation = getInterpolation(resampling);
+ List<TiffIfdParser.ParsedTiff> parsedTiffs = new ArrayList<>(1 +
decimations.size());
+
+ // Step 2: Encode full-res → parse immediately
+ byte[] fullResBytes =
+ writeAsTiledGeoTiff(raster, compressionType, compressionQuality,
tileSize);
+ parsedTiffs.add(TiffIfdParser.parse(fullResBytes));
+
+ // Step 3: For each overview level, generate → encode → parse.
+ // The overview GridCoverage2D becomes eligible for GC after parsing.
+ for (int decimation : decimations) {
+ GridCoverage2D overview = generateOverview(raster, decimation,
interpolation);
+ byte[] overviewBytes =
+ writeAsTiledGeoTiff(overview, compressionType, compressionQuality,
tileSize);
+ parsedTiffs.add(TiffIfdParser.parse(overviewBytes));
+ // overview and its RenderedImage are now eligible for GC
+ }
+
+ return parsedTiffs;
+ }
+
+ /**
+ * Compute overview decimation factors. Each level is a power of 2.
+ *
+ * <p>Ported from GeoTrellis: {@code GeoTiff.defaultOverviewDecimations()}
+ *
+ * @param cols Image width in pixels
+ * @param rows Image height in pixels
+ * @param blockSize Tile size for the overview
+ * @return List of decimation factors [2, 4, 8, ...] or empty if image is
too small
+ */
+ static List<Integer> computeOverviewDecimations(int cols, int rows, int
blockSize) {
+ List<Integer> decimations = new ArrayList<>();
+ double pixels = Math.max(cols, rows);
+ double blocks = pixels / blockSize;
+ int overviewLevels = (int) Math.ceil(Math.log(blocks) / Math.log(2));
+
+ for (int level = 0; level < overviewLevels; level++) {
+ int decimation = (int) Math.pow(2, level + 1);
+ int overviewCols = (int) Math.ceil((double) cols / decimation);
+ int overviewRows = (int) Math.ceil((double) rows / decimation);
+ if (overviewCols < MIN_OVERVIEW_SIZE || overviewRows <
MIN_OVERVIEW_SIZE) {
+ break;
+ }
+ decimations.add(decimation);
+ }
+ return decimations;
+ }
+
  /**
   * Generate an overview (reduced resolution) coverage by downsampling.
   *
   * <p>The overview keeps the same CRS and the same origin (translate terms) as the source; only
   * the pixel scale grows by {@code decimationFactor}, so the overview covers the same geographic
   * extent at coarser resolution.
   *
   * @param raster The full resolution raster
   * @param decimationFactor Factor to reduce by (2 = half size, 4 = quarter, etc.)
   * @param interpolation The interpolation method to use for resampling
   * @return A new GridCoverage2D at reduced resolution
   */
  static GridCoverage2D generateOverview(
      GridCoverage2D raster, int decimationFactor, Interpolation interpolation) {
    RenderedImage image = raster.getRenderedImage();
    // Ceil so a non-divisible dimension keeps a final partial row/column of pixels.
    int newWidth = (int) Math.ceil((double) image.getWidth() / decimationFactor);
    int newHeight = (int) Math.ceil((double) image.getHeight() / decimationFactor);

    // Use GeoTools Operations.DEFAULT.resample to downsample
    CoordinateReferenceSystem crs = raster.getCoordinateReferenceSystem2D();

    AffineTransform2D originalTransform =
        (AffineTransform2D) raster.getGridGeometry().getGridToCRS2D();
    double newScaleX = originalTransform.getScaleX() * decimationFactor;
    double newScaleY = originalTransform.getScaleY() * decimationFactor;

    // AffineTransform2D constructor order is (m00, m10, m01, m11, m02, m12) =
    // (scaleX, shearY, shearX, scaleY, translateX, translateY) — do not reorder.
    // NOTE(review): shear terms are carried over unscaled; for rotated/sheared rasters
    // the overview geometry may drift — TODO confirm against a rotated input.
    AffineTransform2D newTransform =
        new AffineTransform2D(
            newScaleX,
            originalTransform.getShearY(),
            originalTransform.getShearX(),
            newScaleY,
            originalTransform.getTranslateX(),
            originalTransform.getTranslateY());

    // CELL_CORNER anchoring keeps the upper-left corner fixed, so the unchanged
    // translate terms remain valid for the decimated grid.
    GridGeometry2D gridGeometry =
        new GridGeometry2D(
            new GridEnvelope2D(0, 0, newWidth, newHeight),
            PixelInCell.CELL_CORNER,
            newTransform,
            crs,
            null);

    return (GridCoverage2D) Operations.DEFAULT.resample(raster, null, gridGeometry, interpolation);
  }
+
  /**
   * Generate an overview using default nearest-neighbor interpolation. Kept for backward
   * compatibility with tests.
   *
   * @param raster The full resolution raster
   * @param decimationFactor Factor to reduce by (2 = half size, 4 = quarter, etc.)
   * @return A new GridCoverage2D at reduced resolution, resampled with nearest-neighbor
   */
  static GridCoverage2D generateOverview(GridCoverage2D raster, int decimationFactor) {
    return generateOverview(raster, decimationFactor, new InterpolationNearest());
  }
+
+ /**
+ * Map a resampling algorithm name to a JAI Interpolation instance.
+ *
+ * @param resampling One of "Nearest", "Bilinear", "Bicubic"
+ * @return The corresponding JAI Interpolation
+ */
+ private static Interpolation getInterpolation(String resampling) {
+ switch (resampling) {
+ case "Bilinear":
+ return new InterpolationBilinear();
+ case "Bicubic":
+ return new InterpolationBicubic(8);
+ case "Nearest":
+ default:
+ return new InterpolationNearest();
+ }
+ }
+
  /**
   * Write a GridCoverage2D as a tiled GeoTIFF byte array using GeoTools.
   *
   * @param raster The input raster
   * @param compressionType Compression type
   * @param compressionQuality Quality 0.0 to 1.0
   * @param tileSize Tile dimensions in pixels
   * @return Tiled GeoTIFF as byte array
   * @throws IOException if writing fails
   */
  private static byte[] writeAsTiledGeoTiff(
      GridCoverage2D raster, String compressionType, double compressionQuality, int tileSize)
      throws IOException {

    try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
      GridCoverageWriter writer = new GeoTiffWriter(out);
      try {
        ParameterValueGroup defaultParams = writer.getFormat().getWriteParameters();
        GeoTiffWriteParams params = new GeoTiffWriteParams();

        // Set tiling — must use the 2-arg overload from GeoToolsWriteParams
        // which delegates to the inner write param. The 4-arg ImageWriteParam.setTiling()
        // writes to the wrong fields (parent vs inner param).
        params.setTilingMode(ImageWriteParam.MODE_EXPLICIT);
        params.setTiling(tileSize, tileSize);

        // Set compression
        params.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
        params.setCompressionType(compressionType);
        params.setCompressionQuality((float) compressionQuality);

        // Install the customized write params under the GeoTools key so the writer picks
        // them up instead of its defaults.
        defaultParams
            .parameter(AbstractGridFormat.GEOTOOLS_WRITE_PARAMS.getName().toString())
            .setValue(params);

        GeneralParameterValue[] wps = defaultParams.values().toArray(new GeneralParameterValue[0]);

        writer.write(raster, wps);
      } finally {
        // Release the underlying image writer even if write() throws.
        writer.dispose();
      }
      return out.toByteArray();
    }
  }
+}
diff --git
a/common/src/main/java/org/apache/sedona/common/raster/cog/TiffIfdParser.java
b/common/src/main/java/org/apache/sedona/common/raster/cog/TiffIfdParser.java
new file mode 100644
index 0000000000..9a7ccef62d
--- /dev/null
+++
b/common/src/main/java/org/apache/sedona/common/raster/cog/TiffIfdParser.java
@@ -0,0 +1,371 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.sedona.common.raster.cog;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Parses the IFD (Image File Directory) structure from a TIFF byte array.
This is used to extract
+ * the structural components needed for COG assembly: the IFD entries,
overflow tag data, and image
+ * data regions.
+ *
+ * <p>Reference: TIFF 6.0 Specification, Section 2 (TIFF Structure).
+ */
public class TiffIfdParser {

  /** Tag code for TileOffsets (0x0144 = 324) */
  public static final int TAG_TILE_OFFSETS = 324;

  /** Tag code for StripOffsets (0x0111 = 273) */
  public static final int TAG_STRIP_OFFSETS = 273;

  /** Tag code for TileByteCounts (0x0145 = 325) */
  public static final int TAG_TILE_BYTE_COUNTS = 325;

  /** Tag code for StripByteCounts (0x0117 = 279) */
  public static final int TAG_STRIP_BYTE_COUNTS = 279;

  /** Tag code for NewSubfileType (0x00FE = 254) */
  public static final int TAG_NEW_SUBFILE_TYPE = 254;

  /** TIFF field type sizes in bytes, indexed by TIFF field type code (TIFF 6.0 §2). */
  private static final int[] FIELD_TYPE_SIZES = {
    0, // 0: unused
    1, // 1: BYTE
    1, // 2: ASCII
    2, // 3: SHORT
    4, // 4: LONG
    8, // 5: RATIONAL
    1, // 6: SBYTE
    1, // 7: UNDEFINED
    2, // 8: SSHORT
    4, // 9: SLONG
    8, // 10: SRATIONAL
    4, // 11: FLOAT
    8 // 12: DOUBLE
  };

  /**
   * Result of parsing a TIFF file. Contains the byte order and the parsed IFD data for the first
   * IFD only (we write each overview as a separate TIFF, so there's always exactly one IFD).
   */
  public static class ParsedTiff {
    /** Byte order of the TIFF file */
    public final ByteOrder byteOrder;

    /** Offset where the first IFD starts (always 8 for standard TIFF) */
    public final int ifdOffset;

    /** Number of tag entries in the IFD */
    public final int tagCount;

    /**
     * Raw bytes of all IFD tag entries (tagCount * 12 bytes). This includes the 12-byte entries but
     * NOT the 2-byte tag count or the 4-byte next-IFD pointer.
     */
    public final byte[] ifdEntries;

    /**
     * Overflow tag data — values that exceed 4 bytes and are stored outside the IFD entries. This
     * is collected in the order the tags reference them.
     */
    public final byte[] overflowData;

    /**
     * The start offset of the overflow data region in the original TIFF file. Used to rebase
     * overflow pointers when reassembling.
     */
    public final int overflowDataStart;

    /**
     * The original source TIFF byte array. Image data is referenced via {@link #imageDataOffset}
     * and {@link #imageDataLength} rather than being copied out, to avoid a redundant allocation.
     */
    public final byte[] sourceData;

    /** Byte offset within {@link #sourceData} where image data begins */
    public final int imageDataOffset;

    /** Total byte length of the image data region */
    public final int imageDataLength;

    /** Byte offsets of each tile/strip within imageData (relative to imageData start) */
    public final int[] segmentOffsets;

    /** Byte counts of each tile/strip */
    public final int[] segmentByteCounts;

    /** Whether the IFD contains a NewSubfileType tag */
    public final boolean hasNewSubfileType;

    /** The total size of the IFD region: 2 (count) + tagCount*12 + 4 (next pointer) */
    public int getIfdSize() {
      return 2 + tagCount * 12 + 4;
    }

    /** The total size of IFD + overflow data (everything before image data) */
    public int getIfdAndOverflowSize() {
      return getIfdSize() + overflowData.length;
    }

    ParsedTiff(
        ByteOrder byteOrder,
        int ifdOffset,
        int tagCount,
        byte[] ifdEntries,
        byte[] overflowData,
        int overflowDataStart,
        byte[] sourceData,
        int imageDataOffset,
        int imageDataLength,
        int[] segmentOffsets,
        int[] segmentByteCounts,
        boolean hasNewSubfileType) {
      this.byteOrder = byteOrder;
      this.ifdOffset = ifdOffset;
      this.tagCount = tagCount;
      this.ifdEntries = ifdEntries;
      this.overflowData = overflowData;
      this.overflowDataStart = overflowDataStart;
      this.sourceData = sourceData;
      this.imageDataOffset = imageDataOffset;
      this.imageDataLength = imageDataLength;
      this.segmentOffsets = segmentOffsets;
      this.segmentByteCounts = segmentByteCounts;
      this.hasNewSubfileType = hasNewSubfileType;
    }
  }

  /**
   * Parse a standard TIFF byte array and extract its first IFD structure.
   *
   * <p>NOTE: all file offsets are read as signed 32-bit ints, so only classic TIFF files smaller
   * than 2 GiB are supported (BigTIFF is rejected by the magic-number check below).
   *
   * @param tiffBytes The complete TIFF file as a byte array
   * @return ParsedTiff with all structural components extracted
   * @throws IllegalArgumentException if the TIFF header is invalid
   */
  public static ParsedTiff parse(byte[] tiffBytes) {
    if (tiffBytes.length < 8) {
      throw new IllegalArgumentException("TIFF data too short: " + tiffBytes.length + " bytes");
    }

    // Read byte order from first 2 bytes ("II" = little-endian, "MM" = big-endian)
    ByteOrder byteOrder;
    if (tiffBytes[0] == 'I' && tiffBytes[1] == 'I') {
      byteOrder = ByteOrder.LITTLE_ENDIAN;
    } else if (tiffBytes[0] == 'M' && tiffBytes[1] == 'M') {
      byteOrder = ByteOrder.BIG_ENDIAN;
    } else {
      throw new IllegalArgumentException(
          "Invalid TIFF byte order marker: " + tiffBytes[0] + ", " + tiffBytes[1]);
    }

    ByteBuffer buf = ByteBuffer.wrap(tiffBytes).order(byteOrder);

    // Verify TIFF magic number (42). BigTIFF uses 43 and is rejected here.
    int magic = buf.getShort(2) & 0xFFFF;
    if (magic != 42) {
      throw new IllegalArgumentException("Not a standard TIFF file (magic=" + magic + ")");
    }

    // Read first IFD offset; must leave room for at least the 2-byte tag count.
    int ifdOffset = buf.getInt(4);
    if (ifdOffset < 8 || ifdOffset >= tiffBytes.length - 2) {
      throw new IllegalArgumentException(
          "IFD offset out of range: " + ifdOffset + " (file size: " + tiffBytes.length + ")");
    }

    // Read number of directory entries
    int tagCount = buf.getShort(ifdOffset) & 0xFFFF;

    // Read all IFD entries (12 bytes each)
    int entriesStart = ifdOffset + 2;
    int entriesLen = tagCount * 12;
    if (entriesStart + entriesLen > tiffBytes.length) {
      throw new IllegalArgumentException(
          "IFD entries extend beyond file: entriesStart="
              + entriesStart
              + " entriesLen="
              + entriesLen
              + " fileSize="
              + tiffBytes.length);
    }
    byte[] ifdEntries = new byte[entriesLen];
    System.arraycopy(tiffBytes, entriesStart, ifdEntries, 0, entriesLen);

    // Find the offsets tag and bytecounts tag to locate image data
    int offsetsTag = -1;
    int byteCountsTag = -1;
    int segmentCount = 0;
    boolean hasNewSubfileType = false;

    // Also track the overflow data region
    int overflowStart = Integer.MAX_VALUE;
    int overflowEnd = 0;

    // First pass: find offset/bytecount tags and overflow region
    for (int i = 0; i < tagCount; i++) {
      int entryOffset = entriesStart + i * 12;
      int tag = buf.getShort(entryOffset) & 0xFFFF;
      int fieldType = buf.getShort(entryOffset + 2) & 0xFFFF;
      int count = buf.getInt(entryOffset + 4);
      int valueSize = count * getFieldTypeSize(fieldType);

      if (tag == TAG_TILE_OFFSETS || tag == TAG_STRIP_OFFSETS) {
        offsetsTag = tag;
        segmentCount = count;
      } else if (tag == TAG_TILE_BYTE_COUNTS || tag == TAG_STRIP_BYTE_COUNTS) {
        byteCountsTag = tag;
      } else if (tag == TAG_NEW_SUBFILE_TYPE) {
        hasNewSubfileType = true;
      }

      // Track overflow data region (values > 4 bytes stored outside IFD entries).
      // NOTE(review): the region is modeled as one contiguous [min, max) span; any
      // non-tag bytes falling between overflow values are carried along verbatim.
      if (valueSize > 4) {
        int valOffset = buf.getInt(entryOffset + 8);
        if (valOffset < 0 || valOffset + valueSize > tiffBytes.length) {
          throw new IllegalArgumentException(
              "Overflow data for tag "
                  + tag
                  + " out of range: offset="
                  + valOffset
                  + " size="
                  + valueSize
                  + " fileSize="
                  + tiffBytes.length);
        }
        overflowStart = Math.min(overflowStart, valOffset);
        overflowEnd = Math.max(overflowEnd, valOffset + valueSize);
      }
    }

    if (offsetsTag < 0 || byteCountsTag < 0) {
      throw new IllegalArgumentException(
          "TIFF missing TileOffsets/StripOffsets or TileByteCounts/StripByteCounts tags");
    }

    // Read segment offsets and byte counts
    int[] segmentOffsets = readIntArray(buf, entriesStart, tagCount, offsetsTag);
    int[] segmentByteCounts = readIntArray(buf, entriesStart, tagCount, byteCountsTag);

    // Extract overflow data
    byte[] overflowData;
    int overflowDataStart;
    if (overflowStart < Integer.MAX_VALUE) {
      overflowDataStart = overflowStart;
      overflowData = new byte[overflowEnd - overflowStart];
      System.arraycopy(tiffBytes, overflowStart, overflowData, 0, overflowData.length);
    } else {
      overflowDataStart = 0;
      overflowData = new byte[0];
    }

    // Find image data bounds (min start to max end across all tiles/strips)
    int imageDataStart = Integer.MAX_VALUE;
    int imageDataEnd = 0;
    for (int i = 0; i < segmentCount; i++) {
      imageDataStart = Math.min(imageDataStart, segmentOffsets[i]);
      imageDataEnd = Math.max(imageDataEnd, segmentOffsets[i] + segmentByteCounts[i]);
    }

    if (imageDataStart < 0 || imageDataEnd > tiffBytes.length) {
      throw new IllegalArgumentException(
          "Image data region out of range: start="
              + imageDataStart
              + " end="
              + imageDataEnd
              + " fileSize="
              + tiffBytes.length);
    }

    int imageDataLength = imageDataEnd - imageDataStart;

    // Make segment offsets relative to imageData start
    int[] relativeOffsets = new int[segmentCount];
    for (int i = 0; i < segmentCount; i++) {
      relativeOffsets[i] = segmentOffsets[i] - imageDataStart;
    }

    return new ParsedTiff(
        byteOrder,
        ifdOffset,
        tagCount,
        ifdEntries,
        overflowData,
        overflowDataStart,
        tiffBytes,
        imageDataStart,
        imageDataLength,
        relativeOffsets,
        segmentByteCounts,
        hasNewSubfileType);
  }

  /**
   * Read an array of int values from an IFD tag entry. Handles both inline (value fits in the
   * 4-byte value field) and overflow (pointer in entry) cases.
   *
   * @param buf The whole TIFF file, with byte order already applied
   * @param entriesStart Offset of the first 12-byte IFD entry
   * @param tagCount Number of IFD entries to scan
   * @param targetTag Tag code to look up
   * @return The tag's values widened to int; empty if the entry declares count 0
   * @throws IllegalArgumentException if the tag is not present in the IFD
   */
  private static int[] readIntArray(ByteBuffer buf, int entriesStart, int tagCount, int targetTag) {
    for (int i = 0; i < tagCount; i++) {
      int entryOffset = entriesStart + i * 12;
      int tag = buf.getShort(entryOffset) & 0xFFFF;
      if (tag != targetTag) continue;

      int fieldType = buf.getShort(entryOffset + 2) & 0xFFFF;
      int count = buf.getInt(entryOffset + 4);

      int valueSize = count * getFieldTypeSize(fieldType);
      int[] result = new int[count];

      if (valueSize <= 4) {
        // Value stored inline in the entry's 4-byte value field
        if (fieldType == 3) { // SHORT (up to two fit inline)
          for (int j = 0; j < count; j++) {
            result[j] = buf.getShort(entryOffset + 8 + j * 2) & 0xFFFF;
          }
        } else if (count >= 1) { // LONG — exactly one fits inline.
          // The count>=1 guard keeps a malformed count=0 entry from throwing
          // ArrayIndexOutOfBoundsException; it yields an empty array instead.
          result[0] = buf.getInt(entryOffset + 8);
        }
      } else {
        // Value stored at an offset elsewhere in the file
        int valOffset = buf.getInt(entryOffset + 8);
        if (fieldType == 3) { // SHORT
          for (int j = 0; j < count; j++) {
            result[j] = buf.getShort(valOffset + j * 2) & 0xFFFF;
          }
        } else { // LONG
          for (int j = 0; j < count; j++) {
            result[j] = buf.getInt(valOffset + j * 4);
          }
        }
      }
      return result;
    }
    throw new IllegalArgumentException("Tag " + targetTag + " not found in IFD");
  }

  /** Get the byte size of a TIFF field type. */
  private static int getFieldTypeSize(int fieldType) {
    if (fieldType >= 1 && fieldType < FIELD_TYPE_SIZES.length) {
      return FIELD_TYPE_SIZES[fieldType];
    }
    return 1; // default for unknown types
  }
}
diff --git
a/common/src/test/java/org/apache/sedona/common/raster/cog/CogWriterTest.java
b/common/src/test/java/org/apache/sedona/common/raster/cog/CogWriterTest.java
new file mode 100644
index 0000000000..3bf87b88d7
--- /dev/null
+++
b/common/src/test/java/org/apache/sedona/common/raster/cog/CogWriterTest.java
@@ -0,0 +1,693 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.sedona.common.raster.cog;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.List;
+import org.apache.sedona.common.raster.MapAlgebra;
+import org.apache.sedona.common.raster.RasterConstructors;
+import org.apache.sedona.common.raster.RasterOutputs;
+import org.geotools.coverage.grid.GridCoverage2D;
+import org.junit.Test;
+
+public class CogWriterTest {
+
+ private static final String resourceFolder =
+ System.getProperty("user.dir") + "/../spark/common/src/test/resources/";
+
+ private GridCoverage2D rasterFromGeoTiff(String filePath) throws IOException
{
+ byte[] bytes = Files.readAllBytes(Paths.get(filePath));
+ return RasterConstructors.fromGeoTiff(bytes);
+ }
+
+ @Test
+ public void testComputeOverviewDecimations() {
+ // 1000x1000 with blockSize=256: ceil(log2(1000/256)) = ceil(1.97) = 2
levels -> [2, 4]
+ List<Integer> decimations = CogWriter.computeOverviewDecimations(1000,
1000, 256);
+ assertEquals(2, decimations.size());
+ assertEquals(Integer.valueOf(2), decimations.get(0));
+ assertEquals(Integer.valueOf(4), decimations.get(1));
+
+ // 10000x10000 with blockSize=256: ceil(log2(10000/256)) = ceil(5.29) = 6
levels
+ decimations = CogWriter.computeOverviewDecimations(10000, 10000, 256);
+ assertEquals(6, decimations.size());
+ assertEquals(Integer.valueOf(2), decimations.get(0));
+ assertEquals(Integer.valueOf(4), decimations.get(1));
+ assertEquals(Integer.valueOf(8), decimations.get(2));
+ assertEquals(Integer.valueOf(16), decimations.get(3));
+ assertEquals(Integer.valueOf(32), decimations.get(4));
+ assertEquals(Integer.valueOf(64), decimations.get(5));
+
+ // Very small image: 50x50 with blockSize=256 -> no overviews
+ decimations = CogWriter.computeOverviewDecimations(50, 50, 256);
+ assertEquals(0, decimations.size());
+
+ // Exactly one tile: 256x256 with blockSize=256 -> no overviews
+ decimations = CogWriter.computeOverviewDecimations(256, 256, 256);
+ assertEquals(0, decimations.size());
+ }
+
+ @Test
+ public void testGenerateOverview() {
+ // Create a 100x100 single-band raster
+ double[] bandValues = new double[100 * 100];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = i % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 100, 100, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ // Downsample by factor of 2
+ GridCoverage2D overview = CogWriter.generateOverview(raster, 2);
+ assertNotNull(overview);
+ assertEquals(50, overview.getRenderedImage().getWidth());
+ assertEquals(50, overview.getRenderedImage().getHeight());
+ }
+
+ @Test
+ public void testWriteSmallRasterAsCog() throws IOException {
+ // Create a small raster (no overviews expected due to small size)
+ double[] bandValues = new double[50 * 50];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = i % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 50, 50, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ byte[] cogBytes = RasterOutputs.asCloudOptimizedGeoTiff(raster,
CogOptions.defaults());
+ assertNotNull(cogBytes);
+ assertTrue(cogBytes.length > 0);
+
+ // Verify it's a valid TIFF
+ assertTrue(
+ (cogBytes[0] == 'I' && cogBytes[1] == 'I') || (cogBytes[0] == 'M' &&
cogBytes[1] == 'M'));
+
+ // Verify it can be read back
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertNotNull(readBack);
+ assertEquals(50, readBack.getRenderedImage().getWidth());
+ assertEquals(50, readBack.getRenderedImage().getHeight());
+ }
+
+ @Test
+ public void testWriteMediumRasterAsCog() throws IOException {
+ // Create a 512x512 raster (should produce overviews with 256 tile size)
+ double[] bandValues = new double[512 * 512];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = (i * 7) % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 512, 512, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ byte[] cogBytes =
+ RasterOutputs.asCloudOptimizedGeoTiff(
+ raster,
CogOptions.builder().compression("Deflate").compressionQuality(0.5).build());
+ assertNotNull(cogBytes);
+ assertTrue(cogBytes.length > 0);
+
+ // Verify COG structure: IFDs should be at the beginning of the file
+ ByteOrder byteOrder = (cogBytes[0] == 'I') ? ByteOrder.LITTLE_ENDIAN :
ByteOrder.BIG_ENDIAN;
+ ByteBuffer buf = ByteBuffer.wrap(cogBytes).order(byteOrder);
+
+ // First IFD should be at offset 8 (right after header)
+ int firstIfdOffset = buf.getInt(4);
+ assertEquals(8, firstIfdOffset);
+
+ // Read first IFD tag count
+ int tagCount = buf.getShort(firstIfdOffset) & 0xFFFF;
+ assertTrue("First IFD should have tags", tagCount > 0);
+
+ // Check that nextIFDOffset points to another IFD (should have at least 1
overview)
+ int nextIfdPointerPos = firstIfdOffset + 2 + tagCount * 12;
+ int nextIfdOffset = buf.getInt(nextIfdPointerPos);
+ // For a 512x512 image with 256 tile size, we expect at least one overview
+ assertTrue("Should have at least one overview IFD", nextIfdOffset > 0);
+ // The next IFD should be before any image data (COG requirement).
+ // Find the minimum TileOffset to use as an upper bound.
+ int minTileOffset = Integer.MAX_VALUE;
+ for (int i = 0; i < tagCount; i++) {
+ int entryOffset = firstIfdOffset + 2 + i * 12;
+ int tag = buf.getShort(entryOffset) & 0xFFFF;
+ if (tag == 324 || tag == 273) { // TileOffsets or StripOffsets
+ int count = buf.getInt(entryOffset + 4);
+ if (count == 1) {
+ minTileOffset = Math.min(minTileOffset, buf.getInt(entryOffset + 8));
+ } else {
+ int arrayOffset = buf.getInt(entryOffset + 8);
+ for (int j = 0; j < count; j++) {
+ minTileOffset = Math.min(minTileOffset, buf.getInt(arrayOffset + j
* 4));
+ }
+ }
+ }
+ }
+ assertTrue("Overview IFD should be before image data", nextIfdOffset <
minTileOffset);
+
+ // Verify it can be read back by GeoTools
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertNotNull(readBack);
+ assertEquals(512, readBack.getRenderedImage().getWidth());
+ assertEquals(512, readBack.getRenderedImage().getHeight());
+
+ // Verify pixel values are preserved
+ double[] originalValues = MapAlgebra.bandAsArray(raster, 1);
+ double[] readBackValues = MapAlgebra.bandAsArray(readBack, 1);
+ assertArrayEquals(originalValues, readBackValues, 0.01);
+ }
+
+ @Test
+ public void testWriteMultibandRasterAsCog() throws IOException {
+ // Create a 3-band 256x256 raster
+ int width = 256;
+ int height = 256;
+ int numBands = 3;
+ double[][] bandData = new double[numBands][width * height];
+ for (int b = 0; b < numBands; b++) {
+ for (int i = 0; i < width * height; i++) {
+ bandData[b][i] = (i * (b + 1)) % 256;
+ }
+ }
+
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ numBands, "b", width, height, 0, 0, 1, -1, 0, 0, 4326, bandData);
+
+ byte[] cogBytes = RasterOutputs.asCloudOptimizedGeoTiff(raster,
CogOptions.defaults());
+ assertNotNull(cogBytes);
+
+ // Verify it can be read back
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertNotNull(readBack);
+ assertEquals(width, readBack.getRenderedImage().getWidth());
+ assertEquals(height, readBack.getRenderedImage().getHeight());
+ }
+
+ @Test
+ public void testWriteWithLZWCompression() throws IOException {
+ double[] bandValues = new double[100 * 100];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = i % 10; // Highly compressible
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 100, 100, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ byte[] cogBytes =
+ RasterOutputs.asCloudOptimizedGeoTiff(
+ raster,
CogOptions.builder().compression("LZW").compressionQuality(0.5).build());
+ assertNotNull(cogBytes);
+ assertTrue(cogBytes.length > 0);
+
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertNotNull(readBack);
+ }
+
+ @Test
+ public void testCogFromExistingGeoTiff() throws IOException {
+ // Test with a real GeoTIFF file from test resources
+ GridCoverage2D raster = rasterFromGeoTiff(resourceFolder +
"raster/test1.tiff");
+
+ byte[] cogBytes = RasterOutputs.asCloudOptimizedGeoTiff(raster,
CogOptions.defaults());
+ assertNotNull(cogBytes);
+ assertTrue(cogBytes.length > 0);
+
+ // Verify it can be read back
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertNotNull(readBack);
+ assertEquals(raster.getRenderedImage().getWidth(),
readBack.getRenderedImage().getWidth());
+ assertEquals(raster.getRenderedImage().getHeight(),
readBack.getRenderedImage().getHeight());
+ }
+
+ @Test
+ public void testTiffIfdParser() throws IOException {
+ // Write a tiled GeoTIFF and parse it
+ double[] bandValues = new double[256 * 256];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = i % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 256, 256, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ byte[] tiffBytes = RasterOutputs.asGeoTiff(raster, "Deflate", 0.5);
+
+ TiffIfdParser.ParsedTiff parsed = TiffIfdParser.parse(tiffBytes);
+ assertNotNull(parsed);
+ assertTrue(parsed.tagCount > 0);
+ assertTrue(parsed.imageDataLength > 0);
+ assertTrue(parsed.ifdEntries.length == parsed.tagCount * 12);
+ }
+
+ @Test
+ public void testOverviewIfdHasNewSubfileType() throws IOException {
+ // Create a 512x512 raster that will have at least one overview
+ double[] bandValues = new double[512 * 512];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = (i * 3) % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 512, 512, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ byte[] cogBytes = RasterOutputs.asCloudOptimizedGeoTiff(raster,
CogOptions.defaults());
+ ByteOrder byteOrder = (cogBytes[0] == 'I') ? ByteOrder.LITTLE_ENDIAN :
ByteOrder.BIG_ENDIAN;
+ ByteBuffer buf = ByteBuffer.wrap(cogBytes).order(byteOrder);
+
+ // Navigate to second IFD (first overview)
+ int firstIfdOffset = buf.getInt(4);
+ int tagCount0 = buf.getShort(firstIfdOffset) & 0xFFFF;
+ int nextIfdPointerPos = firstIfdOffset + 2 + tagCount0 * 12;
+ int secondIfdOffset = buf.getInt(nextIfdPointerPos);
+ assertTrue("Should have at least one overview IFD", secondIfdOffset > 0);
+
+ // Scan second IFD for NewSubfileType tag (254)
+ int tagCount1 = buf.getShort(secondIfdOffset) & 0xFFFF;
+ boolean foundNewSubfileType = false;
+ int newSubfileTypeValue = -1;
+ for (int i = 0; i < tagCount1; i++) {
+ int entryOffset = secondIfdOffset + 2 + i * 12;
+ int tag = buf.getShort(entryOffset) & 0xFFFF;
+ if (tag == 254) {
+ foundNewSubfileType = true;
+ newSubfileTypeValue = buf.getInt(entryOffset + 8);
+ break;
+ }
+ }
+ assertTrue("Overview IFD must contain NewSubfileType tag (254)",
foundNewSubfileType);
+ assertEquals("NewSubfileType must be 1 (ReducedImage)", 1,
newSubfileTypeValue);
+ }
+
+ @Test
+ public void testInvalidInputParameters() {
+ double[] bandValues = new double[50 * 50];
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 50, 50, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ // Invalid compression type
+ try {
+ CogOptions.builder().compression("ZSTD").build();
+ fail("Expected IllegalArgumentException for invalid compression");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage().contains("compression must be one of"));
+ }
+
+ // compressionQuality > 1
+ try {
+
CogOptions.builder().compression("Deflate").compressionQuality(1.5).tileSize(256).build();
+ fail("Expected IllegalArgumentException for compressionQuality > 1");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage().contains("compressionQuality"));
+ }
+
+ // tileSize <= 0
+ try {
+
CogOptions.builder().compression("Deflate").compressionQuality(0.5).tileSize(0).build();
+ fail("Expected IllegalArgumentException for tileSize <= 0");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage().contains("tileSize"));
+ }
+
+ // tileSize not power of 2
+ try {
+
CogOptions.builder().compression("Deflate").compressionQuality(0.5).tileSize(100).build();
+ fail("Expected IllegalArgumentException for non-power-of-2 tileSize");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage().contains("power of 2"));
+ }
+ }
+
+ @Test
+ public void testParserRejectsMalformedTiff() {
+ // Too short
+ try {
+ TiffIfdParser.parse(new byte[] {0, 0, 0});
+ fail("Expected IllegalArgumentException for short input");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage().contains("too short"));
+ }
+
+ // Invalid byte order marker
+ try {
+ TiffIfdParser.parse(new byte[] {'X', 'X', 0, 42, 0, 0, 0, 8});
+ fail("Expected IllegalArgumentException for invalid byte order");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage().contains("byte order"));
+ }
+
+ // Valid header but IFD offset points beyond file
+ byte[] badOffset = new byte[16];
+ badOffset[0] = 'I';
+ badOffset[1] = 'I';
+ badOffset[2] = 42;
+ badOffset[3] = 0;
+ // IFD offset = 9999 (way beyond file)
+ ByteBuffer b = ByteBuffer.wrap(badOffset).order(ByteOrder.LITTLE_ENDIAN);
+ b.putInt(4, 9999);
+ try {
+ TiffIfdParser.parse(badOffset);
+ fail("Expected IllegalArgumentException for out-of-range IFD offset");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage().contains("out of range"));
+ }
+ }
+
+ @Test
+ public void testCogTileOffsetsAreForwardPointing() throws IOException {
+ // Create a raster with overviews
+ double[] bandValues = new double[512 * 512];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = (i * 11) % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 512, 512, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ byte[] cogBytes = RasterOutputs.asCloudOptimizedGeoTiff(raster,
CogOptions.defaults());
+ ByteOrder byteOrder = (cogBytes[0] == 'I') ? ByteOrder.LITTLE_ENDIAN :
ByteOrder.BIG_ENDIAN;
+ ByteBuffer buf = ByteBuffer.wrap(cogBytes).order(byteOrder);
+
+ // Walk all IFDs and verify TileOffsets/StripOffsets point within file
bounds
+ int ifdOffset = buf.getInt(4);
+ int ifdIndex = 0;
+ int lastIfdEnd = 0;
+
+ while (ifdOffset != 0) {
+ int tagCount = buf.getShort(ifdOffset) & 0xFFFF;
+ int ifdEnd = ifdOffset + 2 + tagCount * 12 + 4;
+ lastIfdEnd = Math.max(lastIfdEnd, ifdEnd);
+
+ for (int i = 0; i < tagCount; i++) {
+ int entryOffset = ifdOffset + 2 + i * 12;
+ int tag = buf.getShort(entryOffset) & 0xFFFF;
+ int fieldType = buf.getShort(entryOffset + 2) & 0xFFFF;
+ int count = buf.getInt(entryOffset + 4);
+
+ // Check TileOffsets (324) or StripOffsets (273)
+ if (tag == 324 || tag == 273) {
+ if (count == 1) {
+ int offset = buf.getInt(entryOffset + 8);
+ assertTrue(
+ "IFD " + ifdIndex + ": TileOffset " + offset + " must be
within file",
+ offset >= 0 && offset < cogBytes.length);
+ } else {
+ // Offsets stored in overflow area
+ int arrayOffset = buf.getInt(entryOffset + 8);
+ for (int j = 0; j < count; j++) {
+ int tileOffset = buf.getInt(arrayOffset + j * 4);
+ assertTrue(
+ "IFD " + ifdIndex + " tile " + j + ": offset " + tileOffset
+ " out of range",
+ tileOffset >= 0 && tileOffset < cogBytes.length);
+ }
+ }
+ }
+ }
+
+ // Read next IFD offset
+ int nextIfdPointerPos = ifdOffset + 2 + tagCount * 12;
+ ifdOffset = buf.getInt(nextIfdPointerPos);
+ ifdIndex++;
+ }
+
+ // Verify we found at least 2 IFDs (full-res + overview)
+ assertTrue("Expected at least 2 IFDs, found " + ifdIndex, ifdIndex >= 2);
+
+ // Verify all IFDs are before image data (forward-pointing)
+ // Find the minimum tile offset across all IFDs as the image data start
+ int minTileOffsetGlobal = Integer.MAX_VALUE;
+ int walkOffset = buf.getInt(4);
+ while (walkOffset != 0) {
+ int tc = buf.getShort(walkOffset) & 0xFFFF;
+ for (int i = 0; i < tc; i++) {
+ int eo = walkOffset + 2 + i * 12;
+ int t = buf.getShort(eo) & 0xFFFF;
+ if (t == 324 || t == 273) {
+ int c = buf.getInt(eo + 4);
+ if (c == 1) {
+ minTileOffsetGlobal = Math.min(minTileOffsetGlobal, buf.getInt(eo
+ 8));
+ } else {
+ int ao = buf.getInt(eo + 8);
+ for (int j = 0; j < c; j++) {
+ minTileOffsetGlobal = Math.min(minTileOffsetGlobal,
buf.getInt(ao + j * 4));
+ }
+ }
+ }
+ }
+ walkOffset = buf.getInt(walkOffset + 2 + tc * 12);
+ }
+ assertTrue("IFD region should end before image data starts", lastIfdEnd <=
minTileOffsetGlobal);
+ }
+
+ // --- CogOptions tests ---
+
+ @Test
+ public void testCogOptionsDefaults() {
+ CogOptions opts = CogOptions.defaults();
+ assertEquals("Deflate", opts.getCompression());
+ assertEquals(0.2, opts.getCompressionQuality(), 0.001);
+ assertEquals(256, opts.getTileSize());
+ assertEquals("Nearest", opts.getResampling());
+ assertEquals(-1, opts.getOverviewCount());
+ }
+
+ @Test
+ public void testCogOptionsBuilder() {
+ CogOptions opts =
+ CogOptions.builder()
+ .compression("LZW")
+ .compressionQuality(0.8)
+ .tileSize(512)
+ .resampling("Bilinear")
+ .overviewCount(3)
+ .build();
+
+ assertEquals("LZW", opts.getCompression());
+ assertEquals(0.8, opts.getCompressionQuality(), 0.001);
+ assertEquals(512, opts.getTileSize());
+ assertEquals("Bilinear", opts.getResampling());
+ assertEquals(3, opts.getOverviewCount());
+ }
+
+ @Test
+ public void testCogOptionsResamplingNormalization() {
+ // Case-insensitive resampling names
+ assertEquals("Nearest",
CogOptions.builder().resampling("nearest").build().getResampling());
+ assertEquals("Bilinear",
CogOptions.builder().resampling("BILINEAR").build().getResampling());
+ assertEquals("Bicubic",
CogOptions.builder().resampling("bicubic").build().getResampling());
+ }
+
+ @Test
+ public void testCogOptionsInvalidResampling() {
+ try {
+ CogOptions.builder().resampling("Lanczos").build();
+ fail("Expected IllegalArgumentException for invalid resampling");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage().contains("resampling"));
+ }
+ }
+
+ @Test
+ public void testCogOptionsInvalidTileSize() {
+ try {
+ CogOptions.builder().tileSize(300).build();
+ fail("Expected IllegalArgumentException for non-power-of-2 tileSize");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage().contains("power of 2"));
+ }
+ }
+
+ @Test
+ public void testCogOptionsInvalidOverviewCount() {
+ try {
+ CogOptions.builder().overviewCount(-2).build();
+ fail("Expected IllegalArgumentException for negative overviewCount");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage().contains("overviewCount"));
+ }
+ }
+
+ @Test
+ public void testWriteWithCogOptions() throws IOException {
+ double[] bandValues = new double[512 * 512];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = (i * 7) % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 512, 512, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ CogOptions opts =
+
CogOptions.builder().compression("LZW").compressionQuality(0.5).tileSize(256).build();
+
+ byte[] cogBytes = CogWriter.write(raster, opts);
+ assertNotNull(cogBytes);
+ assertTrue(cogBytes.length > 0);
+
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertEquals(512, readBack.getRenderedImage().getWidth());
+ assertEquals(512, readBack.getRenderedImage().getHeight());
+
+ double[] originalValues = MapAlgebra.bandAsArray(raster, 1);
+ double[] readBackValues = MapAlgebra.bandAsArray(readBack, 1);
+ assertArrayEquals(originalValues, readBackValues, 0.01);
+ }
+
+ @Test
+ public void testWriteWithBilinearResampling() throws IOException {
+ double[] bandValues = new double[512 * 512];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = (i * 3) % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 512, 512, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ CogOptions opts = CogOptions.builder().resampling("Bilinear").build();
+ byte[] cogBytes = CogWriter.write(raster, opts);
+ assertNotNull(cogBytes);
+ assertTrue(cogBytes.length > 0);
+
+ // Must be valid TIFF and readable
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertEquals(512, readBack.getRenderedImage().getWidth());
+ }
+
+ @Test
+ public void testWriteWithBicubicResampling() throws IOException {
+ double[] bandValues = new double[512 * 512];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = (i * 5) % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 512, 512, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ CogOptions opts = CogOptions.builder().resampling("Bicubic").build();
+ byte[] cogBytes = CogWriter.write(raster, opts);
+ assertNotNull(cogBytes);
+
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertEquals(512, readBack.getRenderedImage().getWidth());
+ }
+
+ @Test
+ public void testWriteWithOverviewCountZero() throws IOException {
+ double[] bandValues = new double[512 * 512];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = (i * 11) % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 512, 512, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ CogOptions opts = CogOptions.builder().overviewCount(0).build();
+ byte[] cogBytes = CogWriter.write(raster, opts);
+ assertNotNull(cogBytes);
+
+ // With overviewCount=0, there should be exactly 1 IFD (no overviews)
+ ByteOrder byteOrder = (cogBytes[0] == 'I') ? ByteOrder.LITTLE_ENDIAN :
ByteOrder.BIG_ENDIAN;
+ ByteBuffer buf = ByteBuffer.wrap(cogBytes).order(byteOrder);
+ int firstIfdOffset = buf.getInt(4);
+ int tagCount = buf.getShort(firstIfdOffset) & 0xFFFF;
+ int nextIfdOffset = buf.getInt(firstIfdOffset + 2 + tagCount * 12);
+ assertEquals("Should have no overview IFD when overviewCount=0", 0,
nextIfdOffset);
+
+ // Should still be readable
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertEquals(512, readBack.getRenderedImage().getWidth());
+ }
+
+ @Test
+ public void testWriteWithSpecificOverviewCount() throws IOException {
+ // 1024x1024 with tileSize=256 would normally produce 2 overviews (2, 4)
+ double[] bandValues = new double[1024 * 1024];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = (i * 13) % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 1024, 1024, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ // Request only 1 overview
+ CogOptions opts = CogOptions.builder().overviewCount(1).build();
+ byte[] cogBytes = CogWriter.write(raster, opts);
+ assertNotNull(cogBytes);
+
+ // Count IFDs: should have exactly 2 (full-res + 1 overview)
+ ByteOrder byteOrder = (cogBytes[0] == 'I') ? ByteOrder.LITTLE_ENDIAN :
ByteOrder.BIG_ENDIAN;
+ ByteBuffer buf = ByteBuffer.wrap(cogBytes).order(byteOrder);
+
+ int ifdCount = 0;
+ int ifdOffset = buf.getInt(4);
+ while (ifdOffset != 0) {
+ ifdCount++;
+ int tagCount = buf.getShort(ifdOffset) & 0xFFFF;
+ ifdOffset = buf.getInt(ifdOffset + 2 + tagCount * 12);
+ }
+ assertEquals("Should have exactly 2 IFDs (full-res + 1 overview)", 2,
ifdCount);
+
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertEquals(1024, readBack.getRenderedImage().getWidth());
+ }
+
+ @Test
+ public void testWriteWithTileSize512() throws IOException {
+ double[] bandValues = new double[1024 * 1024];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = (i * 17) % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 1024, 1024, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ CogOptions opts = CogOptions.builder().tileSize(512).build();
+ byte[] cogBytes = CogWriter.write(raster, opts);
+ assertNotNull(cogBytes);
+ assertTrue(cogBytes.length > 0);
+
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertEquals(1024, readBack.getRenderedImage().getWidth());
+ }
+
+ @Test
+ public void testRasterOutputsWithCogOptions() throws IOException {
+ double[] bandValues = new double[256 * 256];
+ for (int i = 0; i < bandValues.length; i++) {
+ bandValues[i] = i % 256;
+ }
+ GridCoverage2D raster =
+ RasterConstructors.makeNonEmptyRaster(
+ 1, "d", 256, 256, 0, 0, 1, -1, 0, 0, 4326, new double[][]
{bandValues});
+
+ CogOptions opts =
CogOptions.builder().compression("LZW").overviewCount(0).build();
+ byte[] cogBytes = RasterOutputs.asCloudOptimizedGeoTiff(raster, opts);
+ assertNotNull(cogBytes);
+
+ GridCoverage2D readBack = RasterConstructors.fromGeoTiff(cogBytes);
+ assertEquals(256, readBack.getRenderedImage().getWidth());
+ }
+}