QuickDraw packs image data as a series of opcodes that the application
is expected to handle, but the format does not define any particular
order in which they may appear.
Since it is infeasible to support *all* opcodes defined by the spec,
handle only the well-known blocks that carry video data and ignore any
unknown or unsupported ones.
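
For reference, the dispatch this patch introduces boils down to the
stand-alone sketch below (the names, the hard-coded opcode values and
the fixed two-byte stepping are illustrative only; a real parser also
has to consume each opcode's payload, which the patch does for the
PackBits blocks it decodes):

    #include <stdint.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Illustrative opcode values, matching the ones the patch handles. */
    #define OP_PACKBITSRECT 0x0098
    #define OP_PACKBITSRGN  0x0099
    #define OP_EOP          0x00FF

    static uint16_t read_be16(const uint8_t *p)
    {
        return (uint16_t)((p[0] << 8) | p[1]);
    }

    /* Walk the word-aligned opcode stream, act on the pixel blocks we
     * know and ignore everything else.  This sketch simply stops at the
     * first pixel block or end-of-picture marker. */
    static void scan_opcodes(const uint8_t *buf, size_t size)
    {
        size_t pos = 0;
        while (pos + 2 <= size) {
            uint16_t opcode = read_be16(buf + pos);
            pos += 2;
            switch (opcode) {
            case OP_PACKBITSRECT:
            case OP_PACKBITSRGN:
                printf("pixel block 0x%04X at offset %zu\n", opcode, pos - 2);
                return;               /* decoding would happen here */
            case OP_EOP:
                printf("end of picture\n");
                return;
            default:
                break;                /* unknown/unsupported: ignore */
            }
        }
    }

    int main(void)
    {
        /* toy stream: one unknown opcode, then a PackBitsRect, then EOP */
        const uint8_t pict[] = { 0x00, 0x1E, 0x00, 0x98, 0x00, 0xFF };
        scan_opcodes(pict, sizeof(pict));
        return 0;
    }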

Move palette loading and RLE decoding to separate functions.
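
The run-length scheme the new decode_rle() handles is plain PackBits;
a rough stand-alone illustration of that scheme (the function name and
the bounds checks are mine, not part of the patch) looks like this:

    #include <stdint.h>
    #include <stddef.h>
    #include <string.h>

    /* Unpack one PackBits-compressed buffer: a control byte with the
     * high bit set means "repeat the next byte (257 - code) times",
     * anything else means "copy (code + 1) literal bytes".
     * Returns the number of bytes written to dst. */
    static size_t packbits_unpack(const uint8_t *src, size_t src_size,
                                  uint8_t *dst, size_t dst_size)
    {
        size_t in = 0, out = 0;
        while (in < src_size && out < dst_size) {
            uint8_t code = src[in++];
            if (code & 0x80) {                  /* run */
                size_t len = 257 - code;
                if (in >= src_size || out + len > dst_size)
                    break;
                memset(dst + out, src[in++], len);
                out += len;
            } else {                            /* literal copy */
                size_t len = (size_t)code + 1;
                if (in + len > src_size || out + len > dst_size)
                    break;
                memcpy(dst + out, src + in, len);
                in  += len;
                out += len;
            }
        }
        return out;
    }

decode_rle() in the patch applies the same two rules per scanline,
after reading the 16-bit packed size that prefixes each line.
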
---
 libavcodec/qdrw.c | 217 ++++++++++++++++++++++++++++++++++++++----------------
 1 file changed, 152 insertions(+), 65 deletions(-)

diff --git a/libavcodec/qdrw.c b/libavcodec/qdrw.c
index 6595c47..8f8dea9 100644
--- a/libavcodec/qdrw.c
+++ b/libavcodec/qdrw.c
@@ -1,6 +1,7 @@
 /*
  * QuickDraw (qdrw) codec
  * Copyright (c) 2004 Konstantin Shishkov
+ * Copyright (c) 2015 Vittorio Giovara
  *
  * This file is part of Libav.
  *
@@ -22,6 +23,7 @@
 /**
  * @file
  * Apple QuickDraw codec.
+ * https://developer.apple.com/legacy/library/documentation/mac/QuickDraw/QuickDraw-461.html
  */
 
 #include "libavutil/common.h"
@@ -30,106 +32,192 @@
 #include "bytestream.h"
 #include "internal.h"
 
-static int decode_frame(AVCodecContext *avctx,
-                        void *data, int *got_frame,
-                        AVPacket *avpkt)
-{
-    AVFrame * const p      = data;
-    GetByteContext gbc;
-    uint8_t* outdata;
-    int colors;
-    int i, ret;
-    uint32_t *pal;
-
-    if ((ret = ff_get_buffer(avctx, p, 0)) < 0) {
-        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
-        return ret;
-    }
-    p->pict_type = AV_PICTURE_TYPE_I;
-    p->key_frame = 1;
-
-    outdata = p->data[0];
-
-    bytestream2_init(&gbc, avpkt->data, avpkt->size);
-
-    if (bytestream2_get_bytes_left(&gbc) < 0x68 + 4) {
-        av_log(avctx, AV_LOG_ERROR, "Frame is too small %d\n",
-               bytestream2_get_bytes_left(&gbc));
-        return AVERROR_INVALIDDATA;
-    }
+enum QuickdrawOpcodes {
+    PACKBITSRECT = 0x0098,
+    PACKBITSRGN,
 
-    /* jump to palette */
-    bytestream2_skip(&gbc, 0x68);
-    colors = bytestream2_get_be32(&gbc);
+    EOP = 0x00FF,
+};
 
-    if (colors < 0 || colors > 256) {
-        av_log(avctx, AV_LOG_ERROR, "Error color count - %i(0x%X)\n", colors, 
colors);
-        return AVERROR_INVALIDDATA;
-    }
-    if (bytestream2_get_bytes_left(&gbc) < (colors + 1) * 8) {
-        av_log(avctx, AV_LOG_ERROR, "Palette is too small %d\n",
-               bytestream2_get_bytes_left(&gbc));
-        return AVERROR_INVALIDDATA;
-    }
+static int parse_palette(AVCodecContext *avctx, GetByteContext *gbc,
+                         uint32_t *pal, int colors)
+{
+    int i;
 
-    pal = (uint32_t*)p->data[1];
     for (i = 0; i <= colors; i++) {
         uint8_t r, g, b;
-        unsigned int idx = bytestream2_get_be16(&gbc); /* color index */
+        unsigned int idx = bytestream2_get_be16(gbc); /* color index */
         if (idx > 255) {
-            av_log(avctx, AV_LOG_ERROR, "Palette index out of range: %u\n", 
idx);
-            bytestream2_skip(&gbc, 6);
+            av_log(avctx, AV_LOG_WARNING,
+                   "Palette index out of range: %u\n", idx);
+            bytestream2_skip(gbc, 6);
             continue;
         }
-        r = bytestream2_get_byte(&gbc);
-        bytestream2_skip(&gbc, 1);
-        g = bytestream2_get_byte(&gbc);
-        bytestream2_skip(&gbc, 1);
-        b = bytestream2_get_byte(&gbc);
-        bytestream2_skip(&gbc, 1);
+        r = bytestream2_get_byte(gbc);
+        bytestream2_skip(gbc, 1);
+        g = bytestream2_get_byte(gbc);
+        bytestream2_skip(gbc, 1);
+        b = bytestream2_get_byte(gbc);
+        bytestream2_skip(gbc, 1);
         pal[idx] = (r << 16) | (g << 8) | b;
     }
-    p->palette_has_changed = 1;
+    return 0;
+}
 
-    /* skip unneeded data */
-    bytestream2_skip(&gbc, 18);
+static int decode_rle(AVCodecContext *avctx, AVFrame *p, GetByteContext *gbc)
+{
+    int i;
+    uint8_t *outdata = p->data[0];
 
     for (i = 0; i < avctx->height; i++) {
         int size, left, code, pix;
         uint8_t *out = outdata;
 
         /* size of packed line */
-        size = left = bytestream2_get_be16(&gbc);
-        if (bytestream2_get_bytes_left(&gbc) < size)
+        size = left = bytestream2_get_be16(gbc);
+        if (bytestream2_get_bytes_left(gbc) < size)
             return AVERROR_INVALIDDATA;
 
         /* decode line */
         while (left > 0) {
-            code = bytestream2_get_byte(&gbc);
+            code = bytestream2_get_byte(gbc);
             if (code & 0x80 ) { /* run */
-                pix = bytestream2_get_byte(&gbc);
+                pix = bytestream2_get_byte(gbc);
                 memset(out, pix, 257 - code);
                 out   += 257 - code;
                 left  -= 2;
             } else { /* copy */
-                bytestream2_get_buffer(&gbc, out, code + 1);
+                bytestream2_get_buffer(gbc, out, code + 1);
                 out   += code + 1;
                 left  -= 2 + code;
             }
         }
         outdata += p->linesize[0];
     }
-
-    *got_frame      = 1;
-
-    return avpkt->size;
+    return 0;
 }
 
-static av_cold int decode_init(AVCodecContext *avctx)
+static int decode_frame(AVCodecContext *avctx,
+                        void *data, int *got_frame,
+                        AVPacket *avpkt)
 {
-    avctx->pix_fmt= AV_PIX_FMT_PAL8;
+    AVFrame * const p      = data;
+    GetByteContext gbc;
+    int colors;
+    int ret;
 
-    return 0;
+    bytestream2_init(&gbc, avpkt->data, avpkt->size);
+
+    /* smallest PICT header */
+    if (bytestream2_get_bytes_left(&gbc) < 40) {
+        av_log(avctx, AV_LOG_ERROR, "Frame is too small %d\n",
+               bytestream2_get_bytes_left(&gbc));
+        return AVERROR_INVALIDDATA;
+    }
+
+    bytestream2_skip(&gbc, 6);
+    avctx->height = bytestream2_get_be16(&gbc);
+    avctx->width  = bytestream2_get_be16(&gbc);
+
+    /* version 1 is identified by 0x1101
+     * it uses byte-aligned opcodes rather than word-aligned */
+    if (bytestream2_get_be32(&gbc) != 0x001102FF) {
+        avpriv_request_sample(avctx, "QuickDraw version 1");
+        return AVERROR_PATCHWELCOME;
+    }
+
+    bytestream2_skip(&gbc, 26);
+
+    while (bytestream2_get_bytes_left(&gbc) >= 4) {
+        int bppcnt, bpp;
+        int opcode = bytestream2_get_be16(&gbc);
+
+        switch(opcode) {
+        case PACKBITSRECT:
+        case PACKBITSRGN:
+            av_log(avctx, AV_LOG_DEBUG, "Parsing Packbit opcode\n");
+
+            bytestream2_skip(&gbc, 30);
+            bppcnt = bytestream2_get_be16(&gbc); /* cmpCount */
+            bpp    = bytestream2_get_be16(&gbc); /* cmpSize */
+
+            av_log(avctx, AV_LOG_DEBUG, "bppcount %d bpp %d\n", bppcnt, bpp);
+            if (bppcnt == 1 && bpp == 8) {
+                avctx->pix_fmt = AV_PIX_FMT_PAL8;
+            } else {
+                av_log(avctx, AV_LOG_ERROR,
+                       "Invalid pixel format (bppcnt %d bpp %d) in Packbit\n",
+                       bppcnt, bpp);
+                return AVERROR_INVALIDDATA;
+            }
+
+            /* jump to palette */
+            bytestream2_skip(&gbc, 18);
+            colors = bytestream2_get_be16(&gbc);
+
+            if (colors < 0 || colors > 256) {
+                av_log(avctx, AV_LOG_ERROR,
+                       "Error color count - %i(0x%X)\n", colors, colors);
+                return AVERROR_INVALIDDATA;
+            }
+            if (bytestream2_get_bytes_left(&gbc) < (colors + 1) * 8) {
+                av_log(avctx, AV_LOG_ERROR, "Palette is too small %d\n",
+                       bytestream2_get_bytes_left(&gbc));
+                return AVERROR_INVALIDDATA;
+            }
+            if ((ret = ff_get_buffer(avctx, p, 0)) < 0) {
+                av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
+                return ret;
+            }
+
+            parse_palette(avctx, &gbc, (uint32_t *)p->data[1], colors);
+            p->palette_has_changed = 1;
+
+            /* jump to image data */
+            bytestream2_skip(&gbc, 18);
+
+            if (opcode == PACKBITSRGN) {
+                bytestream2_skip(&gbc, 2 + 8); /* size + rect */
+                avpriv_report_missing_feature(avctx, "Packbit mask region");
+            }
+
+            ret = decode_rle(avctx, p, &gbc);
+            if (ret < 0)
+                return ret;
+            *got_frame = 1;
+            break;
+        default:
+            av_log(avctx, AV_LOG_TRACE, "Unknown 0x%04X opcode\n", opcode);
+            break;
+        }
+        /* exit the loop when a known pixel block has been found */
+        if (*got_frame) {
+            int eop, trail;
+
+            /* re-align to a word */
+            bytestream2_skip(&gbc, bytestream2_get_bytes_left(&gbc) % 2);
+
+            eop = bytestream2_get_be16(&gbc);
+            trail = bytestream2_get_bytes_left(&gbc);
+            if (eop != EOP)
+                av_log(avctx, AV_LOG_WARNING,
+                       "Missing end of picture opcode (found 0x%04X)\n", eop);
+            if (trail)
+                av_log(avctx, AV_LOG_WARNING, "Got %d trailing bytes\n", 
trail);
+            break;
+        }
+    }
+
+    if (*got_frame) {
+        p->pict_type = AV_PICTURE_TYPE_I;
+        p->key_frame = 1;
+
+        return avpkt->size;
+    } else {
+        av_log(avctx, AV_LOG_ERROR, "Frame contained no usable data\n");
+
+        return AVERROR_INVALIDDATA;
+    }
 }
 
 AVCodec ff_qdraw_decoder = {
@@ -137,7 +225,6 @@ AVCodec ff_qdraw_decoder = {
     .long_name      = NULL_IF_CONFIG_SMALL("Apple QuickDraw"),
     .type           = AVMEDIA_TYPE_VIDEO,
     .id             = AV_CODEC_ID_QDRAW,
-    .init           = decode_init,
     .decode         = decode_frame,
     .capabilities   = CODEC_CAP_DR1,
 };
-- 
1.9.5 (Apple Git-50.3)
