Hello community,

here is the log from the commit of package ghc-conduit-extra for openSUSE:Factory checked in at 2016-02-23 16:57:38
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/ghc-conduit-extra (Old)
 and      /work/SRC/openSUSE:Factory/.ghc-conduit-extra.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "ghc-conduit-extra"

Changes:
--------
--- /work/SRC/openSUSE:Factory/ghc-conduit-extra/ghc-conduit-extra.changes     2015-12-09 22:16:59.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.ghc-conduit-extra.new/ghc-conduit-extra.changes        2016-02-23 16:59:30.000000000 +0100
@@ -1,0 +2,11 @@
+Wed Feb 17 09:13:57 UTC 2016 - [email protected]
+
+- update to 1.1.10.1
+* new `multiple` combinator for Data.Conduit.Zlib
+
+-------------------------------------------------------------------
+Mon Feb 15 19:54:55 UTC 2016 - [email protected]
+
+- update to 1.1.9.3
+
+-------------------------------------------------------------------

Old:
----
  conduit-extra-1.1.9.2.tar.gz

New:
----
  conduit-extra-1.1.10.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ ghc-conduit-extra.spec ++++++
--- /var/tmp/diff_new_pack.zhUXtA/_old  2016-02-23 16:59:30.000000000 +0100
+++ /var/tmp/diff_new_pack.zhUXtA/_new  2016-02-23 16:59:30.000000000 +0100
@@ -21,7 +21,7 @@
 %bcond_with tests
 
 Name:           ghc-conduit-extra
-Version:        1.1.9.2
+Version:        1.1.10.1
 Release:        0
 Summary:        Batteries included conduit: adapters for common libraries
 License:        MIT

++++++ conduit-extra-1.1.9.2.tar.gz -> conduit-extra-1.1.10.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/conduit-extra-1.1.9.2/ChangeLog.md new/conduit-extra-1.1.10.1/ChangeLog.md
--- old/conduit-extra-1.1.9.2/ChangeLog.md      2015-11-30 07:33:58.000000000 +0100
+++ new/conduit-extra-1.1.10.1/ChangeLog.md     2016-02-17 12:57:07.000000000 +0100
@@ -1,3 +1,15 @@
+## 1.1.10.1
+
+* Fix a leftovers bug in helperDecompress #254
+
+## 1.1.10
+
+* `multiple` combinator for `Data.Conduit.Zlib` [#254](https://github.com/snoyberg/conduit/issues/254)
+
+## 1.1.9.3
+
+* Some typo fixes in docs
+
 ## 1.1.9
 
 * detectUtf [#217](https://github.com/snoyberg/conduit/pull/217)
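
The key functional change in this release series is the helperDecompress leftovers fix noted above: once the compressed stream ends, any trailing input bytes are handed back to the pipeline as leftovers. As orientation, here is a minimal standalone sketch of that behaviour; it is not part of the diff, the literal strings are illustrative assumptions, and it simply mirrors the "single compressed, multiple uncompressed chunks" test added further down:

    {-# LANGUAGE OverloadedStrings #-}
    import qualified Data.ByteString   as S
    import           Data.Conduit      (($$), ($=), (=$), yield)
    import qualified Data.Conduit.List as CL
    import qualified Data.Conduit.Zlib as CZ

    -- Bytes that follow the end of the compressed stream are pushed back
    -- upstream as leftovers, so a later consumer in the pipeline sees them.
    main :: IO ()
    main = do
        -- gzip-compress "hello" in memory
        gz <- fmap S.concat $ yield "hello" $= CZ.gzip $$ CL.consume
        result <- (yield (S.append gz "trailing") >> yield "more") $$ do
            x <- fmap S.concat $ CZ.ungzip =$ CL.consume  -- decompresses "hello"
            y <- CL.consume                               -- receives the leftover bytes
            return (x, y)
        print result  -- expected: ("hello",["trailing","more"])
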
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/conduit-extra-1.1.9.2/Data/Conduit/Binary.hs new/conduit-extra-1.1.10.1/Data/Conduit/Binary.hs
--- old/conduit-extra-1.1.9.2/Data/Conduit/Binary.hs    2015-11-30 07:33:58.000000000 +0100
+++ new/conduit-extra-1.1.10.1/Data/Conduit/Binary.hs   2016-02-17 12:57:07.000000000 +0100
@@ -129,7 +129,7 @@
 -- | An alternative to 'sourceHandle'.
 -- Instead of taking a pre-opened 'IO.Handle', it takes an action that opens
 -- a 'IO.Handle' (in read mode), so that it can open it only when needed
--- and closed it as soon as possible.
+-- and close it as soon as possible.
 --
 -- Since 0.3.0
 sourceIOHandle :: MonadResource m
@@ -250,7 +250,7 @@
 conduitHandle :: MonadIO m => IO.Handle -> Conduit S.ByteString m S.ByteString
 conduitHandle h = awaitForever $ \bs -> liftIO (S.hPut h bs) >> yield bs
 
--- | Ensure that only up to the given number of bytes are consume by the inner
+-- | Ensure that only up to the given number of bytes are consumed by the inner
 -- sink. Note that this does /not/ ensure that all of those bytes are in fact
 -- consumed.
 --
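
Both Binary.hs hunks above are documentation-only wording fixes (the sourceIOHandle Haddock and the byte-limiting Haddock). For orientation, a minimal usage sketch of sourceIOHandle as described by that documentation could look as follows; it is not part of the diff, and the file name input.bin is an illustrative assumption:

    import           Control.Monad.Trans.Resource (runResourceT)
    import           Data.Conduit                 (($$))
    import qualified Data.Conduit.Binary          as CB
    import qualified Data.Conduit.List            as CL
    import qualified System.IO                    as IO

    -- The action passed to sourceIOHandle opens the Handle only when the
    -- source is actually run, and the handle is closed as soon as possible.
    main :: IO ()
    main = do
        chunks <- runResourceT $
            CB.sourceIOHandle (IO.openFile "input.bin" IO.ReadMode) $$ CL.consume
        print (length chunks)
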
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/conduit-extra-1.1.9.2/Data/Conduit/Zlib.hs new/conduit-extra-1.1.10.1/Data/Conduit/Zlib.hs
--- old/conduit-extra-1.1.9.2/Data/Conduit/Zlib.hs      2015-11-30 07:33:58.000000000 +0100
+++ new/conduit-extra-1.1.10.1/Data/Conduit/Zlib.hs     2016-02-17 12:57:07.000000000 +0100
@@ -8,6 +8,8 @@
     compress, decompress, gzip, ungzip,
     -- * Flushing
     compressFlush, decompressFlush,
+    -- * Decompression combinators
+    multiple,
     -- * Re-exported from zlib-bindings
     WindowBits (..), defaultWindowBits
 ) where
@@ -21,6 +23,7 @@
 import Control.Monad.Primitive (PrimMonad, unsafePrimToPrim)
 import Control.Monad.Base (MonadBase, liftBase)
 import Control.Monad.Trans.Resource (MonadThrow, monadThrow)
+import Data.Function (fix)
 
 -- | Gzip compression with default parameters.
 gzip :: (MonadThrow m, MonadBase base m, PrimMonad base) => Conduit ByteString m ByteString
@@ -61,39 +64,78 @@
                  -> (ByteString -> t m ())
                  -> WindowBits
                  -> t m ()
-helperDecompress await' yield' leftover' config =
-    await' >>= maybe (return ()) start
-  where
-    start input = do
-        inf <- lift $ unsafeLiftIO $ initInflate config
-        push inf input
-
-        rem' <- lift $ unsafeLiftIO $ getUnusedInflate inf
-        unless (S.null rem') $ leftover' rem'
-
-    continue inf = await' >>= maybe (close inf) (push inf)
-
-    goPopper popper = do
-        mbs <- lift $ unsafeLiftIO popper
-        case mbs of
-            PRDone -> return ()
-            PRNext bs -> yield' (Chunk bs) >> goPopper popper
-            PRError e -> lift $ monadThrow e
-
-    push inf (Chunk x) = do
-        popper <- lift $ unsafeLiftIO $ feedInflate inf x
-        goPopper popper
-        continue inf
-
-    push inf Flush = do
-        chunk <- lift $ unsafeLiftIO $ flushInflate inf
-        unless (S.null chunk) $ yield' $ Chunk chunk
-        yield' Flush
-        continue inf
-
-    close inf = do
-        chunk <- lift $ unsafeLiftIO $ finishInflate inf
-        unless (S.null chunk) $ yield' $ Chunk chunk
+helperDecompress await' yield' leftover' config = do
+    -- Initialize the stateful inflater, which will be used below
+    -- This inflater is never exposed outside of this function
+    inf <- lift $ unsafeLiftIO $ initInflate config
+
+    -- Some helper functions used by the main feeder loop below
+
+    let -- Flush any remaining inflated bytes downstream
+        flush = do
+            chunk <- lift $ unsafeLiftIO $ flushInflate inf
+            unless (S.null chunk) $ yield' $ Chunk chunk
+
+        -- Get any input which is unused by the inflater
+        getUnused = lift $ unsafeLiftIO $ getUnusedInflate inf
+
+        -- If there is any unused data, return it as leftovers to the stream
+        unused = do
+            rem' <- getUnused
+            unless (S.null rem') $ leftover' rem'
+
+    -- Main loop: feed data from upstream into the inflater
+    fix $ \feeder -> do
+        mnext <- await'
+        case mnext of
+            -- No more data is available from upstream
+            Nothing -> do
+                -- Flush any remaining uncompressed data
+                flush
+                -- Return the rest of the unconsumed data as leftovers
+                unused
+            -- Another chunk of compressed data arrived
+            Just (Chunk x) -> do
+                -- Feed the compressed data into the inflater, returning a
+                -- "popper" which will return chunks of decompressed data
+                popper <- lift $ unsafeLiftIO $ feedInflate inf x
+
+                -- Loop over the popper grabbing decompressed chunks and
+                -- yielding them downstream
+                fix $ \pop -> do
+                    mbs <- lift $ unsafeLiftIO popper
+                    case mbs of
+                        -- No more data from this popper
+                        PRDone -> do
+                            rem' <- getUnused
+                            if S.null rem'
+                                -- No data was unused by the inflater, so let's
+                                -- fill it up again and get more data out of it
+                                then feeder
+                                -- In this case, there is some unconsumed data,
+                                -- meaning the compressed stream is complete.
+                                -- At this point, we need to stop feeding,
+                                -- return the unconsumed data as leftovers, and
+                                -- flush any remaining content (which should be
+                                -- nothing)
+                                else do
+                                    flush
+                                    leftover' rem'
+                        -- Another chunk available, yield it downstream and
+                        -- loop again
+                        PRNext bs -> do
+                            yield' (Chunk bs)
+                            pop
+                        -- An error occurred inside zlib, throw it
+                        PRError e -> lift $ monadThrow e
+            -- We've been asked to flush the stream
+            Just Flush -> do
+                -- Get any uncompressed data waiting for us
+                flush
+                -- Put a Flush in the stream
+                yield' Flush
+                -- Feed in more data
+                feeder
 
 -- |
 -- Compress (deflate) a stream of 'ByteString's. The 'WindowBits' also control
@@ -160,3 +202,37 @@
             PRDone -> return ()
             PRNext chunk -> yield' (Chunk chunk) >> close def
             PRError e -> lift $ monadThrow e
+
+-- | The standard 'decompress' and 'ungzip' functions will only decompress a
+-- single compressed entity from the stream. This combinator will exhaust the
+-- stream completely of all individual compressed entities. This is useful for
+-- cases where you have a concatenated archive, e.g. @cat file1.gz file2.gz >
+-- combined.gz@.
+--
+-- Usage:
+--
+-- > sourceFile "combined.gz" $$ multiple ungzip =$ consume
+--
+-- This combinator will not fail on an empty stream. If you want to ensure that
+-- at least one compressed entity in the stream exists, consider a usage such
+-- as:
+--
+-- > sourceFile "combined.gz" $$ (ungzip >> multiple ungzip) =$ consume
+--
+-- @since 1.1.10
+multiple :: Monad m
+         => Conduit ByteString m a
+         -> Conduit ByteString m a
+multiple inner =
+    loop
+  where
+    loop = do
+        mbs <- await
+        case mbs of
+            Nothing -> return ()
+            Just bs
+                | S.null bs -> loop
+                | otherwise -> do
+                    leftover bs
+                    inner
+                    loop
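
To make the Haddock usage line above concrete, here is a small self-contained sketch of draining a concatenated archive with the new combinator. It is not part of the diff; the file name combined.gz is only an illustrative assumption, and it simply expands the documented sourceFile "combined.gz" $$ multiple ungzip =$ consume pipeline into a runnable program:

    import           Control.Monad.Trans.Resource (runResourceT)
    import qualified Data.ByteString              as S
    import           Data.Conduit                 (($$), (=$))
    import qualified Data.Conduit.Binary          as CB
    import qualified Data.Conduit.List            as CL
    import qualified Data.Conduit.Zlib            as CZ

    -- Decompress every gzip member in a concatenated archive, e.g. one
    -- built with: cat file1.gz file2.gz > combined.gz
    main :: IO ()
    main = do
        chunks <- runResourceT $
            CB.sourceFile "combined.gz" $$ CZ.multiple CZ.ungzip =$ CL.consume
        S.putStr (S.concat chunks)

As the Haddock notes, this succeeds on an empty stream; if at least one compressed entity must be present, use (ungzip >> multiple ungzip) in place of multiple ungzip.
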
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/conduit-extra-1.1.9.2/conduit-extra.cabal new/conduit-extra-1.1.10.1/conduit-extra.cabal
--- old/conduit-extra-1.1.9.2/conduit-extra.cabal       2015-11-30 07:33:58.000000000 +0100
+++ new/conduit-extra-1.1.10.1/conduit-extra.cabal      2016-02-17 12:57:07.000000000 +0100
@@ -1,5 +1,5 @@
 Name:                conduit-extra
-Version:             1.1.9.2
+Version:             1.1.10.1
 Synopsis:            Batteries included conduit: adapters for common libraries.
 Description:
    The conduit package itself maintains relatively small dependencies. The purpose of this package is to collect commonly used utility functions wrapping other library dependencies, without depending on heavier-weight dependencies. The basic idea is that this package should only depend on haskell-platform packages and conduit.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/conduit-extra-1.1.9.2/test/Data/Conduit/ZlibSpec.hs new/conduit-extra-1.1.10.1/test/Data/Conduit/ZlibSpec.hs
--- old/conduit-extra-1.1.9.2/test/Data/Conduit/ZlibSpec.hs     2015-11-30 07:33:58.000000000 +0100
+++ new/conduit-extra-1.1.10.1/test/Data/Conduit/ZlibSpec.hs    2016-02-17 12:57:07.000000000 +0100
@@ -20,6 +20,7 @@
 import Control.Monad.Trans.Class
 import Control.Monad.Catch.Pure
 import Control.Monad.Base
+import Control.Monad (replicateM_)
 
 instance MonadBase base m => MonadBase base (CatchT m) where
     liftBase = lift . liftBase
@@ -32,15 +33,14 @@
                 src = mconcat $ map (CL.sourceList . return) bss
             outBss <- runExceptionT_ $ src C.$= CZ.gzip C.$= CZ.ungzip C.$$ CL.consume
             return $ lbs == L.fromChunks outBss
-        prop "flush" $ \bss' -> runST $ do
+        prop "flush" $ \bss' -> do
             let bss = map S.pack $ filter (not . null) bss'
                 bssC = concatMap (\bs -> [C.Chunk bs, C.Flush]) bss
                 src = mconcat $ map (CL.sourceList . return) bssC
-            outBssC <- runExceptionT_
-                     $ src C.$= CZ.compressFlush 5 (CZ.WindowBits 31)
+            outBssC <- src C.$= CZ.compressFlush 5 (CZ.WindowBits 31)
                            C.$= CZ.decompressFlush (CZ.WindowBits 31)
                            C.$$ CL.consume
-            return $ bssC == outBssC
+            outBssC `shouldBe` bssC
         it "compressFlush large data" $ do
             let content = L.pack $ map (fromIntegral . fromEnum) $ concat $ ["BEGIN"] ++ map show [1..100000 :: Int] ++ ["END"]
                 src = CL.sourceList $ map C.Chunk $ L.toChunks content
@@ -64,3 +64,35 @@
                 return (S.concat c', S.concat u')
             c' `shouldBe` c
             u' `shouldBe` u
+
+        it "multiple compressed values" $ do
+            let s1 = "hello"
+                s2 = "world"
+                src = do
+                    C.yield s1 C.$= CZ.gzip
+                    C.yield s2 C.$= CZ.gzip
+            actual <- src C.$$ CZ.multiple CZ.ungzip C.=$ CL.consume
+            S.concat actual `shouldBe` S.concat [s1, s2]
+
+        it "single compressed, multiple uncompressed chunks" $ do
+            let s1 = "hello"
+                s2 = "there"
+                s3 = "world"
+            s1Z <- fmap S.concat $ C.yield s1 C.$= CZ.gzip C.$$ CL.consume
+            let src = do
+                    C.yield $ S.append s1Z s2
+                    C.yield s3
+            actual <- src C.$$ do
+                x <- fmap S.concat $ CZ.ungzip C.=$ CL.consume
+                y <- CL.consume
+                return (x, y)
+            actual `shouldBe` (s1, [s2, s3])
+
+        it "multiple, over 32k" $ do
+            let str = "One line"
+                cnt = 30000
+                src = replicateM_ cnt $ C.yield str C.$= CZ.gzip
+            actual <- fmap S.concat $ src C.$$ CZ.multiple CZ.ungzip C.=$ CL.consume
+            let expected = S.concat (replicate cnt str)
+            S.length actual `shouldBe` S.length expected
+            actual `shouldBe` expected
