Hello community,

here is the log from the commit of package alex for openSUSE:Factory, checked in
at 2014-11-26 20:54:31.
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/alex (Old)
 and      /work/SRC/openSUSE:Factory/.alex.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "alex"

Changes:
--------
--- /work/SRC/openSUSE:Factory/alex/alex.changes        2014-08-25 
11:05:38.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.alex.new/alex.changes   2014-11-26 
20:54:32.000000000 +0100
@@ -1,0 +2,13 @@
+Mon Sep  8 20:08:49 UTC 2014 - [email protected]
+
+- update to 3.1.3
+* fix ghc 7.8 builds 
+* needed by Haskell Platform 2014.2.0.0
+
+-------------------------------------------------------------------
+Tue Sep  2 01:17:20 UTC 2014 - [email protected]
+
+- package examples 
+- cleanup spec file
+
+-------------------------------------------------------------------

Old:
----
  alex-3.0.5.tar.gz

New:
----
  alex-3.1.3.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ alex.spec ++++++
--- /var/tmp/diff_new_pack.R2hjeR/_old  2014-11-26 20:54:33.000000000 +0100
+++ /var/tmp/diff_new_pack.R2hjeR/_new  2014-11-26 20:54:33.000000000 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package alex
 #
-# Copyright (c) 2013 SUSE LINUX Products GmbH, Nuernberg, Germany.
+# Copyright (c) 2014 SUSE LINUX Products GmbH, Nuernberg, Germany.
 # Copyright (c) 2012 Peter Trommler [email protected]
 #
 # All modifications and additions to the file contributed by third parties
@@ -17,11 +17,8 @@
 #
 
 
-# Please submit bugfixes or comments via http://bugs.opensuse.org/
-#
-
 Name:           alex
-Version:        3.0.5
+Version:        3.1.3
 Release:        0
 Summary:        Tool for generating lexical analysers in Haskell
 License:        BSD-3-Clause
@@ -58,6 +55,7 @@
 %files
 %defattr(-,root,root,-)
 %doc LICENSE ANNOUNCE README TODO
+%doc examples
 %attr(755,root,root) %{_bindir}/%{name}
 %{_datadir}/%{name}-%{version}
 %{_mandir}/man1/*

++++++ alex-3.0.5.tar.gz -> alex-3.1.3.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/Setup.lhs new/alex-3.1.3/Setup.lhs
--- old/alex-3.0.5/Setup.lhs    2013-03-10 13:19:13.000000000 +0100
+++ new/alex-3.1.3/Setup.lhs    2013-11-28 09:35:19.000000000 +0100
@@ -13,6 +13,7 @@
 import System.FilePath ((</>))
 import Control.Exception ( IOException, try )
 import System.Directory (removeFile)
+import Data.Char
 
 main :: IO ()
 main = defaultMainWithHooks simpleUserHooks{ postBuild = myPostBuild,
@@ -23,17 +24,20 @@
 -- hack to turn cpp-style '# 27 "GenericTemplate.hs"' into 
 -- '{-# LINE 27 "GenericTemplate.hs" #-}'.
 mungeLinePragma line = case symbols line of
- ["#", number, string] | length string >= 2
-                      && head string == '"'
-                      && last string == '"'
-   -> case reads number of
-        [(n, "")] -> "{-# LINE " ++ show (n :: Int) ++ " " ++ string ++ " #-}"
-        _         -> line
+ syms | Just prag <- getLinePrag syms  -> prag
  -- Also convert old-style CVS lines, no idea why we do this...
  ("--":"$":"Id":":":_) -> filter (/='$') line
  (     "$":"Id":":":_) -> filter (/='$') line
  _ -> line
 
+getLinePrag :: [String] -> Maybe String
+getLinePrag ("#" : n : string : rest)
+  | length rest <= 1   -- clang puts an extra field
+  , length string >= 2 && head string == '"' && last string == '"'
+  , all isDigit n
+  = Just $ "{-# LINE " ++ n ++ " " ++ string ++ " #-}"
+getLinePrag other = Nothing
+
 symbols :: String -> [String]
 symbols cs = case lex cs of
               (sym, cs'):_ | not (null sym) -> sym : symbols cs'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/alex.cabal new/alex-3.1.3/alex.cabal
--- old/alex-3.0.5/alex.cabal   2013-03-10 13:19:13.000000000 +0100
+++ new/alex-3.1.3/alex.cabal   2013-11-28 09:35:19.000000000 +0100
@@ -1,19 +1,37 @@
 name: alex
-version: 3.0.5
+version: 3.1.3
 license: BSD3
 license-file: LICENSE
 copyright: (c) Chis Dornan, Simon Marlow
 author: Chris Dornan and Simon Marlow
 maintainer: Simon Marlow <[email protected]>
-bug-reports: mailto:[email protected]
+bug-reports: https://github.com/simonmar/alex/issues
 stability: stable
 homepage: http://www.haskell.org/alex/
 synopsis: Alex is a tool for generating lexical analysers in Haskell
-description: Alex is a tool for generating lexical analysers in Haskell.
-             It takes a description of tokens based on regular
-             expressions and generates a Haskell module containing code
-             for scanning text efficiently. It is similar to the tool
-             lex or flex for C/C++.
+description:
+  Alex is a tool for generating lexical analysers in Haskell.
+  It takes a description of tokens based on regular
+  expressions and generates a Haskell module containing code
+  for scanning text efficiently. It is similar to the tool
+  lex or flex for C/C++.
+  .
+  Changes in 3.1.3:
+  .
+  * Fix for clang (XCode 5)
+  .
+  Changes in 3.1.2:
+  .
+  * Add missing file to extra-source-files
+  .
+  Changes in 3.1.1:
+  .
+  * Bug fixes (#24, #30, #31, #32)
+  .
+  Changes in 3.1.0:
+  .
+  * necessary changes to work with GHC 7.8.1
+
 category: Development
 cabal-version: >= 1.8
 build-type: Custom
@@ -51,13 +69,17 @@
        templates/GenericTemplate.hs
         templates/wrappers.hs
        tests/Makefile
-       tests/simple.x
-       tests/tokens.x
+        tests/simple.x
+        tests/null.x
+        tests/tokens.x
        tests/tokens_gscan.x
        tests/tokens_posn.x
         tests/tokens_bytestring.x
         tests/tokens_posn_bytestring.x
         tests/tokens_strict_bytestring.x
+        tests/tokens_monad_bytestring.x
+        tests/tokens_monadUserState_bytestring.x
+        tests/tokens_bytestring_unicode.x
         tests/unicode.x
 
 source-repository head
@@ -107,4 +129,4 @@
 test-suite tests
   type: exitcode-stdio-1.0
   main-is: test.hs
-  build-depends: process
+  build-depends: base, process
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/dist/build/alex/alex-tmp/Parser.hs 
new/alex-3.1.3/dist/build/alex/alex-tmp/Parser.hs
--- old/alex-3.0.5/dist/build/alex/alex-tmp/Parser.hs   2013-03-10 
13:19:12.000000000 +0100
+++ new/alex-3.1.3/dist/build/alex/alex-tmp/Parser.hs   2013-11-28 
09:35:19.000000000 +0100
@@ -21,7 +21,7 @@
 import qualified Data.Array as Happy_Data_Array
 import qualified GHC.Exts as Happy_GHC_Exts
 
--- parser produced by Happy Version 1.18.9
+-- parser produced by Happy Version 1.19.0
 
 newtype HappyAbsSyn  = HappyAbsSyn HappyAny
 #if __GLASGOW_HASKELL__ >= 607
@@ -874,7 +874,22 @@
 {-# LINE 1 "templates/GenericTemplate.hs" #-}
 -- Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp 
 
-{-# LINE 30 "templates/GenericTemplate.hs" #-}
+{-# LINE 13 "templates/GenericTemplate.hs" #-}
+
+
+
+
+
+#if __GLASGOW_HASKELL__ > 706
+#define LT(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.<# m)) :: Bool)
+#define GTE(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.>=# m)) :: Bool)
+#define EQ(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.==# m)) :: Bool)
+#else
+#define LT(n,m) (n Happy_GHC_Exts.<# m)
+#define GTE(n,m) (n Happy_GHC_Exts.>=# m)
+#define EQ(n,m) (n Happy_GHC_Exts.==# m)
+#endif
+{-# LINE 45 "templates/GenericTemplate.hs" #-}
 
 
 data Happy_IntList = HappyCons Happy_GHC_Exts.Int# Happy_IntList
@@ -883,11 +898,11 @@
 
 
 
-{-# LINE 51 "templates/GenericTemplate.hs" #-}
+{-# LINE 66 "templates/GenericTemplate.hs" #-}
 
-{-# LINE 61 "templates/GenericTemplate.hs" #-}
+{-# LINE 76 "templates/GenericTemplate.hs" #-}
 
-{-# LINE 70 "templates/GenericTemplate.hs" #-}
+{-# LINE 85 "templates/GenericTemplate.hs" #-}
 
 infixr 9 `HappyStk`
 data HappyStk a = HappyStk a (HappyStk a)
@@ -922,7 +937,7 @@
                                     happyFail i tk st
                -1#       -> {- nothing -}
                                     happyAccept i tk st
-               n | (n Happy_GHC_Exts.<# (0# :: Happy_GHC_Exts.Int#)) -> {- 
nothing -}
+               n | LT(n,(0# :: Happy_GHC_Exts.Int#)) -> {- nothing -}
 
                                     (happyReduceArr Happy_Data_Array.! rule) i 
tk st
                                     where rule = (Happy_GHC_Exts.I# 
((Happy_GHC_Exts.negateInt# ((n Happy_GHC_Exts.+# (1# :: 
Happy_GHC_Exts.Int#))))))
@@ -930,18 +945,16 @@
 
 
                                     happyShift new_state i tk st
-                                    where (new_state) = (n Happy_GHC_Exts.-# 
(1# :: Happy_GHC_Exts.Int#))
-   where (off)    = indexShortOffAddr happyActOffsets st
-         (off_i)  = (off Happy_GHC_Exts.+# i)
-        check  = if (off_i Happy_GHC_Exts.>=# (0# :: Happy_GHC_Exts.Int#))
-                       then (indexShortOffAddr happyCheck off_i 
Happy_GHC_Exts.==#  i)
-                       else False
-         (action)
+                                     where new_state = (n Happy_GHC_Exts.-# 
(1# :: Happy_GHC_Exts.Int#))
+   where off    = indexShortOffAddr happyActOffsets st
+         off_i  = (off Happy_GHC_Exts.+# i)
+        check  = if GTE(off_i,(0# :: Happy_GHC_Exts.Int#))
+                  then EQ(indexShortOffAddr happyCheck off_i, i)
+                 else False
+         action
           | check     = indexShortOffAddr happyTable off_i
           | otherwise = indexShortOffAddr happyDefActions st
 
-{-# LINE 130 "templates/GenericTemplate.hs" #-}
-
 
 indexShortOffAddr (HappyA# arr) off =
        Happy_GHC_Exts.narrow16Int# i
@@ -963,13 +976,13 @@
 -----------------------------------------------------------------------------
 -- HappyState data type (not arrays)
 
-{-# LINE 163 "templates/GenericTemplate.hs" #-}
+{-# LINE 169 "templates/GenericTemplate.hs" #-}
 
 -----------------------------------------------------------------------------
 -- Shifting a token
 
 happyShift new_state 0# tk st sts stk@(x `HappyStk` _) =
-     let (i) = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# 
(i)) -> i }) in
+     let i = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# (i)) 
-> i }) in
 --     trace "shifting the error token" $
      happyDoAction i tk new_state (HappyCons (st) (sts)) (stk)
 
@@ -1012,23 +1025,26 @@
 happyMonadReduce k nt fn 0# tk st sts stk
      = happyFail 0# tk st sts stk
 happyMonadReduce k nt fn j tk st sts stk =
-        happyThen1 (fn stk tk) (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` 
drop_stk))
-       where (sts1@((HappyCons (st1@(action)) (_)))) = happyDrop k (HappyCons 
(st) (sts))
-             drop_stk = happyDropStk k stk
+      case happyDrop k (HappyCons (st) (sts)) of
+        sts1@((HappyCons (st1@(action)) (_))) ->
+          let drop_stk = happyDropStk k stk in
+          happyThen1 (fn stk tk) (\r -> happyGoto nt j tk st1 sts1 (r 
`HappyStk` drop_stk))
 
 happyMonad2Reduce k nt fn 0# tk st sts stk
      = happyFail 0# tk st sts stk
 happyMonad2Reduce k nt fn j tk st sts stk =
-       happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r 
`HappyStk` drop_stk))
-       where (sts1@((HappyCons (st1@(action)) (_)))) = happyDrop k (HappyCons 
(st) (sts))
-             drop_stk = happyDropStk k stk
-
-             (off) = indexShortOffAddr happyGotoOffsets st1
-             (off_i) = (off Happy_GHC_Exts.+# nt)
-             (new_state) = indexShortOffAddr happyTable off_i
+      case happyDrop k (HappyCons (st) (sts)) of
+        sts1@((HappyCons (st1@(action)) (_))) ->
+         let drop_stk = happyDropStk k stk
+
+             off = indexShortOffAddr happyGotoOffsets st1
+             off_i = (off Happy_GHC_Exts.+# nt)
+             new_state = indexShortOffAddr happyTable off_i
 
 
 
+          in
+          happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r 
`HappyStk` drop_stk))
 
 happyDrop 0# l = l
 happyDrop n (HappyCons (_) (t)) = happyDrop (n Happy_GHC_Exts.-# (1# :: 
Happy_GHC_Exts.Int#)) t
@@ -1043,9 +1059,9 @@
 happyGoto nt j tk st = 
    {- nothing -}
    happyDoAction j tk new_state
-   where (off) = indexShortOffAddr happyGotoOffsets st
-         (off_i) = (off Happy_GHC_Exts.+# nt)
-         (new_state) = indexShortOffAddr happyTable off_i
+   where off = indexShortOffAddr happyGotoOffsets st
+         off_i = (off Happy_GHC_Exts.+# nt)
+         new_state = indexShortOffAddr happyTable off_i
 
 
 
@@ -1055,7 +1071,7 @@
 
 -- parse error if we are in recovery and we fail again
 happyFail 0# tk old_st _ stk@(x `HappyStk` _) =
-     let (i) = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# 
(i)) -> i }) in
+     let i = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# (i)) 
-> i }) in
 --     trace "failing" $ 
         happyError_ i tk
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/dist/build/alex/alex-tmp/Scan.hs 
new/alex-3.1.3/dist/build/alex/alex-tmp/Scan.hs
--- old/alex-3.0.5/dist/build/alex/alex-tmp/Scan.hs     2013-03-10 
13:19:13.000000000 +0100
+++ new/alex-3.1.3/dist/build/alex/alex-tmp/Scan.hs     2013-11-28 
09:35:19.000000000 +0100
@@ -1,4 +1,4 @@
-{-# LANGUAGE CPP,MagicHash,BangPatterns #-}
+{-# LANGUAGE CPP,MagicHash #-}
 {-# LINE 13 "src/Scan.x" #-}
 
 {-# OPTIONS_GHC -w #-}
@@ -39,7 +39,7 @@
 alex_deflt :: AlexAddr
 alex_deflt = AlexA# 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x52\x00\xff\xff\xff\xff\xff\xff\x09\x00\xff\xff\x0b\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2e\x00\x2e\x00\x34\x00\x34\x00\x36\x00\x36\x00\xff\xff\x3f\x00\x3f\x00\x40\x00\x40\x00\x46\x00\x46\x00\x4a\x00\x4a\x00\x4d\x00\x4d\x00\x4e\x00\x4e\x00\x50\x00\x50\x00\xff\xff\x52\x00\x52\x00\x52\x00\x57\x00\x57\x00\x58\x00\x58\x00\x5a\x00\x5a\x00\x5f\x00\x5f\x00\x0b\x00\x0b\x00\x0b\x00\x09\x00\x09\x00\x09\x00\x62\x00\x62\x00\x65\x00\x65\x00\x52\x00\xff\xff\xff\xff\x67\x00\x67\x00\x67\x00\x6a\x00\x6a\x00\x6e\x00\x6e\x00\x93\x00\x93\x00\x93\x00\x93\x00\x82\x00\x82\x00\x82\x00\x7d\x00\x7d\x00\x7d\x00\xff\xff\x67\x00\x71\x00\x71\x00\x71\x00\x6f\x00\x6f\x00\x6f\x00\x6f\x00\xff\xff\xff\xff\x71\x00\xff\xff\xff\xff\x52\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x67\x00\xff\xff\xff\xff\xff\xff\xff\xff\x09\x00\xff\xff\x0b\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"#
 
-alex_accept = listArray (0::Int,147) [[],[(AlexAcc 
(alex_action_21))],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[(AlexAccSkip)],[(AlexAcc
 (alex_action_0))],[(AlexAcc (alex_action_0))],[(AlexAcc 
(alex_action_1))],[(AlexAcc (alex_action_2))],[(AlexAcc 
(alex_action_2))],[(AlexAccPred  (alex_action_3) (alexRightContext 
108)),(AlexAcc (alex_action_4))],[(AlexAcc (alex_action_4))],[(AlexAcc 
(alex_action_4))],[(AlexAcc (alex_action_4))],[(AlexAcc 
(alex_action_5))],[(AlexAcc (alex_action_6))],[(AlexAcc 
(alex_action_7))],[(AlexAcc (alex_action_8))],[(AlexAcc 
(alex_action_9))],[(AlexAcc (alex_action_9))],[(AlexAcc 
(alex_action_9))],[(AlexAcc (alex_action_10))],[(AlexAcc 
(alex_action_10))],[(AlexAcc (alex_action_10))],[(AlexAcc 
(alex_action_10))],[(AlexAcc (alex_action_10))],[(AlexAcc 
(alex_action_10))],[(AlexAcc (alex_action_11))],[(AlexAcc 
(alex_action_11))],[(AlexAcc (alex_action_12))],[(AlexAcc 
(alex_action_12))],[(AlexAcc (alex_action_13))],[(AlexAcc 
(alex_action_13))],[(AlexAcc (alex_action_14))],[(AlexAcc 
(alex_action_14))],[(AlexAcc (alex_action_15))],[(AlexAcc 
(alex_action_16))],[(AlexAcc (alex_action_17))],[(AlexAcc 
(alex_action_18))],[(AlexAcc (alex_action_19))],[(AlexAcc (alex_action_20))]]
+alex_accept = listArray (0::Int,147) [AlexAccNone,AlexAcc 
(alex_action_21),AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccSkip,AlexAcc
 (alex_action_0),AlexAcc (alex_action_0),AlexAcc (alex_action_1),AlexAcc 
(alex_action_2),AlexAcc (alex_action_2),AlexAccPred  (alex_action_3) 
(alexRightContext 108)(AlexAcc (alex_action_4)),AlexAcc (alex_action_4),AlexAcc 
(alex_action_4),AlexAcc (alex_action_4),AlexAcc (alex_action_5),AlexAcc 
(alex_action_6),AlexAcc (alex_action_7),AlexAcc (alex_action_8),AlexAcc 
(alex_action_9),AlexAcc (alex_action_9),AlexAcc (alex_action_9),AlexAcc 
(alex_action_10),AlexAcc (alex_action_10),AlexAcc (alex_action_10),AlexAcc 
(alex_action_10),AlexAcc (alex_action_10),AlexAcc (alex_action_10),AlexAcc 
(alex_action_11),AlexAcc (alex_action_11),AlexAcc (alex_action_12),AlexAcc 
(alex_action_12),AlexAcc (alex_action_13),AlexAcc (alex_action_13),AlexAcc 
(alex_action_14),AlexAcc (alex_action_14),AlexAcc (alex_action_15),AlexAcc 
(alex_action_16),AlexAcc (alex_action_17),AlexAcc (alex_action_18),AlexAcc 
(alex_action_19),AlexAcc (alex_action_20)]
 {-# LINE 75 "src/Scan.x" #-}
 
 
@@ -225,13 +225,25 @@
 -- 
-----------------------------------------------------------------------------
 -- INTERNALS and main scanner engine
 
-{-# LINE 37 "templates/GenericTemplate.hs" #-}
+{-# LINE 21 "templates/GenericTemplate.hs" #-}
+
 
-{-# LINE 47 "templates/GenericTemplate.hs" #-}
 
 
-data AlexAddr = AlexA# Addr#
 
+-- Do not remove this comment. Required to fix CPP parsing when using GCC and 
a clang-compiled alex.
+#if __GLASGOW_HASKELL__ > 706
+#define GTE(n,m) (tagToEnum# (n >=# m))
+#define EQ(n,m) (tagToEnum# (n ==# m))
+#else
+#define GTE(n,m) (n >=# m)
+#define EQ(n,m) (n ==# m)
+#endif
+{-# LINE 51 "templates/GenericTemplate.hs" #-}
+
+
+data AlexAddr = AlexA# Addr#
+-- Do not remove this comment. Required to fix CPP parsing when using GCC and 
a clang-compiled alex.
 #if __GLASGOW_HASKELL__ < 503
 uncheckedShiftL# = shiftL#
 #endif
@@ -241,10 +253,10 @@
 #ifdef WORDS_BIGENDIAN
   narrow16Int# i
   where
-        !i    = word2Int# ((high `uncheckedShiftL#` 8#) `or#` low)
-        !high = int2Word# (ord# (indexCharOffAddr# arr (off' +# 1#)))
-        !low  = int2Word# (ord# (indexCharOffAddr# arr off'))
-        !off' = off *# 2#
+        i    = word2Int# ((high `uncheckedShiftL#` 8#) `or#` low)
+        high = int2Word# (ord# (indexCharOffAddr# arr (off' +# 1#)))
+        low  = int2Word# (ord# (indexCharOffAddr# arr off'))
+        off' = off *# 2#
 #else
   indexInt16OffAddr# arr off
 #endif
@@ -258,14 +270,14 @@
 #ifdef WORDS_BIGENDIAN
   narrow32Int# i
   where
-   !i    = word2Int# ((b3 `uncheckedShiftL#` 24#) `or#`
+   i    = word2Int# ((b3 `uncheckedShiftL#` 24#) `or#`
                     (b2 `uncheckedShiftL#` 16#) `or#`
                     (b1 `uncheckedShiftL#` 8#) `or#` b0)
-   !b3   = int2Word# (ord# (indexCharOffAddr# arr (off' +# 3#)))
-   !b2   = int2Word# (ord# (indexCharOffAddr# arr (off' +# 2#)))
-   !b1   = int2Word# (ord# (indexCharOffAddr# arr (off' +# 1#)))
-   !b0   = int2Word# (ord# (indexCharOffAddr# arr off'))
-   !off' = off *# 4#
+   b3   = int2Word# (ord# (indexCharOffAddr# arr (off' +# 3#)))
+   b2   = int2Word# (ord# (indexCharOffAddr# arr (off' +# 2#)))
+   b1   = int2Word# (ord# (indexCharOffAddr# arr (off' +# 1#)))
+   b0   = int2Word# (ord# (indexCharOffAddr# arr off'))
+   off' = off *# 4#
 #else
   indexInt32OffAddr# arr off
 #endif
@@ -274,6 +286,7 @@
 
 
 
+
 #if __GLASGOW_HASKELL__ < 503
 quickIndex arr i = arr ! i
 #else
@@ -340,35 +353,40 @@
 
 
 
-       let
-               (!(base)) = alexIndexInt32OffAddr alex_base s
-               (!((I# (ord_c)))) = fromIntegral c
-               (!(offset)) = (base +# ord_c)
-               (!(check))  = alexIndexInt16OffAddr alex_check offset
+      case fromIntegral c of { (I# (ord_c)) ->
+        let
+                base   = alexIndexInt32OffAddr alex_base s
+                offset = (base +# ord_c)
+                check  = alexIndexInt16OffAddr alex_check offset
                
-               (!(new_s)) = if (offset >=# 0#) && (check ==# ord_c)
+                new_s = if GTE(offset,0#) && EQ(check,ord_c)
                          then alexIndexInt16OffAddr alex_table offset
                          else alexIndexInt16OffAddr alex_deflt s
        in
-       case new_s of 
+        case new_s of
            -1# -> (new_acc, input)
                -- on an error, we want to keep the input *before* the
                -- character that failed, not after.
            _ -> alex_scan_tkn user orig_input (if c < 0x80 || c >= 0xC0 then 
(len +# 1#) else len)
                                                 -- note that the length is 
increased ONLY if this is the 1st byte in a char encoding)
                        new_input new_s new_acc
-
+      }
   where
-       check_accs [] = last_acc
-       check_accs (AlexAcc a : _) = AlexLastAcc a input (I# (len))
-       check_accs (AlexAccSkip : _)  = AlexLastSkip  input (I# (len))
-       check_accs (AlexAccPred a predx : rest)
+       check_accs (AlexAccNone) = last_acc
+       check_accs (AlexAcc a  ) = AlexLastAcc a input (I# (len))
+       check_accs (AlexAccSkip) = AlexLastSkip  input (I# (len))
+
+       check_accs (AlexAccPred a predx rest)
           | predx user orig_input (I# (len)) input
           = AlexLastAcc a input (I# (len))
-       check_accs (AlexAccSkipPred predx : rest)
+          | otherwise
+          = check_accs rest
+       check_accs (AlexAccSkipPred predx rest)
           | predx user orig_input (I# (len)) input
           = AlexLastSkip input (I# (len))
-       check_accs (_ : rest) = check_accs rest
+          | otherwise
+          = check_accs rest
+
 
 data AlexLastAcc a
   = AlexNone
@@ -381,10 +399,12 @@
     fmap f (AlexLastSkip x y) = AlexLastSkip x y
 
 data AlexAcc a user
-  = AlexAcc a
+  = AlexAccNone
+  | AlexAcc a
   | AlexAccSkip
-  | AlexAccPred a (AlexAccPred user)
-  | AlexAccSkipPred (AlexAccPred user)
+
+  | AlexAccPred a   (AlexAccPred user) (AlexAcc a user)
+  | AlexAccSkipPred (AlexAccPred user) (AlexAcc a user)
 
 type AlexAccPred user = user -> AlexInput -> Int -> AlexInput -> Bool
 
@@ -411,5 +431,6 @@
        -- match when checking the right context, just
        -- the first match will do.
 
+
 -- used by wrappers
 iUnbox (I# (i)) = i
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/doc/alex.xml new/alex-3.1.3/doc/alex.xml
--- old/alex-3.0.5/doc/alex.xml 2013-03-10 13:19:12.000000000 +0100
+++ new/alex-3.1.3/doc/alex.xml 2013-11-28 09:35:19.000000000 +0100
@@ -362,7 +362,7 @@
     <para>Alex takes a description of tokens based on regular
     expressions and generates a Haskell module containing code for
     scanning text efficiently.  Alex is designed to be familiar to
-    exisiting lex users, although it does depart from lex in a number
+    existing lex users, although it does depart from lex in a number
     of ways.</para>
 
     <figure id="fig-tokens" float="1"><title>A simple Alex 
specification.</title>
@@ -429,7 +429,7 @@
 
 <programlisting><replaceable>regexp</replaceable>   { 
<replaceable>code</replaceable> }</programlisting>
 
-    <para>The meaming of a this rule is "if the input matches
+    <para>The meaning of a this rule is "if the input matches
     <replaceable>regexp</replaceable>, then return
     <replaceable>code</replaceable>".  The code part along with the
     braces can be replaced by simply
@@ -588,7 +588,7 @@
        <para>The rules are heralded by the sequence
        &lsquo;<literal><replaceable>id</replaceable> :-</literal>&rsquo;
         in the file.  It doesn't matter what you use for the
-        identifer, it is just there for documentation purposes.  In
+        identifier, it is just there for documentation purposes.  In
        fact, it can be omitted, but the <literal>:-</literal> must be
        left in.</para>
 
@@ -723,7 +723,7 @@
          specification, such that only certain rules will match for a
          given state.</para>
 
-         <para>A startcode is simply an identifer, or the special
+         <para>A startcode is simply an identifier, or the special
          start code &lsquo;<literal>0</literal>&rsquo;.  Each rule
          may be given a list of startcodes under which it
          applies:</para>
@@ -851,7 +851,7 @@
        <varlistentry>
          <term><literal><replaceable>r</replaceable>*</literal></term>
          <listitem>
-           <para>Matches zero or more occurences of
+           <para>Matches zero or more occurrences of
            <replaceable>r</replaceable>.</para>
          </listitem>
        </varlistentry>
@@ -859,7 +859,7 @@
        <varlistentry>
          <term><literal><replaceable>r</replaceable>+</literal></term>
          <listitem>
-           <para>Matches one or more occurences of
+           <para>Matches one or more occurrences of
            <replaceable>r</replaceable>.</para>
          </listitem>
        </varlistentry>
@@ -867,7 +867,7 @@
        <varlistentry>
          <term><literal><replaceable>r</replaceable>?</literal></term>
          <listitem>
-           <para>Matches zero or one occurences of
+           <para>Matches zero or one occurrences of
            <replaceable>r</replaceable>.</para>
          </listitem>
        </varlistentry>
@@ -1208,11 +1208,7 @@
          :: user             -- predicate state
          -> AlexInput        -- The current input
          -> Int              -- The "start code"
-         -> Maybe (          -- Nothing on error or EOF
-                 AlexInput,  -- The remaining input
-                 Int,        -- Length of this token
-                 action      -- The action (an unknown type)
-              )</programlisting>
+         -> AlexReturn action</programlisting>
 
       <para>The extra argument, of some type <literal>user</literal>,
       is passed to each predicate.</para>
@@ -1278,7 +1274,7 @@
 
        <para>The type signature for <literal>alexScanTokens</literal>
         is commented out, because the <literal>token</literal> type is
-        unkonwn.  All of the actions in your lexical specification
+        unknown.  All of the actions in your lexical specification
         should have type:</para>
 
 <programlisting>{ ... } :: String -> token</programlisting>
@@ -1374,6 +1370,12 @@
 <programlisting>type AlexAction result = AlexInput -> Int -> Alex result
 { ... }  :: AlexAction result</programlisting>
 
+        <para>The Alex file must also define a function
+        <literal>alexEOF</literal>, which will be executed on when the
+        end-of-file is scanned:</para>
+
+<programlisting>alexEOF :: Alex result</programlisting>
+
        <para>The <literal>monad</literal> wrapper also provides some
        useful combinators for constructing token actions:</para>
 
@@ -1403,7 +1405,7 @@
     it during the whole lexing phase.</para>
 
     <para>The generated code is the same as in the <literal>monad</literal> 
-    wrapper, except in 2 places:</para>
+    wrapper, except in 3 places:</para>
     <para>1) The definition of the general state, which now refers to a
     type (<literal>AlexUserState</literal>) that must be defined in the Alex 
file.</para>
 
@@ -1433,6 +1435,13 @@
                                           Right ( _, a ) -> Right a
 </programlisting>
 
+    <para>3) Two helper functions (<literal>alexGetUserState</literal>
+    and <literal>alexSetUserState</literal>) are defined.</para>
+
+<programlisting>
+alexGetUserState :: Alex AlexUserState
+alexSetUserState :: AlexUserState -> Alex ()
+</programlisting>
     <para>Here is an example of code in the user's Alex file defining
     the type and function:</para>
 
@@ -1450,19 +1459,19 @@
                    }
 
 getLexerCommentDepth :: Alex Int
-getLexerCommentDepth = Alex $ \s@AlexState{alex_ust=ust} -> Right (s, 
lexerCommentDepth ust)
+getLexerCommentDepth = do ust &lt;- alexGetUserState; return 
(lexerCommentDepth ust)
 
 setLexerCommentDepth :: Int -> Alex ()
-setLexerCommentDepth ss = Alex $ \s -> Right (s{alex_ust=(alex_ust 
s){lexerCommentDepth=ss}}, ())
+setLexerCommentDepth ss = do ust &lt;- alexGetUserState; alexSetUserState 
ust{lexerCommentDepth=ss}
 
 getLexerStringValue :: Alex String
-getLexerStringValue = Alex $ \s@AlexState{alex_ust=ust} -> Right (s, 
lexerStringValue ust)
+getLexerStringValue = do ust &lt;- alexGetUserState; return (lexerStringValue 
ust)
 
 setLexerStringValue :: String -> Alex ()
-setLexerStringValue ss = Alex $ \s -> Right (s{alex_ust=(alex_ust 
s){lexerStringValue=ss}}, ())
+setLexerStringValue ss = do ust &lt;- alexGetUserState; alexSetUserState 
ust{lexerStringValue=ss}
 
 addCharToLexerStringValue :: Char -> Alex ()
-addCharToLexerStringValue c = Alex $ \s -> Right (s{alex_ust=(alex_ust 
s){lexerStringValue=c:lexerStringValue (alex_ust s)}}, ())
+addCharToLexerStringValue c = do ust &lt;- alexGetUserState; alexSetUserState 
ust{lexerStringValue=c:(lexerStringValue ust)}
 </programlisting>
       </section>
 
@@ -1503,7 +1512,7 @@
        
        <para>The point of using these wrappers is that
        <literal>ByteString</literal>s provide a more memory efficient
-       representaion of an input stream. They can also be somewhat faster to
+       representation of an input stream. They can also be somewhat faster to
         process. Note that using these wrappers adds a dependency
        on the <literal>ByteString</literal> modules, which live in the
        <literal>bytestring</literal> package (or in the
@@ -1589,7 +1598,7 @@
 <programlisting>
 import qualified Data.ByteString.Lazy as ByteString
 
-ata AlexState = AlexState {
+data AlexState = AlexState {
         alex_pos :: !AlexPosn,  -- position at current input location
         alex_inp :: ByteString.ByteString, -- the current input
         alex_chr :: !Char,      -- the character before the input
@@ -1712,7 +1721,7 @@
        <term><option>--debug</option></term>
        <listitem>
          <para>Causes Alex to produce a lexer which will output
-         debugging messsages as it runs.</para>
+         debugging messages as it runs.</para>
        </listitem>
       </varlistentry>
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/src/CharSet.hs 
new/alex-3.1.3/src/CharSet.hs
--- old/alex-3.0.5/src/CharSet.hs       2013-03-10 13:19:12.000000000 +0100
+++ new/alex-3.1.3/src/CharSet.hs       2013-11-28 09:35:19.000000000 +0100
@@ -126,7 +126,9 @@
 
 charRangeToCharSpan :: Bool -> Range Char -> Maybe (Span Char)
 charRangeToCharSpan _ (Range BoundaryAboveAll _) = Nothing
+charRangeToCharSpan _ (Range (BoundaryAbove c) _) | c == maxBound = Nothing
 charRangeToCharSpan _ (Range _ BoundaryBelowAll) = Nothing
+charRangeToCharSpan _ (Range _ (BoundaryBelow c)) | c == minBound = Nothing
 charRangeToCharSpan uni (Range x y) = Just (Span (l x) (h y))
     where l b = case b of
             BoundaryBelowAll -> '\0'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/templates/GenericTemplate.hs 
new/alex-3.1.3/templates/GenericTemplate.hs
--- old/alex-3.0.5/templates/GenericTemplate.hs 2013-03-10 13:19:12.000000000 
+0100
+++ new/alex-3.1.3/templates/GenericTemplate.hs 2013-11-28 09:35:19.000000000 
+0100
@@ -8,12 +8,29 @@
 -- INTERNALS and main scanner engine
 
 #ifdef ALEX_GHC
+#undef __GLASGOW_HASKELL__
+#define ALEX_IF_GHC_GT_500 #if __GLASGOW_HASKELL__ > 500
+#define ALEX_IF_GHC_LT_503 #if __GLASGOW_HASKELL__ < 503
+#define ALEX_IF_GHC_GT_706 #if __GLASGOW_HASKELL__ > 706
+#define ALEX_ELIF_GHC_500 #elif __GLASGOW_HASKELL__ == 500
+#define ALEX_IF_BIGENDIAN #ifdef WORDS_BIGENDIAN
+#define ALEX_ELSE #else
+#define ALEX_ENDIF #endif
+#define ALEX_DEFINE #define
+#endif
+
+#ifdef ALEX_GHC
 #define ILIT(n) n#
 #define IBOX(n) (I# (n))
 #define FAST_INT Int#
-#define LT(n,m) (n <# m)
-#define GTE(n,m) (n >=# m)
-#define EQ(n,m) (n ==# m)
+-- Do not remove this comment. Required to fix CPP parsing when using GCC and 
a clang-compiled alex.
+ALEX_IF_GHC_GT_706
+ALEX_DEFINE GTE(n,m) (tagToEnum# (n >=# m))
+ALEX_DEFINE EQ(n,m) (tagToEnum# (n ==# m))
+ALEX_ELSE
+ALEX_DEFINE GTE(n,m) (n >=# m)
+ALEX_DEFINE EQ(n,m) (n ==# m)
+ALEX_ENDIF
 #define PLUS(n,m) (n +# m)
 #define MINUS(n,m) (n -# m)
 #define TIMES(n,m) (n *# m)
@@ -23,7 +40,6 @@
 #define ILIT(n) (n)
 #define IBOX(n) (n)
 #define FAST_INT Int
-#define LT(n,m) (n < m)
 #define GTE(n,m) (n >= m)
 #define EQ(n,m) (n == m)
 #define PLUS(n,m) (n + m)
@@ -34,18 +50,8 @@
 #endif
 
 #ifdef ALEX_GHC
-#undef __GLASGOW_HASKELL__
-#define ALEX_IF_GHC_GT_500 #if __GLASGOW_HASKELL__ > 500
-#define ALEX_IF_GHC_LT_503 #if __GLASGOW_HASKELL__ < 503
-#define ALEX_ELIF_GHC_500 #elif __GLASGOW_HASKELL__ == 500
-#define ALEX_IF_BIGENDIAN #ifdef WORDS_BIGENDIAN
-#define ALEX_ELSE #else
-#define ALEX_ENDIF #endif
-#endif
-
-#ifdef ALEX_GHC
 data AlexAddr = AlexA# Addr#
-
+-- Do not remove this comment. Required to fix CPP parsing when using GCC and 
a clang-compiled alex.
 ALEX_IF_GHC_LT_503
 uncheckedShiftL# = shiftL#
 ALEX_ENDIF
@@ -88,6 +94,7 @@
 #endif
 
 #ifdef ALEX_GHC
+
 ALEX_IF_GHC_LT_503
 quickIndex arr i = arr ! i
 ALEX_ELSE
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/templates/wrappers.hs 
new/alex-3.1.3/templates/wrappers.hs
--- old/alex-3.0.5/templates/wrappers.hs        2013-03-10 13:19:12.000000000 
+0100
+++ new/alex-3.1.3/templates/wrappers.hs        2013-11-28 09:35:19.000000000 
+0100
@@ -75,6 +75,9 @@
                   Char,         -- previous char
                   ByteString.ByteString)        -- current input string
 
+ignorePendingBytes :: AlexInput -> AlexInput
+ignorePendingBytes i = i   -- no pending bytes when lexing bytestrings
+
 alexInputPrevChar :: AlexInput -> Char
 alexInputPrevChar (p,c,s) = c
 
@@ -195,12 +198,20 @@
 alexSetStartCode :: Int -> Alex ()
 alexSetStartCode sc = Alex $ \s -> Right (s{alex_scd=sc}, ())
 
+#ifdef ALEX_MONAD_USER_STATE
+alexGetUserState :: Alex AlexUserState
+alexGetUserState = Alex $ \s@AlexState{alex_ust=ust} -> Right (s,ust)
+
+alexSetUserState :: AlexUserState -> Alex ()
+alexSetUserState ss = Alex $ \s -> Right (s{alex_ust=ss}, ())
+#endif
+
 alexMonadScan = do
   inp <- alexGetInput
   sc <- alexGetStartCode
   case alexScan inp sc of
     AlexEOF -> alexEOF
-    AlexError inp' -> alexError "lexical error"
+    AlexError ((AlexPn _ line column),_,_,_) -> alexError $ "lexical error at 
line " ++ (show line) ++ ", column " ++ (show column)
     AlexSkip  inp' len -> do
         alexSetInput inp'
         alexMonadScan
@@ -285,17 +296,19 @@
 alexSetStartCode sc = Alex $ \s -> Right (s{alex_scd=sc}, ())
 
 alexMonadScan = do
-  inp <- alexGetInput
+  inp@(_,_,str) <- alexGetInput
   sc <- alexGetStartCode
   case alexScan inp sc of
     AlexEOF -> alexEOF
-    AlexError inp' -> alexError "lexical error"
+    AlexError ((AlexPn _ line column),_,_) -> alexError $ "lexical error at 
line " ++ (show line) ++ ", column " ++ (show column)
     AlexSkip  inp' len -> do
         alexSetInput inp'
         alexMonadScan
-    AlexToken inp' len action -> do
+    AlexToken inp'@(_,_,str') len action -> do
         alexSetInput inp'
         action (ignorePendingBytes inp) len
+      where
+        len = ByteString.length str - ByteString.length str'
 
 -- 
-----------------------------------------------------------------------------
 -- Useful token actions
@@ -358,8 +371,8 @@
                 AlexEOF -> []
                 AlexError _ -> error "lexical error"
                 AlexSkip  inp' len     -> go inp'
-                AlexToken inp' len act -> act (ByteString.take (fromIntegral 
len) str) : go inp'
-
+                AlexToken inp'@(_,str') _ act -> act (ByteString.take len str) 
: go inp'
+                 where len = ByteString.length str - ByteString.length str'
 
 #endif
 
@@ -372,7 +385,8 @@
                 AlexEOF -> []
                 AlexError _ -> error "lexical error"
                 AlexSkip  inp' len     -> go inp'
-                AlexToken inp' len act -> act (ByteString.unsafeTake len str) 
: go inp'
+                AlexToken inp'@(AlexInput _ str') _ act -> act 
(ByteString.unsafeTake len str) : go inp'
+                 where len = ByteString.length str - ByteString.length str'
 
 #endif
 
@@ -388,7 +402,7 @@
   where go inp@(pos,_,_,str) =
           case alexScan inp 0 of
                 AlexEOF -> []
-                AlexError ((AlexPn _ line column),_,_,_) -> error $ "lexical 
error at " ++ (show line) ++ " line, " ++ (show column) ++ " column"
+                AlexError ((AlexPn _ line column),_,_,_) -> error $ "lexical 
error at line " ++ (show line) ++ ", column " ++ (show column)
                 AlexSkip  inp' len     -> go inp'
                 AlexToken inp' len act -> act pos (take len str) : go inp'
 #endif
@@ -403,7 +417,7 @@
   where go inp@(pos,_,str) =
           case alexScan inp 0 of
                 AlexEOF -> []
-                AlexError ((AlexPn _ line column),_,_) -> error $ "lexical 
error at " ++ (show line) ++ " line, " ++ (show column) ++ " column"
+                AlexError ((AlexPn _ line column),_,_) -> error $ "lexical 
error at line " ++ (show line) ++ ", column " ++ (show column)
                 AlexSkip  inp' len     -> go inp'
                 AlexToken inp' len act -> act pos (ByteString.take 
(fromIntegral len) str) : go inp'
 #endif
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/tests/Makefile 
new/alex-3.1.3/tests/Makefile
--- old/alex-3.0.5/tests/Makefile       2013-03-10 13:19:12.000000000 +0100
+++ new/alex-3.1.3/tests/Makefile       2013-11-28 09:35:19.000000000 +0100
@@ -10,7 +10,7 @@
 HS_PROG_EXT = .bin
 endif
 
-TESTS = unicode.x simple.x tokens.x tokens_posn.x tokens_gscan.x 
tokens_bytestring.x tokens_posn_bytestring.x tokens_strict_bytestring.x
+TESTS = unicode.x simple.x tokens.x tokens_posn.x tokens_gscan.x 
tokens_bytestring.x tokens_posn_bytestring.x tokens_strict_bytestring.x 
tokens_monad_bytestring.x tokens_monadUserState_bytestring.x null.x 
tokens_bytestring_unicode.x
 
 TEST_ALEX_OPTS = --template=..
 
@@ -20,9 +20,6 @@
 %.g.hs : %.x
        $(ALEX) $(TEST_ALEX_OPTS) -g $< -o $@
 
-%.o : %.hs
-       $(HC) $(HC_OPTS) -c -o $@ $<
-
 CLEAN_FILES += *.n.hs *.g.hs *.info *.hi *.o *.bin *.exe
 
 ALL_TEST_HS = $(shell echo $(TESTS) | sed -e 's/\([^\. 
]*\)\.\(l\)\{0,1\}x/\1.n.hs \1.g.hs/g')
@@ -32,7 +29,7 @@
 %.run : %$(HS_PROG_EXT)
        ./$<
 
-%$(HS_PROG_EXT) : %.o
+%$(HS_PROG_EXT) : %.hs
        $(HC) $(HC_OPTS) -package array -package bytestring $($*_LD_OPTS) $< -o 
$@
 
 all :: $(ALL_TESTS)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/tests/null.x new/alex-3.1.3/tests/null.x
--- old/alex-3.0.5/tests/null.x 1970-01-01 01:00:00.000000000 +0100
+++ new/alex-3.1.3/tests/null.x 2013-11-28 09:35:19.000000000 +0100
@@ -0,0 +1,71 @@
+{
+-- Tests the basic operation.
+module Main where
+
+import Data.Char (toUpper)
+import Control.Monad
+import System.Exit
+import System.IO
+import Prelude hiding (null)
+}
+
+%wrapper "monad"
+
+@word = [A-Za-z]+
+@null = \0
+
+$escchars = [abfnrtv\\"\'&]
+@escape = \\ ($escchars | \0)
+@gap = \\ $white+ \\
+@string = $printable # [\"] | " " | @escape | @gap
+
+@inComment = ([^\*] | $white)+ | ([\*]+ ([\x00-\xff] # [\/]))
+
+tokens :-
+
+$white+                        ;
+
+<0> {
+   @null                { null }
+   @word                { word }
+   \" @string \"        { string }
+   "--" @inComment \n   { word }
+}
+
+{
+{- we can now have comments in source code? -}
+word (p,_,_,input) len = return (take len input)
+
+null (p,_,_,input) len = return "\0"
+
+string (p,_,_,input) len = return (drop 1 (reverse (drop 1 (reverse input))))
+
+alexEOF = return "stopped."
+
+scanner str = runAlex str $ do
+  let loop = do tok <- alexMonadScan
+               if tok == "stopped." || tok == "error." 
+                       then return [tok]
+                       else do toks <- loop
+                               return (tok:toks)
+  loop  
+
+main = do
+  let test1 = scanner str1
+  when (test1 /= out1) $ 
+       do hPutStrLn stderr "Test 1 failed:"
+          print test1
+          exitFailure
+
+  let test2 = scanner str2
+  when (test2 /= out2) $
+       do hPutStrLn stderr "Test 2 failed:"
+          print test2
+          exitFailure
+
+str1 = "a\0bb\0ccc\0\0\"\\\0\""
+out1 = Right ["a","\NUL","bb","\NUL","ccc","\NUL","\NUL","\\\NUL", "stopped."]
+
+str2 = "."
+out2 = Left "lexical error at line 1, column 1"
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/tests/simple.x 
new/alex-3.1.3/tests/simple.x
--- old/alex-3.0.5/tests/simple.x       2013-03-10 13:19:12.000000000 +0100
+++ new/alex-3.1.3/tests/simple.x       2013-11-28 09:35:19.000000000 +0100
@@ -69,5 +69,5 @@
 out1 = Right ["BOL:a","b","c","D","E","F","PING!","MAGIC","EOL:eol", 
"BOL:bol", "BOTH:both", "stopped."]
 
 str2 = "."
-out2 = Left "lexical error"
+out2 = Left "lexical error at line 1, column 1"
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/tests/tokens_bytestring_unicode.x 
new/alex-3.1.3/tests/tokens_bytestring_unicode.x
--- old/alex-3.0.5/tests/tokens_bytestring_unicode.x    1970-01-01 
01:00:00.000000000 +0100
+++ new/alex-3.1.3/tests/tokens_bytestring_unicode.x    2013-11-28 
09:35:19.000000000 +0100
@@ -0,0 +1,42 @@
+{
+{-# LANGUAGE OverloadedStrings #-}
+module Main (main) where
+import System.Exit
+import Data.ByteString.Lazy.Char8 (unpack)
+}
+
+%wrapper "basic-bytestring"
+
+$digit = 0-9      -- digits
+$alpha = [a-zA-Zαβ]    -- alphabetic characters
+
+tokens :-
+
+  $white+        ;
+  "--".*        ;
+  let          { \s -> Let }
+  in          { \s -> In }
+  $digit+                               { \s -> Int (read (unpack s)) }
+  [\=\+\-\*\/\(\)]                      { \s -> Sym (head (unpack s)) }
+  $alpha [$alpha $digit \_ \']*         { \s -> Var (unpack s) }
+
+{
+-- Each right-hand side has type :: ByteString -> Token
+
+-- The token type:
+data Token =
+  Let     |
+  In      |
+  Sym Char  |
+  Var String  |
+  Int Int    |
+  Err
+  deriving (Eq,Show)
+
+main = if test1 /= result1 then exitFailure
+                           else exitWith ExitSuccess
+
+-- \206\177\206\178\206\178 is "αββ" utf-8 encoded
+test1 = alexScanTokens "  let in 012334\n=+*foo \206\177\206\178\206\178 
bar__'"
+result1 = [Let,In,Int 12334,Sym '=',Sym '+',Sym '*',Var "foo",Var 
"\206\177\206\178\206\178",Var "bar__'"]
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/tests/tokens_monadUserState_bytestring.x 
new/alex-3.1.3/tests/tokens_monadUserState_bytestring.x
--- old/alex-3.0.5/tests/tokens_monadUserState_bytestring.x     1970-01-01 
01:00:00.000000000 +0100
+++ new/alex-3.1.3/tests/tokens_monadUserState_bytestring.x     2013-11-28 
09:35:19.000000000 +0100
@@ -0,0 +1,63 @@
+{
+{-# LANGUAGE OverloadedStrings #-}
+module Main (main) where
+import System.Exit
+import qualified Data.ByteString.Lazy.Char8 as B
+}
+
+%wrapper "monadUserState-bytestring"
+
+$digit = 0-9                   -- digits
+$alpha = [a-zA-Z]              -- alphabetic characters
+
+tokens :-
+
+  $white+                              ;
+  "--".*                               ;
+  let                                  { tok (\p s -> Let p) }
+  in                                   { tok (\p s -> In p) }
+  $digit+                               { tok (\p s -> Int p (read (B.unpack 
s))) }
+  [\=\+\-\*\/\(\)]                      { tok (\p s -> Sym p (head (B.unpack 
s))) }
+  $alpha [$alpha $digit \_ \']*         { tok (\p s -> Var p (B.unpack s)) }
+
+{
+-- Each right-hand side has type :: AlexPosn -> String -> Token
+
+-- Some action helpers:
+tok f (p,_,input) len = return (f p (B.take (fromIntegral len) input))
+
+-- The token type:
+data Token =
+       Let AlexPosn            |
+       In  AlexPosn            |
+       Sym AlexPosn Char       |
+        Var AlexPosn String     |
+       Int AlexPosn Int        |
+        Err AlexPosn            |
+        EOF
+        deriving (Eq,Show)
+
+alexEOF = return EOF
+
+main = if test1 /= result1 then do print test1; exitFailure
+                          else exitWith ExitSuccess
+
+type AlexUserState = ()
+alexInitUserState = ()
+
+scanner str = runAlex str $ do
+  let loop = do tok <- alexMonadScan
+                if tok == EOF
+                       then return [tok]
+                       else do toks <- loop
+                               return (tok:toks)
+  loop  
+
+test1 = case scanner "  let in 012334\n=+*foo bar__'" of
+          Left err -> error err
+          Right toks -> toks
+
+result1 = [Let (AlexPn 2 1 3),In (AlexPn 6 1 7),Int (AlexPn 9 1 10) 12334,Sym 
(AlexPn 16 2 1) '=',Sym (AlexPn 17 2 2) '+',Sym (AlexPn 18 2 3) '*',Var (AlexPn 
19 2 4) "foo",Var (AlexPn 23 2 8) "bar__'", EOF]
+
+
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/tests/tokens_monad_bytestring.x 
new/alex-3.1.3/tests/tokens_monad_bytestring.x
--- old/alex-3.0.5/tests/tokens_monad_bytestring.x      1970-01-01 
01:00:00.000000000 +0100
+++ new/alex-3.1.3/tests/tokens_monad_bytestring.x      2013-11-28 
09:35:19.000000000 +0100
@@ -0,0 +1,60 @@
+{
+{-# LANGUAGE OverloadedStrings #-}
+module Main (main) where
+import System.Exit
+import qualified Data.ByteString.Lazy.Char8 as B
+}
+
+%wrapper "monad-bytestring"
+
+$digit = 0-9                   -- digits
+$alpha = [a-zA-Z]              -- alphabetic characters
+
+tokens :-
+
+  $white+                              ;
+  "--".*                               ;
+  let                                  { tok (\p s -> Let p) }
+  in                                   { tok (\p s -> In p) }
+  $digit+                               { tok (\p s -> Int p (read (B.unpack 
s))) }
+  [\=\+\-\*\/\(\)]                      { tok (\p s -> Sym p (head (B.unpack 
s))) }
+  $alpha [$alpha $digit \_ \']*         { tok (\p s -> Var p (B.unpack s)) }
+
+{
+-- Each right-hand side has type :: AlexPosn -> String -> Token
+
+-- Some action helpers:
+tok f (p,_,input) len = return (f p (B.take (fromIntegral len) input))
+
+-- The token type:
+data Token =
+       Let AlexPosn            |
+       In  AlexPosn            |
+       Sym AlexPosn Char       |
+        Var AlexPosn String     |
+       Int AlexPosn Int        |
+        Err AlexPosn            |
+        EOF
+        deriving (Eq,Show)
+
+alexEOF = return EOF
+
+main = if test1 /= result1 then do print test1; exitFailure
+                          else exitWith ExitSuccess
+
+scanner str = runAlex str $ do
+  let loop = do tok <- alexMonadScan
+                if tok == EOF
+                       then return [tok]
+                       else do toks <- loop
+                               return (tok:toks)
+  loop  
+
+test1 = case scanner "  let in 012334\n=+*foo bar__'" of
+          Left err -> error err
+          Right toks -> toks
+
+result1 = [Let (AlexPn 2 1 3),In (AlexPn 6 1 7),Int (AlexPn 9 1 10) 12334,Sym 
(AlexPn 16 2 1) '=',Sym (AlexPn 17 2 2) '+',Sym (AlexPn 18 2 3) '*',Var (AlexPn 
19 2 4) "foo",Var (AlexPn 23 2 8) "bar__'", EOF]
+
+
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/alex-3.0.5/tests/unicode.x 
new/alex-3.1.3/tests/unicode.x
--- old/alex-3.0.5/tests/unicode.x      2013-03-10 13:19:12.000000000 +0100
+++ new/alex-3.1.3/tests/unicode.x      2013-11-28 09:35:19.000000000 +0100
@@ -75,7 +75,7 @@
 out1 = Right ["PING!",".","stopped."]
 
 str2 = "\n"
-out2 = Left "lexical error"
+out2 = Left "lexical error at line 1, column 1"
 
 
 str3 = "αω --"

--
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to