From c92498072de0c5f08773111e33fed835c11411d7 Mon Sep 17 00:00:00 2001 From: Due Date: Sun, 17 Nov 2024 17:29:53 +0100 Subject: [PATCH] Let me see what I have changed. --- futhark/lexer.fut | 11 +-- src/Alpacc/Generator/Futhark/FutPrinter.hs | 11 ++- src/Alpacc/Generator/Futhark/Lexer.hs | 4 + src/Alpacc/Lexer/Encode.hs | 2 +- src/Alpacc/Lexer/ParallelLexing.hs | 101 +++++++++++---------- 5 files changed, 73 insertions(+), 56 deletions(-) diff --git a/futhark/lexer.fut b/futhark/lexer.fut index 9955b26..0f45454 100644 --- a/futhark/lexer.fut +++ b/futhark/lexer.fut @@ -12,16 +12,15 @@ module type lexer_context = { val endomorphism_size: i64 val endo_mask: endomorphism_module.t val terminal_mask: endomorphism_module.t - val accept_mask: endomorphism_module.t val produce_mask: endomorphism_module.t val endo_offset: endomorphism_module.t val terminal_offset: endomorphism_module.t - val accept_offset: endomorphism_module.t val produce_offset: endomorphism_module.t val is_ignore: terminal_module.t -> bool val transitions_to_endomorphisms: [256]endomorphism_module.t val compositions: [endomorphism_size * endomorphism_size]endomorphism_module.t val dead_terminal: terminal_module.t + val accept_array: [endomorphism_size]bool } module mk_lexer(L: lexer_context) = { @@ -34,11 +33,6 @@ module mk_lexer(L: lexer_context) = { endomorphism = let a' = mask L.endomorphism_module.& a in a' L.endomorphism_module.>> offset - - def is_accept (a: endomorphism): bool = - get_value L.accept_mask L.accept_offset a - |> L.endomorphism_module.to_i64 - |> bool.i64 def is_produce (a: endomorphism): bool = get_value L.produce_mask L.produce_offset a |> L.endomorphism_module.to_i64 |> bool.i64 @@ -53,6 +47,9 @@ module mk_lexer(L: lexer_context) = { def to_index (a: endomorphism): i64 = get_value L.endo_mask L.endo_offset a |> L.endomorphism_module.to_i64 + + def is_accept (a: endomorphism): bool = + L.accept_array[to_index a] def compose (a: endomorphism) (b: endomorphism): endomorphism = #[unsafe] diff --git 
a/src/Alpacc/Generator/Futhark/FutPrinter.hs b/src/Alpacc/Generator/Futhark/FutPrinter.hs index ec316a5..ed271e6 100644 --- a/src/Alpacc/Generator/Futhark/FutPrinter.hs +++ b/src/Alpacc/Generator/Futhark/FutPrinter.hs @@ -8,8 +8,9 @@ where import Data.Foldable import Data.List qualified as List import Data.String.Interpolate (i) -import Data.Array (Array) import Data.Array as Array hiding (Array) +import Data.Array.Unboxed (UArray) +import Data.Array.IArray as IArray import Alpacc.Types import Numeric.Natural @@ -38,6 +39,11 @@ instance FutPrinter String where instance FutPrinter Int where futPrint = show +instance FutPrinter Bool where + futPrint True = "true" + futPrint False = "false" + + instance FutPrinter Natural where futPrint = show @@ -65,6 +71,9 @@ instance (FutPrinter a) => FutPrinter (NTuple a) where instance (FutPrinter a) => FutPrinter (Array i a) where futPrint = futPrint . Array.elems +instance (FutPrinter a, IArray UArray a, Ix i) => FutPrinter (UArray i a) where + futPrint = futPrint . 
IArray.elems + instance (FutPrinter a) => FutPrinter (Maybe a) where futPrint (Just a) = "#some " ++ futPrint a futPrint Nothing = "#none" diff --git a/src/Alpacc/Generator/Futhark/Lexer.hs b/src/Alpacc/Generator/Futhark/Lexer.hs index ef8d764..cb4cdce 100644 --- a/src/Alpacc/Generator/Futhark/Lexer.hs +++ b/src/Alpacc/Generator/Futhark/Lexer.hs @@ -87,6 +87,7 @@ generateLexer lexer terminal_index_map terminal_type = do } = parMasks int_parallel_lexer let parallel_lexer = parLexer int_parallel_lexer let _identity = identity parallel_lexer + let accept_array = acceptArray parallel_lexer endomorphism_type <- extEndoType parallel_lexer transitions_to_endo <- transitionsToEndomorphismsArray parallel_lexer compositions_table <- compositionsArray endomorphism_type parallel_lexer @@ -112,6 +113,9 @@ module lexer = mk_lexer { #{ignoreFunction terminal_index_map} #{defEndomorphismSize parallel_lexer} + + def accept_array: [endomorphism_size]bool = + sized endomorphism_size #{futPrint accept_array} #{transitions_to_endo} diff --git a/src/Alpacc/Lexer/Encode.hs b/src/Alpacc/Lexer/Encode.hs index 56081cb..817b116 100644 --- a/src/Alpacc/Lexer/Encode.hs +++ b/src/Alpacc/Lexer/Encode.hs @@ -141,7 +141,7 @@ intParallelLexer :: ParallelDFALexer t s k -> Either String (IntParallelLexer t) intParallelLexer to_int lexer = do - parallel_lexer <- parallelLexer lexer + let parallel_lexer = parallelLexer lexer ms <- lexerMasks parallel_lexer let encode = encodeEndoData ms to_int new_compositions <- mapM encode $ compositions parallel_lexer diff --git a/src/Alpacc/Lexer/ParallelLexing.hs b/src/Alpacc/Lexer/ParallelLexing.hs index aad5b96..5170fe7 100644 --- a/src/Alpacc/Lexer/ParallelLexing.hs +++ b/src/Alpacc/Lexer/ParallelLexing.hs @@ -23,7 +23,6 @@ import Data.Bifunctor (Bifunctor (..)) import Data.Tuple (swap) import Control.Monad.State.Strict import Data.List qualified as List -import Data.Either.Extra errorMessage :: String errorMessage = "Error: Happend during Parallel Lexing 
genration, contact a maintainer." @@ -49,28 +48,26 @@ toEndoData :: ParallelDFALexer t S k -> IntMap Endomorphism -> E -> - Either String (EndoData k) -toEndoData lexer to_endo e = do - (is_producing, s) <- maybeToEither errorMessage $ toState e - let maybe_token = toToken s - pure $ + EndoData k +toEndoData lexer to_endo e = EndoData { endo = e - , token = maybe_token + , token = Map.lookup s token_map , isAccepting = s `Set.member` accept_states , isProducing = is_producing } where initial_state = initial $ fsa $ parDFALexer lexer - toState e' = do - Endomorphism endo' producing <- IntMap.lookup e' to_endo - let (a, b) = bounds endo' - if a <= initial_state && initial_state <= b - then pure (producing UArray.! initial_state - ,endo' UArray.! initial_state) - else Nothing + (is_producing, s) = + case IntMap.lookup e to_endo of + Just (Endomorphism endo' producing) | in_bound -> + (producing UArray.! initial_state + ,endo' UArray.! initial_state) + where + (a, b) = bounds endo' + in_bound = a <= initial_state && initial_state <= b + _any -> error errorMessage token_map = terminalMap $ parDFALexer lexer - toToken s = Map.lookup s token_map accept_states = accepting $ fsa $ parDFALexer lexer data ParallelLexer t e = @@ -81,6 +78,7 @@ data ParallelLexer t e = , endomorphismsSize :: !Int , dead :: !e , tokenSize :: !Int + , acceptArray :: !(UArray E Bool) } deriving (Show, Eq, Ord) data EndoCtx = @@ -242,13 +240,14 @@ endoCompositionsTable _map = compositionsTable :: (Enum t, Bounded t, Ord t, Ord k) => ParallelDFALexer t S k -> - Either String (Map Endomorphism (EndoData k) - ,Map (E, E) (EndoData k)) -compositionsTable lexer = do - a <- to_endo - b <- _compositions - pure (a, b) + (UArray E Bool + ,Map Endomorphism (EndoData k) + ,Map (E, E) (EndoData k)) +compositionsTable lexer = (accept_array, to_endo, _compositions) where + accept_array = + toAcceptArray + $ IntMap.mapWithKey (\k _ -> toEndoData' k) inv_to_endo ctx = initEndoCtx lexer toEndoData' = toEndoData 
lexer inv_to_endo connected_map = connectedMap ctx @@ -260,7 +259,7 @@ compositionsTable lexer = do vec_dead = deadEndomorphism lexer vec_identity = identityEndomorphism lexer to_endo = - mapM toEndoData' + fmap toEndoData' $ Map.insert vec_dead _dead $ Map.insert vec_identity _identity to_endo' inv_to_endo = @@ -275,7 +274,7 @@ compositionsTable lexer = do Nothing -> succ _identity Just a -> a _compositions = - mapM toEndoData' + fmap toEndoData' $ addDead _dead $ addIdentity _identity _compositions' @@ -337,6 +336,13 @@ connectedTable lexer = Set.fromList $ mapMaybe (transitionLookup s) _alphabet +toAcceptArray :: IntMap (EndoData k) -> UArray E Bool +toAcceptArray endo_map = + UArray.array (fst $ IntMap.findMin endo_map + ,fst $ IntMap.findMax endo_map) + $ IntMap.assocs + $ isAccepting <$> endo_map + initEndoCtx :: (Enum t, Bounded t, Ord t, Ord k) => ParallelDFALexer t S k -> @@ -453,27 +459,28 @@ identityEndomorphism lexer = Endomorphism s b parallelLexer :: (Ord t, Enum t, Bounded t, Ord s, Ord k) => ParallelDFALexer t s k -> - Either String (ParallelLexer t (EndoData k)) -parallelLexer lexer' = do - let lexer = enumerateParLexer 0 lexer' - (to_endo, _compositions) <- compositionsTable lexer - let endo_size = Map.size to_endo - let toEndo x = maybeToEither errorMessage $ Map.lookup x to_endo - dead_e <- toEndo $ deadEndomorphism lexer - identity_e <- toEndo $ identityEndomorphism lexer - let _unknown_transitions = - Map.fromList - $ map (,dead_e) [minBound..maxBound] - _transitions_to_endo <- - fmap (`Map.union` _unknown_transitions) - $ mapM toEndo - $ endomorphismTable lexer - pure $ - ParallelLexer - { compositions = _compositions - , endomorphisms = _transitions_to_endo - , identity = identity_e - , endomorphismsSize = endo_size - , dead = dead_e - , tokenSize = Map.size $ terminalMap $ parDFALexer lexer - } + ParallelLexer t (EndoData k) +parallelLexer lexer' = + ParallelLexer + { compositions = _compositions + , endomorphisms = _transitions_to_endo + , 
identity = identity_e + , endomorphismsSize = endo_size + , dead = dead_e + , tokenSize = Map.size $ terminalMap $ parDFALexer lexer + , acceptArray = accept_array + } + where + lexer = enumerateParLexer 0 lexer' + (accept_array, to_endo, _compositions) = compositionsTable lexer + endo_size = Map.size to_endo + toEndo x = to_endo Map.! x + dead_e = toEndo $ deadEndomorphism lexer + identity_e = toEndo $ identityEndomorphism lexer + _unknown_transitions = + Map.fromList + $ map (,dead_e) [minBound..maxBound] + _transitions_to_endo = + (`Map.union` _unknown_transitions) + $ toEndo <$> endomorphismTable lexer +