Skip to content

Commit

Permalink
Let me see what I have changed.
Browse files Browse the repository at this point in the history
  • Loading branch information
WilliamDue committed Nov 17, 2024
1 parent b228bad commit c924980
Show file tree
Hide file tree
Showing 5 changed files with 73 additions and 56 deletions.
11 changes: 4 additions & 7 deletions futhark/lexer.fut
Original file line number Diff line number Diff line change
Expand Up @@ -12,16 +12,15 @@ module type lexer_context = {
val endomorphism_size: i64
val endo_mask: endomorphism_module.t
val terminal_mask: endomorphism_module.t
val accept_mask: endomorphism_module.t
val produce_mask: endomorphism_module.t
val endo_offset: endomorphism_module.t
val terminal_offset: endomorphism_module.t
val accept_offset: endomorphism_module.t
val produce_offset: endomorphism_module.t
val is_ignore: terminal_module.t -> bool
val transitions_to_endomorphisms: [256]endomorphism_module.t
val compositions: [endomorphism_size * endomorphism_size]endomorphism_module.t
val dead_terminal: terminal_module.t
val accept_array: [endomorphism_size]bool
}

module mk_lexer(L: lexer_context) = {
Expand All @@ -34,11 +33,6 @@ module mk_lexer(L: lexer_context) = {
endomorphism =
let a' = mask L.endomorphism_module.& a
in a' L.endomorphism_module.>> offset

def is_accept (a: endomorphism): bool =
get_value L.accept_mask L.accept_offset a
|> L.endomorphism_module.to_i64
|> bool.i64

def is_produce (a: endomorphism): bool =
get_value L.produce_mask L.produce_offset a
Expand All @@ -53,6 +47,9 @@ module mk_lexer(L: lexer_context) = {
-- Recover the endomorphism's dense table index by masking out and
-- shifting down its packed index bits, then widening to i64.
def to_index (a: endomorphism): i64 =
get_value L.endo_mask L.endo_offset a
|> L.endomorphism_module.to_i64

-- An endomorphism is accepting iff the precomputed accept_array entry
-- at its dense index is true (replaces the old packed accept-bit lookup).
def is_accept (a: endomorphism): bool =
L.accept_array[to_index a]

def compose (a: endomorphism) (b: endomorphism): endomorphism =
#[unsafe]
Expand Down
11 changes: 10 additions & 1 deletion src/Alpacc/Generator/Futhark/FutPrinter.hs
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@ where
import Data.Foldable
import Data.List qualified as List
import Data.String.Interpolate (i)
import Data.Array (Array)
import Data.Array as Array hiding (Array)
import Data.Array.Unboxed (UArray)
import Data.Array.IArray as IArray
import Alpacc.Types
import Numeric.Natural

Expand Down Expand Up @@ -38,6 +39,11 @@ instance FutPrinter String where
-- | Machine integers render verbatim via their 'Show' instance.
instance FutPrinter Int where
  futPrint n = show n

-- | Booleans render as the Futhark literals @true@ and @false@.
instance FutPrinter Bool where
  futPrint b = if b then "true" else "false"


-- | Arbitrary-precision naturals render verbatim via 'Show'.
instance FutPrinter Natural where
  futPrint n = show n

Expand Down Expand Up @@ -65,6 +71,9 @@ instance (FutPrinter a) => FutPrinter (NTuple a) where
-- | Boxed arrays render as the Futhark literal of their element list.
instance (FutPrinter a) => FutPrinter (Array i a) where
  futPrint arr = futPrint (Array.elems arr)

-- | Unboxed arrays render as the Futhark literal of their element list;
-- the 'IArray' constraint is what lets us extract the elements.
instance (FutPrinter a, IArray UArray a, Ix i) => FutPrinter (UArray i a) where
  futPrint arr = futPrint (IArray.elems arr)

-- | Optional values map onto Futhark's sum-type syntax:
-- @#some x@ for a present value, @#none@ for absence.
instance (FutPrinter a) => FutPrinter (Maybe a) where
  futPrint = maybe "#none" (\x -> "#some " ++ futPrint x)
4 changes: 4 additions & 0 deletions src/Alpacc/Generator/Futhark/Lexer.hs
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@ generateLexer lexer terminal_index_map terminal_type = do
} = parMasks int_parallel_lexer
let parallel_lexer = parLexer int_parallel_lexer
let _identity = identity parallel_lexer
let accept_array = acceptArray parallel_lexer
endomorphism_type <- extEndoType parallel_lexer
transitions_to_endo <- transitionsToEndomorphismsArray parallel_lexer
compositions_table <- compositionsArray endomorphism_type parallel_lexer
Expand All @@ -112,6 +113,9 @@ module lexer = mk_lexer {
#{ignoreFunction terminal_index_map}

#{defEndomorphismSize parallel_lexer}

def accept_array: [endomorphism_size]bool =
sized endomorphism_size #{futPrint accept_array}

#{transitions_to_endo}

Expand Down
2 changes: 1 addition & 1 deletion src/Alpacc/Lexer/Encode.hs
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ intParallelLexer ::
ParallelDFALexer t s k ->
Either String (IntParallelLexer t)
intParallelLexer to_int lexer = do
parallel_lexer <- parallelLexer lexer
let parallel_lexer = parallelLexer lexer
ms <- lexerMasks parallel_lexer
let encode = encodeEndoData ms to_int
new_compositions <- mapM encode $ compositions parallel_lexer
Expand Down
101 changes: 54 additions & 47 deletions src/Alpacc/Lexer/ParallelLexing.hs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ import Data.Bifunctor (Bifunctor (..))
import Data.Tuple (swap)
import Control.Monad.State.Strict
import Data.List qualified as List
import Data.Either.Extra

-- | Generic failure message for internal invariant violations during
-- parallel-lexer generation; it is user-facing, so spelling matters.
-- Fixes typos in the original ("Happend", "genration").
errorMessage :: String
errorMessage = "Error: Happened during Parallel Lexing generation, contact a maintainer."
Expand All @@ -49,28 +48,26 @@ toEndoData ::
ParallelDFALexer t S k ->
IntMap Endomorphism ->
E ->
Either String (EndoData k)
toEndoData lexer to_endo e = do
(is_producing, s) <- maybeToEither errorMessage $ toState e
let maybe_token = toToken s
pure $
EndoData k
toEndoData lexer to_endo e =
EndoData
{ endo = e
, token = maybe_token
, token = Map.lookup s token_map
, isAccepting = s `Set.member` accept_states
, isProducing = is_producing
}
where
initial_state = initial $ fsa $ parDFALexer lexer
toState e' = do
Endomorphism endo' producing <- IntMap.lookup e' to_endo
let (a, b) = bounds endo'
if a <= initial_state && initial_state <= b
then pure (producing UArray.! initial_state
,endo' UArray.! initial_state)
else Nothing
(is_producing, s) =
case IntMap.lookup e to_endo of
Just (Endomorphism endo' producing) | in_bound ->
(producing UArray.! initial_state
,endo' UArray.! initial_state)
where
(a, b) = bounds endo'
in_bound = a <= initial_state && initial_state <= b
_any -> error errorMessage
token_map = terminalMap $ parDFALexer lexer
toToken s = Map.lookup s token_map
accept_states = accepting $ fsa $ parDFALexer lexer

data ParallelLexer t e =
Expand All @@ -81,6 +78,7 @@ data ParallelLexer t e =
, endomorphismsSize :: !Int
, dead :: !e
, tokenSize :: !Int
, acceptArray :: !(UArray E Bool)
} deriving (Show, Eq, Ord)

data EndoCtx =
Expand Down Expand Up @@ -242,13 +240,14 @@ endoCompositionsTable _map =
compositionsTable ::
(Enum t, Bounded t, Ord t, Ord k) =>
ParallelDFALexer t S k ->
Either String (Map Endomorphism (EndoData k)
,Map (E, E) (EndoData k))
compositionsTable lexer = do
a <- to_endo
b <- _compositions
pure (a, b)
(UArray E Bool
,Map Endomorphism (EndoData k)
,Map (E, E) (EndoData k))
compositionsTable lexer = (accept_array, to_endo, _compositions)
where
accept_array =
toAcceptArray
$ IntMap.mapWithKey (\k _ -> toEndoData' k) inv_to_endo
ctx = initEndoCtx lexer
toEndoData' = toEndoData lexer inv_to_endo
connected_map = connectedMap ctx
Expand All @@ -260,7 +259,7 @@ compositionsTable lexer = do
vec_dead = deadEndomorphism lexer
vec_identity = identityEndomorphism lexer
to_endo =
mapM toEndoData'
fmap toEndoData'
$ Map.insert vec_dead _dead
$ Map.insert vec_identity _identity to_endo'
inv_to_endo =
Expand All @@ -275,7 +274,7 @@ compositionsTable lexer = do
Nothing -> succ _identity
Just a -> a
_compositions =
mapM toEndoData'
fmap toEndoData'
$ addDead _dead
$ addIdentity _identity _compositions'

Expand Down Expand Up @@ -337,6 +336,13 @@ connectedTable lexer =
Set.fromList
$ mapMaybe (transitionLookup s) _alphabet

-- | Tabulate, for every endomorphism index, whether it is accepting,
-- producing the lookup table consumed by the generated Futhark lexer.
--
-- NOTE(review): 'IntMap.findMin'/'findMax' are partial, so this assumes a
-- non-empty map, and 'UArray.array' leaves any gap in the key range as an
-- error-on-access element — presumably callers always supply a contiguous,
-- non-empty index set; confirm against the construction site.
toAcceptArray :: IntMap (EndoData k) -> UArray E Bool
toAcceptArray endo_map =
  UArray.array (lo, hi)
  $ IntMap.assocs
  $ fmap isAccepting endo_map
  where
    lo = fst $ IntMap.findMin endo_map
    hi = fst $ IntMap.findMax endo_map

initEndoCtx ::
(Enum t, Bounded t, Ord t, Ord k) =>
ParallelDFALexer t S k ->
Expand Down Expand Up @@ -453,27 +459,28 @@ identityEndomorphism lexer = Endomorphism s b
parallelLexer ::
(Ord t, Enum t, Bounded t, Ord s, Ord k) =>
ParallelDFALexer t s k ->
Either String (ParallelLexer t (EndoData k))
parallelLexer lexer' = do
let lexer = enumerateParLexer 0 lexer'
(to_endo, _compositions) <- compositionsTable lexer
let endo_size = Map.size to_endo
let toEndo x = maybeToEither errorMessage $ Map.lookup x to_endo
dead_e <- toEndo $ deadEndomorphism lexer
identity_e <- toEndo $ identityEndomorphism lexer
let _unknown_transitions =
Map.fromList
$ map (,dead_e) [minBound..maxBound]
_transitions_to_endo <-
fmap (`Map.union` _unknown_transitions)
$ mapM toEndo
$ endomorphismTable lexer
pure $
ParallelLexer
{ compositions = _compositions
, endomorphisms = _transitions_to_endo
, identity = identity_e
, endomorphismsSize = endo_size
, dead = dead_e
, tokenSize = Map.size $ terminalMap $ parDFALexer lexer
}
ParallelLexer t (EndoData k)
parallelLexer lexer' =
ParallelLexer
{ compositions = _compositions
, endomorphisms = _transitions_to_endo
, identity = identity_e
, endomorphismsSize = endo_size
, dead = dead_e
, tokenSize = Map.size $ terminalMap $ parDFALexer lexer
, acceptArray = accept_array
}
where
lexer = enumerateParLexer 0 lexer'
(accept_array, to_endo, _compositions) = compositionsTable lexer
endo_size = Map.size to_endo
toEndo x = to_endo Map.! x
dead_e = toEndo $ deadEndomorphism lexer
identity_e = toEndo $ identityEndomorphism lexer
_unknown_transitions =
Map.fromList
$ map (,dead_e) [minBound..maxBound]
_transitions_to_endo =
(`Map.union` _unknown_transitions)
$ toEndo <$> endomorphismTable lexer

0 comments on commit c924980

Please sign in to comment.