Removed dead code.
WilliamDue committed Jun 11, 2023
1 parent 669952a commit 023c202
Showing 5 changed files with 28 additions and 34 deletions.
4 changes: 3 additions & 1 deletion .github/workflows/main.yml
@@ -72,7 +72,9 @@ jobs:
run: |
python3 futhark-tests \
--test-type=stuck \
--grammar-size=1000
--grammar-size=1000 \
--lookback=1 \
--lookahead=1
- name: Run parse test
shell: bash
2 changes: 1 addition & 1 deletion README.md
@@ -22,7 +22,7 @@ cabal run parallel-parser -- grammars/paper_grammar.cg -q 1 -k 1
```
This will create the Futhark source file `paper_grammar.fut`, which contains the actual parser as a function `parse`. This function takes as input an array of indexes assigned to terminals and maps it to an array of indexes assigned to productions. The indexes are assigned in the order the symbols are defined. For this example the terminal indexes are `0` for `a`, `1` for `b`, `2` for `c` and `3` for `$`. For the productions, `0` is `T' -> T $`, `1` is `T -> R`, `2` is `T -> a T c`, `3` is `R -> ` and `4` is `R -> b R`.

A leftmost derivable string from this grammar is `aabbbcc$` which corresponds to the indexes `[0, 0, 1, 1, 1, 2, 2, 3]`. When parsing this array the resulting productions sequence is `[0, 2, 2, 1, 4, 4, 4, 3]`.
A leftmost derivable string from this grammar is `aabbbcc$` which corresponds to the indexes `[0, 0, 1, 1, 1, 2, 2, 3]`. When parsing this array the resulting production sequence is `[0, 2, 2, 1, 4, 4, 4, 3]`.

If an input is given that cannot be parsed then the empty array is returned.
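As an illustration of this encoding (a minimal sketch, not part of the repository or of the generated parser), the index array for the example string can be built with a small Python helper; the mapping and the expected output are exactly the ones stated above, while `terminal_index` and `to_indexes` are hypothetical names.

```python
# Illustration of the index encoding described above; `terminal_index` and
# `to_indexes` are hypothetical helpers, not part of the generated parser.
terminal_index = {'a': 0, 'b': 1, 'c': 2, '$': 3}

def to_indexes(s: str) -> list[int]:
    """Encode a terminal string such as 'aabbbcc$' as the index array `parse` expects."""
    return [terminal_index[t] for t in s]

assert to_indexes('aabbbcc$') == [0, 0, 1, 1, 1, 2, 2, 3]
# Feeding this array to the generated `parse` function should produce the
# production sequence [0, 2, 2, 1, 4, 4, 4, 3] from the example above.
```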

2 changes: 1 addition & 1 deletion app/Main.hs
@@ -100,7 +100,7 @@ grammarError grammar
| not $ null p_dups = Just [i|The given grammar contains duplicate productions because of #{p_dups_str}.|]
| isLeftRecursive grammar = Just [i|The given grammar contains left recursion.|]
| not $ null left_factors = Just [i|The given grammar contains productions that have common left factors due to the following nonterminals #{left_factors_str}.|]
| rightNullableDoubleNT grammar = Just [i|The given grammar is able to derive two consecutive nonterminals that are the same and nullable.|]
| rightNullableDoubleNT grammar = Just [i|The given grammar is able to derive two consecutive nonterminals that are the same where the tail is nullable.|]
| not $ null nonproductive = Just [i|The given grammar contains nonproductive productions due to the following nonterminals #{nonproductive_str}.|]
| otherwise = Nothing
where
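To make the restrictions behind these error messages concrete, here is a minimal, hypothetical sketch of the simplest of them, direct left recursion; the dict-based grammar encoding and the `has_direct_left_recursion` helper are assumptions made for illustration and do not mirror the Haskell implementation above.

```python
# Toy illustration of direct left recursion; not the repository's representation.
from typing import Dict, List

Grammar = Dict[str, List[List[str]]]  # nonterminal -> list of right-hand sides

def has_direct_left_recursion(grammar: Grammar) -> bool:
    # A production A -> A ... starts with its own left-hand side.
    return any(rhs and rhs[0] == nt
               for nt, rhss in grammar.items()
               for rhs in rhss)

# `E -> E + T` begins with `E` itself, so this grammar would be rejected.
expr_grammar = {'E': [['E', '+', 'T'], ['T']], 'T': [['a']]}
assert has_direct_left_recursion(expr_grammar)
```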
32 changes: 23 additions & 9 deletions futhark-tests/__main__.py
@@ -383,6 +383,7 @@ def generate_random_llp_grammar(
no_duplicates: bool = True,
quiet: bool = False):

generated_count = 0
while True:
filename = f'{name}.fut'
grammar = generate_grammar(
@@ -409,32 +410,41 @@
could_create = True
except subprocess.CalledProcessError:
pass


generated_count += 1

if os.path.exists(f'{filename}') and could_create:
if not quiet:
print(f'{filename} contains a parser for the grammar: {grammar}.')
return name, grammar
return name, grammar, generated_count

def stuck_test(number_of_grammars: int):
def stuck_test(number_of_grammars: int, q: int = 1, k: int = 1):
count = 0
for i in range(number_of_grammars):
with DeleteNew():
filename = f'temp_{i}'
generate_random_llp_grammar(
_, _, generated_count = generate_random_llp_grammar(
filename,
3,
3,
3,
3,
no_direct_left_recursion=True,
no_duplicates=True
q=q,
k=k,
no_direct_left_recursion=False,
no_duplicates=False
)
count += generated_count
time.sleep(0.05)
acceptance_percent = 100 * number_of_grammars / count
print(f'{acceptance_percent:.02f}% of grammars were accepted.')

def stuck_test_timed(number_of_grammars: int):
def stuck_test_timed(number_of_grammars: int, q: int = 1, k: int = 1):
p = multiprocessing.Process(
target=stuck_test,
name="stuck_check",
args=(number_of_grammars,)
args=(number_of_grammars, q, k)
)
p.start()
p.join(18000) # 5 hours.
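As a worked illustration of the acceptance rate computed in `stuck_test` above (hypothetical numbers, not output from an actual run): if 1000 grammars are requested and 1250 candidate grammars have to be generated before 1000 of them are accepted, the test prints `80.00% of grammars were accepted.`, since 100 * 1000 / 1250 = 80.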
@@ -502,7 +512,7 @@ def generate_parser_test(
) for i in range(number_of_grammars)
)

for name, grammar in grammars:
for name, grammar, _ in grammars:
valid_strings = grammar.leftmost_derivations_index(
valid_string_length
)
@@ -557,8 +567,12 @@ def main():
), "The parallel-parser binaries does not exists."
elif args.test_type == 'stuck':
assert args.grammar_size is not None, "grammar-size must be set."
assert args.lookback is not None, "lookback must be set."
assert args.lookahead is not None, "lookahead must be set."
assert 0 == stuck_test_timed(
number_of_grammars=args.grammar_size
number_of_grammars=args.grammar_size,
q=args.lookback,
k=args.lookahead
), "The parser probably got stuck while creating some grammar."
elif args.test_type == 'parse':
assert args.grammar_size is not None, "grammar-size must be set."
22 changes: 0 additions & 22 deletions src/ParallelParser/LLP.hs
@@ -845,34 +845,12 @@ llpParserTable = do
let psls_table = psls collection -- filterAdmissiblePairs q k grammar $
let unwrapped = (\[a] -> a) . Set.toList <$> psls_table
let parsed = Map.mapWithKey auxiliary unwrapped
is_ambiguous <- isAmbiguous
let result
| is_ambiguous = Nothing
| isNothing maybe_table = Nothing
| any ((/= 1) . Set.size) psls_table = Nothing
| otherwise = Just parsed
return result

isAmbiguous ::
(Ord nt, Ord t, Show nt, Show t, NFData t, NFData nt) =>
State (LlpContext nt t) Bool
isAmbiguous = do
ctx <- get
let grammar = theGrammar ctx
let prods = productions grammar
let prods_map = toProductionsMap prods
let isNullable = nullable grammar
let nullable_prods_map = List.filter isNullable <$> prods_map
let nullable_prods_count_map = List.length <$> nullable_prods_map
let over_one_nullable_prod = any (1<) nullable_prods_count_map
first_set_map <- mapM (mapM useFirst) prods_map
let firsts_union = unionsIfDisjoint <$> first_set_map
let overlapping_first_sets = any isNothing firsts_union
let not_nullable_prods_map = List.filter (not . isNullable) <$> prods_map
return $
over_one_nullable_prod ||
overlapping_first_sets

-- | Given a list create all the pairs with q lookback and k lookahead which
-- will be used as keys in the table.
pairLookup :: (Ord t, Show t) => Map ([t], [t]) v -> Int -> Int -> [t] -> [Maybe v]
