Skip to content

Commit c3b3d2e

Browse files
committed
Add some tests
1 parent 8f23c9e commit c3b3d2e

File tree

2 files changed

+124
-14
lines changed

2 files changed

+124
-14
lines changed

test/lib/grammar_test.exs

+91
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,95 @@ defmodule GrammarTest do
22
use ExUnit.Case
33

44
doctest Grammar
5+
6+
alias Grammar.SpecialTokens.Number
7+
alias Grammar.Tokenizer
8+
9+
test "strict matching of token" do
  # A prototype with a fixed value (42) accepts only that exact number,
  # while an empty prototype accepts any number.
  parser =
    Grammar.new()
    |> Grammar.add_clause(:start, [%Number{number: 42}, %Number{}], fn [first, second] -> {first, second} end)
    |> Grammar.prepare!()
    |> Grammar.start(:start)

  # The second slot is loose, so any number is accepted there.
  assert {:ok, {%Number{number: 42}, %Number{number: 42}}} = Grammar.loop(parser, Tokenizer.new("42 42"))
  assert {:ok, {%Number{number: 42}, %Number{number: 1000}}} = Grammar.loop(parser, Tokenizer.new("42 1000"))

  # The first slot is strict, so anything but 42 is rejected at position {1, 1}.
  assert {:error, {1, 1}, :no_clause_matched} = Grammar.loop(parser, Tokenizer.new("43 1000"))
end
23+
24+
test "matching prototypes and extracted tokens" do
  # Clause ordering matters: if :non_constant were declared before :constant,
  # its loose prototype would always win.
  parser =
    Grammar.new()
    |> Grammar.add_clause(:choose, [:constant], fn [value] -> {:constant, value} end)
    |> Grammar.add_clause(:choose, [:non_constant], fn [value] -> {:non_constant, value} end)
    |> Grammar.add_clause(:constant, [%Number{number: 12}], fn tokens -> Enum.at(tokens, 0) end)
    |> Grammar.add_clause(:non_constant, [%Number{}], fn tokens -> Enum.at(tokens, 0) end)
    |> Grammar.prepare!()
    |> Grammar.start(:choose)

  # Anything but 12 falls through to the loose prototype.
  assert {:ok, {:non_constant, %Number{number: 123}}} = Grammar.loop(parser, Tokenizer.new("123"))
  # 12 is captured by the strict :constant clause, declared first.
  assert {:ok, {:constant, %Number{number: 12}}} = Grammar.loop(parser, Tokenizer.new("12"))
end
38+
39+
test "tokens drive through rules" do
  # :foo delegates to :bar, then :neh, then falls back to a loose Number prototype.
  parser =
    Grammar.new()
    |> Grammar.add_clause(:foo, [:bar], fn [token] -> {:foo_bar, token} end)
    |> Grammar.add_clause(:foo, [:neh], fn [token] -> {:foo_neh, token} end)
    |> Grammar.add_clause(:foo, [%Number{}], fn [token] -> {:foo_number, token} end)
    |> Grammar.add_clause(:bar, ["bar1"], fn ["bar1" = token] -> token end)
    |> Grammar.add_clause(:bar, ["bar2"], fn ["bar2" = token] -> token end)
    |> Grammar.add_clause(:bar, [~r/bar[0-9]+/], fn [token] -> "caught #{token}" end)
    |> Grammar.add_clause(:neh, ["neh1"], fn ["neh1" = token] -> token end)
    |> Grammar.add_clause(:neh, ["neh2"], fn ["neh2" = token] -> token end)
    |> Grammar.add_clause(:neh, [%Number{number: 12}], fn [token] -> token end)
    |> Grammar.prepare!()
    |> Grammar.start(:foo)

  # Each rule's first-set lists every token that can start one of its clauses.
  assert parser.firsts == %{
           foo: [
             ["bar1", "bar2", ~r/bar[0-9]+/],
             ["neh1", "neh2", %Number{number: 12}],
             [%Number{}]
           ],
           bar: [["bar1"], ["bar2"], [~r/bar[0-9]+/]],
           neh: [["neh1"], ["neh2"], [%Number{number: 12}]]
         }

  # Inputs whose first token belongs to :bar are routed through the :bar clauses.
  assert {:ok, {:foo_bar, "bar1"}} = Grammar.loop(parser, Tokenizer.new("bar1"))
  assert {:ok, {:foo_bar, "bar2"}} = Grammar.loop(parser, Tokenizer.new("bar2"))
  assert {:ok, {:foo_bar, "caught bar42"}} = Grammar.loop(parser, Tokenizer.new("bar42"))

  # Inputs whose first token belongs to :neh — including the constant 12 — use :neh.
  assert {:ok, {:foo_neh, "neh1"}} = Grammar.loop(parser, Tokenizer.new("neh1"))
  assert {:ok, {:foo_neh, "neh2"}} = Grammar.loop(parser, Tokenizer.new("neh2"))
  assert {:ok, {:foo_neh, %Number{number: 12}}} = Grammar.loop(parser, Tokenizer.new("12"))

  # Any other number falls through to the loose %Number{} clause of :foo.
  assert {:ok, {:foo_number, %Number{number: 13}}} = Grammar.loop(parser, Tokenizer.new("13"))
end
74+
75+
test "ambiguities on token are resolved by clauses ordering" do
  # "foo" matches both the literal "foo" clause and the :ident regex clause;
  # whichever :foo clause is declared first wins.

  # NOTE(review): the original body asserted the bare literal `{:ok, "ident foo"}`
  # (always truthy) and discarded the pipeline result, so the test could never
  # fail. The result is now bound and pattern-matched.

  # :ident declared first -> "foo" is read as an identifier.
  result =
    Grammar.new()
    |> Grammar.add_clause(:foo, [:ident], fn [ident] -> "ident #{ident}" end)
    |> Grammar.add_clause(:foo, ["foo"], fn _ -> "foo" end)
    |> Grammar.add_clause(:ident, [~r/[a-z]+/], & &1)
    |> Grammar.prepare!()
    |> Grammar.start(:foo)
    |> Grammar.loop(Tokenizer.new("foo"))

  assert {:ok, "ident foo"} = result

  # Literal "foo" declared first -> it shadows the :ident clause.
  result =
    Grammar.new()
    |> Grammar.add_clause(:foo, ["foo"], fn _ -> "foo" end)
    |> Grammar.add_clause(:foo, [:ident], fn [ident] -> "ident #{ident}" end)
    |> Grammar.add_clause(:ident, [~r/[a-z]+/], & &1)
    |> Grammar.prepare!()
    |> Grammar.start(:foo)
    |> Grammar.loop(Tokenizer.new("foo"))

  assert {:ok, "foo"} = result
end
596
end

test/support/special_tokens.ex

+33-14
Original file line numberDiff line numberDiff line change
@@ -39,33 +39,52 @@ defmodule Grammar.SpecialTokens do
3939
end
4040

4141
defmodule Number do
  @moduledoc """
  Token representing an integer number.

  It can serve either as a prototype for one constant number or as a
  placeholder for any number, depending on whether the `number` field is set.
  """

  defstruct number: nil

  @type t :: %__MODULE__{number: integer() | nil}
end
4553

4654
defimpl Grammar.Tokenizer.TokenExtractor, for: Number do
  # A Number token is a run of leading digits at the head of the input.
  @pattern ~r/^[0-9]+/

  alias Grammar.Tokenizer.TokenExtractorHelper

  # Reads a number from the head of `input_string` and returns `{token, length}`,
  # or `nil` when no digits are found or the extracted value is not compatible
  # with the prototype (a prototype with a set `number` accepts only that value).
  def try_read(token_prototype, input_string) do
    # `build_token_from_string/1` cannot fail once digits were extracted, so it
    # is a plain `=` binding — `<-` on an always-matching pattern hides intent.
    with {extracted_string, length} <- read_token_string(input_string),
         token = build_token_from_string(extracted_string),
         true <- value_compatible?(token_prototype, token) do
      {token, length}
    else
      _ ->
        nil
    end
  end

  # A loose prototype (nil number) matches any Number token; a strict prototype
  # matches only the exact same number. Anything else does not match.
  def match?(%Number{number: nil}, %Number{}), do: true
  def match?(%Number{number: number}, %Number{number: number}), do: true
  def match?(_token_prototype, _token), do: false

  # Extracts the leading digits of the input together with the consumed length.
  @spec read_token_string(String.t()) :: {String.t(), integer()} | nil
  defp read_token_string(input_string), do: TokenExtractorHelper.try_read_from_regex(@pattern, input_string)

  # Builds a Number token from the extracted digit string.
  @spec build_token_from_string(String.t()) :: Number.t()
  defp build_token_from_string(extracted_string) do
    extracted_string
    |> String.to_integer()
    |> then(&struct(Number, number: &1))
  end

  # Same compatibility rule as `match?/2`, restricted to Number tokens: a loose
  # prototype accepts any value, a strict one requires equality.
  @spec value_compatible?(Number.t(), Number.t()) :: boolean()
  defp value_compatible?(%{number: nil}, _token), do: true
  defp value_compatible?(%{number: value}, %{number: value}), do: true
  defp value_compatible?(_token_prototype, _token), do: false
end
7089

7190
defimpl String.Chars, for: Number do

0 commit comments

Comments
 (0)