Skip to content

Commit

Permalink
TUTORIAL: Refactoring of the tokenizer in iteration 3
Browse files Browse the repository at this point in the history
  • Loading branch information
famished-tiger committed Jan 29, 2022
1 parent 836d3bf commit 7e36c00
Show file tree
Hide file tree
Showing 3 changed files with 96 additions and 89 deletions.
2 changes: 1 addition & 1 deletion tutorial/TOML/iter_2/toml_tokenizer.rb
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ def _next_token
token = nil

# Loop until end of input reached or token found
until scanner.eos? || token
until token || scanner.eos?
nl_found = scanner.skip(PATT_NEWLINE)
if nl_found
next_line
Expand Down
38 changes: 19 additions & 19 deletions tutorial/TOML/iter_3/spec/toml_tokenizer_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def match_expectations(tokenizer, theExpectations)
# ]
# cases.each do |(token, lexeme)|
# subject.start_with(lexeme)
# subject.send(:equal_scanned)
# subject.send(:equal_found)
# expectations = [[token, lexeme]]
# match_expectations(subject, expectations)
# expect(subject.state).to eq(:expecting_value)
Expand All @@ -96,7 +96,7 @@ def match_expectations(tokenizer, theExpectations)
it 'should recognize a boolean literal' do
[['true', TrueClass], ['false', FalseClass]].each do |(str, klass)|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
token = subject.tokens[0]
expect(token).to be_kind_of(Rley::Lexical::Literal)
expect(token.terminal).to eq('BOOLEAN')
Expand All @@ -122,7 +122,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |(str, val)|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
int_token = subject.tokens[0]
expect(int_token).to be_kind_of(Rley::Lexical::Literal)
expect(int_token.terminal).to eq('INTEGER')
Expand All @@ -144,7 +144,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |(str, val)|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
int_token = subject.tokens[0]
expect(int_token).to be_kind_of(Rley::Lexical::Literal)
expect(int_token.terminal).to eq('INTEGER')
Expand All @@ -166,7 +166,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |(str, val)|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
int_token = subject.tokens[0]
expect(int_token).to be_kind_of(Rley::Lexical::Literal)
expect(int_token.terminal).to eq('INTEGER')
Expand All @@ -187,7 +187,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |(str, val)|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
int_token = subject.tokens[0]
expect(int_token).to be_kind_of(Rley::Lexical::Literal)
expect(int_token.terminal).to eq('INTEGER')
Expand All @@ -211,7 +211,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |(str, val)|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
float_token = subject.tokens[0]
expect(float_token).to be_kind_of(Rley::Lexical::Literal)
expect(float_token.terminal).to eq('FLOAT')
Expand All @@ -230,7 +230,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |(str, val)|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
float_token = subject.tokens[0]
expect(float_token).to be_kind_of(Rley::Lexical::Literal)
expect(float_token.terminal).to eq('FLOAT')
Expand All @@ -249,7 +249,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |(str, _val)|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
float_token = subject.tokens[0]
expect(float_token).to be_kind_of(Rley::Lexical::Literal)
expect(float_token.terminal).to eq('FLOAT')
Expand All @@ -262,7 +262,7 @@ def match_expectations(tokenizer, theExpectations)
it 'should recognize offset date time' do
str = '1979-05-27T05:32:07.999999-07:00'
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
date_token = subject.tokens[0]
expect(date_token).to be_kind_of(Rley::Lexical::Literal)
expect(date_token.terminal).to eq('OFFSET-DATE-TIME')
Expand All @@ -279,7 +279,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |(str, val)|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
date_token = subject.tokens[0]
expect(date_token).to be_kind_of(Rley::Lexical::Literal)
expect(date_token.terminal).to eq('LOCAL-DATE-TIME')
Expand All @@ -297,7 +297,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |(str, val)|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
date_token = subject.tokens[0]
expect(date_token).to be_kind_of(Rley::Lexical::Literal)
expect(date_token.terminal).to eq('LOCAL-DATE')
Expand All @@ -315,7 +315,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |(str, val)|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
time_token = subject.tokens[0]
expect(time_token).to be_kind_of(Rley::Lexical::Literal)
expect(time_token.terminal).to eq('LOCAL-TIME')
Expand All @@ -341,7 +341,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |str|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
token = subject.tokens[0]
expect(token).to be_kind_of(Rley::Lexical::Literal)
expect(token.terminal).to eq('STRING')
Expand All @@ -364,7 +364,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |str|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
token = subject.tokens[0]
expect(token).to be_kind_of(Rley::Lexical::Literal)
expect(token.terminal).to eq('STRING')
Expand All @@ -384,7 +384,7 @@ def match_expectations(tokenizer, theExpectations)
'''
TOML
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
token = subject.tokens[0]
expect(token).to be_kind_of(Rley::Lexical::Literal)
expect(token.terminal).to eq('STRING')
Expand All @@ -404,7 +404,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |str, expected|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
token = subject.tokens[0]
expect(token).to be_kind_of(Rley::Lexical::Literal)
expect(token.terminal).to eq('STRING')
Expand All @@ -423,7 +423,7 @@ def match_expectations(tokenizer, theExpectations)
]
cases.each do |str, expected|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
token = subject.tokens[0]
expect(token).to be_kind_of(Rley::Lexical::Literal)
expect(token.terminal).to eq('STRING')
Expand Down Expand Up @@ -452,7 +452,7 @@ def match_expectations(tokenizer, theExpectations)
TOML
[str2, str3].each do |str|
subject.start_with(str)
subject.send(:equal_scanned)
subject.send(:equal_found)
token = subject.tokens[0]
expect(token).to be_kind_of(Rley::Lexical::Literal)
expect(token.terminal).to eq('STRING')
Expand Down
Loading

0 comments on commit 7e36c00

Please sign in to comment.