Mirror of https://gitea.invidious.io/iv-org/shard-ameba.git, synced 2024-08-15 00:53:29 +00:00
Refactor tokenizer
This commit is contained in:
parent fedc29ceb6
commit e383ec17c2
3 changed files with 45 additions and 47 deletions
@@ -1,17 +1,17 @@
 require "../../spec_helper"
 
-private def it_transforms(number, expected)
-  it "transforms large number #{number}" do
-    s = Ameba::Source.new number
-    Ameba::Rules::LargeNumbers.new.catch(s).should_not be_valid
-    s.errors.first.message.should contain expected
-  end
-end
-
-module Ameba::Rules
-  subject = LargeNumbers.new
-
-  describe LargeNumbers do
+module Ameba
+  subject = Rules::LargeNumbers.new
+
+  private def it_transforms(number, expected)
+    it "transforms large number #{number}" do
+      s = Source.new number
+      Rules::LargeNumbers.new.catch(s).should_not be_valid
+      s.errors.first.message.should contain expected
+    end
+  end
+
+  describe Rules::LargeNumbers do
     it "passes if large number does not require underscore" do
       s = Source.new %q(
         1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
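The it_transforms helper above defines one spec example per call; a call such as the following (placed alongside the other examples; the numbers are illustrative, not taken from this diff) asserts that the LargeNumbers rule reports the literal and that its error message contains the expected underscored form:

  it_transforms "1000000", "1_000_000"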
@@ -1,42 +1,44 @@
 require "../spec_helper"
 
-private def it_tokenizes(str, expected)
-  it "tokenizes #{str}" do
-    ([] of Symbol).tap do |token_types|
-      Ameba::Tokenizer.new(Ameba::Source.new str).run do |token|
-        token_types << token.type
-      end.should be_true
-    end.should eq expected
-  end
-end
-
 module Ameba
+  private def it_tokenizes(str, expected)
+    it "tokenizes #{str}" do
+      ([] of Symbol).tap do |token_types|
+        Tokenizer.new(Source.new str)
+          .run { |token| token_types << token.type }
+          .should be_true
+      end.should eq expected
+    end
+  end
+
   describe Tokenizer do
     describe "#run" do
-      it_tokenizes %("string"), %i(STRING)
-      it_tokenizes %(100), %i(NUMBER)
-      it_tokenizes %('a'), %i(CHAR)
-      it_tokenizes %([]), %i([])
-      it_tokenizes %([] of String), %i([] SPACE IDENT SPACE CONST)
-      it_tokenizes %q("str #{3}"), %i(STRING NUMBER)
+      it_tokenizes %("string"), %i(DELIMITER_START STRING DELIMITER_END EOF)
+      it_tokenizes %(100), %i(NUMBER EOF)
+      it_tokenizes %('a'), %i(CHAR EOF)
+      it_tokenizes %([]), %i([] EOF)
+      it_tokenizes %([] of String), %i([] SPACE IDENT SPACE CONST EOF)
+      it_tokenizes %q("str #{3}"), %i(
+        DELIMITER_START STRING INTERPOLATION_START NUMBER } DELIMITER_END EOF
+      )
 
       it_tokenizes %(%w(1 2)),
-        %i(STRING_ARRAY_START STRING STRING STRING_ARRAY_END)
+        %i(STRING_ARRAY_START STRING STRING STRING_ARRAY_END EOF)
 
       it_tokenizes %(%i(one two)),
-        %i(SYMBOL_ARRAY_START STRING STRING STRING_ARRAY_END)
+        %i(SYMBOL_ARRAY_START STRING STRING STRING_ARRAY_END EOF)
 
       it_tokenizes %(
         class A
           def method
             puts "hello"
           end
         end
-      ), [
-        :NEWLINE, :SPACE, :IDENT, :SPACE, :CONST, :NEWLINE, :SPACE, :IDENT,
-        :SPACE, :IDENT, :NEWLINE, :SPACE, :IDENT, :SPACE, :STRING, :NEWLINE,
-        :SPACE, :IDENT, :NEWLINE, :SPACE, :IDENT, :NEWLINE, :SPACE,
-      ]
+      ), %i(
+        NEWLINE SPACE IDENT SPACE CONST NEWLINE SPACE IDENT SPACE IDENT
+        NEWLINE SPACE IDENT SPACE DELIMITER_START STRING DELIMITER_END
+        NEWLINE SPACE IDENT NEWLINE SPACE IDENT NEWLINE SPACE EOF
+      )
     end
   end
 end
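For orientation, a minimal stand-alone use of the API exercised by the spec helper above might look like the following sketch. The require path is an assumption, and the token types in the comment follow the expectations listed in the spec rather than verified output:

  require "ameba"

  # Collect the token types the tokenizer reports for a small snippet.
  types = [] of Symbol
  Ameba::Tokenizer.new(Ameba::Source.new %(puts "hello")).run do |token|
    types << token.type
  end
  # With the refactored tokenizer this should include the delimiter tokens, e.g.
  # [:IDENT, :SPACE, :DELIMITER_START, :STRING, :DELIMITER_END, :EOF]
  puts types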
@@ -22,19 +22,17 @@ module Ameba
                                  &block : Crystal::Token -> _)
       while true
         token = @lexer.next_token
+        block.call token
+
         case token.type
         when :DELIMITER_START
           run_delimiter_state lexer, token, &block
         when :STRING_ARRAY_START, :SYMBOL_ARRAY_START
-          block.call token
           run_array_state lexer, token, &block
         when :EOF
           break
         when :"}"
           break if break_on_rcurly
-          block.call token
-        else
-          block.call token
         end
       end
     end
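Applied, the hunk above leaves run_normal_state reading roughly as follows. The def's opening line is not part of the diff, so its parameter list is inferred from the break_on_rcurly usage here and the call in a later hunk; indentation and the enclosing class are likewise assumed. The point of the change is that every token is now reported through a single block.call at the top of the loop:

  private def run_normal_state(lexer, break_on_rcurly = false,
                               &block : Crystal::Token -> _)
    while true
      token = @lexer.next_token
      block.call token

      case token.type
      when :DELIMITER_START
        run_delimiter_state lexer, token, &block
      when :STRING_ARRAY_START, :SYMBOL_ARRAY_START
        run_array_state lexer, token, &block
      when :EOF
        break
      when :"}"
        break if break_on_rcurly
      end
    end
  end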
@@ -42,6 +40,8 @@ module Ameba
     private def run_delimiter_state(lexer, token, &block : Crystal::Token -> _)
       while true
         token = @lexer.next_string_token(token.delimiter_state)
+        block.call token
+
         case token.type
         when :DELIMITER_END
           break
@@ -49,8 +49,6 @@ module Ameba
           run_normal_state lexer, break_on_rcurly: true, &block
         when :EOF
           break
-        else
-          block.call token
         end
       end
     end
@@ -58,15 +56,13 @@ module Ameba
     private def run_array_state(lexer, token, &block : Crystal::Token -> _)
       while true
         lexer.next_string_array_token
+        block.call token
 
         case token.type
         when :STRING_ARRAY_END
-          block.call token
           break
         when :EOF
-          raise "Unterminated symbol array literal"
-        else
-          block.call token
+          break
         end
       end
     end
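In the same spirit, run_array_state after the final hunks reads roughly as below (indentation and the enclosing class assumed). Every token is reported through the single block.call at the top of the loop, and an unterminated array literal now ends the loop at :EOF instead of raising:

  private def run_array_state(lexer, token, &block : Crystal::Token -> _)
    while true
      lexer.next_string_array_token
      block.call token

      case token.type
      when :STRING_ARRAY_END
        break
      when :EOF
        break
      end
    end
  end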