Fix specs for tokenizer

Vitalii Elenhaupt 2022-03-11 16:18:33 +02:00
parent 03adc20872
commit 1cfc926a28
2 changed files with 13 additions and 13 deletions

spec/ameba/tokenizer_spec.cr

@@ -3,9 +3,9 @@ require "../spec_helper"
 module Ameba
   private def it_tokenizes(str, expected)
     it "tokenizes #{str}" do
-      ([] of Crystal::Token::Kind).tap do |token_types|
+      ([] of String).tap do |token_types|
         Tokenizer.new(Source.new str, normalize: false)
-          .run { |token| token_types << token.type }
+          .run { |token| token_types << token.type.to_s }
           .should be_true
       end.should eq expected
     end
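
Note on the hunk above: `Crystal::Token::Kind` is an enum (visible in the removed `[] of Crystal::Token::Kind`), so an array of kinds never compares equal to the `%i(...)` symbol literals the specs used to expect. The helper now stringifies each kind via `token.type.to_s`, and the expectations switch to `%w(...)` string literals. A minimal illustration of the difference (a sketch, not part of the commit):

# %i(...) builds an Array(Symbol); %w(...) builds an Array(String).
%i(NUMBER EOF) # => [:NUMBER, :EOF]
%w(NUMBER EOF) # => ["NUMBER", "EOF"]

# Enum#to_s returns the member name, which is what the %w(...) expectations
# now compare against:
Crystal::Token::Kind::EOF.to_s # => "EOF"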
@@ -13,20 +13,20 @@ module Ameba
   describe Tokenizer do
     describe "#run" do
-      it_tokenizes %("string"), %i(DELIMITER_START STRING DELIMITER_END EOF)
-      it_tokenizes %(100), %i(NUMBER EOF)
-      it_tokenizes %('a'), %i(CHAR EOF)
-      it_tokenizes %([]), %i([] EOF)
-      it_tokenizes %([] of String), %i([] SPACE IDENT SPACE CONST EOF)
-      it_tokenizes %q("str #{3}"), %i(
+      it_tokenizes %("string"), %w(DELIMITER_START STRING DELIMITER_END EOF)
+      it_tokenizes %(100), %w(NUMBER EOF)
+      it_tokenizes %('a'), %w(CHAR EOF)
+      it_tokenizes %([]), %w([] EOF)
+      it_tokenizes %([] of String), %w([] SPACE IDENT SPACE CONST EOF)
+      it_tokenizes %q("str #{3}"), %w(
         DELIMITER_START STRING INTERPOLATION_START NUMBER } DELIMITER_END EOF
       )
       it_tokenizes %(%w(1 2)),
-        %i(STRING_ARRAY_START STRING STRING STRING_ARRAY_END EOF)
+        %w(STRING_ARRAY_START STRING STRING STRING_ARRAY_END EOF)
       it_tokenizes %(%i(one two)),
-        %i(SYMBOL_ARRAY_START STRING STRING STRING_ARRAY_END EOF)
+        %w(SYMBOL_ARRAY_START STRING STRING STRING_ARRAY_END EOF)
       it_tokenizes %(
         class A
@@ -34,7 +34,7 @@ module Ameba
             puts "hello"
           end
         end
-      ), %i(
+      ), %w(
         NEWLINE SPACE IDENT SPACE CONST NEWLINE SPACE IDENT SPACE IDENT
         NEWLINE SPACE IDENT SPACE DELIMITER_START STRING DELIMITER_END
         NEWLINE SPACE IDENT NEWLINE SPACE IDENT NEWLINE SPACE EOF
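
Putting the spec changes together, the helper now drives the tokenizer end to end and compares plain strings. A rough standalone equivalent (the source path and the exact token stream shown are illustrative assumptions, not from the commit):

types = [] of String
source = Ameba::Source.new %(puts "hello"), "example.cr"
Ameba::Tokenizer.new(source).run { |token| types << token.type.to_s }
types # => ["IDENT", "SPACE", "DELIMITER_START", "STRING", "DELIMITER_END", "EOF"]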

src/ameba/tokenizer.cr

@@ -60,9 +60,9 @@ module Ameba
         run_delimiter_state lexer, token, &block
       when .string_array_start?, .symbol_array_start?
         run_array_state lexer, token, &block
-      when .op_rcurly?
-        break
       when .eof?
         break
+      when .op_rcurly?
+        break if break_on_rcurly
       end
     end
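
The reordering in this hunk is the behavioral part of the fix: `.eof?` must always terminate the loop, while `}` should only do so when the caller asked for it (e.g. while walking an interpolation body). A condensed sketch of the enclosing loop, assuming the names implied by the diff (`break_on_rcurly` comes from the hunk itself; the method name `run_normal_state` and the rest of the body are assumptions):

private def run_normal_state(lexer, break_on_rcurly = false, &block : Crystal::Token -> _)
  loop do
    token = lexer.next_token
    block.call token

    case token.type
    when .delimiter_start?
      run_delimiter_state lexer, token, &block
    when .string_array_start?, .symbol_array_start?
      run_array_state lexer, token, &block
    when .eof?
      break                    # end of input: always stop
    when .op_rcurly?
      break if break_on_rcurly # `}` only closes an interpolation
    end
  end
end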