# spec/ameba/tokenizer_spec.cr
require "../spec_helper"
module Ameba
  # Defines a spec example asserting that tokenizing *str* yields exactly the
  # token type names in *expected* (in order, including the trailing EOF).
  #
  # *file*/*line* default to the call site so that a failing expectation is
  # reported at the `it_tokenizes` invocation, not inside this helper.
  private def it_tokenizes(str, expected, *, file = __FILE__, line = __LINE__)
    it "tokenizes #{str}", file, line do
      %w[].tap do |token_types|
        # `normalize: false` keeps the raw token stream (no source rewriting).
        Tokenizer.new(Source.new str, normalize: false)
          .run { |token| token_types << token.type.to_s }
          .should be_true # Tokenizer#run returns true when lexing succeeds
      end.should eq(expected), file: file, line: line
    end
  end

  describe Tokenizer do
    describe "#run" do
      it_tokenizes %("string"), %w(DELIMITER_START STRING DELIMITER_END EOF)
      it_tokenizes %(100), %w(NUMBER EOF)
      it_tokenizes %('a'), %w(CHAR EOF)
      it_tokenizes %([]), %w([] EOF)
      it_tokenizes %([] of String), %w([] SPACE IDENT SPACE CONST EOF)
      it_tokenizes %q("str #{3}"), %w(
        DELIMITER_START STRING INTERPOLATION_START NUMBER } DELIMITER_END EOF
      )

      it_tokenizes %(%w(1 2)),
        %w(STRING_ARRAY_START STRING STRING STRING_ARRAY_END EOF)

      it_tokenizes %(%i(one two)),
        %w(SYMBOL_ARRAY_START STRING STRING STRING_ARRAY_END EOF)

      # Multi-line snippet: runs of spaces lex as a single SPACE token, so the
      # expected stream below is insensitive to the exact indent width.
      it_tokenizes %(
        class A
          def method
            puts "hello"
          end
        end
      ), %w(
        NEWLINE SPACE IDENT SPACE CONST NEWLINE SPACE IDENT SPACE IDENT
        NEWLINE SPACE IDENT SPACE DELIMITER_START STRING DELIMITER_END
        NEWLINE SPACE IDENT NEWLINE SPACE IDENT NEWLINE SPACE EOF
      )
    end
  end
end