Pass on crystal-nightly

Vitalii Elenhaupt 2022-03-11 14:15:05 +02:00
parent 528ec460b0
commit 03adc20872
GPG key ID: CD0BF17825928BC0
8 changed files with 26 additions and 26 deletions
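
The diff below replaces Symbol comparisons on Crystal::Token#type with the Crystal::Token::Kind enum and its predicate methods, since on Crystal nightly the token type is an enum rather than a Symbol. A minimal sketch of the pattern outside Ameba (assuming a compiler where Token#type is already a Token::Kind):

require "compiler/crystal/syntax"

lexer = Crystal::Lexer.new("answer = 42 # the answer")
lexer.comments_enabled = true

loop do
  token = lexer.next_token
  # Old style: token.type == :EOF, token.type == :COMMENT
  # New style: one predicate per Crystal::Token::Kind member.
  break if token.type.eof?
  puts "comment: #{token.value}" if token.type.comment?
end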

@@ -3,7 +3,7 @@ require "../spec_helper"
 module Ameba
   private def it_tokenizes(str, expected)
     it "tokenizes #{str}" do
-      ([] of Symbol).tap do |token_types|
+      ([] of Crystal::Token::Kind).tap do |token_types|
         Tokenizer.new(Source.new str, normalize: false)
           .run { |token| token_types << token.type }
           .should be_true

@@ -194,9 +194,9 @@ module Ameba::AST::Util
     return 0 unless node.responds_to?(:name) && (name = node.name)
     case name
-    when Crystal::ASTNode then name.name_size
-    when Symbol then name.to_s.size # Crystal::MagicConstant
-    else name.size
+    when Crystal::ASTNode then name.name_size
+    when Crystal::Token::Kind then name.to_s.size # Crystal::MagicConstant
+    else name.size
     end
   end
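
The new branch above covers Crystal::MagicConstant#name, which used to be a Symbol such as :__LINE__ and is a Crystal::Token::Kind on nightly. A small sketch of that assumption; the parsed snippet is arbitrary, and the magic constant is used as a default argument value so it survives parsing as a MagicConstant node:

require "compiler/crystal/syntax"

# Hypothetical snippet: a def whose default value is a magic constant.
node = Crystal::Parser.parse("def foo(line = __LINE__); end").as(Crystal::Def)
magic = node.args.first.default_value.as(Crystal::MagicConstant)

# On a compiler where this change has landed, `name` is a Token::Kind
# (previously a Symbol such as :__LINE__).
puts magic.name.is_a?(Crystal::Token::Kind) # expected: true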

@@ -28,7 +28,7 @@ module Ameba::Rule::Lint
     def test(source)
       Tokenizer.new(source).run do |token|
-        next unless token.type == :COMMENT
+        next unless token.type.comment?
         next unless directive = source.parse_inline_directive(token.value.to_s)
         check_action source, token, directive[:action]

@@ -38,13 +38,13 @@ module Ameba::Rule::Lint
       Tokenizer.new(source).run do |token|
         case token.type
-        when :STRING_ARRAY_START, :SYMBOL_ARRAY_START
+        when .string_array_start?, .symbol_array_start?
           start_token = token.dup
-        when :STRING
+        when .string?
           if start_token && issue.nil?
             issue = array_entry_invalid?(token.value, start_token.not_nil!.raw)
           end
-        when :STRING_ARRAY_END, :SYMBOL_ARRAY_END
+        when .string_array_end?
           if issue
             issue_for start_token.not_nil!, issue.not_nil!

@@ -33,7 +33,7 @@ module Ameba::Rule::Lint
     def test(source)
       Tokenizer.new(source).run do |token|
-        next unless token.type == :COMMENT
+        next unless token.type.comment?
         next unless directive = source.parse_inline_directive(token.value.to_s)
         next unless names = unneeded_disables(source, directive, token.location)
         next if names.empty?

@@ -37,7 +37,7 @@ module Ameba::Rule::Style
     def test(source)
       Tokenizer.new(source).run do |token|
-        next unless token.type == :NUMBER && decimal?(token.raw)
+        next unless token.type.number? && decimal?(token.raw)
         parsed = parse_number token.raw
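
The rule still reads token.raw, the literal exactly as it was written in the source; only the kind check changes. A sketch of the same check against a bare lexer, assuming the lexer's wants_raw flag is what keeps Token#raw populated (the way Ameba's tokenizer presumably enables it for this rule):

require "compiler/crystal/syntax"

lexer = Crystal::Lexer.new("1200000")
lexer.wants_raw = true # keep the literal text available via Token#raw

token = lexer.next_token
puts token.type.number? # expected: true
puts token.raw          # expected: 1200000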

@@ -119,19 +119,19 @@ module Ameba::Rule::Style
         token = lexer.next_token
         case token.type
-        when :EOF, :"->"
+        when .eof?, .op_minus_gt?
           break
-        when :IDENT
+        when .ident?
           next unless in_body
           return unless token.value == :begin
           return token.location
-        when :"("
+        when .op_lparen?
           in_argument_list = true
-        when :")"
+        when .op_rparen?
           in_argument_list = false
-        when :NEWLINE
+        when .newline?
           in_body = true unless in_argument_list
-        when :SPACE
+        when .space?
           # ignore
         else
           return if in_body
@@ -142,7 +142,7 @@ module Ameba::Rule::Style
     private def def_redundant_end_loc(lexer)
       end_loc = def_end_loc = nil
-      while (token = lexer.next_token).type != :EOF
+      while !(token = lexer.next_token).type.eof?
         next unless token.value == :end
         end_loc, def_end_loc = def_end_loc, token.location
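
Operator tokens move from symbol literals to OP_* enum members whose predicates spell out the characters: :"->" becomes .op_minus_gt?, while :"(" and :")" become .op_lparen? and .op_rparen?. A tiny sketch of that naming:

require "compiler/crystal/syntax"

lexer = Crystal::Lexer.new("->(x : Int32) { x + 1 }")

puts lexer.next_token.type.op_minus_gt? # "->" : expected true
puts lexer.next_token.type.op_lparen?   # "("  : expected true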

@@ -56,13 +56,13 @@ module Ameba
         block.call token
         case token.type
-        when :DELIMITER_START
+        when .delimiter_start?
           run_delimiter_state lexer, token, &block
-        when :STRING_ARRAY_START, :SYMBOL_ARRAY_START
+        when .string_array_start?, .symbol_array_start?
           run_array_state lexer, token, &block
-        when :EOF
+        when .eof?
           break
-        when :"}"
+        when .op_rcurly?
           break if break_on_rcurly
         end
       end
@@ -74,11 +74,11 @@ module Ameba
         block.call token
         case token.type
-        when :DELIMITER_END
+        when .delimiter_end?
           break
-        when :INTERPOLATION_START
+        when .interpolation_start?
           run_normal_state lexer, break_on_rcurly: true, &block
-        when :EOF
+        when .eof?
           break
         end
       end
@@ -90,9 +90,9 @@ module Ameba
         block.call token
         case token.type
-        when :STRING_ARRAY_END
+        when .string_array_end?
           break
-        when :EOF
+        when .eof?
           break
         end
       end
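
For reference, a short driver that exercises the three lexer states handled above: the normal state, the delimiter state (string interpolation re-enters the normal state until the closing curly brace), and the percent-array state. A sketch assuming the public Source and Tokenizer API shown in this diff; the tokenized snippet is arbitrary:

require "ameba"

# Non-interpolating %q so the #{...} reaches the tokenizer literally.
code = %q(puts "sum = #{1 + 2}"; %w(a b))

kinds = [] of Crystal::Token::Kind
source = Ameba::Source.new(code)
Ameba::Tokenizer.new(source).run { |token| kinds << token.type }

puts kinds.any?(&.interpolation_start?) # delimiter state reached: expected true
puts kinds.any?(&.string_array_start?)  # array state reached: expected true
puts kinds.last.eof?                    # normal state ends at EOF: expected true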