mirror of
https://gitea.invidious.io/iv-org/shard-ameba.git
synced 2024-08-15 00:53:29 +00:00
Pass on crystal-nightly
This commit is contained in:
parent
528ec460b0
commit
03adc20872
8 changed files with 26 additions and 26 deletions
|
@@ -3,7 +3,7 @@ require "../spec_helper"
|
||||||
module Ameba
|
module Ameba
|
||||||
private def it_tokenizes(str, expected)
|
private def it_tokenizes(str, expected)
|
||||||
it "tokenizes #{str}" do
|
it "tokenizes #{str}" do
|
||||||
([] of Symbol).tap do |token_types|
|
([] of Crystal::Token::Kind).tap do |token_types|
|
||||||
Tokenizer.new(Source.new str, normalize: false)
|
Tokenizer.new(Source.new str, normalize: false)
|
||||||
.run { |token| token_types << token.type }
|
.run { |token| token_types << token.type }
|
||||||
.should be_true
|
.should be_true
|
||||||
|
|
|
@@ -195,7 +195,7 @@ module Ameba::AST::Util
|
||||||
|
|
||||||
case name
|
case name
|
||||||
when Crystal::ASTNode then name.name_size
|
when Crystal::ASTNode then name.name_size
|
||||||
when Symbol then name.to_s.size # Crystal::MagicConstant
|
when Crystal::Token::Kind then name.to_s.size # Crystal::MagicConstant
|
||||||
else name.size
|
else name.size
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@@ -28,7 +28,7 @@ module Ameba::Rule::Lint
|
||||||
|
|
||||||
def test(source)
|
def test(source)
|
||||||
Tokenizer.new(source).run do |token|
|
Tokenizer.new(source).run do |token|
|
||||||
next unless token.type == :COMMENT
|
next unless token.type.comment?
|
||||||
next unless directive = source.parse_inline_directive(token.value.to_s)
|
next unless directive = source.parse_inline_directive(token.value.to_s)
|
||||||
|
|
||||||
check_action source, token, directive[:action]
|
check_action source, token, directive[:action]
|
||||||
|
|
|
@@ -38,13 +38,13 @@ module Ameba::Rule::Lint
|
||||||
|
|
||||||
Tokenizer.new(source).run do |token|
|
Tokenizer.new(source).run do |token|
|
||||||
case token.type
|
case token.type
|
||||||
when :STRING_ARRAY_START, :SYMBOL_ARRAY_START
|
when .string_array_start?, .symbol_array_start?
|
||||||
start_token = token.dup
|
start_token = token.dup
|
||||||
when :STRING
|
when .string?
|
||||||
if start_token && issue.nil?
|
if start_token && issue.nil?
|
||||||
issue = array_entry_invalid?(token.value, start_token.not_nil!.raw)
|
issue = array_entry_invalid?(token.value, start_token.not_nil!.raw)
|
||||||
end
|
end
|
||||||
when :STRING_ARRAY_END, :SYMBOL_ARRAY_END
|
when .string_array_end?
|
||||||
if issue
|
if issue
|
||||||
issue_for start_token.not_nil!, issue.not_nil!
|
issue_for start_token.not_nil!, issue.not_nil!
|
||||||
end
|
end
|
||||||
|
|
|
@@ -33,7 +33,7 @@ module Ameba::Rule::Lint
|
||||||
|
|
||||||
def test(source)
|
def test(source)
|
||||||
Tokenizer.new(source).run do |token|
|
Tokenizer.new(source).run do |token|
|
||||||
next unless token.type == :COMMENT
|
next unless token.type.comment?
|
||||||
next unless directive = source.parse_inline_directive(token.value.to_s)
|
next unless directive = source.parse_inline_directive(token.value.to_s)
|
||||||
next unless names = unneeded_disables(source, directive, token.location)
|
next unless names = unneeded_disables(source, directive, token.location)
|
||||||
next if names.empty?
|
next if names.empty?
|
||||||
|
|
|
@@ -37,7 +37,7 @@ module Ameba::Rule::Style
|
||||||
|
|
||||||
def test(source)
|
def test(source)
|
||||||
Tokenizer.new(source).run do |token|
|
Tokenizer.new(source).run do |token|
|
||||||
next unless token.type == :NUMBER && decimal?(token.raw)
|
next unless token.type.number? && decimal?(token.raw)
|
||||||
|
|
||||||
parsed = parse_number token.raw
|
parsed = parse_number token.raw
|
||||||
|
|
||||||
|
|
|
@@ -119,19 +119,19 @@ module Ameba::Rule::Style
|
||||||
token = lexer.next_token
|
token = lexer.next_token
|
||||||
|
|
||||||
case token.type
|
case token.type
|
||||||
when :EOF, :"->"
|
when .eof?, .op_minus_gt?
|
||||||
break
|
break
|
||||||
when :IDENT
|
when .ident?
|
||||||
next unless in_body
|
next unless in_body
|
||||||
return unless token.value == :begin
|
return unless token.value == :begin
|
||||||
return token.location
|
return token.location
|
||||||
when :"("
|
when .op_lparen?
|
||||||
in_argument_list = true
|
in_argument_list = true
|
||||||
when :")"
|
when .op_rparen?
|
||||||
in_argument_list = false
|
in_argument_list = false
|
||||||
when :NEWLINE
|
when .newline?
|
||||||
in_body = true unless in_argument_list
|
in_body = true unless in_argument_list
|
||||||
when :SPACE
|
when .space?
|
||||||
# ignore
|
# ignore
|
||||||
else
|
else
|
||||||
return if in_body
|
return if in_body
|
||||||
|
@@ -142,7 +142,7 @@ module Ameba::Rule::Style
|
||||||
private def def_redundant_end_loc(lexer)
|
private def def_redundant_end_loc(lexer)
|
||||||
end_loc = def_end_loc = nil
|
end_loc = def_end_loc = nil
|
||||||
|
|
||||||
while (token = lexer.next_token).type != :EOF
|
while !(token = lexer.next_token).type.eof?
|
||||||
next unless token.value == :end
|
next unless token.value == :end
|
||||||
|
|
||||||
end_loc, def_end_loc = def_end_loc, token.location
|
end_loc, def_end_loc = def_end_loc, token.location
|
||||||
|
|
|
@@ -56,13 +56,13 @@ module Ameba
|
||||||
block.call token
|
block.call token
|
||||||
|
|
||||||
case token.type
|
case token.type
|
||||||
when :DELIMITER_START
|
when .delimiter_start?
|
||||||
run_delimiter_state lexer, token, &block
|
run_delimiter_state lexer, token, &block
|
||||||
when :STRING_ARRAY_START, :SYMBOL_ARRAY_START
|
when .string_array_start?, .symbol_array_start?
|
||||||
run_array_state lexer, token, &block
|
run_array_state lexer, token, &block
|
||||||
when :EOF
|
when .op_rcurly?
|
||||||
break
|
break
|
||||||
when :"}"
|
when .eof?
|
||||||
break if break_on_rcurly
|
break if break_on_rcurly
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@@ -74,11 +74,11 @@ module Ameba
|
||||||
block.call token
|
block.call token
|
||||||
|
|
||||||
case token.type
|
case token.type
|
||||||
when :DELIMITER_END
|
when .delimiter_end?
|
||||||
break
|
break
|
||||||
when :INTERPOLATION_START
|
when .interpolation_start?
|
||||||
run_normal_state lexer, break_on_rcurly: true, &block
|
run_normal_state lexer, break_on_rcurly: true, &block
|
||||||
when :EOF
|
when .eof?
|
||||||
break
|
break
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@@ -90,9 +90,9 @@ module Ameba
|
||||||
block.call token
|
block.call token
|
||||||
|
|
||||||
case token.type
|
case token.type
|
||||||
when :STRING_ARRAY_END
|
when .string_array_end?
|
||||||
break
|
break
|
||||||
when :EOF
|
when .eof?
|
||||||
break
|
break
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue