Large numbers rule + tokenizer (#10)

V. Elenhaupt 2017-11-04 16:43:40 +02:00 committed by GitHub
parent 3f2bcc56b9
commit 2f9ba27811
7 changed files with 323 additions and 18 deletions

@@ -0,0 +1,118 @@
require "../../spec_helper"
private def it_transforms(number, expected)
it "transforms large number #{number}" do
s = Ameba::Source.new number
Ameba::Rules::LargeNumbers.new.catch(s).should_not be_valid
s.errors.first.message.should contain expected
end
end
module Ameba::Rules
subject = LargeNumbers.new
describe LargeNumbers do
it "passes if large number does not require underscore" do
s = Source.new %q(
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
16 17 18 19 20 30 40 50 60 70 80 90
100
1_000
10_000
100_000
200_000
300_000
400_000
500_000
600_000
700_000
800_000
900_000
1_000_000
-9_223_372_036_854_775_808
9_223_372_036_854_775_807
141_592_654
141_592_654.0
141_592_654.001
141_592_654.001_2
141_592_654.001_23
141_592_654.001_234
141_592_654.001_234_5
0b1101
0o123
0xFE012D
0xfe012d
0xfe012dd11
1_i8
12_i16
123_i32
1_234_i64
12_u8
123_u16
1_234_u32
9_223_372_036_854_775_808_u64
9_223_372_036_854_775_808.000_123_456_789_f64
+100_u32
-900_000_i32
1_234.5e-7
11_234e10_f32
+1.123
-0.000_5
)
subject.catch(s).should be_valid
end
it_transforms "10000", "10_000"
it_transforms "+10000", "+10_000"
it_transforms "-10000", "-10_000"
it_transforms "9223372036854775808", "9_223_372_036_854_775_808"
it_transforms "-9223372036854775808", "-9_223_372_036_854_775_808"
it_transforms "+9223372036854775808", "+9_223_372_036_854_775_808"
it_transforms "1_00000", "100_000"
it_transforms "1_23_i8", "123_i8"
it_transforms "1000_i16", "1_000_i16"
it_transforms "1000_i32", "1_000_i32"
it_transforms "1000_i64", "1_000_i64"
it_transforms "1_23_u8", "123_u8"
it_transforms "1000_u16", "1_000_u16"
it_transforms "1000_u32", "1_000_u32"
it_transforms "1000_u64", "1_000_u64"
it_transforms "123456_f32", "123_456_f32"
it_transforms "123456_f64", "123_456_f64"
it_transforms "123456.5e-7_f32", "123_456.5e-7_f32"
it_transforms "123456e10_f64", "123_456e10_f64"
it_transforms "123456.5e-7", "123_456.5e-7"
it_transforms "123456e10", "123_456e10"
it_transforms "3.00_1", "3.001"
it_transforms "3.0012", "3.001_2"
it_transforms "3.00123", "3.001_23"
it_transforms "3.001234", "3.001_234"
it_transforms "3.0012345", "3.001_234_5"
it "reports rule, pos and message" do
s = Source.new %q(
1200000
)
subject.catch(s).should_not be_valid
error = s.errors.first
error.rule.should_not be_nil
error.pos.should eq 2
error.message.should match /1_200_000/
end
end
end

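For context, the digit grouping these it_transforms expectations describe can be sketched as follows. This is a hypothetical illustration, not the rule's actual implementation: NUMBER_PATTERN and underscored are made-up names, and the sketch only covers decimal literals (prefixed literals such as 0b1101 or 0xFE012D are deliberately left out).

# Hypothetical sketch (not the shipped LargeNumbers rule): re-groups the digits
# of a decimal literal with underscores the way the expectations above describe.
NUMBER_PATTERN = /\A([+\-]?)(\d(?:[\d_]*\d)?)(?:\.(\d(?:[\d_]*\d)?))?(e[+\-]?\d+)?(_[iuf]\d+)?\z/i

def underscored(literal : String) : String?
  return nil unless match = literal.match(NUMBER_PATTERN)

  sign, int = match[1], match[2].gsub("_", "")
  frac, exp, suffix = match[3]?, match[4]?, match[5]?

  # Integer part: groups of three counted from the right ("1200000" -> "1_200_000").
  int = int.reverse.scan(/\d{1,3}/).map { |m| m[0] }.join("_").reverse

  # Fractional part: groups of three counted from the left ("0012345" -> "001_234_5").
  if frac
    frac = "." + frac.gsub("_", "").scan(/\d{1,3}/).map { |m| m[0] }.join("_")
  end

  "#{sign}#{int}#{frac}#{exp}#{suffix}"
end

underscored("1200000")     # => "1_200_000"
underscored("123456.5e-7") # => "123_456.5e-7"
underscored("3.0012345")   # => "3.001_234_5"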

@@ -21,15 +21,5 @@ module Ameba
        s.errors.first.message.should eq "Error!"
      end
    end

    describe "#ast" do
      it "returns ast nodes" do
        s = Source.new %(
          class A; end
          class B; end
        )
        s.ast.to_s.should eq "class A\nend\nclass B\nend\n"
      end
    end
  end
end

@@ -0,0 +1,42 @@
require "../spec_helper"
private def it_tokenizes(str, expected)
it "tokenizes #{str}" do
([] of Symbol).tap do |token_types|
Ameba::Tokenizer.new(Ameba::Source.new str).run do |token|
token_types << token.type
end.should be_true
end.should eq expected
end
end
module Ameba
describe Tokenizer do
describe "#run" do
it_tokenizes %("string"), %i(STRING)
it_tokenizes %(100), %i(NUMBER)
it_tokenizes %('a'), %i(CHAR)
it_tokenizes %([]), %i([])
it_tokenizes %([] of String), %i([] SPACE IDENT SPACE CONST)
it_tokenizes %q("str #{3}"), %i(STRING NUMBER)
it_tokenizes %(%w(1 2)),
%i(STRING_ARRAY_START STRING STRING STRING_ARRAY_END)
it_tokenizes %(%i(one two)),
%i(SYMBOL_ARRAY_START STRING STRING STRING_ARRAY_END)
it_tokenizes %(
class A
def method
puts "hello"
end
end
), [
:NEWLINE, :SPACE, :IDENT, :SPACE, :CONST, :NEWLINE, :SPACE, :IDENT,
:SPACE, :IDENT, :NEWLINE, :SPACE, :IDENT, :SPACE, :STRING, :NEWLINE,
:SPACE, :IDENT, :NEWLINE, :SPACE, :IDENT, :NEWLINE, :SPACE,
]
end
end
end
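The tokenizer exercised above streams token types out of the Crystal compiler's own lexer. As a rough, hypothetical sketch of the driving loop (assuming the lexer is available via require "compiler/crystal/syntax" and that token types are Symbols, as in the Crystal release contemporary with this commit), it might look like the code below. The real Tokenizer presumably also has to switch lexer states for string and array literals, which is what produces the STRING_ARRAY_START and SYMBOL_ARRAY_START tokens expected above; that handling is not shown here.

# Hypothetical sketch, not Ameba's actual Tokenizer: walk the compiler's lexer
# token by token, yield each token, and report whether lexing succeeded.
# Assumes token types are Symbols (true for the 2017-era compiler this commit
# targets; newer compilers use the Crystal::Token::Kind enum instead).
require "compiler/crystal/syntax"

def each_token(code : String)
  lexer = Crystal::Lexer.new code
  loop do
    token = lexer.next_token
    break if token.type == :EOF
    yield token
  end
  true
rescue Crystal::SyntaxException
  false
end

# Prints one token type per lexed token, e.g. IDENT, SPACE, ... for "a = 42".
each_token("a = 42") { |token| puts token.type }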