Refactor Token to be a struct
parent 28f9cfa1f5
commit 1c2e90d7a4
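The refactor below is mechanical: the lexer's Token changes from a NamedTuple to a struct, so every call site moves from token[:kind] / token[:value] to token.kind / token.value. A minimal sketch of that pattern in Crystal, using Symbol and String? as stand-ins for the shard's real Kind and Lexer::Type (illustrative only, not the project's definitions):

    # Before: a NamedTuple token, read with symbol keys.
    old_token = {kind: :int, value: "1"}
    old_token[:kind]  # => :int

    # After: a struct built with Crystal's `record` macro, read with methods,
    # with room for an optional defaulted field such as `size`.
    record Token, kind : Symbol, value : String?, size : Int64? = nil

    new_token = Token.new(kind: :int, value: "1")
    new_token.kind  # => :int
    new_token.size  # => nil

Both forms are stack-allocated value types; the struct adds ordinary method access and allows defaulted fields such as size.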
@@ -28,8 +28,8 @@ describe CBOR::Lexer do
       token.should_not be_nil
       next unless token

-      token[:kind].should eq(CBOR::Kind::Int)
-      token[:value].as(Int).should eq(tt[:value])
+      token.kind.should eq(CBOR::Kind::Int)
+      token.value.as(Int).should eq(tt[:value])
     end
   end
 end
@@ -1,46 +0,0 @@
-abstract class CBOR::Decoder
-  abstract def current_token : Token::T
-  abstract def read_token : Token::T
-  abstract def finish_token!
-
-  def read : Type
-    read_value
-  end
-
-  def read_value : Type
-    case token = current_token
-    when Token::IntT
-      finish_token!
-      token.value
-    when Token::BytesT
-      finish_token!
-      token.value
-    when Token::StringT
-      finish_token!
-      token.value
-    when Token::ByteArrayT
-      # Consume the array :)
-    end
-  end
-
-  private def read_bytes_array_body
-    read_type(Token::ByteArrayT) do |token|
-    end
-  end
-
-  private macro read_type(type, finish_token = true, &block)
-    case token = current_token
-    when {{type}}
-      {% if finish_token %}finish_token!{% end %}
-      {{ block.body }}
-    else
-      unexpected_token(token, {{type.stringify.split("::").last}})
-    end
-  end
-
-  private def unexpected_token(token, expected = nil)
-    message = "Unexpected token #{Token.to_s(token)}"
-    message += " expected #{expected}" if expected
-    raise TypeCastError.new(message, token.byte_number)
-  end
-end
@@ -1,25 +0,0 @@
-class CBOR::IODecoder < CBOR::Decoder
-  def initialize(string_or_io : String | IO)
-    @lexer = Lexer.new(string_or_io)
-  end
-
-  def self.new(array : Array(UInt8))
-    slice = Bytes.new(array.to_unsafe, array.size)
-    new(slice)
-  end
-
-  @[AlwaysInline]
-  def current_token : Token::T
-    @lexer.current_token
-  end
-
-  @[AlwaysInline]
-  def read_token : Token::T
-    @lexer.read_token
-  end
-
-  @[AlwaysInline]
-  def finish_token!
-    @lexer.finish_token!
-  end
-end
@@ -25,19 +25,19 @@ class CBOR::Diagnostic
     token = @lexer.read_next
     return nil unless token

-    case token[:kind]
+    case token.kind
     when Kind::Int
-      token[:value].to_s
+      token.value.to_s
     when Kind::String
-      %("#{token[:value].as(String)}")
+      %("#{token.value.as(String)}")
     when Kind::Bytes
-      "h'#{token[:value].as(Bytes).hexstring}'"
+      "h'#{token.value.as(Bytes).hexstring}'"
     when Kind::BytesArray
-      token[:value].as(BytesArray).to_diagnostic
+      token.value.as(BytesArray).to_diagnostic
     when Kind::StringArray
-      token[:value].as(StringArray).to_diagnostic
+      token.value.as(StringArray).to_diagnostic
     else
-      token[:kind].to_s
+      token.kind.to_s
     end
   end
 end
@@ -6,8 +6,8 @@ class CBOR::Lexer
                Bool |
                String |
                Bytes |
-               Array(Type) |
-               Hash(Type, Type) |
+               Array(CBOR::Type) |
+               Hash(CBOR::Type, CBOR::Type) |
                Int8 |
                UInt8 |
                Int16 |
@@ -43,10 +43,10 @@ class CBOR::Lexer
   def read_value : Type?
     res = read_next
     return nil unless res
-    res[:value]
+    res.value
   end

-  # Readsn the next concrete value, returning the token kind.
+  # Reads the next concrete value, returning the token kind.
   # Useful when you need to differentiate between Null and Undefined.
   def read_next : Token?
     return nil if @eof
@@ -54,7 +54,7 @@ class CBOR::Lexer
     token = next_token
     return nil unless token

-    case token[:kind]
+    case token.kind
     when Kind::Int,
         Kind::String,
         Kind::Bool,
@@ -63,11 +63,11 @@ class CBOR::Lexer
      token
    when Kind::Null,
         Kind::Undefined
-     {kind: token[:kind], value: nil}
+     Token.new(kind: token.kind, value: nil)
    when Kind::BytesArray
-     {kind: token[:kind], value: read_bytes_array}
+     Token.new(kind: token.kind, value: read_bytes_array)
    when Kind::StringArray
-     {kind: token[:kind], value: read_string_array}
+     Token.new(kind: token.kind, value: read_string_array)
    end
  end

@@ -135,7 +135,7 @@ class CBOR::Lexer
    when 0x5b
      consume_binary(read(UInt64))
    when 0x5f
-     {kind: open_token(Kind::BytesArray), value: nil}
+     Token.new(kind: open_token(Kind::BytesArray), value: nil)
    when 0x60..0x77
      consume_string(current_byte - 0x60)
    when 0x78
@@ -147,9 +147,11 @@ class CBOR::Lexer
    when 0x7b
      consume_string(read(UInt16))
    when 0x7f
-     {kind: open_token(Kind::StringArray), value: nil}
+     Token.new(kind: open_token(Kind::StringArray), value: nil)
    when 0xff
-     {kind: finish_token, value: nil}
+     Token.new(kind: finish_token, value: nil)
+   when 0x80..0x97
+     consume_array(current_byte - 0x80)
    else
      raise ParseError.new("Unexpected first byte 0x#{current_byte.to_s(16)}")
    end
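The new 0x80..0x97 branch above covers CBOR array heads whose length is packed into the initial byte (major type 4, lengths 0 through 23 per RFC 7049), which is why current_byte - 0x80 yields the element count handed to consume_array. A tiny worked example, independent of the shard:

    initial_byte = 0x83                 # CBOR head byte for a three-element array
    major_type   = initial_byte >> 5    # => 4 (major type 4 = array)
    length       = initial_byte - 0x80  # => 3, equivalently initial_byte & 0x1f
    puts({major_type, length})          # prints {4, 3}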
@@ -161,13 +163,13 @@ class CBOR::Lexer
    loop do
      token = next_token
      raise ParseError.new("Unexpected EOF") unless token
-     break if token[:kind] == stop
+     break if token.kind == stop

-     if only && token[:kind] != only
-       raise ParseError.new("Illegal token #{token[:kind].to_s} while reading #{only.to_s} array")
+     if only && token.kind != only
+       raise ParseError.new("Illegal token #{token.kind.to_s} while reading #{only.to_s} array")
      end

-     yield token[:value]
+     yield token.value
    end
  end

@@ -182,16 +184,21 @@ class CBOR::Lexer
   end

   private def consume_int(value)
-    {kind: Kind::Int, value: value}
+    Token.new(kind: Kind::Int, value: value)
   end

   private def consume_binary(size)
     bytes = read_bytes(size)
-    {kind: Kind::Bytes, value: bytes}
+    Token.new(kind: Kind::Bytes, value: bytes)
   end

   private def consume_string(size)
-    {kind: Kind::String, value: @io.read_string(size)}
+    Token.new(kind: Kind::String, value: @io.read_string(size))
+  end
+
+  private def consume_array(size)
+    arr = Array(CBOR::Type).new(size)
+    Token.new(kind: Kind::Array, value: arr)
   end

   private def open_token(kind : Kind) : Kind
@@ -15,4 +15,4 @@ enum CBOR::Kind
   Map
 end

-alias CBOR::Token = NamedTuple(kind: Kind, value: Lexer::Type)
+record CBOR::Token, kind : Kind, value : Lexer::Type, size : Int64? = nil
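For readers new to Crystal's record macro: the last line above expands, roughly, to a plain struct with one getter per field and a constructor that defaults size to nil (the real expansion also defines copy_with and clone). An approximate sketch, reusing the shard's Kind and Lexer::Type names:

    struct CBOR::Token
      getter kind : Kind
      getter value : Lexer::Type
      getter size : Int64?

      def initialize(@kind : Kind, @value : Lexer::Type, @size : Int64? = nil)
      end
    end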