Refactor Token to be a struct

Branch: dev
Author: Alberto Restifo
Date: 2020-04-22 11:17:06 +02:00
Parent: 28f9cfa1f5
Commit: 1c2e90d7a4
6 changed files with 35 additions and 99 deletions
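In short, this commit turns CBOR::Token from a NamedTuple alias into a struct built with Crystal's record macro, so call sites switch from token[:kind] / token[:value] to token.kind / token.value, and token construction switches from tuple literals to Token.new. A minimal sketch of the before/after access pattern (Kind and TokenValue below are stand-ins for illustration, not the shard's real definitions):

    # Stand-in definitions, for illustration only.
    enum Kind
      Int
    end
    alias TokenValue = Int32 | Nil

    # Before: the token is a NamedTuple, read with symbol keys.
    old_token = {kind: Kind::Int, value: 1}
    p old_token[:kind] # => Int

    # After: the token is a struct generated by the record macro, read with getters.
    record Token, kind : Kind, value : TokenValue, size : Int64? = nil
    new_token = Token.new(kind: Kind::Int, value: 1)
    p new_token.kind # => Int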

View File

@@ -28,8 +28,8 @@ describe CBOR::Lexer do
token.should_not be_nil
next unless token
-token[:kind].should eq(CBOR::Kind::Int)
-token[:value].as(Int).should eq(tt[:value])
+token.kind.should eq(CBOR::Kind::Int)
+token.value.as(Int).should eq(tt[:value])
end
end
end

View File

@@ -1,46 +0,0 @@
-abstract class CBOR::Decoder
-  abstract def current_token : Token::T
-  abstract def read_token : Token::T
-  abstract def finish_token!
-
-  def read : Type
-    read_value
-  end
-
-  def read_value : Type
-    case token = current_token
-    when Token::IntT
-      finish_token!
-      token.value
-    when Token::BytesT
-      finish_token!
-      token.value
-    when Token::StringT
-      finish_token!
-      token.value
-    when Token::ByteArrayT
-      # Consume the array :)
-    end
-  end
-
-  private def read_bytes_array_body
-    read_type(Token::ByteArrayT) do |token|
-    end
-  end
-
-  private macro read_type(type, finish_token = true, &block)
-    case token = current_token
-    when {{type}}
-      {% if finish_token %}finish_token!{% end %}
-      {{ block.body }}
-    else
-      unexpected_token(token, {{type.stringify.split("::").last}})
-    end
-  end
-
-  private def unexpected_token(token, expected = nil)
-    message = "Unexpected token #{Token.to_s(token)}"
-    message += " expected #{expected}" if expected
-    raise TypeCastError.new(message, token.byte_number)
-  end
-end

View File

@@ -1,25 +0,0 @@
-class CBOR::IODecoder < CBOR::Decoder
-  def initialize(string_or_io : String | IO)
-    @lexer = Lexer.new(string_or_io)
-  end
-
-  def self.new(array : Array(UInt8))
-    slice = Bytes.new(array.to_unsafe, array.size)
-    new(slice)
-  end
-
-  @[AlwaysInline]
-  def current_token : Token::T
-    @lexer.current_token
-  end
-
-  @[AlwaysInline]
-  def read_token : Token::T
-    @lexer.read_token
-  end
-
-  @[AlwaysInline]
-  def finish_token!
-    @lexer.finish_token!
-  end
-end

View File

@@ -25,19 +25,19 @@ class CBOR::Diagnostic
token = @lexer.read_next
return nil unless token
-case token[:kind]
+case token.kind
when Kind::Int
-token[:value].to_s
+token.value.to_s
when Kind::String
-%("#{token[:value].as(String)}")
+%("#{token.value.as(String)}")
when Kind::Bytes
-"h'#{token[:value].as(Bytes).hexstring}'"
+"h'#{token.value.as(Bytes).hexstring}'"
when Kind::BytesArray
-token[:value].as(BytesArray).to_diagnostic
+token.value.as(BytesArray).to_diagnostic
when Kind::StringArray
-token[:value].as(StringArray).to_diagnostic
+token.value.as(StringArray).to_diagnostic
else
-token[:kind].to_s
+token.kind.to_s
end
end
end

View File

@@ -6,8 +6,8 @@ class CBOR::Lexer
Bool |
String |
Bytes |
-Array(Type) |
-Hash(Type, Type) |
+Array(CBOR::Type) |
+Hash(CBOR::Type, CBOR::Type) |
Int8 |
UInt8 |
Int16 |
@@ -43,10 +43,10 @@ class CBOR::Lexer
def read_value : Type?
res = read_next
return nil unless res
-res[:value]
+res.value
end
-# Readsn the next concrete value, returning the token kind.
+# Reads the next concrete value, returning the token kind.
# Useful when you need to differentiate between Null and Undefined.
def read_next : Token?
return nil if @eof
@@ -54,7 +54,7 @@ class CBOR::Lexer
token = next_token
return nil unless token
-case token[:kind]
+case token.kind
when Kind::Int,
Kind::String,
Kind::Bool,
@@ -63,11 +63,11 @@ class CBOR::Lexer
token
when Kind::Null,
Kind::Undefined
-{kind: token[:kind], value: nil}
+Token.new(kind: token.kind, value: nil)
when Kind::BytesArray
-{kind: token[:kind], value: read_bytes_array}
+Token.new(kind: token.kind, value: read_bytes_array)
when Kind::StringArray
-{kind: token[:kind], value: read_string_array}
+Token.new(kind: token.kind, value: read_string_array)
end
end
@@ -135,7 +135,7 @@ class CBOR::Lexer
when 0x5b
consume_binary(read(UInt64))
when 0x5f
-{kind: open_token(Kind::BytesArray), value: nil}
+Token.new(kind: open_token(Kind::BytesArray), value: nil)
when 0x60..0x77
consume_string(current_byte - 0x60)
when 0x78
@@ -147,9 +147,11 @@ class CBOR::Lexer
when 0x7b
consume_string(read(UInt16))
when 0x7f
-{kind: open_token(Kind::StringArray), value: nil}
+Token.new(kind: open_token(Kind::StringArray), value: nil)
when 0xff
-{kind: finish_token, value: nil}
+Token.new(kind: finish_token, value: nil)
+when 0x80..0x97
+consume_array(current_byte - 0x80)
else
raise ParseError.new("Unexpected first byte 0x#{current_byte.to_s(16)}")
end
@@ -161,13 +163,13 @@ class CBOR::Lexer
loop do
token = next_token
raise ParseError.new("Unexpected EOF") unless token
-break if token[:kind] == stop
+break if token.kind == stop
-if only && token[:kind] != only
-raise ParseError.new("Illegal token #{token[:kind].to_s} while reading #{only.to_s} array")
+if only && token.kind != only
+raise ParseError.new("Illegal token #{token.kind.to_s} while reading #{only.to_s} array")
end
-yield token[:value]
+yield token.value
end
end
@@ -182,16 +184,21 @@ class CBOR::Lexer
end
private def consume_int(value)
-{kind: Kind::Int, value: value}
+Token.new(kind: Kind::Int, value: value)
end
private def consume_binary(size)
bytes = read_bytes(size)
-{kind: Kind::Bytes, value: bytes}
+Token.new(kind: Kind::Bytes, value: bytes)
end
private def consume_string(size)
-{kind: Kind::String, value: @io.read_string(size)}
+Token.new(kind: Kind::String, value: @io.read_string(size))
end
+private def consume_array(size)
+arr = Array(CBOR::Type).new(size)
+Token.new(kind: Kind::Array, value: arr)
+end
private def open_token(kind : Kind) : Kind

View File

@@ -15,4 +15,4 @@ enum CBOR::Kind
Map
end
-alias CBOR::Token = NamedTuple(kind: Kind, value: Lexer::Type)
+record CBOR::Token, kind : Kind, value : Lexer::Type, size : Int64? = nil
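For readers unfamiliar with Crystal's record macro: it defines an immutable struct with getters, a constructor, copy_with, and clone, so the new Token is roughly equivalent to the hand-written struct sketched below. This is an illustrative sketch of the expansion, not code from the repository; the added size field is optional and defaults to nil.

    # Approximate expansion of the record line above (sketch only).
    struct CBOR::Token
      getter kind : Kind
      getter value : Lexer::Type
      getter size : Int64?

      def initialize(@kind : Kind, @value : Lexer::Type, @size : Int64? = nil)
      end
    end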