Hide BytesArray and StringArray from outside Lexer
parent 1c2e90d7a4
commit 4958a4c068
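
This change stops the BytesArray and StringArray wrapper classes from leaking out of the lexer: read_bytes_array and read_string_array now return a plain Kind::Bytes or Kind::String token carrying the concatenated value, and a new chunks field on CBOR::Token records the size of each original chunk so the diagnostic can still print the (_ ...) indefinite-length notation. A minimal sketch of the new token shape, using a hypothetical stand-in record rather than the real CBOR::Token:

    # Hypothetical stand-in record, only to illustrate the new chunks field;
    # the real CBOR::Token also carries kind and size (see the diff below).
    record DemoToken,
      value : String,
      chunks : Array(Int32)? = nil

    # The chunked text string (_ "strea", "ming") becomes a single token:
    token = DemoToken.new(value: "streaming", chunks: [5, 4])
    puts token.value  # => streaming
    puts token.chunks # => [5, 4]
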
@@ -1,6 +1,3 @@
-require "./lexer"
-require "./token"
-
 # Reads a CBOR input into a diagnostic string.
 # This consumes the IO and is mostly useful to test against the examples
 # provided in the RFC and to ensure correct functioning of the `CBOR::Lexer`.
@@ -29,15 +26,48 @@ class CBOR::Diagnostic
     when Kind::Int
       token.value.to_s
     when Kind::String
-      %("#{token.value.as(String)}")
+      if token.chunks
+        chunks = chunks(token.value.as(String), token.chunks.as(Array(Int32)))
+        "(_ #{chunks.map { |s| string(s) }.join(", ")})"
+      else
+        string(token.value.as(String))
+      end
     when Kind::Bytes
-      "h'#{token.value.as(Bytes).hexstring}'"
-    when Kind::BytesArray
-      token.value.as(BytesArray).to_diagnostic
-    when Kind::StringArray
-      token.value.as(StringArray).to_diagnostic
+      if token.chunks
+        chunks = chunks(token.value.as(Bytes), token.chunks.as(Array(Int32)))
+        "(_ #{chunks.map { |b| bytes(b) }.join(", ")})"
+      else
+        bytes(token.value.as(Bytes))
+      end
     else
       token.kind.to_s
     end
   end
+
+  private def chunks(value : Bytes, chunks : Array(Int32)) : Array(Bytes)
+    res = Array(Bytes).new
+    bytes = value.to_a
+    chunks.each do |size|
+      bytes_chunk = bytes.shift(size)
+      res << Bytes.new(bytes_chunk.to_unsafe, bytes_chunk.size)
+    end
+    res
+  end
+
+  private def chunks(value : String, chunks : Array(Int32)) : Array(String)
+    res = Array(String).new
+    arr = value.split("")
+    chunks.each do |size|
+      res << arr.shift(size).join
+    end
+    res
+  end
+
+  private def bytes(b : Bytes) : String
+    "h'#{b.hexstring}'"
+  end
+
+  private def string(s : String) : String
+    %("#{s}")
+  end
 end
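
The private chunks helpers above split the concatenated value back into its original pieces using the recorded sizes before rendering the (_ ...) notation. A standalone sketch of the same splitting logic, with hypothetical names that are not part of the commit:

    # Split a concatenated string back into chunks of the recorded sizes,
    # then render RFC 7049 diagnostic notation for an indefinite-length string.
    def split_chunks(value : String, sizes : Array(Int32)) : Array(String)
      res = Array(String).new
      arr = value.split("")
      sizes.each { |size| res << arr.shift(size).join }
      res
    end

    def quote(s : String) : String
      %("#{s}")
    end

    parts = split_chunks("streaming", [5, 4])
    puts "(_ #{parts.map { |s| quote(s) }.join(", ")})" # => (_ "strea", "ming")
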
@@ -1,25 +1,4 @@
-require "./token"
-
 class CBOR::Lexer
-  # Types returned by the lexer
-  alias Type = Nil |
-               Bool |
-               String |
-               Bytes |
-               Array(CBOR::Type) |
-               Hash(CBOR::Type, CBOR::Type) |
-               Int8 |
-               UInt8 |
-               Int16 |
-               UInt16 |
-               Int32 |
-               UInt32 |
-               Int64 |
-               UInt64 |
-               Int128 |
-               BytesArray |
-               StringArray
-
   def self.new(string : String)
     new IO::Memory.new(string)
   end
@@ -65,32 +44,57 @@ class CBOR::Lexer
          Kind::Undefined
       Token.new(kind: token.kind, value: nil)
     when Kind::BytesArray
-      Token.new(kind: token.kind, value: read_bytes_array)
+      read_bytes_array
     when Kind::StringArray
-      Token.new(kind: token.kind, value: read_string_array)
+      read_string_array
+    when Kind::Array
+      read_array(token)
     end
   end
 
   # Consumes the bytes array until it reaches a break
-  def read_bytes_array : CBOR::BytesArray
+  def read_bytes_array : Token
     bytes = BytesArray.new
+    chunks = Array(Int32).new
 
-    read_until(Kind::BytesArrayEnd, only: Kind::Bytes) do |chunk|
-      bytes << chunk.as(Bytes)
+    read_until(Kind::BytesArrayEnd, only: Kind::Bytes) do |c|
+      chunk = c.as(Bytes)
+      chunks << chunk.size
+      bytes << chunk
     end
 
-    bytes
+    Token.new(
+      kind: Kind::Bytes,
+      value: bytes.to_bytes,
+      chunks: chunks,
+    )
   end
 
   # Reads until break for chunks of strings
-  def read_string_array : CBOR::StringArray
-    strings = StringArray.new
+  def read_string_array : Token
+    value = ""
+    chunks = Array(Int32).new
 
-    read_until(Kind::StringArrayEnd, only: Kind::String) do |chunk|
-      strings << chunk.as(String)
+    read_until(Kind::StringArrayEnd, only: Kind::String) do |c|
+      chunk = c.as(String)
+      chunks << chunk.size
+      value += chunk
     end
 
-    strings
+    Token.new(
+      kind: Kind::String,
+      value: value,
+      chunks: chunks,
+    )
+  end
+
+  def read_array(token : Token) : Token
+    if token.size.nil?
+      read_until(Kind::ArrayEnd) { |element| token.value.as(Array(Type)) << element }
+    else
+      token.size.not_nil!.times { token.value.as(Array(Type)) << read_value }
+    end
+    token
   end
 
   private def next_token : Token?
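
With this change the indefinite-length byte string from RFC 7049 Appendix A, 0x5f 0x42 0x01 0x02 0x43 0x03 0x04 0x05 0xff, i.e. (_ h'0102', h'030405'), comes out of read_bytes_array as a single Kind::Bytes token. A standalone sketch of the concatenation it performs, with hypothetical variable names:

    # Two chunks as read_until would yield them, concatenated into one value
    # while the per-chunk sizes are recorded for the token's chunks field.
    chunks = [Bytes[0x01, 0x02], Bytes[0x03, 0x04, 0x05]]

    value = IO::Memory.new
    sizes = Array(Int32).new
    chunks.each do |chunk|
      sizes << chunk.size
      value.write(chunk)
    end

    puts value.to_slice.hexstring # => 0102030405
    puts sizes                    # => [2, 3]
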
@@ -151,7 +155,7 @@ class CBOR::Lexer
     when 0xff
       Token.new(kind: finish_token, value: nil)
     when 0x80..0x97
-      consume_array(current_byte - 0x80)
+      array_start(current_byte - 0x80)
     else
       raise ParseError.new("Unexpected first byte 0x#{current_byte.to_s(16)}")
     end
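
The 0x80..0x97 range covers arrays whose length is encoded directly in the initial byte (major type 4 with additional information 0 through 23), which is why array_start receives current_byte - 0x80. A trivial illustration:

    # Initial bytes 0x80..0x97 encode the array size in the byte itself.
    [0x80, 0x83, 0x97].each do |byte|
      puts "0x#{byte.to_s(16)} starts an array of #{byte - 0x80} elements"
    end
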
@@ -159,7 +163,7 @@ class CBOR::Lexer
 
   # Reads tokens until it meets the stop kind.
   # Optionally it can fail when the read token is not of the passed kind.
-  private def read_until(stop : Kind, only : Kind?, &block)
+  private def read_until(stop : Kind, only : Kind? = nil, &block)
     loop do
       token = next_token
       raise ParseError.new("Unexpected EOF") unless token
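
read_until now defaults only to nil, so callers that do not need to restrict the yielded kind, such as read_array above, can omit it. A generic standalone sketch of the loop it implements, using hypothetical stand-in types instead of lexer tokens:

    # Pull items until the stop marker, optionally enforcing that every
    # yielded item matches an expected kind.
    def read_until(source : Array(Symbol), stop : Symbol, only : Symbol? = nil)
      loop do
        item = source.shift?
        raise "Unexpected EOF" unless item
        break if item == stop
        raise "Unexpected kind #{item}" if only && item != only
        yield item
      end
    end

    read_until([:string, :string, :stop], :stop, only: :string) { |kind| puts kind }
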
@@ -187,7 +191,9 @@ class CBOR::Lexer
     Token.new(kind: Kind::Int, value: value)
   end
 
-  private def consume_binary(size)
+  private def consume_binary(size : Int)
+    raise ParseError.new("Maximum size for binary array exceeded") if size > Int32::MAX
+
     bytes = read_bytes(size)
     Token.new(kind: Kind::Bytes, value: bytes)
   end
@@ -196,9 +202,10 @@ class CBOR::Lexer
     Token.new(kind: Kind::String, value: @io.read_string(size))
   end
 
-  private def consume_array(size)
-    arr = Array(CBOR::Type).new(size)
-    Token.new(kind: Kind::Array, value: arr)
+  private def array_start(size)
+    raise ParseError.new("Maximum size for array exceeded") if size > Int32::MAX
+    s = size.to_i32
+    Token.new(kind: Kind::Array, value: Array(Type).new(s), size: s)
   end
 
   private def open_token(kind : Kind) : Kind

@@ -15,4 +15,10 @@ enum CBOR::Kind
   Map
 end
 
-record CBOR::Token, kind : Kind, value : Lexer::Type, size : Int64? = nil
+record CBOR::Token,
+  kind : Kind,
+  value : Type,
+  size : Int32? = nil,
+  # Used only for BytesArray and StringArray: it contains the size of each
+  # chunk composing the value
+  chunks : Array(Int32)? = nil

@@ -12,12 +12,4 @@ class CBOR::BytesArray < Array(Bytes)
 
     bytes
   end
-
-  def to_diagnostic : String
-    "(_ #{map { |chunk| to_byte_diagnostic(chunk) }.join(", ")})"
-  end
-
-  private def to_byte_diagnostic(chunk : Bytes) : String
-    "h'#{chunk.hexstring}'"
-  end
 end

@@ -1,13 +0,0 @@
-class CBOR::StringArray < Array(String)
-  def to_s : String
-    join
-  end
-
-  def to_diagnostic : String
-    "(_ #{map { |s| quote(s) }.join(", ")})"
-  end
-
-  private def quote(chunk : String) : String
-    %("#{chunk}")
-  end
-end