# NOTE(review): stray VCS timestamp artifact removed (was "2020-01-02 09:21:11 +01:00").
|
|
|
require "uuid"
|
|
|
|
require "openssl"
|
|
|
|
require "json"
|
|
|
|
require "base64"
|
|
|
|
|
|
|
|
module FileStorage
|
|
|
|
|
|
|
|
extend self
|
|
|
|
|
|
|
|
# Number of bytes read at a time when hashing files on disk: 1 MB.
def file_reading_buffer_size
  1_000_000
end
|
|
|
|
|
|
|
|
# Maximum payload size of a single on-network message: 1 KB.
def message_buffer_size
  1_000
end
|
|
|
|
|
|
|
|
# Base error type for the FileStorage protocol.
# NOTE(review): named `Exception`, which shadows the top-level ::Exception
# inside this module — rescuing must be explicit about which one is meant.
# Consider a more specific name (e.g. FileStorage::Error) in a future revision.
class Exception < ::Exception
end
|
|
|
|
|
|
|
|
# Kinds of messages exchanged between client and server.
# Enum members take their numeric value from declaration order (0-based),
# so this order is part of the wire format — do not reorder.
enum MessageType
  Error
  Authentication
  UploadRequest
  DownloadRequest
  Response
  Responses
  Transfer
end
|
|
|
|
|
|
|
|
# One piece of a transferred file: its position, the total chunk count,
# and a digest so the receiver can verify the payload.
class Chunk
  JSON.mapping({
    # chunk's number (0-based position of this chunk within the file —
    # TODO confirm base; SOURCE only shows the counter being passed through)
    n: Int32,
    # number of chunks for the whole file
    on: Int32,
    # SHA256 hex digest of the current chunk's data, as passed to #initialize.
    # NOTE(review): Transfer#initialize passes the Base64-encoded payload here,
    # so this is a digest of the encoded form, not the raw bytes — confirm
    # both ends agree on that.
    digest: String
  })

  # data: any value responding to #to_slice (String, Bytes, ...);
  # its digest is computed eagerly at construction time.
  def initialize(@n, @on, data)
    @digest = FileStorage.data_digest data.to_slice
  end
end
|
|
|
|
|
|
|
|
# For now, uploads and downloads are sequential.
|
|
|
|
# In a future version, we will be able to send
|
|
|
|
# arbitrary parts of each file.
|
|
|
|
|
|
|
|
# Authentication token identifying a user by id and login.
# NOTE(review): carries no signature or expiry — presumably validated
# elsewhere; confirm before trusting it server-side.
class Token
  JSON.mapping({
    # user id
    uid: Int32,
    # user login name
    login: String
  })

  def initialize(@uid, @login)
  end
end
|
|
|
|
|
|
|
|
# Who knows, maybe someday we will be on UDP, too.
|
|
|
|
#class SHA256
|
|
|
|
# JSON.mapping({
|
|
|
|
# chunk: Slice(UInt8)
|
|
|
|
# })
|
|
|
|
#end
|
|
|
|
|
|
|
|
|
|
|
|
# A file has a name, a size and tags.
# Metadata describing a file to transfer: name, size, chunk count,
# whole-file digest and optional tags.
class FileInfo
  JSON.mapping({
    name: String,
    # file size in bytes
    size: UInt64,
    # number of chunks the file is split into for transfer
    nb_chunks: Int32,
    # SHA256 file digest
    digest: String,

    # list of SHA256, if we are on UDP
    # chunks: Array(SHA256),
    tags: Array(String)?
  })

  # Builds the metadata from an open File.
  # NOTE(review): FileStorage.file_digest reads the file to EOF, leaving the
  # file position at the end — callers that read the file afterwards must
  # rewind it first.
  def initialize(file : File, @tags = nil)
    @name = File.basename file.path
    @size = file.size
    @digest = FileStorage.file_digest file
    # Ceiling division made explicit via to_f: on older Crystal, Int#/ was
    # integer division and `.ceil` on an Int is a no-op, which would silently
    # drop the last partial chunk. to_f makes the result version-independent.
    @nb_chunks = (@size.to_f / FileStorage.message_buffer_size).ceil.to_i
  end
end
|
|
|
|
|
|
|
|
class Message
|
|
|
|
|
|
|
|
alias Request = UploadRequest | DownloadRequest
|
|
|
|
|
|
|
|
# Client request to upload one file; carries the file's metadata.
class UploadRequest
  JSON.mapping({
    # message id, autogenerated (random UUID string)
    mid: String,
    # metadata of the file to upload
    file: FileInfo
  })

  def initialize(@file)
    @mid = UUID.random.to_s
  end
end
|
|
|
|
|
|
|
|
|
|
|
|
# WIP
# Client request to download a file, selected by digest, name and/or tags.
class DownloadRequest
  JSON.mapping({
    # message id, autogenerated (random UUID string)
    mid: String,
    # SHA256 digest of the file, used as ID
    # NOTE(review): despite its name, this field holds a SHA256 digest, not
    # a UUID — consider renaming in a future protocol revision.
    uuid: String?,
    # file name to match (optional)
    name: String?,
    # tags to match (optional)
    tags: Array(String)?
  })

  # All selectors are optional; presumably the server matches on whichever
  # are provided — verify against the server-side implementation.
  def initialize(@uuid = nil, @name = nil, @tags = nil)
    @mid = UUID.random.to_s
  end
end
|
|
|
|
|
|
|
|
# First message sent by a client: authenticates it and announces the
# transfers (uploads and downloads) it intends to perform.
# NOTE: stray VCS timestamp lines that had corrupted this class body were
# removed; the fields and initializer are otherwise unchanged.
class Authentication
  JSON.mapping({
    # message id, autogenerated (random UUID string)
    mid: String,
    # identity of the client
    token: Token,
    # files the client wants to send
    uploads: Array(UploadRequest),
    # files the client wants to receive
    downloads: Array(DownloadRequest)
  })

  def initialize(@token, @uploads = Array(UploadRequest).new, @downloads = Array(DownloadRequest).new)
    @mid = UUID.random.to_s
  end
end
|
|
|
|
|
|
|
|
# Server reply to a single request, correlated by message id.
class Response
  JSON.mapping({
    # id of the message this responds to
    mid: String,
    # outcome string — exact values are set by callers, not visible here
    response: String,
    # optional human-readable explanation (mostly for failures)
    reason: String?
  })

  def initialize(@mid, @response, @reason = nil)
  end
end
|
|
|
|
|
|
|
|
# Error message. Same shape as Response, kept as a distinct type so
# MessageType can distinguish errors from normal responses.
class Error
  JSON.mapping({
    # id of the message that caused the error
    mid: String,
    # short error description
    # NOTE(review): the previous "a response for each request" comment was
    # copied from Responses and did not apply — this is a single string.
    response: String,
    # optional human-readable explanation
    reason: String?
  })

  def initialize(@mid, @response, @reason = nil)
  end
end
|
|
|
|
|
|
|
|
# Aggregated server reply: a global outcome plus one Response per request.
class Responses
  JSON.mapping({
    mid: String,
    # a response for each request
    responses: Array(Response),
    # global outcome string
    response: String,
    reason: String?
  })

  # NOTE(review): parameter order is (mid, response, responses, reason) —
  # @response comes BEFORE @responses; easy to swap at call sites.
  def initialize(@mid, @response, @responses, @reason = nil)
  end
end
|
|
|
|
|
|
|
|
# One chunk of file data on the wire.
class Transfer
  JSON.mapping({
    # autogenerated message id (random UUID string)
    mid: String,
    # SHA256 digest of the entire file, identifying which file the chunk
    # belongs to
    filedigest: String,
    # position/total bookkeeping plus per-chunk digest
    chunk: Chunk,
    # payload, Base64-encoded (see note in #initialize)
    data: String,
  })

  # file_info: metadata of the file being transferred
  # count:     chunk number (position of this chunk)
  # bindata:   raw bytes of the chunk
  def initialize(file_info : FileInfo, count, bindata)
    # count: chunk number

    @filedigest = file_info.digest
    # NOTE(review): Base64.encode inserts newlines into its output, and the
    # Chunk digest below is computed over this encoded form (newlines
    # included) — both ends must hash the same representation; confirm.
    @data = Base64.encode bindata
    @chunk = FileStorage::Chunk.new count, file_info.nb_chunks, @data
    @mid = UUID.random.to_s
  end
end
|
|
|
|
end
|
|
|
|
|
|
|
|
# Returns the SHA256 hex digest of the given bytes.
# NOTE(review): an earlier comment called this a "private function", but it
# is invoked externally as FileStorage.data_digest (from Chunk#initialize),
# so it must remain public.
def data_digest(data : Bytes)
  # Wrap the bytes in a read-only memory IO and stream them through a
  # digesting IO; reading to EOF feeds every byte into the hash.
  iodata = IO::Memory.new data, false
  buffer = Bytes.new FileStorage.file_reading_buffer_size

  io = OpenSSL::DigestIO.new(iodata, "SHA256")
  while io.read(buffer) > 0; end

  io.digest.hexstring
end
|
|
|
|
|
|
|
|
# Returns the SHA256 hex digest of an open file.
# Reads the file to EOF, so the file position is left at the end — callers
# that re-read the file afterwards must rewind it first.
# NOTE(review): an earlier comment called this a "private function", but it
# is invoked externally as FileStorage.file_digest (from FileInfo#initialize),
# so it must remain public.
def file_digest(file : File)
  # Use the shared 1 MB read-buffer size instead of a duplicated literal,
  # keeping this consistent with data_digest (same value, same behavior).
  buffer = Bytes.new FileStorage.file_reading_buffer_size

  io = OpenSSL::DigestIO.new(file, "SHA256")
  while io.read(buffer) > 0; end

  io.digest.hexstring
end
|
|
|
|
end
|