Migration test and example.
parent 4ff5c0c781, commit c4030d4179
spec/test.cr | 81
@@ -5,6 +5,8 @@ require "uuid"
 
 require "../src/*"
 
+# FIXME: Split the test data in separate files. We don’t care about those here.
+
 class Ship
 	include JSON::Serializable
 
@@ -57,11 +59,37 @@ class Ship
 	end
 end
 
-class DODB::SpecDataBase < DODB::DataBase(String, Ship)
-	def initialize
-		::FileUtils.rm_rf "test-storage"
+# This will be used for migration testing, but basically it’s a variant of
+# the class above, a few extra fields, a few missing ones.
+class PrimitiveShip
+	include JSON::Serializable
 
-		initialize "test-storage"
+	property id : String
+	property name : String
+	property wooden : Bool = false # Will be removed.
+	property class_name : String # Will be renamed
+	property flagship : Bool = false # Will be moved to tags.
+
+	def initialize(@name, @class_name = "<unknown>", @id = UUID.random.to_s, @flagship = false)
 	end
+
+	class_getter kamikaze =
+		PrimitiveShip.new("Kamikaze", "Kamikaze")
+	class_getter asakaze =
+		PrimitiveShip.new("Asakaze", "Kamikaze")
+	class_getter all_ships : Array(PrimitiveShip) = [
+		@@kamikaze,
+		@@asakaze
+	]
 end
 
+class DODB::SpecDataBase < DODB::DataBase(String, Ship)
+	def initialize(storage_ext = "")
+		storage_dir = "test-storage#{storage_ext}"
+
+		::FileUtils.rm_rf storage_dir
+
+		super storage_dir
+	end
+end
+
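
The storage_ext parameter added above is what makes migration testing workable: each suffix maps to its own test-storage-* directory, wiped on creation, so two databases can coexist on disk. A minimal sketch of that isolation, assuming the SpecDataBase class from this hunk (the suffixes are illustrative):

	# Each instance wipes and owns its own directory, so an “origin” and a
	# “target” database can live side by side during a migration test.
	origin = DODB::SpecDataBase.new "-migration-origin" # uses test-storage-migration-origin
	target = DODB::SpecDataBase.new "-migration-target" # uses test-storage-migration-target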
@@ -270,9 +298,48 @@ describe "DODB::DataBase" do
 			end
 		end
 
-		# Migration testing code will go here as soon as migration testing
-		# becomes relevant (due to format changes or so). For small projects,
-		# reindexing will work very well in the meantime.
+		it "migrates properly" do
+			old_db = DODB::DataBase(String, PrimitiveShip).new "test-storage-migration-origin"
+
+			old_ships_by_name = old_db.new_index "name", &.name
+			old_ships_by_class = old_db.new_partition "class", &.class_name
+
+			PrimitiveShip.all_ships.each do |ship|
+				old_db[ship.id] = ship
+			end
+
+			# At this point, the “old” DB is filled. Now we need to convert
+			# to the new DB.
+
+			new_db = DODB::SpecDataBase.new "-migration-target"
+
+			new_ships_by_class = new_db.new_partition "class", &.class
+			new_ships_by_tags = new_db.new_tags "tags", &.tags
+			new_ships_by_tags = new_db.new_tags "tags", &.tags
+
+			old_db.each do |id, ship|
+				new_ship = Ship.new ship.name,
+					class: ship.class_name,
+					id: ship.id,
+					tags: Array(String).new.tap { |tags|
+						tags << "name ship" if ship.name == ship.class_name
+					}
+
+				new_db[new_ship.id] = new_ship
+			end
+
+			# At this point, the conversion is done, so… we’re making a few
+			# arbitrary tests on the new data.
+
+			old_db.each do |old_id, old_ship|
+				ship = new_db[old_id]
+
+				ship.id.should eq(old_ship.id)
+				ship.class.should eq(old_ship.class_name)
+
+				ship.tags.any?(&.==("name ship")).should be_true if ship.name == ship.class
+			end
+		end
 	end
 end
 
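The test above doubles as the reference migration recipe: walk the old database with #each, rebuild every value under the new schema, and store it in a fresh database under the same key. A condensed sketch of that loop, assuming the PrimitiveShip and Ship classes from this diff (directory names are illustrative, and the tag recomputation is elided):

	# Copy-and-convert: each old-schema record becomes a new-schema record.
	old_db = DODB::DataBase(String, PrimitiveShip).new "test-storage-old"
	new_db = DODB::SpecDataBase.new "-new"

	old_db.each do |id, primitive|
		new_db[id] = Ship.new primitive.name,
			class: primitive.class_name, # “class_name” is renamed to “class”.
			id: primitive.id,
			tags: [] of String # Recompute tags here if needed.
	end
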
src/fsdb.cr | 177
@@ -1,177 +0,0 @@
-require "file_utils"
-require "json"
-
-require "./dodb/*"
-
-class DODB::DataBase(K, V)
-	@indexers = [] of Indexer(V)
-
-	def initialize(@directory_name : String)
-		Dir.mkdir_p data_path
-	end
-
-	##
-	# name is the name that will be used on the file system.
-	def new_partition(name : String, &block : Proc(V, String))
-		Partition(V).new(@directory_name, name, block).tap do |table|
-			@indexers << table
-		end
-	end
-
-	##
-	# name is the name that will be used on the file system.
-	def new_index(name : String, &block : Proc(V, String))
-		Index(V).new(@directory_name, name, block).tap do |indexer|
-			@indexers << indexer
-		end
-	end
-
-	def new_tags(name : String, &block : Proc(V, Array(String)))
-		Tags(V).new(@directory_name, name, block).tap do |tags|
-			@indexers << tags
-		end
-	end
-
-	def get_index(name : String, key)
-		index = @indexers.find &.name.==(name)
-
-		index.not_nil!.as(DODB::Index).get key
-	end
-
-	# FIXME: Is this “key” really a K, not just a String?
-	def get_partition(table_name : String, partition_name : String)
-		partition = @indexers.find &.name.==(table_name)
-
-		partition.not_nil!.as(DODB::Partition).get partition_name
-	end
-
-	def get_tags(name, key : K)
-		partition = @indexers.find &.name.==(name)
-
-		partition.not_nil!.as(DODB::Tags).get name, key
-	end
-
-	def []?(key : K) : V?
-		self[key]
-	rescue MissingEntry
-		# FIXME: Only rescue JSON and “no such file” errors.
-		return nil
-	end
-
-	def [](key : K) : V
-		raise MissingEntry.new(key) unless ::File.exists? file_path key
-
-		read file_path key
-	end
-
-	def []=(key : K, value : V)
-		old_value = self.[key]?
-
-		check_collisions! key, value, old_value
-
-		# Removes any old indices or partitions pointing to a value about
-		# to be replaced.
-		if old_value
-			remove_partitions key, old_value
-		end
-
-		# Avoids corruption in case the application crashes while writing.
-		file_path(key).tap do |path|
-			::File.write "#{path}.new", value.to_json
-			::FileUtils.mv "#{path}.new", path
-		end
-
-		write_partitions key, value
-	end
-
-	def check_collisions!(key : K, value : V, old_value : V?)
-		@indexers.each &.check!(key, value, old_value)
-	end
-
-	def write_partitions(key : K, value : V)
-		@indexers.each &.index(key, value)
-	end
-
-	def delete(key : K)
-		value = self[key]?
-
-		return if value.nil?
-
-		begin
-			::File.delete file_path key
-		rescue
-			# FIXME: Only intercept “no such file” errors
-		end
-
-		remove_partitions key, value
-
-		value
-	end
-
-	def remove_partitions(key : K, value : V)
-		@indexers.each &.deindex(key, value)
-	end
-
-	##
-	# CAUTION: Very slow. Try not to use.
-	# Can be useful for making dumps or to restore a database, however.
-	def each
-		dirname = data_path
-		Dir.each_child dirname do |child|
-			next if child.match /^\./
-
-			full_path = "#{dirname}/#{child}"
-
-			begin
-				# FIXME: Only intercept JSON parsing errors.
-				field = read full_path
-			rescue
-				next
-			end
-
-			# FIXME: Will only work for String. :(
-			key = child.gsub /\.json$/, ""
-
-			yield key, field
-		end
-	end
-
-	##
-	# CAUTION: Very slow. Try not to use.
-	def to_h
-		hash = ::Hash(K, V).new
-
-		each do |key, value|
-			hash[key] = value
-		end
-
-		hash
-	end
-
-	private def data_path
-		"#{@directory_name}/data"
-	end
-
-	private def file_path(key : K)
-		"#{data_path}/#{key.to_s}.json"
-	end
-
-	private def read(file_path : String)
-		V.from_json ::File.read file_path
-	end
-
-	# A very slow operation that removes all indices and then rewrites
-	# them all.
-	def reindex_everything!
-		old_data = to_h
-
-		old_data.each do |key, value|
-			self.delete key
-		end
-
-		old_data.each do |key, value|
-			self[key] = value
-		end
-	end
-end
-
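
One detail of the removed file worth keeping in mind is the write path in #[]=: the JSON is first written to a temporary “#{path}.new” file and then moved over the destination, so a crash mid-write never leaves a truncated document behind. A standalone sketch of the same write-then-rename idea (the helper name is ours, not part of the API):

	require "file_utils"

	# The destination either keeps its old content or receives the complete
	# new content; a same-filesystem move is an atomic rename on POSIX.
	def atomic_write(path : String, content : String)
		tmp = "#{path}.new"
		::File.write tmp, content
		::FileUtils.mv tmp, path
	end

	atomic_write "ship.json", %({"name": "Kamikaze"})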
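
Taken together, DODB::DataBase(K, V) is a small document store over plain JSON files, with indices, partitions and tags maintained on every write. A quick usage sketch of the API shown above, with a hypothetical Car type and storage directory (assumes the DODB sources are required):

	require "file_utils"
	require "json"
	require "uuid"

	# Hypothetical stored type; any JSON::Serializable value works.
	class Car
		include JSON::Serializable

		property id : String
		property name : String
		property color : String

		def initialize(@name, @color, @id = UUID.random.to_s)
		end
	end

	db = DODB::DataBase(String, Car).new "cars-storage"

	cars_by_name = db.new_index "name", &.name        # One value per key.
	cars_by_color = db.new_partition "color", &.color # Many values per key.

	car = Car.new "Corvet", "red"
	db[car.id] = car # Writes cars-storage/data/<id>.json and the indices.

	db.get_index "name", "Corvet"   # => the car named “Corvet”
	db.get_partition "color", "red" # => every red car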