2019-12-11 22:41:23 +01:00
|
|
|
|
require "spec"
|
|
|
|
|
require "file_utils"
|
2024-05-10 15:23:28 +02:00
|
|
|
|
require "./db-ships.cr"
|
2019-12-11 22:41:23 +01:00
|
|
|
|
|
2024-05-09 00:03:34 +02:00
|
|
|
|
# Runs the given block in a forked child process and returns a
# Process handle the parent can `#wait` on.
#
# NOTE(review): relies on the internal `Crystal::System::Process.fork`
# API — confirm it still exists on compiler upgrades.
def fork_process(&)
	child = Crystal::System::Process.fork { yield }
	Process.new child
end
|
2019-12-17 18:16:13 +01:00
|
|
|
|
|
2019-12-11 23:18:20 +01:00
|
|
|
|
describe "DODB::DataBase" do
	describe "basics" do
		it "store and get data" do
			db = DODB::SpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			db.to_a.sort.should eq(Ship.all_ships.sort)
		end

		it "rewrite already stored data" do
			db = DODB::SpecDataBase(Ship).new
			ship = Ship.all_ships[0]

			key = db << ship

			# Overwrite the stored entry, then restore the original.
			db[key] = Ship.new "broken"
			db[key] = ship

			db[key].should eq(ship)
		end

		it "properly remove data" do
			db = DODB::SpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			# Remove as many entries as were stored.
			Ship.all_ships.size.times do
				db.pop
			end

			Ship.all_ships.each_with_index do |_ship, i|
				# FIXME: Should it raise a particular exception?
				expect_raises DODB::MissingEntry do
					db[i]
				end

				db[i]?.should be_nil
			end
		end

		it "preserves data on reopening" do
			db1 = DODB::SpecDataBase(Ship).new
			db1 << Ship.kisaragi

			db1.to_a.size.should eq(1)

			# Reopen the same storage without wiping it.
			db2 = DODB::SpecDataBase(Ship).new remove_previous_data: false
			db2 << Ship.mutsuki

			# Both handles share the on-disk data.
			db1.to_a.size.should eq(2)
		end

		it "iterates in normal and reversed order" do
			db = DODB::SpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			# The two #each test iteration.
			db.each_with_index do |item, index|
				item.should eq Ship.all_ships[index]
			end

			db.each_with_index(reversed: true) do |item, index|
				item.should eq Ship.all_ships[index]
			end

			# Actual reversal is tested here.
			db.to_a(reversed: true).should eq db.to_a.reverse
		end

		it "respects the provided offsets if any" do
			db = DODB::SpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			db.to_a(start_offset: 0, end_offset: 0)[0]?.should eq Ship.mutsuki
			db.to_a(start_offset: 1, end_offset: 1)[0]?.should eq Ship.kisaragi
			db.to_a(start_offset: 2, end_offset: 2)[0]?.should eq Ship.yayoi

			db.to_a(start_offset: 0, end_offset: 2).should eq [
				Ship.mutsuki, Ship.kisaragi, Ship.yayoi
			]
		end
	end

	describe "indices" do
		it "do basic indexing" do
			db = DODB::SpecDataBase(Ship).new

			db_ships_by_name = db.new_index "name", &.name

			Ship.all_ships.each do |ship|
				db << ship
			end

			# FIX: was `each_with_index do |ship|` — the index was never
			# requested nor used, plain #each is the correct call.
			Ship.all_ships.each do |ship|
				db_ships_by_name.get?(ship.name).should eq(ship)
			end
		end

		it "raise on index overload" do
			db = DODB::SpecDataBase(Ship).new

			# The index is created for its side effect: it enforces uniqueness.
			db_ships_by_name = db.new_index "name", &.name

			db << Ship.kisaragi

			# Should not be allowed to store an entry whose “name” field
			# already exists.
			expect_raises(DODB::IndexOverload) do
				db << Ship.kisaragi
			end
		end

		it "properly deindex" do
			db = DODB::SpecDataBase(Ship).new

			db_ships_by_name = db.new_index "name", &.name

			Ship.all_ships.each do |ship|
				db << ship
			end

			Ship.all_ships.each_with_index do |_ship, i|
				db.delete i
			end

			Ship.all_ships.each do |ship|
				db_ships_by_name.get?(ship.name).should be_nil
			end
		end

		it "properly reindex" do
			db = DODB::SpecDataBase(Ship).new

			db_ships_by_name = db.new_index "name", &.name

			key = db << Ship.kisaragi

			# We give the old id to the new ship, to get it replaced in
			# the database.
			some_new_ship = Ship.all_ships[2].clone

			db[key] = some_new_ship

			db[key].should eq(some_new_ship)

			db_ships_by_name.get?(some_new_ship.name).should eq(some_new_ship)
		end

		it "properly updates" do
			db = DODB::SpecDataBase(Ship).new

			db_ships_by_name = db.new_index "name", &.name

			Ship.all_ships.each do |ship|
				db << ship
			end

			new_kisaragi = Ship.kisaragi.clone.tap do |s|
				s.name = "Kisaragi Kai" # Don’t think about it too much.
			end

			# We’re changing an indexed value on purpose.
			db_ships_by_name.update "Kisaragi", new_kisaragi

			db_ships_by_name.get?("Kisaragi").should be_nil
			db_ships_by_name.get?(new_kisaragi.name).should eq new_kisaragi
		end
	end

	describe "partitions" do
		it "do basic partitioning" do
			db = DODB::SpecDataBase(Ship).new

			db_ships_by_class = db.new_partition "class", &.klass

			Ship.all_ships.each do |ship|
				db << ship
			end

			Ship.all_ships.each do |ship|
				db_ships_by_class.get(ship.klass).should contain(ship)
			end

			# We extract the possible classes to do test on them.
			ship_classes = Ship.all_ships.map(&.klass).uniq
			ship_classes.each do |klass|
				partition = db_ships_by_class.get klass

				# A partition on “class” should contain entries that all
				# share the same value of “class”.
				# (Was a map+reduce over booleans; #all? is the idiom and
				# does not raise on an empty partition.)
				partition.all?(&.klass.==(klass)).should be_true
			end

			db_ships_by_class.get?("does-not-exist").should be_nil
		end

		it "removes select elements from partitions" do
			db = DODB::SpecDataBase(Ship).new

			db_ships_by_class = db.new_partition "class", &.klass

			Ship.all_ships.each do |ship|
				db << ship
			end

			db_ships_by_class.delete "Mutsuki", &.name.==("Kisaragi")

			Ship.all_ships.map(&.klass).uniq.each do |klass|
				partition = db_ships_by_class.get klass

				partition.any?(&.name.==("Kisaragi")).should be_false
			end
		end
	end

	describe "tags" do
		it "do basic tagging" do
			db = DODB::SpecDataBase(Ship).new

			db_ships_by_tags = db.new_tags "tags", &.tags

			Ship.all_ships.each do |ship|
				db << ship
			end

			db_ships_by_tags.get("flagship").should eq([Ship.flagship])

			# All returned entries should have the requested tag.
			# (Was a map+reduce over booleans; #all? is the idiom.)
			db_ships_by_tags.get("name ship")
				.all?(&.tags.includes?("name ship"))
				.should be_true

			# There shouldn’t be one in our data about WWII Japanese warships…
			db_ships_by_tags.get?("starship").should be_nil
		end

		it "properly removes tags" do
			db = DODB::SpecDataBase(Ship).new

			db_ships_by_tags = db.new_tags "tags", &.tags

			Ship.all_ships.each do |ship|
				db << ship
			end

			# Removing the “flagship” tag, brace for impact.
			flagship, index = db_ships_by_tags.get_with_indice("flagship")[0]
			flagship.tags = [] of String
			db[index] = flagship

			# ship, index = db_ships_by_tags.update(tag: "flagship") do |ship, index|
			#	ship.tags = [] of String
			#	db[index] = ship
			# end

			db_ships_by_tags.get("flagship").should eq([] of Ship)
		end

		it "gets items that have multiple tags" do
			db = DODB::SpecDataBase(Ship).new

			db_ships_by_tags = db.new_tags "tags", &.tags

			Ship.all_ships.each do |ship|
				db << ship
			end

			# Order of the requested tags must not matter.
			results = db_ships_by_tags.get(["flagship", "name ship"])
			results.should eq([Ship.yamato])

			results = db_ships_by_tags.get(["name ship", "flagship"])
			results.should eq([Ship.yamato])

			results = db_ships_by_tags.get(["flagship"])
			results.should eq([Ship.yamato])
		end
	end

	describe "atomic operations" do
		it "safe_get and safe_get?" do
			db = DODB::SpecDataBase(Ship).new

			db_ships_by_name = db.new_index "name", &.name

			Ship.all_ships.each do |ship|
				db << ship
			end

			Ship.all_ships.each do |ship|
				db_ships_by_name.safe_get ship.name do |results|
					results.should eq(ship)
				end

				db_ships_by_name.safe_get? ship.name do |results|
					results.should eq(ship)
				end
			end
		end
	end

	describe "tools" do
		it "rebuilds indexes" do
			db = DODB::SpecDataBase(Ship).new

			db_ships_by_name  = db.new_index     "name",  &.name
			db_ships_by_class = db.new_partition "class", &.klass
			# Created for its side effect; the tag index also gets rebuilt.
			db_ships_by_tags  = db.new_tags      "tags",  &.tags

			Ship.all_ships.each do |ship|
				db << ship
			end

			db.reindex_everything!

			Ship.all_ships.each do |ship|
				db_ships_by_name.get?(ship.name).should eq(ship)
				db_ships_by_class.get(ship.klass).should contain(ship)
			end
		end

		it "migrates properly" do
			::FileUtils.rm_rf "test-storage-migration-origin"
			old_db = DODB::DataBase(PrimitiveShip).new "test-storage-migration-origin"

			# Created for their side effects: the old DB gets indexed.
			old_ships_by_name  = old_db.new_index "name", &.name
			old_ships_by_class = old_db.new_partition "class", &.class_name

			PrimitiveShip.all_ships.each do |ship|
				old_db << ship
			end

			# At this point, the “old” DB is filled. Now we need to convert
			# to the new DB.

			new_db = DODB::SpecDataBase(Ship).new "-migration-target"

			new_ships_by_name  = new_db.new_index "name", &.name
			new_ships_by_class = new_db.new_partition "class", &.klass
			new_ships_by_tags  = new_db.new_tags "tags", &.tags

			old_db.each_with_index do |ship, index|
				new_ship = Ship.new ship.name,
					klass: ship.class_name,
					id: ship.id,
					tags: Array(String).new.tap { |tags|
						tags << "name ship" if ship.name == ship.class_name
					}

				new_db[index] = new_ship
			end

			# At this point, the conversion is done, so… we’re making a few
			# arbitrary tests on the new data.

			old_db.each_with_index do |old_ship, old_index|
				ship = new_db[old_index]

				ship.id.should eq(old_ship.id)
				ship.klass.should eq(old_ship.class_name)

				ship.tags.any?(&.==("name ship")).should be_true if ship.name == ship.klass
			end
		end
	end

	describe "parallel support" do
		# Not sure how many forks would be safe in a test like that.
		fork_count = 25
		entries_per_fork = 100

		it "works for pushing values" do
			db = DODB::SpecDataBase(Ship).new

			processes = [] of Process

			fork_count.times do |fork_id|
				processes << fork_process do
					entries_per_fork.times do |entry_id|
						db << Ship.new("entry-#{fork_id}-#{entry_id}", "???")
					end
				end
			end

			processes.each &.wait

			dump = db.to_a

			dump.size.should eq fork_count * entries_per_fork
		end

		it "works for updating values" do
			db = DODB::SpecDataBase(Ship).new
			db_entries_by_name = db.new_index "name", &.name

			# First pass, creating data.
			processes = [] of Process
			fork_count.times do |fork_id|
				processes << fork_process do
					entries_per_fork.times do |entry_id|
						db << Ship.new("entry-#{fork_id}-#{entry_id}", "???")
					end
				end
			end
			processes.each &.wait

			# Second pass, updating data.
			processes = [] of Process
			fork_count.times do |fork_id|
				processes << fork_process do
					entries_per_fork.times do |entry_id|
						db_entries_by_name.update Ship.new("entry-#{fork_id}-#{entry_id}", "???", tags: ["updated"])
					end
				end
			end
			processes.each &.wait

			# Third pass, testing database content.
			# (An unused `dump = db.to_a` snapshot was removed here.)
			fork_count.times do |fork_id|
				entries_per_fork.times do |entry_id|
					entry = db_entries_by_name.get "entry-#{fork_id}-#{entry_id}"

					entry.tags.should eq ["updated"]
				end
			end
		end

		it "does parallel-safe updates" do
			db = DODB::SpecDataBase(Ship).new
			db_entries_by_name = db.new_index "name", &.name

			# We’ll be storing an integer in the "klass" field, and incrementing
			# it in forks in a second time.
			db << Ship.new("test", "0")

			processes = [] of Process
			fork_count.times do
				processes << fork_process do
					entries_per_fork.times do
						# safe_get holds the lock while we read-modify-write.
						db_entries_by_name.safe_get "test" do |entry|
							entry.klass = (entry.klass.to_i + 1).to_s

							db_entries_by_name.update "test", entry
						end
					end
				end
			end
			processes.each &.wait

			db_entries_by_name.get("test").klass.should eq((fork_count * entries_per_fork).to_s)
		end
	end
end
|
|
|
|
|
|
2024-05-10 15:23:28 +02:00
|
|
|
|
# Basically the same thing as before, with some slight
|
|
|
|
|
# differences based on the fact that changing the on-disk data
|
|
|
|
|
# won't change the cached one.
|
|
|
|
|
describe "DODB::CachedDataBase" do
	describe "basics" do
		it "store and get data" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			db.to_a.sort.should eq(Ship.all_ships.sort)

			db.rm_storage_dir
		end

		it "rewrite already stored data" do
			db = DODB::CachedSpecDataBase(Ship).new
			ship = Ship.all_ships[0]

			key = db << ship

			# Overwrite the stored entry, then restore the original.
			db[key] = Ship.new "broken"
			db[key] = ship

			db[key].should eq(ship)

			db.rm_storage_dir
		end

		it "properly remove data" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			# Remove as many entries as were stored.
			Ship.all_ships.size.times do
				db.pop
			end

			Ship.all_ships.each_with_index do |_ship, i|
				# FIXME: Should it raise a particular exception?
				expect_raises DODB::MissingEntry do
					db[i]
				end

				db[i]?.should be_nil
			end

			db.rm_storage_dir
		end

		it "preserves data on reopening" do
			db1 = DODB::CachedSpecDataBase(Ship).new
			db1 << Ship.kisaragi

			db1.to_a.size.should eq(1)

			db2 = DODB::CachedSpecDataBase(Ship).new remove_previous_data: false
			db2 << Ship.mutsuki

			# Only difference with DODB::DataBase: concurrent DBs cannot
			# coexist — db1's cache won't see db2's write, so we check db2.
			db2.to_a.size.should eq(2)

			db1.rm_storage_dir
			db2.rm_storage_dir
		end

		it "iterates in normal and reversed order" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			# The two #each test iteration.
			db.each_with_index do |item, index|
				item.should eq Ship.all_ships[index]
			end

			db.each_with_index(reversed: true) do |item, index|
				item.should eq Ship.all_ships[index]
			end

			# Actual reversal is tested here.
			db.to_a(reversed: true).should eq db.to_a.reverse

			db.rm_storage_dir
		end

		it "respects the provided offsets if any" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			db.to_a(start_offset: 0, end_offset: 0)[0]?.should eq Ship.mutsuki
			db.to_a(start_offset: 1, end_offset: 1)[0]?.should eq Ship.kisaragi
			db.to_a(start_offset: 2, end_offset: 2)[0]?.should eq Ship.yayoi

			db.to_a(start_offset: 0, end_offset: 2).should eq [
				Ship.mutsuki, Ship.kisaragi, Ship.yayoi
			]

			db.rm_storage_dir
		end
	end

	describe "indices" do
		it "do basic indexing" do
			db = DODB::CachedSpecDataBase(Ship).new

			db_ships_by_name = db.new_index "name", &.name

			Ship.all_ships.each do |ship|
				db << ship
			end

			# FIX: was `each_with_index do |ship|` — the index was never
			# requested nor used, plain #each is the correct call.
			Ship.all_ships.each do |ship|
				db_ships_by_name.get?(ship.name).should eq(ship)
			end

			db.rm_storage_dir
		end

		it "raise on index overload" do
			db = DODB::CachedSpecDataBase(Ship).new

			# The index is created for its side effect: it enforces uniqueness.
			db_ships_by_name = db.new_index "name", &.name

			db << Ship.kisaragi

			# Should not be allowed to store an entry whose “name” field
			# already exists.
			expect_raises(DODB::IndexOverload) do
				db << Ship.kisaragi
			end

			db.rm_storage_dir
		end

		it "properly deindex" do
			db = DODB::CachedSpecDataBase(Ship).new

			db_ships_by_name = db.new_index "name", &.name

			Ship.all_ships.each do |ship|
				db << ship
			end

			Ship.all_ships.each_with_index do |_ship, i|
				db.delete i
			end

			Ship.all_ships.each do |ship|
				db_ships_by_name.get?(ship.name).should be_nil
			end

			db.rm_storage_dir
		end

		it "properly reindex" do
			db = DODB::CachedSpecDataBase(Ship).new

			db_ships_by_name = db.new_index "name", &.name

			key = db << Ship.kisaragi

			# We give the old id to the new ship, to get it replaced in
			# the database.
			some_new_ship = Ship.all_ships[2].clone

			db[key] = some_new_ship

			db[key].should eq(some_new_ship)

			db_ships_by_name.get?(some_new_ship.name).should eq(some_new_ship)

			db.rm_storage_dir
		end

		it "properly updates" do
			db = DODB::CachedSpecDataBase(Ship).new

			db_ships_by_name = db.new_index "name", &.name

			Ship.all_ships.each do |ship|
				db << ship
			end

			new_kisaragi = Ship.kisaragi.clone.tap do |s|
				s.name = "Kisaragi Kai" # Don’t think about it too much.
			end

			# We’re changing an indexed value on purpose.
			db_ships_by_name.update "Kisaragi", new_kisaragi

			db_ships_by_name.get?("Kisaragi").should be_nil
			db_ships_by_name.get?(new_kisaragi.name).should eq new_kisaragi

			db.rm_storage_dir
		end
	end

	describe "partitions" do
		it "do basic partitioning" do
			db = DODB::CachedSpecDataBase(Ship).new

			db_ships_by_class = db.new_partition "class", &.klass

			Ship.all_ships.each do |ship|
				db << ship
			end

			Ship.all_ships.each do |ship|
				db_ships_by_class.get(ship.klass).should contain(ship)
			end

			# We extract the possible classes to do test on them.
			ship_classes = Ship.all_ships.map(&.klass).uniq
			ship_classes.each do |klass|
				partition = db_ships_by_class.get klass

				# A partition on “class” should contain entries that all
				# share the same value of “class”.
				# (Was a map+reduce over booleans; #all? is the idiom and
				# does not raise on an empty partition.)
				partition.all?(&.klass.==(klass)).should be_true
			end

			db_ships_by_class.get?("does-not-exist").should be_nil

			db.rm_storage_dir
		end

		it "removes select elements from partitions" do
			db = DODB::CachedSpecDataBase(Ship).new

			db_ships_by_class = db.new_partition "class", &.klass

			Ship.all_ships.each do |ship|
				db << ship
			end

			db_ships_by_class.delete "Mutsuki", &.name.==("Kisaragi")

			Ship.all_ships.map(&.klass).uniq.each do |klass|
				partition = db_ships_by_class.get klass

				partition.any?(&.name.==("Kisaragi")).should be_false
			end

			db.rm_storage_dir
		end
	end

	describe "tags" do
		it "do basic tagging" do
			db = DODB::CachedSpecDataBase(Ship).new

			db_ships_by_tags = db.new_tags "tags", &.tags

			Ship.all_ships.each do |ship|
				db << ship
			end

			db_ships_by_tags.get("flagship").should eq([Ship.flagship])

			# All returned entries should have the requested tag.
			# (Was a map+reduce over booleans; #all? is the idiom.)
			db_ships_by_tags.get("name ship")
				.all?(&.tags.includes?("name ship"))
				.should be_true

			# There shouldn’t be one in our data about WWII Japanese warships…
			db_ships_by_tags.get?("starship").should be_nil

			db.rm_storage_dir
		end

		it "properly removes tags" do
			db = DODB::CachedSpecDataBase(Ship).new

			db_ships_by_tags = db.new_tags "tags", &.tags

			Ship.all_ships.each do |ship|
				db << ship
			end

			# Removing the “flagship” tag, brace for impact.
			flagship, index = db_ships_by_tags.get_with_indice("flagship")[0]
			# Clone first: the cached DB hands back the cached object itself.
			flagship = flagship.clone
			flagship.tags = [] of String
			db[index] = flagship

			# ship, index = db_ships_by_tags.update(tag: "flagship") do |ship, index|
			#	ship.tags = [] of String
			#	db[index] = ship
			# end

			db_ships_by_tags.get("flagship").should eq([] of Ship)

			db.rm_storage_dir
		end

		it "gets items that have multiple tags" do
			db = DODB::CachedSpecDataBase(Ship).new

			db_ships_by_tags = db.new_tags "tags", &.tags

			Ship.all_ships.each do |ship|
				db << ship
			end

			# Order of the requested tags must not matter.
			results = db_ships_by_tags.get(["flagship", "name ship"])
			results.should eq([Ship.yamato])

			results = db_ships_by_tags.get(["name ship", "flagship"])
			results.should eq([Ship.yamato])

			results = db_ships_by_tags.get(["flagship"])
			results.should eq([Ship.yamato])

			db.rm_storage_dir
		end
	end

	describe "atomic operations" do
		it "safe_get and safe_get?" do
			db = DODB::CachedSpecDataBase(Ship).new

			db_ships_by_name = db.new_index "name", &.name

			Ship.all_ships.each do |ship|
				db << ship
			end

			Ship.all_ships.each do |ship|
				db_ships_by_name.safe_get ship.name do |results|
					results.should eq(ship)
				end

				db_ships_by_name.safe_get? ship.name do |results|
					results.should eq(ship)
				end
			end

			db.rm_storage_dir
		end
	end

	describe "tools" do
		it "rebuilds indexes" do
			db = DODB::CachedSpecDataBase(Ship).new

			db_ships_by_name  = db.new_index     "name",  &.name
			db_ships_by_class = db.new_partition "class", &.klass
			# Created for its side effect; the tag index also gets rebuilt.
			db_ships_by_tags  = db.new_tags      "tags",  &.tags

			Ship.all_ships.each do |ship|
				db << ship
			end

			db.reindex_everything!

			Ship.all_ships.each do |ship|
				db_ships_by_name.get?(ship.name).should eq(ship)
				db_ships_by_class.get(ship.klass).should contain(ship)
			end

			db.rm_storage_dir
		end

		it "migrates properly" do
			old_db = DODB::CachedSpecDataBase(PrimitiveShip).new "-migration-origin"

			# Created for their side effects: the old DB gets indexed.
			old_ships_by_name  = old_db.new_index "name", &.name
			old_ships_by_class = old_db.new_partition "class", &.class_name

			PrimitiveShip.all_ships.each do |ship|
				old_db << ship
			end

			# At this point, the “old” DB is filled. Now we need to convert
			# to the new DB.

			new_db = DODB::CachedSpecDataBase(Ship).new "-migration-target"

			new_ships_by_name  = new_db.new_index "name", &.name
			new_ships_by_class = new_db.new_partition "class", &.klass
			new_ships_by_tags  = new_db.new_tags "tags", &.tags

			old_db.each_with_index do |ship, index|
				new_ship = Ship.new ship.name,
					klass: ship.class_name,
					id: ship.id,
					tags: Array(String).new.tap { |tags|
						tags << "name ship" if ship.name == ship.class_name
					}

				new_db[index] = new_ship
			end

			# At this point, the conversion is done, so… we’re making a few
			# arbitrary tests on the new data.

			old_db.each_with_index do |old_ship, old_index|
				ship = new_db[old_index]

				ship.id.should eq(old_ship.id)
				ship.klass.should eq(old_ship.class_name)

				ship.tags.any?(&.==("name ship")).should be_true if ship.name == ship.klass
			end

			old_db.rm_storage_dir
			new_db.rm_storage_dir
		end
	end
end
|