Tests and benchmarks are now more consistent.
parent 0a357446ef
commit c124b23d7a
@@ -1,6 +1,6 @@
require "benchmark"
require "./benchmark-utilities.cr"
require "./cars.cr"
require "./utilities.cr"
require "./db-cars.cr"

cars_ram = DODB::RAMOnlySpecDataBase(Car).new
cars_cached = DODB::CachedSpecDataBase(Car).new
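
Note (editor's illustration, not part of the commit): `cached_indexes`, `uncached_indexes` and `add_cars` are helpers from the repository's spec files (`db-cars.cr`, `cars.cr`) and are not shown in this diff. Below is a rough, hypothetical sketch of the shape they are used with here; the helper names exist in the diff, but the fields and bodies are assumptions, and only the `Car.new name, color, keywords` call shape appears elsewhere in the commit.

# Hypothetical sketch only; the real helpers live in db-cars.cr and cars.cr.
def cached_indexes(db)
	# Returns an {index, partition, tags} triple, matching the destructuring used above.
	{ db.new_index("name", &.name),
	  db.new_partition("color", &.color),
	  db.new_tags("keywords", &.keywords) }
end

def add_cars(db, count : Int32)
	count.times do |i|
		# Unique names keep a "name" index from raising DODB::IndexOverload.
		db << Car.new "Corvet-#{i}", "red", ["spacious"]
	end
end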
@@ -12,70 +12,75 @@ cached_searchby_name, cached_searchby_color, cached_searchby_keywords = ca
semi_searchby_name, semi_searchby_color, semi_searchby_keywords = cached_indexes cars_semi
uncached_searchby_name, uncached_searchby_color, uncached_searchby_keywords = uncached_indexes cars_uncached

-add_cars cars_ram, 1_000
-add_cars cars_cached, 1_000
-add_cars cars_semi, 1_000
-add_cars cars_uncached, 1_000
+db_size = ENV["DBSIZE"].to_i rescue 1_000
+car_name_to_search = ENV["CARNAME"] rescue "Corvet-#{(db_size/2).to_i}"
+car_color_to_search = ENV["CARCOLOR"] rescue "red"
+car_keyword_to_search = ENV["CARKEYWORD"] rescue "spacious"
+
+add_cars cars_ram, db_size
+add_cars cars_cached, db_size
+add_cars cars_semi, db_size
+add_cars cars_uncached, db_size

# Searching for data with an index.
-puts "search by index 'Corvet-500': get a single value"
+puts "search by index '#{car_name_to_search}': get a single value"
Benchmark.ips do |x|
-	x.report("(ram db and index) searching a data with an index") do
-		corvet = ram_searchby_name.get "Corvet-500"
+	x.report("(ram db and index) searching with an index") do
+		corvet = ram_searchby_name.get car_name_to_search
	end

-	x.report("(cached db and index) searching a data with an index") do
-		corvet = cached_searchby_name.get "Corvet-500"
+	x.report("(cached db and index) searching with an index") do
+		corvet = cached_searchby_name.get car_name_to_search
	end

-	x.report("(semi: uncached db but cached index) searching a data with an index") do
-		corvet = semi_searchby_name.get "Corvet-500"
+	x.report("(semi: uncached db but cached index) searching with an index") do
+		corvet = semi_searchby_name.get car_name_to_search
	end

-	x.report("(uncached db and index) searching a data with an index") do
-		corvet = uncached_searchby_name.get "Corvet-500"
+	x.report("(uncached db and index) searching with an index") do
+		corvet = uncached_searchby_name.get car_name_to_search
	end
end

# Searching for data with a partition.
puts ""
-puts "search by partition 'red': get #{ram_searchby_color.get("red").size} values"
+puts "search by partition #{car_color_to_search}: get #{ram_searchby_color.get(car_color_to_search).size} values"
Benchmark.ips do |x|
-	x.report("(ram db and partition) searching a data with a partition") do
-		corvet = ram_searchby_color.get "red"
+	x.report("(ram db and partition) searching with a partition") do
+		corvet = ram_searchby_color.get car_color_to_search
	end

-	x.report("(cached db and partition) searching a data with a partition") do
-		corvet = cached_searchby_color.get "red"
+	x.report("(cached db and partition) searching with a partition") do
+		corvet = cached_searchby_color.get car_color_to_search
	end

-	x.report("(semi: uncached db but cached partition) searching a data with a partition") do
-		corvet = semi_searchby_color.get "red"
+	x.report("(semi: uncached db but cached partition) searching with a partition") do
+		corvet = semi_searchby_color.get car_color_to_search
	end

-	x.report("(uncached db and partition) searching a data with a partition") do
-		corvet = uncached_searchby_color.get "red"
+	x.report("(uncached db and partition) searching with a partition") do
+		corvet = uncached_searchby_color.get car_color_to_search
	end
end

# Searching for data with a tag.
puts ""
-puts "search by tag 'spacious': get #{ram_searchby_keywords.get("spacious").size} values"
+puts "search by tag #{car_keyword_to_search}: get #{ram_searchby_keywords.get(car_keyword_to_search).size} values"
Benchmark.ips do |x|
-	x.report("(ram db and tag) searching a data with a tag") do
-		corvet = ram_searchby_keywords.get "spacious"
+	x.report("(ram db and tag) searching with a tag") do
+		corvet = ram_searchby_keywords.get car_keyword_to_search
	end

-	x.report("(cached db and tag) searching a data with a tag") do
-		corvet = cached_searchby_keywords.get "spacious"
+	x.report("(cached db and tag) searching with a tag") do
+		corvet = cached_searchby_keywords.get car_keyword_to_search
	end

-	x.report("(semi: uncached db but cached tag) searching a data with a tag") do
-		corvet = semi_searchby_keywords.get "spacious"
+	x.report("(semi: uncached db but cached tag) searching with a tag") do
+		corvet = semi_searchby_keywords.get car_keyword_to_search
	end

-	x.report("(uncached db and tag) searching a data with a tag") do
-		corvet = uncached_searchby_keywords.get "spacious"
+	x.report("(uncached db and tag) searching with a tag") do
+		corvet = uncached_searchby_keywords.get car_keyword_to_search
	end
end

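Aside (not from the commit): the `rescue` fallbacks above work because `ENV[...]` raises `KeyError` when a variable is unset, and `String#to_i` raises on non-numeric input. A minimal sketch of equivalent fallback logic using `ENV.fetch` and `String#to_i?` from the Crystal standard library, plus a hypothetical invocation:

# Sketch only, assuming nothing beyond the Crystal standard library.
db_size = ENV.fetch("DBSIZE", "").to_i? || 1_000
car_name = ENV.fetch("CARNAME", "Corvet-#{(db_size / 2).to_i}")

# Hypothetical run, overriding the defaults (file name assumed):
#   DBSIZE=10000 CARCOLOR=blue crystal run spec/benchmark-cars.cr --release
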
@@ -94,17 +99,12 @@ cached_searchby_name, cached_searchby_color, cached_searchby_keywords = ca
semi_searchby_name, semi_searchby_color, semi_searchby_keywords = cached_indexes cars_semi
uncached_searchby_name, uncached_searchby_color, uncached_searchby_keywords = uncached_indexes cars_uncached

-add_cars cars_ram, 1_000
-add_cars cars_cached, 1_000
-add_cars cars_semi, 1_000
-add_cars cars_uncached, 1_000
-
def perform_add(storage : DODB::Storage(Car), nb_run)
	corvet0 = Car.new "Corvet", "red", [ "shiny", "impressive", "fast", "elegant" ]
	i = 0
	perform_benchmark_average nb_run, do
		corvet = corvet0.clone
-		corvet.name = "Corvet-add-#{i}"
+		corvet.name = "Corvet-#{i}"
		storage << corvet
		i += 1
	end
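
A hypothetical call site for `perform_add` (its closing `end` and any callers fall outside this hunk); it assumes the helper returns the averaged `Time::Span` computed by `perform_benchmark_average`:

# Hypothetical usage sketch, not from the commit.
average = perform_add cars_ram, 100
puts "average insertion time over 100 runs: #{average}"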
@@ -1,7 +1,5 @@
require "benchmark"

require "../src/dodb.cr"
require "./test-data.cr"
require "./db-ships.cr"

class DODBCached < DODB::CachedDataBase(Ship)
	def initialize(storage_ext = "", remove_previous_data = true)
@@ -1,32 +0,0 @@
def perform_something(&block)
	start = Time.monotonic
	yield
	Time.monotonic - start
end

def perform_benchmark_average(ntimes : Int32, &block)
	i = 1
	sum = Time::Span.zero
	while i <= ntimes
		elapsed_time = perform_something &block
		sum += elapsed_time
		i += 1
	end

	sum / ntimes
end

def perform_benchmark_average_verbose(title : String, ntimes : Int32, &block)
	i = 1
	sum = Time::Span.zero
	puts "Execute '#{title}' × #{ntimes}"
	while i <= ntimes
		elapsed_time = perform_something &block
		sum += elapsed_time
		STDOUT.write "\relapsed_time: #{elapsed_time}, average: #{sum/i}".to_slice

		i += 1
	end
	puts ""
	puts "Average: #{sum/ntimes}"
end
spec/cached.cr (430 deleted lines)
@@ -1,430 +0,0 @@
require "spec"
require "file_utils"

require "../src/dodb.cr"
require "./spec-database.cr"
require "./test-data.cr"

describe "DODB::CachedDataBase" do
	describe "basics" do
		it "store and get data" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			db.to_a.sort.should eq(Ship.all_ships.sort)

			db.rm_storage_dir
		end

		it "rewrite already stored data" do
			db = DODB::CachedSpecDataBase(Ship).new
			ship = Ship.all_ships[0]

			key = db << ship

			db[key] = Ship.new "broken"
			db[key] = ship

			db[key].should eq(ship)

			db.rm_storage_dir
		end

		it "properly remove data" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			Ship.all_ships.each do |ship|
				db.pop
			end

			Ship.all_ships.each_with_index do |ship, i|
				# FIXME: Should it raise a particular exception?
				expect_raises DODB::MissingEntry do
					db[i]
				end

				db[i]?.should be_nil
			end

			db.rm_storage_dir
		end

		it "preserves data on reopening" do
			db1 = DODB::CachedSpecDataBase(Ship).new
			db1 << Ship.kisaragi

			db1.to_a.size.should eq(1)

			db2 = DODB::CachedSpecDataBase(Ship).new remove_previous_data: false
			db2 << Ship.mutsuki

			# Only difference with DODB::DataBase: concurrent DB cannot coexists.
			db2.to_a.size.should eq(2)

			db1.rm_storage_dir
			db2.rm_storage_dir
		end

		it "iterates in normal and reversed order" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			# The two #each test iteration.
			db.each_with_index do |item, index|
				item.should eq Ship.all_ships[index]
			end

			db.each_with_index(reversed: true) do |item, index|
				item.should eq Ship.all_ships[index]
			end

			# Actual reversal is tested here.
			db.to_a(reversed: true).should eq db.to_a.reverse

			db.rm_storage_dir
		end

		it "respects the provided offsets if any" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			db.to_a(start_offset: 0, end_offset: 0)[0]?.should eq Ship.mutsuki
			db.to_a(start_offset: 1, end_offset: 1)[0]?.should eq Ship.kisaragi
			db.to_a(start_offset: 2, end_offset: 2)[0]?.should eq Ship.yayoi

			db.to_a(start_offset: 0, end_offset: 2).should eq [
				Ship.mutsuki, Ship.kisaragi, Ship.yayoi
			]

			db.rm_storage_dir
		end
	end

describe "indices" do
|
||||
it "do basic indexing" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
Ship.all_ships.each_with_index do |ship|
|
||||
db_ships_by_name.get?(ship.name).should eq(ship)
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "raise on index overload" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
db << Ship.kisaragi
|
||||
|
||||
# Should not be allowed to store an entry whose “name” field
|
||||
# already exists.
|
||||
expect_raises(DODB::IndexOverload) do
|
||||
db << Ship.kisaragi
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "properly deindex" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
Ship.all_ships.each_with_index do |ship, i|
|
||||
db.delete i
|
||||
end
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db_ships_by_name.get?(ship.name).should be_nil
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "properly reindex" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
key = db << Ship.kisaragi
|
||||
|
||||
# We give the old id to the new ship, to get it replaced in
|
||||
# the database.
|
||||
some_new_ship = Ship.all_ships[2].clone
|
||||
|
||||
db[key] = some_new_ship
|
||||
|
||||
db[key].should eq(some_new_ship)
|
||||
|
||||
db_ships_by_name.get?(some_new_ship.name).should eq(some_new_ship)
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "properly updates" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
new_kisaragi = Ship.kisaragi.clone.tap do |s|
|
||||
s.name = "Kisaragi Kai" # Don’t think about it too much.
|
||||
end
|
||||
|
||||
# We’re changing an indexed value on purpose.
|
||||
db_ships_by_name.update "Kisaragi", new_kisaragi
|
||||
|
||||
db_ships_by_name.get?("Kisaragi").should be_nil
|
||||
db_ships_by_name.get?(new_kisaragi.name).should eq new_kisaragi
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
end
|
||||
|
||||
describe "partitions" do
|
||||
it "do basic partitioning" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_class = db.new_partition "class", &.klass
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db_ships_by_class.get(ship.klass).should contain(ship)
|
||||
end
|
||||
|
||||
# We extract the possible classes to do test on them.
|
||||
ship_classes = Ship.all_ships.map(&.klass).uniq
|
||||
ship_classes.each do |klass|
|
||||
partition = db_ships_by_class.get klass
|
||||
|
||||
# A partition on “class” should contain entries that all
|
||||
# share the same value of “class”.
|
||||
partition.map(&.klass.==(klass)).reduce { |a, b|
|
||||
a && b
|
||||
}.should be_true
|
||||
end
|
||||
|
||||
db_ships_by_class.get?("does-not-exist").should be_nil
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "removes select elements from partitions" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_class = db.new_partition "class", &.klass
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
db_ships_by_class.delete "Mutsuki", &.name.==("Kisaragi")
|
||||
|
||||
Ship.all_ships.map(&.klass).uniq.each do |klass|
|
||||
partition = db_ships_by_class.get klass
|
||||
|
||||
partition.any?(&.name.==("Kisaragi")).should be_false
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
end
|
||||
|
||||
describe "tags" do
|
||||
it "do basic tagging" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_tags = db.new_tags "tags", &.tags
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
db_ships_by_tags.get("flagship").should eq([Ship.flagship])
|
||||
|
||||
# All returned entries should have the requested tag.
|
||||
db_ships_by_tags.get("name ship")
|
||||
.map(&.tags.includes?("name ship"))
|
||||
.reduce { |a, e| a && e }
|
||||
.should be_true
|
||||
|
||||
# There shouldn’t be one in our data about WWII Japanese warships…
|
||||
db_ships_by_tags.get?("starship").should be_nil
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "properly removes tags" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_tags = db.new_tags "tags", &.tags
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
# Removing the “flagship” tag, brace for impact.
|
||||
flagship, index = db_ships_by_tags.get_with_indice("flagship")[0]
|
||||
flagship = flagship.clone
|
||||
flagship.tags = [] of String
|
||||
db[index] = flagship
|
||||
|
||||
|
||||
|
||||
# ship, index = db_ships_by_tags.update(tag: "flagship") do |ship, index|
|
||||
# ship.tags = [] of String
|
||||
# db[index] = ship
|
||||
# end
|
||||
|
||||
db_ships_by_tags.get("flagship").should eq([] of Ship)
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "gets items that have multiple tags" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_tags = db.new_tags "tags", &.tags
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
results = db_ships_by_tags.get(["flagship", "name ship"])
|
||||
results.should eq([Ship.yamato])
|
||||
|
||||
results = db_ships_by_tags.get(["name ship", "flagship"])
|
||||
results.should eq([Ship.yamato])
|
||||
|
||||
results = db_ships_by_tags.get(["flagship"])
|
||||
results.should eq([Ship.yamato])
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
end
|
||||
|
||||
describe "atomic operations" do
|
||||
it "safe_get and safe_get?" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db_ships_by_name.safe_get ship.name do |results|
|
||||
results.should eq(ship)
|
||||
end
|
||||
|
||||
db_ships_by_name.safe_get? ship.name do |results|
|
||||
results.should eq(ship)
|
||||
end
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
end
|
||||
|
||||
describe "tools" do
|
||||
it "rebuilds indexes" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
db_ships_by_class = db.new_partition "class", &.klass
|
||||
db_ships_by_tags = db.new_tags "tags", &.tags
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
db.reindex_everything!
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db_ships_by_name.get?(ship.name).should eq(ship)
|
||||
db_ships_by_class.get(ship.klass).should contain(ship)
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "migrates properly" do
|
||||
old_db = DODB::CachedSpecDataBase(PrimitiveShip).new "-migration-origin"
|
||||
|
||||
old_ships_by_name = old_db.new_index "name", &.name
|
||||
old_ships_by_class = old_db.new_partition "class", &.class_name
|
||||
|
||||
PrimitiveShip.all_ships.each do |ship|
|
||||
old_db << ship
|
||||
end
|
||||
|
||||
# At this point, the “old” DB is filled. Now we need to convert
|
||||
# to the new DB.
|
||||
|
||||
new_db = DODB::CachedSpecDataBase(Ship).new "-migration-target"
|
||||
|
||||
new_ships_by_name = new_db.new_index "name", &.name
|
||||
new_ships_by_class = new_db.new_partition "class", &.klass
|
||||
new_ships_by_tags = new_db.new_tags "tags", &.tags
|
||||
|
||||
old_db.each_with_index do |ship, index|
|
||||
new_ship = Ship.new ship.name,
|
||||
klass: ship.class_name,
|
||||
id: ship.id,
|
||||
tags: Array(String).new.tap { |tags|
|
||||
tags << "name ship" if ship.name == ship.class_name
|
||||
}
|
||||
|
||||
new_db[index] = new_ship
|
||||
end
|
||||
|
||||
# At this point, the conversion is done, so… we’re making a few
|
||||
# arbitrary tests on the new data.
|
||||
|
||||
old_db.each_with_index do |old_ship, old_index|
|
||||
ship = new_db[old_index]
|
||||
|
||||
ship.id.should eq(old_ship.id)
|
||||
ship.klass.should eq(old_ship.class_name)
|
||||
|
||||
ship.tags.any?(&.==("name ship")).should be_true if ship.name == ship.klass
|
||||
end
|
||||
|
||||
old_db.rm_storage_dir
|
||||
new_db.rm_storage_dir
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -1,6 +1,9 @@
require "uuid"
require "json"

require "../src/dodb.cr"
require "./spec-database.cr"

# FIXME: Split the test data in separate files. We don’t care about those here.

class Ship
@@ -1,6 +1,5 @@
require "spec"
require "./benchmark-utilities.cr"
require "./cars.cr"
require "./db-cars.cr"

corvet0 = Car.new "Corvet-0", "red", [ "shiny", "impressive", "fast", "elegant" ]
@@ -1,9 +1,6 @@
require "spec"
require "file_utils"

require "../src/dodb.cr"
require "./spec-database.cr"
require "./test-data.cr"
require "./db-ships.cr"

def fork_process(&)
	Process.new Crystal::System::Process.fork { yield }
@@ -480,3 +477,428 @@ describe "DODB::DataBase" do
	end
end

# Basically the same thing as before, with some slight
# differences based on the fact that changing the on-disk data
# won't change the cached one.
describe "DODB::CachedDataBase" do
	describe "basics" do
		it "store and get data" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			db.to_a.sort.should eq(Ship.all_ships.sort)

			db.rm_storage_dir
		end

		it "rewrite already stored data" do
			db = DODB::CachedSpecDataBase(Ship).new
			ship = Ship.all_ships[0]

			key = db << ship

			db[key] = Ship.new "broken"
			db[key] = ship

			db[key].should eq(ship)

			db.rm_storage_dir
		end

		it "properly remove data" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			Ship.all_ships.each do |ship|
				db.pop
			end

			Ship.all_ships.each_with_index do |ship, i|
				# FIXME: Should it raise a particular exception?
				expect_raises DODB::MissingEntry do
					db[i]
				end

				db[i]?.should be_nil
			end

			db.rm_storage_dir
		end

		it "preserves data on reopening" do
			db1 = DODB::CachedSpecDataBase(Ship).new
			db1 << Ship.kisaragi

			db1.to_a.size.should eq(1)

			db2 = DODB::CachedSpecDataBase(Ship).new remove_previous_data: false
			db2 << Ship.mutsuki

			# Only difference with DODB::DataBase: concurrent DB cannot coexists.
			db2.to_a.size.should eq(2)

			db1.rm_storage_dir
			db2.rm_storage_dir
		end

		it "iterates in normal and reversed order" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			# The two #each test iteration.
			db.each_with_index do |item, index|
				item.should eq Ship.all_ships[index]
			end

			db.each_with_index(reversed: true) do |item, index|
				item.should eq Ship.all_ships[index]
			end

			# Actual reversal is tested here.
			db.to_a(reversed: true).should eq db.to_a.reverse

			db.rm_storage_dir
		end

		it "respects the provided offsets if any" do
			db = DODB::CachedSpecDataBase(Ship).new

			Ship.all_ships.each do |ship|
				db << ship
			end

			db.to_a(start_offset: 0, end_offset: 0)[0]?.should eq Ship.mutsuki
			db.to_a(start_offset: 1, end_offset: 1)[0]?.should eq Ship.kisaragi
			db.to_a(start_offset: 2, end_offset: 2)[0]?.should eq Ship.yayoi

			db.to_a(start_offset: 0, end_offset: 2).should eq [
				Ship.mutsuki, Ship.kisaragi, Ship.yayoi
			]

			db.rm_storage_dir
		end
	end

describe "indices" do
|
||||
it "do basic indexing" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
Ship.all_ships.each_with_index do |ship|
|
||||
db_ships_by_name.get?(ship.name).should eq(ship)
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "raise on index overload" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
db << Ship.kisaragi
|
||||
|
||||
# Should not be allowed to store an entry whose “name” field
|
||||
# already exists.
|
||||
expect_raises(DODB::IndexOverload) do
|
||||
db << Ship.kisaragi
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "properly deindex" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
Ship.all_ships.each_with_index do |ship, i|
|
||||
db.delete i
|
||||
end
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db_ships_by_name.get?(ship.name).should be_nil
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "properly reindex" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
key = db << Ship.kisaragi
|
||||
|
||||
# We give the old id to the new ship, to get it replaced in
|
||||
# the database.
|
||||
some_new_ship = Ship.all_ships[2].clone
|
||||
|
||||
db[key] = some_new_ship
|
||||
|
||||
db[key].should eq(some_new_ship)
|
||||
|
||||
db_ships_by_name.get?(some_new_ship.name).should eq(some_new_ship)
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "properly updates" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
new_kisaragi = Ship.kisaragi.clone.tap do |s|
|
||||
s.name = "Kisaragi Kai" # Don’t think about it too much.
|
||||
end
|
||||
|
||||
# We’re changing an indexed value on purpose.
|
||||
db_ships_by_name.update "Kisaragi", new_kisaragi
|
||||
|
||||
db_ships_by_name.get?("Kisaragi").should be_nil
|
||||
db_ships_by_name.get?(new_kisaragi.name).should eq new_kisaragi
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
end
|
||||
|
||||
describe "partitions" do
|
||||
it "do basic partitioning" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_class = db.new_partition "class", &.klass
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db_ships_by_class.get(ship.klass).should contain(ship)
|
||||
end
|
||||
|
||||
# We extract the possible classes to do test on them.
|
||||
ship_classes = Ship.all_ships.map(&.klass).uniq
|
||||
ship_classes.each do |klass|
|
||||
partition = db_ships_by_class.get klass
|
||||
|
||||
# A partition on “class” should contain entries that all
|
||||
# share the same value of “class”.
|
||||
partition.map(&.klass.==(klass)).reduce { |a, b|
|
||||
a && b
|
||||
}.should be_true
|
||||
end
|
||||
|
||||
db_ships_by_class.get?("does-not-exist").should be_nil
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "removes select elements from partitions" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_class = db.new_partition "class", &.klass
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
db_ships_by_class.delete "Mutsuki", &.name.==("Kisaragi")
|
||||
|
||||
Ship.all_ships.map(&.klass).uniq.each do |klass|
|
||||
partition = db_ships_by_class.get klass
|
||||
|
||||
partition.any?(&.name.==("Kisaragi")).should be_false
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
end
|
||||
|
||||
describe "tags" do
|
||||
it "do basic tagging" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_tags = db.new_tags "tags", &.tags
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
db_ships_by_tags.get("flagship").should eq([Ship.flagship])
|
||||
|
||||
# All returned entries should have the requested tag.
|
||||
db_ships_by_tags.get("name ship")
|
||||
.map(&.tags.includes?("name ship"))
|
||||
.reduce { |a, e| a && e }
|
||||
.should be_true
|
||||
|
||||
# There shouldn’t be one in our data about WWII Japanese warships…
|
||||
db_ships_by_tags.get?("starship").should be_nil
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "properly removes tags" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_tags = db.new_tags "tags", &.tags
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
# Removing the “flagship” tag, brace for impact.
|
||||
flagship, index = db_ships_by_tags.get_with_indice("flagship")[0]
|
||||
flagship = flagship.clone
|
||||
flagship.tags = [] of String
|
||||
db[index] = flagship
|
||||
|
||||
|
||||
|
||||
# ship, index = db_ships_by_tags.update(tag: "flagship") do |ship, index|
|
||||
# ship.tags = [] of String
|
||||
# db[index] = ship
|
||||
# end
|
||||
|
||||
db_ships_by_tags.get("flagship").should eq([] of Ship)
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "gets items that have multiple tags" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_tags = db.new_tags "tags", &.tags
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
results = db_ships_by_tags.get(["flagship", "name ship"])
|
||||
results.should eq([Ship.yamato])
|
||||
|
||||
results = db_ships_by_tags.get(["name ship", "flagship"])
|
||||
results.should eq([Ship.yamato])
|
||||
|
||||
results = db_ships_by_tags.get(["flagship"])
|
||||
results.should eq([Ship.yamato])
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
end
|
||||
|
||||
describe "atomic operations" do
|
||||
it "safe_get and safe_get?" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db_ships_by_name.safe_get ship.name do |results|
|
||||
results.should eq(ship)
|
||||
end
|
||||
|
||||
db_ships_by_name.safe_get? ship.name do |results|
|
||||
results.should eq(ship)
|
||||
end
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
end
|
||||
|
||||
describe "tools" do
|
||||
it "rebuilds indexes" do
|
||||
db = DODB::CachedSpecDataBase(Ship).new
|
||||
|
||||
db_ships_by_name = db.new_index "name", &.name
|
||||
db_ships_by_class = db.new_partition "class", &.klass
|
||||
db_ships_by_tags = db.new_tags "tags", &.tags
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db << ship
|
||||
end
|
||||
|
||||
db.reindex_everything!
|
||||
|
||||
Ship.all_ships.each do |ship|
|
||||
db_ships_by_name.get?(ship.name).should eq(ship)
|
||||
db_ships_by_class.get(ship.klass).should contain(ship)
|
||||
end
|
||||
|
||||
db.rm_storage_dir
|
||||
end
|
||||
|
||||
it "migrates properly" do
|
||||
old_db = DODB::CachedSpecDataBase(PrimitiveShip).new "-migration-origin"
|
||||
|
||||
old_ships_by_name = old_db.new_index "name", &.name
|
||||
old_ships_by_class = old_db.new_partition "class", &.class_name
|
||||
|
||||
PrimitiveShip.all_ships.each do |ship|
|
||||
old_db << ship
|
||||
end
|
||||
|
||||
# At this point, the “old” DB is filled. Now we need to convert
|
||||
# to the new DB.
|
||||
|
||||
new_db = DODB::CachedSpecDataBase(Ship).new "-migration-target"
|
||||
|
||||
new_ships_by_name = new_db.new_index "name", &.name
|
||||
new_ships_by_class = new_db.new_partition "class", &.klass
|
||||
new_ships_by_tags = new_db.new_tags "tags", &.tags
|
||||
|
||||
old_db.each_with_index do |ship, index|
|
||||
new_ship = Ship.new ship.name,
|
||||
klass: ship.class_name,
|
||||
id: ship.id,
|
||||
tags: Array(String).new.tap { |tags|
|
||||
tags << "name ship" if ship.name == ship.class_name
|
||||
}
|
||||
|
||||
new_db[index] = new_ship
|
||||
end
|
||||
|
||||
# At this point, the conversion is done, so… we’re making a few
|
||||
# arbitrary tests on the new data.
|
||||
|
||||
old_db.each_with_index do |old_ship, old_index|
|
||||
ship = new_db[old_index]
|
||||
|
||||
ship.id.should eq(old_ship.id)
|
||||
ship.klass.should eq(old_ship.class_name)
|
||||
|
||||
ship.tags.any?(&.==("name ship")).should be_true if ship.name == ship.klass
|
||||
end
|
||||
|
||||
old_db.rm_storage_dir
|
||||
new_db.rm_storage_dir
|
||||
end
|
||||
end
|
||||
end
|
|
@@ -0,0 +1,17 @@
def perform_something(&block)
	start = Time.monotonic
	yield
	Time.monotonic - start
end

def perform_benchmark_average(ntimes : Int32, &block)
	i = 1
	sum = Time::Span.zero
	while i <= ntimes
		elapsed_time = perform_something &block
		sum += elapsed_time
		i += 1
	end

	sum / ntimes
end
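
For illustration, a usage sketch of the helper defined above; the measured block is arbitrary and not taken from the commit:

# Average the wall-clock time of a block over 1_000 runs; returns a Time::Span.
average = perform_benchmark_average 1_000 do
	(1..100).sum
end
puts "average: #{average}"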