Adding cached operations DODB::CachedDataBase(V).

Karchnu 2020-07-21 12:49:32 +02:00
parent 800d139a3d
commit 161a6e1f44
8 changed files with 850 additions and 240 deletions
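In short: the existing DODB::DataBase(V) and the new DODB::CachedDataBase(V) now share an abstract DODB::Storage(V) base class, so both expose the same API. The cached variant additionally mirrors every entry in an in-RAM Hash: reads are served from memory, while writes still go through to disk. A minimal usage sketch (the storage paths are illustrative; Ship is the test class added in spec/test-data.cr below):

require "./src/dodb.cr"
require "./spec/test-data.cr"

# Same API, different backing storage:
plain  = DODB::DataBase(Ship).new "ships-plain"        # every read is a file read
cached = DODB::CachedDataBase(Ship).new "ships-cached" # reads come from RAM

key = cached << Ship.kisaragi # written to disk *and* kept in the cache
cached[key]                   # served from the in-RAM Hash
cached.delete key             # removes the file, the indices and the cache entry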

spec/benchmark.cr (new file, +83)

@@ -0,0 +1,83 @@
require "benchmark"
require "../src/dodb.cr"
require "./test-data.cr"
class DODBCached < DODB::CachedDataBase(Ship)
def initialize(storage_ext = "", remove_previous_data = true)
storage_dir = "test-storage#{storage_ext}"
if remove_previous_data
::FileUtils.rm_rf storage_dir
end
super storage_dir
end
end
class DODBUnCached < DODB::DataBase(Ship)
def initialize(storage_ext = "", remove_previous_data = true)
storage_dir = "test-storage#{storage_ext}"
if remove_previous_data
::FileUtils.rm_rf storage_dir
end
super storage_dir
end
end
# Use distinct directories so the two databases don't wipe each other's data.
cached = DODBCached.new "-cached"
uncached = DODBUnCached.new "-uncached"
Benchmark.ips do |x|
x.report("adding values with a cache") do
Ship.all_ships.each do |ship|
cached << ship
end
end
x.report("adding values without cache") do
Ship.all_ships.each do |ship|
uncached << ship
end
end
end
cached = DODBCached.new "-cached"
uncached = DODBUnCached.new "-uncached"
Ship.all_ships.each do |ship|
cached << ship
uncached << ship
end
Benchmark.ips do |x|
x.report("to_a with a cache") do
cached.to_a
end
x.report("to_a without cache") do
uncached.to_a
end
end
Benchmark.ips do |x|
x.report("to_h with a cache") do
cached.to_h
end
x.report("to_h without cache") do
uncached.to_h
end
end
Benchmark.ips do |x|
x.report("[0] with a cache") do
cached[0]
end
x.report("[0] without cache") do
uncached[0]
end
end
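These benchmarks can presumably be run with crystal run spec/benchmark.cr --release (Benchmark.ips numbers are only meaningful in release builds). Given the implementation in src/cached.cr below, the cache should mainly pay off in the read reports (to_a, to_h, [0]); inserts still write through to disk, so both “adding values” reports remain dominated by file I/O.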

spec/cached.cr (new file, +402)

@@ -0,0 +1,402 @@
require "spec"
require "file_utils"
require "../src/dodb.cr"
require "./test-data.cr"
class DODB::SpecDataBase < DODB::CachedDataBase(Ship)
def initialize(storage_ext = "", remove_previous_data = true)
storage_dir = "test-storage#{storage_ext}"
if remove_previous_data
::FileUtils.rm_rf storage_dir
end
super storage_dir
end
end
describe "DODB::DataBase::Cached" do
describe "basics" do
it "store and get data" do
db = DODB::SpecDataBase.new
Ship.all_ships.each do |ship|
db << ship
end
db.to_a.sort.should eq(Ship.all_ships.sort)
end
it "rewrite already stored data" do
db = DODB::SpecDataBase.new
ship = Ship.all_ships[0]
key = db << ship
db[key] = Ship.new "broken"
db[key] = ship
db[key].should eq(ship)
end
it "properly remove data" do
db = DODB::SpecDataBase.new
Ship.all_ships.each do |ship|
db << ship
end
Ship.all_ships.each do |ship|
db.pop
end
Ship.all_ships.each_with_index do |ship, i|
# FIXME: Should it raise a particular exception?
expect_raises DODB::MissingEntry do
db[i]
end
db[i]?.should be_nil
end
end
it "preserves data on reopening" do
db1 = DODB::SpecDataBase.new
db1 << Ship.kisaragi
db1.to_a.size.should eq(1)
db2 = DODB::SpecDataBase.new remove_previous_data: false
db2 << Ship.mutsuki
# Only difference with DODB::DataBase: for now, concurrent DBs cannot coexist.
db2.to_a.size.should eq(2)
end
it "iterates in normal and reversed order" do
db = DODB::SpecDataBase.new
Ship.all_ships.each do |ship|
db << ship
end
# These two loops test iteration itself; items keep their original indices even when reversed.
db.each_with_index do |item, index|
item.should eq Ship.all_ships[index]
end
db.each_with_index(reversed: true) do |item, index|
item.should eq Ship.all_ships[index]
end
# Actual reversal is tested here.
db.to_a(reversed: true).should eq db.to_a.reverse
end
it "respects the provided offsets if any" do
db = DODB::SpecDataBase.new
Ship.all_ships.each do |ship|
db << ship
end
db.to_a(start_offset: 0, end_offset: 0)[0]?.should eq Ship.mutsuki
db.to_a(start_offset: 1, end_offset: 1)[0]?.should eq Ship.kisaragi
db.to_a(start_offset: 2, end_offset: 2)[0]?.should eq Ship.yayoi
db.to_a(start_offset: 0, end_offset: 2).should eq [
Ship.mutsuki, Ship.kisaragi, Ship.yayoi
]
end
end
describe "indices" do
it "do basic indexing" do
db = DODB::SpecDataBase.new
db_ships_by_name = db.new_index "name", &.name
Ship.all_ships.each do |ship|
db << ship
end
Ship.all_ships.each do |ship|
db_ships_by_name.get?(ship.name).should eq(ship)
end
end
it "raise on index overload" do
db = DODB::SpecDataBase.new
db_ships_by_name = db.new_index "name", &.name
db << Ship.kisaragi
# Should not be allowed to store an entry whose “name” field
# already exists.
expect_raises(DODB::IndexOverload) do
db << Ship.kisaragi
end
end
it "properly deindex" do
db = DODB::SpecDataBase.new
db_ships_by_name = db.new_index "name", &.name
Ship.all_ships.each do |ship|
db << ship
end
Ship.all_ships.each_with_index do |ship, i|
db.delete i
end
Ship.all_ships.each do |ship|
db_ships_by_name.get?(ship.name).should be_nil
end
end
it "properly reindex" do
db = DODB::SpecDataBase.new
db_ships_by_name = db.new_index "name", &.name
key = db << Ship.kisaragi
# We reuse the old key, so the new ship replaces the previous
# entry in the database.
some_new_ship = Ship.all_ships[2].clone
db[key] = some_new_ship
db[key].should eq(some_new_ship)
db_ships_by_name.get?(some_new_ship.name).should eq(some_new_ship)
end
it "properly updates" do
db = DODB::SpecDataBase.new
db_ships_by_name = db.new_index "name", &.name
Ship.all_ships.each do |ship|
db << ship
end
new_kisaragi = Ship.kisaragi.clone.tap do |s|
s.name = "Kisaragi Kai" # Dont think about it too much.
end
# We're changing an indexed value on purpose.
db_ships_by_name.update "Kisaragi", new_kisaragi
db_ships_by_name.get?("Kisaragi").should be_nil
db_ships_by_name.get?(new_kisaragi.name).should eq new_kisaragi
end
end
describe "partitions" do
it "do basic partitioning" do
db = DODB::SpecDataBase.new
db_ships_by_class = db.new_partition "class", &.klass
Ship.all_ships.each do |ship|
db << ship
end
Ship.all_ships.each do |ship|
db_ships_by_class.get(ship.klass).should contain(ship)
end
# We extract the possible classes to do test on them.
ship_classes = Ship.all_ships.map(&.klass).uniq
ship_classes.each do |klass|
partition = db_ships_by_class.get klass
# A partition on “class” should contain entries that all
# share the same value of “class”.
partition.map(&.klass.==(klass)).reduce { |a, b|
a && b
}.should be_true
end
db_ships_by_class.get("does-not-exist").should eq [] of Ship
end
it "removes select elements from partitions" do
db = DODB::SpecDataBase.new
db_ships_by_class = db.new_partition "class", &.klass
Ship.all_ships.each do |ship|
db << ship
end
db_ships_by_class.delete "Mutsuki", &.name.==("Kisaragi")
Ship.all_ships.map(&.klass).uniq.each do |klass|
partition = db_ships_by_class.get klass
partition.any?(&.name.==("Kisaragi")).should be_false
end
end
end
describe "tags" do
it "do basic tagging" do
db = DODB::SpecDataBase.new
db_ships_by_tags = db.new_tags "tags", &.tags
Ship.all_ships.each do |ship|
db << ship
end
db_ships_by_tags.get("flagship").should eq([Ship.flagship])
# All returned entries should have the requested tag.
db_ships_by_tags.get("name ship")
.map(&.tags.includes?("name ship"))
.reduce { |a, e| a && e }
.should be_true
# There shouldn't be one in our data about WWII Japanese warships…
db_ships_by_tags.get("starship").should eq([] of Ship)
end
it "properly removes tags" do
db = DODB::SpecDataBase.new
db_ships_by_tags = db.new_tags "tags", &.tags
Ship.all_ships.each do |ship|
db << ship
end
# Removing the “flagship” tag, brace for impact.
flagship, index = db_ships_by_tags.get_with_indices("flagship")[0]
flagship.tags = [] of String
db[index] = flagship
# ship, index = db_ships_by_tags.update(tag: "flagship") do |ship, index|
# ship.tags = [] of String
# db[index] = ship
# end
db_ships_by_tags.get("flagship").should eq([] of Ship)
end
it "gets items that have multiple tags" do
db = DODB::SpecDataBase.new
db_ships_by_tags = db.new_tags "tags", &.tags
Ship.all_ships.each do |ship|
db << ship
end
results = db_ships_by_tags.get(["flagship", "name ship"])
results.should eq([Ship.yamato])
results = db_ships_by_tags.get(["name ship", "flagship"])
results.should eq([Ship.yamato])
results = db_ships_by_tags.get(["flagship"])
results.should eq([Ship.yamato])
end
end
describe "atomic operations" do
it "safe_get and safe_get?" do
db = DODB::SpecDataBase.new
db_ships_by_name = db.new_index "name", &.name
Ship.all_ships.each do |ship|
db << ship
end
Ship.all_ships.each do |ship|
db_ships_by_name.safe_get ship.name do |results|
results.should eq(ship)
end
db_ships_by_name.safe_get? ship.name do |results|
results.should eq(ship)
end
end
end
end
describe "tools" do
it "rebuilds indexes" do
db = DODB::SpecDataBase.new
db_ships_by_name = db.new_index "name", &.name
db_ships_by_class = db.new_partition "class", &.klass
db_ships_by_tags = db.new_tags "tags", &.tags
Ship.all_ships.each do |ship|
db << ship
end
db.reindex_everything!
Ship.all_ships.each do |ship|
db_ships_by_name.get?(ship.name).should eq(ship)
db_ships_by_class.get(ship.klass).should contain(ship)
end
end
it "migrates properly" do
::FileUtils.rm_rf "test-storage-migration-origin"
old_db = DODB::DataBase(PrimitiveShip).new "test-storage-migration-origin"
old_ships_by_name = old_db.new_index "name", &.name
old_ships_by_class = old_db.new_partition "class", &.class_name
PrimitiveShip.all_ships.each do |ship|
old_db << ship
end
# At this point, the “old” DB is filled. Now we need to convert
# to the new DB.
new_db = DODB::SpecDataBase.new "-migration-target"
new_ships_by_name = new_db.new_index "name", &.name
new_ships_by_class = new_db.new_partition "class", &.klass
new_ships_by_tags = new_db.new_tags "tags", &.tags
old_db.each_with_index do |ship, index|
new_ship = Ship.new ship.name,
klass: ship.class_name,
id: ship.id,
tags: Array(String).new.tap { |tags|
tags << "name ship" if ship.name == ship.class_name
}
new_db[index] = new_ship
end
# At this point, the conversion is done, so… we're running a few
# arbitrary tests on the new data.
old_db.each_with_index do |old_ship, old_index|
ship = new_db[old_index]
ship.id.should eq(old_ship.id)
ship.klass.should eq(old_ship.class_name)
ship.tags.any?(&.==("name ship")).should be_true if ship.name == ship.klass
end
end
end
end

spec/test-data.cr (new file, +87)

@@ -0,0 +1,87 @@
require "uuid"
require "json"
# FIXME: Split the test data into separate files. We don't care about those here.
class Ship
include JSON::Serializable
def_clone
property id : String
property klass : String
property name : String
property tags : Array(String)
def initialize(@name, @klass = "<unknown>", @id = UUID.random.to_s, @tags = [] of String)
end
# Makes testing arrays of this class easier.
def <=>(other)
@name <=> other.name
end
# Common, reusable test data.
# Those data can be indexed, partitioned or tagged on different parameters,
# and can easily be extended.
class_getter kisaragi = Ship.new("Kisaragi", "Mutsuki")
class_getter mutsuki = Ship.new("Mutsuki", "Mutsuki", tags: ["name ship"])
class_getter yayoi = Ship.new("Yayoi", "Mutsuki")
class_getter destroyers = [
@@mutsuki,
@@kisaragi,
@@yayoi,
Ship.new("Uzuki", "Mutsuki"),
Ship.new("Satsuki", "Mutsuki"),
Ship.new("Shiratsuyu", "Shiratsuyu", tags: ["name ship"]),
Ship.new("Murasame", "Shiratsuyu"),
Ship.new("Yuudachi", "Shiratsuyu")
]
class_getter yamato =
Ship.new("Yamato", "Yamato", tags: ["name ship", "flagship"])
class_getter flagship : Ship = yamato
class_getter battleships = [
@@yamato,
Ship.new("Kongou", "Kongou", tags: ["name ship"]),
Ship.new("Haruna", "Kongou"),
Ship.new("Kirishima", "Kongou"),
Ship.new("Hiei" , "Kongou"),
Ship.new("Musashi", "Yamato"),
Ship.new("Shinano", "Yamato")
]
class_getter all_ships : Array(Ship) = @@destroyers + @@battleships
# Equality is true if every property is identical.
def ==(other)
@id == other.id && @klass == other.klass && @name == other.name &&
@tags == other.tags
end
end
# This will be used for migration testing; basically a variant of the class
# above, with a few extra fields and a few missing ones.
class PrimitiveShip
include JSON::Serializable
property id : String
property name : String
property wooden : Bool = false # Will be removed.
property class_name : String # Will be renamed.
property flagship : Bool = false # Will be moved to tags.
def initialize(@name, @class_name = "<unknown>", @id = UUID.random.to_s, @flagship = false)
end
class_getter kamikaze =
PrimitiveShip.new("Kamikaze", "Kamikaze")
class_getter asakaze =
PrimitiveShip.new("Asakaze", "Kamikaze")
class_getter all_ships : Array(PrimitiveShip) = [
@@kamikaze,
@@asakaze
]
end


@@ -1,94 +1,9 @@
require "spec"
require "file_utils"
require "json"
require "uuid"
require "../src/*"
require "../src/dodb.cr"
require "./test-data.cr"
(The Ship and PrimitiveShip test classes were deleted from this file; they moved verbatim into spec/test-data.cr above.)
class DODB::SpecDataBase < DODB::DataBase(Ship)
def initialize(storage_ext = "", remove_previous_data = true)

src/cached.cr (new file, +113)

@@ -0,0 +1,113 @@
require "file_utils"
require "json"
class Hash(K,V)
def reverse
rev = Array(Tuple(K,V)).new
keys = Array(K).new
each_key do |k|
keys << k
end
keys.reverse.each do |k|
rev << {k, self.[k]}
end
rev
end
end
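# Crystal's Hash iterates in insertion order but (at the time of this commit)
# provides no #reverse, hence the monkey-patch above, which returns the
# key/value pairs as an Array of Tuples in reverse insertion order:
#
#   {1 => "a", 2 => "b"}.reverse # => [{2, "b"}, {1, "a"}]
#
# It backs each_with_index(reversed: true) in the class below.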
class DODB::CachedDataBase(V) < DODB::Storage(V)
@indexers = [] of Indexer(V)
property data = Hash(Int32, V).new
def initialize(@directory_name : String)
Dir.mkdir_p data_path
Dir.mkdir_p locks_directory
begin
self.last_index
rescue
self.last_index = -1
end
# TODO: load the database in RAM at start-up
DODB::DataBase(V).new(@directory_name).each_with_index do |v, index|
puts "loading value #{v} at index #{index}"
self[index] = v
end
end
# Getting data from the hash in RAM.
def []?(key : Int32) : V?
@data[key]
rescue
# FIXME: rescues any error the same way.
return nil
end
def [](key : Int32) : V
# raise MissingEntry.new(key) unless ::File.exists? file_path key
# read file_path key
@data[key] rescue raise MissingEntry.new(key)
end
def []=(index : Int32, value : V)
old_value = self.[index]?
check_collisions! index, value, old_value
# Removes any old indices or partitions pointing to a value about
# to be replaced.
if old_value
remove_partitions index, old_value
end
# Avoids corruption in case the application crashes while writing.
file_path(index).tap do |path|
::File.write "#{path}.new", value.to_json
::FileUtils.mv "#{path}.new", path
end
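# (Write-to-temp-then-rename is the classic atomic-replace pattern: the rename
# is atomic on POSIX filesystems, so a concurrent reader sees either the old
# file or the complete new one, never a partial write.)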
write_partitions index, value
if index > last_index
self.last_index = index
end
@data[index] = value
end
##
# Can be useful for making dumps or to restore a database.
def each_with_index(reversed : Bool = false, start_offset = 0, end_offset : Int32? = nil)
i = -1 # do not trust key to be the right index
(reversed ? @data.reverse : @data).each do |index, v|
i += 1
next if start_offset > i
break unless end_offset.nil? || i <= end_offset
yield v, index
end
end
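# Note: start_offset/end_offset count positions in this walk, while the
# yielded `index` is the entry's stored key; after deletions the two differ.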
def delete(key : Int32)
value = self[key]?
return if value.nil?
begin
::File.delete file_path key
rescue
# FIXME: Only intercept “no such file” errors.
end
remove_partitions key, value
@data.delete key
value
end
private def remove_data!
super
@data = Hash(Int32, V).new
end
end
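The constructor above replays the on-disk database into the cache, so reopening a directory restores previous state (this mirrors the “preserves data on reopening” spec; the directory name is illustrative):

db1 = DODB::CachedDataBase(Ship).new "reopen-demo"
db1 << Ship.mutsuki
db2 = DODB::CachedDataBase(Ship).new "reopen-demo" # replays the disk into RAM
db2.to_a.size # => 1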


@@ -3,18 +3,26 @@ require "json"
require "./dodb/*"
(The class header of DODB::DataBase(V), its @indexers declaration and the begin/rescue last_index block were removed from here; the concrete class is redefined further down as a subclass of the new abstract base.)
abstract class DODB::Storage(V)
property directory_name : String
def initialize(@directory_name : String)
Dir.mkdir_p data_path
Dir.mkdir_p locks_directory
end
def request_lock(name)
r = -1
file_path = get_lock_file_path name
file_perms = 0o644
flags = LibC::O_EXCL | LibC::O_CREAT
while (r = LibC.open file_path, flags, file_perms) == -1
sleep 1.milliseconds
end
LibC.close r
end
def release_lock(name)
File.delete get_lock_file_path name
end
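# A usage sketch for the lock pair above (assumed; the lock name is
# illustrative). The lock file is created with O_EXCL | O_CREAT, so open(2)
# fails while another process holds it, and request_lock spins until the
# file disappears:
#
#   db.request_lock "write"
#   begin
#     # ... critical section ...
#   ensure
#     db.release_lock "write"
#   end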
private def index_file
@@ -44,28 +52,52 @@ class DODB::DataBase(V)
end
end
(request_lock was removed from here; it moved up into DODB::Storage.)
def <<(item : V)
index = last_index + 1
self[index] = item
self.last_index = index
end
(release_lock likewise moved up into DODB::Storage.)
def each(reversed : Bool = false, start_offset = 0, end_offset : Int32? = nil)
each_with_index(
reversed: reversed,
start_offset: start_offset,
end_offset: end_offset
) do |item, index|
yield item
end
end
(new_partition was removed from here; it moved below, next to new_index.)
##
# CAUTION: Very slow. Try not to use.
def to_a(reversed : Bool = false, start_offset = 0, end_offset : Int32? = nil)
array = ::Array(V).new
each(
reversed: reversed,
start_offset: start_offset,
end_offset: end_offset
) do |value|
array << value
end
array
end
##
# CAUTION: Very slow. Try not to use.
def to_h(reversed : Bool = false, start_offset = 0, end_offset : Int32? = nil)
hash = ::Hash(Int32, V).new
each_with_index(
reversed: reversed,
start_offset: start_offset,
end_offset: end_offset
) do |element, index|
hash[index] = element
end
hash
end
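# Both walkers visit every entry: in the plain DataBase below each step is a
# file read plus a JSON parse, which is why these are flagged as very slow;
# CachedDataBase overrides each_with_index to walk its in-RAM Hash instead.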
##
@@ -76,6 +108,14 @@ class DODB::DataBase(V)
end
end
##
# name is the name that will be used on the file system.
def new_partition(name : String, &block : Proc(V, String))
Partition(V).new(self, @directory_name, name, block).tap do |table|
@indexers << table
end
end
def new_tags(name : String, &block : Proc(V, Array(String)))
Tags(V).new(@directory_name, name, block).tap do |tags|
@indexers << tags
@@ -100,12 +140,103 @@ class DODB::DataBase(V)
partition.not_nil!.as(DODB::Tags).get name, key
end
def <<(item : V)
index = last_index + 1
def check_collisions!(key : Int32, value : V, old_value : V?)
@indexers.each &.check!(stringify_key(key), value, old_value)
end
self[index] = item
def write_partitions(key : Int32, value : V)
@indexers.each &.index(stringify_key(key), value)
end
self.last_index = index
def pop
index = last_index
# Some entries may have been removed. We'll skip over those.
# Not the most efficient if a large number of indices are empty.
while index >= 0 && self[index]?.nil?
index = index - 1
end
if index < 0
return nil
end
popped = self[index]
self.delete index
# NOTE: `self.` is required here; a bare `last_index = …` would only create a
# local variable instead of calling the setter.
self.last_index = index - 1
popped
end
private def data_path
"#{@directory_name}/data"
end
private def file_path(key : Int32)
"#{data_path}/%010i.json" % key
end
private def locks_directory : String
"#{@directory_name}/locks"
end
private def get_lock_file_path(name : String)
"#{locks_directory}/#{name}.lock"
end
private def read(file_path : String)
V.from_json ::File.read file_path
end
private def remove_data!
FileUtils.rm_rf data_path
Dir.mkdir_p data_path
end
private def remove_indexing!
@indexers.each do |indexer|
FileUtils.rm_rf indexer.indexing_directory
end
end
# A very slow operation that removes all indices and then rewrites
# them all.
# FIXME: Is this really useful in its current form? We should remove the
# index directories, not the indices based on our current (and
# possibly different from what's stored) data.
def reindex_everything!
old_data = to_h
remove_indexing!
remove_data!
old_data.each do |index, item|
self[index] = item
end
end
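# A sketch of the intended use (assumed), matching the “rebuilds indexes” spec:
# declare the indices first, then rebuild them over pre-existing data:
#
#   db = DODB::DataBase(Ship).new "ships"
#   by_name = db.new_index "name", &.name
#   db.reindex_everything! # dumps to_h, wipes data and indices, rewrites both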
def remove_partitions(key : Int32, value : V)
@indexers.each &.deindex(stringify_key(key), value)
end
abstract def [](key : Int32)
abstract def delete(key : Int32)
end
class DODB::DataBase(V) < DODB::Storage(V)
@indexers = [] of Indexer(V)
def initialize(@directory_name : String)
Dir.mkdir_p data_path
Dir.mkdir_p locks_directory
begin
self.last_index
rescue
self.last_index = -1
end
end
def []?(key : Int32) : V?
@@ -145,36 +276,6 @@ class DODB::DataBase(V)
end
end
(check_collisions!, write_partitions and pop were removed from here; they moved into DODB::Storage above.)
def delete(key : Int32)
value = self[key]?
@@ -191,10 +292,6 @@ class DODB::DataBase(V)
value
end
(remove_partitions was removed from here; it moved into DODB::Storage above.)
private def each_key(reversed = false)
start = 0
_end = last_index
@@ -246,93 +343,6 @@ class DODB::DataBase(V)
yield field, key
end
end
(each, to_a, to_h, the private path and file helpers, and reindex_everything! were removed from here; they moved verbatim into DODB::Storage above.)
end
require "./cached.cr"


@@ -9,7 +9,7 @@ class DODB::Index(V) < DODB::Indexer(V)
property key_proc : Proc(V, String)
getter storage_root : String
@storage : DODB::Storage(V) # was: @storage : DODB::DataBase(V)
def initialize(@storage, @storage_root, @name, @key_proc)
Dir.mkdir_p indexing_directory


@@ -8,7 +8,7 @@ class DODB::Partition(V) < DODB::Indexer(V)
property key_proc : Proc(V, String)
getter storage_root : String
@storage : DODB::Storage(V) # was: @storage : DODB::DataBase(V)
def initialize(@storage, @storage_root, @name, @key_proc)
::Dir.mkdir_p indexing_directory