Compare commits
3 commits
main
...
clear-cach
| Author | SHA1 | Date | |
|---|---|---|---|
| 456117bf20 | |||
| 96fd8ec3a1 | |||
| cd1ff4de79 |
10 changed files with 217 additions and 7 deletions
|
|
@ -120,4 +120,69 @@ describe "SPECDB::Common" do
|
|||
index4_color.data.size.should eq 1 # trigger cache: 1, the color "red"
|
||||
index4_color.data["red"].size.should eq 3 # 3 values in "red"
|
||||
end
|
||||
|
||||
it "testing caches: add some values, clear cache, check the values" do
	car0 = Car.new "Corvet-0", "red", [] of String
	car1 = Car.new "Corvet-1", "red", [] of String
	car2 = Car.new "Corvet-2", "blue", [] of String
	car3 = Car.new "Corvet-3", "violet", [] of String

	# Common storage with a cache limited to 2 entries.
	db          = SPECDB::Common(Car).new "reindex", 2
	index_name  = db.new_index "name", &.name
	index_color = db.new_partition "color", &.color

	db << car0

	# First entry, each cache should have one item.
	index_name.data.size.should eq 1
	index_color.data.size.should eq 1

	db << car1
	db << car2
	db << car3

	# Storage cache should only have the maximum number of allowed entries (2).
	db.data.keys.size.should eq 2

	# Trigger caches don't have limits on the number of entries, therefore they should have:
	# 4 entries for the name index (each entry = 1 car).
	index_name.data.size.should eq 4
	# 3 entries for the color index (each entry = 1 color).
	index_color.data.size.should eq 3

	# All cached entries are dropped.
	db.clear_cache!

	# Caches should be empty.
	db.data.keys.sort.should eq([] of Int32)
	index_name.data.size.should eq 0
	index_color.data.size.should eq 0

	# Get a car from stored data based on its name.
	index_name.get?("Corvet-2").should eq car2
	index_name.data.size.should eq 1
	# This doesn't fill up the cache in the color index.
	index_color.data.size.should eq 0

	# Get a car from stored data based on its color.
	index_color.get?("violet").size.should eq 1

	# The "Storage" database should have both found items in cache.
	db.data.keys.sort.should eq([2, 3] of Int32)

	# Trigger caches should have a single value given that we searched a single item through each one.
	index_name.data.size.should eq 1
	index_color.data.size.should eq 1

	# Loop over entries, filling up the cache for Storage.
	total_values = 0
	db.each_with_key do |_value, _key|
		total_values += 1
	end
	total_values.should eq 4

	# Trigger caches should still have a single entry since we searched through them only once.
	index_name.data.size.should eq 1
	index_color.data.size.should eq 1
end
|
||||
end
|
||||
|
|
|
|||
79
spec/various-common-usages.cr
Normal file
79
spec/various-common-usages.cr
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
require "./db-cars.cr"

rootdb = "/tmp/test-db"

db = DODB::Storage::Cached(Car).new rootdb
#db = DODB::Storage::Common(Car).new rootdb, 2
index_name     = db.new_index "name", &.name
index_color    = db.new_partition "color", &.color
index_keywords = db.new_tags "keywords", &.keywords

car0 = Car.new "Corvet-0", "red", ["shiny"]
car1 = Car.new "Corvet-1", "red", ["slow", "expensive"]
car2 = Car.new "Corvet-2", "blue", ["fast", "elegant"]
car3 = Car.new "Corvet-3", "violet", ["expensive"]

db << car0
db << car1
db << car2
db << car3

# Get a car from stored data based on its name.
if (c = index_name.get?("Corvet-2")) == car2
	puts "We found the Corvet-2: #{c}"
else
	puts "We didn't find the Corvet-2!!!"
end

# Get a car from stored data based on a keyword.
if c = index_keywords.get?("elegant")
	puts "We found the elegant car: #{c}"
else
	puts "We didn't find the elegant car!!!"
end

# All cached entries are dropped.
# WARNING: the Storage::Cached database doesn't read anything on the filesystem and relies entirely
# on cached values. Therefore, clearing the caches means the database won't answer any request
# with the proper values. The content has to be read again through a reindex_everything! for example.
#db.clear_cache!

# Re-open the database and rebuild every index from the on-disk data.
db = DODB::Storage::Cached(Car).new rootdb
index_name     = db.new_index "name", &.name
index_color    = db.new_partition "color", &.color
index_keywords = db.new_tags "keywords", &.keywords
db.reindex_everything!

# Get a car from stored data based on its color.
# NOTE(review): assumes partition `get?` returns a collection (never nil) — confirm in DODB.
if index_color.get?("violet").size == 1
	puts "We found the violet car"
else
	puts "We didn't find the violet car!!!"
end

# Get a car from stored data based on a keyword.
list_of_cars = index_keywords.get?("expensive")
if list_of_cars && !list_of_cars.empty?
	puts "We found the expensive cars:"
	list_of_cars.each { |car| puts "- #{car}" }
else
	puts "We didn't find the expensive car!!!"
end

# Loop over entries, filling up the cache for Storage.
total_values = 0
db.each_with_key do |_value, _key|
	total_values += 1
end

if total_values == 4
	puts "We found all the values"
else
	puts "We didn't find all the values!!!"
end

FileUtils.rm_r rootdb

# Caches should still have a single entry since we searched through them only once.
#index_name.data.size.should eq 1
#index_color.data.size.should eq 1
|
||||
|
|
@ -350,9 +350,23 @@ abstract class DODB::Storage(V)
|
|||
end
|
||||
end
|
||||
|
||||
# Clears the storage cache, if any.
#
# No-op in the base `DODB::Storage` class since there is no cache by default;
# cached storage implementations override this method.
def clear_storage_cache!
end
|
||||
|
||||
# NOTE: clears all caches (`storage` and `triggers`).
def clear_cache!
	clear_storage_cache!
	@triggers.each &.clear_cache!
end
|
||||
|
||||
# Removes all indices and then rewrites them all.
|
||||
#
|
||||
# WARNING: slow operation.
|
||||
# NOTE: clears all caches (`storage` and `triggers`).
|
||||
def reindex_everything!
|
||||
nuke_triggers!
|
||||
|
||||
|
|
|
|||
|
|
@ -45,13 +45,33 @@ class DODB::Storage::Cached(V) < DODB::Storage(V)
|
|||
|
||||
@cached_last_key = init_last_key
|
||||
|
||||
# Load the database (to fill up the cache) at start-up.
|
||||
load_db!
|
||||
end
|
||||
|
||||
# `Storage::Cached` doesn't perform look-ups from the filesystem by itself upon requests,
|
||||
# the entire database has to be initialized by reading the entire on-disk data in order to
|
||||
# fill up the cache.
|
||||
#
|
||||
# This function is called once at start-up and should be used whenever the entire cache
|
||||
# is cleaned up for whatever reason, otherwise the database will be seen as empty.
|
||||
#
|
||||
# WARNING: beware of triggers.
|
||||
# NOTE: this function has no use in `Storage::Common` because the database entries are read from the disk
|
||||
# when the value isn't in cache.
|
||||
# This function doesn't exist in `Storage::Uncached` since there is no cache.
|
||||
def load_db!
	# Load the database in RAM at start-up: read every entry from disk through
	# an uncached storage pointed at the same directory and copy it into the cache.
	DODB::Storage::Uncached(V).new(@directory_name).each_with_key do |v, key|
		# NOTE(review): removed a leftover active debug `puts` that printed one line
		# per entry at every start-up; its commented twin is kept for troubleshooting.
		# puts "\rloading data from #{@directory_name} at key #{key}"
		self[key] = v
	end
end
|
||||
|
||||
# Empties the in-RAM data cache.
def clear_storage_cache!
	data.clear
end
|
||||
|
||||
# Gets the data with the *key*.
|
||||
# In case the data is missing, returns an exception `DODB::MissingEntry`.
|
||||
#
|
||||
|
|
|
|||
|
|
@ -46,6 +46,17 @@ class DODB::Storage::Common(V) < DODB::Storage::Cached(V)
|
|||
@cached_last_key = init_last_key
|
||||
end
|
||||
|
||||
# Empties the data cache and resets the LRU tracking structure,
# keeping the configured maximum number of entries.
def clear_storage_cache!
	data.clear
	@lru = EfficientLRU(Int32).new lru.max_entries
end
|
||||
|
||||
# :nodoc:
# `Storage::Common` reads entries from disk on cache misses, so pre-loading
# the whole database is unnecessary; made private and hidden from the docs.
private def load_db!
end
|
||||
|
||||
# Verifies that the value is in cache, or read it on disk.
|
||||
# Pushes the key in the lru.
|
||||
def [](key : Int32) : V
|
||||
|
|
|
|||
|
|
@ -29,9 +29,15 @@ abstract class DODB::Trigger(V)
|
|||
# NOTE: used for internal operations.
|
||||
abstract def trigger_directory : String
|
||||
|
||||
# Removes all cached values.
#
# No-op in the base class: triggers have no cache by default;
# cached trigger implementations override this method.
def clear_cache!
end
|
||||
|
||||
# Removes all the index entries, removes the `#trigger_directory` by default.
|
||||
#
|
||||
# NOTE: used for internal operations.
|
||||
# NOTE: automatically clears cache (for triggers that do have a cache).
|
||||
def nuke_trigger
	# Dropping the whole directory removes every index entry at once.
	FileUtils.rm_rf trigger_directory
end
|
||||
|
|
|
|||
|
|
@ -344,10 +344,14 @@ class DODB::Trigger::IndexCached(V) < DODB::Trigger::Index(V)
|
|||
end
|
||||
end
|
||||
|
||||
# Removes every cached index entry.
def clear_cache!
	data.clear
end
|
||||
|
||||
# Clears the cache and removes the `#trigger_directory`.
|
||||
# Clears the cache and removes the `#trigger_directory`.
def nuke_trigger
	super
	# NOTE: `clear_cache!` already empties `data`; a separate `data.clear` was redundant.
	clear_cache!
end
|
||||
|
||||
# Indexes the value on the file-system as `DODB::Trigger::Index#index` but also puts the index in a cache.
|
||||
|
|
@ -436,6 +440,6 @@ class DODB::Trigger::IndexRAMOnly(V) < DODB::Trigger::IndexCached(V)
|
|||
|
||||
# Clears the index.
|
||||
# Clears the index (RAM-only: there is no on-disk directory to remove).
def nuke_trigger
	# NOTE: `clear_cache!` already empties `data`; a separate `data.clear` was redundant.
	clear_cache!
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -260,6 +260,10 @@ class DODB::Trigger::PartitionCached(V) < DODB::Trigger::Partition(V)
|
|||
@data[partition] = array
|
||||
end
|
||||
|
||||
# Removes every cached partition entry.
def clear_cache!
	data.clear
end
|
||||
|
||||
# Removes the index of a value on the file-system as `DODB::Trigger::Partition#deindex` but also from
|
||||
# the cache, used for **internal operations**.
|
||||
#
|
||||
|
|
@ -309,7 +313,7 @@ class DODB::Trigger::PartitionCached(V) < DODB::Trigger::Partition(V)
|
|||
# Clears the cache and removes the `#trigger_directory`.
|
||||
# Clears the cache and removes the `#trigger_directory`.
def nuke_trigger
	super
	# NOTE: `clear_cache!` already empties `data`; a separate `data.clear` was redundant.
	clear_cache!
end
|
||||
end
|
||||
|
||||
|
|
@ -379,6 +383,6 @@ class DODB::Trigger::PartitionRAMOnly(V) < DODB::Trigger::PartitionCached(V)
|
|||
|
||||
# Clears the cache.
|
||||
# Clears the cache (RAM-only: there is no on-disk directory to remove).
def nuke_trigger
	# NOTE: `clear_cache!` already empties `data`; a separate `data.clear` was redundant.
	clear_cache!
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -300,6 +300,10 @@ class DODB::Trigger::TagsCached(V) < DODB::Trigger::Tags(V)
|
|||
end
|
||||
end
|
||||
|
||||
# Removes every cached tag entry.
def clear_cache!
	data.clear
end
|
||||
|
||||
# :inherit:
|
||||
# TODO: in case the tag is left empty, should it be removed from the cache?
|
||||
def deindex(key : String, value : V)
|
||||
|
|
@ -347,7 +351,7 @@ class DODB::Trigger::TagsCached(V) < DODB::Trigger::Tags(V)
|
|||
# Clears the cache and removes the `#trigger_directory`.
|
||||
# Clears the cache and removes the `#trigger_directory`.
def nuke_trigger
	super
	# NOTE: `clear_cache!` already empties `data`; a separate `data.clear` was redundant.
	clear_cache!
end
|
||||
end
|
||||
|
||||
|
|
@ -421,6 +425,6 @@ class DODB::Trigger::TagsRAMOnly(V) < DODB::Trigger::TagsCached(V)
|
|||
|
||||
# Clears the cache.
|
||||
# Clears the cache (RAM-only: there is no on-disk directory to remove).
def nuke_trigger
	# NOTE: `clear_cache!` already empties `data`; a separate `data.clear` was redundant.
	clear_cache!
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -88,6 +88,9 @@ class EfficientLRU(V)
|
|||
property list : DoubleLinkedList(V)
|
||||
property hash : Hash(V, DoubleLinkedList::Node(V))
|
||||
|
||||
# Maximum allowed entries in the structure.
|
||||
property max_entries : UInt32
|
||||
|
||||
def initialize(@max_entries : UInt32)
|
||||
@list = DoubleLinkedList(V).new
|
||||
@hash = Hash(V, DoubleLinkedList::Node(V)).new
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue