mirror of https://gitlab.com/famedly/conduit.git
Browse Source
add abstract test implementations and specific implementation for sqlite See merge request famedly/conduit!249merge-requests/249/merge
3 changed files with 403 additions and 0 deletions
@ -0,0 +1,397 @@
|
||||
use crate::database::{ |
||||
abstraction::{KeyValueDatabaseEngine, KvTree}, |
||||
Config, |
||||
}; |
||||
use std::sync::Arc; |
||||
use tempfile::{Builder, TempDir}; |
||||
|
||||
fn empty_config(database_path: &str) -> Config { |
||||
use figment::providers::{Format, Toml}; |
||||
Toml::from_str(&format!( |
||||
r#" |
||||
server_name = "test" |
||||
database_path = "{}" |
||||
"#, |
||||
database_path |
||||
)) |
||||
.unwrap() |
||||
} |
||||
|
||||
/// Make sure to keep the reference of the tree returned values for
|
||||
/// the length of the test, to avoid early cleanups that may create test issues
|
||||
fn open_tree<T>(test_name: &str) -> (Arc<dyn KvTree>, impl KeyValueDatabaseEngine, TempDir) |
||||
where |
||||
Arc<T>: KeyValueDatabaseEngine, |
||||
{ |
||||
let db_folder = Builder::new().prefix(test_name).tempdir().unwrap(); |
||||
let config = empty_config(db_folder.path().to_str().unwrap()); |
||||
let instance = Arc::<T>::open(&config).unwrap(); |
||||
let tree = instance.open_tree("test").unwrap(); |
||||
(tree, instance, db_folder) |
||||
} |
||||
|
||||
fn insert_get<T>(name: &str) |
||||
where |
||||
Arc<T>: KeyValueDatabaseEngine, |
||||
{ |
||||
let (tree, _inst, _temp_dir) = open_tree::<T>(name); |
||||
let key = "key".as_bytes(); |
||||
let value = "value".as_bytes(); |
||||
tree.insert(key, value).unwrap(); |
||||
let read = tree.get(key).unwrap(); |
||||
assert_eq!(read, Some(value.to_owned())); |
||||
} |
||||
|
||||
fn insert_get_replace<T>(name: &str) |
||||
where |
||||
Arc<T>: KeyValueDatabaseEngine, |
||||
{ |
||||
let (tree, _inst, _temp_dir) = open_tree::<T>(name); |
||||
let key = "key".as_bytes(); |
||||
let value = "value".as_bytes(); |
||||
tree.insert(key, value).unwrap(); |
||||
let read = tree.get(key).unwrap(); |
||||
assert_eq!(read, Some(value.to_owned())); |
||||
|
||||
let value1 = "value1".as_bytes(); |
||||
tree.insert(key, value1).unwrap(); |
||||
let read = tree.get(key).unwrap(); |
||||
assert_eq!(read, Some(value1.to_owned())); |
||||
} |
||||
|
||||
fn insert_get_remove<T>(name: &str) |
||||
where |
||||
Arc<T>: KeyValueDatabaseEngine, |
||||
{ |
||||
let (tree, _inst, _temp_dir) = open_tree::<T>(name); |
||||
let key = "key".as_bytes(); |
||||
let value = "value".as_bytes(); |
||||
tree.insert(key, value).unwrap(); |
||||
let read = tree.get(key).unwrap(); |
||||
assert_eq!(read, Some(value.to_owned())); |
||||
tree.remove(key).unwrap(); |
||||
let read = tree.get(key).unwrap(); |
||||
assert_eq!(read, None); |
||||
// Remove of not existing key should run seamless
|
||||
tree.remove(key).unwrap(); |
||||
} |
||||
|
||||
fn batch_insert_get<T>(name: &str) |
||||
where |
||||
Arc<T>: KeyValueDatabaseEngine, |
||||
{ |
||||
let (tree, _inst, _temp_dir) = open_tree::<T>(name); |
||||
let key = "key".as_bytes(); |
||||
let value = "value".as_bytes(); |
||||
let key1 = "key1".as_bytes(); |
||||
let value1 = "value1".as_bytes(); |
||||
let key2 = "key2".as_bytes(); |
||||
let value2 = "value2".as_bytes(); |
||||
tree.insert_batch( |
||||
&mut vec![ |
||||
(key.to_owned(), value.to_owned()), |
||||
(key1.to_owned(), value1.to_owned()), |
||||
(key2.to_owned(), value2.to_owned()), |
||||
] |
||||
.into_iter(), |
||||
) |
||||
.unwrap(); |
||||
let read = tree.get(key).unwrap(); |
||||
assert_eq!(read, Some(value.to_owned())); |
||||
let read = tree.get(key1).unwrap(); |
||||
assert_eq!(read, Some(value1.to_owned())); |
||||
let read = tree.get(key2).unwrap(); |
||||
assert_eq!(read, Some(value2.to_owned())); |
||||
} |
||||
|
||||
fn insert_iter<T>(name: &str) |
||||
where |
||||
Arc<T>: KeyValueDatabaseEngine, |
||||
{ |
||||
let (tree, _inst, _temp_dir) = open_tree::<T>(name); |
||||
let key = "key".as_bytes(); |
||||
let value = "value".as_bytes(); |
||||
tree.insert(key, value).unwrap(); |
||||
let key1 = "key1".as_bytes(); |
||||
let value1 = "value1".as_bytes(); |
||||
tree.insert(key1, value1).unwrap(); |
||||
let key2 = "key2".as_bytes(); |
||||
let value2 = "value2".as_bytes(); |
||||
tree.insert(key2, value2).unwrap(); |
||||
let mut iter = tree.iter(); |
||||
assert_eq!(iter.next(), Some((key.to_owned(), value.to_owned()))); |
||||
assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned()))); |
||||
assert_eq!(iter.next(), Some((key2.to_owned(), value2.to_owned()))); |
||||
assert_eq!(iter.next(), None); |
||||
} |
||||
|
||||
fn insert_iter_from<T>(name: &str) |
||||
where |
||||
Arc<T>: KeyValueDatabaseEngine, |
||||
{ |
||||
let (tree, _inst, _temp_dir) = open_tree::<T>(name); |
||||
let key = "key".as_bytes(); |
||||
let value = "value".as_bytes(); |
||||
tree.insert(key, value).unwrap(); |
||||
let key1 = "key1".as_bytes(); |
||||
let value1 = "value1".as_bytes(); |
||||
tree.insert(key1, value1).unwrap(); |
||||
let key2 = "key2".as_bytes(); |
||||
let value2 = "value2".as_bytes(); |
||||
tree.insert(key2, value2).unwrap(); |
||||
let mut iter = tree.iter_from(key1, false); |
||||
assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned()))); |
||||
assert_eq!(iter.next(), Some((key2.to_owned(), value2.to_owned()))); |
||||
assert_eq!(iter.next(), None); |
||||
let mut iter = tree.iter_from(key1, true); |
||||
assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned()))); |
||||
assert_eq!(iter.next(), Some((key.to_owned(), value.to_owned()))); |
||||
assert_eq!(iter.next(), None); |
||||
} |
||||
|
||||
fn insert_iter_prefix<T>(name: &str) |
||||
where |
||||
Arc<T>: KeyValueDatabaseEngine, |
||||
{ |
||||
let (tree, _inst, _temp_dir) = open_tree::<T>(name); |
||||
let key = "key".as_bytes(); |
||||
let value = "value".as_bytes(); |
||||
tree.insert(key, value).unwrap(); |
||||
let key1 = "key1".as_bytes(); |
||||
let value1 = "value1".as_bytes(); |
||||
tree.insert(key1, value1).unwrap(); |
||||
let key11 = "key11".as_bytes(); |
||||
let value11 = "value11".as_bytes(); |
||||
tree.insert(key11, value11).unwrap(); |
||||
let key2 = "key2".as_bytes(); |
||||
let value2 = "value2".as_bytes(); |
||||
tree.insert(key2, value2).unwrap(); |
||||
let mut iter = tree.scan_prefix(key1.to_owned()); |
||||
assert_eq!(iter.next(), Some((key1.to_owned(), value1.to_owned()))); |
||||
assert_eq!(iter.next(), Some((key11.to_owned(), value11.to_owned()))); |
||||
assert_eq!(iter.next(), None); |
||||
} |
||||
|
||||
fn insert_clear<T>(name: &str) |
||||
where |
||||
Arc<T>: KeyValueDatabaseEngine, |
||||
{ |
||||
let (tree, _inst, _temp_dir) = open_tree::<T>(name); |
||||
let key = "key".as_bytes(); |
||||
let value = "value".as_bytes(); |
||||
tree.insert(key, value).unwrap(); |
||||
let key1 = "key1".as_bytes(); |
||||
let value1 = "value1".as_bytes(); |
||||
tree.insert(key1, value1).unwrap(); |
||||
let key2 = "key2".as_bytes(); |
||||
let value2 = "value2".as_bytes(); |
||||
tree.insert(key2, value2).unwrap(); |
||||
assert_eq!(tree.iter().count(), 3); |
||||
tree.clear().unwrap(); |
||||
assert_eq!(tree.iter().count(), 0); |
||||
} |
||||
|
||||
fn increment<T>(name: &str) |
||||
where |
||||
Arc<T>: KeyValueDatabaseEngine, |
||||
{ |
||||
let (tree, _inst, _temp_dir) = open_tree::<T>(name); |
||||
let key = "key".as_bytes(); |
||||
tree.increment(key).unwrap(); |
||||
let read = tree.get(key).unwrap(); |
||||
assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 1); |
||||
tree.increment(key).unwrap(); |
||||
let read = tree.get(key).unwrap(); |
||||
assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 2); |
||||
} |
||||
|
||||
fn increment_batch<T>(name: &str) |
||||
where |
||||
Arc<T>: KeyValueDatabaseEngine, |
||||
{ |
||||
let (tree, _inst, _temp_dir) = open_tree::<T>(name); |
||||
let key = "key".as_bytes(); |
||||
let key1 = "key1".as_bytes(); |
||||
tree.increment_batch(&mut vec![key.to_owned(), key1.to_owned()].into_iter()) |
||||
.unwrap(); |
||||
let read = tree.get(key).unwrap(); |
||||
assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 1); |
||||
let read = tree.get(key1).unwrap(); |
||||
assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 1); |
||||
tree.increment_batch(&mut vec![key.to_owned(), key1.to_owned()].into_iter()) |
||||
.unwrap(); |
||||
let read = tree.get(key).unwrap(); |
||||
assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 2); |
||||
let read = tree.get(key1).unwrap(); |
||||
assert_eq!(crate::utils::u64_from_bytes(&read.unwrap()).unwrap(), 2); |
||||
} |
||||
|
||||
#[cfg(feature = "sqlite")]
mod sqlite {

    use super::*;
    use crate::database::abstraction::sqlite::Engine;

    #[test]
    fn sqlite_insert_get() {
        insert_get::<Engine>("sqlite_insert_get")
    }

    #[test]
    fn sqlite_insert_replace_get() {
        insert_get_replace::<Engine>("sqlite_insert_get_replace")
    }

    #[test]
    fn sqlite_insert_get_remove() {
        insert_get_remove::<Engine>("sqlite_insert_get_remove")
    }

    #[test]
    fn sqlite_batch_insert_get() {
        batch_insert_get::<Engine>("sqlite_batch_insert_get")
    }

    #[test]
    fn sqlite_insert_iter() {
        insert_iter::<Engine>("sqlite_insert_iter")
    }

    #[test]
    fn sqlite_insert_iter_from() {
        insert_iter_from::<Engine>("sqlite_insert_iter_from")
    }

    #[test]
    fn sqlite_insert_iter_prefix() {
        insert_iter_prefix::<Engine>("sqlite_insert_iter_prefix")
    }

    #[test]
    fn sqlite_insert_clear() {
        // Fixed copy-paste slip: this previously reused the
        // "sqlite_insert_iter_prefix" name as its temp-dir prefix.
        insert_clear::<Engine>("sqlite_insert_clear")
    }

    #[test]
    fn sqlite_increment() {
        increment::<Engine>("sqlite_increment")
    }

    #[test]
    fn sqlite_increment_batch() {
        increment_batch::<Engine>("sqlite_increment_batch")
    }
}
||||
|
||||
#[cfg(feature = "rocksdb")]
mod rocksdb {

    use super::*;
    use crate::database::abstraction::rocksdb::Engine;

    #[test]
    fn rocksdb_insert_get() {
        insert_get::<Engine>("rocksdb_insert_get")
    }

    #[test]
    fn rocksdb_insert_replace_get() {
        insert_get_replace::<Engine>("rocksdb_insert_get_replace")
    }

    #[test]
    fn rocksdb_insert_get_remove() {
        insert_get_remove::<Engine>("rocksdb_insert_get_remove")
    }

    #[test]
    fn rocksdb_batch_insert_get() {
        batch_insert_get::<Engine>("rocksdb_batch_insert_get")
    }

    #[test]
    fn rocksdb_insert_iter() {
        insert_iter::<Engine>("rocksdb_insert_iter")
    }

    #[test]
    fn rocksdb_insert_iter_from() {
        insert_iter_from::<Engine>("rocksdb_insert_iter_from")
    }

    #[test]
    fn rocksdb_insert_iter_prefix() {
        insert_iter_prefix::<Engine>("rocksdb_insert_iter_prefix")
    }

    #[test]
    fn rocksdb_insert_clear() {
        // Fixed copy-paste slip: this previously reused the
        // "rocksdb_insert_iter_prefix" name as its temp-dir prefix.
        insert_clear::<Engine>("rocksdb_insert_clear")
    }

    #[test]
    fn rocksdb_increment() {
        increment::<Engine>("rocksdb_increment")
    }

    #[test]
    fn rocksdb_increment_batch() {
        increment_batch::<Engine>("rocksdb_increment_batch")
    }
}
||||
#[cfg(feature = "persy")]
mod persy {

    use super::*;
    use crate::database::abstraction::persy::Engine;

    #[test]
    fn persy_insert_get() {
        insert_get::<Engine>("persy_insert_get")
    }

    #[test]
    fn persy_insert_replace_get() {
        insert_get_replace::<Engine>("persy_insert_get_replace")
    }

    #[test]
    fn persy_insert_get_remove() {
        insert_get_remove::<Engine>("persy_insert_get_remove")
    }

    #[test]
    fn persy_batch_insert_get() {
        batch_insert_get::<Engine>("persy_batch_insert_get")
    }

    #[test]
    fn persy_insert_iter() {
        insert_iter::<Engine>("persy_insert_iter")
    }

    #[test]
    fn persy_insert_iter_from() {
        insert_iter_from::<Engine>("persy_insert_iter_from")
    }

    #[test]
    fn persy_insert_iter_prefix() {
        insert_iter_prefix::<Engine>("persy_insert_iter_prefix")
    }

    #[test]
    fn persy_insert_clear() {
        // Fixed copy-paste slip: this previously reused the
        // "persy_insert_iter_prefix" name as its temp-dir prefix.
        insert_clear::<Engine>("persy_insert_clear")
    }

    #[test]
    fn persy_increment() {
        increment::<Engine>("persy_increment")
    }

    #[test]
    fn persy_increment_batch() {
        increment_batch::<Engine>("persy_increment_batch")
    }
}
||||
Loading…
Reference in new issue