delete vfs

This commit is contained in:
Cyryl Płotnicki 2021-05-16 19:15:07 +01:00
parent 12dce7e676
commit 20373325a9
14 changed files with 275 additions and 270 deletions

10
Cargo.lock generated
View file

@ -198,7 +198,6 @@ dependencies = [
"thiserror", "thiserror",
"two-rusty-forks", "two-rusty-forks",
"uuid", "uuid",
"vfs",
"walkdir", "walkdir",
] ]
@ -1453,15 +1452,6 @@ version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"
[[package]]
name = "vfs"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "feb0df0abbe81534013b326c5e3d723a2c9b06c04160fa0f7d8b4d42eb9b7052"
dependencies = [
"thiserror",
]
[[package]] [[package]]
name = "wait-timeout" name = "wait-timeout"
version = "0.2.0" version = "0.2.0"

View file

@ -29,7 +29,6 @@ sha2 = "0.9"
tempfile = "3.2" tempfile = "3.2"
thiserror = "1.0" thiserror = "1.0"
uuid = { version = "0.8", features = ["v4"] } uuid = { version = "0.8", features = ["v4"] }
vfs = "0.5"
walkdir = "2.3" walkdir = "2.3"
[dev-dependencies] [dev-dependencies]

View file

@ -1,40 +1,35 @@
use std::path::Path;
use crate::repository::Repository; use crate::repository::Repository;
use anyhow::Result; use anyhow::Result;
use anyhow::*; use anyhow::*;
use vfs::VfsPath; use walkdir::WalkDir;
pub struct Engine<'a> { pub struct Engine<'a> {
source_path: &'a VfsPath, source_path: &'a Path,
repository: &'a mut Repository, repository: &'a mut Repository,
} }
impl<'a> Engine<'a> { impl<'a> Engine<'a> {
pub fn new(source_path: &'a VfsPath, repository: &'a mut Repository) -> Result<Self> { pub fn new(source_path: &'a Path, repository: &'a mut Repository) -> Result<Self> {
let mut ancestors = vec![]; let mut ancestors = vec![];
let mut current = Some(source_path.clone()); let mut current = Some(source_path.to_path_buf());
while let Some(path) = current { while let Some(path) = current {
ancestors.push(path.clone()); ancestors.push(path.to_path_buf());
current = path.parent(); current = path.parent().map(|p| p.to_path_buf());
} }
if ancestors.into_iter().any(|a| &a == repository.path()) { if ancestors.into_iter().any(|a| a == repository.path()) {
return Err(anyhow!("source same as repository")); return Err(anyhow!("source same as repository"));
} }
Ok(Engine { source_path, repository }) Ok(Engine { source_path, repository })
} }
pub fn backup(&mut self) -> Result<()> { pub fn backup(&mut self) -> Result<()> {
let walker = self.source_path.walk_dir()?; let walker = WalkDir::new(self.source_path);
let save_every = 16;
let mut save_counter = 0;
for maybe_entry in walker { for maybe_entry in walker {
let entry = maybe_entry?; let entry = maybe_entry?;
if &entry != self.source_path { if entry.path() != self.source_path {
self.repository.store(&entry)?; self.repository.store(&entry.path())?;
}
save_counter += 1;
if save_counter == save_every {
save_counter = 0;
self.repository.save_index()?;
} }
} }
self.repository.save_index()?; self.repository.save_index()?;

View file

@ -1,5 +1,9 @@
use std::collections::HashMap; use std::{
use vfs::VfsPath; collections::HashMap,
fs::{self, File},
io::Read,
path::{Path, PathBuf},
};
use uuid::Uuid; use uuid::Uuid;
@ -14,8 +18,8 @@ use nix::unistd::getpid;
use std::{cmp::max, io::Write}; use std::{cmp::max, io::Write};
impl Index { impl Index {
pub fn load(repository_path: &VfsPath) -> Result<Self> { pub fn load(repository_path: &Path) -> Result<Self> {
if !repository_path.exists()? { if !repository_path.exists() {
let mut index = Index::new()?; let mut index = Index::new()?;
index.save(repository_path)?; index.save(repository_path)?;
} }
@ -26,19 +30,19 @@ impl Index {
log::debug!( log::debug!(
"[{}] loaded index from {}, version: {}; {} items", "[{}] loaded index from {}, version: {}; {} items",
getpid(), getpid(),
index_file_path.as_str(), index_file_path.to_string_lossy(),
index.version, index.version,
index.newest_items_by_source_path.len() index.newest_items_by_source_path.len()
); );
Ok(index) Ok(index)
} }
pub fn save(&mut self, repository_path: &VfsPath) -> Result<()> { pub fn save(&mut self, repository_path: &Path) -> Result<()> {
let lock_id = Uuid::new_v4(); let lock_id = Uuid::new_v4();
let lock = Lock::lock(repository_path)?; let lock = Lock::lock(repository_path)?;
let index_file_path = &Index::index_file_path_for_repository_path(repository_path)?; let index_file_path = &Index::index_file_path_for_repository_path(repository_path)?;
if index_file_path.exists()? { if index_file_path.exists() {
let index = Index::load_from_file(&Index::index_file_path_for_repository_path(repository_path)?)?; let index = Index::load_from_file(&Index::index_file_path_for_repository_path(repository_path)?)?;
self.merge_items_by_file_id(index.items_by_file_id); self.merge_items_by_file_id(index.items_by_file_id);
self.merge_newest_items(index.newest_items_by_source_path); self.merge_newest_items(index.newest_items_by_source_path);
@ -52,20 +56,21 @@ impl Index {
getpid(), getpid(),
self.version, self.version,
lock_id, lock_id,
index_file_path.as_str(), index_file_path.to_string_lossy(),
self.newest_items_by_source_path.len() self.newest_items_by_source_path.len()
); );
Ok(()) Ok(())
} }
fn write_index_to_file(&mut self, index_file_path: &VfsPath) -> Result<()> { fn write_index_to_file(&mut self, index_file_path: &Path) -> Result<()> {
let parent = index_file_path.parent(); let parent = index_file_path.parent();
match parent { match parent {
None => Err(anyhow!(format!("cannot get parent for {}", index_file_path.as_str()))), None => Err(anyhow!(format!(
Some(parent) => Ok(parent "cannot get parent for {}",
.create_dir_all() index_file_path.to_string_lossy()
.context(format!("create index directory at {}", index_file_path.as_str()))?), ))),
}?; Some(parent) => Ok(fs::create_dir_all(parent)),
}??;
let serialised = serde_json::to_string(&self)?; let serialised = serde_json::to_string(&self)?;
@ -73,13 +78,13 @@ impl Index {
let encoded = error_correcting_encoder::encode(bytes)?; let encoded = error_correcting_encoder::encode(bytes)?;
{ {
let mut file = index_file_path.create_file()?; let mut file = File::create(index_file_path)?;
file.write_all(&encoded).context("writing index to disk")?; file.write_all(&encoded).context("writing index to disk")?;
file.flush()?; file.flush()?;
} }
let readback = { let readback = {
let mut file = index_file_path.open_file()?; let mut file = File::open(index_file_path)?;
let mut readback = vec![]; let mut readback = vec![];
file.read_to_end(&mut readback)?; file.read_to_end(&mut readback)?;
readback readback
@ -92,16 +97,16 @@ impl Index {
} }
} }
fn load_from_file(index_file_path: &VfsPath) -> Result<Self> { fn load_from_file(index_file_path: &Path) -> Result<Self> {
let mut file = index_file_path.open_file()?; let mut file = File::open(index_file_path)?;
let mut encoded = vec![]; let mut encoded = vec![];
file.read_to_end(&mut encoded)?; file.read_to_end(&mut encoded)?;
let decoded = error_correcting_encoder::decode(&encoded)?; let decoded = error_correcting_encoder::decode(&encoded)?;
let index_text = String::from_utf8(decoded)?; let index_text = String::from_utf8(decoded)?;
let index: Index = let index: Index = serde_json::from_str(&index_text)
serde_json::from_str(&index_text).context(format!("cannot read index from: {}", index_file_path.as_str()))?; .context(format!("cannot read index from: {}", index_file_path.to_string_lossy()))?;
Ok(index) Ok(index)
} }
@ -121,8 +126,8 @@ impl Index {
self.items_by_file_id.extend(old_items_by_file_id); self.items_by_file_id.extend(old_items_by_file_id);
} }
fn index_file_path_for_repository_path(path: &VfsPath) -> Result<VfsPath> { fn index_file_path_for_repository_path(path: &Path) -> Result<PathBuf> {
Ok(path.join("index")?) Ok(path.join("index"))
} }
} }
@ -130,16 +135,16 @@ impl Index {
mod must { mod must {
use crate::index::Index; use crate::index::Index;
use anyhow::Result; use anyhow::Result;
use pretty_assertions::assert_eq;
use vfs::{MemoryFS, VfsPath}; use tempfile::tempdir;
#[test] #[test]
fn have_version_increased_when_saved() -> Result<()> { fn have_version_increased_when_saved() -> Result<()> {
let temp_dir: VfsPath = MemoryFS::new().into(); let temp_dir = tempdir()?;
let mut index = Index::new()?; let mut index = Index::new()?;
let old_version = index.version; let old_version = index.version;
index.save(&temp_dir)?; index.save(&temp_dir.path())?;
let new_version = index.version; let new_version = index.version;
@ -150,11 +155,11 @@ mod must {
#[test] #[test]
fn be_same_when_loaded_from_disk() -> Result<()> { fn be_same_when_loaded_from_disk() -> Result<()> {
let repository_path: VfsPath = MemoryFS::new().into(); let repository_path = tempdir()?;
let mut original = Index::new()?; let mut original = Index::new()?;
original.save(&repository_path)?; original.save(&repository_path.path())?;
let loaded = Index::load(&repository_path)?; let loaded = Index::load(&repository_path.path())?;
assert_eq!(original, loaded); assert_eq!(original, loaded);

View file

@ -1,26 +1,31 @@
use anyhow::Result; use anyhow::Result;
use anyhow::*; use anyhow::*;
use fail::fail_point; use fail::fail_point;
use std::{io::Write, time::Instant}; use std::{
fs::{remove_file, File},
io::Write,
path::{Path, PathBuf},
time::Instant,
};
use uuid::Uuid; use uuid::Uuid;
use vfs::VfsPath; use walkdir::WalkDir;
use rand::{rngs::OsRng, RngCore}; use rand::{rngs::OsRng, RngCore};
use std::{thread, time}; use std::{thread, time};
pub struct Lock { pub struct Lock {
path: VfsPath, path: PathBuf,
} }
const MAX_TIMEOUT_MILLIS: u16 = 8192; const MAX_TIMEOUT_MILLIS: u16 = 8192;
const FILE_EXTENSION: &str = ".lock"; const FILE_EXTENSION: &str = ".lock";
impl Lock { impl Lock {
pub fn lock(index_directory: &VfsPath) -> Result<Self> { pub fn lock(index_directory: &Path) -> Result<Self> {
Lock::lock_with_timeout(index_directory, MAX_TIMEOUT_MILLIS) Lock::lock_with_timeout(index_directory, MAX_TIMEOUT_MILLIS)
} }
pub fn lock_with_timeout(index_directory: &VfsPath, max_timeout_millis: u16) -> Result<Self> { pub fn lock_with_timeout(index_directory: &Path, max_timeout_millis: u16) -> Result<Self> {
let mut buffer = [0u8; 16]; let mut buffer = [0u8; 16];
OsRng.fill_bytes(&mut buffer); OsRng.fill_bytes(&mut buffer);
let id = Uuid::from_bytes(buffer); let id = Uuid::from_bytes(buffer);
@ -35,23 +40,26 @@ impl Lock {
} }
fn delete_lock_file(&self) -> Result<()> { fn delete_lock_file(&self) -> Result<()> {
if self.path.exists()? { if self.path.exists() {
self.path.remove_file()?; remove_file(&self.path)?;
} }
Ok(()) Ok(())
} }
fn wait_to_have_sole_lock(lock_id: Uuid, index_directory: &VfsPath, max_timeout_millis: u16) -> Result<()> { fn wait_to_have_sole_lock(lock_id: Uuid, index_directory: &Path, max_timeout_millis: u16) -> Result<()> {
let start_time = Instant::now(); let start_time = Instant::now();
let _ = Lock::create_lock_file(lock_id, index_directory); let _ = Lock::create_lock_file(lock_id, index_directory);
while !Lock::sole_lock(lock_id, index_directory)? { while !Lock::sole_lock(lock_id, index_directory)? {
let path = Lock::lock_file_path(index_directory, lock_id)?; let path = Lock::lock_file_path(index_directory, lock_id)?;
if path.exists()? { if path.exists() {
path.remove_file()?; remove_file(path)?;
} }
let sleep_duration = time::Duration::from_millis((OsRng.next_u32() % 64).into()); let sleep_duration = time::Duration::from_millis((OsRng.next_u32() % 64).into());
thread::sleep(sleep_duration); thread::sleep(sleep_duration);
// timeout will take care of permanent errors
let _ = Lock::create_lock_file(lock_id, index_directory); let _ = Lock::create_lock_file(lock_id, index_directory);
if start_time.elapsed().as_millis() > max_timeout_millis.into() { if start_time.elapsed().as_millis() > max_timeout_millis.into() {
return Err(anyhow!("timed out waiting on lock")); return Err(anyhow!("timed out waiting on lock"));
} }
@ -59,42 +67,43 @@ impl Lock {
Ok(()) Ok(())
} }
fn sole_lock(lock_id: Uuid, index_directory: &VfsPath) -> Result<bool> { fn sole_lock(lock_id: Uuid, index_directory: &Path) -> Result<bool> {
let my_lock_file_path = Lock::lock_file_path(index_directory, lock_id)?; let my_lock_file_path = Lock::lock_file_path(index_directory, lock_id)?;
let locks = Lock::all_locks(index_directory)?;
let mut only_mine = true; let walker = WalkDir::new(index_directory);
for path in &locks { let all_locks: Vec<_> = walker
if path != &my_lock_file_path { .into_iter()
only_mine = false; .filter_map(|e| e.ok())
break; .filter(|e| e.file_name().to_string_lossy().ends_with(FILE_EXTENSION))
} .collect();
} if all_locks.len() != 1 {
if locks.is_empty() {
return Ok(false); return Ok(false);
} }
Ok(only_mine) let walker = WalkDir::new(index_directory);
} let my_locks: Vec<_> = walker
fn all_locks(index_directory: &VfsPath) -> Result<Vec<VfsPath>> {
Ok(index_directory
.read_dir()?
.into_iter() .into_iter()
.filter(|f| f.filename().ends_with(FILE_EXTENSION)) .filter_map(|e| e.ok())
.collect()) .filter(|e| e.path() == my_lock_file_path)
.collect();
if my_locks.len() != 1 {
return Ok(false);
}
let result = all_locks.first().unwrap().path() == my_locks.first().unwrap().path();
Ok(result)
} }
fn create_lock_file(lock_id: Uuid, index_directory: &VfsPath) -> Result<()> { fn create_lock_file(lock_id: Uuid, index_directory: &Path) -> Result<()> {
let lock_file_path = Lock::lock_file_path(index_directory, lock_id)?; let lock_file_path = Lock::lock_file_path(index_directory, lock_id)?;
fail_point!("create-lock-file", |e: Option<String>| Err(anyhow!(e.unwrap()))); fail_point!("create-lock-file", |e: Option<String>| Err(anyhow!(e.unwrap())));
let mut file = lock_file_path.create_file()?; let mut file = File::create(lock_file_path)?;
let lock_id_text = lock_id.to_hyphenated().to_string(); let lock_id_text = lock_id.to_hyphenated().to_string();
let lock_id_bytes = lock_id_text.as_bytes(); let lock_id_bytes = lock_id_text.as_bytes();
Ok(file.write_all(lock_id_bytes)?) Ok(file.write_all(lock_id_bytes)?)
} }
fn lock_file_path(path: &VfsPath, lock_id: Uuid) -> Result<VfsPath> { fn lock_file_path(path: &Path, lock_id: Uuid) -> Result<PathBuf> {
let file_name = format!("{}.{}", lock_id, FILE_EXTENSION); let file_name = format!("{}{}", lock_id, FILE_EXTENSION);
Ok(path.join(&file_name)?) Ok(path.join(&file_name))
} }
} }
@ -108,20 +117,22 @@ impl Drop for Lock {
mod must { mod must {
use super::Lock; use super::Lock;
use anyhow::Result; use anyhow::Result;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
#[cfg(feature = "failpoints")] #[cfg(feature = "failpoints")]
use two_rusty_forks::rusty_fork_test; use two_rusty_forks::rusty_fork_test;
use vfs::{MemoryFS, VfsPath};
#[test] #[test]
fn be_released_when_dropped() -> Result<()> { fn be_released_when_dropped() -> Result<()> {
let temp_dir: VfsPath = MemoryFS::new().into(); let temp_dir = tempdir()?;
let initial_number_of_entries = temp_dir.path().read_dir()?.count();
{ {
let _lock = Lock::lock(&temp_dir); let _lock = Lock::lock(&temp_dir.path())?;
} }
let entries = temp_dir.read_dir()?.count(); let entries = temp_dir.path().read_dir()?.count();
assert_eq!(entries, 0); assert_eq!(entries, initial_number_of_entries);
Ok(()) Ok(())
} }
@ -130,9 +141,9 @@ mod must {
#[test] #[test]
fn be_able_to_lock_when_creating_lock_file_fails_sometimes() { fn be_able_to_lock_when_creating_lock_file_fails_sometimes() {
fail::cfg("create-lock-file", "90%10*return(some lock file creation error)->off").unwrap(); fail::cfg("create-lock-file", "90%10*return(some lock file creation error)->off").unwrap();
let path = MemoryFS::new().into(); let temp_dir = tempdir().unwrap();
let lock = Lock::lock(&path).unwrap(); let lock = Lock::lock(&temp_dir.path()).unwrap();
lock.release().unwrap(); lock.release().unwrap();
} }
} }
@ -142,9 +153,9 @@ mod must {
#[test] #[test]
fn know_to_give_up_when_creating_lock_file_always_fails() { fn know_to_give_up_when_creating_lock_file_always_fails() {
fail::cfg("create-lock-file", "return(persistent lock file creation error)").unwrap(); fail::cfg("create-lock-file", "return(persistent lock file creation error)").unwrap();
let path = MemoryFS::new().into(); let temp_dir = tempdir().unwrap();
assert!(Lock::lock_with_timeout(&path, 1).is_err()); assert!(Lock::lock_with_timeout(&temp_dir.path(), 1).is_err());
} }
} }
} }

View file

@ -1,8 +1,7 @@
use std::collections::hash_map::Iter;
use std::collections::HashMap; use std::collections::HashMap;
use std::{collections::hash_map::Iter, path::Path};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use vfs::VfsPath;
use crate::index::item::IndexItem; use crate::index::item::IndexItem;
use crate::repository::ItemId; use crate::repository::ItemId;
@ -29,15 +28,15 @@ impl Index {
}) })
} }
pub fn remember(&mut self, original_source_path: &VfsPath, relative_path: &str, id: ItemId) { pub fn remember(&mut self, original_source_path: &Path, relative_path: &str, id: ItemId) {
let item = if let Some(old) = self let item = if let Some(old) = self
.newest_items_by_source_path .newest_items_by_source_path
.get(&original_source_path.as_str().to_string()) .get(&original_source_path.to_string_lossy().to_string())
{ {
old.next_version(id, relative_path.to_string()) old.next_version(id, relative_path.to_string())
} else { } else {
IndexItem::from( IndexItem::from(
original_source_path.as_str().to_string(), original_source_path.to_string_lossy().to_string(),
relative_path.to_string(), relative_path.to_string(),
id, id,
Version::default(), Version::default(),
@ -46,11 +45,14 @@ impl Index {
self.items_by_file_id.insert(item.id(), item.clone()); self.items_by_file_id.insert(item.id(), item.clone());
self.newest_items_by_source_path self.newest_items_by_source_path
.insert(original_source_path.as_str().to_string(), item); .insert(original_source_path.to_string_lossy().to_string(), item);
} }
pub fn newest_item_by_source_path(&self, path: &VfsPath) -> Result<Option<IndexItem>> { pub fn newest_item_by_source_path(&self, path: &Path) -> Result<Option<IndexItem>> {
Ok(self.newest_items_by_source_path.get(&path.as_str().to_string()).cloned()) Ok(self
.newest_items_by_source_path
.get(&path.to_string_lossy().to_string())
.cloned())
} }
pub fn item_by_id(&self, id: &ItemId) -> Result<Option<IndexItem>> { pub fn item_by_id(&self, id: &ItemId) -> Result<Option<IndexItem>> {

View file

@ -2,15 +2,17 @@ use crate::{repository::ItemId, version::Version};
use anyhow::Result; use anyhow::Result;
use anyhow::*; use anyhow::*;
use nix::unistd::getpid; use nix::unistd::getpid;
use std::fmt;
use std::fmt::{Display, Formatter};
use std::path::Path; use std::path::Path;
use vfs::VfsPath; use std::{fmt, path::PathBuf};
use std::{
fmt::{Display, Formatter},
fs,
};
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositoryItem { pub struct RepositoryItem {
relative_path: String, relative_path: String,
absolute_path: VfsPath, absolute_path: PathBuf,
original_source_path: String, original_source_path: String,
id: ItemId, id: ItemId,
version: Version, version: Version,
@ -23,39 +25,32 @@ impl PartialOrd for RepositoryItem {
} }
impl RepositoryItem { impl RepositoryItem {
pub fn from( pub fn from(original_source_path: &str, absolute_path: &Path, relative_path: &str, id: ItemId, version: Version) -> Self {
original_source_path: &str,
absolute_path: &VfsPath,
relative_path: &str,
id: ItemId,
version: Version,
) -> Self {
RepositoryItem { RepositoryItem {
relative_path: relative_path.to_string(), relative_path: relative_path.to_string(),
absolute_path: absolute_path.clone(), absolute_path: absolute_path.to_path_buf(),
original_source_path: original_source_path.to_string(), original_source_path: original_source_path.to_string(),
id, id,
version, version,
} }
} }
pub fn save(&self, save_to: &VfsPath) -> Result<()> { pub fn save(&self, save_to: &Path) -> Result<()> {
let original_source_path = Path::new(self.original_source_path()); let original_source_path = Path::new(self.original_source_path());
let source_path_relative = original_source_path.strip_prefix("/")?; let source_path_relative = original_source_path.strip_prefix("/")?;
let source_path_relative = source_path_relative.to_string_lossy();
let target_path = save_to.join(&source_path_relative)?; let target_path = save_to.join(&source_path_relative);
let parent = target_path let parent = target_path
.parent() .parent()
.ok_or_else(|| anyhow!("cannot compute parent path for {}", &target_path.as_str()))?; .ok_or_else(|| anyhow!("cannot compute parent path for {}", &target_path.to_string_lossy()))?;
log::debug!("[{}] saving data to {}", getpid(), target_path.as_str()); log::debug!("[{}] saving data to {}", getpid(), target_path.to_string_lossy());
parent.create_dir_all()?; fs::create_dir_all(parent)?;
if !self.absolute_path.exists()? { if !self.absolute_path.exists() {
return Err(anyhow!("corrupted repository")); return Err(anyhow!("corrupted repository"));
} }
self.absolute_path.copy_file(&target_path)?; fs::copy(&self.absolute_path, &target_path)?;
log::debug!("[{}] saved data to {}", getpid(), target_path.as_str()); log::debug!("[{}] saved data to {}", getpid(), &target_path.to_string_lossy());
Ok(()) Ok(())
} }

View file

@ -1,9 +1,12 @@
pub mod item; pub mod item;
use std::fmt::{Debug, Formatter};
use std::io::BufReader;
use std::path::Path;
use std::{fmt, io}; use std::{fmt, io};
use std::{
fmt::{Debug, Formatter},
path::PathBuf,
};
use std::{fs, path::Path};
use std::{fs::File, io::BufReader};
use crate::index::{Index, IndexItemIterator}; use crate::index::{Index, IndexItemIterator};
use anyhow::Result; use anyhow::Result;
@ -12,7 +15,7 @@ use item::RepositoryItem;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sha2::Digest; use sha2::Digest;
use sha2::Sha512; use sha2::Sha512;
use vfs::{VfsFileType, VfsPath}; use walkdir::WalkDir;
/// represents a place where backup is stored an can be restored from. /// represents a place where backup is stored an can be restored from.
/// right now only on-disk directory storage is supported /// right now only on-disk directory storage is supported
@ -21,7 +24,7 @@ use vfs::{VfsFileType, VfsPath};
#[derive(Debug)] #[derive(Debug)]
pub struct Repository { pub struct Repository {
/// path to where the repository is stored on disk /// path to where the repository is stored on disk
path: VfsPath, path: PathBuf,
index: Index, index: Index,
} }
@ -94,26 +97,26 @@ impl Debug for ItemId {
} }
impl<'a> Repository { impl<'a> Repository {
pub fn init(path: &VfsPath) -> Result<Repository> { pub fn init(path: &Path) -> Result<Repository> {
path.create_dir_all()?; fs::create_dir_all(path)?;
let mut index = Index::new()?; let mut index = Index::new()?;
index.save(path)?; index.save(path)?;
let repository = Repository::open(path)?; let repository = Repository::open(path)?;
repository.data_dir()?.create_dir_all()?; fs::create_dir_all(repository.data_dir()?)?;
Ok(repository) Ok(repository)
} }
pub fn open(path: &VfsPath) -> Result<Repository> { pub fn open(path: &Path) -> Result<Repository> {
let index = Index::load(path)?; let index = Index::load(path)?;
let repository = Repository { let repository = Repository {
path: path.clone(), path: path.to_path_buf(),
index, index,
}; };
Ok(repository) Ok(repository)
} }
pub fn path(&self) -> &VfsPath { pub fn path(&self) -> &Path {
&self.path &self.path
} }
@ -121,28 +124,27 @@ impl<'a> Repository {
self.index.save(&self.path) self.index.save(&self.path)
} }
pub fn store(&mut self, source_path: &VfsPath) -> Result<()> { pub fn store(&mut self, source_path: &Path) -> Result<()> {
let id = Repository::calculate_id(source_path)?; let id = Repository::calculate_id(source_path)?;
let destination = self.data_dir()?; let destination = self.data_dir()?;
let destination = destination.join(&id.to_string())?; let destination = destination.join(&id.to_string());
if source_path.metadata()?.file_type != VfsFileType::File { if !source_path.metadata()?.is_file() {
return Ok(()); return Ok(());
} }
let parent = destination let parent = destination
.parent() .parent()
.ok_or_else(|| anyhow!("cannot compute parent path for {}", &destination.as_str()))?; .ok_or_else(|| anyhow!("cannot compute parent path for {}", &destination.to_string_lossy()))?;
parent.create_dir_all()?; fs::create_dir_all(parent)?;
if !destination.exists()? { if !destination.exists() {
source_path.copy_file(&destination)?; fs::copy(&source_path, &destination)?;
} }
let destination_path = Path::new(destination.as_str()); let relative_path = destination.strip_prefix(&self.path())?;
let relative_path = destination_path.strip_prefix(&self.path.as_str())?.to_string_lossy(); self.index.remember(source_path, &relative_path.to_string_lossy(), id);
self.index.remember(source_path, &relative_path, id);
Ok(()) Ok(())
} }
pub fn newest_item_by_source_path(&self, path: &VfsPath) -> Result<Option<RepositoryItem>> { pub fn newest_item_by_source_path(&self, path: &Path) -> Result<Option<RepositoryItem>> {
let item = self.index.newest_item_by_source_path(path)?; let item = self.index.newest_item_by_source_path(path)?;
match item { match item {
None => Ok(None), None => Ok(None),
@ -170,7 +172,7 @@ impl<'a> Repository {
let relative_path = index_item.relative_path(); let relative_path = index_item.relative_path();
let repository_path = self.path(); let repository_path = self.path();
let original_source_path = index_item.original_source_path(); let original_source_path = index_item.original_source_path();
let absolute_path = repository_path.join(relative_path)?; let absolute_path = repository_path.join(relative_path);
Ok(RepositoryItem::from( Ok(RepositoryItem::from(
&original_source_path, &original_source_path,
&absolute_path, &absolute_path,
@ -181,21 +183,22 @@ impl<'a> Repository {
} }
pub fn data_weight(&self) -> Result<u64> { pub fn data_weight(&self) -> Result<u64> {
let walkdir = self.data_dir()?.walk_dir()?; let walker = WalkDir::new(self.data_dir()?);
let total_size = walkdir let total_size = walker
.filter_map(|entry| entry.ok()) .into_iter()
.filter_map(|entry| entry.metadata().ok()) .filter_map(|e| e.ok())
.filter(|metadata| metadata.file_type == VfsFileType::File) .filter_map(|e| e.metadata().ok())
.fold(0, |acc, m| acc + m.len); .filter(|m| m.is_file())
.fold(0, |acc, m| acc + m.len());
Ok(total_size) Ok(total_size)
} }
fn data_dir(&self) -> Result<VfsPath> { fn data_dir(&self) -> Result<PathBuf> {
Ok(self.path().join(DATA_DIR_NAME)?) Ok(self.path().join(DATA_DIR_NAME))
} }
fn calculate_id(source_path: &VfsPath) -> Result<ItemId> { fn calculate_id(source_path: &Path) -> Result<ItemId> {
let source_file = source_path.open_file()?; let source_file = File::open(source_path)?;
let mut reader = BufReader::new(source_file); let mut reader = BufReader::new(source_file);
let mut hasher = Sha512::new(); let mut hasher = Sha512::new();
@ -210,17 +213,17 @@ mod must {
use super::Repository; use super::Repository;
use crate::test::source::TestSource; use crate::test::source::TestSource;
use anyhow::Result; use anyhow::Result;
use vfs::MemoryFS; use tempfile::tempdir;
#[test] #[test]
fn have_size_equal_to_sum_of_sizes_of_backed_up_files() -> Result<()> { fn have_size_equal_to_sum_of_sizes_of_backed_up_files() -> Result<()> {
let file_size1 = 13; let file_size1 = 13;
let file_size2 = 27; let file_size2 = 27;
let source = TestSource::new()?; let source = TestSource::new()?;
let repository_path = MemoryFS::new().into(); let repository_path = tempdir()?;
Repository::init(&repository_path)?; Repository::init(&repository_path.path())?;
let mut backup_repository = Repository::open(&repository_path)?; let mut backup_repository = Repository::open(&repository_path.path())?;
source.write_random_bytes_to_file("file1", file_size1)?; source.write_random_bytes_to_file("file1", file_size1)?;
backup_repository.store(&source.file_path("file1")?)?; backup_repository.store(&source.file_path("file1")?)?;

View file

@ -1,14 +1,15 @@
use std::path::Path;
use crate::repository::{item::RepositoryItem, Repository}; use crate::repository::{item::RepositoryItem, Repository};
use anyhow::Result; use anyhow::Result;
use vfs::VfsPath;
pub struct Engine<'a> { pub struct Engine<'a> {
repository: &'a mut Repository, repository: &'a mut Repository,
target_path: &'a VfsPath, target_path: &'a Path,
} }
impl<'a> Engine<'a> { impl<'a> Engine<'a> {
pub fn new(repository: &'a mut Repository, target_path: &'a VfsPath) -> Result<Self> { pub fn new(repository: &'a mut Repository, target_path: &'a Path) -> Result<Self> {
Ok(Engine { repository, target_path }) Ok(Engine { repository, target_path })
} }

View file

@ -1,5 +1,9 @@
pub mod in_memory { pub mod in_memory {
use std::path::Path; use std::{
fs::File,
io::Read,
path::{Path, PathBuf},
};
use crate::{ use crate::{
backup, backup,
@ -8,73 +12,60 @@ pub mod in_memory {
test::source::TestSource, test::source::TestSource,
}; };
use anyhow::Result; use anyhow::Result;
use vfs::{MemoryFS, VfsFileType, VfsPath};
use rand::Rng; use tempfile::tempdir;
use walkdir::WalkDir;
pub fn random_in_memory_path(prefix: &str) -> Result<VfsPath> { pub fn assert_same_after_restore(source_path: &Path) -> Result<()> {
let path: VfsPath = MemoryFS::new().into(); let repository_path = tempdir()?;
let path = path.join(&format!("{}-{}", prefix, rand::thread_rng().gen::<u64>()))?; let restore_target = tempdir()?;
Ok(path)
}
pub fn assert_same_after_restore(source_path: &VfsPath) -> Result<()> { Repository::init(&repository_path.path())?;
let repository_path: VfsPath = random_in_memory_path("repository")?;
let restore_target: VfsPath = random_in_memory_path("target")?;
assert_ne!(source_path, &repository_path);
assert_ne!(repository_path, restore_target);
Repository::init(&repository_path)?;
{ {
let mut backup_repository = Repository::open(&repository_path)?; let mut backup_repository = Repository::open(&repository_path.path())?;
let mut backup_engine = backup::Engine::new(source_path, &mut backup_repository)?; let mut backup_engine = backup::Engine::new(source_path, &mut backup_repository)?;
backup_engine.backup()?; backup_engine.backup()?;
} }
{ {
let mut restore_repository = Repository::open(&repository_path)?; let mut restore_repository = Repository::open(&repository_path.path())?;
let mut restore_engine = restore::Engine::new(&mut restore_repository, &restore_target)?; let mut restore_engine = restore::Engine::new(&mut restore_repository, &restore_target.path())?;
restore_engine.restore_all()?; restore_engine.restore_all()?;
} }
assert_directory_trees_have_same_contents(source_path, &restore_target)?; assert_directory_trees_have_same_contents(source_path, &restore_target.path())?;
Ok(()) Ok(())
} }
pub fn assert_restored_file_contents( pub fn assert_restored_file_contents(repository_path: &Path, source_file_full_path: &Path, contents: &[u8]) -> Result<()> {
repository_path: &VfsPath,
source_file_full_path: &VfsPath,
contents: &[u8],
) -> Result<()> {
let mut restore_repository = Repository::open(repository_path)?; let mut restore_repository = Repository::open(repository_path)?;
let item = restore_repository.newest_item_by_source_path(&source_file_full_path)?; let item = restore_repository.newest_item_by_source_path(&source_file_full_path)?;
let restore_target = random_in_memory_path("target")?; let restore_target = tempdir()?;
let restore_engine = restore::Engine::new(&mut restore_repository, &restore_target)?; let restore_engine = restore::Engine::new(&mut restore_repository, &restore_target.path())?;
restore_engine.restore(&item.unwrap())?; restore_engine.restore(&item.unwrap())?;
let source_file_relative_path = Path::new(source_file_full_path.as_str()).strip_prefix("/")?; let source_file_relative_path = Path::new(source_file_full_path).strip_prefix("/")?;
let restored_file_path = restore_target.join(&source_file_relative_path.to_string_lossy())?; let restored_file_path = restore_target.path().join(&source_file_relative_path);
assert_target_file_contents(&restored_file_path, contents) assert_target_file_contents(&restored_file_path, contents)
} }
pub fn assert_restored_from_version_has_contents( pub fn assert_restored_from_version_has_contents(
repository_path: &VfsPath, repository_path: &Path,
source_file_full_path: &VfsPath, source_file_full_path: &Path,
old_contents: &[u8], old_contents: &[u8],
old_id: &ItemId, old_id: &ItemId,
) -> Result<()> { ) -> Result<()> {
let mut restore_repository = Repository::open(repository_path)?; let mut restore_repository = Repository::open(repository_path)?;
let old_item = restore_repository.item_by_id(&old_id)?; let old_item = restore_repository.item_by_id(&old_id)?;
let restore_target = random_in_memory_path("target")?; let restore_target = tempdir()?;
let restore_engine = restore::Engine::new(&mut restore_repository, &restore_target)?; let restore_engine = restore::Engine::new(&mut restore_repository, &restore_target.path())?;
restore_engine.restore(&old_item.unwrap())?; restore_engine.restore(&old_item.unwrap())?;
let source_file_relative_path = Path::new(source_file_full_path.as_str()).strip_prefix("/")?; let source_file_relative_path = Path::new(source_file_full_path).strip_prefix("/")?;
let restored_file_path = restore_target.join(&source_file_relative_path.to_string_lossy())?; let restored_file_path = restore_target.path().join(&source_file_relative_path);
assert_target_file_contents(&restored_file_path, old_contents) assert_target_file_contents(&restored_file_path, old_contents)
} }
pub fn newest_item(repository_path: &VfsPath, source_file_full_path: &VfsPath) -> Result<RepositoryItem> { pub fn newest_item(repository_path: &Path, source_file_full_path: &Path) -> Result<RepositoryItem> {
let item = { let item = {
let reading_repository = Repository::open(repository_path)?; let reading_repository = Repository::open(repository_path)?;
let item = reading_repository.newest_item_by_source_path(&source_file_full_path)?; let item = reading_repository.newest_item_by_source_path(&source_file_full_path)?;
@ -84,7 +75,7 @@ pub mod in_memory {
Ok(item) Ok(item)
} }
pub fn restore_all_from_reloaded_repository(repository_path: &VfsPath, restore_target: &VfsPath) -> Result<()> { pub fn restore_all_from_reloaded_repository(repository_path: &Path, restore_target: &Path) -> Result<()> {
{ {
let mut restore_repository = Repository::open(repository_path)?; let mut restore_repository = Repository::open(repository_path)?;
let mut restore_engine = restore::Engine::new(&mut restore_repository, &restore_target)?; let mut restore_engine = restore::Engine::new(&mut restore_repository, &restore_target)?;
@ -95,7 +86,7 @@ pub mod in_memory {
pub fn backup_file_with_text_contents( pub fn backup_file_with_text_contents(
source: &TestSource, source: &TestSource,
repository_path: &VfsPath, repository_path: &Path,
source_file_relative_path: &str, source_file_relative_path: &str,
contents: &str, contents: &str,
) -> Result<()> { ) -> Result<()> {
@ -106,7 +97,7 @@ pub mod in_memory {
pub fn backup_file_with_byte_contents( pub fn backup_file_with_byte_contents(
source: &TestSource, source: &TestSource,
repository_path: &VfsPath, repository_path: &Path,
source_file_relative_path: &str, source_file_relative_path: &str,
contents: &[u8], contents: &[u8],
) -> Result<()> { ) -> Result<()> {
@ -120,22 +111,22 @@ pub mod in_memory {
} }
} }
/// Opens the repository at `repository_path` and reports the total weight
/// (in bytes) of the data it currently stores.
///
/// # Errors
/// Returns an error when the repository cannot be opened or queried.
pub fn data_weight(repository_path: &Path) -> Result<u64> {
    // The extra inner block scope served no purpose — the repository is
    // dropped at the end of the function either way.
    let repository = Repository::open(repository_path)?;
    Ok(repository.data_weight()?)
}
fn assert_directory_trees_have_same_contents(left: &VfsPath, right: &VfsPath) -> Result<()> { fn assert_directory_trees_have_same_contents(left: &Path, right: &Path) -> Result<()> {
let left_files = get_sorted_files_recursively(left)?; let left_files = get_sorted_files_recursively(left)?;
let right_files = get_sorted_files_recursively(right)?; let right_files = get_sorted_files_recursively(right)?;
let pairs = left_files.iter().zip(right_files); let pairs = left_files.iter().zip(right_files);
for (l, r) in pairs { for (l, r) in pairs {
assert_eq!(l.filename(), r.filename()); assert_eq!(l.file_name(), r.file_name());
let mut fl = l.open_file()?; let mut fl = File::open(l)?;
let mut fr = r.open_file()?; let mut fr = File::open(r)?;
let mut bl = vec![]; let mut bl = vec![];
let mut br = vec![]; let mut br = vec![];
fl.read_to_end(&mut bl).unwrap(); fl.read_to_end(&mut bl).unwrap();
@ -145,35 +136,32 @@ pub mod in_memory {
Ok(()) Ok(())
} }
/// Walks `path` recursively and returns every regular file found, in a
/// deterministic order: entries are sorted by file name within each
/// directory during traversal (not globally across the whole tree).
///
/// Unreadable entries are silently skipped, matching the previous behavior.
///
/// # Panics
/// Panics when `path` does not exist, so a missing fixture fails loudly
/// instead of yielding an empty list.
pub fn get_sorted_files_recursively(path: &Path) -> Result<Vec<PathBuf>> {
    assert!(
        path.exists(),
        "[get_sorted_files_recursively] invoked on a path that does not exist: {:?}",
        path
    );
    let result = WalkDir::new(path)
        .sort_by_file_name()
        .into_iter()
        .filter_map(|entry| entry.ok())
        // `file_type()` is taken from the directory listing, avoiding the
        // extra metadata syscall that `entry.metadata()` performs.
        .filter(|entry| entry.file_type().is_file())
        // `into_path()` moves the owned path out instead of cloning it.
        .map(|entry| entry.into_path())
        .collect::<Vec<_>>();
    Ok(result)
}
fn assert_target_file_contents(restored_path: &VfsPath, expected_contents: &[u8]) -> Result<()> { fn assert_target_file_contents(restored_path: &Path, expected_contents: &[u8]) -> Result<()> {
let mut actual_contents = vec![]; let mut actual_contents = vec![];
assert!(restored_path.exists()?, "expected '{}' to be there", restored_path.as_str()); assert!(
restored_path.open_file()?.read_to_end(&mut actual_contents)?; restored_path.exists(),
"expected '{}' to be there",
restored_path.to_string_lossy()
);
let mut file = File::open(restored_path)?;
file.read_to_end(&mut actual_contents)?;
assert_eq!(expected_contents, actual_contents); assert_eq!(expected_contents, actual_contents);
Ok(()) Ok(())
} }

View file

@ -1,24 +1,25 @@
use std::io::Write; use std::{
fs::{self, File},
io::Write,
path::{Path, PathBuf},
};
use anyhow::Result; use anyhow::Result;
use vfs::VfsPath; use tempfile::{tempdir, TempDir};
use super::assertions::in_memory::random_in_memory_path;
/// A scratch directory of files that tests use as a backup source.
/// The backing directory is removed when the `TestSource` is dropped
/// (see the `Drop` impl below).
pub struct TestSource {
    // Owned temporary directory; its location is exposed via `path()`.
    directory: TempDir,
}
impl TestSource { impl TestSource {
/// Creates a new test source backed by a fresh temporary directory.
///
/// # Errors
/// Returns an error when the temporary directory cannot be created.
pub fn new() -> Result<Self> {
    Ok(Self {
        directory: tempdir()?,
    })
}
pub fn write_bytes_to_file(&self, filename: &str, bytes: &[u8]) -> Result<()> { pub fn write_bytes_to_file(&self, filename: &str, bytes: &[u8]) -> Result<()> {
let path = self.file_path(filename)?; let path = self.file_path(filename)?;
let mut file = path.create_file()?; let mut file = File::create(path)?;
file.write_all(bytes)?; file.write_all(bytes)?;
Ok(()) Ok(())
} }
@ -33,19 +34,19 @@ impl TestSource {
Ok(()) Ok(())
} }
pub fn path(&self) -> &VfsPath { pub fn path(&self) -> &Path {
&self.directory &self.directory.path()
} }
pub fn file_path(&self, filename: &str) -> Result<VfsPath> { pub fn file_path(&self, filename: &str) -> Result<PathBuf> {
let file_path = self.directory.join(filename)?; let file_path = self.directory.path().join(filename);
Ok(file_path) Ok(file_path)
} }
} }
impl Drop for TestSource { impl Drop for TestSource {
fn drop(&mut self) { fn drop(&mut self) {
let _ = self.path().remove_dir_all(); let _ = fs::remove_dir_all(self.path());
} }
} }
#[cfg(test)] #[cfg(test)]
@ -59,10 +60,10 @@ mod must {
{ {
let source = TestSource::new()?; let source = TestSource::new()?;
source.write_random_bytes_to_file("somefile", 1)?; source.write_random_bytes_to_file("somefile", 1)?;
path = source.path().clone(); path = source.path().to_path_buf();
} }
assert!(!path.exists()?); assert!(!path.exists());
Ok(()) Ok(())
} }
} }

View file

@ -1,5 +1,10 @@
#[cfg(test)] #[cfg(test)]
mod must { mod must {
use std::{
fs,
path::{Path, PathBuf},
};
use anyhow::Result; use anyhow::Result;
use bakare::test::source::TestSource; use bakare::test::source::TestSource;
use bakare::{backup, restore}; use bakare::{backup, restore};
@ -9,15 +14,15 @@ mod must {
sys::wait::{waitpid, WaitStatus}, sys::wait::{waitpid, WaitStatus},
unistd::getpid, unistd::getpid,
}; };
use vfs::{PhysicalFS, VfsPath}; use tempfile::tempdir;
#[test] #[test]
fn handle_concurrent_backups() -> Result<()> { fn handle_concurrent_backups() -> Result<()> {
setup_logger(); setup_logger();
let repository_directory = tempfile::tempdir()?.into_path(); let dir = tempdir()?;
let repository_path: VfsPath = PhysicalFS::new(repository_directory).into(); let repository_path = dir.path();
let repository_path = repository_path.join(&format!("repository-{}", getpid()))?; let repository_path = repository_path.join(&format!("repository-{}", getpid()));
Repository::init(&repository_path)?; Repository::init(&repository_path)?;
let parallel_backups_number = 16; let parallel_backups_number = 16;
@ -28,8 +33,8 @@ mod must {
assert_eq!(finished_backup_runs.len(), parallel_backups_number); assert_eq!(finished_backup_runs.len(), parallel_backups_number);
assert!(data_weight(&repository_path)? > 0); assert!(data_weight(&repository_path)? > 0);
let target_path: VfsPath = random_in_memory_path("target")?; let target_path = tempdir()?;
let all_restored_files = restore_all(&repository_path, &target_path)?; let all_restored_files = restore_all(&repository_path, &target_path.path())?;
assert_eq!(all_restored_files.len(), total_number_of_files); assert_eq!(all_restored_files.len(), total_number_of_files);
assert_all_files_in_place(parallel_backups_number, files_per_backup_number, &all_restored_files)?; assert_all_files_in_place(parallel_backups_number, files_per_backup_number, &all_restored_files)?;
@ -39,14 +44,16 @@ mod must {
fn assert_all_files_in_place( fn assert_all_files_in_place(
parallel_backups_number: usize, parallel_backups_number: usize,
files_per_backup_number: usize, files_per_backup_number: usize,
all_restored_files: &[VfsPath], all_restored_files: &[PathBuf],
) -> Result<()> { ) -> Result<()> {
for i in 0..parallel_backups_number { for i in 0..parallel_backups_number {
for j in 0..files_per_backup_number { for j in 0..files_per_backup_number {
let id = file_id(i, j); let id = file_id(i, j);
let file = all_restored_files.iter().find(|f| f.filename() == id); let file = all_restored_files
assert!(file.unwrap().exists()?, "file {:?} does not exist", file); .iter()
let contents = file.unwrap().read_to_string()?; .find(|f| f.file_name().unwrap().to_string_lossy() == id);
assert!(file.unwrap().exists(), "file {:?} does not exist", file);
let contents = fs::read_to_string(file.unwrap())?;
assert_eq!(id.to_string(), contents.to_owned()); assert_eq!(id.to_string(), contents.to_owned());
} }
} }
@ -54,7 +61,7 @@ mod must {
} }
fn backup_in_parallel( fn backup_in_parallel(
repository_path: &VfsPath, repository_path: &Path,
parallel_backups_number: usize, parallel_backups_number: usize,
files_per_backup_number: usize, files_per_backup_number: usize,
) -> Result<Vec<usize>> { ) -> Result<Vec<usize>> {
@ -86,7 +93,7 @@ mod must {
Ok(task_numbers) Ok(task_numbers)
} }
fn backup_process(task_number: usize, repository_path: &VfsPath, files_per_backup_number: usize) -> Result<()> { fn backup_process(task_number: usize, repository_path: &Path, files_per_backup_number: usize) -> Result<()> {
let mut repository = Repository::open(repository_path)?; let mut repository = Repository::open(repository_path)?;
let source = TestSource::new().unwrap(); let source = TestSource::new().unwrap();
let mut backup_engine = backup::Engine::new(source.path(), &mut repository)?; let mut backup_engine = backup::Engine::new(source.path(), &mut repository)?;
@ -98,7 +105,7 @@ mod must {
Ok(()) Ok(())
} }
fn restore_all(repository_path: &VfsPath, restore_target: &VfsPath) -> Result<Vec<VfsPath>> { fn restore_all(repository_path: &Path, restore_target: &Path) -> Result<Vec<PathBuf>> {
let mut restore_repository = Repository::open(repository_path)?; let mut restore_repository = Repository::open(repository_path)?;
let mut restore_engine = restore::Engine::new(&mut restore_repository, &restore_target)?; let mut restore_engine = restore::Engine::new(&mut restore_repository, &restore_target)?;
restore_engine.restore_all()?; restore_engine.restore_all()?;

View file

@ -3,12 +3,14 @@ mod must {
use bakare::test::assertions::in_memory::*; use bakare::test::assertions::in_memory::*;
use bakare::{repository::Repository, test::source::TestSource}; use bakare::{repository::Repository, test::source::TestSource};
use proptest::prelude::*; use proptest::prelude::*;
use tempfile::tempdir;
proptest! { proptest! {
#[test] #[test]
fn store_duplicated_files_just_once(contents in any::<[u8;3]>()) { fn store_duplicated_files_just_once(contents in any::<[u8;3]>()) {
let source = TestSource::new().unwrap(); let source = TestSource::new().unwrap();
let repository_path = random_in_memory_path("repository").unwrap(); let dir = tempdir().unwrap();
let repository_path = dir.path();
Repository::init(&repository_path).unwrap(); Repository::init(&repository_path).unwrap();
assert_eq!(data_weight(&repository_path).unwrap(), 0); assert_eq!(data_weight(&repository_path).unwrap(), 0);

View file

@ -4,6 +4,7 @@ mod must {
use bakare::backup; use bakare::backup;
use bakare::test::assertions::in_memory::*; use bakare::test::assertions::in_memory::*;
use bakare::{repository::Repository, test::source::TestSource}; use bakare::{repository::Repository, test::source::TestSource};
use tempfile::tempdir;
#[test] #[test]
fn restore_multiple_files() -> Result<()> { fn restore_multiple_files() -> Result<()> {
@ -19,8 +20,9 @@ mod must {
#[test] #[test]
fn restore_files_after_reopening_repository() -> Result<()> { fn restore_files_after_reopening_repository() -> Result<()> {
let source = TestSource::new()?; let source = TestSource::new()?;
let repository_path = random_in_memory_path("repository")?; let dir = tempdir()?;
let restore_target = random_in_memory_path("target")?; let repository_path = dir.path();
let restore_target = tempdir()?;
Repository::init(&repository_path)?; Repository::init(&repository_path)?;
@ -29,7 +31,7 @@ mod must {
backup_file_with_text_contents(&source, &repository_path, source_file_relative_path, original_contents)?; backup_file_with_text_contents(&source, &repository_path, source_file_relative_path, original_contents)?;
restore_all_from_reloaded_repository(&repository_path, &restore_target)?; restore_all_from_reloaded_repository(&repository_path, &restore_target.path())?;
let source_file_full_path = &source.file_path(source_file_relative_path)?; let source_file_full_path = &source.file_path(source_file_relative_path)?;
assert_restored_file_contents(&repository_path, source_file_full_path, original_contents.as_bytes()) assert_restored_file_contents(&repository_path, source_file_full_path, original_contents.as_bytes())
@ -38,7 +40,8 @@ mod must {
#[test] #[test]
fn restore_older_version_of_file() -> Result<()> { fn restore_older_version_of_file() -> Result<()> {
let source = TestSource::new().unwrap(); let source = TestSource::new().unwrap();
let repository_path = random_in_memory_path("repository")?; let dir = tempdir()?;
let repository_path = dir.path();
Repository::init(&repository_path)?; Repository::init(&repository_path)?;
let source_file_relative_path = "some path"; let source_file_relative_path = "some path";
@ -59,7 +62,8 @@ mod must {
#[test] #[test]
fn newer_version_should_be_greater_than_earlier_version() -> Result<()> { fn newer_version_should_be_greater_than_earlier_version() -> Result<()> {
let source = TestSource::new().unwrap(); let source = TestSource::new().unwrap();
let repository_path = random_in_memory_path("repository")?; let dir = tempdir()?;
let repository_path = dir.path();
Repository::init(&repository_path)?; Repository::init(&repository_path)?;
let source_file_relative_path = "some path"; let source_file_relative_path = "some path";
@ -83,7 +87,8 @@ mod must {
#[test] #[test]
fn restore_latest_version_by_default() -> Result<()> { fn restore_latest_version_by_default() -> Result<()> {
let source = TestSource::new().unwrap(); let source = TestSource::new().unwrap();
let repository_path = random_in_memory_path("repository")?; let dir = tempdir()?;
let repository_path = dir.path();
Repository::init(&repository_path)?; Repository::init(&repository_path)?;
let source_file_relative_path = "some path"; let source_file_relative_path = "some path";
@ -97,7 +102,8 @@ mod must {
#[test] #[test]
fn forbid_backup_of_paths_within_repository() -> Result<()> { fn forbid_backup_of_paths_within_repository() -> Result<()> {
let repository_path = random_in_memory_path("repository")?; let dir = tempdir()?;
let repository_path = dir.path();
Repository::init(&repository_path)?; Repository::init(&repository_path)?;
let mut repository = Repository::open(&repository_path)?; let mut repository = Repository::open(&repository_path)?;