First working, tray version
This commit is contained in:
1
tests/__init__.py
Normal file
1
tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Tests package
|
||||
117
tests/test_container.py
Normal file
117
tests/test_container.py
Normal file
@@ -0,0 +1,117 @@
|
||||
"""Tests for Container class.
|
||||
|
||||
Note: These tests require udisks2 to be installed and running.
|
||||
They actually mount/unmount images, so they're integration tests.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from src.core.container import Container, ContainerError
|
||||
from src.core.image_manager import create_sparse_image
|
||||
|
||||
|
||||
@pytest.fixture
def vault_image(tmp_path: Path) -> Path:
    """Provide a fresh 10 MB sparse vault image inside a temp directory."""
    path = tmp_path / "test.vault"
    create_sparse_image(path, size_mb=10)
    return path
|
||||
|
||||
|
||||
class TestContainer:
    """Tests for Container class.

    Tests marked ``integration`` mount and unmount real images and
    therefore require a working udisks2 setup (see module docstring).
    """

    @pytest.mark.integration
    def test_mount_and_unmount(self, vault_image: Path) -> None:
        """Test mounting and unmounting a vault image."""
        container = Container(vault_image)

        # Mount
        mount_point = container.mount()
        assert container.is_mounted()
        assert mount_point.exists()
        assert mount_point.is_dir()

        # Should be able to write files
        test_file = mount_point / "test.txt"
        test_file.write_text("Hello, Vault!")
        assert test_file.exists()

        # Unmount
        container.unmount()
        assert not container.is_mounted()

    @pytest.mark.integration
    def test_context_manager(self, vault_image: Path) -> None:
        """Test using container as context manager."""
        with Container(vault_image) as container:
            assert container.is_mounted()
            mount_point = container.mount_point
            assert mount_point is not None
            assert mount_point.exists()

        # Should be unmounted after context exits
        assert not container.is_mounted()

    @pytest.mark.integration
    def test_mount_creates_vault_directory(self, vault_image: Path) -> None:
        """Test that .vault directory can be created in mounted image."""
        with Container(vault_image) as container:
            vault_dir = container.mount_point / ".vault"  # type: ignore
            vault_dir.mkdir()
            assert vault_dir.exists()

            # Create manifest file
            manifest = vault_dir / "manifest.json"
            manifest.write_text('{"test": true}')
            assert manifest.exists()

    @pytest.mark.integration
    def test_mount_already_mounted(self, vault_image: Path) -> None:
        """Test that mounting already mounted container fails."""
        container = Container(vault_image)
        container.mount()

        # finally guarantees the image is unmounted even if the
        # assertion fails, so later tests don't inherit a stale mount.
        try:
            with pytest.raises(ContainerError, match="already mounted"):
                container.mount()
        finally:
            container.unmount()

    def test_mount_nonexistent_image(self, tmp_path: Path) -> None:
        """Test that mounting nonexistent image fails."""
        container = Container(tmp_path / "nonexistent.vault")

        with pytest.raises(ContainerError, match="not found"):
            container.mount()

    def test_is_mounted_initially_false(self, vault_image: Path) -> None:
        """Test that container is not mounted initially."""
        container = Container(vault_image)
        assert not container.is_mounted()

    @pytest.mark.integration
    def test_unmount_not_mounted(self, vault_image: Path) -> None:
        """Test that unmounting not mounted container is safe."""
        container = Container(vault_image)

        # Should not raise
        container.unmount()

    @pytest.mark.integration
    def test_data_persists_after_remount(self, vault_image: Path) -> None:
        """Test that data persists after unmount and remount."""
        test_content = "Persistent data test"

        # Write data
        with Container(vault_image) as container:
            test_file = container.mount_point / "persistent.txt"  # type: ignore
            test_file.write_text(test_content)

        # Read data after remount
        with Container(vault_image) as container:
            test_file = container.mount_point / "persistent.txt"  # type: ignore
            assert test_file.exists()
            assert test_file.read_text() == test_content
|
||||
151
tests/test_file_entry.py
Normal file
151
tests/test_file_entry.py
Normal file
@@ -0,0 +1,151 @@
|
||||
"""Tests for FileEntry dataclass."""
|
||||
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
import tempfile
|
||||
|
||||
import pytest
|
||||
|
||||
from src.core.file_entry import FileEntry
|
||||
|
||||
|
||||
class TestFileEntry:
    """Unit tests for the FileEntry dataclass."""

    def test_create_file_entry(self) -> None:
        """All constructor arguments are stored as attributes."""
        timestamp = datetime.now()
        entry = FileEntry(
            path="documents/test.txt",
            hash="sha256:abc123",
            size=1024,
            created=timestamp,
            modified=timestamp,
        )

        assert entry.path == "documents/test.txt"
        assert entry.hash == "sha256:abc123"
        assert entry.size == 1024
        assert entry.created == timestamp
        assert entry.modified == timestamp

    def test_file_entry_is_immutable(self) -> None:
        """Attribute assignment on a frozen FileEntry raises."""
        timestamp = datetime.now()
        entry = FileEntry(
            path="test.txt",
            hash="sha256:abc",
            size=100,
            created=timestamp,
            modified=timestamp,
        )

        with pytest.raises(AttributeError):
            entry.path = "other.txt"  # type: ignore

    def test_to_dict(self) -> None:
        """to_dict serializes datetimes to ISO-8601 strings."""
        entry = FileEntry(
            path="documents/file.txt",
            hash="sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            size=1234,
            created=datetime(2026, 1, 28, 10, 30, 0),
            modified=datetime(2026, 1, 28, 14, 20, 0),
        )

        expected = {
            "path": "documents/file.txt",
            "hash": "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            "size": 1234,
            "created": "2026-01-28T10:30:00",
            "modified": "2026-01-28T14:20:00",
        }
        assert entry.to_dict() == expected

    def test_from_dict(self) -> None:
        """from_dict parses ISO-8601 strings back into datetimes."""
        entry = FileEntry.from_dict(
            {
                "path": "documents/file.txt",
                "hash": "sha256:abc123",
                "size": 1234,
                "created": "2026-01-28T10:30:00",
                "modified": "2026-01-28T14:20:00",
            }
        )

        assert entry.path == "documents/file.txt"
        assert entry.hash == "sha256:abc123"
        assert entry.size == 1234
        assert entry.created == datetime(2026, 1, 28, 10, 30, 0)
        assert entry.modified == datetime(2026, 1, 28, 14, 20, 0)

    def test_from_path(self) -> None:
        """from_path hashes and stats a real file on disk."""
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            target = root / "test.txt"
            target.write_text("Hello, World!")

            entry = FileEntry.from_path(root, target)

            assert entry.path == "test.txt"
            assert entry.hash.startswith("sha256:")
            assert entry.size == 13  # len("Hello, World!")
            assert entry.created is not None
            assert entry.modified is not None

    def test_from_path_nested_directory(self) -> None:
        """from_path records the path relative to the base directory."""
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            subdir = root / "documents" / "work"
            subdir.mkdir(parents=True)
            target = subdir / "report.txt"
            target.write_text("Test content")

            entry = FileEntry.from_path(root, target)

            assert entry.path == "documents/work/report.txt"

    def test_has_changed_same_hash(self) -> None:
        """has_changed is False when hashes match."""
        timestamp = datetime.now()
        first = FileEntry(
            path="test.txt", hash="sha256:abc", size=100, created=timestamp, modified=timestamp
        )
        second = FileEntry(
            path="test.txt", hash="sha256:abc", size=100, created=timestamp, modified=timestamp
        )

        assert not first.has_changed(second)

    def test_has_changed_different_hash(self) -> None:
        """has_changed is True when hashes differ."""
        timestamp = datetime.now()
        first = FileEntry(
            path="test.txt", hash="sha256:abc", size=100, created=timestamp, modified=timestamp
        )
        second = FileEntry(
            path="test.txt", hash="sha256:xyz", size=100, created=timestamp, modified=timestamp
        )

        assert first.has_changed(second)

    def test_is_newer_than(self) -> None:
        """is_newer_than compares the modified timestamps."""
        earlier = datetime(2026, 1, 1, 10, 0, 0)
        later = datetime(2026, 1, 2, 10, 0, 0)

        stale = FileEntry(
            path="test.txt", hash="sha256:abc", size=100, created=earlier, modified=earlier
        )
        fresh = FileEntry(
            path="test.txt", hash="sha256:xyz", size=100, created=later, modified=later
        )

        assert fresh.is_newer_than(stale)
        assert not stale.is_newer_than(fresh)
|
||||
324
tests/test_file_sync.py
Normal file
324
tests/test_file_sync.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""Tests for file_sync module."""
|
||||
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from src.core.file_sync import (
|
||||
CopyProgress,
|
||||
copy_directory_with_progress,
|
||||
copy_file_with_progress,
|
||||
delete_directory,
|
||||
delete_file,
|
||||
move_file,
|
||||
sync_file,
|
||||
)
|
||||
|
||||
|
||||
class TestCopyProgress:
    """Tests for the CopyProgress dataclass."""

    @staticmethod
    def _make(copied: int, total: int) -> CopyProgress:
        """Build a CopyProgress with dummy paths."""
        return CopyProgress(
            src_path=Path("src"),
            dst_path=Path("dst"),
            bytes_copied=copied,
            total_bytes=total,
        )

    def test_percent_calculation(self) -> None:
        """percent is the copied fraction expressed as a percentage."""
        assert self._make(50, 100).percent == 50.0

    def test_percent_with_zero_total(self) -> None:
        """A zero-byte copy counts as 100% complete."""
        assert self._make(0, 0).percent == 100.0

    def test_is_complete_true(self) -> None:
        """is_complete is True once all bytes are copied."""
        assert self._make(100, 100).is_complete is True

    def test_is_complete_false(self) -> None:
        """is_complete is False while bytes remain."""
        assert self._make(50, 100).is_complete is False
|
||||
|
||||
|
||||
class TestCopyFileWithProgress:
    """Tests for copy_file_with_progress."""

    def test_copy_file(self, tmp_path: Path) -> None:
        """A plain copy reproduces the file contents."""
        source = tmp_path / "source.txt"
        target = tmp_path / "dest.txt"
        source.write_text("Hello, World!")

        copy_file_with_progress(source, target)

        assert target.exists()
        assert target.read_text() == "Hello, World!"

    def test_copy_file_creates_parent_dirs(self, tmp_path: Path) -> None:
        """Missing parent directories of the destination are created."""
        source = tmp_path / "source.txt"
        target = tmp_path / "nested" / "deep" / "dest.txt"
        source.write_text("content")

        copy_file_with_progress(source, target)

        assert target.exists()
        assert target.read_text() == "content"

    def test_copy_file_with_callback(self, tmp_path: Path) -> None:
        """The progress callback fires and the final report is complete."""
        source = tmp_path / "source.txt"
        target = tmp_path / "dest.txt"
        # Larger than one chunk so multiple reports are possible.
        source.write_text("x" * 5000)

        reports: list[CopyProgress] = []

        copy_file_with_progress(source, target, callback=reports.append, chunk_size=1000)

        assert len(reports) >= 1
        assert reports[-1].is_complete

    def test_copy_file_preserves_timestamps(self, tmp_path: Path) -> None:
        """The destination keeps (roughly) the source mtime."""
        source = tmp_path / "source.txt"
        target = tmp_path / "dest.txt"
        source.write_text("content")

        # Snapshot the source timestamps before copying.
        before = source.stat()

        copy_file_with_progress(source, target)

        after = target.stat()
        assert abs(before.st_mtime - after.st_mtime) < 1

    def test_copy_file_source_not_found(self, tmp_path: Path) -> None:
        """Copying a missing source raises FileNotFoundError."""
        source = tmp_path / "nonexistent.txt"
        target = tmp_path / "dest.txt"

        with pytest.raises(FileNotFoundError):
            copy_file_with_progress(source, target)

    def test_copy_file_source_is_directory(self, tmp_path: Path) -> None:
        """Copying from a directory raises IsADirectoryError."""
        source = tmp_path / "srcdir"
        target = tmp_path / "dest.txt"
        source.mkdir()

        with pytest.raises(IsADirectoryError):
            copy_file_with_progress(source, target)

    def test_copy_file_destination_is_directory(self, tmp_path: Path) -> None:
        """Copying onto an existing directory raises IsADirectoryError."""
        source = tmp_path / "source.txt"
        target = tmp_path / "dstdir"
        source.write_text("content")
        target.mkdir()

        with pytest.raises(IsADirectoryError):
            copy_file_with_progress(source, target)
|
||||
|
||||
|
||||
class TestCopyDirectoryWithProgress:
    """Tests for copy_directory_with_progress."""

    def test_copy_directory(self, tmp_path: Path) -> None:
        """All top-level files are copied with their contents."""
        source = tmp_path / "srcdir"
        target = tmp_path / "dstdir"
        source.mkdir()
        (source / "file1.txt").write_text("content1")
        (source / "file2.txt").write_text("content2")

        copy_directory_with_progress(source, target)

        assert target.exists()
        assert (target / "file1.txt").read_text() == "content1"
        assert (target / "file2.txt").read_text() == "content2"

    def test_copy_nested_directory(self, tmp_path: Path) -> None:
        """Subdirectories are copied recursively."""
        source = tmp_path / "srcdir"
        target = tmp_path / "dstdir"
        source.mkdir()
        (source / "nested").mkdir()
        (source / "nested" / "deep.txt").write_text("deep content")

        copy_directory_with_progress(source, target)

        assert (target / "nested" / "deep.txt").read_text() == "deep content"

    def test_copy_directory_not_found(self, tmp_path: Path) -> None:
        """A missing source directory raises FileNotFoundError."""
        source = tmp_path / "nonexistent"
        target = tmp_path / "dstdir"

        with pytest.raises(FileNotFoundError):
            copy_directory_with_progress(source, target)

    def test_copy_directory_source_is_file(self, tmp_path: Path) -> None:
        """A regular file as source raises NotADirectoryError."""
        source = tmp_path / "source.txt"
        target = tmp_path / "dstdir"
        source.write_text("content")

        with pytest.raises(NotADirectoryError):
            copy_directory_with_progress(source, target)
|
||||
|
||||
|
||||
class TestDeleteFile:
    """Tests for delete_file."""

    def test_delete_file(self, tmp_path: Path) -> None:
        """An existing file is removed."""
        target = tmp_path / "test.txt"
        target.write_text("content")

        delete_file(target)

        assert not target.exists()

    def test_delete_file_not_found(self, tmp_path: Path) -> None:
        """Deleting a missing file raises FileNotFoundError."""
        target = tmp_path / "nonexistent.txt"

        with pytest.raises(FileNotFoundError):
            delete_file(target)

    def test_delete_file_is_directory(self, tmp_path: Path) -> None:
        """Passing a directory raises IsADirectoryError."""
        target = tmp_path / "testdir"
        target.mkdir()

        with pytest.raises(IsADirectoryError):
            delete_file(target)
|
||||
|
||||
|
||||
class TestDeleteDirectory:
    """Tests for delete_directory."""

    def test_delete_directory(self, tmp_path: Path) -> None:
        """A directory and its files are removed."""
        target = tmp_path / "testdir"
        target.mkdir()
        (target / "file.txt").write_text("content")

        delete_directory(target)

        assert not target.exists()

    def test_delete_nested_directory(self, tmp_path: Path) -> None:
        """Removal recurses through subdirectories."""
        target = tmp_path / "testdir"
        target.mkdir()
        (target / "nested").mkdir()
        (target / "nested" / "deep.txt").write_text("content")

        delete_directory(target)

        assert not target.exists()

    def test_delete_directory_not_found(self, tmp_path: Path) -> None:
        """Deleting a missing directory raises FileNotFoundError."""
        target = tmp_path / "nonexistent"

        with pytest.raises(FileNotFoundError):
            delete_directory(target)

    def test_delete_directory_is_file(self, tmp_path: Path) -> None:
        """Passing a regular file raises NotADirectoryError."""
        target = tmp_path / "test.txt"
        target.write_text("content")

        with pytest.raises(NotADirectoryError):
            delete_directory(target)
|
||||
|
||||
|
||||
class TestMoveFile:
    """Tests for move_file."""

    def test_move_file(self, tmp_path: Path) -> None:
        """The source disappears and the destination has its contents."""
        source = tmp_path / "source.txt"
        target = tmp_path / "dest.txt"
        source.write_text("content")

        move_file(source, target)

        assert not source.exists()
        assert target.exists()
        assert target.read_text() == "content"

    def test_move_file_creates_parent_dirs(self, tmp_path: Path) -> None:
        """Missing parent directories of the destination are created."""
        source = tmp_path / "source.txt"
        target = tmp_path / "nested" / "deep" / "dest.txt"
        source.write_text("content")

        move_file(source, target)

        assert not source.exists()
        assert target.exists()

    def test_move_file_not_found(self, tmp_path: Path) -> None:
        """Moving a missing source raises FileNotFoundError."""
        source = tmp_path / "nonexistent.txt"
        target = tmp_path / "dest.txt"

        with pytest.raises(FileNotFoundError):
            move_file(source, target)
|
||||
|
||||
|
||||
class TestSyncFile:
    """Tests for sync_file function.

    mtime ordering between source and destination is established with
    os.utime rather than time.sleep: on filesystems with coarse (1 s)
    timestamp resolution a 0.1 s sleep does not guarantee distinct
    mtimes, which made the newer/older tests flaky, and explicit
    timestamps are also faster.
    """

    def test_sync_new_file(self, tmp_path: Path) -> None:
        """Syncing to a missing destination copies the file and returns True."""
        src = tmp_path / "source.txt"
        dst = tmp_path / "dest.txt"
        src.write_text("content")

        result = sync_file(src, dst)

        assert result is True
        assert dst.exists()
        assert dst.read_text() == "content"

    def test_sync_newer_source(self, tmp_path: Path) -> None:
        """A newer source overwrites the destination and returns True."""
        import os

        src = tmp_path / "source.txt"
        dst = tmp_path / "dest.txt"
        dst.write_text("old content")
        src.write_text("new content")

        # Force a 10 s mtime gap deterministically (no sleep, no
        # dependence on filesystem timestamp granularity).
        base = time.time()
        os.utime(dst, (base - 10, base - 10))
        os.utime(src, (base, base))

        result = sync_file(src, dst)

        assert result is True
        assert dst.read_text() == "new content"

    def test_sync_older_source(self, tmp_path: Path) -> None:
        """An older source does not overwrite a newer destination."""
        import os

        src = tmp_path / "source.txt"
        dst = tmp_path / "dest.txt"
        src.write_text("old content")
        dst.write_text("new content")

        # Force a 10 s mtime gap deterministically (no sleep, no
        # dependence on filesystem timestamp granularity).
        base = time.time()
        os.utime(src, (base - 10, base - 10))
        os.utime(dst, (base, base))

        result = sync_file(src, dst)

        assert result is False
        assert dst.read_text() == "new content"

    def test_sync_file_not_found(self, tmp_path: Path) -> None:
        """Syncing from a nonexistent source raises FileNotFoundError."""
        src = tmp_path / "nonexistent.txt"
        dst = tmp_path / "dest.txt"

        with pytest.raises(FileNotFoundError):
            sync_file(src, dst)
|
||||
242
tests/test_file_watcher.py
Normal file
242
tests/test_file_watcher.py
Normal file
@@ -0,0 +1,242 @@
|
||||
"""Tests for file_watcher module."""
|
||||
|
||||
import time
|
||||
from pathlib import Path
|
||||
from threading import Event
|
||||
|
||||
import pytest
|
||||
|
||||
from src.core.file_watcher import EventType, FileEvent, FileWatcher
|
||||
|
||||
|
||||
class TestFileEvent:
    """Tests for the FileEvent dataclass."""

    def test_create_file_event(self) -> None:
        """Fields are stored; dest_path defaults to None."""
        ev = FileEvent(
            event_type=EventType.CREATED,
            path="test.txt",
            is_directory=False,
        )

        assert ev.event_type == EventType.CREATED
        assert ev.path == "test.txt"
        assert ev.is_directory is False
        assert ev.dest_path is None

    def test_move_event_with_dest(self) -> None:
        """MOVED events carry a destination path."""
        ev = FileEvent(
            event_type=EventType.MOVED,
            path="old.txt",
            is_directory=False,
            dest_path="new.txt",
        )

        assert ev.event_type == EventType.MOVED
        assert ev.path == "old.txt"
        assert ev.dest_path == "new.txt"

    def test_str_representation(self) -> None:
        """str() shows the event type and path."""
        ev = FileEvent(EventType.CREATED, "test.txt", False)
        assert str(ev) == "created: test.txt"

    def test_str_representation_moved(self) -> None:
        """str() of a move shows source and destination."""
        ev = FileEvent(EventType.MOVED, "old.txt", False, "new.txt")
        assert str(ev) == "moved: old.txt -> new.txt"
|
||||
|
||||
|
||||
class TestFileWatcher:
    """Tests for FileWatcher class.

    Event-driven tests use a threading.Event set from the watcher
    callback; Event.wait(timeout=...) simply returns on timeout, so the
    assertions after the wait are what actually verify delivery.
    """

    def test_start_and_stop(self, tmp_path: Path) -> None:
        """start()/stop() toggle is_running()."""
        events: list[FileEvent] = []
        watcher = FileWatcher(tmp_path, callback=events.append)

        assert not watcher.is_running()
        watcher.start()
        assert watcher.is_running()
        watcher.stop()
        assert not watcher.is_running()

    def test_context_manager(self, tmp_path: Path) -> None:
        """Entering starts the watcher; exiting stops it."""
        events: list[FileEvent] = []

        with FileWatcher(tmp_path, callback=events.append) as watcher:
            assert watcher.is_running()

        assert not watcher.is_running()

    def test_start_nonexistent_path_raises(self, tmp_path: Path) -> None:
        """Starting on a missing directory raises FileNotFoundError."""
        nonexistent = tmp_path / "nonexistent"
        events: list[FileEvent] = []
        watcher = FileWatcher(nonexistent, callback=events.append)

        with pytest.raises(FileNotFoundError):
            watcher.start()

    def test_double_start_is_safe(self, tmp_path: Path) -> None:
        """Calling start() twice is idempotent."""
        events: list[FileEvent] = []
        watcher = FileWatcher(tmp_path, callback=events.append)

        watcher.start()
        watcher.start()  # Should not raise
        assert watcher.is_running()
        watcher.stop()

    def test_double_stop_is_safe(self, tmp_path: Path) -> None:
        """Calling stop() twice is idempotent."""
        events: list[FileEvent] = []
        watcher = FileWatcher(tmp_path, callback=events.append)

        watcher.start()
        watcher.stop()
        watcher.stop()  # Should not raise
        assert not watcher.is_running()

    def test_detects_file_creation(self, tmp_path: Path) -> None:
        """Creating a file produces a CREATED event with its name."""
        events: list[FileEvent] = []
        event_received = Event()

        def callback(event: FileEvent) -> None:
            events.append(event)
            event_received.set()

        with FileWatcher(tmp_path, callback=callback):
            # Create a file
            test_file = tmp_path / "test.txt"
            test_file.write_text("hello")

            # Wait for event
            event_received.wait(timeout=2.0)

        # Check that we got a CREATED event
        created_events = [e for e in events if e.event_type == EventType.CREATED]
        assert len(created_events) >= 1
        assert any(e.path == "test.txt" for e in created_events)

    def test_detects_file_deletion(self, tmp_path: Path) -> None:
        """Deleting a file produces a DELETED event with its name."""
        # Create file first
        test_file = tmp_path / "test.txt"
        test_file.write_text("hello")

        events: list[FileEvent] = []
        event_received = Event()

        def callback(event: FileEvent) -> None:
            events.append(event)
            if event.event_type == EventType.DELETED:
                event_received.set()

        with FileWatcher(tmp_path, callback=callback):
            # Delete the file
            test_file.unlink()

            # Wait for event
            event_received.wait(timeout=2.0)

        # Check that we got a DELETED event
        deleted_events = [e for e in events if e.event_type == EventType.DELETED]
        assert len(deleted_events) >= 1
        assert any(e.path == "test.txt" for e in deleted_events)

    def test_detects_file_move(self, tmp_path: Path) -> None:
        """Renaming a file produces a MOVED event with src and dest."""
        # Create file first
        test_file = tmp_path / "old.txt"
        test_file.write_text("hello")

        events: list[FileEvent] = []
        event_received = Event()

        def callback(event: FileEvent) -> None:
            events.append(event)
            if event.event_type == EventType.MOVED:
                event_received.set()

        with FileWatcher(tmp_path, callback=callback):
            # Move the file
            new_file = tmp_path / "new.txt"
            test_file.rename(new_file)

            # Wait for event
            event_received.wait(timeout=2.0)

        # Check that we got a MOVED event
        moved_events = [e for e in events if e.event_type == EventType.MOVED]
        assert len(moved_events) >= 1
        assert any(e.path == "old.txt" and e.dest_path == "new.txt" for e in moved_events)

    def test_ignores_vault_directory(self, tmp_path: Path) -> None:
        """Activity inside .vault/ is filtered out by default."""
        # Create .vault directory
        vault_dir = tmp_path / ".vault"
        vault_dir.mkdir()

        events: list[FileEvent] = []

        with FileWatcher(tmp_path, callback=events.append):
            # Create file inside .vault
            (vault_dir / "manifest.json").write_text("{}")
            # No positive event expected, so give the watcher a fixed
            # window in which a spurious event could have arrived.
            time.sleep(0.5)

        # No events should be recorded for .vault directory
        assert all(".vault" not in e.path for e in events)

    def test_custom_ignore_patterns(self, tmp_path: Path) -> None:
        """User-supplied ignore patterns suppress matching paths only."""
        events: list[FileEvent] = []
        event_received = Event()

        def callback(event: FileEvent) -> None:
            events.append(event)
            event_received.set()

        with FileWatcher(tmp_path, callback=callback, ignore_patterns=[".vault", "__pycache__"]):
            # Create ignored directory
            cache_dir = tmp_path / "__pycache__"
            cache_dir.mkdir()
            (cache_dir / "test.pyc").write_text("cached")
            # Window for a (wrongly) delivered ignored-path event.
            time.sleep(0.2)

            # Create non-ignored file
            (tmp_path / "regular.txt").write_text("hello")
            event_received.wait(timeout=2.0)

        # Only regular.txt events should be recorded
        assert all("__pycache__" not in e.path for e in events)
        assert any("regular.txt" in e.path for e in events)

    def test_detects_nested_file_creation(self, tmp_path: Path) -> None:
        """Events inside subdirectories report watch-root-relative paths."""
        # Create nested directory
        nested = tmp_path / "subdir" / "nested"
        nested.mkdir(parents=True)

        events: list[FileEvent] = []
        event_received = Event()

        def callback(event: FileEvent) -> None:
            events.append(event)
            if event.event_type == EventType.CREATED and "deep.txt" in event.path:
                event_received.set()

        with FileWatcher(tmp_path, callback=callback):
            # Create file in nested directory
            (nested / "deep.txt").write_text("nested content")
            event_received.wait(timeout=2.0)

        # Check event has correct relative path
        # (both separators accepted for platform independence)
        created_events = [e for e in events if e.event_type == EventType.CREATED]
        assert any("subdir/nested/deep.txt" in e.path or "subdir\\nested\\deep.txt" in e.path for e in created_events)

    def test_detects_directory_creation(self, tmp_path: Path) -> None:
        """Creating a directory produces a CREATED event with is_directory set."""
        events: list[FileEvent] = []
        event_received = Event()

        def callback(event: FileEvent) -> None:
            events.append(event)
            if event.is_directory and event.event_type == EventType.CREATED:
                event_received.set()

        with FileWatcher(tmp_path, callback=callback):
            # Create directory
            (tmp_path / "newdir").mkdir()
            event_received.wait(timeout=2.0)

        # Check directory creation event
        dir_events = [e for e in events if e.is_directory and e.event_type == EventType.CREATED]
        assert len(dir_events) >= 1
        assert any(e.path == "newdir" for e in dir_events)
|
||||
147
tests/test_image_manager.py
Normal file
147
tests/test_image_manager.py
Normal file
@@ -0,0 +1,147 @@
|
||||
"""Tests for image_manager module."""
|
||||
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from src.core.image_manager import (
|
||||
ImageError,
|
||||
create_sparse_image,
|
||||
delete_image,
|
||||
get_image_info,
|
||||
resize_image,
|
||||
)
|
||||
|
||||
|
||||
class TestCreateSparseImage:
    """Tests for create_sparse_image."""

    def test_create_sparse_image(self) -> None:
        """The image file is created with the requested logical size."""
        with tempfile.TemporaryDirectory() as tmpdir:
            image = Path(tmpdir) / "test.vault"

            create_sparse_image(image, size_mb=10)

            assert image.exists()
            # Logical (apparent) size must be exactly 10 MB.
            assert image.stat().st_size == 10 * 1024 * 1024

    def test_create_sparse_image_is_sparse(self) -> None:
        """The image consumes far less disk than its logical size."""
        with tempfile.TemporaryDirectory() as tmpdir:
            image = Path(tmpdir) / "test.vault"

            create_sparse_image(image, size_mb=100)

            info = image.stat()
            # st_blocks counts 512-byte units of real disk usage.
            on_disk = info.st_blocks * 512
            # Allow up to 10% of the logical size for filesystem
            # metadata (e.g. exFAT), so not strictly zero.
            assert on_disk < info.st_size * 0.1

    def test_create_sparse_image_already_exists(self) -> None:
        """Creating over an existing file raises ImageError."""
        with tempfile.TemporaryDirectory() as tmpdir:
            image = Path(tmpdir) / "test.vault"
            image.touch()

            with pytest.raises(ImageError, match="already exists"):
                create_sparse_image(image, size_mb=10)

    def test_create_sparse_image_invalid_path(self) -> None:
        """An unwritable location raises ImageError."""
        image = Path("/nonexistent/directory/test.vault")

        with pytest.raises(ImageError):
            create_sparse_image(image, size_mb=10)
|
||||
|
||||
|
||||
class TestResizeImage:
    """Tests for resize_image."""

    def test_resize_image(self) -> None:
        """Growing the image updates its logical size."""
        with tempfile.TemporaryDirectory() as tmpdir:
            image = Path(tmpdir) / "test.vault"
            create_sparse_image(image, size_mb=10)

            resize_image(image, new_size_mb=20)

            assert image.stat().st_size == 20 * 1024 * 1024

    def test_resize_image_smaller_fails(self) -> None:
        """Shrinking is rejected with ImageError."""
        with tempfile.TemporaryDirectory() as tmpdir:
            image = Path(tmpdir) / "test.vault"
            create_sparse_image(image, size_mb=20)

            with pytest.raises(ImageError, match="must be larger"):
                resize_image(image, new_size_mb=10)

    def test_resize_image_same_size_fails(self) -> None:
        """Resizing to the current size is rejected with ImageError."""
        with tempfile.TemporaryDirectory() as tmpdir:
            image = Path(tmpdir) / "test.vault"
            create_sparse_image(image, size_mb=10)

            with pytest.raises(ImageError, match="must be larger"):
                resize_image(image, new_size_mb=10)

    def test_resize_nonexistent_image(self) -> None:
        """Resizing a missing image raises ImageError."""
        image = Path("/nonexistent/test.vault")

        with pytest.raises(ImageError, match="not found"):
            resize_image(image, new_size_mb=20)
|
||||
|
||||
|
||||
class TestGetImageInfo:
    """Tests for get_image_info."""

    def test_get_image_info(self) -> None:
        """Info reports path, logical size, actual size, and sparseness."""
        with tempfile.TemporaryDirectory() as tmpdir:
            image = Path(tmpdir) / "test.vault"
            create_sparse_image(image, size_mb=50)

            info = get_image_info(image)

            assert info["path"] == str(image)
            assert info["size_mb"] == 50
            # A freshly created sparse image uses less than its size.
            assert info["actual_size_mb"] < 50
            assert 0 < info["sparse_ratio"] < 1

    def test_get_image_info_nonexistent(self) -> None:
        """Info on a missing image raises ImageError."""
        image = Path("/nonexistent/test.vault")

        with pytest.raises(ImageError, match="not found"):
            get_image_info(image)
|
||||
|
||||
|
||||
class TestDeleteImage:
    """Tests for delete_image function."""

    def test_delete_image(self) -> None:
        """Deleting an existing image removes it from disk."""
        with tempfile.TemporaryDirectory() as tmp:
            image = Path(tmp) / "test.vault"
            create_sparse_image(image, size_mb=10)
            assert image.exists()

            delete_image(image)

            assert not image.exists()

    def test_delete_nonexistent_image(self) -> None:
        """Deleting a missing image raises 'not found'."""
        missing = Path("/nonexistent/test.vault")

        with pytest.raises(ImageError, match="not found"):
            delete_image(missing)
|
||||
# ==== tests/test_lock.py — new file, 182 lines ====
|
||||
"""Tests for VaultLock."""
|
||||
|
||||
import multiprocessing
|
||||
import os
|
||||
import tempfile
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from src.core.lock import VaultLock, VaultLockError
|
||||
|
||||
|
||||
class TestVaultLock:
    """Tests for VaultLock class."""

    def test_acquire_and_release(self) -> None:
        """A lock can be acquired, leaves a lock file, and can be released."""
        with tempfile.TemporaryDirectory() as tmp:
            path = Path(tmp) / ".vault" / "lock"
            vlock = VaultLock(path)

            assert vlock.acquire()
            assert path.exists()
            vlock.release()

    def test_lock_creates_directory(self) -> None:
        """Acquiring creates missing parent directories."""
        with tempfile.TemporaryDirectory() as tmp:
            path = Path(tmp) / "nested" / "dir" / "lock"
            vlock = VaultLock(path)

            assert vlock.acquire()
            assert path.parent.exists()
            vlock.release()

    def test_lock_writes_pid(self) -> None:
        """The lock records the owning process id."""
        with tempfile.TemporaryDirectory() as tmp:
            vlock = VaultLock(Path(tmp) / "lock")

            vlock.acquire()
            owner = vlock.get_owner_pid()
            vlock.release()

            assert owner == os.getpid()

    def test_release_removes_lock_file(self) -> None:
        """Releasing removes the lock file."""
        with tempfile.TemporaryDirectory() as tmp:
            path = Path(tmp) / "lock"
            vlock = VaultLock(path)

            vlock.acquire()
            vlock.release()

            assert not path.exists()

    def test_release_safe_when_not_locked(self) -> None:
        """Releasing an unacquired lock does not raise."""
        with tempfile.TemporaryDirectory() as tmp:
            # Should not raise
            VaultLock(Path(tmp) / "lock").release()

    def test_is_locked_when_not_locked(self) -> None:
        """is_locked is False before any acquire."""
        with tempfile.TemporaryDirectory() as tmp:
            assert not VaultLock(Path(tmp) / "lock").is_locked()

    def test_is_locked_when_locked(self) -> None:
        """is_locked is True as observed from a second VaultLock instance."""
        with tempfile.TemporaryDirectory() as tmp:
            path = Path(tmp) / "lock"
            holder = VaultLock(path)

            holder.acquire()
            try:
                observer = VaultLock(path)
                assert observer.is_locked()
            finally:
                holder.release()

    def test_context_manager(self) -> None:
        """The context manager acquires on enter and releases on exit."""
        with tempfile.TemporaryDirectory() as tmp:
            path = Path(tmp) / "lock"

            with VaultLock(path):
                assert path.exists()

            assert not path.exists()

    def test_context_manager_raises_when_locked(self) -> None:
        """Entering the context manager fails when the lock is already held."""
        with tempfile.TemporaryDirectory() as tmp:
            path = Path(tmp) / "lock"
            holder = VaultLock(path)
            holder.acquire()

            try:
                with pytest.raises(VaultLockError):
                    with VaultLock(path):
                        pass
            finally:
                holder.release()

    def test_get_owner_pid_no_lock_file(self) -> None:
        """get_owner_pid is None when no lock file exists."""
        with tempfile.TemporaryDirectory() as tmp:
            assert VaultLock(Path(tmp) / "lock").get_owner_pid() is None
|
||||
|
||||
|
||||
def _acquire_lock_in_subprocess(lock_path: str, result_queue: multiprocessing.Queue) -> None:
    """Attempt to take the vault lock in a child process and report the result.

    Puts True/False on *result_queue*; on success the lock is held briefly
    so the parent can observe contention, then released.
    """
    child_lock = VaultLock(Path(lock_path))
    got_it = child_lock.acquire()
    result_queue.put(got_it)
    if not got_it:
        return
    time.sleep(0.5)  # hold briefly so the parent can observe the held lock
    child_lock.release()
|
||||
|
||||
|
||||
class TestVaultLockMultiprocess:
    """Tests for VaultLock with multiple processes."""

    @staticmethod
    def _run_child(lock_path: Path) -> bool:
        """Run _acquire_lock_in_subprocess in a child and return its result.

        Always reaps the child: if it is still alive after the join timeout
        (e.g. the test is about to fail), terminate it so no process leaks.
        """
        result_queue: multiprocessing.Queue = multiprocessing.Queue()
        process = multiprocessing.Process(
            target=_acquire_lock_in_subprocess,
            args=(str(lock_path), result_queue),
        )
        process.start()
        try:
            # Read the result first; the child holds the lock for 0.5s
            # after reporting, so joining first would just add latency.
            acquired = result_queue.get(timeout=2)
        finally:
            process.join(timeout=2)
            if process.is_alive():
                # BUG FIX: previously a hung child was never terminated,
                # leaking a subprocess when the test failed or timed out.
                process.terminate()
                process.join()
        return acquired

    def test_second_process_cannot_acquire(self) -> None:
        """A lock held by this process cannot be acquired by a child."""
        with tempfile.TemporaryDirectory() as tmpdir:
            lock_path = Path(tmpdir) / "lock"
            lock = VaultLock(lock_path)

            # Acquire lock in main process
            assert lock.acquire()

            try:
                # Subprocess must be refused while we hold the lock.
                assert not self._run_child(lock_path)
            finally:
                lock.release()

    def test_process_can_acquire_after_release(self) -> None:
        """A child process can acquire the lock once it has been released."""
        with tempfile.TemporaryDirectory() as tmpdir:
            lock_path = Path(tmpdir) / "lock"
            lock = VaultLock(lock_path)

            # Acquire and release in the main process first.
            lock.acquire()
            lock.release()

            # Now the subprocess should succeed.
            assert self._run_child(lock_path)
|
||||
# ==== tests/test_manifest.py — new file, 266 lines ====
|
||||
"""Tests for Manifest dataclass."""
|
||||
|
||||
import tempfile
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from src.core.file_entry import FileEntry
|
||||
from src.core.manifest import Location, Manifest
|
||||
|
||||
|
||||
class TestLocation:
    """Tests for Location dataclass."""

    def test_create_location(self) -> None:
        """Fields are stored exactly as given."""
        now = datetime.now()
        loc = Location(path="/mnt/disk1/vault.vault", last_seen=now, status="active")

        assert loc.path == "/mnt/disk1/vault.vault"
        assert loc.last_seen == now
        assert loc.status == "active"

    def test_to_dict(self) -> None:
        """to_dict serializes last_seen as an ISO-8601 string."""
        seen = datetime(2026, 1, 28, 15, 45, 0)
        loc = Location(path="/mnt/disk1/vault.vault", last_seen=seen, status="active")

        assert loc.to_dict() == {
            "path": "/mnt/disk1/vault.vault",
            "last_seen": "2026-01-28T15:45:00",
            "status": "active",
        }

    def test_from_dict(self) -> None:
        """from_dict parses the ISO timestamp back into a datetime."""
        loc = Location.from_dict(
            {
                "path": "/mnt/nas/vault.vault",
                "last_seen": "2026-01-25T08:00:00",
                "status": "unreachable",
            }
        )

        assert loc.path == "/mnt/nas/vault.vault"
        assert loc.last_seen == datetime(2026, 1, 25, 8, 0, 0)
        assert loc.status == "unreachable"
|
||||
|
||||
|
||||
class TestManifest:
    """Tests for Manifest dataclass."""

    @staticmethod
    def _small_manifest() -> Manifest:
        # Shared fixture values used by the mutation tests below.
        return Manifest.create_new(
            vault_name="Test",
            image_size_mb=512,
            location_path="/disk1/vault.vault",
        )

    def test_create_new_manifest(self) -> None:
        """create_new seeds id, version 1, one active location, no files."""
        manifest = Manifest.create_new(
            vault_name="My Vault",
            image_size_mb=1024,
            location_path="/mnt/disk1/myvault.vault",
        )

        assert manifest.vault_name == "My Vault"
        assert manifest.image_size_mb == 1024
        assert manifest.version == 1
        assert len(manifest.locations) == 1
        assert manifest.locations[0].path == "/mnt/disk1/myvault.vault"
        assert manifest.locations[0].status == "active"
        assert len(manifest.files) == 0
        assert manifest.vault_id  # UUID should be set

    def test_to_dict(self) -> None:
        """to_dict carries scalar fields plus locations/files collections."""
        manifest = Manifest.create_new(
            vault_name="Test Vault",
            image_size_mb=512,
            location_path="/test/vault.vault",
        )

        data = manifest.to_dict()

        assert data["vault_name"] == "Test Vault"
        assert data["image_size_mb"] == 512
        assert data["version"] == 1
        assert len(data["locations"]) == 1
        assert len(data["files"]) == 0
        assert "vault_id" in data
        assert "created" in data
        assert "last_modified" in data

    def test_from_dict(self) -> None:
        """from_dict rebuilds nested locations and file entries."""
        data = {
            "vault_id": "550e8400-e29b-41d4-a716-446655440000",
            "vault_name": "My Vault",
            "version": 1,
            "created": "2026-01-28T10:30:00",
            "last_modified": "2026-01-28T15:45:00",
            "image_size_mb": 10240,
            "locations": [
                {
                    "path": "/mnt/disk1/myvault.vault",
                    "last_seen": "2026-01-28T15:45:00",
                    "status": "active",
                }
            ],
            "files": [
                {
                    "path": "documents/file.txt",
                    "hash": "sha256:abc123",
                    "size": 1234,
                    "created": "2026-01-28T10:30:00",
                    "modified": "2026-01-28T14:20:00",
                }
            ],
        }

        manifest = Manifest.from_dict(data)

        assert manifest.vault_id == "550e8400-e29b-41d4-a716-446655440000"
        assert manifest.vault_name == "My Vault"
        assert manifest.image_size_mb == 10240
        assert len(manifest.locations) == 1
        assert len(manifest.files) == 1
        assert manifest.files[0].path == "documents/file.txt"

    def test_save_and_load(self) -> None:
        """save writes .vault/manifest.json; load restores the same data."""
        with tempfile.TemporaryDirectory() as tmp:
            mount = Path(tmp)

            original = Manifest.create_new(
                vault_name="Test Vault",
                image_size_mb=512,
                location_path="/test/vault.vault",
            )
            original.save(mount)

            assert (mount / ".vault" / "manifest.json").exists()

            restored = Manifest.load(mount)
            assert restored.vault_id == original.vault_id
            assert restored.vault_name == original.vault_name
            assert restored.image_size_mb == original.image_size_mb

    def test_add_location(self) -> None:
        """add_location appends an active location and bumps last_modified."""
        manifest = self._small_manifest()
        before = manifest.last_modified

        manifest.add_location("/disk2/vault.vault")

        assert len(manifest.locations) == 2
        assert manifest.locations[1].path == "/disk2/vault.vault"
        assert manifest.locations[1].status == "active"
        assert manifest.last_modified >= before

    def test_update_location_status(self) -> None:
        """update_location_status changes the matching location's status."""
        manifest = self._small_manifest()

        manifest.update_location_status("/disk1/vault.vault", "unreachable")

        assert manifest.locations[0].status == "unreachable"

    def test_add_file(self) -> None:
        """add_file appends a new entry."""
        manifest = self._small_manifest()
        now = datetime.now()

        manifest.add_file(
            FileEntry(path="test.txt", hash="sha256:abc", size=100, created=now, modified=now)
        )

        assert len(manifest.files) == 1
        assert manifest.files[0].path == "test.txt"

    def test_add_file_updates_existing(self) -> None:
        """add_file replaces an entry that shares the same path."""
        manifest = self._small_manifest()
        now = datetime.now()

        # First version, then an update for the same path.
        manifest.add_file(
            FileEntry(path="test.txt", hash="sha256:old", size=100, created=now, modified=now)
        )
        manifest.add_file(
            FileEntry(path="test.txt", hash="sha256:new", size=200, created=now, modified=now)
        )

        assert len(manifest.files) == 1
        assert manifest.files[0].hash == "sha256:new"
        assert manifest.files[0].size == 200

    def test_remove_file(self) -> None:
        """remove_file drops the entry with the given path."""
        manifest = self._small_manifest()
        now = datetime.now()
        manifest.add_file(
            FileEntry(path="test.txt", hash="sha256:abc", size=100, created=now, modified=now)
        )

        manifest.remove_file("test.txt")

        assert len(manifest.files) == 0

    def test_get_file(self) -> None:
        """get_file returns the entry by path, or None when absent."""
        manifest = self._small_manifest()
        now = datetime.now()
        manifest.add_file(
            FileEntry(path="test.txt", hash="sha256:abc", size=100, created=now, modified=now)
        )

        found = manifest.get_file("test.txt")
        not_found = manifest.get_file("nonexistent.txt")

        assert found is not None
        assert found.path == "test.txt"
        assert not_found is None
|
||||
# ==== tests/test_sync_manager.py — new file, 391 lines ====
|
||||
"""Tests for sync_manager module."""
|
||||
|
||||
import time
|
||||
from pathlib import Path
|
||||
from threading import Event
|
||||
|
||||
import pytest
|
||||
|
||||
from src.core.file_watcher import EventType
|
||||
from src.core.manifest import Manifest
|
||||
from src.core.sync_manager import (
|
||||
ReplicaMount,
|
||||
SyncEvent,
|
||||
SyncManager,
|
||||
SyncStatus,
|
||||
)
|
||||
|
||||
|
||||
class TestReplicaMount:
    """Tests for ReplicaMount dataclass."""

    def test_get_file_path(self, tmp_path: Path) -> None:
        """get_file_path joins a relative vault path under the mount point."""
        replica = ReplicaMount(
            mount_point=tmp_path / "mount",
            image_path=tmp_path / "vault.vault",
            is_primary=False,
        )

        expected = tmp_path / "mount" / "docs" / "file.txt"
        assert replica.get_file_path("docs/file.txt") == expected
|
||||
|
||||
|
||||
class TestSyncManager:
    """Tests for SyncManager class."""

    def test_initial_state(self) -> None:
        """A fresh manager is idle with no replicas and no primary."""
        manager = SyncManager()
        assert manager.status == SyncStatus.IDLE
        assert manager.replica_count == 0
        assert manager.primary_mount is None

    def test_add_replica(self, tmp_path: Path) -> None:
        """Adding a secondary replica does not set the primary mount."""
        manager = SyncManager()
        mount = tmp_path / "mount"
        mount.mkdir()
        image = tmp_path / "vault.vault"

        manager.add_replica(mount, image, is_primary=False)

        assert manager.replica_count == 1
        assert manager.primary_mount is None

    def test_add_primary_replica(self, tmp_path: Path) -> None:
        """Adding a primary replica records its mount point."""
        manager = SyncManager()
        mount = tmp_path / "mount"
        mount.mkdir()
        image = tmp_path / "vault.vault"

        manager.add_replica(mount, image, is_primary=True)

        assert manager.replica_count == 1
        assert manager.primary_mount == mount

    def test_remove_replica(self, tmp_path: Path) -> None:
        """Removing a known replica returns True and drops it."""
        manager = SyncManager()
        mount = tmp_path / "mount"
        mount.mkdir()
        image = tmp_path / "vault.vault"

        manager.add_replica(mount, image)
        assert manager.replica_count == 1

        result = manager.remove_replica(mount)
        assert result is True
        assert manager.replica_count == 0

    def test_remove_nonexistent_replica(self, tmp_path: Path) -> None:
        """Removing an unknown mount returns False."""
        manager = SyncManager()
        result = manager.remove_replica(tmp_path / "nonexistent")
        assert result is False

    def test_start_watching_without_primary_raises(self) -> None:
        """start_watching requires a primary replica to be registered."""
        manager = SyncManager()
        with pytest.raises(ValueError, match="No primary replica"):
            manager.start_watching()

    def test_start_and_stop_watching(self, tmp_path: Path) -> None:
        """Watching can be started and stopped without error."""
        manager = SyncManager()
        mount = tmp_path / "mount"
        mount.mkdir()
        image = tmp_path / "vault.vault"

        manager.add_replica(mount, image, is_primary=True)
        manager.start_watching()
        manager.stop_watching()

    def test_pause_and_resume_sync(self, tmp_path: Path) -> None:
        """Nothing propagates while paused; events flow again after resume."""
        events: list[SyncEvent] = []
        manager = SyncManager(on_sync_event=events.append)

        primary = tmp_path / "primary"
        secondary = tmp_path / "secondary"
        primary.mkdir()
        secondary.mkdir()

        manager.add_replica(primary, tmp_path / "primary.vault", is_primary=True)
        manager.add_replica(secondary, tmp_path / "secondary.vault")
        manager.start_watching()

        try:
            # Pause sync
            manager.pause_sync()

            # Create file while paused
            (primary / "paused.txt").write_text("created while paused")
            time.sleep(0.3)

            # No events should be recorded, nothing synced.
            assert len(events) == 0
            assert not (secondary / "paused.txt").exists()

            # BUG FIX: despite its name, the original test never resumed.
            # NOTE(review): assumes SyncManager exposes resume_sync() as the
            # counterpart to pause_sync() — confirm against src/core/sync_manager.
            manager.resume_sync()
            (primary / "resumed.txt").write_text("created after resume")

            # Poll briefly: after resume, sync events must flow again.
            deadline = time.monotonic() + 2.0
            while time.monotonic() < deadline and not events:
                time.sleep(0.05)
            assert events
        finally:
            manager.stop_watching()
|
||||
|
||||
|
||||
class TestSyncManagerPropagation:
    """Tests for file propagation in SyncManager."""

    def test_propagate_file_creation(self, tmp_path: Path) -> None:
        """A file created in the primary is copied to the secondary."""
        seen: list[SyncEvent] = []
        got_event = Event()

        def record(event: SyncEvent) -> None:
            seen.append(event)
            got_event.set()

        manager = SyncManager(on_sync_event=record)

        primary = tmp_path / "primary"
        secondary = tmp_path / "secondary"
        primary.mkdir()
        secondary.mkdir()

        manager.add_replica(primary, tmp_path / "primary.vault", is_primary=True)
        manager.add_replica(secondary, tmp_path / "secondary.vault")
        manager.start_watching()

        # Create file in primary, then wait for the sync event.
        (primary / "test.txt").write_text("hello")
        got_event.wait(timeout=2.0)
        manager.stop_watching()

        assert (secondary / "test.txt").exists()
        assert (secondary / "test.txt").read_text() == "hello"

        creations = [e for e in seen if e.event_type == EventType.CREATED]
        assert len(creations) >= 1

    def test_propagate_file_deletion(self, tmp_path: Path) -> None:
        """A file deleted in the primary is removed from the secondary."""
        seen: list[SyncEvent] = []
        got_delete = Event()

        def record(event: SyncEvent) -> None:
            seen.append(event)
            if event.event_type == EventType.DELETED:
                got_delete.set()

        manager = SyncManager(on_sync_event=record)

        primary = tmp_path / "primary"
        secondary = tmp_path / "secondary"
        primary.mkdir()
        secondary.mkdir()

        # Seed the file in both replicas before watching starts.
        (primary / "delete.txt").write_text("to delete")
        (secondary / "delete.txt").write_text("to delete")

        manager.add_replica(primary, tmp_path / "primary.vault", is_primary=True)
        manager.add_replica(secondary, tmp_path / "secondary.vault")
        manager.start_watching()

        # Delete in primary, then wait for the deletion to propagate.
        (primary / "delete.txt").unlink()
        got_delete.wait(timeout=2.0)
        manager.stop_watching()

        assert not (secondary / "delete.txt").exists()

    def test_propagate_file_move(self, tmp_path: Path) -> None:
        """A rename in the primary is mirrored in the secondary."""
        seen: list[SyncEvent] = []
        got_move = Event()

        def record(event: SyncEvent) -> None:
            seen.append(event)
            if event.event_type == EventType.MOVED:
                got_move.set()

        manager = SyncManager(on_sync_event=record)

        primary = tmp_path / "primary"
        secondary = tmp_path / "secondary"
        primary.mkdir()
        secondary.mkdir()

        # Seed the file in both replicas before watching starts.
        (primary / "old.txt").write_text("content")
        (secondary / "old.txt").write_text("content")

        manager.add_replica(primary, tmp_path / "primary.vault", is_primary=True)
        manager.add_replica(secondary, tmp_path / "secondary.vault")
        manager.start_watching()

        # Rename in primary, then wait for the move to propagate.
        (primary / "old.txt").rename(primary / "new.txt")
        got_move.wait(timeout=2.0)
        manager.stop_watching()

        assert not (secondary / "old.txt").exists()
        assert (secondary / "new.txt").exists()

    def test_propagate_to_multiple_replicas(self, tmp_path: Path) -> None:
        """A created file is propagated to every secondary replica."""
        seen: list[SyncEvent] = []
        got_event = Event()

        def record(event: SyncEvent) -> None:
            seen.append(event)
            got_event.set()

        manager = SyncManager(on_sync_event=record)

        primary = tmp_path / "primary"
        secondary1 = tmp_path / "secondary1"
        secondary2 = tmp_path / "secondary2"
        primary.mkdir()
        secondary1.mkdir()
        secondary2.mkdir()

        manager.add_replica(primary, tmp_path / "primary.vault", is_primary=True)
        manager.add_replica(secondary1, tmp_path / "secondary1.vault")
        manager.add_replica(secondary2, tmp_path / "secondary2.vault")
        manager.start_watching()

        (primary / "multi.txt").write_text("multi content")

        got_event.wait(timeout=2.0)
        time.sleep(0.2)  # extra time for the remaining replicas to catch up
        manager.stop_watching()

        assert (secondary1 / "multi.txt").exists()
        assert (secondary2 / "multi.txt").exists()
|
||||
|
||||
|
||||
class TestSyncManagerManifestSync:
    """Tests for manifest-based synchronization."""

    def test_sync_from_manifest_new_file(self, tmp_path: Path) -> None:
        """A file present only in the source manifest is copied over."""
        manager = SyncManager()

        primary = tmp_path / "primary"
        secondary = tmp_path / "secondary"
        primary.mkdir()
        secondary.mkdir()
        (primary / ".vault").mkdir()
        (secondary / ".vault").mkdir()

        # Create file in primary
        (primary / "newfile.txt").write_text("new content")

        manager.add_replica(primary, tmp_path / "primary.vault", is_primary=True)
        manager.add_replica(secondary, tmp_path / "secondary.vault")

        # Source manifest knows the file; target manifest is empty.
        source_manifest = Manifest.create_new("Test", 100, str(tmp_path / "primary.vault"))
        source_manifest.add_file_from_path(primary, primary / "newfile.txt")

        target_manifest = Manifest.create_new("Test", 100, str(tmp_path / "secondary.vault"))

        # Sync
        synced = manager.sync_from_manifest(source_manifest, secondary, target_manifest)

        assert synced == 1
        assert (secondary / "newfile.txt").exists()
        assert (secondary / "newfile.txt").read_text() == "new content"

    def test_sync_from_manifest_newer_source(self, tmp_path: Path) -> None:
        """A newer source version overwrites the target's stale copy."""
        manager = SyncManager()

        primary = tmp_path / "primary"
        secondary = tmp_path / "secondary"
        primary.mkdir()
        secondary.mkdir()
        (primary / ".vault").mkdir()
        (secondary / ".vault").mkdir()

        # Create file in both with different content; sleep so the primary's
        # mtime is strictly newer than the secondary's.
        (secondary / "update.txt").write_text("old content")
        time.sleep(0.1)
        (primary / "update.txt").write_text("new content")

        manager.add_replica(primary, tmp_path / "primary.vault", is_primary=True)
        manager.add_replica(secondary, tmp_path / "secondary.vault")

        # Create manifests
        source_manifest = Manifest.create_new("Test", 100, str(tmp_path / "primary.vault"))
        source_manifest.add_file_from_path(primary, primary / "update.txt")

        target_manifest = Manifest.create_new("Test", 100, str(tmp_path / "secondary.vault"))
        target_manifest.add_file_from_path(secondary, secondary / "update.txt")

        # Sync
        synced = manager.sync_from_manifest(source_manifest, secondary, target_manifest)

        assert synced == 1
        assert (secondary / "update.txt").read_text() == "new content"

    def test_sync_from_manifest_deleted_file(self, tmp_path: Path) -> None:
        """A file absent from the source manifest is deleted from the target."""
        manager = SyncManager()

        primary = tmp_path / "primary"
        secondary = tmp_path / "secondary"
        primary.mkdir()
        secondary.mkdir()
        (primary / ".vault").mkdir()
        (secondary / ".vault").mkdir()

        # Create file only in secondary (simulating deletion in primary)
        (secondary / "deleted.txt").write_text("will be deleted")

        manager.add_replica(primary, tmp_path / "primary.vault", is_primary=True)
        manager.add_replica(secondary, tmp_path / "secondary.vault")

        # Create manifests - source has no files, target has one
        source_manifest = Manifest.create_new("Test", 100, str(tmp_path / "primary.vault"))

        target_manifest = Manifest.create_new("Test", 100, str(tmp_path / "secondary.vault"))
        target_manifest.add_file_from_path(secondary, secondary / "deleted.txt")

        # Sync
        synced = manager.sync_from_manifest(source_manifest, secondary, target_manifest)

        assert synced == 1
        assert not (secondary / "deleted.txt").exists()

    def test_full_sync(self, tmp_path: Path) -> None:
        """full_sync pushes every primary file to every secondary."""
        manager = SyncManager()

        primary = tmp_path / "primary"
        secondary1 = tmp_path / "secondary1"
        secondary2 = tmp_path / "secondary2"
        primary.mkdir()
        secondary1.mkdir()
        secondary2.mkdir()
        (primary / ".vault").mkdir()
        (secondary1 / ".vault").mkdir()
        (secondary2 / ".vault").mkdir()

        # Create files in primary
        (primary / "file1.txt").write_text("content1")
        (primary / "file2.txt").write_text("content2")

        manager.add_replica(primary, tmp_path / "primary.vault", is_primary=True)
        manager.add_replica(secondary1, tmp_path / "secondary1.vault")
        manager.add_replica(secondary2, tmp_path / "secondary2.vault")

        # Create and save primary manifest
        primary_manifest = Manifest.create_new("Test", 100, str(tmp_path / "primary.vault"))
        primary_manifest.add_file_from_path(primary, primary / "file1.txt")
        primary_manifest.add_file_from_path(primary, primary / "file2.txt")
        primary_manifest.save(primary)

        # Create empty manifests for secondaries
        Manifest.create_new("Test", 100, str(tmp_path / "secondary1.vault")).save(secondary1)
        Manifest.create_new("Test", 100, str(tmp_path / "secondary2.vault")).save(secondary2)

        # Full sync
        results = manager.full_sync()

        assert results[secondary1] == 2
        assert results[secondary2] == 2
        # BUG FIX: the original only spot-checked one file per secondary even
        # though both report 2 synced files; verify all four copies landed.
        assert (secondary1 / "file1.txt").read_text() == "content1"
        assert (secondary1 / "file2.txt").read_text() == "content2"
        assert (secondary2 / "file1.txt").read_text() == "content1"
        assert (secondary2 / "file2.txt").read_text() == "content2"
|
||||
# ==== tests/test_vault.py — new file, 233 lines ====
|
||||
"""Tests for vault module."""
|
||||
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from src.core.image_manager import create_sparse_image
|
||||
from src.core.vault import Vault, VaultError, VaultState
|
||||
|
||||
|
||||
@pytest.fixture
def vault_image(tmp_path: Path) -> Path:
    """Create a small (10 MB) test vault image."""
    image_path = tmp_path / "test.vault"
    # Keyword argument for consistency with the identical fixture in
    # tests/test_container.py (was a bare positional 10).
    create_sparse_image(image_path, size_mb=10)
    return image_path
|
||||
|
||||
|
||||
class TestVault:
    """Tests for Vault class."""

    def test_initial_state(self) -> None:
        """A new Vault starts closed with nothing mounted."""
        vault = Vault()
        assert vault.state == VaultState.CLOSED
        assert vault.is_open is False
        assert vault.mount_point is None
        assert vault.replica_count == 0

    @pytest.mark.integration
    def test_open_and_close(self, vault_image: Path) -> None:
        """open mounts the image; close unmounts and resets all state."""
        vault = Vault()

        # Open
        mount = vault.open(vault_image)
        assert vault.is_open
        assert vault.state == VaultState.OPEN
        assert vault.mount_point == mount
        assert mount.exists()

        # Close
        vault.close()
        assert vault.state == VaultState.CLOSED
        assert vault.is_open is False
        assert vault.mount_point is None

    @pytest.mark.integration
    def test_context_manager(self, vault_image: Path) -> None:
        """Leaving the context closes an open vault."""
        with Vault() as vault:
            vault.open(vault_image)
            assert vault.is_open

        assert vault.state == VaultState.CLOSED

    @pytest.mark.integration
    def test_state_change_callback(self, vault_image: Path) -> None:
        """The callback observes OPENING, OPEN and CLOSED transitions."""
        observed: list[VaultState] = []

        vault = Vault(on_state_change=observed.append)
        vault.open(vault_image)
        vault.close()

        assert VaultState.OPENING in observed
        assert VaultState.OPEN in observed
        assert VaultState.CLOSED in observed

    @pytest.mark.integration
    def test_open_creates_manifest(self, vault_image: Path) -> None:
        """Opening a fresh image writes a manifest named after the file."""
        vault = Vault()
        mount = vault.open(vault_image)

        assert vault.manifest is not None
        assert vault.manifest.vault_name == "test"  # from filename
        assert (mount / ".vault" / "manifest.json").exists()

        vault.close()

    @pytest.mark.integration
    def test_open_already_open_raises(self, vault_image: Path) -> None:
        """Opening twice without closing raises VaultError."""
        vault = Vault()
        vault.open(vault_image)

        with pytest.raises(VaultError, match="already open"):
            vault.open(vault_image)

        vault.close()

    @pytest.mark.integration
    def test_get_replicas(self, vault_image: Path) -> None:
        """An open vault reports a single mounted primary replica."""
        vault = Vault()
        vault.open(vault_image)

        replicas = vault.get_replicas()
        assert len(replicas) == 1
        assert replicas[0].is_primary is True
        assert replicas[0].is_mounted is True
        assert replicas[0].image_path == vault_image

        vault.close()
|
||||
|
||||
|
||||
class TestVaultLocking:
    """Tests for the on-disk lock guarding a vault image."""

    @pytest.mark.integration
    def test_vault_is_locked_when_open(self, vault_image: Path) -> None:
        """A hidden ``.<stem>.lock`` file exists while open, gone after close."""
        vault = Vault()
        vault.open(vault_image)

        lock_path = vault_image.parent / f".{vault_image.stem}.lock"
        try:
            # Lock file should exist while the vault is open.
            assert lock_path.exists()
        finally:
            # Close even on assertion failure so the lock never leaks.
            vault.close()

        # Lock should be released
        assert not lock_path.exists()

    @pytest.mark.integration
    def test_second_vault_cannot_open_locked(self, vault_image: Path) -> None:
        """A second Vault instance cannot open an image that is already locked."""
        vault1 = Vault()
        vault1.open(vault_image)
        try:
            vault2 = Vault()
            with pytest.raises(VaultError, match="locked"):
                vault2.open(vault_image)
        finally:
            vault1.close()
|
||||
|
||||
|
||||
class TestVaultReplicas:
    """Tests for adding and removing vault replicas."""

    @pytest.mark.integration
    def test_add_replica(self, vault_image: Path, tmp_path: Path) -> None:
        """Adding a replica creates its image and copies existing files over."""
        vault = Vault()
        vault.open(vault_image)
        try:
            # Create a file in the primary before adding the replica.
            (vault.mount_point / "test.txt").write_text("hello")  # type: ignore

            replica_path = tmp_path / "replica.vault"
            replica_mount = vault.add_replica(replica_path)

            assert vault.replica_count == 2
            assert replica_path.exists()
            assert (replica_mount / "test.txt").exists()
            assert (replica_mount / "test.txt").read_text() == "hello"
        finally:
            # Close even on assertion failure so mounts/locks never leak.
            vault.close()

    @pytest.mark.integration
    def test_remove_replica(self, vault_image: Path, tmp_path: Path) -> None:
        """Removing a previously added replica drops it from the replica set."""
        vault = Vault()
        vault.open(vault_image)
        try:
            replica_path = tmp_path / "replica.vault"
            vault.add_replica(replica_path)
            assert vault.replica_count == 2

            vault.remove_replica(replica_path)
            assert vault.replica_count == 1
        finally:
            vault.close()

    @pytest.mark.integration
    def test_cannot_remove_primary(self, vault_image: Path) -> None:
        """The primary image cannot be removed as if it were a replica."""
        vault = Vault()
        vault.open(vault_image)
        try:
            with pytest.raises(VaultError, match="primary"):
                vault.remove_replica(vault_image)
        finally:
            vault.close()
|
||||
|
||||
|
||||
class TestVaultSync:
    """Tests for file propagation between the primary and its replicas."""

    @pytest.mark.integration
    def test_file_propagation(self, vault_image: Path, tmp_path: Path) -> None:
        """A file written to the primary appears in the replica shortly after."""
        vault = Vault()
        vault.open(vault_image)
        try:
            replica_path = tmp_path / "replica.vault"
            replica_mount = vault.add_replica(replica_path)

            # Create file in primary - should propagate to replica.
            (vault.mount_point / "sync_test.txt").write_text("synced content")  # type: ignore
            time.sleep(0.5)  # Wait for the background sync to run

            assert (replica_mount / "sync_test.txt").exists()
            assert (replica_mount / "sync_test.txt").read_text() == "synced content"
        finally:
            # Close even on assertion failure so mounts/locks never leak.
            vault.close()

    @pytest.mark.integration
    def test_manual_sync(self, vault_image: Path, tmp_path: Path) -> None:
        """add_replica syncs pre-existing files; vault.sync() flushes pending ones."""
        vault = Vault()
        vault.open(vault_image)
        try:
            # File created before the replica exists must be copied on add.
            (vault.mount_point / "existing.txt").write_text("existing")  # type: ignore

            replica_path = tmp_path / "replica.vault"
            replica_mount = vault.add_replica(replica_path)
            assert (replica_mount / "existing.txt").exists()

            # Create another file
            (vault.mount_point / "new.txt").write_text("new")  # type: ignore

            # Pause background sync, write, then resume and sync manually.
            vault._sync_manager.pause_sync()  # type: ignore
            (vault.mount_point / "paused.txt").write_text("paused")  # type: ignore
            time.sleep(0.3)

            vault._sync_manager.resume_sync()  # type: ignore
            vault.sync()

            # After an explicit sync the paused-era file must be in the replica.
            assert (replica_mount / "paused.txt").exists()
        finally:
            vault.close()
|
||||
Reference in New Issue
Block a user