Merge commit '4757bedc4ec62d3577fd1f259abbe34ba6dce893' into system-rebuild-tracking

pull/98/head
Inex Code 2024-02-26 18:27:54 +03:00
commit d8666fa179
4 changed files with 73 additions and 24 deletions

File 1 of 4: Nix flake dev shell

@@ -19,7 +19,14 @@
       pytest
       pytest-datadir
       pytest-mock
-    ]));
+      black
+      mypy
+      pylsp-mypy
+      python-lsp-black
+      python-lsp-server
+      typer # for strawberry
+    ] ++ strawberry-graphql.optional-dependencies.cli));
     vmtest-src-dir = "/root/source";
     shellMOTD = ''
       Welcome to SP API development shell!
@@ -72,13 +79,15 @@
       devShells.${system}.default = pkgs.mkShell {
         name = "SP API dev shell";
         packages = with pkgs; [
-          python-env
+          nixpkgs-fmt
           rclone
           redis
           restic
           self.packages.${system}.pytest-vm
           # FIXME consider loading this explicitly only after ArchLinux issue is solved
           self.checks.x86_64-linux.default.driverInteractive
+          # the target API application python environment
+          python-env
         ];
         shellHook = ''
           # envs set with export and as attributes are treated differently.

File 2 of 4: backup module (class Backups)

@@ -254,7 +254,7 @@ class Backups:
             reason=reason,
         )
-        Backups._store_last_snapshot(service_name, snapshot)
+        Backups._on_new_snapshot_created(service_name, snapshot)
         if reason == BackupReason.AUTO:
             Backups._prune_auto_snaps(service)
         service.post_restore()
@@ -265,7 +265,16 @@
         Jobs.update(job, status=JobStatus.FINISHED)
         if reason in [BackupReason.AUTO, BackupReason.PRE_RESTORE]:
             Jobs.set_expiration(job, AUTOBACKUP_JOB_EXPIRATION_SECONDS)
-        return snapshot
+        return Backups.sync_date_from_cache(snapshot)
+
+    @staticmethod
+    def sync_date_from_cache(snapshot: Snapshot) -> Snapshot:
+        """
+        Our snapshot creation dates are different from those on server by a tiny amount.
+        This is a convenience, maybe it is better to write a special comparison
+        function for snapshots
+        """
+        return Storage.get_cached_snapshot_by_id(snapshot.id)
 
     @staticmethod
     def _auto_snaps(service):
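
The docstring above points at an alternative design: rather than substituting the cached snapshot, snapshots could be compared with a small timestamp tolerance. A minimal sketch of such a comparison, assuming a Snapshot model with id, service_name and created_at fields (as used elsewhere in this diff); the helper name and the tolerance value are hypothetical, not part of this commit:

from datetime import timedelta

# Hypothetical helper: treat two snapshots as equal when their metadata
# matches and created_at differs only by a small clock skew.
SNAPSHOT_DATE_TOLERANCE = timedelta(seconds=1)  # assumed value, for illustration

def snapshots_equivalent(a, b, tolerance=SNAPSHOT_DATE_TOLERANCE) -> bool:
    return (
        a.id == b.id
        and a.service_name == b.service_name
        and abs(a.created_at - b.created_at) <= tolerance
    )
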
@@ -523,13 +532,12 @@ class Backups:
     @staticmethod
     def get_all_snapshots() -> List[Snapshot]:
         """Returns all snapshots"""
-        cached_snapshots = Storage.get_cached_snapshots()
-        if cached_snapshots:
-            return cached_snapshots
-        # TODO: the oldest snapshots will get expired faster than the new ones.
-        # How to detect that the end is missing?
-        Backups.force_snapshot_cache_reload()
+        # When we refresh our cache:
+        # 1. Manually
+        # 2. On timer
+        # 3. On new snapshot
+        # 4. On snapshot deletion
         return Storage.get_cached_snapshots()
 
     @staticmethod
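
Note on the change above: previously an empty cache was treated as stale and triggered an implicit reload; now the cache is refreshed only on the four events listed in the comment, so an empty cache simply means there are no snapshots. A sketch of the two read policies, using stand-in callables rather than the real Storage and Backups methods:

def get_all_snapshots_old(read_cache, reload_cache):
    # old behaviour: an empty cache triggers a (possibly expensive) repo listing
    snapshots = read_cache()
    if snapshots:
        return snapshots
    reload_cache()
    return read_cache()

def get_all_snapshots_new(read_cache):
    # new behaviour: the cache is refreshed only by explicit events
    # (manual refresh, timer, snapshot creation, snapshot deletion)
    return read_cache()
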
@@ -548,21 +556,18 @@
     @staticmethod
     def forget_snapshots(snapshots: List[Snapshot]) -> None:
         """
-        Deletes a batch of snapshots from the repo and from cache
+        Deletes a batch of snapshots from the repo and syncs cache
         Optimized
         """
         ids = [snapshot.id for snapshot in snapshots]
         Backups.provider().backupper.forget_snapshots(ids)
-        # less critical
-        for snapshot in snapshots:
-            Storage.delete_cached_snapshot(snapshot)
+        Backups.force_snapshot_cache_reload()
 
     @staticmethod
     def forget_snapshot(snapshot: Snapshot) -> None:
         """Deletes a snapshot from the repo and from cache"""
-        Backups.provider().backupper.forget_snapshot(snapshot.id)
-        Storage.delete_cached_snapshot(snapshot)
+        Backups.forget_snapshots([snapshot])
 
     @staticmethod
     def forget_all_snapshots():
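
The batch path above now issues one backupper call followed by one cache resync, and single-snapshot deletion reuses it, instead of deleting cache entries one by one. A sketch of that shape with stand-in callables (the real code calls Backups.provider().backupper.forget_snapshots() and Backups.force_snapshot_cache_reload()):

from typing import Callable, List

def forget_batch(ids: List[str],
                 forget_many: Callable[[List[str]], None],
                 reload_cache: Callable[[], None]) -> None:
    forget_many(ids)   # one round trip to the backup repo
    reload_cache()     # one cache resync instead of N per-snapshot deletes

def forget_one(snapshot_id: str, forget_many, reload_cache) -> None:
    # a single deletion is just a batch of one, so both paths stay consistent
    forget_batch([snapshot_id], forget_many, reload_cache)
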
@@ -593,12 +598,11 @@ class Backups:
         )
 
     @staticmethod
-    def _store_last_snapshot(service_id: str, snapshot: Snapshot) -> None:
+    def _on_new_snapshot_created(service_id: str, snapshot: Snapshot) -> None:
         """What do we do with a snapshot that is just made?"""
         # non-expiring timestamp of the last
         Storage.store_last_timestamp(service_id, snapshot)
-        # expiring cache entry
-        Storage.cache_snapshot(snapshot)
+        Backups.force_snapshot_cache_reload()
 
     # Autobackup

File 3 of 4: periodic backup tasks (huey)

@@ -82,6 +82,6 @@ def automatic_backup():
         start_backup(service, BackupReason.AUTO)
 
 
-@huey.periodic_task(crontab(hour=SNAPSHOT_CACHE_TTL_HOURS))
+@huey.periodic_task(crontab(hour="*/" + str(SNAPSHOT_CACHE_TTL_HOURS)))
 def reload_snapshot_cache():
     Backups.force_snapshot_cache_reload()
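
The fix above changes the hour field of the cron schedule: a bare integer restricts the task to that single hour of the day, while the "*/N" step expression matches every N-th hour, which is what a TTL-based reload wants. A small illustration, assuming SNAPSHOT_CACHE_TTL_HOURS = 6 (hypothetical value, for illustration only):

SNAPSHOT_CACHE_TTL_HOURS = 6  # assumed here only for illustration

old_hour_field = SNAPSHOT_CACHE_TTL_HOURS               # 6 -> matches hour 6 only
new_hour_field = "*/" + str(SNAPSHOT_CACHE_TTL_HOURS)   # "*/6" -> every 6 hours

# hours of the day matched by the step expression
matching_hours = [h for h in range(24) if h % SNAPSHOT_CACHE_TTL_HOURS == 0]
print(new_hour_field, matching_hours)  # */6 [0, 6, 12, 18]
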

File 4 of 4: backup tests

@@ -165,7 +165,7 @@ def test_reinit_after_purge(backups):
     Backups.erase_repo()
     assert Backups.is_initted() is False
     with pytest.raises(ValueError):
-        Backups.get_all_snapshots()
+        Backups.force_snapshot_cache_reload()
 
     Backups.init_repo()
     assert Backups.is_initted() is True
@@ -209,7 +209,11 @@ def test_backup_returns_snapshot(backups, dummy_service):
     snapshot = provider.backupper.start_backup(service_folders, name)
 
     assert snapshot.id is not None
-    assert len(snapshot.id) == len(Backups.get_all_snapshots()[0].id)
+
+    snapshots = provider.backupper.get_snapshots()
+    assert snapshots != []
+
+    assert len(snapshot.id) == len(snapshots[0].id)
     assert Backups.get_snapshot_by_id(snapshot.id) is not None
     assert snapshot.service_name == name
     assert snapshot.created_at is not None
@@ -468,14 +472,46 @@ def test_snapshots_caching(backups, dummy_service):
     cached_snapshots = Storage.get_cached_snapshots()
     assert len(cached_snapshots) == 1
 
-    Storage.delete_cached_snapshot(cached_snapshots[0])
+    snap_to_uncache = cached_snapshots[0]
+    Storage.delete_cached_snapshot(snap_to_uncache)
     cached_snapshots = Storage.get_cached_snapshots()
     assert len(cached_snapshots) == 0
 
+    # We do not assume that no snapshots means we need to reload the cache
     snapshots = Backups.get_snapshots(dummy_service)
-    assert len(snapshots) == 1
+    assert len(snapshots) == 0
+    # No cache reload happened
+    cached_snapshots = Storage.get_cached_snapshots()
+    assert len(cached_snapshots) == 0
+
+
+# Storage
+def test_snapshot_cache_autoreloads(backups, dummy_service):
+    Backups.back_up(dummy_service)
+
     cached_snapshots = Storage.get_cached_snapshots()
     assert len(cached_snapshots) == 1
+    snap_to_uncache = cached_snapshots[0]
+
+    Storage.delete_cached_snapshot(snap_to_uncache)
+    cached_snapshots = Storage.get_cached_snapshots()
+    assert len(cached_snapshots) == 0
+
+    # When we create a snapshot we do reload cache
+    Backups.back_up(dummy_service)
+    cached_snapshots = Storage.get_cached_snapshots()
+    assert len(cached_snapshots) == 2
+    assert snap_to_uncache in cached_snapshots
+
+    Storage.delete_cached_snapshot(snap_to_uncache)
+    cached_snapshots = Storage.get_cached_snapshots()
+    assert len(cached_snapshots) == 1
+
+    # When we try to delete a snapshot we cannot find in cache, it is ok and we do reload cache
+    Backups.forget_snapshot(snap_to_uncache)
+    cached_snapshots = Storage.get_cached_snapshots()
+    assert len(cached_snapshots) == 1
+    assert snap_to_uncache not in cached_snapshots
 
 
 def lowlevel_forget(snapshot_id):