diff --git a/flake.nix b/flake.nix
index f1a317a..f82fcf5 100644
--- a/flake.nix
+++ b/flake.nix
@@ -19,7 +19,14 @@
           pytest
           pytest-datadir
           pytest-mock
-        ]));
+          black
+          mypy
+          pylsp-mypy
+          python-lsp-black
+          python-lsp-server
+          typer # for strawberry
+        ] ++ strawberry-graphql.optional-dependencies.cli));
+      vmtest-src-dir = "/root/source";
 
       shellMOTD = ''
         Welcome to SP API development shell!
@@ -72,13 +79,15 @@
       devShells.${system}.default = pkgs.mkShell {
         name = "SP API dev shell";
         packages = with pkgs; [
-          python-env
+          nixpkgs-fmt
           rclone
           redis
           restic
           self.packages.${system}.pytest-vm
           # FIXME consider loading this explicitly only after ArchLinux issue is solved
           self.checks.x86_64-linux.default.driverInteractive
+          # the target API application python environment
+          python-env
         ];
         shellHook = ''
           # envs set with export and as attributes are treated differently.
diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 1c4c4e0..e4b5db7 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -254,7 +254,7 @@ class Backups:
                 reason=reason,
             )
 
-            Backups._store_last_snapshot(service_name, snapshot)
+            Backups._on_new_snapshot_created(service_name, snapshot)
             if reason == BackupReason.AUTO:
                 Backups._prune_auto_snaps(service)
             service.post_restore()
@@ -265,7 +265,16 @@ class Backups:
         Jobs.update(job, status=JobStatus.FINISHED)
         if reason in [BackupReason.AUTO, BackupReason.PRE_RESTORE]:
             Jobs.set_expiration(job, AUTOBACKUP_JOB_EXPIRATION_SECONDS)
-        return snapshot
+        return Backups.sync_date_from_cache(snapshot)
+
+    @staticmethod
+    def sync_date_from_cache(snapshot: Snapshot) -> Snapshot:
+        """
+        Our snapshot creation dates are different from those on server by a tiny amount.
+        This is a convenience, maybe it is better to write a special comparison
+        function for snapshots
+        """
+        return Storage.get_cached_snapshot_by_id(snapshot.id)
 
     @staticmethod
     def _auto_snaps(service):
@@ -523,13 +532,12 @@ class Backups:
     @staticmethod
     def get_all_snapshots() -> List[Snapshot]:
         """Returns all snapshots"""
-        cached_snapshots = Storage.get_cached_snapshots()
-        if cached_snapshots:
-            return cached_snapshots
-        # TODO: the oldest snapshots will get expired faster than the new ones.
-        # How to detect that the end is missing?
+        # When we refresh our cache:
+        # 1. Manually
+        # 2. On timer
+        # 3. On new snapshot
+        # 4. On snapshot deletion
 
-        Backups.force_snapshot_cache_reload()
         return Storage.get_cached_snapshots()
 
     @staticmethod
@@ -548,21 +556,18 @@ class Backups:
     @staticmethod
     def forget_snapshots(snapshots: List[Snapshot]) -> None:
         """
-        Deletes a batch of snapshots from the repo and from cache
+        Deletes a batch of snapshots from the repo and syncs cache
         Optimized
         """
         ids = [snapshot.id for snapshot in snapshots]
         Backups.provider().backupper.forget_snapshots(ids)
 
-        # less critical
-        for snapshot in snapshots:
-            Storage.delete_cached_snapshot(snapshot)
+        Backups.force_snapshot_cache_reload()
 
     @staticmethod
     def forget_snapshot(snapshot: Snapshot) -> None:
         """Deletes a snapshot from the repo and from cache"""
-        Backups.provider().backupper.forget_snapshot(snapshot.id)
-        Storage.delete_cached_snapshot(snapshot)
+        Backups.forget_snapshots([snapshot])
 
     @staticmethod
     def forget_all_snapshots():
@@ -593,12 +598,11 @@ class Backups:
         )
 
     @staticmethod
-    def _store_last_snapshot(service_id: str, snapshot: Snapshot) -> None:
+    def _on_new_snapshot_created(service_id: str, snapshot: Snapshot) -> None:
         """What do we do with a snapshot that is just made?"""
         # non-expiring timestamp of the last
         Storage.store_last_timestamp(service_id, snapshot)
-        # expiring cache entry
-        Storage.cache_snapshot(snapshot)
+        Backups.force_snapshot_cache_reload()
 
     # Autobackup
diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py
index 6520c70..465b1a8 100644
--- a/selfprivacy_api/backup/tasks.py
+++ b/selfprivacy_api/backup/tasks.py
@@ -82,6 +82,6 @@ def automatic_backup():
         start_backup(service, BackupReason.AUTO)
 
 
-@huey.periodic_task(crontab(hour=SNAPSHOT_CACHE_TTL_HOURS))
+@huey.periodic_task(crontab(hour="*/" + str(SNAPSHOT_CACHE_TTL_HOURS)))
 def reload_snapshot_cache():
     Backups.force_snapshot_cache_reload()
diff --git a/tests/test_backup.py b/tests/test_backup.py
index 23569eb..0a2b3ed 100644
--- a/tests/test_backup.py
+++ b/tests/test_backup.py
@@ -165,7 +165,7 @@ def test_reinit_after_purge(backups):
     Backups.erase_repo()
     assert Backups.is_initted() is False
     with pytest.raises(ValueError):
-        Backups.get_all_snapshots()
+        Backups.force_snapshot_cache_reload()
 
     Backups.init_repo()
     assert Backups.is_initted() is True
@@ -209,7 +209,11 @@ def test_backup_returns_snapshot(backups, dummy_service):
     snapshot = provider.backupper.start_backup(service_folders, name)
 
     assert snapshot.id is not None
-    assert len(snapshot.id) == len(Backups.get_all_snapshots()[0].id)
+
+    snapshots = provider.backupper.get_snapshots()
+    assert snapshots != []
+
+    assert len(snapshot.id) == len(snapshots[0].id)
     assert Backups.get_snapshot_by_id(snapshot.id) is not None
     assert snapshot.service_name == name
     assert snapshot.created_at is not None
@@ -468,14 +472,46 @@ def test_snapshots_caching(backups, dummy_service):
     cached_snapshots = Storage.get_cached_snapshots()
     assert len(cached_snapshots) == 1
 
-    Storage.delete_cached_snapshot(cached_snapshots[0])
+    snap_to_uncache = cached_snapshots[0]
+    Storage.delete_cached_snapshot(snap_to_uncache)
     cached_snapshots = Storage.get_cached_snapshots()
     assert len(cached_snapshots) == 0
 
+    # We do not assume that no snapshots means we need to reload the cache
     snapshots = Backups.get_snapshots(dummy_service)
-    assert len(snapshots) == 1
+    assert len(snapshots) == 0
+    # No cache reload happened
+    cached_snapshots = Storage.get_cached_snapshots()
+    assert len(cached_snapshots) == 0
+
+
+# Storage
+def test_snapshot_cache_autoreloads(backups, dummy_service):
+    Backups.back_up(dummy_service)
+    cached_snapshots = Storage.get_cached_snapshots()
+    assert len(cached_snapshots) == 1
+    snap_to_uncache = cached_snapshots[0]
+
+    Storage.delete_cached_snapshot(snap_to_uncache)
+    cached_snapshots = Storage.get_cached_snapshots()
+    assert len(cached_snapshots) == 0
+
+    # When we create a snapshot we do reload cache
+    Backups.back_up(dummy_service)
+    cached_snapshots = Storage.get_cached_snapshots()
+    assert len(cached_snapshots) == 2
+    assert snap_to_uncache in cached_snapshots
+
+    Storage.delete_cached_snapshot(snap_to_uncache)
+    cached_snapshots = Storage.get_cached_snapshots()
+    assert len(cached_snapshots) == 1
+
+    # When we try to delete a snapshot we cannot find in cache, it is ok and we do reload cache
+    Backups.forget_snapshot(snap_to_uncache)
+    cached_snapshots = Storage.get_cached_snapshots()
+    assert len(cached_snapshots) == 1
+    assert snap_to_uncache not in cached_snapshots
 
 
 def lowlevel_forget(snapshot_id):
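
Note on the tasks.py hunk above: in a cron-style hour field, a bare value such as 24 means "run when the hour equals 24", which never matches a real clock hour (0-23) or may be rejected outright, while "*/24" means "run at every hour divisible by 24", i.e. once a day at hour 0. The sketch below is not huey's implementation, only a minimal illustration of that field semantics; it assumes SNAPSHOT_CACHE_TTL_HOURS == 24 and uses a hypothetical hour_matches helper.

# Illustrative sketch of cron-style hour-field matching (not huey's code).
def hour_matches(spec: str, hour: int) -> bool:
    """True if `hour` (0-23) satisfies a cron hour field like '5' or '*/24'."""
    if spec.startswith("*/"):
        return hour % int(spec[2:]) == 0
    return hour == int(spec)

SNAPSHOT_CACHE_TTL_HOURS = 24  # assumed value, for illustration only

# Old schedule, crontab(hour=SNAPSHOT_CACHE_TTL_HOURS), yields the field "24": no hour ever matches.
assert not any(hour_matches(str(SNAPSHOT_CACHE_TTL_HOURS), h) for h in range(24))

# New schedule, crontab(hour="*/" + str(SNAPSHOT_CACHE_TTL_HOURS)), yields "*/24":
# it matches hour 0 only, so reload_snapshot_cache actually fires once a day.
assert [h for h in range(24) if hour_matches("*/" + str(SNAPSHOT_CACHE_TTL_HOURS), h)] == [0]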