From e4865aa09401052414c8a825c7309eb31ee39161 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 28 Jun 2023 11:45:07 +0000
Subject: [PATCH] fix(services): proper backup progress reporting

---
 .../backup/backuppers/restic_backupper.py | 10 +++---
 tests/test_graphql/test_backup.py         | 32 +++++++++++++++++--
 2 files changed, 36 insertions(+), 6 deletions(-)

diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index e5d7955..ad163ea 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -127,19 +127,21 @@ class ResticBackupper(AbstractBackupper):
             return ResticBackupper._snapshot_from_fresh_summary(message, repo_name)
         raise ValueError("no summary message in restic json output")
 
-    def parse_message(self, raw_message, job=None) -> object:
-        message = ResticBackupper.parse_json_output(raw_message)
+    def parse_message(self, raw_message_line: str, job=None) -> dict:
+        message = ResticBackupper.parse_json_output(raw_message_line)
+        if not isinstance(message, dict):
+            raise ValueError("we have too many messages on one line?")
         if message["message_type"] == "status":
             if job is not None:  # only update status if we run under some job
                 Jobs.update(
                     job,
                     JobStatus.RUNNING,
-                    progress=int(message["percent_done"]),
+                    progress=int(message["percent_done"] * 100),
                 )
         return message
 
     @staticmethod
-    def _snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot:
+    def _snapshot_from_fresh_summary(message: dict, repo_name) -> Snapshot:
         return Snapshot(
             id=message["snapshot_id"],
             created_at=datetime.datetime.now(datetime.timezone.utc),
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index bf1be69..0ab2136 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -3,6 +3,7 @@ import os.path as path
 from os import makedirs
 from os import remove
 from os import listdir
+from os import urandom
 from datetime import datetime, timedelta, timezone
 
 import selfprivacy_api.services as services
@@ -259,9 +260,18 @@ def assert_job_has_run(job_type):
     assert JobStatus.RUNNING in Jobs.status_updates(job)
 
 
-def assert_job_had_progress(job_type):
+def job_progress_updates(job_type):
     job = [job for job in finished_jobs() if job.type_id == job_type][0]
-    assert len(Jobs.progress_updates(job)) > 0
+    return Jobs.progress_updates(job)
+
+
+def assert_job_had_progress(job_type):
+    assert len(job_progress_updates(job_type)) > 0
+
+
+def make_large_file(path: str, bytes: int):
+    with open(path, "wb") as file:
+        file.write(urandom(bytes))
 
 
 def test_snapshots_by_id(backups, dummy_service):
@@ -290,6 +300,24 @@ def test_backup_service_task(backups, dummy_service):
     assert_job_had_progress(job_type_id)
 
 
+def test_backup_larger_file(backups, dummy_service):
+    dir = path.join(dummy_service.get_folders()[0], "LARGEFILE")
+    mega = 2**20
+    make_large_file(dir, 10 * mega)
+
+    handle = start_backup(dummy_service)
+    handle(blocking=True)
+
+    # results will be slightly different on different machines. if someone has troubles with it on their machine, consider dropping this test.
+    id = dummy_service.get_id()
+    job_type_id = f"services.{id}.backup"
+    assert_job_finished(job_type_id, count=1)
+    assert_job_has_run(job_type_id)
+    updates = job_progress_updates(job_type_id)
+    assert len(updates) > 3
+    assert updates[1] > 10
+
+
 def test_restore_snapshot_task(backups, dummy_service):
     Backups.back_up(dummy_service)
     snaps = Backups.get_snapshots(dummy_service)