From 80f2c99ba4fd28a09730e83ce831d4c1fb94180a Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 18 Jan 2023 09:33:14 +0000
Subject: [PATCH 001/537] refactor(backup): delete unused import

---
 selfprivacy_api/restic_controller/__init__.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py
index b4efba2..803d469 100644
--- a/selfprivacy_api/restic_controller/__init__.py
+++ b/selfprivacy_api/restic_controller/__init__.py
@@ -3,7 +3,6 @@ from datetime import datetime
 import json
 import subprocess
 import os
-from threading import Lock
 from enum import Enum
 import portalocker
 from selfprivacy_api.utils import ReadUserData

From 060117a3559df03d57734e6a5c5c19a4dff839d7 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 18 Jan 2023 09:40:04 +0000
Subject: [PATCH 002/537] refactor(backup): extract rclone args

---
 selfprivacy_api/restic_controller/__init__.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py
index 803d469..a5a6634 100644
--- a/selfprivacy_api/restic_controller/__init__.py
+++ b/selfprivacy_api/restic_controller/__init__.py
@@ -90,7 +90,7 @@ class ResticController(metaclass=SingletonMetaclass):
         backup_listing_command = [
             "restic",
             "-o",
-            "rclone.args=serve restic --stdio",
+            self.rclone_args(),
             "-r",
             f"rclone:backblaze:{self._repository_name}/sfbackup",
             "snapshots",
             "--json",
         ]
@@ -122,6 +122,9 @@ class ResticController(metaclass=SingletonMetaclass):
                 self.error_message = snapshots_list
                 return
 
+    def rclone_args(self):
+        return "rclone.args=serve restic --stdio"
+
     def initialize_repository(self):
         """
         Initialize repository with restic
@@ -129,7 +132,7 @@ class ResticController(metaclass=SingletonMetaclass):
         initialize_repository_command = [
             "restic",
             "-o",
-            "rclone.args=serve restic --stdio",
+            self.rclone_args(),
             "-r",
             f"rclone:backblaze:{self._repository_name}/sfbackup",
             "init",
         ]
@@ -158,7 +161,7 @@ class ResticController(metaclass=SingletonMetaclass):
         backup_command = [
             "restic",
             "-o",
-            "rclone.args=serve restic --stdio",
+            self.rclone_args(),
             "-r",
             f"rclone:backblaze:{self._repository_name}/sfbackup",
             "--verbose",
@@ -227,7 +230,7 @@ class ResticController(metaclass=SingletonMetaclass):
         backup_restoration_command = [
             "restic",
             "-o",
-            "rclone.args=serve restic --stdio",
+            self.rclone_args(),
             "-r",
             f"rclone:backblaze:{self._repository_name}/sfbackup",
             "restore",

From 487195853411c83ad74901ef955b135e5c6af9b7 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 18 Jan 2023 09:49:02 +0000
Subject: [PATCH 003/537] refactor(backup): extract restic repo

---
 selfprivacy_api/restic_controller/__init__.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py
index a5a6634..592b651 100644
--- a/selfprivacy_api/restic_controller/__init__.py
+++ b/selfprivacy_api/restic_controller/__init__.py
@@ -92,7 +92,7 @@ class ResticController(metaclass=SingletonMetaclass):
             "-o",
             self.rclone_args(),
             "-r",
-            f"rclone:backblaze:{self._repository_name}/sfbackup",
+            self.restic_repo(),
             "snapshots",
             "--json",
         ]
@@ -122,6 +122,9 @@ class ResticController(metaclass=SingletonMetaclass):
                 self.error_message = snapshots_list
                 return
 
+    def restic_repo(self):
+        return f"rclone:backblaze:{self._repository_name}/sfbackup"
+
     def rclone_args(self):
         return "rclone.args=serve restic --stdio"
 
     def initialize_repository(self):
         """
         Initialize repository with restic
         """
@@ -134,7 +137,7 @@ class ResticController(metaclass=SingletonMetaclass):
             "-o",
             self.rclone_args(),
             "-r",
-            f"rclone:backblaze:{self._repository_name}/sfbackup",
+            self.restic_repo(),
             "init",
         ]
         with subprocess.Popen(
@@ -163,7 +166,7 @@ class ResticController(metaclass=SingletonMetaclass):
             "-o",
             self.rclone_args(),
             "-r",
-            f"rclone:backblaze:{self._repository_name}/sfbackup",
+            self.restic_repo(),
             "--verbose",
             "--json",
             "backup",
@@ -232,7 +235,7 @@ class ResticController(metaclass=SingletonMetaclass):
             "-o",
             self.rclone_args(),
             "-r",
-            f"rclone:backblaze:{self._repository_name}/sfbackup",
+            self.restic_repo(),
             "restore",
             snapshot_id,
             "--target",

From cf3cbd179f75c84a9650338ea7eeb31fd2f004f9 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 18 Jan 2023 10:07:04 +0000
Subject: [PATCH 004/537] refactor(backup): pass key and account to exec

---
 selfprivacy_api/restic_controller/__init__.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py
index 592b651..0f6ad61 100644
--- a/selfprivacy_api/restic_controller/__init__.py
+++ b/selfprivacy_api/restic_controller/__init__.py
@@ -126,7 +126,10 @@ class ResticController(metaclass=SingletonMetaclass):
         return f"rclone:backblaze:{self._repository_name}/sfbackup"
 
     def rclone_args(self):
-        return "rclone.args=serve restic --stdio"
+        return "rclone.args=serve restic --stdio" + self.backend_rclone_args()
+
+    def backend_rclone_args(self):
+        return f"--b2-account {self._backblaze_account} --b2-key {self._backblaze_key}"
 
     def initialize_repository(self):
         """

From 740b072b079ab582d55837f31c5878c8bf19cc61 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 18 Jan 2023 12:43:45 +0000
Subject: [PATCH 005/537] refactor(backup): do not use config file

---
 selfprivacy_api/restic_controller/__init__.py | 25 +++----------------
 1 file changed, 3 insertions(+), 22 deletions(-)

diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py
index 0f6ad61..4ac84e8 100644
--- a/selfprivacy_api/restic_controller/__init__.py
+++ b/selfprivacy_api/restic_controller/__init__.py
@@ -4,7 +4,6 @@ import json
 import subprocess
 import os
 from enum import Enum
-import portalocker
 from selfprivacy_api.utils import ReadUserData
 
 from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass
@@ -50,7 +49,6 @@ class ResticController(metaclass=SingletonMetaclass):
         self.error_message = None
         self._initialized = True
         self.load_configuration()
-        self.write_rclone_config()
         self.load_snapshots()
 
     def load_configuration(self):
@@ -64,25 +62,6 @@ class ResticController(metaclass=SingletonMetaclass):
         else:
             self.state = ResticStates.NO_KEY
 
-    def write_rclone_config(self):
-        """
-        Open /root/.config/rclone/rclone.conf with portalocker
-        and write configuration in the following format:
-        [backblaze]
-        type = b2
-        account = {self.backblaze_account}
-        key = {self.backblaze_key}
-        """
-        with portalocker.Lock(
-            "/root/.config/rclone/rclone.conf", "w", timeout=None
-        ) as rclone_config:
-            rclone_config.write(
-                f"[backblaze]\n"
-                f"type = b2\n"
-                f"account = {self._backblaze_account}\n"
-                f"key = {self._backblaze_key}\n"
-            )
-
     def load_snapshots(self):
         """
         Load list of snapshots from repository
@@ -123,7 +102,9 @@ class ResticController(metaclass=SingletonMetaclass):
                 return
 
     def restic_repo(self):
-        return f"rclone:backblaze:{self._repository_name}/sfbackup"
+        # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone
+        # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5
+        return f"rclone::b2:{self._repository_name}/sfbackup"
 
     def rclone_args(self):
         return "rclone.args=serve restic --stdio" + self.backend_rclone_args()
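[Note] Taken together, patches 002-005 replace the on-disk rclone.conf with a fully self-contained restic invocation: the `-o rclone.args=...` option tells restic which rclone command to spawn, and the Backblaze credentials ride along as rclone flags. A sketch of the command line these helpers are meant to assemble (bucket name and credentials are invented placeholders, not values from the patches):

    # Sketch only: the command ResticController now effectively builds.
    # "backup-bucket", "B2_ACCOUNT_ID" and "B2_APP_KEY" are placeholders.
    rclone_args = (
        "rclone.args=serve restic --stdio "
        "--b2-account B2_ACCOUNT_ID --b2-key B2_APP_KEY"
    )
    backup_listing_command = [
        "restic",
        "-o", rclone_args,                          # forwarded to `rclone serve restic --stdio`
        "-r", "rclone::b2:backup-bucket/sfbackup",  # on-the-fly remote, no rclone.conf needed
        "snapshots",
        "--json",
    ]
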
From 6d73405118983548bd363e9d8ee981b985a31901 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 23 Jan 2023 11:15:05 +0000
Subject: [PATCH 006/537] feature(backups): add backup structures and queries

---
 .../graphql/common_types/backup_snapshot.py     |  9 +++++++++
 selfprivacy_api/graphql/common_types/service.py |  6 ++++++
 selfprivacy_api/graphql/queries/backup.py       | 14 ++++++++++++++
 3 files changed, 29 insertions(+)
 create mode 100644 selfprivacy_api/graphql/common_types/backup_snapshot.py
 create mode 100644 selfprivacy_api/graphql/queries/backup.py

diff --git a/selfprivacy_api/graphql/common_types/backup_snapshot.py b/selfprivacy_api/graphql/common_types/backup_snapshot.py
new file mode 100644
index 0000000..3256e0c
--- /dev/null
+++ b/selfprivacy_api/graphql/common_types/backup_snapshot.py
@@ -0,0 +1,9 @@
+import datetime
+import strawberry
+
+
+@strawberry.type
+class SnapshotInfo:
+    id: str
+    service_name: str
+    created_at: datetime.datetime
diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py
index c1246ca..61ed5af 100644
--- a/selfprivacy_api/graphql/common_types/service.py
+++ b/selfprivacy_api/graphql/common_types/service.py
@@ -1,7 +1,9 @@
 from enum import Enum
 import typing
 import strawberry
+import datetime
 from selfprivacy_api.graphql.common_types.dns import DnsRecord
+from selfprivacy_api.graphql.common_types.backup_snapshot import SnapshotInfo
 
 from selfprivacy_api.services import get_service_by_id, get_services_by_location
 from selfprivacy_api.services import Service as ServiceInterface
@@ -101,6 +103,10 @@ class Service:
         """Get storage usage for a service"""
         return get_storage_usage(self)
 
+    @strawberry.field
+    def backup_snapshots(self) -> typing.Optional[typing.List[SnapshotInfo]]:
+        return None
+
 
 def service_to_graphql_service(service: ServiceInterface) -> Service:
     """Convert service to graphql service"""
diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py
new file mode 100644
index 0000000..ef61b10
--- /dev/null
+++ b/selfprivacy_api/graphql/queries/backup.py
@@ -0,0 +1,14 @@
+"""Backup"""
+# pylint: disable=too-few-public-methods
+import typing
+import strawberry
+from selfprivacy_api.graphql.common_types.backup_snapshot import SnapshotInfo
+
+
+@strawberry.type
+class Backup:
+    backend: str
+
+    @strawberry.field
+    def get_backups(self) -> typing.List[SnapshotInfo]:
+        return []

From a98314bb6053941fb3d7d7299870f009b18ea50d Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 23 Jan 2023 12:08:17 +0000
Subject: [PATCH 007/537] feature(backups): placeholders for the modules of the new backup system

---
 selfprivacy_api/backup/__init__.py            | 0
 selfprivacy_api/backup/providers/__init__.py  | 0
 selfprivacy_api/backup/providers/backblaze.py | 0
 selfprivacy_api/backup/providers/provider.py  | 0
 4 files changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 selfprivacy_api/backup/__init__.py
 create mode 100644 selfprivacy_api/backup/providers/__init__.py
 create mode 100644 selfprivacy_api/backup/providers/backblaze.py
 create mode 100644 selfprivacy_api/backup/providers/provider.py

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py
new file mode 100644
index 0000000..e69de29
diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py
new file mode 100644
index 0000000..e69de29

From 23e3d553e6956a68cbd09378d373e265d484be2b Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 23 Jan 2023 13:43:18 +0000
Subject: [PATCH 008/537] feature(backups): placeholders for the backupers and backup providers

---
 selfprivacy_api/backup/__init__.py            |  5 +++++
 selfprivacy_api/backup/providers/backblaze.py |  6 ++++++
 selfprivacy_api/backup/providers/provider.py  | 13 +++++++++++++
 selfprivacy_api/backup/restic_backuper.py     |  6 ++++++
 4 files changed, 30 insertions(+)
 create mode 100644 selfprivacy_api/backup/restic_backuper.py

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index e69de29..ff9bb2d 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -0,0 +1,5 @@
+from abc import ABC
+
+class AbstractBackuper(ABC):
+    def __init__(self):
+        pass
\ No newline at end of file
diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py
index e69de29..6dfa1a7 100644
--- a/selfprivacy_api/backup/providers/backblaze.py
+++ b/selfprivacy_api/backup/providers/backblaze.py
@@ -0,0 +1,6 @@
+from .provider import AbstractBackupProvider
+from selfprivacy_api.backup.restic_backuper import ResticBackuper
+
+
+class Backblaze(AbstractBackupProvider):
+    backuper = ResticBackuper()
diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py
index e69de29..9bd8a60 100644
--- a/selfprivacy_api/backup/providers/provider.py
+++ b/selfprivacy_api/backup/providers/provider.py
@@ -0,0 +1,13 @@
+"""
+An abstract class for BackBlaze, S3 etc.
+It assumes that while some providers are supported via restic/rclone, others may
+require different backends
+"""
+from abc import ABC
+from selfprivacy_api.backup import AbstractBackuper
+
+
+class AbstractBackupProvider(ABC):
+    @property
+    def backuper(self) -> AbstractBackuper:
+        raise NotImplementedError
diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
new file mode 100644
index 0000000..f1775ff
--- /dev/null
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -0,0 +1,6 @@
+from selfprivacy_api.backup import AbstractBackuper
+
+
+class ResticBackuper(AbstractBackuper):
+    def __init__(self):
+        pass

From 24d170b3cbc48290bd7425739fd350ac2ef4a3ec Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 23 Jan 2023 14:21:43 +0000
Subject: [PATCH 009/537] feature(backups): copy cli logic to new restic backuper

---
 selfprivacy_api/backup/restic_backuper.py | 26 +++++++++++++++++++++--
 1 file changed, 24 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index f1775ff..99a29ab 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -2,5 +2,27 @@ from selfprivacy_api.backup import AbstractBackuper
 
 
 class ResticBackuper(AbstractBackuper):
-    def __init__(self):
-        pass
+    def __init__(self, login_flag: str, key_flag: str, type: str):
+        self.login_flag = login_flag
+        self.key_flag = key_flag
+        self.type = type
+
+    def restic_repo(self, repository_name: str) -> str:
+        # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone
+        # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5
+        return f"rclone::{self.type}:{self._repository_name}/sfbackup"
+
+    def rclone_args(self):
+        return "rclone.args=serve restic --stdio" + self.backend_rclone_args()
+
+    def backend_rclone_args(self, account: str, key: str):
+        return f"{self.login_flag} {account} {self.key_flag} {key}"
+
+    def restic_command(self, account: str, key: str, *args):
+        return [
+            "restic",
+            "-o",
+            self.rclone_args(),
+            "-r",
+            self.restic_repo(account, key),
+        ].extend(args)
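[Note] One subtlety in restic_command() above: list.extend() mutates the list in place and returns None, so as committed the method returns None rather than the assembled command. Patch 018 later in this series rewrites it to extend first and then return the list. A minimal demonstration:

    command = ["restic", "-o"].extend(["snapshots"])
    print(command)  # prints: None -- extend() returns None

    command = ["restic", "-o"]
    command.extend(["snapshots"])  # the later fix: mutate, then return
    print(command)  # prints: ['restic', '-o', 'snapshots']
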
From af6a49b0a3d3b97081ec2d4ff587e3a0f848eece Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 1 Feb 2023 11:58:55 +0000
Subject: [PATCH 010/537] test(backup): provider class selection

---
 selfprivacy_api/backup/providers/__init__.py  | 11 +++++++++++
 selfprivacy_api/backup/providers/backblaze.py |  2 +-
 selfprivacy_api/backup/providers/provider.py  |  4 ++++
 tests/test_graphql/test_backup.py             | 11 +++++++++++
 4 files changed, 27 insertions(+), 1 deletion(-)
 create mode 100644 tests/test_graphql/test_backup.py

diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py
index e69de29..be09cb2 100644
--- a/selfprivacy_api/backup/providers/__init__.py
+++ b/selfprivacy_api/backup/providers/__init__.py
@@ -0,0 +1,11 @@
+from selfprivacy_api.graphql.queries.providers import BackupProvider
+from selfprivacy_api.backup.providers.provider import AbstractBackupProvider
+
+from selfprivacy_api.backup.providers.backblaze import Backblaze
+
+PROVIDER_MAPPING = {
+    BackupProvider.BACKBLAZE: Backblaze
+}
+
+def get_provider(provider_type : BackupProvider) -> AbstractBackupProvider:
+    return PROVIDER_MAPPING[provider_type]
diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py
index 6dfa1a7..e16e9d3 100644
--- a/selfprivacy_api/backup/providers/backblaze.py
+++ b/selfprivacy_api/backup/providers/backblaze.py
@@ -3,4 +3,4 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper
 
 
 class Backblaze(AbstractBackupProvider):
-    backuper = ResticBackuper()
+    backuper = ResticBackuper("--b2-account", "--b2-key", "b2")
diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py
index 9bd8a60..dd41e9a 100644
--- a/selfprivacy_api/backup/providers/provider.py
+++ b/selfprivacy_api/backup/providers/provider.py
@@ -11,3 +11,7 @@ class AbstractBackupProvider(ABC):
     @property
     def backuper(self) -> AbstractBackuper:
         raise NotImplementedError
+
+    def __init__(self, login, key):
+        self.login = login
+        self.key = key
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
new file mode 100644
index 0000000..70b3ce7
--- /dev/null
+++ b/tests/test_graphql/test_backup.py
@@ -0,0 +1,11 @@
+import selfprivacy_api.backup.providers as providers
+from selfprivacy_api.backup.providers import AbstractBackupProvider
+
+from selfprivacy_api.backup.providers.backblaze import Backblaze
+from selfprivacy_api.graphql.queries.providers import BackupProvider
+
+
+def test_select_backend():
+    provider = providers.get_provider(BackupProvider.BACKBLAZE)
+    assert provider is not None
+    assert provider == Backblaze

From 934afeb5318a7c88adb22ad03c38f556f1da9e7a Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 3 Feb 2023 17:04:35 +0000
Subject: [PATCH 011/537] test(backup): dummy service

---
 .../services/test_service/__init__.py         | 136 ++++++++++++++++++
 .../services/test_service/bitwarden.svg       |   3 +
 selfprivacy_api/services/test_service/icon.py |   5 +
 tests/test_graphql/test_backup.py             |  14 ++++++++++++++
 4 files changed, 158 insertions(+)
 create mode 100644 selfprivacy_api/services/test_service/__init__.py
 create mode 100644 selfprivacy_api/services/test_service/bitwarden.svg
 create mode 100644 selfprivacy_api/services/test_service/icon.py

diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py
new file mode 100644
index 0000000..53fe0cf
--- /dev/null
+++ b/selfprivacy_api/services/test_service/__init__.py
@@ -0,0 +1,136 @@
+"""Class representing Bitwarden service"""
+import base64
+import typing
+
+from selfprivacy_api.jobs import Job
+from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
+from selfprivacy_api.utils import ReadUserData, get_domain
+from selfprivacy_api.utils.block_devices import BlockDevice
+import selfprivacy_api.utils.network as network_utils
+
+from selfprivacy_api.services.test_service.icon import BITWARDEN_ICON
+
+
+class DummyService(Service):
+    """A test service"""
+
+    def __init__(self, location):
+        self.loccation = location
+
+    @staticmethod
+    def get_id() -> str:
+        """Return service id."""
+        return "testservice"
+
+    @staticmethod
+    def get_display_name() -> str:
+        """Return service display name."""
+        return "Test Service"
+
+    @staticmethod
+    def get_description() -> str:
+        """Return service description."""
+        return "A small service used for test purposes. Does nothing."
+
+    @staticmethod
+    def get_svg_icon() -> str:
+        """Read SVG icon from file and return it as base64 encoded string."""
+        # return ""
+        return base64.b64encode(BITWARDEN_ICON.encode("utf-8")).decode("utf-8")
+
+    @staticmethod
+    def get_url() -> typing.Optional[str]:
+        """Return service url."""
+        domain = get_domain()
+        return f"https://password.{domain}"
+
+    @staticmethod
+    def is_movable() -> bool:
+        return True
+
+    @staticmethod
+    def is_required() -> bool:
+        return False
+
+    @staticmethod
+    def is_enabled() -> bool:
+        return True
+
+    @staticmethod
+    def get_status() -> ServiceStatus:
+        """
+        Return Bitwarden status from systemd.
+        Use command return code to determine status.
+
+        Return code 0 means service is running.
+        Return code 1 or 2 means service is in error state.
+        Return code 3 means service is stopped.
+        Return code 4 means service is off.
+        """
+        return 0
+
+    @staticmethod
+    def enable():
+        pass
+
+    @staticmethod
+    def disable():
+        pass
+
+    @staticmethod
+    def stop():
+        pass
+
+    @staticmethod
+    def start():
+        pass
+
+    @staticmethod
+    def restart():
+        pass
+
+    @staticmethod
+    def get_configuration():
+        return {}
+
+    @staticmethod
+    def set_configuration(config_items):
+        return super().set_configuration(config_items)
+
+    @staticmethod
+    def get_logs():
+        return ""
+
+    @staticmethod
+    def get_storage_usage() -> int:
+        storage_usage = 0
+        return storage_usage
+
+    @staticmethod
+    def get_location() -> str:
+        with ReadUserData() as user_data:
+            if user_data.get("useBinds", False):
+                return user_data.get("bitwarden", {}).get("location", "sda1")
+            else:
+                return "sda1"
+
+    @staticmethod
+    def get_dns_records() -> typing.List[ServiceDnsRecord]:
+        """Return list of DNS records for Bitwarden service."""
+        return [
+            ServiceDnsRecord(
+                type="A",
+                name="password",
+                content=network_utils.get_ip4(),
+                ttl=3600,
+            ),
+            ServiceDnsRecord(
+                type="AAAA",
+                name="password",
+                content=network_utils.get_ip6(),
+                ttl=3600,
+            ),
+        ]
+
+    def move_to_volume(self, volume: BlockDevice) -> Job:
+        pass
diff --git a/selfprivacy_api/services/test_service/bitwarden.svg b/selfprivacy_api/services/test_service/bitwarden.svg
new file mode 100644
index 0000000..ced270c
--- /dev/null
+++ b/selfprivacy_api/services/test_service/bitwarden.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/selfprivacy_api/services/test_service/icon.py b/selfprivacy_api/services/test_service/icon.py
new file mode 100644
index 0000000..f9280e0
--- /dev/null
+++ b/selfprivacy_api/services/test_service/icon.py
@@ -0,0 +1,5 @@
+BITWARDEN_ICON = """
+
+
+
+"""
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 70b3ce7..b63097c 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -1,3 +1,7 @@
+import pytest
+
+from selfprivacy_api.services.test_service import DummyService
+
 import selfprivacy_api.backup.providers as providers
 from selfprivacy_api.backup.providers import AbstractBackupProvider
 
@@ -5,7 +9,17 @@ from selfprivacy_api.backup.providers.backblaze import Backblaze
 from selfprivacy_api.graphql.queries.providers import BackupProvider
 
 
+@pytest.fixture()
+def test_service(tmpdir):
+    return DummyService(tmpdir)
+
+
 def test_select_backend():
     provider = providers.get_provider(BackupProvider.BACKBLAZE)
     assert provider is not None
     assert provider == Backblaze
+
+
+def test_backup(test_service):
+    # temporarily incomplete
+    assert test_service is not None
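[Note] The get_status() docstring above quotes the exit-code convention of `systemctl status` (0 running, 1/2 failed, 3 stopped, 4 no such unit). For orientation, a hedged sketch of how a non-dummy service could map those codes onto a status enum; the helper name and enum values here are illustrative, not the project's actual ones:

    import subprocess
    from enum import Enum

    class Status(Enum):
        ACTIVE = "ACTIVE"
        ERROR = "ERROR"
        INACTIVE = "INACTIVE"
        OFF = "OFF"

    def unit_status(unit: str) -> Status:
        # `systemctl status` encodes the service state in its return code.
        code = subprocess.run(
            ["systemctl", "status", unit],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            check=False,
        ).returncode
        mapping = {0: Status.ACTIVE, 1: Status.ERROR, 2: Status.ERROR,
                   3: Status.INACTIVE, 4: Status.OFF}
        return mapping.get(code, Status.ERROR)
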
From f5066082c8525b1639ae724f74dfdfe836fc8c86 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 3 Feb 2023 18:03:13 +0000
Subject: [PATCH 012/537] feat(backup): allow no auth

---
 selfprivacy_api/backup/restic_backuper.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index 99a29ab..fd42791 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -15,8 +15,15 @@ class ResticBackuper(AbstractBackuper):
     def rclone_args(self):
         return "rclone.args=serve restic --stdio" + self.backend_rclone_args()
 
-    def backend_rclone_args(self, account: str, key: str):
-        return f"{self.login_flag} {account} {self.key_flag} {key}"
+    def backend_rclone_args(self, account: str, key: str) -> str:
+        acc_arg = ""
+        key_arg = ""
+        if account != "":
+            acc_arg = f"{self.login_flag} {account}"
+        if key != "":
+            key_arg = f"{self.key_flag} {key}"
+
+        return f"{acc_arg} {key_arg}"
 
     def restic_command(self, account: str, key: str, *args):
         return [

From a66a3f35fc70196049f0d27bd7ddca28d83a44c7 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 3 Feb 2023 18:29:45 +0000
Subject: [PATCH 013/537] feat(backup): add in-memory backup

---
 selfprivacy_api/backup/providers/__init__.py | 7 +++++--
 selfprivacy_api/graphql/queries/providers.py | 2 ++
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py
index be09cb2..66fb9e6 100644
--- a/selfprivacy_api/backup/providers/__init__.py
+++ b/selfprivacy_api/backup/providers/__init__.py
@@ -2,10 +2,13 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider
 from selfprivacy_api.backup.providers.provider import AbstractBackupProvider
 
 from selfprivacy_api.backup.providers.backblaze import Backblaze
+from selfprivacy_api.backup.providers.memory import InMemoryBackup
 
 PROVIDER_MAPPING = {
-    BackupProvider.BACKBLAZE: Backblaze
+    BackupProvider.BACKBLAZE: Backblaze,
+    BackupProvider.MEMORY: InMemoryBackup,
 }
 
-def get_provider(provider_type : BackupProvider) -> AbstractBackupProvider:
+
+def get_provider(provider_type: BackupProvider) -> AbstractBackupProvider:
     return PROVIDER_MAPPING[provider_type]
diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py
index 1759d7b..ecc7f11 100644
--- a/selfprivacy_api/graphql/queries/providers.py
+++ b/selfprivacy_api/graphql/queries/providers.py
@@ -19,3 +19,5 @@ class ServerProvider(Enum):
 @strawberry.enum
 class BackupProvider(Enum):
     BACKBLAZE = "BACKBLAZE"
+    # for testing purposes, make sure not selectable in prod.
+    MEMORY = "MEMORY"
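[Note] For orientation: the `type` argument threaded through ResticBackuper ends up inside restic's repository string as an rclone "connection string" remote, i.e. a remote named directly on the command line (`:b2:`, `:memory:`) that needs no rclone.conf entry. A sketch of the strings this yields, with placeholder repository names (the helper below just mirrors restic_repo() above):

    def repo_for(backend_type: str, repo_name: str) -> str:
        # Mirrors restic_repo() above (before its later signature fix).
        return f"rclone::{backend_type}:{repo_name}/sfbackup"

    assert repo_for("b2", "my-bucket") == "rclone::b2:my-bucket/sfbackup"
    assert repo_for("memory", "testrepo") == "rclone::memory:testrepo/sfbackup"
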
From e54224f15da8c8b37853568791db7520caaf9ff6 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 3 Feb 2023 18:49:24 +0000
Subject: [PATCH 014/537] test(backup): init an in-memory backup class

---
 tests/test_graphql/test_backup.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index b63097c..abd05c2 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -14,12 +14,22 @@ def test_service(tmpdir):
     return DummyService(tmpdir)
 
 
+@pytest.fixture()
+def memory_backup():
+    ProviderClass = providers.get_provider(BackupProvider.MEMORY)
+    assert ProviderClass is not None
+    memory_provider = ProviderClass(login="", key="")
+    assert memory_provider is not None
+    return memory_provider
+
+
 def test_select_backend():
     provider = providers.get_provider(BackupProvider.BACKBLAZE)
     assert provider is not None
     assert provider == Backblaze
 
 
-def test_backup(test_service):
+def test_backup_service(test_service, memory_backup):
     # temporarily incomplete
     assert test_service is not None
+    assert memory_backup is not None

From 90c25248009e2db697f2ec7e2211355f6d48d05f Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 3 Feb 2023 19:09:24 +0000
Subject: [PATCH 015/537] test(backup): make a testfile to backup

---
 tests/test_graphql/test_backup.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index abd05c2..264a9bf 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -1,4 +1,5 @@
 import pytest
+import os.path as path
 
 from selfprivacy_api.services.test_service import DummyService
 
@@ -9,8 +10,14 @@ from selfprivacy_api.backup.providers.backblaze import Backblaze
 from selfprivacy_api.graphql.queries.providers import BackupProvider
 
 
+TESTFILE_BODY = "testytest!"
+
+
 @pytest.fixture()
 def test_service(tmpdir):
+    testile_path = path.join(tmpdir, "testfile.txt")
+    with open(testile_path, "w") as file:
+        file.write(TESTFILE_BODY)
     return DummyService(tmpdir)

From 474f04f923e6527dd2ca9f67620e26338fa82a8f Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 3 Feb 2023 20:28:55 +0000
Subject: [PATCH 016/537] feat(backup): add backuping to restic backuper

---
 selfprivacy_api/backup/restic_backuper.py | 26 +++++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index fd42791..dbfa0a9 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -1,3 +1,5 @@
+import subprocess
+
 from selfprivacy_api.backup import AbstractBackuper
 
 
@@ -6,6 +8,12 @@ class ResticBackuper(AbstractBackuper):
         self.login_flag = login_flag
         self.key_flag = key_flag
         self.type = type
+        self.account = ""
+        self.key = ""
+
+    def set_creds(self, account: str, key: str):
+        self.account = account
+        self.key = key
 
     def restic_repo(self, repository_name: str) -> str:
         # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone
@@ -33,3 +41,21 @@ class ResticBackuper(AbstractBackuper):
             "-r",
             self.restic_repo(account, key),
         ].extend(args)
+
+    def start_backup(self, folder: str):
+        """
+        Start backup with restic
+        """
+        backup_command = self.restic_command(
+            self.account,
+            self.key,
+            "backup",
+            folder,
+        )
+        with open("/var/backup.log", "w", encoding="utf-8") as log_file:
+            subprocess.Popen(
+                backup_command,
+                shell=False,
+                stdout=log_file,
+                stderr=subprocess.STDOUT,
+            )

From 6fa72dbac1f2c44af75377b0b017773f19cfb3a4 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 8 Feb 2023 14:04:55 +0000
Subject: [PATCH 017/537] fix(backup): add memory backup class, forgot to add to git

---
 selfprivacy_api/backup/providers/memory.py | 6 ++++++
 1 file changed, 6 insertions(+)
 create mode 100644 selfprivacy_api/backup/providers/memory.py

diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py
new file mode 100644
index 0000000..4ddf571
--- /dev/null
+++ b/selfprivacy_api/backup/providers/memory.py
@@ -0,0 +1,6 @@
+from .provider import AbstractBackupProvider
+from selfprivacy_api.backup.restic_backuper import ResticBackuper
+
+
+class InMemoryBackup(AbstractBackupProvider):
+    backuper = ResticBackuper("", "", "memory")

From 17df21964ab01234af785bd93d93b43eef4c9a6f Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 8 Feb 2023 14:05:25 +0000
Subject: [PATCH 018/537] test(backup): try to back up!

---
 selfprivacy_api/backup/__init__.py            |  9 +++-
 selfprivacy_api/backup/restic_backuper.py     | 50 +++++++++++--------
 .../services/test_service/__init__.py         | 14 ++----
 tests/test_graphql/test_backup.py             | 11 +++-
 4 files changed, 51 insertions(+), 33 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index ff9bb2d..024beee 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -1,5 +1,10 @@
-from abc import ABC
+from abc import ABC, abstractmethod
+
 
 class AbstractBackuper(ABC):
     def __init__(self):
-        pass
\ No newline at end of file
+        pass
+
+    @abstractmethod
+    def start_backup(self, folder: str):
+        raise NotImplementedError
diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index dbfa0a9..7c379a7 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -18,44 +18,54 @@ class ResticBackuper(AbstractBackuper):
     def restic_repo(self, repository_name: str) -> str:
         # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone
         # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5
-        return f"rclone::{self.type}:{self._repository_name}/sfbackup"
+        return f"rclone::{self.type}:{repository_name}/sfbackup"
 
     def rclone_args(self):
         return "rclone.args=serve restic --stdio" + self.backend_rclone_args()
 
-    def backend_rclone_args(self, account: str, key: str) -> str:
+    def backend_rclone_args(self) -> str:
         acc_arg = ""
         key_arg = ""
-        if account != "":
-            acc_arg = f"{self.login_flag} {account}"
-        if key != "":
-            key_arg = f"{self.key_flag} {key}"
+        if self.account != "":
+            acc_arg = f"{self.login_flag} {self.account}"
+        if self.key != "":
+            key_arg = f"{self.key_flag} {self.key}"
 
         return f"{acc_arg} {key_arg}"
 
-    def restic_command(self, account: str, key: str, *args):
-        return [
+    def restic_command(self, repo_name: str, *args):
+        command = [
             "restic",
             "-o",
             self.rclone_args(),
             "-r",
-            self.restic_repo(account, key),
-        ].extend(args)
+            self.restic_repo(repo_name),
+        ]
+        if args != []:
+            command.extend(args)
+        return command
 
-    def start_backup(self, folder: str):
+    def start_backup(self, folder: str, repo_name: str):
         """
         Start backup with restic
         """
         backup_command = self.restic_command(
-            self.account,
-            self.key,
+            repo_name,
             "backup",
             folder,
         )
-        with open("/var/backup.log", "w", encoding="utf-8") as log_file:
-            subprocess.Popen(
-                backup_command,
-                shell=False,
-                stdout=log_file,
-                stderr=subprocess.STDOUT,
-            )
+        subprocess.Popen(
+            backup_command,
+            shell=False,
+            stderr=subprocess.STDOUT,
+        )
+
+        # TODO: we might want to provide logging facilities
+        # that are reroutable for testing
+        # with open("/var/backup.log", "w", encoding="utf-8") as log_file:
+        #     subprocess.Popen(
+        #         backup_command,
+        #         shell=False,
+        #         stdout=log_file,
+        #         stderr=subprocess.STDOUT,
+        #     )
diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py
index 53fe0cf..0118dbc 100644
--- a/selfprivacy_api/services/test_service/__init__.py
+++ b/selfprivacy_api/services/test_service/__init__.py
@@ -14,8 +14,8 @@ class DummyService(Service):
     """A test service"""
 
-    def __init__(self, location):
-        self.loccation = location
+    def __init_subclass__(cls, location):
+        cls.location = location
 
     @staticmethod
     def get_id() -> str:
         """Return service id."""
         return "testservice"
@@ -106,13 +106,9 @@ class DummyService(Service):
         storage_usage = 0
         return storage_usage
 
-    @staticmethod
-    def get_location() -> str:
-        with ReadUserData() as user_data:
-            if user_data.get("useBinds", False):
-                return user_data.get("bitwarden", {}).get("location", "sda1")
-            else:
-                return "sda1"
+    @classmethod
+    def get_location(cls) -> str:
+        return cls.location
 
     @staticmethod
     def get_dns_records() -> typing.List[ServiceDnsRecord]:
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 264a9bf..8fc821a 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -11,6 +11,7 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider
 
 
 TESTFILE_BODY = "testytest!"
+REPO_NAME = "test_backup"
 
 
 @pytest.fixture()
@@ -18,11 +19,16 @@ def test_service(tmpdir):
     testile_path = path.join(tmpdir, "testfile.txt")
     with open(testile_path, "w") as file:
         file.write(TESTFILE_BODY)
-    return DummyService(tmpdir)
+
+    # we need this to not change get_location() much
+    class TestDummyService (DummyService, location=tmpdir):
+        pass
+
+    return TestDummyService()
 
 
 @pytest.fixture()
-def memory_backup():
+def memory_backup() -> AbstractBackupProvider:
     ProviderClass = providers.get_provider(BackupProvider.MEMORY)
     assert ProviderClass is not None
     memory_provider = ProviderClass(login="", key="")
@@ -40,3 +46,4 @@ def test_backup_service(test_service, memory_backup):
     # temporarily incomplete
     assert test_service is not None
     assert memory_backup is not None
+    memory_backup.backuper.start_backup(test_service.get_location(), REPO_NAME)
\ No newline at end of file
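[Note] The `class TestDummyService(DummyService, location=tmpdir)` construction introduced above relies on Python's `__init_subclass__` hook: keyword arguments in a class statement are delivered to the parent's `__init_subclass__` at class-creation time. A standalone illustration of the pattern (names here are illustrative):

    class Locatable:
        def __init_subclass__(cls, location, **kwargs):
            super().__init_subclass__(**kwargs)
            cls.location = location  # stored as a class attribute

    class Concrete(Locatable, location="/tmp/example"):
        pass

    assert Concrete.location == "/tmp/example"
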
From d5881cc52304e84f22a57dd3dbcb69ba9a8333a9 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 8 Feb 2023 14:18:45 +0000
Subject: [PATCH 019/537] refactor(backup): add a placeholder Backups singleton class

---
 selfprivacy_api/backup/__init__.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 024beee..3054f2e 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -1,6 +1,10 @@
 from abc import ABC, abstractmethod
 
 
+class Backups:
+    """A singleton controller for backups"""
+
+
 class AbstractBackuper(ABC):
     def __init__(self):
         pass
From 7e243146db83a68d6237bef27b1bd4879d410877 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 8 Feb 2023 14:57:34 +0000
Subject: [PATCH 020/537] refactor(backup): add a backup function to Backups singleton class

---
 selfprivacy_api/backup/__init__.py           | 38 ++++++++++++++++----
 selfprivacy_api/backup/backuper.py           | 10 ++++++
 selfprivacy_api/backup/providers/provider.py |  2 +-
 selfprivacy_api/backup/restic_backuper.py    |  2 +-
 4 files changed, 44 insertions(+), 8 deletions(-)
 create mode 100644 selfprivacy_api/backup/backuper.py

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 3054f2e..79125ef 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -1,14 +1,40 @@
-from abc import ABC, abstractmethod
+from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass
+
+from selfprivacy_api.services.service import Service
+from selfprivacy_api.backup.providers.provider import AbstractBackupProvider
+from selfprivacy_api.backup.providers import get_provider
+from selfprivacy_api.graphql.queries.providers import BackupProvider
 
 
-class Backups:
+class Backups(metaclass=SingletonMetaclass):
     """A singleton controller for backups"""
 
+    provider: AbstractBackupProvider
 
-class AbstractBackuper(ABC):
     def __init__(self):
+        self.lookup_provider()
+
+    def lookup_provider(self):
+        redis_provider = Backups.load_provider_redis()
+        if redis_provider is not None:
+            self.provider = redis_provider
+
+        json_provider = Backups.load_provider_json()
+        if json_provider is not None:
+            self.provider = json_provider
+
+        provider_class = get_provider(BackupProvider.MEMORY)
+        self.provider = provider_class(login="", key="")
+
+    @staticmethod
+    def load_provider_redis() -> AbstractBackupProvider:
         pass
 
-    @abstractmethod
-    def start_backup(self, folder: str):
-        raise NotImplementedError
+    @staticmethod
+    def load_provider_json() -> AbstractBackupProvider:
+        pass
+
+    def back_up(self, service: Service):
+        folder = service.get_location()
+        repo_name = service.get_id()
+        self.provider.backuper.start_backup(folder, repo_name)
diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py
new file mode 100644
index 0000000..f428145
--- /dev/null
+++ b/selfprivacy_api/backup/backuper.py
@@ -0,0 +1,10 @@
+from abc import ABC, abstractmethod
+
+
+class AbstractBackuper(ABC):
+    def __init__(self):
+        pass
+
+    @abstractmethod
+    def start_backup(self, folder: str, repo_name: str):
+        raise NotImplementedError
diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py
index 9bd8a60..0b57528 100644
--- a/selfprivacy_api/backup/providers/provider.py
+++ b/selfprivacy_api/backup/providers/provider.py
@@ -4,7 +4,7 @@ It assumes that while some providers are supported via restic/rclone, others may
 require different backends
 """
 from abc import ABC
-from selfprivacy_api.backup import AbstractBackuper
+from selfprivacy_api.backup.backuper import AbstractBackuper
 
 
 class AbstractBackupProvider(ABC):
diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index 7c379a7..5098e97 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -1,6 +1,6 @@
 import subprocess
 
-from selfprivacy_api.backup import AbstractBackuper
+from selfprivacy_api.backup.backuper import AbstractBackuper
 
 
 class ResticBackuper(AbstractBackuper):

From bf263f68b9abbb81f0f6f2682dbdf74514f71bf0 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 8 Feb 2023 15:14:08 +0000
Subject: [PATCH 021/537] test(backup): use a backup service function

---
 tests/test_graphql/test_backup.py | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 8fc821a..64efe05 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -3,6 +3,7 @@ import os.path as path
 
 from selfprivacy_api.services.test_service import DummyService
 
+from selfprivacy_api.backup import Backups
 import selfprivacy_api.backup.providers as providers
 from selfprivacy_api.backup.providers import AbstractBackupProvider
 
@@ -21,7 +22,7 @@ def test_service(tmpdir):
         file.write(TESTFILE_BODY)
 
     # we need this to not change get_location() much
-    class TestDummyService (DummyService, location=tmpdir):
+    class TestDummyService(DummyService, location=tmpdir):
         pass
 
     return TestDummyService()
@@ -36,14 +37,23 @@ def memory_backup() -> AbstractBackupProvider:
     return memory_provider
 
 
+@pytest.fixture()
+def backups():
+    return Backups()
+
+
 def test_select_backend():
     provider = providers.get_provider(BackupProvider.BACKBLAZE)
     assert provider is not None
     assert provider == Backblaze
 
 
-def test_backup_service(test_service, memory_backup):
+def test_backup_simple(test_service, memory_backup):
     # temporarily incomplete
     assert test_service is not None
     assert memory_backup is not None
-    memory_backup.backuper.start_backup(test_service.get_location(), REPO_NAME)
\ No newline at end of file
+    memory_backup.backuper.start_backup(test_service.get_location(), REPO_NAME)
+
+
+def test_backup_service(test_service, backups):
+    backups.back_up(test_service)

From 3970524bcef39cc98d70d4c9fc66a98bc6bcc124 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 8 Feb 2023 15:27:49 +0000
Subject: [PATCH 022/537] feat(backup): hooks

---
 selfprivacy_api/backup/__init__.py  | 3 +++
 selfprivacy_api/services/service.py | 6 ++++++
 2 files changed, 9 insertions(+)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 79125ef..5ddd378 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -37,4 +37,7 @@ class Backups(metaclass=SingletonMetaclass):
     def back_up(self, service: Service):
         folder = service.get_location()
         repo_name = service.get_id()
+
+        service.pre_backup()
         self.provider.backuper.start_backup(folder, repo_name)
+        service.post_restore()
diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py
index 515e28f..f191149 100644
--- a/selfprivacy_api/services/service.py
+++ b/selfprivacy_api/services/service.py
@@ -138,3 +138,9 @@ class Service(ABC):
     @abstractmethod
     def move_to_volume(self, volume: BlockDevice) -> Job:
         pass
+
+    def pre_backup(self):
+        pass
+
+    def post_restore(self):
+        pass

From 1568ee68a4870523fea54d7b8e9fd5a4b28b6ad2 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 8 Feb 2023 15:40:45 +0000
Subject: [PATCH 023/537] feature(backup): add a restore function to restic backuper

---
 selfprivacy_api/backup/restic_backuper.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index 5098e97..be17473 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -69,3 +69,13 @@ class ResticBackuper(AbstractBackuper):
         #         stdout=log_file,
         #         stderr=subprocess.STDOUT,
         #     )
+
+    def restore_from_backup(self, repo_name, snapshot_id, folder):
+        """
+        Restore from backup with restic
+        """
+        restore_command = self.restic_command(
+            repo_name, "restore", snapshot_id, "--target", folder
+        )
+
+        subprocess.run(restore_command, shell=False)

From 3afe19690116786d339b1bef6aa516c8927cb9e8 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 8 Feb 2023 16:28:05 +0000
Subject: [PATCH 024/537] feature(backup): loading snapshots

---
 selfprivacy_api/backup/restic_backuper.py | 40 +++++++++++++++++++++++
 1 file changed, 40 insertions(+)

diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index be17473..fdecf1b 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -1,4 +1,5 @@
 import subprocess
+import json
 
 from selfprivacy_api.backup.backuper import AbstractBackuper
 
@@ -79,3 +80,42 @@ class ResticBackuper(AbstractBackuper):
         )
 
         subprocess.run(restore_command, shell=False)
+
+    def _load_snapshots(self, repo_name) -> object:
+        """
+        Load list of snapshots from repository
+        """
+        listing_command = self.restic_command(
+            repo_name,
+            "snapshots",
+            "--json",
+        )
+
+        with subprocess.Popen(
+            listing_command,
+            shell=False,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+        ) as backup_listing_process_descriptor:
+            output = backup_listing_process_descriptor.communicate()[0].decode("utf-8")
+
+            try:
+                return self.parse_snapshot_output(output)
+            except ValueError:
+                if "Is there a repository at the following location?" in output:
+                    return []
+                self.error_message = output
+                return []
+
+    def get_snapshots(self):
+        # No transformation for now
+        snapshots = []
+        for snapshot in self._load_snapshots():
+            snapshots.append(snapshot)
+        return snapshots
+
+    def parse_snapshot_output(self, output: str) -> object:
+        starting_index = output.find("[")
+        json.loads(output[starting_index:])
+        self.snapshot_list = json.loads(output[starting_index:])
+        print(output)
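[Note] parse_snapshot_output() above scans for the first "[" because restic can print warnings before the JSON document. A hedged sketch of what `restic snapshots --json` output looks like and how the same trick isolates the JSON — the snapshot entry is invented and abridged; real entries carry more fields such as paths and hostname:

    import json

    raw = 'warning: some preamble\n[{"time": "2023-02-08T14:05:25Z", "id": "f96b428f"}]'
    snapshots = json.loads(raw[raw.find("["):])
    for snap in snapshots:
        print(snap["id"], snap["time"])  # f96b428f 2023-02-08T14:05:25Z
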
From e447419f96f7f1866b5c0b4f4677245c0540e334 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 13 Feb 2023 10:33:39 +0000
Subject: [PATCH 025/537] refactor(backup): snapshot model

---
 selfprivacy_api/models/backup/__init__.py | 0
 selfprivacy_api/models/backup/snapshot.py | 6 ++++++
 2 files changed, 6 insertions(+)
 create mode 100644 selfprivacy_api/models/backup/__init__.py
 create mode 100644 selfprivacy_api/models/backup/snapshot.py

diff --git a/selfprivacy_api/models/backup/__init__.py b/selfprivacy_api/models/backup/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/selfprivacy_api/models/backup/snapshot.py b/selfprivacy_api/models/backup/snapshot.py
new file mode 100644
index 0000000..b9c9e6e
--- /dev/null
+++ b/selfprivacy_api/models/backup/snapshot.py
@@ -0,0 +1,6 @@
+from pydantic import BaseModel
+
+class Snapshot(BaseModel):
+    id: str
+    service_name: str
+    created_at: datetime.datetime

From a043392b7b3bb6c666d608dac3443b38ead6965d Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 13 Feb 2023 11:16:35 +0000
Subject: [PATCH 026/537] test(backup): no snapshots

---
 selfprivacy_api/backup/backuper.py        | 8 ++++++++
 selfprivacy_api/backup/restic_backuper.py | 8 ++++++--
 selfprivacy_api/models/backup/snapshot.py | 2 ++
 tests/test_graphql/test_backup.py         | 4 ++++
 4 files changed, 20 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py
index f428145..c2353ad 100644
--- a/selfprivacy_api/backup/backuper.py
+++ b/selfprivacy_api/backup/backuper.py
@@ -1,4 +1,7 @@
 from abc import ABC, abstractmethod
+from typing import List
+
+from selfprivacy_api.models.backup.snapshot import Snapshot
 
 
 class AbstractBackuper(ABC):
@@ -8,3 +11,8 @@ class AbstractBackuper(ABC):
     @abstractmethod
     def start_backup(self, folder: str, repo_name: str):
         raise NotImplementedError
+
+    @abstractmethod
+    def get_snapshots(self, repo_name) -> List[Snapshot]:
+        """Get all snapshots from the repo"""
+        raise NotImplementedError
diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index fdecf1b..2c120f1 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -1,7 +1,10 @@
 import subprocess
 import json
 
+from typing import List
+
 from selfprivacy_api.backup.backuper import AbstractBackuper
+from selfprivacy_api.models.backup.snapshot import Snapshot
 
 
 class ResticBackuper(AbstractBackuper):
@@ -107,10 +110,11 @@ class ResticBackuper(AbstractBackuper):
                 self.error_message = output
                 return []
 
-    def get_snapshots(self):
+    def get_snapshots(self, repo_name) -> List[Snapshot]:
+        """Get all snapshots from the repo"""
         # No transformation for now
         snapshots = []
-        for snapshot in self._load_snapshots():
+        for snapshot in self._load_snapshots(repo_name):
             snapshots.append(snapshot)
         return snapshots
 
diff --git a/selfprivacy_api/models/backup/snapshot.py b/selfprivacy_api/models/backup/snapshot.py
index b9c9e6e..9893f03 100644
--- a/selfprivacy_api/models/backup/snapshot.py
+++ b/selfprivacy_api/models/backup/snapshot.py
@@ -1,5 +1,7 @@
+import datetime
 from pydantic import BaseModel
 
+
 class Snapshot(BaseModel):
     id: str
     service_name: str
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 64efe05..ee8ee0e 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -57,3 +57,7 @@ def test_backup_simple(test_service, memory_backup):
 
 def test_backup_service(test_service, backups):
     backups.back_up(test_service)
+
+
+def test_no_snapshots(memory_backup):
+    assert memory_backup.backuper.get_snapshots("") == []

From c9fd6561813b5d2acedd0141aa7ac8d448c2b521 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 17 Feb 2023 15:55:19 +0000
Subject: [PATCH 027/537] refactor(backups): snapshotlist and local secret groundwork

---
 selfprivacy_api/backup/__init__.py        |  9 +++++++
 selfprivacy_api/backup/local_secret.py    | 30 +++++++++++++++++++++++
 selfprivacy_api/backup/restic_backuper.py | 23 +++++++++++------
 tests/test_graphql/test_backup.py         | 10 ++++++--
 4 files changed, 62 insertions(+), 10 deletions(-)
 create mode 100644 selfprivacy_api/backup/local_secret.py

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 5ddd378..4410809 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -1,3 +1,7 @@
+from typing import List
+
+from selfprivacy_api.models.backup.snapshot import Snapshot
+
 from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass
 
 from selfprivacy_api.services.service import Service
@@ -41,3 +45,8 @@ class Backups(metaclass=SingletonMetaclass):
         service.pre_backup()
         self.provider.backuper.start_backup(folder, repo_name)
         service.post_restore()
+
+    def get_snapshots(self, service: Service) -> List[Snapshot]:
+        repo_name = service.get_id()
+
+        return self.provider.backuper.get_snapshots(repo_name)
diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py
new file mode 100644
index 0000000..f2ebf06
--- /dev/null
+++ b/selfprivacy_api/backup/local_secret.py
@@ -0,0 +1,30 @@
+"""Handling of local secret used for encrypted backups.
+Separated out for circular dependency reasons
+"""
+
+REDIS_KEY = "backup:local_secret"
+
+
+class LocalBackupSecret:
+    @staticmethod
+    def get():
+        """A secret string which backblaze/other clouds do not know.
+        Serves as encryption key.
+        TODO: generate and save in redis
+        """
+        return "TEMPORARY_SECRET"
+
+    @staticmethod
+    def reset():
+        pass
+
+    def exists():
+        pass
+
+    @staticmethod
+    def _generate():
+        pass
+
+    @staticmethod
+    def _store(secret: str):
+        pass
diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index 2c120f1..3d02d07 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -6,6 +6,8 @@ from typing import List
 from selfprivacy_api.backup.backuper import AbstractBackuper
 from selfprivacy_api.models.backup.snapshot import Snapshot
 
+from selfprivacy_api.backup.local_secret import LocalBackupSecret
+
 
 class ResticBackuper(AbstractBackuper):
     def __init__(self, login_flag: str, key_flag: str, type: str):
@@ -37,6 +39,9 @@ class ResticBackuper(AbstractBackuper):
 
         return f"{acc_arg} {key_arg}"
 
+    def _password_command(self):
+        return f"echo {LocalBackupSecret.get()}"
+
     def restic_command(self, repo_name: str, *args):
         command = [
             "restic",
@@ -44,6 +49,8 @@ class ResticBackuper(AbstractBackuper):
             self.rclone_args(),
             "-r",
             self.restic_repo(repo_name),
+            "--password-command",
+            self._password_command(),
         ]
         if args != []:
             command.extend(args)
@@ -87,6 +94,7 @@ class ResticBackuper(AbstractBackuper):
     def _load_snapshots(self, repo_name) -> object:
         """
         Load list of snapshots from repository
+        raises ValueError if repo does not exist
         """
         listing_command = self.restic_command(
             repo_name,
@@ -102,13 +110,12 @@ class ResticBackuper(AbstractBackuper):
         ) as backup_listing_process_descriptor:
             output = backup_listing_process_descriptor.communicate()[0].decode("utf-8")
 
+        if "Is there a repository at the following location?" in output:
+            raise ValueError("No repository! : " + output)
         try:
             return self.parse_snapshot_output(output)
-        except ValueError:
-            if "Is there a repository at the following location?" in output:
-                return []
-            self.error_message = output
-            return []
+        except ValueError as e:
+            raise ValueError("Cannot load snapshots: ") from e
 
     def get_snapshots(self, repo_name) -> List[Snapshot]:
         """Get all snapshots from the repo"""
@@ -119,7 +126,7 @@ class ResticBackuper(AbstractBackuper):
         return snapshots
 
     def parse_snapshot_output(self, output: str) -> object:
+        if "[" not in output:
+            raise ValueError("There is no json in the restic snapshot output")
         starting_index = output.find("[")
-        json.loads(output[starting_index:])
-        self.snapshot_list = json.loads(output[starting_index:])
-        print(output)
+        return json.loads(output[starting_index:])
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index ee8ee0e..bb3b624 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -59,5 +59,11 @@ def test_backup_service(test_service, backups):
     backups.back_up(test_service)
 
 
-def test_no_snapshots(memory_backup):
-    assert memory_backup.backuper.get_snapshots("") == []
+def test_no_repo(memory_backup):
+    with pytest.raises(ValueError):
+        assert memory_backup.backuper.get_snapshots("") == []
+
+
+# def test_one_snapshot(backups, test_service):
+#     backups.back_up(test_service)
+#     assert len(backups.get_snapshots(test_service)) == 1
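[Note] restic's --password-command runs the given command and reads the repository password from its standard output, which is what makes the LocalBackupSecret indirection work. The module's TODO ("generate and save in redis") could plausibly be filled in along these lines — a sketch under assumptions, not the eventual implementation; the redis handle is a placeholder parameter:

    import secrets

    REDIS_KEY = "backup:local_secret"  # reused from the module above

    def get_or_create_secret(redis) -> str:
        secret = redis.get(REDIS_KEY)
        if secret is None:
            secret = secrets.token_urlsafe(32)  # cryptographically random
            redis.set(REDIS_KEY, secret)
        return secret
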
From 86e2b90bef5819eb25fdca16e84c2a83c26989a0 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 17 Feb 2023 15:59:27 +0000
Subject: [PATCH 028/537] refactor(backups): add repo init

---
 selfprivacy_api/backup/restic_backuper.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index 3d02d07..0e36f4d 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -81,6 +81,17 @@ class ResticBackuper(AbstractBackuper):
         #         stderr=subprocess.STDOUT,
         #     )
 
+    def init(self, repo_name):
+        init_command = self.restic_command(
+            repo_name,
+            "init",
+        )
+        subprocess.Popen(
+            init_command,
+            shell=False,
+            stderr=subprocess.STDOUT,
+        )
+
     def restore_from_backup(self, repo_name, snapshot_id, folder):
         """
         Restore from backup with restic

From bd744ba21152f796527a7a74322337786d4f3ef1 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 17 Feb 2023 16:11:17 +0000
Subject: [PATCH 029/537] refactor(backups): repo init service method

---
 selfprivacy_api/backup/__init__.py | 4 ++++
 selfprivacy_api/backup/backuper.py | 4 ++++
 2 files changed, 8 insertions(+)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 4410809..210c998 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -46,6 +46,10 @@ class Backups(metaclass=SingletonMetaclass):
         self.provider.backuper.start_backup(folder, repo_name)
         service.post_restore()
 
+    def init_repo(self, service: Service):
+        repo_name = service.get_id()
+        self.provider.backuper.init(repo_name)
+
     def get_snapshots(self, service: Service) -> List[Snapshot]:
         repo_name = service.get_id()
 
diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py
index c2353ad..676a0a1 100644
--- a/selfprivacy_api/backup/backuper.py
+++ b/selfprivacy_api/backup/backuper.py
@@ -16,3 +16,7 @@ class AbstractBackuper(ABC):
     def get_snapshots(self, repo_name) -> List[Snapshot]:
         """Get all snapshots from the repo"""
         raise NotImplementedError
+
+    @abstractmethod
+    def init(self, repo_name):
+        raise NotImplementedError

From 865c8f52803cc3acbe8d45464de47bd97a76a5c9 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 20 Feb 2023 10:35:51 +0000
Subject: [PATCH 030/537] test(backups): test repo init

---
 tests/test_graphql/test_backup.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index bb3b624..5e702b3 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -16,7 +16,7 @@ REPO_NAME = "test_backup"
 
 
 @pytest.fixture()
-def test_service(tmpdir):
+def test_service(tmpdir, backups):
     testile_path = path.join(tmpdir, "testfile.txt")
     with open(testile_path, "w") as file:
         file.write(TESTFILE_BODY)
@@ -25,7 +25,9 @@ def test_service(tmpdir):
     class TestDummyService(DummyService, location=tmpdir):
         pass
 
-    return TestDummyService()
+    service = TestDummyService()
+    backups.init_repo(service)
+    return service

From c493a49daa835623d8525d20ded9b74006e53810 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 20 Feb 2023 11:32:25 +0000
Subject: [PATCH 031/537] feature(backups): localfile repo

---
 selfprivacy_api/backup/providers/backblaze.py  | 2 +-
 selfprivacy_api/backup/providers/local_file.py | 9 +++++++++
 selfprivacy_api/backup/providers/memory.py     | 2 +-
 selfprivacy_api/backup/restic_backuper.py      | 2 +-
 4 files changed, 12 insertions(+), 3 deletions(-)
 create mode 100644 selfprivacy_api/backup/providers/local_file.py

diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py
index e16e9d3..9ec5eba 100644
--- a/selfprivacy_api/backup/providers/backblaze.py
+++ b/selfprivacy_api/backup/providers/backblaze.py
@@ -3,4 +3,4 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper
 
 
 class Backblaze(AbstractBackupProvider):
-    backuper = ResticBackuper("--b2-account", "--b2-key", "b2")
+    backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:")
diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py
new file mode 100644
index 0000000..5ae45bd
--- /dev/null
+++ b/selfprivacy_api/backup/providers/local_file.py
@@ -0,0 +1,9 @@
+from .provider import AbstractBackupProvider
+from selfprivacy_api.backup.restic_backuper import ResticBackuper
+
+
+class LocalFileBackup(AbstractBackupProvider):
+    backuper = ResticBackuper("", "", "memory")
+
+    def __init__(self, filename: str):
+        self.backuper = ResticBackuper("", "", f":local:{filename}/")
diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py
index 4ddf571..3f257bf 100644
--- a/selfprivacy_api/backup/providers/memory.py
+++ b/selfprivacy_api/backup/providers/memory.py
@@ -3,4 +3,4 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper
 
 
 class InMemoryBackup(AbstractBackupProvider):
-    backuper = ResticBackuper("", "", "memory")
+    backuper = ResticBackuper("", "", ":memory:")
diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index 0e36f4d..7980e46 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -24,7 +24,7 @@ class ResticBackuper(AbstractBackuper):
     def restic_repo(self, repository_name: str) -> str:
         # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone
         # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5
-        return f"rclone::{self.type}:{repository_name}/sfbackup"
+        return f"rclone:{self.type}{repository_name}/sfbackup"
 
     def rclone_args(self):
         return "rclone.args=serve restic --stdio" + self.backend_rclone_args()
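[Note] Note the shape change in restic_repo() above: the provider's `type` field now carries the entire rclone connection-string prefix (":b2:", ":memory:", ":local:<path>/"), so the colon placement moved from the format string into the field. A sketch of the repository strings this yields; the paths and names are placeholders:

    def restic_repo(backend_prefix: str, repo_name: str) -> str:
        return f"rclone:{backend_prefix}{repo_name}/sfbackup"

    assert restic_repo(":b2:", "my-bucket") == "rclone::b2:my-bucket/sfbackup"
    assert restic_repo(":memory:", "testrepo") == "rclone::memory:testrepo/sfbackup"
    assert restic_repo(":local:/tmp/repo/", "testservice") == \
        "rclone::local:/tmp/repo/testservice/sfbackup"
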
return "rclone.args=serve restic --stdio" + self.backend_rclone_args() From f1987edd9966a720bde8e1e06d44da6f842bf452 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Feb 2023 11:44:59 +0000 Subject: [PATCH 032/537] feature(backups): register localfile backend --- selfprivacy_api/backup/providers/__init__.py | 2 ++ selfprivacy_api/graphql/queries/providers.py | 1 + 2 files changed, 3 insertions(+) diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py index 66fb9e6..21c4467 100644 --- a/selfprivacy_api/backup/providers/__init__.py +++ b/selfprivacy_api/backup/providers/__init__.py @@ -3,10 +3,12 @@ from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.backup.providers.memory import InMemoryBackup +from selfprivacy_api.backup.providers.local_file import LocalFileBackup PROVIDER_MAPPING = { BackupProvider.BACKBLAZE: Backblaze, BackupProvider.MEMORY: InMemoryBackup, + BackupProvider.FILE: LocalFileBackup, } diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py index ecc7f11..2a9fcec 100644 --- a/selfprivacy_api/graphql/queries/providers.py +++ b/selfprivacy_api/graphql/queries/providers.py @@ -21,3 +21,4 @@ class BackupProvider(Enum): BACKBLAZE = "BACKBLAZE" # for testing purposes, make sure not selectable in prod. MEMORY = "MEMORY" + FILE = "FILE" From e944f4a26754d88b8f611c9faf8a68a4d0864840 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Feb 2023 11:50:52 +0000 Subject: [PATCH 033/537] test(backups): basic file backend init test --- tests/test_graphql/test_backup.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 5e702b3..33ecc65 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -39,6 +39,16 @@ def memory_backup() -> AbstractBackupProvider: return memory_provider +@pytest.fixture() +def file_backup(tmpdir) -> AbstractBackupProvider: + test_repo_path = path.join(tmpdir, "test_repo") + ProviderClass = providers.get_provider(BackupProvider.FILE) + assert ProviderClass is not None + provider = ProviderClass(test_repo_path) + assert provider is not None + return provider + + @pytest.fixture() def backups(): return Backups() @@ -50,6 +60,10 @@ def test_select_backend(): assert provider == Backblaze +def test_file_backend_init(file_backup): + file_backup.backuper.init("somerepo") + + def test_backup_simple(test_service, memory_backup): # temporarily incomplete assert test_service is not None From e091fbd4a27a9b4e9242ef8c68a7deb82629a502 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Feb 2023 13:04:39 +0000 Subject: [PATCH 034/537] feature(backups): throw an error if repo init fails --- selfprivacy_api/backup/restic_backuper.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 7980e46..5cf99cd 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -86,11 +86,15 @@ class ResticBackuper(AbstractBackuper): repo_name, "init", ) - subprocess.Popen( + with subprocess.Popen( init_command, shell=False, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - ) + ) as process_handle: + output = process_handle.communicate()[0].decode("utf-8") + if not "created restic repository" in 
output: + raise ValueError("cannot init a repo: " + output) def restore_from_backup(self, repo_name, snapshot_id, folder): """ From 06deb83b339dee536c5741140715f803bdbee2c5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Feb 2023 13:51:06 +0000 Subject: [PATCH 035/537] test(backups): localfile repo by default in tests --- selfprivacy_api/backup/__init__.py | 9 ++++++++- tests/test_graphql/test_backup.py | 5 +++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 210c998..d948d34 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -15,9 +15,16 @@ class Backups(metaclass=SingletonMetaclass): provider: AbstractBackupProvider - def __init__(self): + def __init__(self, test_repo_file: str = ""): + if test_repo_file != "": + self.set_localfile_repo(test_repo_file) self.lookup_provider() + def set_localfile_repo(self, file_path: str): + ProviderClass = get_provider(BackupProvider.FILE) + provider = ProviderClass(file_path) + self.provider = provider + def lookup_provider(self): redis_provider = Backups.load_provider_redis() if redis_provider is not None: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 33ecc65..28ef828 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -50,8 +50,9 @@ def file_backup(tmpdir) -> AbstractBackupProvider: @pytest.fixture() -def backups(): - return Backups() +def backups(tmpdir): + test_repo_path = path.join(tmpdir, "test_repo") + return Backups(test_repo_path) def test_select_backend(): From 01797cb922af89380c149b95eda800f43aa3140b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Feb 2023 16:09:01 +0000 Subject: [PATCH 036/537] fix(backups): singleton metaclass was screwing with tests --- selfprivacy_api/backup/__init__.py | 6 ++-- tests/test_graphql/test_backup.py | 55 +++++++++++++++++++----------- 2 files changed, 40 insertions(+), 21 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index d948d34..b328831 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -10,7 +10,8 @@ from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.graphql.queries.providers import BackupProvider -class Backups(metaclass=SingletonMetaclass): +# class Backups(metaclass=SingletonMetaclass): +class Backups: """A singleton controller for backups""" provider: AbstractBackupProvider @@ -18,7 +19,8 @@ class Backups(metaclass=SingletonMetaclass): def __init__(self, test_repo_file: str = ""): if test_repo_file != "": self.set_localfile_repo(test_repo_file) - self.lookup_provider() + else: + self.lookup_provider() def set_localfile_repo(self, file_path: str): ProviderClass = get_provider(BackupProvider.FILE) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 28ef828..9753217 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -1,5 +1,6 @@ import pytest import os.path as path +from os import makedirs from selfprivacy_api.services.test_service import DummyService @@ -15,17 +16,36 @@ TESTFILE_BODY = "testytest!" 
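# A quick sketch, not part of this commit, of how the fixtures this hunk
# introduces just below are meant to layer: `backups` points Backups at a
# throwaway localfile repo, `raw_dummy_service` yields a service that has
# never been backed up, and `dummy_service` additionally runs init_repo().
# A hypothetical test built on top of them (the name is illustrative) would
# start from an initialized but still empty repository:
def test_fixture_layering_sketch(backups, dummy_service):
    # the dummy_service fixture already initialized the repo, so listing
    # snapshots succeeds and returns nothing before any back_up() call
    assert backups.get_snapshots(dummy_service) == []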
REPO_NAME = "test_backup" +@pytest.fixture(scope="function") +def backups(tmpdir): + test_repo_path = path.join(tmpdir, "totallyunrelated") + return Backups(test_repo_path) + + @pytest.fixture() -def test_service(tmpdir, backups): - testile_path = path.join(tmpdir, "testfile.txt") - with open(testile_path, "w") as file: +def raw_dummy_service(tmpdir, backups): + service_dir = path.join(tmpdir, "test_service") + makedirs(service_dir) + + testfile_path = path.join(service_dir, "testfile.txt") + with open(testfile_path, "w") as file: file.write(TESTFILE_BODY) # we need this to not change get_location() much - class TestDummyService(DummyService, location=tmpdir): + class TestDummyService(DummyService, location=service_dir): pass service = TestDummyService() + return service + + +@pytest.fixture() +def dummy_service(tmpdir, backups, raw_dummy_service): + service = raw_dummy_service + repo_path = path.join(tmpdir, "test_repo") + assert not path.exists(repo_path) + # assert not repo_path + backups.init_repo(service) return service @@ -49,12 +69,6 @@ def file_backup(tmpdir) -> AbstractBackupProvider: return provider -@pytest.fixture() -def backups(tmpdir): - test_repo_path = path.join(tmpdir, "test_repo") - return Backups(test_repo_path) - - def test_select_backend(): provider = providers.get_provider(BackupProvider.BACKBLAZE) assert provider is not None @@ -65,15 +79,18 @@ def test_file_backend_init(file_backup): file_backup.backuper.init("somerepo") -def test_backup_simple(test_service, memory_backup): +def test_backup_simple_file(raw_dummy_service, file_backup): # temporarily incomplete - assert test_service is not None - assert memory_backup is not None - memory_backup.backuper.start_backup(test_service.get_location(), REPO_NAME) + service = raw_dummy_service + assert service is not None + assert file_backup is not None + + name = service.get_id() + file_backup.backuper.init(name) -def test_backup_service(test_service, backups): - backups.back_up(test_service) +def test_backup_service(dummy_service, backups): + backups.back_up(dummy_service) def test_no_repo(memory_backup): @@ -81,6 +98,6 @@ def test_no_repo(memory_backup): assert memory_backup.backuper.get_snapshots("") == [] -# def test_one_snapshot(backups, test_service): -# backups.back_up(test_service) -# assert len(backups.get_snapshots(test_service)) == 1 +# def test_one_snapshot(backups, dummy_service): +# backups.back_up(dummy_service) +# assert len(backups.get_snapshots(dummy_service)) == 1 From b3633d1a96535042f6946f455a9ef032a9b6240f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 10:07:05 +0000 Subject: [PATCH 037/537] feat(backups): throw an error on a failed backup --- selfprivacy_api/backup/restic_backuper.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 5cf99cd..99d6a81 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -65,21 +65,15 @@ class ResticBackuper(AbstractBackuper): "backup", folder, ) - subprocess.Popen( + with subprocess.Popen( backup_command, shell=False, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - ) - - # TODO: we might want to provide logging facilities - # that are reroutable for testing - # with open("/var/backup.log", "w", encoding="utf-8") as log_file: - # subprocess.Popen( - # backup_command, - # shell=False, - # stdout=log_file, - # stderr=subprocess.STDOUT, - # ) + ) as handle: + output = 
handle.communicate()[0].decode("utf-8") + if "saved" not in output: + raise ValueError("could not create a new snapshot: " + output) def init(self, repo_name): init_command = self.restic_command( From 8a1b3be9fa8c628c45b75a6b87f5f23e997a449b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 10:25:51 +0000 Subject: [PATCH 038/537] test(backups): reenable snapshot testing --- tests/test_graphql/test_backup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 9753217..193e8ab 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -98,6 +98,6 @@ def test_no_repo(memory_backup): assert memory_backup.backuper.get_snapshots("") == [] -# def test_one_snapshot(backups, dummy_service): -# backups.back_up(dummy_service) -# assert len(backups.get_snapshots(dummy_service)) == 1 +def test_one_snapshot(backups, dummy_service): + backups.back_up(dummy_service) + assert len(backups.get_snapshots(dummy_service)) == 1 From c32353fe9b8c0764ac1d7822c9e354a95b71b364 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 13:35:55 +0000 Subject: [PATCH 039/537] feat(backups): return proper snapshot structs when listing --- selfprivacy_api/backup/restic_backuper.py | 9 +++++++-- tests/test_graphql/test_backup.py | 8 +++++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 99d6a81..2a41967 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -128,9 +128,14 @@ class ResticBackuper(AbstractBackuper): def get_snapshots(self, repo_name) -> List[Snapshot]: """Get all snapshots from the repo""" - # No transformation for now snapshots = [] - for snapshot in self._load_snapshots(repo_name): + for restic_snapshot in self._load_snapshots(repo_name): + snapshot = Snapshot( + id=restic_snapshot["short_id"], + created_at=restic_snapshot["time"], + service_name=repo_name, + ) + snapshots.append(snapshot) return snapshots diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 193e8ab..2ed2f67 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -4,6 +4,8 @@ from os import makedirs from selfprivacy_api.services.test_service import DummyService +from selfprivacy_api.models.backup.snapshot import Snapshot + from selfprivacy_api.backup import Backups import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider @@ -100,4 +102,8 @@ def test_no_repo(memory_backup): def test_one_snapshot(backups, dummy_service): backups.back_up(dummy_service) - assert len(backups.get_snapshots(dummy_service)) == 1 + + snaps = backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + snap = snaps[0] + assert snap.service_name == dummy_service.get_id() From 98e1c9ebaafce7af7e170895514fe27983c014a8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 13:46:28 +0000 Subject: [PATCH 040/537] feat(backups): a better error on failed snapshot retrieval --- selfprivacy_api/backup/restic_backuper.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 2a41967..e485e01 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -141,6 +141,8 @@ class 
ResticBackuper(AbstractBackuper): def parse_snapshot_output(self, output: str) -> object: if "[" not in output: - raise ValueError("There is no json in the restic snapshot output") + raise ValueError( + "There is no json in the restic snapshot output : " + output + ) starting_index = output.find("[") return json.loads(output[starting_index:]) From dc3032eb0329caccef87dc3ddd826392f720e3c8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 14:45:11 +0000 Subject: [PATCH 041/537] feat(backups): add restore_snapshot and restore_service_from_snapshot --- selfprivacy_api/backup/__init__.py | 16 +++++++++++++++- selfprivacy_api/backup/backuper.py | 5 +++++ selfprivacy_api/backup/restic_backuper.py | 8 +++++++- 3 files changed, 27 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b328831..4261e35 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -4,12 +4,14 @@ from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass +from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service + from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.graphql.queries.providers import BackupProvider - +# Singleton has a property of being persistent between tests. I don't know what to do with this yet # class Backups(metaclass=SingletonMetaclass): class Backups: """A singleton controller for backups""" @@ -63,3 +65,15 @@ class Backups: repo_name = service.get_id() return self.provider.backuper.get_snapshots(repo_name) + + def restore_service_from_snapshot(self, service: Service, snapshot_id: str): + repo_name = service.get_id() + folder = service.get_location() + + self.provider.backuper.restore_from_backup(repo_name, snapshot_id, folder) + + # Our dummy service is not yet globally registered so this is not testable yet + def restore_snapshot(self, snapshot: Snapshot): + self.restore_service_from_snapshot( + get_service_by_id(snapshot.service_name), snapshot.id + ) diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py index 676a0a1..f4c25a8 100644 --- a/selfprivacy_api/backup/backuper.py +++ b/selfprivacy_api/backup/backuper.py @@ -20,3 +20,8 @@ class AbstractBackuper(ABC): @abstractmethod def init(self, repo_name): raise NotImplementedError + + @abstractmethod + def restore_from_backup(self, repo_name: str, snapshot_id: str, folder: str): + """Restore a target folder using a snapshot""" + raise NotImplementedError diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index e485e01..0db5a42 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -98,7 +98,13 @@ class ResticBackuper(AbstractBackuper): repo_name, "restore", snapshot_id, "--target", folder ) - subprocess.run(restore_command, shell=False) + with subprocess.Popen( + restore_command, stdout=subprocess.PIPE, shell=False + ) as handle: + + output = handle.communicate()[0].decode("utf-8") + if "restored" not in output: + raise ValueError("cannot restore a snapshot: " + output) def _load_snapshots(self, repo_name) -> object: """ From d62d7534d77dc5faeb49fce1f36c697b4ba1622c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 15:58:36 +0000 Subject: [PATCH 042/537] test(backups): 
test restoring a file --- selfprivacy_api/backup/restic_backuper.py | 11 +++++++++-- tests/test_graphql/test_backup.py | 20 ++++++++++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 0db5a42..04461ca 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -94,8 +94,15 @@ class ResticBackuper(AbstractBackuper): """ Restore from backup with restic """ + # snapshots save the path of the folder in the file system + # I do not alter the signature yet because maybe this can be + # changed with flags restore_command = self.restic_command( - repo_name, "restore", snapshot_id, "--target", folder + repo_name, + "restore", + snapshot_id, + "--target", + "/", ) with subprocess.Popen( @@ -103,7 +110,7 @@ class ResticBackuper(AbstractBackuper): ) as handle: output = handle.communicate()[0].decode("utf-8") - if "restored" not in output: + if "restoring" not in output: raise ValueError("cannot restore a snapshot: " + output) def _load_snapshots(self, repo_name) -> object: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 2ed2f67..0e8e246 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -1,6 +1,8 @@ import pytest import os.path as path from os import makedirs +from os import remove +from os import listdir from selfprivacy_api.services.test_service import DummyService @@ -107,3 +109,21 @@ def test_one_snapshot(backups, dummy_service): assert len(snaps) == 1 snap = snaps[0] assert snap.service_name == dummy_service.get_id() + + +def test_restore(backups, dummy_service): + service_folder = dummy_service.get_location() + file_to_nuke = listdir(service_folder)[0] + assert file_to_nuke is not None + path_to_nuke = path.join(service_folder, file_to_nuke) + + backups.back_up(dummy_service) + snap = backups.get_snapshots(dummy_service)[0] + assert snap is not None + + assert path.exists(path_to_nuke) + remove(path_to_nuke) + assert not path.exists(path_to_nuke) + + backups.restore_service_from_snapshot(dummy_service, snap.id) + assert path.exists(path_to_nuke) From 23ee958bfb44196d76cb4f98f7801d67c7ae115a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 18:48:08 +0000 Subject: [PATCH 043/537] feat(backups): sizing up snapshots --- selfprivacy_api/backup/__init__.py | 10 +++++++ selfprivacy_api/backup/backuper.py | 4 +++ selfprivacy_api/backup/restic_backuper.py | 35 ++++++++++++++++++++--- tests/test_graphql/test_backup.py | 8 ++++++ 4 files changed, 53 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 4261e35..903e38b 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -77,3 +77,13 @@ class Backups: self.restore_service_from_snapshot( get_service_by_id(snapshot.service_name), snapshot.id ) + + def service_snapshot_size(self, service: Service, snapshot_id: str) -> float: + repo_name = service.get_id() + return self.provider.backuper.restored_size(repo_name, snapshot_id) + + # Our dummy service is not yet globally registered so this is not testable yet + def snapshot_restored_size(self, snapshot: Snapshot) -> float: + return self.service_snapshot_size( + get_service_by_id(snapshot.service_name), snapshot.id + ) diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py index f4c25a8..5d9b1c3 100644 --- 
a/selfprivacy_api/backup/backuper.py +++ b/selfprivacy_api/backup/backuper.py @@ -25,3 +25,7 @@ class AbstractBackuper(ABC): def restore_from_backup(self, repo_name: str, snapshot_id: str, folder: str): """Restore a target folder using a snapshot""" raise NotImplementedError + + @abstractmethod + def restored_size(self, repo_name, snapshot_id) -> float: + raise NotImplementedError diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 04461ca..a4a4830 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -90,6 +90,25 @@ class ResticBackuper(AbstractBackuper): if not "created restic repository" in output: raise ValueError("cannot init a repo: " + output) + def restored_size(self, repo_name, snapshot_id) -> float: + """ + Size of a snapshot + """ + command = self.restic_command( + repo_name, + "stats", + snapshot_id, + "--json", + ) + + with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle: + output = handle.communicate()[0].decode("utf-8") + try: + parsed_output = self.parse_json_output(output) + return parsed_output["total_size"] + except ValueError as e: + raise ValueError("cannot restore a snapshot: " + output) from e + def restore_from_backup(self, repo_name, snapshot_id, folder): """ Restore from backup with restic @@ -135,7 +154,7 @@ class ResticBackuper(AbstractBackuper): if "Is there a repository at the following location?" in output: raise ValueError("No repository! : " + output) try: - return self.parse_snapshot_output(output) + return self.parse_json_output(output) except ValueError as e: raise ValueError("Cannot load snapshots: ") from e @@ -152,10 +171,18 @@ class ResticBackuper(AbstractBackuper): snapshots.append(snapshot) return snapshots - def parse_snapshot_output(self, output: str) -> object: - if "[" not in output: + def parse_json_output(self, output: str) -> object: + indices = [ + output.find("["), + output.find("{"), + ] + indices = [x for x in indices if x != -1] + + if indices == []: raise ValueError( "There is no json in the restic snapshot output : " + output ) - starting_index = output.find("[") + + starting_index = min(indices) + return json.loads(output[starting_index:]) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 0e8e246..4c6b2dd 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -127,3 +127,11 @@ def test_restore(backups, dummy_service): backups.restore_service_from_snapshot(dummy_service, snap.id) assert path.exists(path_to_nuke) + + +def test_sizing(backups, dummy_service): + backups.back_up(dummy_service) + snap = backups.get_snapshots(dummy_service)[0] + size = backups.service_snapshot_size(dummy_service, snap.id) + assert size is not None + assert size > 0 From 4e329299e015d5b0fc95a612c466e2c717544386 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 19:28:04 +0000 Subject: [PATCH 044/537] feat(backups): local secret generation and storage --- selfprivacy_api/backup/local_secret.py | 29 ++++++++++++++++++-------- 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py index f2ebf06..02d78a4 100644 --- a/selfprivacy_api/backup/local_secret.py +++ b/selfprivacy_api/backup/local_secret.py @@ -2,29 +2,40 @@ Separated out for circular dependency reasons """ +from __future__ import annotations +import secrets + +from selfprivacy_api.utils.redis_pool 
import RedisPool + + REDIS_KEY = "backup:local_secret" +redis = RedisPool().get_connection() + class LocalBackupSecret: @staticmethod def get(): """A secret string which backblaze/other clouds do not know. Serves as encryption key. - TODO: generate and save in redis """ - return "TEMPORARY_SECRET" + if not LocalBackupSecret.exists(): + LocalBackupSecret.reset() + return redis.get(REDIS_KEY) @staticmethod def reset(): - pass - - def exists(): - pass + new_secret = LocalBackupSecret._generate() + LocalBackupSecret._store(new_secret) @staticmethod - def _generate(): - pass + def exists() -> bool: + return redis.exists(REDIS_KEY) + + @staticmethod + def _generate() -> str: + return secrets.token_urlsafe(256) @staticmethod def _store(secret: str): - pass + redis.set(REDIS_KEY, secret) From ea6a04747886710603021f12ac7a0cb3785bd8d8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 10 Mar 2023 14:14:41 +0000 Subject: [PATCH 045/537] feature(backups): load from json --- selfprivacy_api/backup/__init__.py | 42 +++++++++++++++++---- tests/conftest.py | 20 ++++++++++ tests/data/turned_on.json | 60 ++++++++++++++++++++++++++++++ tests/test_graphql/test_backup.py | 10 +++++ 4 files changed, 124 insertions(+), 8 deletions(-) create mode 100644 tests/data/turned_on.json diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 903e38b..f34f43d 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -3,6 +3,7 @@ from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass +from selfprivacy_api.utils import ReadUserData from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service @@ -11,7 +12,9 @@ from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.graphql.queries.providers import BackupProvider -# Singleton has a property of being persistent between tests. I don't know what to do with this yet + +# Singleton has a property of being persistent between tests. 
+# I don't know what to do with this yet # class Backups(metaclass=SingletonMetaclass): class Backups: """A singleton controller for backups""" @@ -22,24 +25,28 @@ class Backups: if test_repo_file != "": self.set_localfile_repo(test_repo_file) else: - self.lookup_provider() + self.provider = self.lookup_provider() def set_localfile_repo(self, file_path: str): ProviderClass = get_provider(BackupProvider.FILE) provider = ProviderClass(file_path) self.provider = provider - def lookup_provider(self): + @staticmethod + def construct_provider(kind: str, login: str, key: str): + provider_class = get_provider(BackupProvider[kind]) + return provider_class(login=login, key=key) + + def lookup_provider(self) -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() if redis_provider is not None: - self.provider = redis_provider + return redis_provider json_provider = Backups.load_provider_json() if json_provider is not None: - self.provider = json_provider + return json_provider - provider_class = get_provider(BackupProvider.MEMORY) - self.provider = provider_class(login="", key="") + return Backups.construct_provider("MEMORY", login="", key="") @staticmethod def load_provider_redis() -> AbstractBackupProvider: @@ -47,7 +54,26 @@ class Backups: @staticmethod def load_provider_json() -> AbstractBackupProvider: - pass + with ReadUserData() as user_data: + account = "" + key = "" + + if "backup" not in user_data.keys(): + if "backblaze" in user_data.keys(): + account = user_data["backblaze"]["accountId"] + key = user_data["backblaze"]["accountKey"] + provider_string = "BACKBLAZE" + return Backups.construct_provider( + kind=provider_string, login=account, key=key + ) + return None + + account = user_data["backup"]["accountId"] + key = user_data["backup"]["accountKey"] + provider_string = user_data["backup"]["provider"] + return Backups.construct_provider( + kind=provider_string, login=account, key=key + ) def back_up(self, service: Service): folder = service.get_location() diff --git a/tests/conftest.py b/tests/conftest.py index ea7a66a..7e8ae11 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,6 +3,8 @@ # pylint: disable=unused-argument import os import pytest +from os import path + from fastapi.testclient import TestClient @@ -10,6 +12,10 @@ def pytest_generate_tests(metafunc): os.environ["TEST_MODE"] = "true" +def global_data_dir(): + return path.join(path.dirname(__file__), "data") + + @pytest.fixture def tokens_file(mocker, shared_datadir): """Mock tokens file.""" @@ -26,6 +32,20 @@ def jobs_file(mocker, shared_datadir): return mock +@pytest.fixture +def generic_userdata(mocker, tmpdir): + filename = "turned_on.json" + source_path = path.join(global_data_dir(), filename) + userdata_path = path.join(tmpdir, filename) + + with open(userdata_path, "w") as file: + with open(source_path, "r") as source: + file.write(source.read()) + + mock = mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=userdata_path) + return mock + + @pytest.fixture def huey_database(mocker, shared_datadir): """Mock huey database.""" diff --git a/tests/data/turned_on.json b/tests/data/turned_on.json new file mode 100644 index 0000000..c6b758b --- /dev/null +++ b/tests/data/turned_on.json @@ -0,0 +1,60 @@ +{ + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + 
"databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": true + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "jitsi": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + } +} diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 4c6b2dd..6d21c15 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -73,6 +73,16 @@ def file_backup(tmpdir) -> AbstractBackupProvider: return provider +def test_config_load(generic_userdata): + backups = Backups() + provider = backups.provider + + assert provider is not None + assert isinstance(provider, Backblaze) + assert provider.login == "ID" + assert provider.key == "KEY" + + def test_select_backend(): provider = providers.get_provider(BackupProvider.BACKBLAZE) assert provider is not None From a0afe63b3d0b5b9dea50f37ef992ff1f778452e9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 13 Mar 2023 19:00:39 +0000 Subject: [PATCH 046/537] refactor(backups): redis model storage utils --- selfprivacy_api/utils/redis_model_storage.py | 30 ++++++++++++++++ tests/test_model_storage.py | 36 ++++++++++++++++++++ 2 files changed, 66 insertions(+) create mode 100644 selfprivacy_api/utils/redis_model_storage.py create mode 100644 tests/test_model_storage.py diff --git a/selfprivacy_api/utils/redis_model_storage.py b/selfprivacy_api/utils/redis_model_storage.py new file mode 100644 index 0000000..51faff7 --- /dev/null +++ b/selfprivacy_api/utils/redis_model_storage.py @@ -0,0 +1,30 @@ +from datetime import datetime +from typing import Optional + + +def store_model_as_hash(redis, redis_key, model): + for key, value in model.dict().items(): + if isinstance(value, datetime): + value = value.isoformat() + redis.hset(redis_key, key, str(value)) + + +def hash_as_model(redis, redis_key: str, model_class): + token_dict = _model_dict_from_hash(redis, redis_key) + if token_dict is not None: + return model_class(**token_dict) + return None + + +def _prepare_model_dict(d: dict): + for key in d.keys(): + if d[key] == "None": + d[key] = None + + +def _model_dict_from_hash(redis, redis_key: str) -> Optional[dict]: + if redis.exists(redis_key): + token_dict = redis.hgetall(redis_key) + _prepare_model_dict(token_dict) + return token_dict + return None diff --git a/tests/test_model_storage.py b/tests/test_model_storage.py new file mode 100644 index 0000000..d26fabb --- /dev/null +++ b/tests/test_model_storage.py @@ -0,0 +1,36 @@ +import pytest + +from pydantic import BaseModel +from datetime import datetime +from typing import Optional + +from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model +from selfprivacy_api.utils.redis_pool import RedisPool + +TEST_KEY = "model_storage" +redis = RedisPool().get_connection() + +@pytest.fixture() +def clean_redis(): + redis.delete(TEST_KEY) + + +class DummyModel(BaseModel): + name: str + date: Optional[datetime] + +def test_store_retrieve(): + model = DummyModel( + name= "test", + date= datetime.now() + ) 
+ store_model_as_hash(redis, TEST_KEY, model) + assert hash_as_model(redis, TEST_KEY, DummyModel) == model + +def test_store_retrieve_none(): + model = DummyModel( + name= "test", + date= None + ) + store_model_as_hash(redis, TEST_KEY, model) + assert hash_as_model(redis, TEST_KEY, DummyModel) == model From 6524c981313016cb3f68ca1cbb3c3040eabe9e75 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 13 Mar 2023 19:02:03 +0000 Subject: [PATCH 047/537] refactor(backups): add a provider model for redis storage --- selfprivacy_api/models/backup/provider.py | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 selfprivacy_api/models/backup/provider.py diff --git a/selfprivacy_api/models/backup/provider.py b/selfprivacy_api/models/backup/provider.py new file mode 100644 index 0000000..e454c39 --- /dev/null +++ b/selfprivacy_api/models/backup/provider.py @@ -0,0 +1,9 @@ +from pydantic import BaseModel + +"""for storage in Redis""" + + +class BackupProviderModel(BaseModel): + kind: str + login: str + key: str From 4b2cecac8f0d53bbe07bc4118edec7d64c11e458 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 13 Mar 2023 19:03:41 +0000 Subject: [PATCH 048/537] feature(backups): provider storage and retrieval --- selfprivacy_api/backup/__init__.py | 37 +++++++++++++++++--- selfprivacy_api/backup/providers/__init__.py | 6 ++++ tests/test_graphql/test_backup.py | 30 +++++++++++++++- 3 files changed, 67 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f34f43d..d24872d 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,17 +1,25 @@ from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.models.backup.provider import BackupProviderModel from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass from selfprivacy_api.utils import ReadUserData +from selfprivacy_api.utils.redis_pool import RedisPool +from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model + from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service from selfprivacy_api.backup.providers.provider import AbstractBackupProvider -from selfprivacy_api.backup.providers import get_provider +from selfprivacy_api.backup.providers import get_provider, get_kind from selfprivacy_api.graphql.queries.providers import BackupProvider +REDIS_PROVIDER_KEY = "backups:provider" + +redis = RedisPool().get_connection() + # Singleton has a property of being persistent between tests. 
# I don't know what to do with this yet @@ -37,6 +45,29 @@ class Backups: provider_class = get_provider(BackupProvider[kind]) return provider_class(login=login, key=key) + @staticmethod + def store_provider_redis(provider: AbstractBackupProvider): + store_model_as_hash( + redis, + REDIS_PROVIDER_KEY, + BackupProviderModel( + kind=get_kind(provider), login=provider.login, key=provider.key + ), + ) + + @staticmethod + def load_provider_redis() -> AbstractBackupProvider: + provider_model = hash_as_model(redis, REDIS_PROVIDER_KEY, BackupProviderModel) + if provider_model is None: + return None + return Backups.construct_provider( + provider_model.kind, provider_model.login, provider_model.key + ) + + @staticmethod + def reset(): + redis.delete(REDIS_PROVIDER_KEY) + def lookup_provider(self) -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() if redis_provider is not None: @@ -48,10 +79,6 @@ class Backups: return Backups.construct_provider("MEMORY", login="", key="") - @staticmethod - def load_provider_redis() -> AbstractBackupProvider: - pass - @staticmethod def load_provider_json() -> AbstractBackupProvider: with ReadUserData() as user_data: diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py index 21c4467..5428e80 100644 --- a/selfprivacy_api/backup/providers/__init__.py +++ b/selfprivacy_api/backup/providers/__init__.py @@ -14,3 +14,9 @@ PROVIDER_MAPPING = { def get_provider(provider_type: BackupProvider) -> AbstractBackupProvider: return PROVIDER_MAPPING[provider_type] + + +def get_kind(provider: AbstractBackupProvider) -> str: + for key, value in PROVIDER_MAPPING.items(): + if isinstance(provider, value): + return key.value diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 6d21c15..f6f3526 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -23,7 +23,16 @@ REPO_NAME = "test_backup" @pytest.fixture(scope="function") def backups(tmpdir): test_repo_path = path.join(tmpdir, "totallyunrelated") - return Backups(test_repo_path) + backups = Backups(test_repo_path) + backups.reset() + return backups + + +@pytest.fixture() +def backups_backblaze(generic_userdata): + backups = Backups() + backups.reset() + return backups @pytest.fixture() @@ -75,6 +84,7 @@ def file_backup(tmpdir) -> AbstractBackupProvider: def test_config_load(generic_userdata): backups = Backups() + backups.reset() provider = backups.provider assert provider is not None @@ -145,3 +155,21 @@ def test_sizing(backups, dummy_service): size = backups.service_snapshot_size(dummy_service, snap.id) assert size is not None assert size > 0 + + +def test_redis_storage(backups_backblaze): + backups = Backups() + backups.reset() + provider = backups.provider + + assert provider is not None + + assert isinstance(provider, Backblaze) + assert provider.login == "ID" + assert provider.key == "KEY" + + backups.store_provider_redis(provider) + restored_provider = backups.load_provider_redis() + assert isinstance(restored_provider, Backblaze) + assert restored_provider.login == "ID" + assert restored_provider.key == "KEY" From a2dd47130b47cfdff09234e7b7f8eb5129dd285c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Tue, 14 Mar 2023 00:39:15 +0000 Subject: [PATCH 049/537] feature(backups): repo init tracking --- selfprivacy_api/backup/__init__.py | 26 ++++++++++++++++ selfprivacy_api/backup/backuper.py | 4 +++ selfprivacy_api/backup/restic_backuper.py | 36 +++++++++++++++++++---- 
tests/test_graphql/test_backup.py | 28 ++++++++++++++++++ 4 files changed, 88 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index d24872d..550555d 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -17,6 +17,7 @@ from selfprivacy_api.backup.providers import get_provider, get_kind from selfprivacy_api.graphql.queries.providers import BackupProvider REDIS_PROVIDER_KEY = "backups:provider" +REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" redis = RedisPool().get_connection() @@ -67,6 +68,8 @@ class Backups: @staticmethod def reset(): redis.delete(REDIS_PROVIDER_KEY) + for key in redis.keys(REDIS_INITTED_CACHE_PREFIX + "*"): + redis.delete(key) def lookup_provider(self) -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() @@ -113,6 +116,29 @@ class Backups: def init_repo(self, service: Service): repo_name = service.get_id() self.provider.backuper.init(repo_name) + self._redis_mark_as_init(service) + + def _has_redis_init_mark(self, service: Service) -> bool: + repo_name = service.get_id() + if redis.exists(REDIS_INITTED_CACHE_PREFIX + repo_name): + return True + return False + + def _redis_mark_as_init(self, service: Service): + repo_name = service.get_id() + redis.set(REDIS_INITTED_CACHE_PREFIX + repo_name, 1) + + def is_initted(self, service: Service) -> bool: + repo_name = service.get_id() + if self._has_redis_init_mark(service): + return True + + initted = self.provider.backuper.is_initted(repo_name) + if initted: + self._redis_mark_as_init(service) + return True + + return False def get_snapshots(self, service: Service) -> List[Snapshot]: repo_name = service.get_id() diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py index 5d9b1c3..5bba9d5 100644 --- a/selfprivacy_api/backup/backuper.py +++ b/selfprivacy_api/backup/backuper.py @@ -8,6 +8,10 @@ class AbstractBackuper(ABC): def __init__(self): pass + @abstractmethod + def is_initted(self, repo_name: str) -> bool: + raise NotImplementedError + @abstractmethod def start_backup(self, folder: str, repo_name: str): raise NotImplementedError diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index a4a4830..8d9ac99 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -90,6 +90,20 @@ class ResticBackuper(AbstractBackuper): if not "created restic repository" in output: raise ValueError("cannot init a repo: " + output) + def is_initted(self, repo_name: str) -> bool: + command = self.restic_command( + repo_name, + "check", + "--json", + ) + + with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle: + output = handle.communicate()[0].decode("utf-8") + if not self.has_json(output): + return False + # raise NotImplementedError("error(big): " + output) + return True + def restored_size(self, repo_name, snapshot_id) -> float: """ Size of a snapshot @@ -172,6 +186,16 @@ class ResticBackuper(AbstractBackuper): return snapshots def parse_json_output(self, output: str) -> object: + starting_index = self.json_start(output) + + if starting_index == -1: + raise ValueError( + "There is no json in the restic snapshot output : " + output + ) + + return json.loads(output[starting_index:]) + + def json_start(self, output: str) -> int: indices = [ output.find("["), output.find("{"), @@ -179,10 +203,10 @@ class ResticBackuper(AbstractBackuper): indices = [x for x in indices if x 
!= -1] if indices == []: - raise ValueError( - "There is no json in the restic snapshot output : " + output - ) + return -1 + return min(indices) - starting_index = min(indices) - - return json.loads(output[starting_index:]) + def has_json(self, output: str) -> bool: + if self.json_start(output) == -1: + return False + return True diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index f6f3526..233014f 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -173,3 +173,31 @@ def test_redis_storage(backups_backblaze): assert isinstance(restored_provider, Backblaze) assert restored_provider.login == "ID" assert restored_provider.key == "KEY" + + +# lowlevel +def test_init_tracking_caching(backups, raw_dummy_service): + assert backups._has_redis_init_mark(raw_dummy_service) is False + + backups._redis_mark_as_init(raw_dummy_service) + + assert backups._has_redis_init_mark(raw_dummy_service) is True + assert backups.is_initted(raw_dummy_service) is True + + +# lowlevel +def test_init_tracking_caching2(backups, raw_dummy_service): + assert backups._has_redis_init_mark(raw_dummy_service) is False + + backups.init_repo(raw_dummy_service) + + assert backups._has_redis_init_mark(raw_dummy_service) is True + + +# only public API +def test_init_tracking(backups, raw_dummy_service): + assert backups.is_initted(raw_dummy_service) is False + + backups.init_repo(raw_dummy_service) + + assert backups.is_initted(raw_dummy_service) is True From 144e4e5e915e38c37e31206da2254e09431d1aa5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 29 Mar 2023 11:15:38 +0000 Subject: [PATCH 050/537] refactor(backups): make backups stateless --- selfprivacy_api/backup/__init__.py | 88 ++++++++++++------- .../backup/providers/local_file.py | 4 +- selfprivacy_api/backup/providers/provider.py | 2 +- tests/test_graphql/test_backup.py | 64 +++++++------- 4 files changed, 92 insertions(+), 66 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 550555d..9ac8895 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -16,6 +16,9 @@ from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider, get_kind from selfprivacy_api.graphql.queries.providers import BackupProvider +# a hack to store file path. 
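# aside, not from the commit: Backups is being made fully static below, so
# no instance survives to remember the localfile test-repo path; stashing
# the path under this Redis key is what lets construct_provider("FILE", ...)
# rebuild a LocalFileBackup from persistent state alone.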
+REDIS_REPO_PATH_KEY = "backups:test_repo_path" + REDIS_PROVIDER_KEY = "backups:provider" REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" @@ -30,20 +33,30 @@ class Backups: provider: AbstractBackupProvider - def __init__(self, test_repo_file: str = ""): - if test_repo_file != "": - self.set_localfile_repo(test_repo_file) - else: - self.provider = self.lookup_provider() - - def set_localfile_repo(self, file_path: str): + @staticmethod + def set_localfile_repo(file_path: str): ProviderClass = get_provider(BackupProvider.FILE) provider = ProviderClass(file_path) - self.provider = provider + redis.set(REDIS_REPO_PATH_KEY, file_path) + Backups.store_provider_redis(provider) + + @staticmethod + def provider(): + return Backups.lookup_provider() + + @staticmethod + def set_provider(kind: str, login: str, key: str): + provider = Backups.construct_provider(kind, login, key) + Backups.store_provider_redis(provider) @staticmethod def construct_provider(kind: str, login: str, key: str): provider_class = get_provider(BackupProvider[kind]) + + if kind == "FILE": + path = redis.get(REDIS_REPO_PATH_KEY) + return provider_class(path) + return provider_class(login=login, key=key) @staticmethod @@ -68,19 +81,24 @@ class Backups: @staticmethod def reset(): redis.delete(REDIS_PROVIDER_KEY) + redis.delete(REDIS_REPO_PATH_KEY) for key in redis.keys(REDIS_INITTED_CACHE_PREFIX + "*"): redis.delete(key) - def lookup_provider(self) -> AbstractBackupProvider: + @staticmethod + def lookup_provider() -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() if redis_provider is not None: return redis_provider json_provider = Backups.load_provider_json() if json_provider is not None: + Backups.store_provider_redis(json_provider) return json_provider - return Backups.construct_provider("MEMORY", login="", key="") + memory_provider = Backups.construct_provider("MEMORY", login="", key="") + Backups.store_provider_redis(memory_provider) + return memory_provider @staticmethod def load_provider_json() -> AbstractBackupProvider: @@ -105,64 +123,74 @@ class Backups: kind=provider_string, login=account, key=key ) - def back_up(self, service: Service): + @staticmethod + def back_up(service: Service): folder = service.get_location() repo_name = service.get_id() service.pre_backup() - self.provider.backuper.start_backup(folder, repo_name) + Backups.provider().backuper.start_backup(folder, repo_name) service.post_restore() - def init_repo(self, service: Service): + @staticmethod + def init_repo(service: Service): repo_name = service.get_id() - self.provider.backuper.init(repo_name) - self._redis_mark_as_init(service) + Backups.provider().backuper.init(repo_name) + Backups._redis_mark_as_init(service) - def _has_redis_init_mark(self, service: Service) -> bool: + @staticmethod + def _has_redis_init_mark(service: Service) -> bool: repo_name = service.get_id() if redis.exists(REDIS_INITTED_CACHE_PREFIX + repo_name): return True return False - def _redis_mark_as_init(self, service: Service): + @staticmethod + def _redis_mark_as_init(service: Service): repo_name = service.get_id() redis.set(REDIS_INITTED_CACHE_PREFIX + repo_name, 1) - def is_initted(self, service: Service) -> bool: + @staticmethod + def is_initted(service: Service) -> bool: repo_name = service.get_id() - if self._has_redis_init_mark(service): + if Backups._has_redis_init_mark(service): return True - initted = self.provider.backuper.is_initted(repo_name) + initted = Backups.provider().backuper.is_initted(repo_name) if initted: - 
self._redis_mark_as_init(service) + Backups._redis_mark_as_init(service) return True return False - def get_snapshots(self, service: Service) -> List[Snapshot]: + @staticmethod + def get_snapshots(service: Service) -> List[Snapshot]: repo_name = service.get_id() - return self.provider.backuper.get_snapshots(repo_name) + return Backups.provider().backuper.get_snapshots(repo_name) - def restore_service_from_snapshot(self, service: Service, snapshot_id: str): + @staticmethod + def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() folder = service.get_location() - self.provider.backuper.restore_from_backup(repo_name, snapshot_id, folder) + Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folder) # Our dummy service is not yet globally registered so this is not testable yet - def restore_snapshot(self, snapshot: Snapshot): - self.restore_service_from_snapshot( + @staticmethod + def restore_snapshot(snapshot: Snapshot): + Backups.restore_service_from_snapshot( get_service_by_id(snapshot.service_name), snapshot.id ) - def service_snapshot_size(self, service: Service, snapshot_id: str) -> float: + @staticmethod + def service_snapshot_size(service: Service, snapshot_id: str) -> float: repo_name = service.get_id() - return self.provider.backuper.restored_size(repo_name, snapshot_id) + return Backups.provider().backuper.restored_size(repo_name, snapshot_id) # Our dummy service is not yet globally registered so this is not testable yet - def snapshot_restored_size(self, snapshot: Snapshot) -> float: + @staticmethod + def snapshot_restored_size(snapshot: Snapshot) -> float: return self.service_snapshot_size( get_service_by_id(snapshot.service_name), snapshot.id ) diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index 5ae45bd..bdd9213 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -5,5 +5,7 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class LocalFileBackup(AbstractBackupProvider): backuper = ResticBackuper("", "", "memory") - def __init__(self, filename: str): + # login and key args are for compatibility with generic provider methods. They are ignored. 
+ def __init__(self, filename: str, login: str = "", key: str = ""): + super().__init__() self.backuper = ResticBackuper("", "", f":local:{filename}/") diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index 0b57528..017c03d 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -12,6 +12,6 @@ class AbstractBackupProvider(ABC): def backuper(self) -> AbstractBackuper: raise NotImplementedError - def __init__(self, login, key): + def __init__(self, login="", key=""): self.login = login self.key = key diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 233014f..2e6c6d6 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -22,17 +22,15 @@ REPO_NAME = "test_backup" @pytest.fixture(scope="function") def backups(tmpdir): + Backups.reset() + test_repo_path = path.join(tmpdir, "totallyunrelated") - backups = Backups(test_repo_path) - backups.reset() - return backups + Backups.set_localfile_repo(test_repo_path) @pytest.fixture() def backups_backblaze(generic_userdata): - backups = Backups() - backups.reset() - return backups + Backups.reset() @pytest.fixture() @@ -59,7 +57,7 @@ def dummy_service(tmpdir, backups, raw_dummy_service): assert not path.exists(repo_path) # assert not repo_path - backups.init_repo(service) + Backups.init_repo(service) return service @@ -83,9 +81,8 @@ def file_backup(tmpdir) -> AbstractBackupProvider: def test_config_load(generic_userdata): - backups = Backups() - backups.reset() - provider = backups.provider + Backups.reset() + provider = Backups.provider() assert provider is not None assert isinstance(provider, Backblaze) @@ -114,7 +111,7 @@ def test_backup_simple_file(raw_dummy_service, file_backup): def test_backup_service(dummy_service, backups): - backups.back_up(dummy_service) + Backups.back_up(dummy_service) def test_no_repo(memory_backup): @@ -123,9 +120,9 @@ def test_no_repo(memory_backup): def test_one_snapshot(backups, dummy_service): - backups.back_up(dummy_service) + Backups.back_up(dummy_service) - snaps = backups.get_snapshots(dummy_service) + snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 snap = snaps[0] assert snap.service_name == dummy_service.get_id() @@ -137,30 +134,29 @@ def test_restore(backups, dummy_service): assert file_to_nuke is not None path_to_nuke = path.join(service_folder, file_to_nuke) - backups.back_up(dummy_service) - snap = backups.get_snapshots(dummy_service)[0] + Backups.back_up(dummy_service) + snap = Backups.get_snapshots(dummy_service)[0] assert snap is not None assert path.exists(path_to_nuke) remove(path_to_nuke) assert not path.exists(path_to_nuke) - backups.restore_service_from_snapshot(dummy_service, snap.id) + Backups.restore_service_from_snapshot(dummy_service, snap.id) assert path.exists(path_to_nuke) def test_sizing(backups, dummy_service): - backups.back_up(dummy_service) - snap = backups.get_snapshots(dummy_service)[0] - size = backups.service_snapshot_size(dummy_service, snap.id) + Backups.back_up(dummy_service) + snap = Backups.get_snapshots(dummy_service)[0] + size = Backups.service_snapshot_size(dummy_service, snap.id) assert size is not None assert size > 0 def test_redis_storage(backups_backblaze): - backups = Backups() - backups.reset() - provider = backups.provider + Backups.reset() + provider = Backups.provider() assert provider is not None @@ -168,8 +164,8 @@ def test_redis_storage(backups_backblaze): assert 
provider.login == "ID" assert provider.key == "KEY" - backups.store_provider_redis(provider) - restored_provider = backups.load_provider_redis() + Backups.store_provider_redis(provider) + restored_provider = Backups.load_provider_redis() assert isinstance(restored_provider, Backblaze) assert restored_provider.login == "ID" assert restored_provider.key == "KEY" @@ -177,27 +173,27 @@ def test_redis_storage(backups_backblaze): # lowlevel def test_init_tracking_caching(backups, raw_dummy_service): - assert backups._has_redis_init_mark(raw_dummy_service) is False + assert Backups._has_redis_init_mark(raw_dummy_service) is False - backups._redis_mark_as_init(raw_dummy_service) + Backups._redis_mark_as_init(raw_dummy_service) - assert backups._has_redis_init_mark(raw_dummy_service) is True - assert backups.is_initted(raw_dummy_service) is True + assert Backups._has_redis_init_mark(raw_dummy_service) is True + assert Backups.is_initted(raw_dummy_service) is True # lowlevel def test_init_tracking_caching2(backups, raw_dummy_service): - assert backups._has_redis_init_mark(raw_dummy_service) is False + assert Backups._has_redis_init_mark(raw_dummy_service) is False - backups.init_repo(raw_dummy_service) + Backups.init_repo(raw_dummy_service) - assert backups._has_redis_init_mark(raw_dummy_service) is True + assert Backups._has_redis_init_mark(raw_dummy_service) is True # only public API def test_init_tracking(backups, raw_dummy_service): - assert backups.is_initted(raw_dummy_service) is False + assert Backups.is_initted(raw_dummy_service) is False - backups.init_repo(raw_dummy_service) + Backups.init_repo(raw_dummy_service) - assert backups.is_initted(raw_dummy_service) is True + assert Backups.is_initted(raw_dummy_service) is True From 772b499b460f206eb8ed28e02ae4c6e0a14aaebd Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 29 Mar 2023 11:45:52 +0000 Subject: [PATCH 051/537] feature(backups): huey task to back up --- selfprivacy_api/backup/tasks.py | 9 +++++++++ tests/test_graphql/test_backup.py | 10 ++++++++++ 2 files changed, 19 insertions(+) create mode 100644 selfprivacy_api/backup/tasks.py diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py new file mode 100644 index 0000000..f1715cc --- /dev/null +++ b/selfprivacy_api/backup/tasks.py @@ -0,0 +1,9 @@ +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.services.service import Service +from selfprivacy_api.backup import Backups + +# huey tasks need to return something +@huey.task() +def start_backup(service: Service) -> bool: + Backups.back_up(service) + return True diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 2e6c6d6..7683d4d 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -13,8 +13,10 @@ import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze + from selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.backup.tasks import start_backup TESTFILE_BODY = "testytest!" 
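# Context for the task test added below; a self-contained sketch, not part
# of this commit. Calling a @huey.task() function does not run it inline:
# it enqueues the task and returns a Result handle, and handle(blocking=True)
# waits for execution and returns the task's value. Illustrated with an
# in-memory huey in immediate mode, assuming the test environment configures
# huey similarly:
from huey import MemoryHuey

sketch_huey = MemoryHuey(immediate=True)  # immediate mode runs tasks synchronously

@sketch_huey.task()
def add(a, b):
    return a + b

handle = add(1, 2)                 # a Result handle, not the integer 3
assert handle(blocking=True) == 3  # same call pattern as start_backup below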
REPO_NAME = "test_backup" @@ -197,3 +199,11 @@ def test_init_tracking(backups, raw_dummy_service): Backups.init_repo(raw_dummy_service) assert Backups.is_initted(raw_dummy_service) is True + + +def test_backup_service_task(backups, dummy_service): + handle = start_backup(dummy_service) + handle(blocking=True) + + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 From 2eb64db19925de206841f0d1cfa00ca4d4ed76d5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 16:29:06 +0000 Subject: [PATCH 052/537] feature(backups): return snapshot info from backup function --- selfprivacy_api/backup/restic_backuper.py | 39 +++++++++++++++++++---- 1 file changed, 33 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 8d9ac99..73292f0 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -1,5 +1,6 @@ import subprocess import json +import datetime from typing import List @@ -63,6 +64,7 @@ class ResticBackuper(AbstractBackuper): backup_command = self.restic_command( repo_name, "backup", + "--json", folder, ) with subprocess.Popen( @@ -72,8 +74,27 @@ class ResticBackuper(AbstractBackuper): stderr=subprocess.STDOUT, ) as handle: output = handle.communicate()[0].decode("utf-8") - if "saved" not in output: - raise ValueError("could not create a new snapshot: " + output) + try: + messages = self.parse_json_output(output) + return ResticBackuper._snapshot_from_backup_messages( + messages, repo_name + ) + except ValueError as e: + raise ValueError("could not create a snapshot: ") from e + + @staticmethod + def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: + for message in messages: + if message["message_type"] == "summary": + return ResticBackuper._snapshot_from_fresh_summary(message, repo_name) + + @staticmethod + def _snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot: + return Snapshot( + id=message["snapshot_id"], + created_at=datetime.datetime.now(datetime.timezone.utc), + service_name=repo_name, + ) def init(self, repo_name): init_command = self.restic_command( @@ -189,11 +210,17 @@ class ResticBackuper(AbstractBackuper): starting_index = self.json_start(output) if starting_index == -1: - raise ValueError( - "There is no json in the restic snapshot output : " + output - ) + raise ValueError("There is no json in the restic output : " + output) - return json.loads(output[starting_index:]) + truncated_output = output[starting_index:] + json_messages = truncated_output.splitlines() + if len(json_messages) == 1: + return json.loads(truncated_output) + + result_array = [] + for message in json_messages: + result_array.append(json.loads(message)) + return result_array def json_start(self, output: str) -> int: indices = [ From ae065867b31507bcf045d20e031a3970a8c4c05e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 17:23:16 +0000 Subject: [PATCH 053/537] test(backups): test that we do return snapshot on backup --- selfprivacy_api/backup/restic_backuper.py | 1 + tests/test_graphql/test_backup.py | 11 +++++++++++ 2 files changed, 12 insertions(+) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 73292f0..cc81361 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -87,6 +87,7 @@ class ResticBackuper(AbstractBackuper): for message in messages: if message["message_type"] == "summary": return 
ResticBackuper._snapshot_from_fresh_summary(message, repo_name) + raise ValueError("no summary message in restic json output") @staticmethod def _snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 7683d4d..ec16306 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -130,6 +130,17 @@ def test_one_snapshot(backups, dummy_service): assert snap.service_name == dummy_service.get_id() +def test_backup_returns_snapshot(backups, dummy_service): + service_folder = dummy_service.get_location() + provider = Backups.provider() + name = dummy_service.get_id() + snapshot = provider.backuper.start_backup(service_folder, name) + + assert snapshot.id is not None + assert snapshot.service_name == name + assert snapshot.created_at is not None + + def test_restore(backups, dummy_service): service_folder = dummy_service.get_location() file_to_nuke = listdir(service_folder)[0] From 18d5cd2b8327664eec400a229c4e531d0f30d783 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 18:18:23 +0000 Subject: [PATCH 054/537] feature(backups): cache snapshots and last backup timestamps --- selfprivacy_api/backup/__init__.py | 47 ++++++++++++++++++++++++++++-- 1 file changed, 45 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 9ac8895..9bf87a0 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,4 +1,5 @@ -from typing import List +from typing import List, Optional +from datetime import datetime from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.models.backup.provider import BackupProviderModel @@ -17,6 +18,10 @@ from selfprivacy_api.backup.providers import get_provider, get_kind from selfprivacy_api.graphql.queries.providers import BackupProvider # a hack to store file path. 
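# The constants added below set up a two-tier cache, assuming plain redis-py
# semantics: each fresh snapshot is mirrored into a hash that expires after a
# day, while the per-service "last backed up" entry is written without a TTL
# so it outlives cache expiry. In outline, with `snap` standing for any
# Snapshot:
#
#     store_model_as_hash(redis, REDIS_LAST_BACKUP_PREFIX + service_id, snap)
#     snapshot_key = REDIS_SNAPSHOTS_PREFIX + snap.id
#     store_model_as_hash(redis, snapshot_key, snap)
#     redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS)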
+REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day + +REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" +REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" REDIS_REPO_PATH_KEY = "backups:test_repo_path" REDIS_PROVIDER_KEY = "backups:provider" @@ -40,6 +45,35 @@ class Backups: redis.set(REDIS_REPO_PATH_KEY, file_path) Backups.store_provider_redis(provider) + @staticmethod + def _redis_last_backup_key(service_id): + return REDIS_LAST_BACKUP_PREFIX + service_id + + @staticmethod + def _redis_snapshot_key(snapshot: Snapshot): + return REDIS_SNAPSHOTS_PREFIX + snapshot.id + + @staticmethod + def get_last_backed_up(service: Service) -> Optional[datetime]: + return Backups._get_last_backup_time_redis(service.get_id()) + + @staticmethod + def _get_last_backup_time_redis(service_id: str) -> Optional[datetime]: + key = Backups._redis_last_backup_key(service_id) + if not redis.exists(key): + return None + + snapshot = hash_as_model(redis, key) + return snapshot.created_at + + @staticmethod + def _store_last_snapshot(service_id: str, snapshot: Snapshot): + store_model_as_hash(redis, Backups._redis_last_backup_key(service_id), snapshot) + + snapshot_key = Backups._redis_snapshot_key(snapshot) + store_model_as_hash(redis, snapshot_key, snapshot) + redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) + @staticmethod def provider(): return Backups.lookup_provider() @@ -82,9 +116,16 @@ class Backups: def reset(): redis.delete(REDIS_PROVIDER_KEY) redis.delete(REDIS_REPO_PATH_KEY) + for key in redis.keys(REDIS_INITTED_CACHE_PREFIX + "*"): redis.delete(key) + for key in redis.keys(REDIS_SNAPSHOTS_PREFIX + "*"): + redis.delete(key) + + for key in redis.keys(REDIS_LAST_BACKUP_PREFIX + "*"): + redis.delete(key) + @staticmethod def lookup_provider() -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() @@ -129,7 +170,9 @@ class Backups: repo_name = service.get_id() service.pre_backup() - Backups.provider().backuper.start_backup(folder, repo_name) + snapshot = Backups.provider().backuper.start_backup(folder, repo_name) + Backups._store_last_snapshot(repo_name, snapshot) + service.post_restore() @staticmethod From 45f33e2d31653c9125c4e8d315faa56e71402545 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 18:37:12 +0000 Subject: [PATCH 055/537] feature(backups): check, set and unset service autobackup status --- selfprivacy_api/backup/__init__.py | 33 +++++++++++++++++++++++------- 1 file changed, 26 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 9bf87a0..79844a3 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -20,6 +20,8 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider # a hack to store file path. 
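# The autobackup flag below is plain key presence: set the key to enable,
# delete it to disable. Two redis-py details matter here: the default encoder
# rejects raw Python bools (presumably why a later patch in this series
# stores 1 instead of True), and exists() returns an integer count of
# matching keys rather than a bool, so the enabled check relies on truthiness:
#
#     redis.set(REDIS_AUTOBACKUP_ENABLED_PREFIX + service_id, 1)   # enable
#     redis.exists(REDIS_AUTOBACKUP_ENABLED_PREFIX + service_id)   # 1, truthy
#     redis.delete(REDIS_AUTOBACKUP_ENABLED_PREFIX + service_id)   # disable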
REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day +REDIS_AUTOBACKUP_ENABLED_PREFIX = "backup:autobackup:services:" + REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" REDIS_REPO_PATH_KEY = "backups:test_repo_path" @@ -74,6 +76,22 @@ class Backups: store_model_as_hash(redis, snapshot_key, snapshot) redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) + @staticmethod + def _redis_autobackup_key(service: Service): + return REDIS_AUTOBACKUP_ENABLED_PREFIX + service.get_id() + + @staticmethod + def enable_autobackup(service: Service): + redis.set(Backups._redis_autobackup_key(service), True) + + @staticmethod + def disable_autobackup(service: Service): + redis.delete(Backups._redis_autobackup_key(service)) + + @staticmethod + def is_autobackup_enabled(service: Service) -> bool: + return redis.exists(Backups._redis_autobackup_key(service)) + @staticmethod def provider(): return Backups.lookup_provider() @@ -117,14 +135,15 @@ class Backups: redis.delete(REDIS_PROVIDER_KEY) redis.delete(REDIS_REPO_PATH_KEY) - for key in redis.keys(REDIS_INITTED_CACHE_PREFIX + "*"): - redis.delete(key) + prefixes_to_clean = [ + REDIS_INITTED_CACHE_PREFIX, + REDIS_SNAPSHOTS_PREFIX, + REDIS_LAST_BACKUP_PREFIX, + ] - for key in redis.keys(REDIS_SNAPSHOTS_PREFIX + "*"): - redis.delete(key) - - for key in redis.keys(REDIS_LAST_BACKUP_PREFIX + "*"): - redis.delete(key) + for prefix in prefixes_to_clean: + for key in redis.keys(prefix + "*"): + redis.delete(key) @staticmethod def lookup_provider() -> AbstractBackupProvider: From 417533fc04a3a231944e296185a3e0d4f787f477 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 18:39:55 +0000 Subject: [PATCH 056/537] fix(backups): remove self from static method --- selfprivacy_api/backup/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 79844a3..2c1e250 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -253,6 +253,6 @@ class Backups: # Our dummy service is not yet globally registered so this is not testable yet @staticmethod def snapshot_restored_size(snapshot: Snapshot) -> float: - return self.service_snapshot_size( + return Backups.service_snapshot_size( get_service_by_id(snapshot.service_name), snapshot.id ) From e043720289fd11b6c69657d8e96fef1c9977f26f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 18:54:27 +0000 Subject: [PATCH 057/537] feature(backups): methods for autobackup period setting and getting --- selfprivacy_api/backup/__init__.py | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 2c1e250..b3a72c6 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -21,14 +21,15 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day REDIS_AUTOBACKUP_ENABLED_PREFIX = "backup:autobackup:services:" - REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" -REDIS_REPO_PATH_KEY = "backups:test_repo_path" - -REDIS_PROVIDER_KEY = "backups:provider" REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" +REDIS_REPO_PATH_KEY = "backups:test_repo_path" +REDIS_PROVIDER_KEY = "backups:provider" +REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" + + redis = 
RedisPool().get_connection() @@ -86,12 +87,30 @@ class Backups: @staticmethod def disable_autobackup(service: Service): + """also see disable_all_autobackup()""" redis.delete(Backups._redis_autobackup_key(service)) @staticmethod def is_autobackup_enabled(service: Service) -> bool: return redis.exists(Backups._redis_autobackup_key(service)) + @staticmethod + def autobackup_period_minutes() -> Optional[int]: + """None means autobackup is disabled""" + if not redis.exists(REDIS_AUTOBACKUP_PERIOD_KEY): + return None + return redis.get(REDIS_AUTOBACKUP_PERIOD_KEY) + + @staticmethod + def set_autobackup_period_minutes(minutes: int): + """This initiates backup very soon if some services are not backed up""" + redis.set(REDIS_AUTOBACKUP_PERIOD_KEY, minutes) + + @staticmethod + def disable_all_autobackup(): + """disables all automatic backing up, but does not change per-service settings""" + redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) + @staticmethod def provider(): return Backups.lookup_provider() @@ -134,6 +153,7 @@ class Backups: def reset(): redis.delete(REDIS_PROVIDER_KEY) redis.delete(REDIS_REPO_PATH_KEY) + redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) prefixes_to_clean = [ REDIS_INITTED_CACHE_PREFIX, From 3c42d8c413717af2fc765606efeb04e810b13b40 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 21:59:14 +0000 Subject: [PATCH 058/537] test(backups): test setting services as enabled for autobackups --- selfprivacy_api/backup/__init__.py | 5 +++-- tests/test_graphql/test_backup.py | 10 ++++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b3a72c6..e3143b5 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -78,12 +78,12 @@ class Backups: redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) @staticmethod - def _redis_autobackup_key(service: Service): + def _redis_autobackup_key(service: Service) -> str: return REDIS_AUTOBACKUP_ENABLED_PREFIX + service.get_id() @staticmethod def enable_autobackup(service: Service): - redis.set(Backups._redis_autobackup_key(service), True) + redis.set(Backups._redis_autobackup_key(service), 1) @staticmethod def disable_autobackup(service: Service): @@ -159,6 +159,7 @@ class Backups: REDIS_INITTED_CACHE_PREFIX, REDIS_SNAPSHOTS_PREFIX, REDIS_LAST_BACKUP_PREFIX, + REDIS_AUTOBACKUP_ENABLED_PREFIX, ] for prefix in prefixes_to_clean: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index ec16306..97dd6af 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -218,3 +218,13 @@ def test_backup_service_task(backups, dummy_service): snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 + + +def test_autobackup_enable_service(backups, dummy_service): + assert not Backups.is_autobackup_enabled(dummy_service) + + Backups.enable_autobackup(dummy_service) + assert Backups.is_autobackup_enabled(dummy_service) + + Backups.disable_autobackup(dummy_service) + assert not Backups.is_autobackup_enabled(dummy_service) From d4cad61d56d188614435ede74f350b416d6ba7c6 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 22:39:04 +0000 Subject: [PATCH 059/537] test(backups): test setting autobackup period --- selfprivacy_api/backup/__init__.py | 10 ++++++++-- tests/test_graphql/test_backup.py | 22 ++++++++++++++++++++++ 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py 
b/selfprivacy_api/backup/__init__.py index e3143b5..b54482b 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -99,11 +99,17 @@ class Backups: """None means autobackup is disabled""" if not redis.exists(REDIS_AUTOBACKUP_PERIOD_KEY): return None - return redis.get(REDIS_AUTOBACKUP_PERIOD_KEY) + return int(redis.get(REDIS_AUTOBACKUP_PERIOD_KEY)) @staticmethod def set_autobackup_period_minutes(minutes: int): - """This initiates backup very soon if some services are not backed up""" + """ + 0 and negative numbers are equivalent to disable. + Setting to a positive number may result in a backup very soon if some services are not backed up. + """ + if minutes <= 0: + Backups.disable_all_autobackup() + return redis.set(REDIS_AUTOBACKUP_PERIOD_KEY, minutes) @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 97dd6af..31ecefa 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -228,3 +228,25 @@ def test_autobackup_enable_service(backups, dummy_service): Backups.disable_autobackup(dummy_service) assert not Backups.is_autobackup_enabled(dummy_service) + + +def test_set_autobackup_period(backups): + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(2) + assert Backups.autobackup_period_minutes() == 2 + + Backups.disable_all_autobackup() + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(3) + assert Backups.autobackup_period_minutes() == 3 + + Backups.set_autobackup_period_minutes(0) + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(3) + assert Backups.autobackup_period_minutes() == 3 + + Backups.set_autobackup_period_minutes(-1) + assert Backups.autobackup_period_minutes() is None From 146b0ca02fac34ab89cd424cebf234320debd7a3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 23:23:47 +0000 Subject: [PATCH 060/537] feature(backups): add a datetime validator function for huey autobackups --- selfprivacy_api/backup/__init__.py | 43 +++++++++++++++++++++++++----- 1 file changed, 37 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b54482b..f58d4c4 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,5 +1,5 @@ from typing import List, Optional -from datetime import datetime +from datetime import datetime, timezone, timedelta from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.models.backup.provider import BackupProviderModel @@ -78,21 +78,52 @@ class Backups: redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) @staticmethod - def _redis_autobackup_key(service: Service) -> str: - return REDIS_AUTOBACKUP_ENABLED_PREFIX + service.get_id() + def _redis_autobackup_key(service_name: str) -> str: + return REDIS_AUTOBACKUP_ENABLED_PREFIX + service_name @staticmethod def enable_autobackup(service: Service): - redis.set(Backups._redis_autobackup_key(service), 1) + redis.set(Backups._redis_autobackup_key(service.get_id()), 1) + + @staticmethod + def is_time_to_backup(time: datetime) -> bool: + """ + Intended as a time validator for huey cron scheduler of automatic backups + """ + for key in redis.keys(REDIS_AUTOBACKUP_ENABLED_PREFIX + "*"): + service_id = key.split(":")[-1] + if Backups.is_time_to_backup_service(service_id, time): + return True + return False + + @staticmethod + def 
is_time_to_backup_service(service_id: str, time: datetime): + period = Backups.autobackup_period_minutes() + if period is None: + return False + if not Backups._is_autobackup_enabled_by_name(service_id) is None: + return False + + last_backup = Backups._get_last_backup_time_redis(service_id) + if last_backup is None: + return True # queue a backup immediately if there are no previous backups + + if time > last_backup + timedelta(minutes=period): + return True + return False @staticmethod def disable_autobackup(service: Service): """also see disable_all_autobackup()""" - redis.delete(Backups._redis_autobackup_key(service)) + redis.delete(Backups._redis_autobackup_key(service.get_id())) @staticmethod def is_autobackup_enabled(service: Service) -> bool: - return redis.exists(Backups._redis_autobackup_key(service)) + return Backups._is_autobackup_enabled_by_name(service.get_id()) + + @staticmethod + def _is_autobackup_enabled_by_name(service_name: str): + return redis.exists(Backups._redis_autobackup_key(service_name)) @staticmethod def autobackup_period_minutes() -> Optional[int]: From 7699ba0d9b497bd7fd63aaac8152347b50dd6149 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Apr 2023 15:18:54 +0000 Subject: [PATCH 061/537] test(backups): test last backup date retrieval --- selfprivacy_api/backup/__init__.py | 3 ++- tests/test_graphql/test_backup.py | 8 ++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f58d4c4..7e73ebd 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -58,6 +58,7 @@ class Backups: @staticmethod def get_last_backed_up(service: Service) -> Optional[datetime]: + """Get a timezone-aware time of the last backup of a service""" return Backups._get_last_backup_time_redis(service.get_id()) @staticmethod @@ -66,7 +67,7 @@ class Backups: if not redis.exists(key): return None - snapshot = hash_as_model(redis, key) + snapshot = hash_as_model(redis, key, Snapshot) return snapshot.created_at @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 31ecefa..f6ad24a 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -3,6 +3,7 @@ import os.path as path from os import makedirs from os import remove from os import listdir +from datetime import datetime, timedelta, timezone from selfprivacy_api.services.test_service import DummyService @@ -113,8 +114,15 @@ def test_backup_simple_file(raw_dummy_service, file_backup): def test_backup_service(dummy_service, backups): + assert Backups.get_last_backed_up(dummy_service) is None Backups.back_up(dummy_service) + now = datetime.now(timezone.utc) + date = Backups.get_last_backed_up(dummy_service) + assert date is not None + assert now > date + assert now - date < timedelta(minutes=1) + def test_no_repo(memory_backup): with pytest.raises(ValueError): From a2ff74244e8f0e427bb560262927622bfafcdfeb Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Apr 2023 15:41:02 +0000 Subject: [PATCH 062/537] feature(backups): add snapshot cache sync functions --- selfprivacy_api/backup/__init__.py | 35 ++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7e73ebd..13eb7de 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -72,12 +72,46 @@ class Backups: @staticmethod def 
_store_last_snapshot(service_id: str, snapshot: Snapshot): + # non-expiring timestamp of the last store_model_as_hash(redis, Backups._redis_last_backup_key(service_id), snapshot) + # expiring cache entry + Backups.cache_snapshot(snapshot) + @staticmethod + def cache_snapshot(snapshot: Snapshot): snapshot_key = Backups._redis_snapshot_key(snapshot) store_model_as_hash(redis, snapshot_key, snapshot) redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) + @staticmethod + def delete_cached_snapshot(snapshot: Snapshot): + snapshot_key = Backups._redis_snapshot_key(snapshot) + redis.delete(snapshot_key) + + @staticmethod + def get_cached_snapshots() -> List[Snapshot]: + keys = redis.keys(REDIS_SNAPSHOTS_PREFIX + "*") + result = [] + + for key in keys: + snapshot = hash_as_model(redis, key, Snapshot) + result.append(snapshot) + return result + + @staticmethod + def get_cached_snapshots_service(service_id: str) -> List[Snapshot]: + snapshots = Backups.get_cached_snapshots() + return [snap for snap in snapshots if snap.service_name == service_id] + + @staticmethod + def sync_service_snapshots(service_id: str, snapshots: List[Snapshot]): + for snapshot in snapshots: + if snapshot.service_name == service_id: + Backups.cache_snapshot(snapshot) + for snapshot in Backups.get_cached_snapshots_service(service_id): + if snapshot.id not in [snap.id for snap in snapshots]: + Backups.delete_cached_snapshot(snapshot) + @staticmethod def _redis_autobackup_key(service_name: str) -> str: return REDIS_AUTOBACKUP_ENABLED_PREFIX + service_name @@ -244,6 +278,7 @@ class Backups: @staticmethod def back_up(service: Service): + """The top-level function to back up a service""" folder = service.get_location() repo_name = service.get_id() From 5ad9f50b9453575dd5cfc6d7707ff8c1e4ff4140 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Apr 2023 17:24:53 +0000 Subject: [PATCH 063/537] feature(backups): enable snapshot cache usage --- selfprivacy_api/backup/__init__.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 13eb7de..d22c4c9 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -321,9 +321,16 @@ class Backups: @staticmethod def get_snapshots(service: Service) -> List[Snapshot]: - repo_name = service.get_id() + service_id = service.get_id() + cached_snapshots = Backups.get_cached_snapshots_service(service_id) + if cached_snapshots != []: + return cached_snapshots + # TODO: the oldest snapshots will get expired faster than the new ones. + # How to detect that the end is missing? 
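# Context for the TODO above: cached snapshot hashes expire individually
# after REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS, so entries for old snapshots can
# lapse while newer ones are still cached. A non-empty cache therefore does
# not prove completeness, and the fast path below may serve a truncated list
# until something repopulates the cache from restic.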
- return Backups.provider().backuper.get_snapshots(repo_name) + upstream_snapshots = Backups.provider().backuper.get_snapshots(service_id) + Backups.sync_service_snapshots(service_id, upstream_snapshots) + return upstream_snapshots @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): From f25e57c51a004ae602d801b0134eb3860038d8f8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Apr 2023 18:12:05 +0000 Subject: [PATCH 064/537] test(backups): test that we do use cache --- tests/test_graphql/test_backup.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index f6ad24a..619656d 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -192,6 +192,17 @@ def test_redis_storage(backups_backblaze): assert restored_provider.key == "KEY" +def test_snapshots_caching(backups, dummy_service): + Backups.back_up(dummy_service) + + # we test indirectly that we do redis calls instead of shell calls + start = datetime.now() + for i in range(10): + snapshots = Backups.get_snapshots(dummy_service) + assert len(snapshots) == 1 + assert datetime.now() - start < timedelta(seconds=0.5) + + # lowlevel def test_init_tracking_caching(backups, raw_dummy_service): assert Backups._has_redis_init_mark(raw_dummy_service) is False From 283c8d09cc87f64c51d27883ef2e7141d3dd5f09 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 10 Apr 2023 13:22:33 +0000 Subject: [PATCH 065/537] refactor(backups): split out storage --- selfprivacy_api/backup/__init__.py | 192 +++++++---------------------- selfprivacy_api/backup/storage.py | 168 +++++++++++++++++++++++++ tests/test_graphql/test_backup.py | 118 ++++++++++-------- 3 files changed, 278 insertions(+), 200 deletions(-) create mode 100644 selfprivacy_api/backup/storage.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index d22c4c9..c931e57 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,41 +1,22 @@ from typing import List, Optional -from datetime import datetime, timezone, timedelta +from datetime import datetime, timedelta from selfprivacy_api.models.backup.snapshot import Snapshot -from selfprivacy_api.models.backup.provider import BackupProviderModel -from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass from selfprivacy_api.utils import ReadUserData from selfprivacy_api.utils.redis_pool import RedisPool -from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service -from selfprivacy_api.backup.providers.provider import AbstractBackupProvider -from selfprivacy_api.backup.providers import get_provider, get_kind from selfprivacy_api.graphql.queries.providers import BackupProvider -# a hack to store file path. 
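# Shape of this refactor: every redis key, TTL and model (de)serialization
# moves behind the new Storage class in selfprivacy_api/backup/storage.py,
# leaving Backups with control flow only and no direct redis client access.
# The module-level constants below are deleted here and re-created verbatim
# in storage.py.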
-REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day - -REDIS_AUTOBACKUP_ENABLED_PREFIX = "backup:autobackup:services:" -REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" -REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" -REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" - -REDIS_REPO_PATH_KEY = "backups:test_repo_path" -REDIS_PROVIDER_KEY = "backups:provider" -REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" +from selfprivacy_api.backup.providers.provider import AbstractBackupProvider +from selfprivacy_api.backup.providers import get_provider +from selfprivacy_api.backup.storage import Storage -redis = RedisPool().get_connection() - - -# Singleton has a property of being persistent between tests. -# I don't know what to do with this yet -# class Backups(metaclass=SingletonMetaclass): class Backups: """A singleton controller for backups""" @@ -45,88 +26,40 @@ class Backups: def set_localfile_repo(file_path: str): ProviderClass = get_provider(BackupProvider.FILE) provider = ProviderClass(file_path) - redis.set(REDIS_REPO_PATH_KEY, file_path) - Backups.store_provider_redis(provider) - - @staticmethod - def _redis_last_backup_key(service_id): - return REDIS_LAST_BACKUP_PREFIX + service_id - - @staticmethod - def _redis_snapshot_key(snapshot: Snapshot): - return REDIS_SNAPSHOTS_PREFIX + snapshot.id + Storage.store_testrepo_path(file_path) + Storage.store_provider(provider) @staticmethod def get_last_backed_up(service: Service) -> Optional[datetime]: """Get a timezone-aware time of the last backup of a service""" - return Backups._get_last_backup_time_redis(service.get_id()) - - @staticmethod - def _get_last_backup_time_redis(service_id: str) -> Optional[datetime]: - key = Backups._redis_last_backup_key(service_id) - if not redis.exists(key): - return None - - snapshot = hash_as_model(redis, key, Snapshot) - return snapshot.created_at - - @staticmethod - def _store_last_snapshot(service_id: str, snapshot: Snapshot): - # non-expiring timestamp of the last - store_model_as_hash(redis, Backups._redis_last_backup_key(service_id), snapshot) - # expiring cache entry - Backups.cache_snapshot(snapshot) - - @staticmethod - def cache_snapshot(snapshot: Snapshot): - snapshot_key = Backups._redis_snapshot_key(snapshot) - store_model_as_hash(redis, snapshot_key, snapshot) - redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) - - @staticmethod - def delete_cached_snapshot(snapshot: Snapshot): - snapshot_key = Backups._redis_snapshot_key(snapshot) - redis.delete(snapshot_key) - - @staticmethod - def get_cached_snapshots() -> List[Snapshot]: - keys = redis.keys(REDIS_SNAPSHOTS_PREFIX + "*") - result = [] - - for key in keys: - snapshot = hash_as_model(redis, key, Snapshot) - result.append(snapshot) - return result + return Storage.get_last_backup_time(service.get_id()) @staticmethod def get_cached_snapshots_service(service_id: str) -> List[Snapshot]: - snapshots = Backups.get_cached_snapshots() + snapshots = Storage.get_cached_snapshots() return [snap for snap in snapshots if snap.service_name == service_id] @staticmethod def sync_service_snapshots(service_id: str, snapshots: List[Snapshot]): for snapshot in snapshots: if snapshot.service_name == service_id: - Backups.cache_snapshot(snapshot) + Storage.cache_snapshot(snapshot) for snapshot in Backups.get_cached_snapshots_service(service_id): if snapshot.id not in [snap.id for snap in snapshots]: - Backups.delete_cached_snapshot(snapshot) - - @staticmethod - def _redis_autobackup_key(service_name: str) -> str: - return 
REDIS_AUTOBACKUP_ENABLED_PREFIX + service_name + Storage.delete_cached_snapshot(snapshot) @staticmethod def enable_autobackup(service: Service): - redis.set(Backups._redis_autobackup_key(service.get_id()), 1) + Storage.set_autobackup(service) @staticmethod def is_time_to_backup(time: datetime) -> bool: """ Intended as a time validator for huey cron scheduler of automatic backups """ - for key in redis.keys(REDIS_AUTOBACKUP_ENABLED_PREFIX + "*"): - service_id = key.split(":")[-1] + + enabled_services = Storage.services_with_autobackup() + for service_id in enabled_services: if Backups.is_time_to_backup_service(service_id, time): return True return False @@ -136,10 +69,10 @@ class Backups: period = Backups.autobackup_period_minutes() if period is None: return False - if not Backups._is_autobackup_enabled_by_name(service_id) is None: + if not Storage.is_autobackup_set_by_name(service_id) is None: return False - last_backup = Backups._get_last_backup_time_redis(service_id) + last_backup = Storage.get_last_backup_time(service_id) if last_backup is None: return True # queue a backup immediately if there are no previous backups @@ -150,22 +83,16 @@ class Backups: @staticmethod def disable_autobackup(service: Service): """also see disable_all_autobackup()""" - redis.delete(Backups._redis_autobackup_key(service.get_id())) + Storage.unset_autobackup(service) @staticmethod def is_autobackup_enabled(service: Service) -> bool: - return Backups._is_autobackup_enabled_by_name(service.get_id()) - - @staticmethod - def _is_autobackup_enabled_by_name(service_name: str): - return redis.exists(Backups._redis_autobackup_key(service_name)) + return Storage.is_autobackup_set(service.get_id()) @staticmethod def autobackup_period_minutes() -> Optional[int]: """None means autobackup is disabled""" - if not redis.exists(REDIS_AUTOBACKUP_PERIOD_KEY): - return None - return int(redis.get(REDIS_AUTOBACKUP_PERIOD_KEY)) + return Storage.autobackup_period_minutes() @staticmethod def set_autobackup_period_minutes(minutes: int): @@ -176,12 +103,12 @@ class Backups: if minutes <= 0: Backups.disable_all_autobackup() return - redis.set(REDIS_AUTOBACKUP_PERIOD_KEY, minutes) + Storage.store_autobackup_period_minutes(minutes) @staticmethod def disable_all_autobackup(): """disables all automatic backing up, but does not change per-service settings""" - redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) + Storage.delete_backup_period() @staticmethod def provider(): @@ -190,53 +117,21 @@ class Backups: @staticmethod def set_provider(kind: str, login: str, key: str): provider = Backups.construct_provider(kind, login, key) - Backups.store_provider_redis(provider) + Storage.store_provider(provider) @staticmethod def construct_provider(kind: str, login: str, key: str): provider_class = get_provider(BackupProvider[kind]) if kind == "FILE": - path = redis.get(REDIS_REPO_PATH_KEY) + path = Storage.get_testrepo_path() return provider_class(path) return provider_class(login=login, key=key) - @staticmethod - def store_provider_redis(provider: AbstractBackupProvider): - store_model_as_hash( - redis, - REDIS_PROVIDER_KEY, - BackupProviderModel( - kind=get_kind(provider), login=provider.login, key=provider.key - ), - ) - - @staticmethod - def load_provider_redis() -> AbstractBackupProvider: - provider_model = hash_as_model(redis, REDIS_PROVIDER_KEY, BackupProviderModel) - if provider_model is None: - return None - return Backups.construct_provider( - provider_model.kind, provider_model.login, provider_model.key - ) - @staticmethod def reset(): - 
redis.delete(REDIS_PROVIDER_KEY) - redis.delete(REDIS_REPO_PATH_KEY) - redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) - - prefixes_to_clean = [ - REDIS_INITTED_CACHE_PREFIX, - REDIS_SNAPSHOTS_PREFIX, - REDIS_LAST_BACKUP_PREFIX, - REDIS_AUTOBACKUP_ENABLED_PREFIX, - ] - - for prefix in prefixes_to_clean: - for key in redis.keys(prefix + "*"): - redis.delete(key) + Storage.reset() @staticmethod def lookup_provider() -> AbstractBackupProvider: @@ -246,11 +141,11 @@ class Backups: json_provider = Backups.load_provider_json() if json_provider is not None: - Backups.store_provider_redis(json_provider) + Storage.store_provider(json_provider) return json_provider memory_provider = Backups.construct_provider("MEMORY", login="", key="") - Backups.store_provider_redis(memory_provider) + Storage.store_provider(memory_provider) return memory_provider @staticmethod @@ -276,6 +171,15 @@ class Backups: kind=provider_string, login=account, key=key ) + @staticmethod + def load_provider_redis() -> AbstractBackupProvider: + provider_model = Storage.load_provider() + if provider_model is None: + return None + return Backups.construct_provider( + provider_model.kind, provider_model.login, provider_model.key + ) + @staticmethod def back_up(service: Service): """The top-level function to back up a service""" @@ -292,29 +196,17 @@ class Backups: def init_repo(service: Service): repo_name = service.get_id() Backups.provider().backuper.init(repo_name) - Backups._redis_mark_as_init(service) - - @staticmethod - def _has_redis_init_mark(service: Service) -> bool: - repo_name = service.get_id() - if redis.exists(REDIS_INITTED_CACHE_PREFIX + repo_name): - return True - return False - - @staticmethod - def _redis_mark_as_init(service: Service): - repo_name = service.get_id() - redis.set(REDIS_INITTED_CACHE_PREFIX + repo_name, 1) + Storage.mark_as_init(service) @staticmethod def is_initted(service: Service) -> bool: repo_name = service.get_id() - if Backups._has_redis_init_mark(service): + if Storage.has_init_mark(service): return True initted = Backups.provider().backuper.is_initted(repo_name) if initted: - Backups._redis_mark_as_init(service) + Storage.mark_as_init(service) return True return False @@ -357,3 +249,11 @@ class Backups: return Backups.service_snapshot_size( get_service_by_id(snapshot.service_name), snapshot.id ) + + @staticmethod + def _store_last_snapshot(service_id: str, snapshot: Snapshot): + """What do we do with a snapshot that is just made?""" + # non-expiring timestamp of the last + Storage.store_last_timestamp(service_id, snapshot) + # expiring cache entry + Storage.cache_snapshot(snapshot) diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py new file mode 100644 index 0000000..cf7d93a --- /dev/null +++ b/selfprivacy_api/backup/storage.py @@ -0,0 +1,168 @@ +from typing import List, Optional +from datetime import datetime + +from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.models.backup.provider import BackupProviderModel + +from selfprivacy_api.utils.redis_pool import RedisPool +from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model + + +from selfprivacy_api.services.service import Service + +from selfprivacy_api.backup.providers.provider import AbstractBackupProvider +from selfprivacy_api.backup.providers import get_kind + +# a hack to store file path. 
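# Note on the key namespace re-created below: everything lives under
# "backups:" except REDIS_AUTOBACKUP_ENABLED_PREFIX, which keeps its original
# singular "backup:autobackup:services:" spelling. Storage.reset() depends on
# these exact prefixes because it cleans up via redis.keys(prefix + "*").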
+REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day + +REDIS_AUTOBACKUP_ENABLED_PREFIX = "backup:autobackup:services:" +REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" +REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" +REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" + +REDIS_REPO_PATH_KEY = "backups:test_repo_path" +REDIS_PROVIDER_KEY = "backups:provider" +REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" + + +redis = RedisPool().get_connection() + + +class Storage: + @staticmethod + def reset(): + redis.delete(REDIS_PROVIDER_KEY) + redis.delete(REDIS_REPO_PATH_KEY) + redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) + + prefixes_to_clean = [ + REDIS_INITTED_CACHE_PREFIX, + REDIS_SNAPSHOTS_PREFIX, + REDIS_LAST_BACKUP_PREFIX, + REDIS_AUTOBACKUP_ENABLED_PREFIX, + ] + + for prefix in prefixes_to_clean: + for key in redis.keys(prefix + "*"): + redis.delete(key) + + @staticmethod + def store_testrepo_path(path: str): + redis.set(REDIS_REPO_PATH_KEY, path) + + @staticmethod + def get_testrepo_path() -> str: + if not redis.exists(REDIS_REPO_PATH_KEY): + raise ValueError( + "No test repository filepath is set, but we tried to access it" + ) + return redis.get(REDIS_REPO_PATH_KEY) + + @staticmethod + def services_with_autobackup() -> List[str]: + keys = redis.keys(REDIS_AUTOBACKUP_ENABLED_PREFIX + "*") + service_ids = [key.split(":")[-1] for key in keys] + return service_ids + + @staticmethod + def __last_backup_key(service_id): + return REDIS_LAST_BACKUP_PREFIX + service_id + + @staticmethod + def __snapshot_key(snapshot: Snapshot): + return REDIS_SNAPSHOTS_PREFIX + snapshot.id + + @staticmethod + def get_last_backup_time(service_id: str) -> Optional[datetime]: + key = Storage.__last_backup_key(service_id) + if not redis.exists(key): + return None + + snapshot = hash_as_model(redis, key, Snapshot) + return snapshot.created_at + + @staticmethod + def store_last_timestamp(service_id: str, snapshot: Snapshot): + store_model_as_hash(redis, Storage.__last_backup_key(service_id), snapshot) + + @staticmethod + def cache_snapshot(snapshot: Snapshot): + snapshot_key = Storage.__snapshot_key(snapshot) + store_model_as_hash(redis, snapshot_key, snapshot) + redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) + + @staticmethod + def delete_cached_snapshot(snapshot: Snapshot): + snapshot_key = Storage.__snapshot_key(snapshot) + redis.delete(snapshot_key) + + @staticmethod + def get_cached_snapshots() -> List[Snapshot]: + keys = redis.keys(REDIS_SNAPSHOTS_PREFIX + "*") + result = [] + + for key in keys: + snapshot = hash_as_model(redis, key, Snapshot) + result.append(snapshot) + return result + + @staticmethod + def __autobackup_key(service_name: str) -> str: + return REDIS_AUTOBACKUP_ENABLED_PREFIX + service_name + + @staticmethod + def set_autobackup(service: Service): + # shortcut this + redis.set(Storage.__autobackup_key(service.get_id()), 1) + + @staticmethod + def unset_autobackup(service: Service): + """also see disable_all_autobackup()""" + redis.delete(Storage.__autobackup_key(service.get_id())) + + @staticmethod + def is_autobackup_set(service_name: str): + return redis.exists(Storage.__autobackup_key(service_name)) + + @staticmethod + def autobackup_period_minutes() -> Optional[int]: + """None means autobackup is disabled""" + if not redis.exists(REDIS_AUTOBACKUP_PERIOD_KEY): + return None + return int(redis.get(REDIS_AUTOBACKUP_PERIOD_KEY)) + + @staticmethod + def store_autobackup_period_minutes(minutes: int): + redis.set(REDIS_AUTOBACKUP_PERIOD_KEY, minutes) + + 
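# Aside on the int() in autobackup_period_minutes() above: redis stores
# scalar values as strings, and redis-py hands them back as bytes, or str
# when the connection pool uses decode_responses=True (which the
# key.split(":") usage elsewhere in this series suggests is the case here);
# int() accepts either form, e.g. int(b"13") == int("13") == 13.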
@staticmethod + def delete_backup_period(): + redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) + + @staticmethod + def store_provider(provider: AbstractBackupProvider): + store_model_as_hash( + redis, + REDIS_PROVIDER_KEY, + BackupProviderModel( + kind=get_kind(provider), login=provider.login, key=provider.key + ), + ) + + @staticmethod + def load_provider() -> BackupProviderModel: + provider_model = hash_as_model(redis, REDIS_PROVIDER_KEY, BackupProviderModel) + return provider_model + + @staticmethod + def has_init_mark(service: Service) -> bool: + repo_name = service.get_id() + if redis.exists(REDIS_INITTED_CACHE_PREFIX + repo_name): + return True + return False + + @staticmethod + def mark_as_init(service: Service): + repo_name = service.get_id() + redis.set(REDIS_INITTED_CACHE_PREFIX + repo_name, 1) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 619656d..54701b1 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -6,18 +6,15 @@ from os import listdir from datetime import datetime, timedelta, timezone from selfprivacy_api.services.test_service import DummyService - -from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup import Backups import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider - from selfprivacy_api.backup.providers.backblaze import Backblaze - -from selfprivacy_api.graphql.queries.providers import BackupProvider - from selfprivacy_api.backup.tasks import start_backup +from selfprivacy_api.backup.storage import Storage + TESTFILE_BODY = "testytest!" REPO_NAME = "test_backup" @@ -175,54 +172,6 @@ def test_sizing(backups, dummy_service): assert size > 0 -def test_redis_storage(backups_backblaze): - Backups.reset() - provider = Backups.provider() - - assert provider is not None - - assert isinstance(provider, Backblaze) - assert provider.login == "ID" - assert provider.key == "KEY" - - Backups.store_provider_redis(provider) - restored_provider = Backups.load_provider_redis() - assert isinstance(restored_provider, Backblaze) - assert restored_provider.login == "ID" - assert restored_provider.key == "KEY" - - -def test_snapshots_caching(backups, dummy_service): - Backups.back_up(dummy_service) - - # we test indirectly that we do redis calls instead of shell calls - start = datetime.now() - for i in range(10): - snapshots = Backups.get_snapshots(dummy_service) - assert len(snapshots) == 1 - assert datetime.now() - start < timedelta(seconds=0.5) - - -# lowlevel -def test_init_tracking_caching(backups, raw_dummy_service): - assert Backups._has_redis_init_mark(raw_dummy_service) is False - - Backups._redis_mark_as_init(raw_dummy_service) - - assert Backups._has_redis_init_mark(raw_dummy_service) is True - assert Backups.is_initted(raw_dummy_service) is True - - -# lowlevel -def test_init_tracking_caching2(backups, raw_dummy_service): - assert Backups._has_redis_init_mark(raw_dummy_service) is False - - Backups.init_repo(raw_dummy_service) - - assert Backups._has_redis_init_mark(raw_dummy_service) is True - - -# only public API def test_init_tracking(backups, raw_dummy_service): assert Backups.is_initted(raw_dummy_service) is False @@ -269,3 +218,64 @@ def test_set_autobackup_period(backups): Backups.set_autobackup_period_minutes(-1) assert Backups.autobackup_period_minutes() is None + + +# Storage +def test_snapshots_caching(backups, 
dummy_service): + Backups.back_up(dummy_service) + + # we test indirectly that we do redis calls instead of shell calls + start = datetime.now() + for i in range(10): + snapshots = Backups.get_snapshots(dummy_service) + assert len(snapshots) == 1 + assert datetime.now() - start < timedelta(seconds=0.5) + + cached_snapshots = Storage.get_cached_snapshots() + assert len(cached_snapshots) == 1 + + Storage.delete_cached_snapshot(cached_snapshots[0]) + cached_snapshots = Storage.get_cached_snapshots() + assert len(cached_snapshots) == 0 + + snapshots = Backups.get_snapshots(dummy_service) + assert len(snapshots) == 1 + cached_snapshots = Storage.get_cached_snapshots() + assert len(cached_snapshots) == 1 + + +# Storage +def test_init_tracking_caching(backups, raw_dummy_service): + assert Storage.has_init_mark(raw_dummy_service) is False + + Storage.mark_as_init(raw_dummy_service) + + assert Storage.has_init_mark(raw_dummy_service) is True + assert Backups.is_initted(raw_dummy_service) is True + + +# Storage +def test_init_tracking_caching2(backups, raw_dummy_service): + assert Storage.has_init_mark(raw_dummy_service) is False + + Backups.init_repo(raw_dummy_service) + + assert Storage.has_init_mark(raw_dummy_service) is True + + +# Storage +def test_provider_storage(backups_backblaze): + Backups.reset() + provider = Backups.provider() + + assert provider is not None + + assert isinstance(provider, Backblaze) + assert provider.login == "ID" + assert provider.key == "KEY" + + Storage.store_provider(provider) + restored_provider = Backups.load_provider_redis() + assert isinstance(restored_provider, Backblaze) + assert restored_provider.login == "ID" + assert restored_provider.key == "KEY" From ec85f060f87436a5f31815fbc553cbbb9e3c735a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 10 Apr 2023 15:51:54 +0000 Subject: [PATCH 066/537] test(backups): test autobackup timing --- selfprivacy_api/backup/__init__.py | 2 +- selfprivacy_api/backup/storage.py | 2 +- tests/test_graphql/test_backup.py | 75 ++++++++++++++++++++++++++++++ 3 files changed, 77 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index c931e57..1676ca5 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -69,7 +69,7 @@ class Backups: period = Backups.autobackup_period_minutes() if period is None: return False - if not Storage.is_autobackup_set_by_name(service_id) is None: + if not Storage.is_autobackup_set(service_id): return False last_backup = Storage.get_last_backup_time(service_id) diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index cf7d93a..7ca5f18 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -122,7 +122,7 @@ class Storage: redis.delete(Storage.__autobackup_key(service.get_id())) @staticmethod - def is_autobackup_set(service_name: str): + def is_autobackup_set(service_name: str) -> bool: return redis.exists(Storage.__autobackup_key(service_name)) @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 54701b1..710e7b8 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -198,6 +198,17 @@ def test_autobackup_enable_service(backups, dummy_service): assert not Backups.is_autobackup_enabled(dummy_service) +def test_autobackup_enable_service_storage(backups, dummy_service): + assert len(Storage.services_with_autobackup()) == 0 + + 
Backups.enable_autobackup(dummy_service) + assert len(Storage.services_with_autobackup()) == 1 + assert Storage.services_with_autobackup()[0] == dummy_service.get_id() + + Backups.disable_autobackup(dummy_service) + assert len(Storage.services_with_autobackup()) == 0 + + def test_set_autobackup_period(backups): assert Backups.autobackup_period_minutes() is None @@ -220,6 +231,70 @@ def test_set_autobackup_period(backups): assert Backups.autobackup_period_minutes() is None +def test_no_default_autobackup(backups, dummy_service): + now = datetime.now(timezone.utc) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + +def test_autobackup_timer_periods(backups, dummy_service): + now = datetime.now(timezone.utc) + backup_period = 13 # minutes + + Backups.enable_autobackup(dummy_service) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert Backups.is_time_to_backup(now) + + Backups.set_autobackup_period_minutes(0) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + +def test_autobackup_timer_enabling(backups, dummy_service): + now = datetime.now(timezone.utc) + backup_period = 13 # minutes + + Backups.set_autobackup_period_minutes(backup_period) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + Backups.enable_autobackup(dummy_service) + assert Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert Backups.is_time_to_backup(now) + + Backups.disable_autobackup(dummy_service) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + +def test_autobackup_timing(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + Backups.enable_autobackup(dummy_service) + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert Backups.is_time_to_backup(now) + + Backups.back_up(dummy_service) + + now = datetime.now(timezone.utc) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + past = datetime.now(timezone.utc) - timedelta(minutes=1) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), past) + assert not Backups.is_time_to_backup(past) + + future = datetime.now(timezone.utc) + timedelta(minutes=backup_period + 2) + assert Backups.is_time_to_backup_service(dummy_service.get_id(), future) + assert Backups.is_time_to_backup(future) + + # Storage def test_snapshots_caching(backups, dummy_service): Backups.back_up(dummy_service) From 4018dca184742a28c5879bc7336a584f5f32e7f0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 10 Apr 2023 16:35:35 +0000 Subject: [PATCH 067/537] feature(backups): automatic backup --- selfprivacy_api/backup/__init__.py | 22 +++++++++++++++++----- selfprivacy_api/backup/tasks.py | 22 ++++++++++++++++++++++ 2 files changed, 39 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 1676ca5..d9b811c 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -52,17 +52,29 @@ class Backups: def 
enable_autobackup(service: Service): Storage.set_autobackup(service) + @staticmethod + def _service_ids_to_back_up(time: datetime) -> List[str]: + services = Storage.services_with_autobackup() + return [id for id in services if Backups.is_time_to_backup_service(id, time)] + + # untestable until the dummy service is registered + @staticmethod + def services_to_back_up(time: datetime) -> List[Service]: + result = [] + for id in Backups._service_ids_to_back_up(time): + service = get_service_by_id(id) + if service is None: + raise ValueError("Cannot look up a service scheduled for backup!") + result.append(service) + return result + @staticmethod def is_time_to_backup(time: datetime) -> bool: """ Intended as a time validator for huey cron scheduler of automatic backups """ - enabled_services = Storage.services_with_autobackup() - for service_id in enabled_services: - if Backups.is_time_to_backup_service(service_id, time): - return True - return False + return Backups._service_ids_to_back_up(time) != [] @staticmethod def is_time_to_backup_service(service_id: str, time: datetime): diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index f1715cc..4f6ab16 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -1,9 +1,31 @@ +from datetime import datetime + from selfprivacy_api.utils.huey import huey from selfprivacy_api.services.service import Service from selfprivacy_api.backup import Backups + +def validate_datetime(dt: datetime): + # dt = datetime.now(timezone.utc) + if dt.timetz is None: + raise ValueError( + """ + huey passed in the timezone-unaware time! + Post it in support chat or maybe try uncommenting a line above + """ + ) + return Backups.is_time_to_backup(dt) + + # huey tasks need to return something @huey.task() def start_backup(service: Service) -> bool: Backups.back_up(service) return True + + +@huey.periodic_task(validate_datetime=validate_datetime) +def automatic_backup(): + time = datetime.now() + for service in Backups.services_to_back_up(time): + start_backup(service) From 0ae4192081ce66fb128244df1b8ad62133dbc73f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Apr 2023 17:18:12 +0000 Subject: [PATCH 068/537] test(backups): register dummy service --- selfprivacy_api/backup/__init__.py | 3 --- tests/test_graphql/test_backup.py | 19 +++++++++++++++++++ 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index d9b811c..674abf0 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -57,7 +57,6 @@ class Backups: services = Storage.services_with_autobackup() return [id for id in services if Backups.is_time_to_backup_service(id, time)] - # untestable until the dummy service is registered @staticmethod def services_to_back_up(time: datetime) -> List[Service]: result = [] @@ -243,7 +242,6 @@ class Backups: Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folder) - # Our dummy service is not yet globally registered so this is not testable yet @staticmethod def restore_snapshot(snapshot: Snapshot): Backups.restore_service_from_snapshot( @@ -255,7 +253,6 @@ class Backups: repo_name = service.get_id() return Backups.provider().backuper.restored_size(repo_name, snapshot_id) - # Our dummy service is not yet globally registered so this is not testable yet @staticmethod def snapshot_restored_size(snapshot: Snapshot) -> float: return Backups.service_snapshot_size( diff --git 
a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 710e7b8..4d4f421 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -5,6 +5,8 @@ from os import remove from os import listdir from datetime import datetime, timedelta, timezone +import selfprivacy_api.services as services +from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.graphql.queries.providers import BackupProvider @@ -58,6 +60,11 @@ def dummy_service(tmpdir, backups, raw_dummy_service): # assert not repo_path Backups.init_repo(service) + + # register our service + services.services.append(service) + + assert get_service_by_id(service.get_id()) is not None return service @@ -354,3 +361,15 @@ def test_provider_storage(backups_backblaze): assert isinstance(restored_provider, Backblaze) assert restored_provider.login == "ID" assert restored_provider.key == "KEY" + + +def test_services_to_back_up(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + Backups.enable_autobackup(dummy_service) + Backups.set_autobackup_period_minutes(backup_period) + + services = Backups.services_to_back_up(now) + assert len(services) == 1 + assert services[0].get_id() == dummy_service.get_id() \ No newline at end of file From 1b1052d2051577a56520bc21d957dc8eca431a30 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 10:18:21 +0000 Subject: [PATCH 069/537] refactor(services): rename get_location() to get_drive() --- selfprivacy_api/backup/__init__.py | 4 ++-- selfprivacy_api/graphql/common_types/service.py | 4 ++-- selfprivacy_api/services/bitwarden/__init__.py | 2 +- selfprivacy_api/services/generic_service_mover.py | 2 +- selfprivacy_api/services/gitea/__init__.py | 2 +- selfprivacy_api/services/jitsi/__init__.py | 2 +- selfprivacy_api/services/mailserver/__init__.py | 2 +- selfprivacy_api/services/nextcloud/__init__.py | 2 +- selfprivacy_api/services/ocserv/__init__.py | 2 +- selfprivacy_api/services/pleroma/__init__.py | 2 +- selfprivacy_api/services/service.py | 2 +- selfprivacy_api/services/test_service/__init__.py | 2 +- tests/test_graphql/test_backup.py | 6 +++--- 13 files changed, 17 insertions(+), 17 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 674abf0..72d6168 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -194,7 +194,7 @@ class Backups: @staticmethod def back_up(service: Service): """The top-level function to back up a service""" - folder = service.get_location() + folder = service.get_drive() repo_name = service.get_id() service.pre_backup() @@ -238,7 +238,7 @@ class Backups: @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() - folder = service.get_location() + folder = service.get_drive() Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folder) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 61ed5af..8f27386 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -17,7 +17,7 @@ def get_usages(root: "StorageVolume") -> list["StorageUsageInterface"]: service=service_to_graphql_service(service), title=service.get_display_name(), used_space=str(service.get_storage_usage()), - volume=get_volume_by_id(service.get_location()), + 
volume=get_volume_by_id(service.get_drive()), ) for service in get_services_by_location(root.name) ] @@ -81,7 +81,7 @@ def get_storage_usage(root: "Service") -> ServiceStorageUsage: service=service_to_graphql_service(service), title=service.get_display_name(), used_space=str(service.get_storage_usage()), - volume=get_volume_by_id(service.get_location()), + volume=get_volume_by_id(service.get_drive()), ) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 16d7746..702eca6 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -118,7 +118,7 @@ class Bitwarden(Service): return storage_usage @staticmethod - def get_location() -> str: + def get_drive() -> str: with ReadUserData() as user_data: if user_data.get("useBinds", False): return user_data.get("bitwarden", {}).get("location", "sda1") diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index 6c1b426..148ac1a 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -44,7 +44,7 @@ def move_service( ) return # Check if we are on the same volume - old_volume = service.get_location() + old_volume = service.get_drive() if old_volume == volume.name: Jobs.update( job=job, diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index aacda5f..12b3b19 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -116,7 +116,7 @@ class Gitea(Service): return storage_usage @staticmethod - def get_location() -> str: + def get_drive() -> str: with ReadUserData() as user_data: if user_data.get("useBinds", False): return user_data.get("gitea", {}).get("location", "sda1") diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py index 6b3a973..f156f5a 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -116,7 +116,7 @@ class Jitsi(Service): return storage_usage @staticmethod - def get_location() -> str: + def get_drive() -> str: return "sda1" @staticmethod diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index 78a2441..ed04e73 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -101,7 +101,7 @@ class MailServer(Service): return get_storage_usage("/var/vmail") @staticmethod - def get_location() -> str: + def get_drive() -> str: with utils.ReadUserData() as user_data: if user_data.get("useBinds", False): return user_data.get("mailserver", {}).get("location", "sda1") diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index ad74354..37b4742 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -123,7 +123,7 @@ class Nextcloud(Service): return get_storage_usage("/var/lib/nextcloud") @staticmethod - def get_location() -> str: + def get_drive() -> str: """Get the name of disk where Nextcloud is installed.""" with ReadUserData() as user_data: if user_data.get("useBinds", False): diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index dcfacaa..bad3ad4 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ 
b/selfprivacy_api/services/ocserv/__init__.py @@ -93,7 +93,7 @@ class Ocserv(Service): return "" @staticmethod - def get_location() -> str: + def get_drive() -> str: return "sda1" @staticmethod diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index 4d2b85e..2b8c4cb 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -104,7 +104,7 @@ class Pleroma(Service): return storage_usage @staticmethod - def get_location() -> str: + def get_drive() -> str: with ReadUserData() as user_data: if user_data.get("useBinds", False): return user_data.get("pleroma", {}).get("location", "sda1") diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index f191149..3f1f4af 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -132,7 +132,7 @@ class Service(ABC): @staticmethod @abstractmethod - def get_location() -> str: + def get_drive() -> str: pass @abstractmethod diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 0118dbc..822348c 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -107,7 +107,7 @@ class DummyService(Service): return storage_usage @classmethod - def get_location(cls) -> str: + def get_drive(cls) -> str: return cls.location @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 4d4f421..d33cde1 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -44,7 +44,7 @@ def raw_dummy_service(tmpdir, backups): with open(testfile_path, "w") as file: file.write(TESTFILE_BODY) - # we need this to not change get_location() much + # we need this to not change get_drive() much class TestDummyService(DummyService, location=service_dir): pass @@ -143,7 +143,7 @@ def test_one_snapshot(backups, dummy_service): def test_backup_returns_snapshot(backups, dummy_service): - service_folder = dummy_service.get_location() + service_folder = dummy_service.get_drive() provider = Backups.provider() name = dummy_service.get_id() snapshot = provider.backuper.start_backup(service_folder, name) @@ -154,7 +154,7 @@ def test_backup_returns_snapshot(backups, dummy_service): def test_restore(backups, dummy_service): - service_folder = dummy_service.get_location() + service_folder = dummy_service.get_drive() file_to_nuke = listdir(service_folder)[0] assert file_to_nuke is not None path_to_nuke = path.join(service_folder, file_to_nuke) From 919ba1ad0353bc8f86e078538a955e77e093a3d5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 10:32:14 +0000 Subject: [PATCH 070/537] refactor(backups): make a dedicated get_folders() function --- selfprivacy_api/backup/__init__.py | 4 ++-- selfprivacy_api/services/service.py | 5 +++++ selfprivacy_api/services/test_service/__init__.py | 7 ++++++- tests/test_graphql/test_backup.py | 6 +++--- 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 72d6168..708d4a5 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -194,7 +194,7 @@ class Backups: @staticmethod def back_up(service: Service): """The top-level function to back up a service""" - folder = service.get_drive() + folder = service.get_folders() repo_name = service.get_id() service.pre_backup() @@ -238,7 
+238,7 @@ class Backups: @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() - folder = service.get_drive() + folder = service.get_folders() Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folder) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 3f1f4af..9a7663a 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -135,6 +135,11 @@ class Service(ABC): def get_drive() -> str: pass + # @staticmethod + # @abstractmethod + # def get_folders() -> str: + # pass + @abstractmethod def move_to_volume(self, volume: BlockDevice) -> Job: pass diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 822348c..2906244 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -106,8 +106,13 @@ class DummyService(Service): storage_usage = 0 return storage_usage - @classmethod + @staticmethod def get_drive(cls) -> str: + return "sda1" + + @classmethod + def get_folders(cls) -> str: + # for now only a single folder return cls.location @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index d33cde1..9c8af3f 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -44,7 +44,7 @@ def raw_dummy_service(tmpdir, backups): with open(testfile_path, "w") as file: file.write(TESTFILE_BODY) - # we need this to not change get_drive() much + # we need this to not change get_folders() much class TestDummyService(DummyService, location=service_dir): pass @@ -143,7 +143,7 @@ def test_one_snapshot(backups, dummy_service): def test_backup_returns_snapshot(backups, dummy_service): - service_folder = dummy_service.get_drive() + service_folder = dummy_service.get_folders() provider = Backups.provider() name = dummy_service.get_id() snapshot = provider.backuper.start_backup(service_folder, name) @@ -154,7 +154,7 @@ def test_backup_returns_snapshot(backups, dummy_service): def test_restore(backups, dummy_service): - service_folder = dummy_service.get_drive() + service_folder = dummy_service.get_folders() file_to_nuke = listdir(service_folder)[0] assert file_to_nuke is not None path_to_nuke = path.join(service_folder, file_to_nuke) From 08739f7ca8b925755db426ad5bbeee1586fdc2a5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 11:20:03 +0000 Subject: [PATCH 071/537] refactor(backups): make api accept a list of folders --- selfprivacy_api/backup/__init__.py | 8 ++++---- selfprivacy_api/backup/backuper.py | 4 ++-- selfprivacy_api/backup/restic_backuper.py | 10 +++++++--- selfprivacy_api/services/test_service/__init__.py | 6 +++--- tests/test_graphql/test_backup.py | 6 +++--- 5 files changed, 19 insertions(+), 15 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 708d4a5..f89a54e 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -194,11 +194,11 @@ class Backups: @staticmethod def back_up(service: Service): """The top-level function to back up a service""" - folder = service.get_folders() + folders = service.get_folders() repo_name = service.get_id() service.pre_backup() - snapshot = Backups.provider().backuper.start_backup(folder, repo_name) + snapshot = Backups.provider().backuper.start_backup(folders, repo_name) Backups._store_last_snapshot(repo_name, 
snapshot) service.post_restore() @@ -238,9 +238,9 @@ class Backups: @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() - folder = service.get_folders() + folders = service.get_folders() - Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folder) + Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folders) @staticmethod def restore_snapshot(snapshot: Snapshot): diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py index 5bba9d5..908c1fc 100644 --- a/selfprivacy_api/backup/backuper.py +++ b/selfprivacy_api/backup/backuper.py @@ -13,7 +13,7 @@ class AbstractBackuper(ABC): raise NotImplementedError @abstractmethod - def start_backup(self, folder: str, repo_name: str): + def start_backup(self, folders: List[str], repo_name: str): raise NotImplementedError @abstractmethod @@ -26,7 +26,7 @@ class AbstractBackuper(ABC): raise NotImplementedError @abstractmethod - def restore_from_backup(self, repo_name: str, snapshot_id: str, folder: str): + def restore_from_backup(self, repo_name: str, snapshot_id: str, folders: List[str]): """Restore a target folder using a snapshot""" raise NotImplementedError diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index cc81361..2af7d44 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -57,15 +57,19 @@ class ResticBackuper(AbstractBackuper): command.extend(args) return command - def start_backup(self, folder: str, repo_name: str): + def start_backup(self, folders: List[str], repo_name: str): """ Start backup with restic """ + + # but maybe it is ok to accept a union of a string and an array of strings + assert not isinstance(folders, str) + backup_command = self.restic_command( repo_name, "backup", "--json", - folder, + folders[0], ) with subprocess.Popen( backup_command, @@ -145,7 +149,7 @@ class ResticBackuper(AbstractBackuper): except ValueError as e: raise ValueError("cannot restore a snapshot: " + output) from e - def restore_from_backup(self, repo_name, snapshot_id, folder): + def restore_from_backup(self, repo_name, snapshot_id, folders): """ Restore from backup with restic """ diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 2906244..e790be9 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -1,6 +1,7 @@ """Class representing Bitwarden service""" import base64 import typing +from typing import List from selfprivacy_api.jobs import Job from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus @@ -111,9 +112,8 @@ class DummyService(Service): return "sda1" @classmethod - def get_folders(cls) -> str: - # for now only a single folder - return cls.location + def get_folders(cls) -> List[str]: + return [cls.location] @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 9c8af3f..460fa44 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -143,10 +143,10 @@ def test_one_snapshot(backups, dummy_service): def test_backup_returns_snapshot(backups, dummy_service): - service_folder = dummy_service.get_folders() + service_folders = dummy_service.get_folders() provider = Backups.provider() name = dummy_service.get_id() - 
snapshot = provider.backuper.start_backup(service_folder, name) + snapshot = provider.backuper.start_backup(service_folders, name) assert snapshot.id is not None assert snapshot.service_name == name @@ -154,7 +154,7 @@ def test_restore(backups, dummy_service): - service_folder = dummy_service.get_folders() + service_folder = dummy_service.get_folders()[0] file_to_nuke = listdir(service_folder)[0] assert file_to_nuke is not None path_to_nuke = path.join(service_folder, file_to_nuke) From fa26379a686aa0b6ecbabe3604f197c2012643dd Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 11:58:39 +0000 Subject: [PATCH 072/537] refactor(backups): actually accept a list of folders --- selfprivacy_api/backup/restic_backuper.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 2af7d44..896f68d 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -3,6 +3,7 @@ import json import datetime from typing import List +from collections.abc import Iterable from selfprivacy_api.backup.backuper import AbstractBackuper from selfprivacy_api.models.backup.snapshot import Snapshot @@ -54,9 +55,20 @@ class ResticBackuper(AbstractBackuper): self._password_command(), ] if args != []: - command.extend(args) + command.extend(ResticBackuper.__flatten_list(args)) return command + @staticmethod + def __flatten_list(source): + """string-aware list flattener""" + result = [] + for item in source: + if isinstance(item, Iterable) and not isinstance(item, str): + result.extend(ResticBackuper.__flatten_list(item)) + continue + result.append(item) + return result + def start_backup(self, folders: List[str], repo_name: str): """ Start backup with restic @@ -69,7 +81,7 @@ class ResticBackuper(AbstractBackuper): repo_name, "backup", "--json", - folders[0], + folders, ) with subprocess.Popen( backup_command,
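For context on __flatten_list() above: restic_command() can now be handed nested argument lists (for example, a list of folders spliced into the restic argv), and strings must be treated as atoms even though they are iterable themselves. A rough standalone equivalent of what it computes (plain Python; flatten is an illustrative name, not from the patch):

    from collections.abc import Iterable

    def flatten(args):
        # recurse into nested iterables, but keep strings whole
        result = []
        for item in args:
            if isinstance(item, Iterable) and not isinstance(item, str):
                result.extend(flatten(item))
            else:
                result.append(item)
        return result

    assert flatten(["backup", "--json", ["/var/lib/a", "/var/lib/b"]]) == [
        "backup", "--json", "/var/lib/a", "/var/lib/b",
    ]

From 30ac99098513af721b24c74ee3d8c769d7356ba6 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 12:29:23 +0000 Subject: [PATCH 073/537] refactor(backups): set a list of folders for our dummy service --- selfprivacy_api/services/test_service/__init__.py | 6 +++--- tests/test_graphql/test_backup.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index e790be9..c14feca 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -15,8 +15,8 @@ class DummyService(Service): """A test service""" - def __init_subclass__(cls, location): - cls.location = location + def __init_subclass__(cls, folders: List[str]): + cls.folders = folders @staticmethod def get_id() -> str: @@ -113,7 +113,7 @@ class DummyService(Service): @classmethod def get_folders(cls) -> List[str]: - return [cls.location] + return cls.folders @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 460fa44..ff89ccb 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -45,7 +45,7 @@ def raw_dummy_service(tmpdir, backups): file.write(TESTFILE_BODY) # we need this to not change get_folders() much - class TestDummyService(DummyService,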
location=service_dir): + class TestDummyService(DummyService, folders=[service_dir]): pass service = TestDummyService() From a68e94fad37cf2a2ae484f2f5ddf12bc3802fd62 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 12:40:41 +0000 Subject: [PATCH 074/537] test(backups): actually back up 2 folders --- tests/test_graphql/test_backup.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index ff89ccb..a14d268 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -19,6 +19,7 @@ from selfprivacy_api.backup.storage import Storage TESTFILE_BODY = "testytest!" +TESTFILE_2_BODY = "testissimo!" REPO_NAME = "test_backup" @@ -37,15 +38,23 @@ def backups_backblaze(generic_userdata): @pytest.fixture() def raw_dummy_service(tmpdir, backups): - service_dir = path.join(tmpdir, "test_service") - makedirs(service_dir) + dirnames = ["test_service", "also_test_service"] + service_dirs = [] + for d in dirnames: + service_dir = path.join(tmpdir, d) + makedirs(service_dir) + service_dirs.append(service_dir) - testfile_path = path.join(service_dir, "testfile.txt") - with open(testfile_path, "w") as file: + testfile_path_1 = path.join(service_dirs[0], "testfile.txt") + with open(testfile_path_1, "w") as file: file.write(TESTFILE_BODY) + testfile_path_2 = path.join(service_dirs[1], "testfile2.txt") + with open(testfile_path_2, "w") as file: + file.write(TESTFILE_2_BODY) + # we need this to not change get_folders() much - class TestDummyService(DummyService, folders=[service_dir]): + class TestDummyService(DummyService, folders=service_dirs): pass service = TestDummyService() From 48359ffd20c3d4e20df06b29949faa48efd4fe13 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 13:06:17 +0000 Subject: [PATCH 075/537] test(backups): test 2-folder restoration --- tests/test_graphql/test_backup.py | 28 +++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index a14d268..d0f5d00 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -163,21 +163,31 @@ def test_backup_returns_snapshot(backups, dummy_service): def test_restore(backups, dummy_service): - service_folder = dummy_service.get_folders()[0] - file_to_nuke = listdir(service_folder)[0] - assert file_to_nuke is not None - path_to_nuke = path.join(service_folder, file_to_nuke) + paths_to_nuke = [] + contents = [] + + for service_folder in dummy_service.get_folders(): + file_to_nuke = listdir(service_folder)[0] + assert file_to_nuke is not None + path_to_nuke = path.join(service_folder, file_to_nuke) + paths_to_nuke.append(path_to_nuke) + with open(path_to_nuke, "r") as file: + contents.append(file.read()) Backups.back_up(dummy_service) snap = Backups.get_snapshots(dummy_service)[0] assert snap is not None - assert path.exists(path_to_nuke) - remove(path_to_nuke) - assert not path.exists(path_to_nuke) + for p in paths_to_nuke: + assert path.exists(p) + remove(p) + assert not path.exists(p) Backups.restore_service_from_snapshot(dummy_service, snap.id) - assert path.exists(path_to_nuke) + for p, content in zip(paths_to_nuke, contents): + assert path.exists(p) + with open(p, "r") as file: + assert file.read() == content def test_sizing(backups, dummy_service): @@ -381,4 +391,4 @@ def test_services_to_back_up(backups, dummy_service): services = 
Backups.services_to_back_up(now) assert len(services) == 1 - assert services[0].get_id() == dummy_service.get_id() \ No newline at end of file + assert services[0].get_id() == dummy_service.get_id() From 378f0ebd733b4b413c376b9b0f0095c3609ecf03 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 12:33:27 +0000 Subject: [PATCH 076/537] test(backups): implement get_folders() for bitwarden --- .../services/bitwarden/__init__.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 702eca6..a3ed94e 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -113,10 +113,14 @@ class Bitwarden(Service): @staticmethod def get_storage_usage() -> int: storage_usage = 0 - storage_usage += get_storage_usage("/var/lib/bitwarden") - storage_usage += get_storage_usage("/var/lib/bitwarden_rs") + for folder in Bitwarden.get_folders(): + storage_usage += get_storage_usage(folder) return storage_usage + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/lib/bitwarden", "/var/lib/bitwarden_rs"] + @staticmethod def get_drive() -> str: with ReadUserData() as user_data: @@ -157,16 +161,11 @@ class Bitwarden(Service): [ FolderMoveNames( name="bitwarden", - bind_location="/var/lib/bitwarden", + bind_location=folder, group="vaultwarden", owner="vaultwarden", - ), - FolderMoveNames( - name="bitwarden_rs", - bind_location="/var/lib/bitwarden_rs", - group="vaultwarden", - owner="vaultwarden", - ), + ) + for folder in Bitwarden.get_folders() ], "bitwarden", ) From 1c0e3f0f92fe8b581620a71e8630ba66f3eae58d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 12:48:11 +0000 Subject: [PATCH 077/537] test(backups): implement get_folders() for gitea --- selfprivacy_api/services/gitea/__init__.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index 12b3b19..de7d858 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -112,9 +112,14 @@ class Gitea(Service): @staticmethod def get_storage_usage() -> int: storage_usage = 0 - storage_usage += get_storage_usage("/var/lib/gitea") + for folder in Gitea.get_folders(): + storage_usage += get_storage_usage() return storage_usage + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/lib/gitea"] + @staticmethod def get_drive() -> str: with ReadUserData() as user_data: @@ -154,10 +159,11 @@ class Gitea(Service): [ FolderMoveNames( name="gitea", - bind_location="/var/lib/gitea", + bind_location=folder, group="gitea", owner="gitea", - ), + ) + for folder in Gitea.get_folders() ], "gitea", ) From 0207b5a4734c5daba840b347e9d639ebfde6b82b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 12:54:21 +0000 Subject: [PATCH 078/537] fix(services): use get_foldername() for moving around --- selfprivacy_api/services/bitwarden/__init__.py | 2 +- selfprivacy_api/services/gitea/__init__.py | 2 +- selfprivacy_api/services/service.py | 4 ++++ 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index a3ed94e..90763bd 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -160,7 +160,7 @@ class 
Bitwarden(Service): job, [ FolderMoveNames( - name="bitwarden", + name=Bitwarden.get_foldername(folder), bind_location=folder, group="vaultwarden", owner="vaultwarden", diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index de7d858..a07ccd1 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -158,7 +158,7 @@ class Gitea(Service): job, [ FolderMoveNames( - name="gitea", + name=Gitea.get_foldername(folder), bind_location=folder, group="gitea", owner="gitea", diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 9a7663a..9ca8eba 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -140,6 +140,10 @@ class Service(ABC): # def get_folders() -> str: # pass + @staticmethod + def get_foldername(path: str) -> str: + return path.split("/")[-1] + @abstractmethod def move_to_volume(self, volume: BlockDevice) -> Job: pass From 8a4f256c123805d27ec5f490f1b7d6ed8bac65d2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 13:47:49 +0000 Subject: [PATCH 079/537] refactor(services): add get_folders() to the rest of the services --- selfprivacy_api/services/jitsi/__init__.py | 4 ++++ selfprivacy_api/services/mailserver/__init__.py | 4 ++++ selfprivacy_api/services/nextcloud/__init__.py | 4 ++++ selfprivacy_api/services/ocserv/__init__.py | 4 ++++ selfprivacy_api/services/pleroma/__init__.py | 4 ++++ 5 files changed, 20 insertions(+) diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py index f156f5a..60a94b3 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -115,6 +115,10 @@ class Jitsi(Service): storage_usage += get_storage_usage("/var/lib/jitsi-meet") return storage_usage + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/lib/jitsi-meet"] + @staticmethod def get_drive() -> str: return "sda1" diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index ed04e73..9c61b7a 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -100,6 +100,10 @@ class MailServer(Service): def get_storage_usage() -> int: return get_storage_usage("/var/vmail") + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/vmail", "/var/sieve"] + @staticmethod def get_drive() -> str: with utils.ReadUserData() as user_data: diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 37b4742..eb0f83f 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -122,6 +122,10 @@ class Nextcloud(Service): """ return get_storage_usage("/var/lib/nextcloud") + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/lib/nextcloud"] + @staticmethod def get_drive() -> str: """Get the name of disk where Nextcloud is installed.""" diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index bad3ad4..54a4b98 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -117,5 +117,9 @@ class Ocserv(Service): def get_storage_usage() -> int: return 0 + @staticmethod + def get_folders() -> typing.List[str]: + return [] + def move_to_volume(self, volume: BlockDevice) -> Job: raise 
NotImplementedError("ocserv service is not movable") diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index 2b8c4cb..f3656a6 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -103,6 +103,10 @@ class Pleroma(Service): storage_usage += get_storage_usage("/var/lib/postgresql") return storage_usage + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/lib/pleroma", "/var/lib/postgresql"] + @staticmethod def get_drive() -> str: with ReadUserData() as user_data: From a34b14449b0304b99e6de2e8a9469a92380a29a5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 13:53:51 +0000 Subject: [PATCH 080/537] refactor(services): make get_folders() a mandatory part of Service interface --- selfprivacy_api/services/service.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 9ca8eba..192f98e 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -135,10 +135,10 @@ class Service(ABC): def get_drive() -> str: pass - # @staticmethod - # @abstractmethod - # def get_folders() -> str: - # pass + @staticmethod + @abstractmethod + def get_folders() -> str: + pass @staticmethod def get_foldername(path: str) -> str: From 789fd71e2ff23d31010b1907bc1c5f8e453f2109 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:02:07 +0000 Subject: [PATCH 081/537] refactor(services): add a generic storage counter --- selfprivacy_api/services/gitea/__init__.py | 2 +- selfprivacy_api/services/service.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index a07ccd1..bb885a5 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -113,7 +113,7 @@ class Gitea(Service): def get_storage_usage() -> int: storage_usage = 0 for folder in Gitea.get_folders(): - storage_usage += get_storage_usage() + storage_usage += get_storage_usage(folder) return storage_usage @staticmethod diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 192f98e..c5a7527 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -8,6 +8,8 @@ from selfprivacy_api.jobs import Job from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.services.generic_size_counter import get_storage_usage + class ServiceStatus(Enum): """Enum for service status""" @@ -120,10 +122,12 @@ class Service(ABC): def get_logs(): pass - @staticmethod - @abstractmethod - def get_storage_usage() -> int: - pass + @classmethod + def get_storage_usage(cls) -> int: + storage_used = 0 + for folder in cls.get_folders(): + storage_used += get_storage_usage(folder) + return storage_used @staticmethod @abstractmethod From f8edcac33fbe47c733fb001eff08f320b96263dd Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:05:17 +0000 Subject: [PATCH 082/537] refactor(services): remove special storage counting from bitwarden --- selfprivacy_api/services/bitwarden/__init__.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 90763bd..8d2a903 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ 
b/selfprivacy_api/services/bitwarden/__init__.py @@ -5,7 +5,6 @@ import typing from selfprivacy_api.jobs import Job, JobStatus, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service -from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain @@ -110,13 +109,6 @@ class Bitwarden(Service): def get_logs(): return "" - @staticmethod - def get_storage_usage() -> int: - storage_usage = 0 - for folder in Bitwarden.get_folders(): - storage_usage += get_storage_usage(folder) - return storage_usage - @staticmethod def get_folders() -> typing.List[str]: return ["/var/lib/bitwarden", "/var/lib/bitwarden_rs"] From c040f0825cb12af4cc8a7c7c024cd8aa0d72b5ee Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:07:04 +0000 Subject: [PATCH 083/537] refactor(services): remove special storage counting from gitea --- selfprivacy_api/services/gitea/__init__.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index bb885a5..707cdec 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -5,7 +5,6 @@ import typing from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service -from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain @@ -109,13 +108,6 @@ class Gitea(Service): def get_logs(): return "" - @staticmethod - def get_storage_usage() -> int: - storage_usage = 0 - for folder in Gitea.get_folders(): - storage_usage += get_storage_usage(folder) - return storage_usage - @staticmethod def get_folders() -> typing.List[str]: return ["/var/lib/gitea"] From 312328af9569c8cf7e2fb9271de0c1e085d7f8e5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:11:47 +0000 Subject: [PATCH 084/537] refactor(services): remove special storage counting from jitsi --- selfprivacy_api/services/jitsi/__init__.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py index 60a94b3..a969eb2 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -5,7 +5,6 @@ import typing from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service -from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import ( get_service_status, get_service_status_from_several_units, @@ -109,12 +108,6 @@ class Jitsi(Service): def get_logs(): return "" - @staticmethod - def get_storage_usage() -> int: - storage_usage = 0 - storage_usage += get_storage_usage("/var/lib/jitsi-meet") - return storage_usage - @staticmethod def get_folders() -> typing.List[str]: return ["/var/lib/jitsi-meet"] From 042a2e4cf2db4545c7fe780a409fd3a3c7389da0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:14:48 
+0000 Subject: [PATCH 085/537] refactor(services): remove special storage counting from mail --- selfprivacy_api/services/mailserver/__init__.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index 9c61b7a..eb69ae9 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -6,7 +6,6 @@ import typing from selfprivacy_api.jobs import Job, JobStatus, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service -from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import ( get_service_status, get_service_status_from_several_units, @@ -96,10 +95,6 @@ class MailServer(Service): def get_logs(): return "" - @staticmethod - def get_storage_usage() -> int: - return get_storage_usage("/var/vmail") - @staticmethod def get_folders() -> typing.List[str]: return ["/var/vmail", "/var/sieve"] From 4475bcea45af2d325666dc7c809ce8fa43f470ac Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:18:44 +0000 Subject: [PATCH 086/537] documentation(services): move the storage count docstring to parent service class --- selfprivacy_api/services/service.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index c5a7527..1a1cb48 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -124,6 +124,11 @@ class Service(ABC): @classmethod def get_storage_usage(cls) -> int: + """ + Calculate the real storage usage of folders occupied by service + Calculate using pathlib. + Do not follow symlinks. + """ storage_used = 0 for folder in cls.get_folders(): storage_used += get_storage_usage(folder) From 617f7f162842bc88d51f8c4d6ba194c17531d19d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:19:15 +0000 Subject: [PATCH 087/537] refactor(services): remove special storage counting from nextcloud --- selfprivacy_api/services/nextcloud/__init__.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index eb0f83f..0036c77 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -4,7 +4,6 @@ import subprocess import typing from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service -from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain @@ -113,15 +112,6 @@ class Nextcloud(Service): """Return Nextcloud logs.""" return "" - @staticmethod - def get_storage_usage() -> int: - """ - Calculate the real storage usage of /var/lib/nextcloud and all subdirectories. - Calculate using pathlib. - Do not follow symlinks. 
- """ - return get_storage_usage("/var/lib/nextcloud") - @staticmethod def get_folders() -> typing.List[str]: return ["/var/lib/nextcloud"] From 3605a71c1dfa25a4abb1d861c9aad1b501d99d04 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:28:12 +0000 Subject: [PATCH 088/537] refactor(services): remove special storage counting from ocserv --- selfprivacy_api/services/ocserv/__init__.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index 54a4b98..a15cb84 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -4,7 +4,6 @@ import subprocess import typing from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service -from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData @@ -113,10 +112,6 @@ class Ocserv(Service): ), ] - @staticmethod - def get_storage_usage() -> int: - return 0 - @staticmethod def get_folders() -> typing.List[str]: return [] From a664ab3dd468f44f223512adbc553805234f2e56 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:31:04 +0000 Subject: [PATCH 089/537] refactor(services): remove special storage counting from pleroma --- selfprivacy_api/services/pleroma/__init__.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index f3656a6..0191c18 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -4,7 +4,6 @@ import subprocess import typing from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service -from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain @@ -96,13 +95,6 @@ class Pleroma(Service): def get_logs(): return "" - @staticmethod - def get_storage_usage() -> int: - storage_usage = 0 - storage_usage += get_storage_usage("/var/lib/pleroma") - storage_usage += get_storage_usage("/var/lib/postgresql") - return storage_usage - @staticmethod def get_folders() -> typing.List[str]: return ["/var/lib/pleroma", "/var/lib/postgresql"] From 0b7d2d0bf4021d832a0fc00552a6416024805588 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:54:42 +0000 Subject: [PATCH 090/537] refactor(services): add OwnedPath struct --- selfprivacy_api/services/owned_path.py | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 selfprivacy_api/services/owned_path.py diff --git a/selfprivacy_api/services/owned_path.py b/selfprivacy_api/services/owned_path.py new file mode 100644 index 0000000..23542dc --- /dev/null +++ b/selfprivacy_api/services/owned_path.py @@ -0,0 +1,7 @@ +from pydantic import BaseModel + + +class OwnedPath(BaseModel): + path: str + owner: str + group: str From c34eb6d44767f3ec0c6261c12ed3d5ee7ae44029 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 15:00:36 +0000 Subject: [PATCH 
091/537] refactor(services): add overridable get owner and get group --- selfprivacy_api/services/service.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 1a1cb48..cc34aa2 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -62,6 +62,14 @@ class Service(ABC): def get_url() -> typing.Optional[str]: pass + @classmethod + def get_user(cls) -> typing.Optional[str]: + return cls.get_id() + + @classmethod + def get_group(cls) -> typing.Optional[str]: + return cls.get_user() + @staticmethod @abstractmethod def is_movable() -> bool: pass From ad66513f27525c9f0fb2af1eff625073e2ccd72f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 15:37:11 +0000 Subject: [PATCH 092/537] refactor(services): add folder owner derivation --- selfprivacy_api/services/service.py | 37 +++++++++++++++++++++++++---- tests/test_services.py | 20 ++++++++++++++++ 2 files changed, 53 insertions(+), 4 deletions(-) create mode 100644 tests/test_services.py diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index cc34aa2..1a1d56f 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -9,6 +9,7 @@ from selfprivacy_api.jobs import Job from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.owned_path import OwnedPath class ServiceStatus(Enum): @@ -152,10 +153,29 @@ class Service(ABC): def get_drive() -> str: pass - @staticmethod - @abstractmethod - def get_folders() -> str: - pass + @classmethod + def get_folders(cls) -> str: + """ + Get a plain list of occupied directories + Default extracts info from overridden get_owned_folders() + """ + if cls.get_owned_folders == Service.get_owned_folders: + raise NotImplementedError( + "you need to implement at least one of get_folders() or get_owned_folders()" + ) + return [owned_folder.path for owned_folder in cls.get_owned_folders()] + + @classmethod + def get_owned_folders(cls) -> str: + """ + Get a list of occupied directories with ownership info + Default extracts info from overridden get_folders() + """ + if cls.get_folders == Service.get_folders: + raise NotImplementedError( + "you need to implement at least one of get_folders() or get_owned_folders()" + ) + return [cls.owned_path(path) for path in cls.get_folders()] @staticmethod def get_foldername(path: str) -> str: @@ -165,6 +185,15 @@ class Service(ABC): def move_to_volume(self, volume: BlockDevice) -> Job: pass + @classmethod + def owned_path(cls, path: str): + """A default guess on folder ownership""" + return OwnedPath( + path=path, + owner=cls.get_user(), + group=cls.get_group(), + ) + def pre_backup(self): pass
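For readers of the mutual defaults above: each default detects whether its counterpart was overridden before delegating to it, so a subclass only has to supply one of the two. A distilled self-contained sketch of the pattern (Base and PlainFolders are illustrative names, not from the patch; it compares the underlying __func__ objects so the detection also works when called on subclasses):

    class Base:
        @classmethod
        def folders(cls) -> list:
            # default: derive plain paths from the ownership-aware variant
            if cls.owned_folders.__func__ is Base.owned_folders.__func__:
                raise NotImplementedError("override folders() or owned_folders()")
            return [path for (path, _owner) in cls.owned_folders()]

        @classmethod
        def owned_folders(cls) -> list:
            # default: decorate plain paths with a guessed owner
            if cls.folders.__func__ is Base.folders.__func__:
                raise NotImplementedError("override folders() or owned_folders()")
            return [(path, "root") for path in cls.folders()]


    class PlainFolders(Base):
        @classmethod
        def folders(cls) -> list:
            return ["/var/lib/example"]


    assert PlainFolders.owned_folders() == [("/var/lib/example", "root")]

diff --git a/tests/test_services.py new file mode 100644 index 0000000..52164a9 --- /dev/null +++ b/tests/test_services.py @@ -0,0 +1,20 @@ +""" + Tests for generic service methods +""" +from pytest import raises + +from selfprivacy_api.services.test_service import DummyService +from selfprivacy_api.services.service import Service + + +def test_unimplemented_folders_raises(): + with raises(NotImplementedError): + Service.get_folders() + with raises(NotImplementedError): + Service.get_owned_folders() + + class OurDummy(DummyService, folders=["testydir", "dirtessimo"]): + pass + + owned_folders = OurDummy.get_owned_folders() + assert owned_folders is not None From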
238a656cd9a6cbe663a56f7b3bf37147ad06b48b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 16:01:51 +0000 Subject: [PATCH 093/537] refactor(services): make a foldermove from owned path --- selfprivacy_api/services/generic_service_mover.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index 148ac1a..cf353cd 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -1,5 +1,6 @@ """Generic handler for moving services""" +from __future__ import annotations import subprocess import time import pathlib @@ -11,6 +12,7 @@ from selfprivacy_api.utils.huey import huey from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services.service import Service, ServiceStatus +from selfprivacy_api.services.owned_path import OwnedPath class FolderMoveNames(BaseModel): @@ -19,6 +21,19 @@ class FolderMoveNames(BaseModel): owner: str group: str + @staticmethod + def from_owned_path(path: OwnedPath) -> FolderMoveNames: + return FolderMoveNames( + name=FolderMoveNames.get_foldername(), + bind_location=path.path, + owner=path.owner, + group=path.group, + ) + + @staticmethod + def get_foldername(path: str) -> str: + return path.split("/")[-1] + @huey.task() def move_service( From 043675ce140a6b3c434ca9018ab5ce4a63e691c0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 16:04:21 +0000 Subject: [PATCH 094/537] fix(services): folder methods typing --- selfprivacy_api/services/service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 1a1d56f..f804773 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -154,7 +154,7 @@ class Service(ABC): pass @classmethod - def get_folders(cls) -> str: + def get_folders(cls) -> typing.List[str]: """ get a plain list of occupied directories Default extracts info from overriden get_owned_folders() @@ -166,7 +166,7 @@ class Service(ABC): return [owned_folder.path for owned_folder in cls.get_owned_folders()] @classmethod - def get_owned_folders(cls) -> str: + def get_owned_folders(cls) -> typing.List[OwnedPath]: """ Get a list of occupied directories with ownership info Default extracts info from overriden get_folders() From ba72fadb8de1031c803a151472ac2e1c9e718c26 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 19 Apr 2023 11:34:14 +0000 Subject: [PATCH 095/537] test(services): test that we indeed return correct folders and owned folders from real services --- .../services/bitwarden/__init__.py | 4 ++++ selfprivacy_api/services/pleroma/__init__.py | 20 +++++++++++++++-- tests/test_services.py | 22 +++++++++++++++++++ 3 files changed, 44 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 8d2a903..56eb0fd 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -37,6 +37,10 @@ class Bitwarden(Service): """Read SVG icon from file and return it as base64 encoded string.""" return base64.b64encode(BITWARDEN_ICON.encode("utf-8")).decode("utf-8") + @staticmethod + def get_user() -> str: + return "vaultwarden" + @staticmethod def get_url() -> typing.Optional[str]: """Return service url.""" diff --git 
a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index 0191c18..f7e185b 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -6,6 +6,7 @@ from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice import selfprivacy_api.utils.network as network_utils @@ -96,8 +97,23 @@ class Pleroma(Service): return "" @staticmethod - def get_folders() -> typing.List[str]: - return ["/var/lib/pleroma", "/var/lib/postgresql"] + def get_owned_folders() -> typing.List[OwnedPath]: + """ + Get a list of occupied directories with ownership info + pleroma has folders that are owned by different users + """ + return [ + OwnedPath( + path="/var/lib/pleroma", + owner="pleroma", + group="pleroma", + ), + OwnedPath( + path="/var/lib/postgresql", + owner="postgres", + group="postgres", + ), + ] @staticmethod def get_drive() -> str: diff --git a/tests/test_services.py b/tests/test_services.py index 52164a9..fbbae09 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -3,6 +3,10 @@ """ from pytest import raises +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.services.owned_path import OwnedPath + from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.services.service import Service @@ -18,3 +22,21 @@ def test_unimplemented_folders_raises(): owned_folders = OurDummy.get_owned_folders() assert owned_folders is not None + + +def test_owned_folders_from_not_owned(): + assert Bitwarden.get_owned_folders() == [ + OwnedPath( + path=folder, + group="vaultwarden", + owner="vaultwarden", + ) + for folder in Bitwarden.get_folders() + ] + + +def test_paths_from_owned_paths(): + assert len(Pleroma.get_folders()) == 2 + assert Pleroma.get_folders() == [ + ownedpath.path for ownedpath in Pleroma.get_owned_folders() + ] From dde86725b9feaa184d17d97c47e3ef59608dea89 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 19 Apr 2023 12:12:46 +0000 Subject: [PATCH 096/537] test(services): test derivation of foldermoves --- .../services/generic_service_mover.py | 2 +- tests/test_services.py | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index cf353cd..1e5efe6 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -24,7 +24,7 @@ class FolderMoveNames(BaseModel): @staticmethod def from_owned_path(path: OwnedPath) -> FolderMoveNames: return FolderMoveNames( - name=FolderMoveNames.get_foldername(), + name=FolderMoveNames.get_foldername(path.path), bind_location=path.path, owner=path.owner, group=path.group, diff --git a/tests/test_services.py b/tests/test_services.py index fbbae09..5816140 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -6,6 +6,7 @@ from pytest import raises from selfprivacy_api.services.bitwarden import Bitwarden from selfprivacy_api.services.pleroma 
import Pleroma from selfprivacy_api.services.owned_path import OwnedPath +from selfprivacy_api.services.generic_service_mover import FolderMoveNames from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.services.service import Service @@ -40,3 +41,18 @@ def test_paths_from_owned_paths(): assert Pleroma.get_folders() == [ ownedpath.path for ownedpath in Pleroma.get_owned_folders() ] + + +def test_foldermoves_from_ownedpaths(): + owned = OwnedPath( + path="var/lib/bitwarden", + group="vaultwarden", + owner="vaultwarden", + ) + + assert FolderMoveNames.from_owned_path(owned) == FolderMoveNames( + name="bitwarden", + bind_location="var/lib/bitwarden", + group="vaultwarden", + owner="vaultwarden", + ) From 65acd3173ab8f076951b5aff72164454d728d592 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 19 Apr 2023 12:43:47 +0000 Subject: [PATCH 097/537] refactor(services): use fully generic foldermoves --- .../services/bitwarden/__init__.py | 10 +--------- .../services/generic_service_mover.py | 9 +++++++++ selfprivacy_api/services/gitea/__init__.py | 10 +--------- .../services/mailserver/__init__.py | 19 +++++-------------- .../services/nextcloud/__init__.py | 9 +-------- selfprivacy_api/services/pleroma/__init__.py | 15 +-------------- 6 files changed, 18 insertions(+), 54 deletions(-) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 56eb0fd..6842af6 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -154,15 +154,7 @@ class Bitwarden(Service): self, volume, job, - [ - FolderMoveNames( - name=Bitwarden.get_foldername(folder), - bind_location=folder, - group="vaultwarden", - owner="vaultwarden", - ) - for folder in Bitwarden.get_folders() - ], + FolderMoveNames.default_foldermoves(self), "bitwarden", ) diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index 1e5efe6..e2b26f4 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -34,6 +34,15 @@ class FolderMoveNames(BaseModel): def get_foldername(path: str) -> str: return path.split("/")[-1] + @staticmethod + def default_foldermoves(service: Service): + return ( + [ + FolderMoveNames.from_owned_path(folder) + for folder in service.get_owned_folders() + ] + ) + @huey.task() def move_service(
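A small Python reminder relevant to the parenthesized return in default_foldermoves() above: the parentheses only group the expression, while a trailing comma would instead build a 1-tuple, and move_service() expects a plain list of FolderMoveNames here. A tiny illustration (plain Python, not part of the patch):

    folders = ["vmail", "sieve"]
    assert (folders) == folders     # grouping parentheses: still the same list
    assert (folders,) != folders    # a trailing comma makes a 1-tuple instead
    assert (folders,)[0] is folders

diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index 707cdec..f9ff3d2 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -148,15 +148,7 @@ class Gitea(Service): self, volume, job, - [ - FolderMoveNames( - name=Gitea.get_foldername(folder), - bind_location=folder, - group="gitea", - owner="gitea", - ) - for folder in Gitea.get_folders() - ], + FolderMoveNames.default_foldermoves(self), "gitea", ) diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index eb69ae9..b0a6e30 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -37,6 +37,10 @@ class MailServer(Service): def get_svg_icon() -> str: return base64.b64encode(MAILSERVER_ICON.encode("utf-8")).decode("utf-8") + @staticmethod + def get_user() -> str: + return "virtualMail" + @staticmethod def get_url() -> typing.Optional[str]: """Return service url.""" @@ -158,20 +162,7 @@ class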
MailServer(Service): self, volume, job, - [ - FolderMoveNames( - name="vmail", - bind_location="/var/vmail", - group="virtualMail", - owner="virtualMail", - ), - FolderMoveNames( - name="sieve", - bind_location="/var/sieve", - group="virtualMail", - owner="virtualMail", - ), - ], + FolderMoveNames.default_foldermoves(self), "mailserver", ) diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 0036c77..ae81403 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -152,14 +152,7 @@ class Nextcloud(Service): self, volume, job, - [ - FolderMoveNames( - name="nextcloud", - bind_location="/var/lib/nextcloud", - owner="nextcloud", - group="nextcloud", - ), - ], + FolderMoveNames.default_foldermoves(self), "nextcloud", ) return job diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index f7e185b..0d5b338 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -150,20 +150,7 @@ class Pleroma(Service): self, volume, job, - [ - FolderMoveNames( - name="pleroma", - bind_location="/var/lib/pleroma", - owner="pleroma", - group="pleroma", - ), - FolderMoveNames( - name="postgresql", - bind_location="/var/lib/postgresql", - owner="postgres", - group="postgres", - ), - ], + FolderMoveNames.default_foldermoves(self), "pleroma", ) return job From fa360655fee5503db1475380fe00cd318e15f194 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 19 Apr 2023 13:59:03 +0000 Subject: [PATCH 098/537] refactor(services): make local secret setting public --- selfprivacy_api/backup/local_secret.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py index 02d78a4..e04733c 100644 --- a/selfprivacy_api/backup/local_secret.py +++ b/selfprivacy_api/backup/local_secret.py @@ -23,10 +23,14 @@ class LocalBackupSecret: LocalBackupSecret.reset() return redis.get(REDIS_KEY) + @staticmethod + def set(secret: str): + redis.set(REDIS_KEY, secret) + @staticmethod def reset(): new_secret = LocalBackupSecret._generate() - LocalBackupSecret._store(new_secret) + LocalBackupSecret.set(new_secret) @staticmethod def exists() -> bool: @@ -35,7 +39,3 @@ class LocalBackupSecret: @staticmethod def _generate() -> str: return secrets.token_urlsafe(256) - - @staticmethod - def _store(secret: str): - redis.set(REDIS_KEY, secret) From 72f4fc8ae731ea576c12bfb7daa592394136552c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 19 Apr 2023 14:24:53 +0000 Subject: [PATCH 099/537] test(backups): test local secrets --- selfprivacy_api/backup/local_secret.py | 4 +++ tests/test_graphql/test_localsecret.py | 38 ++++++++++++++++++++++++++ 2 files changed, 42 insertions(+) create mode 100644 tests/test_graphql/test_localsecret.py diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py index e04733c..76237f3 100644 --- a/selfprivacy_api/backup/local_secret.py +++ b/selfprivacy_api/backup/local_secret.py @@ -32,6 +32,10 @@ class LocalBackupSecret: new_secret = LocalBackupSecret._generate() LocalBackupSecret.set(new_secret) + @staticmethod + def _full_reset(): + redis.delete(REDIS_KEY) + @staticmethod def exists() -> bool: return redis.exists(REDIS_KEY) diff --git a/tests/test_graphql/test_localsecret.py b/tests/test_graphql/test_localsecret.py new file mode 100644 index 0000000..d4b637a --- 
/dev/null +++ b/tests/test_graphql/test_localsecret.py @@ -0,0 +1,38 @@ +from selfprivacy_api.backup.local_secret import LocalBackupSecret +from pytest import fixture + + +@fixture() +def localsecret(): + LocalBackupSecret._full_reset() + return LocalBackupSecret + + +def test_local_secret_firstget(localsecret): + assert not LocalBackupSecret.exists() + secret = LocalBackupSecret.get() + assert LocalBackupSecret.exists() + assert secret is not None + + # making sure it does not reset again + secret2 = LocalBackupSecret.get() + assert LocalBackupSecret.exists() + assert secret2 == secret + + +def test_local_secret_reset(localsecret): + secret1 = LocalBackupSecret.get() + + LocalBackupSecret.reset() + secret2 = LocalBackupSecret.get() + assert secret2 is not None + assert secret2 != secret1 + + +def test_local_secret_set(localsecret): + newsecret = "great and totally safe secret" + oldsecret = LocalBackupSecret.get() + assert oldsecret != newsecret + + LocalBackupSecret.set(newsecret) + assert LocalBackupSecret.get() == newsecret \ No newline at end of file From d1425561d99a9dd631997106dfd6b6ca4bf68e3a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 19 Apr 2023 15:09:06 +0000 Subject: [PATCH 100/537] feature(backups): restore task --- selfprivacy_api/backup/tasks.py | 7 +++++ tests/test_graphql/test_backup.py | 46 +++++++++++++++++++++++++------ 2 files changed, 45 insertions(+), 8 deletions(-) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index 4f6ab16..3236029 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -1,5 +1,6 @@ from datetime import datetime +from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey from selfprivacy_api.services.service import Service from selfprivacy_api.backup import Backups @@ -24,6 +25,12 @@ def start_backup(service: Service) -> bool: return True +@huey.task() +def restore_snapshot(snapshot: Snapshot) -> bool: + Backups.restore_snapshot(snapshot) + return True + + @huey.periodic_task(validate_datetime=validate_datetime) def automatic_backup(): time = datetime.now() diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index d0f5d00..5e064fa 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -14,7 +14,7 @@ from selfprivacy_api.backup import Backups import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze -from selfprivacy_api.backup.tasks import start_backup +from selfprivacy_api.backup.tasks import start_backup, restore_snapshot from selfprivacy_api.backup.storage import Storage @@ -162,16 +162,22 @@ def test_backup_returns_snapshot(backups, dummy_service): assert snapshot.created_at is not None +def service_files(service): + result = [] + for service_folder in service.get_folders(): + service_filename = listdir(service_folder)[0] + assert service_filename is not None + service_file = path.join(service_folder, service_filename) + result.append(service_file) + return result + + def test_restore(backups, dummy_service): - paths_to_nuke = [] + paths_to_nuke = service_files(dummy_service) contents = [] - for service_folder in dummy_service.get_folders(): - file_to_nuke = listdir(service_folder)[0] - assert file_to_nuke is not None - path_to_nuke = path.join(service_folder, file_to_nuke) - paths_to_nuke.append(path_to_nuke) - with open(path_to_nuke, 
"r") as file: + for service_file in paths_to_nuke: + with open(service_file, "r") as file: contents.append(file.read()) Backups.back_up(dummy_service) @@ -214,6 +220,30 @@ def test_backup_service_task(backups, dummy_service): assert len(snaps) == 1 +def test_restore_snapshot_task(backups, dummy_service): + Backups.back_up(dummy_service) + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + + paths_to_nuke = service_files(dummy_service) + contents = [] + + for service_file in paths_to_nuke: + with open(service_file, "r") as file: + contents.append(file.read()) + + for p in paths_to_nuke: + remove(p) + + handle = restore_snapshot(snaps[0]) + handle(blocking=True) + + for p, content in zip(paths_to_nuke, contents): + assert path.exists(p) + with open(p, "r") as file: + assert file.read() == content + + def test_autobackup_enable_service(backups, dummy_service): assert not Backups.is_autobackup_enabled(dummy_service) From 108fca0eb3a1365fb51d0b312fbb9b6304e0bbb4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 21 Apr 2023 11:59:15 +0000 Subject: [PATCH 101/537] feature(backups): simplest jobs intergration in tasks: created and finished --- selfprivacy_api/backup/jobs.py | 31 +++++++++++++++++++++++++++++++ selfprivacy_api/backup/tasks.py | 5 +++++ 2 files changed, 36 insertions(+) create mode 100644 selfprivacy_api/backup/jobs.py diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py new file mode 100644 index 0000000..a90c4bd --- /dev/null +++ b/selfprivacy_api/backup/jobs.py @@ -0,0 +1,31 @@ +from typing import Optional + +from selfprivacy_api.jobs import Jobs, Job, JobStatus +from selfprivacy_api.services.service import Service + + +def backup_job_type(service: Service): + return f"services.{service.get_id()}.backup" + + +def add_backup_job(service: Service) -> Job: + display_name = service.get_display_name() + job = Jobs.add( + type_id=backup_job_type(service), + name=f"Backup {display_name}", + description=f"Backing up {display_name}", + ) + return job + + +def get_job_by_type(type_id: str) -> Optional[Job]: + for job in Jobs.get_jobs(): + if job.type_id == type_id and job.status in [ + JobStatus.CREATED, + JobStatus.RUNNING, + ]: + return job + + +def get_backup_job(service: Service) -> Optional[Job]: + return get_job_by_type(backup_job_type(service)) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index 3236029..111f255 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -4,6 +4,8 @@ from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey from selfprivacy_api.services.service import Service from selfprivacy_api.backup import Backups +from selfprivacy_api.backup.jobs import get_backup_job, add_backup_job +from selfprivacy_api.jobs import Jobs, JobStatus def validate_datetime(dt: datetime): @@ -21,7 +23,10 @@ def validate_datetime(dt: datetime): # huey tasks need to return something @huey.task() def start_backup(service: Service) -> bool: + add_backup_job(service) Backups.back_up(service) + job = get_backup_job(service) + Jobs.update(job, status=JobStatus.FINISHED) return True From cb1dd1011e91dec84471fabbe2bac8e7bb1b5185 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 21 Apr 2023 12:19:59 +0000 Subject: [PATCH 102/537] test(backups): assure that jobs are created and not duplicated --- tests/test_graphql/test_backup.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_graphql/test_backup.py 
From cb1dd1011e91dec84471fabbe2bac8e7bb1b5185 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 21 Apr 2023 12:19:59 +0000 Subject: [PATCH 102/537] test(backups): assure that jobs are created and not duplicated --- tests/test_graphql/test_backup.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 5e064fa..420e14e 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -9,6 +9,7 @@ import selfprivacy_api.services as services from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.backup import Backups import selfprivacy_api.backup.providers as providers @@ -16,6 +17,7 @@ from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.backup.tasks import start_backup, restore_snapshot from selfprivacy_api.backup.storage import Storage +from selfprivacy_api.backup.jobs import get_backup_job TESTFILE_BODY = "testytest!" @@ -30,6 +32,8 @@ def backups(tmpdir): test_repo_path = path.join(tmpdir, "totallyunrelated") Backups.set_localfile_repo(test_repo_path) + Jobs.reset() + @pytest.fixture() def backups_backblaze(generic_userdata): @@ -219,6 +223,11 @@ def test_backup_service_task(backups, dummy_service): snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 + id = dummy_service.get_id() + finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED] + finished_types = [job.type_id for job in finished_jobs] + assert finished_types.count(f"services.{id}.backup") == 1 + def test_restore_snapshot_task(backups, dummy_service): Backups.back_up(dummy_service) From 3eb8382d9bce699263ff166344710e963cb968d1 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 24 Apr 2023 16:15:12 +0000 Subject: [PATCH 103/537] feature(backups): also create a job if not called from a task --- selfprivacy_api/backup/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f89a54e..0f35456 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -15,6 +15,7 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.backup.storage import Storage +from selfprivacy_api.backup.jobs import get_backup_job, add_backup_job class Backups: @@ -197,6 +198,9 @@ class Backups: folders = service.get_folders() repo_name = service.get_id() + if get_backup_job(service) is None: + add_backup_job(service) + service.pre_backup() snapshot = Backups.provider().backuper.start_backup(folders, repo_name) Backups._store_last_snapshot(repo_name, snapshot) From c0dfbb3ca262fc51fc33f5d48b1fd29e271e27c9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 24 Apr 2023 16:16:45 +0000 Subject: [PATCH 104/537] refactor(backups): delete unused redis import from backups class --- selfprivacy_api/backup/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 0f35456..e74877d 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -4,8 +4,6 @@ from datetime import datetime, timedelta from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils import ReadUserData -from selfprivacy_api.utils.redis_pool import RedisPool - from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service
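
The tests in the patches above drive these huey tasks through result handles: calling start_backup(...) enqueues the task and returns a handle, and handle(blocking=True) waits for completion. A minimal sketch of that pattern, assuming the huey package in immediate (synchronous) mode; the project itself uses the shared instance from selfprivacy_api.utils.huey:

    from huey import MemoryHuey

    # Immediate mode executes tasks synchronously, which is what makes
    # the blocking wait in the tests return right away.
    huey = MemoryHuey(immediate=True)


    @huey.task()
    def add(a, b):
        return a + b


    handle = add(2, 3)                 # enqueue; runs at once in immediate mode
    assert handle(blocking=True) == 5  # the result handle is callable and waits
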
From ad6cc5e1bc0294c147f59c8794293a45153d82af Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 24 Apr 2023 16:37:07 +0000 Subject: [PATCH 105/537] refactor(backups): make a backup job running when the backup code itself is executed --- selfprivacy_api/backup/__init__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index e74877d..ec1180e 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -14,6 +14,7 @@ from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.backup.storage import Storage from selfprivacy_api.backup.jobs import get_backup_job, add_backup_job +from selfprivacy_api.jobs import Jobs, JobStatus class Backups: @@ -196,8 +197,10 @@ class Backups: folders = service.get_folders() repo_name = service.get_id() - if get_backup_job(service) is None: - add_backup_job(service) + job = get_backup_job(service) + if job is None: + job = add_backup_job(service) + Jobs.update(job, status=JobStatus.RUNNING) service.pre_backup() snapshot = Backups.provider().backuper.start_backup(folders, repo_name) From d340b0ca67f63a71e5dff43bb280a12fd23cf404 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 24 Apr 2023 16:50:22 +0000 Subject: [PATCH 106/537] test(backups): break out a finished job checker --- tests/test_graphql/test_backup.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 420e14e..cb52e88 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -216,6 +216,12 @@ def test_init_tracking(backups, raw_dummy_service): assert Backups.is_initted(raw_dummy_service) is True +def assert_job_finished(job_type, count): + finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED] + finished_types = [job.type_id for job in finished_jobs] + assert finished_types.count(job_type) == count + + def test_backup_service_task(backups, dummy_service): handle = start_backup(dummy_service) handle(blocking=True) @@ -224,9 +230,7 @@ def test_backup_service_task(backups, dummy_service): assert len(snaps) == 1 id = dummy_service.get_id() - finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED] - finished_types = [job.type_id for job in finished_jobs] - assert finished_types.count(f"services.{id}.backup") == 1 + assert_job_finished(f"services.{id}.backup", count=1) def test_restore_snapshot_task(backups, dummy_service): From 396b42b69cfd95c0ef758065ba0b8d991f84d65e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 24 Apr 2023 17:03:56 +0000 Subject: [PATCH 107/537] test(backups): test jobs starting and finishing when from Backups --- selfprivacy_api/backup/__init__.py | 1 + selfprivacy_api/backup/tasks.py | 5 +++-- tests/test_graphql/test_backup.py | 5 +++++ 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index ec1180e..1992648 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -207,6 +207,7 @@ class Backups: Backups._store_last_snapshot(repo_name, snapshot) service.post_restore() + Jobs.update(job, status=JobStatus.FINISHED) @staticmethod def init_repo(service: Service): diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index 
111f255..3b3051e 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -23,10 +23,11 @@ def validate_datetime(dt: datetime): # huey tasks need to return something @huey.task() def start_backup(service: Service) -> bool: + # Backups can create the job, but doing this here + # allows us to see the job as queued before it is actually executed add_backup_job(service) + Backups.back_up(service) - job = get_backup_job(service) - Jobs.update(job, status=JobStatus.FINISHED) return True diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index cb52e88..a185492 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -131,7 +131,10 @@ def test_backup_simple_file(raw_dummy_service, file_backup): def test_backup_service(dummy_service, backups): + id = dummy_service.get_id() + assert_job_finished(f"services.{id}.backup", count=0) assert Backups.get_last_backed_up(dummy_service) is None + Backups.back_up(dummy_service) now = datetime.now(timezone.utc) @@ -140,6 +143,8 @@ def test_backup_service(dummy_service, backups): assert now > date assert now - date < timedelta(minutes=1) + assert_job_finished(f"services.{id}.backup", count=1) + def test_no_repo(memory_backup): with pytest.raises(ValueError): From 0bf18dcdc7b16103c31ad86a24bc2848941d75dc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 8 May 2023 10:49:18 +0000 Subject: [PATCH 108/537] refactor(backups): cleanup unused imports in tasks --- selfprivacy_api/backup/tasks.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index 3b3051e..d92a926 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -4,8 +4,7 @@ from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey from selfprivacy_api.services.service import Service from selfprivacy_api.backup import Backups -from selfprivacy_api.backup.jobs import get_backup_job, add_backup_job -from selfprivacy_api.jobs import Jobs, JobStatus +from selfprivacy_api.backup.jobs import add_backup_job def validate_datetime(dt: datetime): From b2e231ebae2188851791bfd54dda22772b0b0442 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 8 May 2023 10:55:22 +0000 Subject: [PATCH 109/537] feature(backups): set job status to error if backup fails --- selfprivacy_api/backup/__init__.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 1992648..f3e2ba5 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -202,11 +202,15 @@ class Backups: job = add_backup_job(service) Jobs.update(job, status=JobStatus.RUNNING) - service.pre_backup() - snapshot = Backups.provider().backuper.start_backup(folders, repo_name) - Backups._store_last_snapshot(repo_name, snapshot) + try: + service.pre_backup() + snapshot = Backups.provider().backuper.start_backup(folders, repo_name) + Backups._store_last_snapshot(repo_name, snapshot) + service.post_restore() + except Exception as e: + Jobs.update(job, status=JobStatus.ERROR) + raise e - service.post_restore() Jobs.update(job, status=JobStatus.FINISHED) @staticmethod From 7ddfad10d4d8f6f51a8a5b6b171e925bed45f545 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 8 May 2023 12:21:02 +0000 Subject: [PATCH 110/537] refactor(backups): quick-expiration logs of jobs status updates --- 
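
Note on the approach below: each status update is LPUSHed onto a per-job redis list whose TTL is refreshed on every write, so the whole log quietly expires shortly after the job stops reporting. A minimal sketch of the same pattern, assuming the redis package and a reachable local server (the real code obtains its connection from RedisPool):

    import redis

    r = redis.Redis(decode_responses=True)


    def log_status_update(job_uid: str, status: str):
        key = "jobs_logs:" + job_uid
        r.lpush(key, status)  # newest update first
        r.expire(key, 10)     # the whole list vanishes 10s after the last write


    def status_updates(job_uid: str) -> list:
        key = "jobs_logs:" + job_uid
        if not r.exists(key):
            return []
        return r.lrange(key, 0, -1)
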
selfprivacy_api/jobs/__init__.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index fe4a053..211a2ab 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -27,7 +27,7 @@ from selfprivacy_api.utils.redis_pool import RedisPool JOB_EXPIRATION_SECONDS = 10 * 24 * 60 * 60 # ten days -class JobStatus(Enum): +class JobStatus(str, Enum): """ Status of a job. """ @@ -70,6 +70,7 @@ class Jobs: jobs = Jobs.get_jobs() for job in jobs: Jobs.remove(job) + Jobs.reset_logs() @staticmethod def add( @@ -120,6 +121,21 @@ class Jobs: return True return False + @staticmethod + def reset_logs(): + redis = RedisPool().get_connection() + for key in redis.keys("jobs_logs:" + "*"): + redis.delete(key) + + @staticmethod + def log_status_update(job: Job, status: JobStatus): + redis = RedisPool().get_connection() + key = _redis_log_key_from_uuid(job.uid) + if redis.exists(key): + assert redis.type(key) == "list" + redis.lpush(key, str(status)) + redis.expire(key, 10) + @staticmethod def update( job: Job, @@ -143,6 +159,7 @@ class Jobs: if progress is not None: job.progress = progress job.status = status + Jobs.log_status_update(job, status) job.updated_at = datetime.datetime.now() job.error = error job.result = result @@ -198,6 +215,10 @@ def _redis_key_from_uuid(uuid_string): return "jobs:" + str(uuid_string) +def _redis_log_key_from_uuid(uuid_string): + return "jobs_logs:" + str(uuid_string) + + def _store_job_as_hash(redis, redis_key, model): for key, value in model.dict().items(): if isinstance(value, uuid.UUID): From e25aa2cb33df5743b5f3b60136ab53b79c7c1ddc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 8 May 2023 12:43:11 +0000 Subject: [PATCH 111/537] test(backups): test that the job has run --- selfprivacy_api/jobs/__init__.py | 19 ++++++++++++++++++- tests/test_graphql/test_backup.py | 10 +++++++++- 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 211a2ab..16306a7 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -133,9 +133,26 @@ class Jobs: key = _redis_log_key_from_uuid(job.uid) if redis.exists(key): assert redis.type(key) == "list" - redis.lpush(key, str(status)) + redis.lpush(key, status.value) redis.expire(key, 10) + @staticmethod + def status_updates(job: Job) -> typing.List[JobStatus]: + result = [] + + redis = RedisPool().get_connection() + key = _redis_log_key_from_uuid(job.uid) + if not redis.exists(key): + return [] + + status_strings = redis.lrange(key, 0, -1) + for status in status_strings: + try: + result.append(JobStatus[status]) + except KeyError as e: + raise ValueError("impossible job status: " + status) from e + return result + @staticmethod def update( job: Job, diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index a185492..ec12506 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -227,6 +227,12 @@ def assert_job_finished(job_type, count): assert finished_types.count(job_type) == count +def assert_job_has_run(job_type): + finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED] + job = [job for job in finished_jobs if job.type_id == job_type][0] + assert JobStatus.RUNNING in Jobs.status_updates(job) + + def test_backup_service_task(backups, dummy_service): handle = start_backup(dummy_service) handle(blocking=True) 
@@ -235,7 +241,9 @@ def test_backup_service_task(backups, dummy_service): assert len(snaps) == 1 id = dummy_service.get_id() - assert_job_finished(f"services.{id}.backup", count=1) + job_type_id = f"services.{id}.backup" + assert_job_finished(job_type_id, count=1) + assert_job_has_run(job_type_id) def test_restore_snapshot_task(backups, dummy_service): From a6b3a5e5901d15513abbf262220e68003ae9a873 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 12 May 2023 11:07:55 +0000 Subject: [PATCH 112/537] feature(backups): deny adding a backup job if another one is already queued --- selfprivacy_api/backup/jobs.py | 31 ++++++++++++++++++++++++++++--- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py index a90c4bd..ebc2ea0 100644 --- a/selfprivacy_api/backup/jobs.py +++ b/selfprivacy_api/backup/jobs.py @@ -1,14 +1,39 @@ -from typing import Optional +from typing import Optional, List from selfprivacy_api.jobs import Jobs, Job, JobStatus from selfprivacy_api.services.service import Service -def backup_job_type(service: Service): - return f"services.{service.get_id()}.backup" +def job_type_prefix(service: Service) -> str: + return f"services.{service.get_id()}" + + +def backup_job_type(service: Service) -> str: + return f"{job_type_prefix(service)}.backup" + + +def get_jobs_by_service(service: Service) -> List[Job]: + result = [] + for job in Jobs.get_jobs(): + if job.type_id.startswith(job_type_prefix(service)) and job.status in [ + JobStatus.CREATED, + JobStatus.RUNNING, + ]: + result.append(job) + return result + + +def is_something_queued_for(service: Service) -> bool: + return len(get_jobs_by_service(service)) != 0 def add_backup_job(service: Service) -> Job: + if is_something_queued_for(service): + message = ( + f"Cannot start a backup of {service.get_id()}, another operation is queued: " + + get_jobs_by_service(service)[0].type_id + ) + raise ValueError(message) display_name = service.get_display_name() job = Jobs.add( type_id=backup_job_type(service), From deb857bca9ea81dec93e0ba97409d3977d936419 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 17 May 2023 17:48:06 +0000 Subject: [PATCH 113/537] refactor(backups): use single repo and multiplex by tags --- .../backup/providers/local_file.py | 3 +- selfprivacy_api/backup/restic_backuper.py | 28 +++++++++++-------- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index bdd9213..a20f615 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -8,4 +8,5 @@ class LocalFileBackup(AbstractBackupProvider): # login and key args are for compatibility with generic provider methods. They are ignored. 
def __init__(self, filename: str, login: str = "", key: str = ""): super().__init__() - self.backuper = ResticBackuper("", "", f":local:{filename}/") + self.backuper = ResticBackuper("", "", ":local:") + self.backuper.set_creds("", "", filename) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 896f68d..07ddb1c 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -18,15 +18,17 @@ class ResticBackuper(AbstractBackuper): self.type = type self.account = "" self.key = "" + self.repo = "" - def set_creds(self, account: str, key: str): + def set_creds(self, account: str, key: str, repo: str): self.account = account self.key = key + self.repo = repo - def restic_repo(self, repository_name: str) -> str: + def restic_repo(self) -> str: # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5 - return f"rclone:{self.type}{repository_name}/sfbackup" + return f"rclone:{self.type}{self.repo}" def rclone_args(self): return "rclone.args=serve restic --stdio" + self.backend_rclone_args() @@ -44,16 +46,23 @@ class ResticBackuper(AbstractBackuper): def _password_command(self): return f"echo {LocalBackupSecret.get()}" - def restic_command(self, repo_name: str, *args): + def restic_command(self, *args, branch_name: str = ""): command = [ "restic", "-o", self.rclone_args(), "-r", - self.restic_repo(repo_name), + self.restic_repo(), "--password-command", self._password_command(), ] + if branch_name != "": + command.extend( + [ + "--tag", + branch_name, + ] + ) if args != []: command.extend(ResticBackuper.__flatten_list(args)) return command @@ -78,10 +87,10 @@ class ResticBackuper(AbstractBackuper): assert not isinstance(folders, str) backup_command = self.restic_command( - repo_name, "backup", "--json", folders, + branch_name=repo_name, ) with subprocess.Popen( backup_command, @@ -115,7 +124,6 @@ class ResticBackuper(AbstractBackuper): def init(self, repo_name): init_command = self.restic_command( - repo_name, "init", ) with subprocess.Popen( @@ -130,7 +138,6 @@ class ResticBackuper(AbstractBackuper): def is_initted(self, repo_name: str) -> bool: command = self.restic_command( - repo_name, "check", "--json", ) @@ -147,7 +154,6 @@ class ResticBackuper(AbstractBackuper): Size of a snapshot """ command = self.restic_command( - repo_name, "stats", snapshot_id, "--json", @@ -169,7 +175,6 @@ class ResticBackuper(AbstractBackuper): # I do not alter the signature yet because maybe this can be # changed with flags restore_command = self.restic_command( - repo_name, "restore", snapshot_id, "--target", @@ -190,7 +195,6 @@ class ResticBackuper(AbstractBackuper): raises Value Error if repo does not exist """ listing_command = self.restic_command( - repo_name, "snapshots", "--json", ) @@ -217,7 +221,7 @@ class ResticBackuper(AbstractBackuper): snapshot = Snapshot( id=restic_snapshot["short_id"], created_at=restic_snapshot["time"], - service_name=repo_name, + service_name=restic_snapshot["tags"][0], ) snapshots.append(snapshot) From ddd2176a5d1deaa2e4df37d373074b71fb3bd448 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 17 May 2023 17:59:56 +0000 Subject: [PATCH 114/537] refactor(backups): break out job logs status prefix --- selfprivacy_api/jobs/__init__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py 
b/selfprivacy_api/jobs/__init__.py index 16306a7..d7e4f31 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -26,6 +26,9 @@ from selfprivacy_api.utils.redis_pool import RedisPool JOB_EXPIRATION_SECONDS = 10 * 24 * 60 * 60 # ten days +STATUS_LOGS_PREFIX = "jobs_logs:status:" +PROGRESS_LOGS_PREFIX = "jobs_logs:progress:" + class JobStatus(str, Enum): """ @@ -124,7 +127,7 @@ class Jobs: @staticmethod def reset_logs(): redis = RedisPool().get_connection() - for key in redis.keys("jobs_logs:" + "*"): + for key in redis.keys(STATUS_LOGS_PREFIX + "*"): redis.delete(key) @staticmethod @@ -233,7 +236,7 @@ def _redis_key_from_uuid(uuid_string): def _redis_log_key_from_uuid(uuid_string): - return "jobs_logs:" + str(uuid_string) + return STATUS_LOGS_PREFIX + str(uuid_string) def _store_job_as_hash(redis, redis_key, model): From 60806cd5362af92035a3cc66e9fd73f2780d4cd6 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 17 May 2023 18:36:39 +0000 Subject: [PATCH 115/537] feature(backups): job progress logs --- selfprivacy_api/jobs/__init__.py | 37 +++++++++++++++++++++++++++++--- 1 file changed, 34 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index d7e4f31..5c3ea62 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -133,18 +133,27 @@ class Jobs: @staticmethod def log_status_update(job: Job, status: JobStatus): redis = RedisPool().get_connection() - key = _redis_log_key_from_uuid(job.uid) + key = _status_log_key_from_uuid(job.uid) if redis.exists(key): assert redis.type(key) == "list" redis.lpush(key, status.value) redis.expire(key, 10) + @staticmethod + def log_progress_update(job: Job, progress: int): + redis = RedisPool().get_connection() + key = _progress_log_key_from_uuid(job.uid) + if redis.exists(key): + assert redis.type(key) == "list" + redis.lpush(key, progress) + redis.expire(key, 10) + @staticmethod def status_updates(job: Job) -> typing.List[JobStatus]: result = [] redis = RedisPool().get_connection() - key = _redis_log_key_from_uuid(job.uid) + key = _status_log_key_from_uuid(job.uid) if not redis.exists(key): return [] @@ -156,6 +165,23 @@ class Jobs: raise ValueError("impossible job status: " + status) from e return result + @staticmethod + def progress_updates(job: Job) -> typing.List[int]: + result = [] + + redis = RedisPool().get_connection() + key = _progress_log_key_from_uuid(job.uid) + if not redis.exists(key): + return [] + + progress_strings = redis.lrange(key, 0, -1) + for progress in progress_strings: + try: + result.append(int(progress)) + except ValueError as e: + raise ValueError("impossible job progress: " + progress) from e + return result + @staticmethod def update( job: Job, @@ -178,6 +204,7 @@ class Jobs: job.status_text = status_text if progress is not None: job.progress = progress + Jobs.log_progress_update(job, progress) job.status = status Jobs.log_status_update(job, status) job.updated_at = datetime.datetime.now() @@ -235,10 +262,14 @@ def _redis_key_from_uuid(uuid_string): return "jobs:" + str(uuid_string) -def _redis_log_key_from_uuid(uuid_string): +def _status_log_key_from_uuid(uuid_string): return STATUS_LOGS_PREFIX + str(uuid_string) +def _progress_log_key_from_uuid(uuid_string): + return PROGRESS_LOGS_PREFIX + str(uuid_string) + def _store_job_as_hash(redis, redis_key, model): for key, value in model.dict().items(): if isinstance(value, uuid.UUID):
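
One subtlety in the progress log above: redis hands list items back as strings, and int() raises ValueError on anything that is not a number, which is the error progress_updates has to catch and rewrap. A stand-alone sketch of that parsing step:

    def parse_progress(raw_values):
        # raw_values is what lrange returns: a list of strings, newest first
        result = []
        for raw in raw_values:
            try:
                result.append(int(raw))
            except ValueError as e:
                raise ValueError("impossible job progress: " + raw) from e
        return result


    assert parse_progress(["100", "50", "0"]) == [100, 50, 0]
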
From 54a8e0b2b073b558a072cef035dc00d12f73b391 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 17 May 2023 20:02:21 +0000 Subject: [PATCH 116/537] test(backups): break out obtaining finished jobs --- tests/test_graphql/test_backup.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index ec12506..94d8ef3 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -221,15 +221,17 @@ def test_init_tracking(backups, raw_dummy_service): assert Backups.is_initted(raw_dummy_service) is True +def finished_jobs(): + return [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED] + + def assert_job_finished(job_type, count): - finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED] - finished_types = [job.type_id for job in finished_jobs] + finished_types = [job.type_id for job in finished_jobs()] assert finished_types.count(job_type) == count def assert_job_has_run(job_type): - finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED] - job = [job for job in finished_jobs if job.type_id == job_type][0] + job = [job for job in finished_jobs() if job.type_id == job_type][0] assert JobStatus.RUNNING in Jobs.status_updates(job) From b1d0a80963d7c49cb31f3010f4bd96ef16cd6d76 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 17 May 2023 20:09:29 +0000 Subject: [PATCH 117/537] feature(backups): realtime progress updates of backups --- selfprivacy_api/backup/restic_backuper.py | 53 +++++++++++++++++------ tests/test_graphql/test_backup.py | 6 +++ 2 files changed, 45 insertions(+), 14 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 07ddb1c..3e72561 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -7,6 +7,9 @@ from collections.abc import Iterable from selfprivacy_api.backup.backuper import AbstractBackuper from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.backup.jobs import get_backup_job +from selfprivacy_api.services import get_service_by_id +from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.backup.local_secret import LocalBackupSecret @@ -78,6 +81,19 @@ class ResticBackuper(AbstractBackuper): result.append(item) return result + @staticmethod + def output_yielder(command): + with subprocess.Popen( + command, + shell=False, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + ) as handle: + for line in iter(handle.stdout.readline, ""): + if "NOTICE:" not in line: + yield line + def start_backup(self, folders: List[str], repo_name: str): """ Start backup with restic @@ -92,20 +108,25 @@ class ResticBackuper(AbstractBackuper): folders, branch_name=repo_name, ) - with subprocess.Popen( - backup_command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) as handle: - output = handle.communicate()[0].decode("utf-8") - try: - messages = self.parse_json_output(output) - return ResticBackuper._snapshot_from_backup_messages( - messages, repo_name - ) - except ValueError as e: - raise ValueError("could not create a snapshot: ") from e + + messages = [] + try: + for raw_message in ResticBackuper.output_yielder(backup_command): + message = self.parse_json_output(raw_message) + if message["message_type"] == "status": + job = get_backup_job(get_service_by_id(repo_name)) + if job is not None: # only update status if we run under some job + Jobs.update( + job,
JobStatus.RUNNING, + progress=ResticBackuper.progress_from_status_message( + message + ), + ) + messages.append(message) + return ResticBackuper._snapshot_from_backup_messages(messages, repo_name) + except ValueError as e: + raise ValueError("could not create a snapshot: ", messages) from e @staticmethod def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: @@ -114,6 +135,10 @@ class ResticBackuper(AbstractBackuper): return ResticBackuper._snapshot_from_fresh_summary(message, repo_name) raise ValueError("no summary message in restic json output") + @staticmethod + def progress_from_status_message(message: object) -> int: + return int(message["percent_done"]) + @staticmethod def _snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot: return Snapshot( diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 94d8ef3..89978b1 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -235,6 +235,11 @@ def assert_job_has_run(job_type): assert JobStatus.RUNNING in Jobs.status_updates(job) +def assert_job_had_progress(job_type): + job = [job for job in finished_jobs() if job.type_id == job_type][0] + assert len(Jobs.progress_updates(job)) > 0 + + def test_backup_service_task(backups, dummy_service): handle = start_backup(dummy_service) handle(blocking=True) @@ -246,6 +251,7 @@ def test_backup_service_task(backups, dummy_service): job_type_id = f"services.{id}.backup" assert_job_finished(job_type_id, count=1) assert_job_has_run(job_type_id) + assert_job_had_progress(job_type_id) def test_restore_snapshot_task(backups, dummy_service): From 9fab596f91bcb10261424dd352bebac3e6809cab Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 17 May 2023 20:21:37 +0000 Subject: [PATCH 118/537] refactor(backups): refactor realtime updating --- selfprivacy_api/backup/restic_backuper.py | 26 +++++++++++------------ 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 3e72561..bc92148 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -110,19 +110,10 @@ class ResticBackuper(AbstractBackuper): ) messages = [] + job = get_backup_job(get_service_by_id(repo_name)) try: for raw_message in ResticBackuper.output_yielder(backup_command): - message = self.parse_json_output(raw_message) - if message["message_type"] == "status": - job = get_backup_job(get_service_by_id(repo_name)) - if job is not None: # only update status if we run under some job - Jobs.update( - job, - JobStatus.RUNNING, - progress=ResticBackuper.progress_from_status_message( - message - ), - ) + message = self.parse_message(raw_message, job) messages.append(message) return ResticBackuper._snapshot_from_backup_messages(messages, repo_name) except ValueError as e: @@ -135,9 +126,16 @@ class ResticBackuper(AbstractBackuper): return ResticBackuper._snapshot_from_fresh_summary(message, repo_name) raise ValueError("no summary message in restic json output") - @staticmethod - def progress_from_status_message(message: object) -> int: - return int(message["percent_done"]) + def parse_message(self, raw_message, job=None) -> object: + message = self.parse_json_output(raw_message) + if message["message_type"] == "status": + if job is not None: # only update status if we run under some job + Jobs.update( + job, + JobStatus.RUNNING, + progress=int(message["percent_done"]), + ) + return message @staticmethod def 
_snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot: From 03d4632465e583db24598987d74ebb59e60da874 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 17 May 2023 20:24:29 +0000 Subject: [PATCH 119/537] refactor(backups): remove extraneous asserts from jobs --- selfprivacy_api/jobs/__init__.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 5c3ea62..5e86c5f 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -134,8 +134,6 @@ class Jobs: def log_status_update(job: Job, status: JobStatus): redis = RedisPool().get_connection() key = _status_log_key_from_uuid(job.uid) - if redis.exists(key): - assert redis.type(key) == "list" redis.lpush(key, status.value) redis.expire(key, 10) @@ -143,8 +141,6 @@ class Jobs: def log_progress_update(job: Job, progress: int): redis = RedisPool().get_connection() key = _progress_log_key_from_uuid(job.uid) - if redis.exists(key): - assert redis.type(key) == "list" redis.lpush(key, progress) redis.expire(key, 10) From 0dc6f7475478d6ccb2cd450f6cab8b1a965faad0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 22 May 2023 16:01:57 +0000 Subject: [PATCH 120/537] fix(backups): make sure location and credentials get properly passed around --- selfprivacy_api/backup/providers/provider.py | 3 ++- tests/test_graphql/test_backup.py | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index 017c03d..ce9d055 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -12,6 +12,7 @@ class AbstractBackupProvider(ABC): def backuper(self) -> AbstractBackuper: raise NotImplementedError - def __init__(self, login="", key=""): + def __init__(self, login="", key="", location=""): + self.backuper.set_creds(login, key, location) self.login = login self.key = key diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 89978b1..95001cd 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -109,6 +109,9 @@ def test_config_load(generic_userdata): assert provider.login == "ID" assert provider.key == "KEY" + assert provider.backuper.account == "ID" + assert provider.backuper.key == "KEY" + def test_select_backend(): provider = providers.get_provider(BackupProvider.BACKBLAZE) From 97e4c529f6fd040249b88dcd9da96a4d995c7b2d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 29 May 2023 15:34:26 +0000 Subject: [PATCH 121/537] feature(backups): init repo mutation --- selfprivacy_api/backup/__init__.py | 34 ++++++++---- selfprivacy_api/backup/providers/provider.py | 6 ++- selfprivacy_api/backup/storage.py | 6 ++- .../graphql/mutations/backup_mutations.py | 53 +++++++++++++++++++ selfprivacy_api/models/backup/provider.py | 2 + 5 files changed, 89 insertions(+), 12 deletions(-) create mode 100644 selfprivacy_api/graphql/mutations/backup_mutations.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f3e2ba5..a5d5416 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -29,6 +29,9 @@ class Backups: Storage.store_testrepo_path(file_path) Storage.store_provider(provider) + def set_provider(provider: AbstractBackupProvider): + Storage.store_provider(provider) + @staticmethod def get_last_backed_up(service: Service) -> Optional[datetime]: """Get a timezone-aware time 
of the last backup of a service""" @@ -126,19 +129,21 @@ class Backups: return Backups.lookup_provider() @staticmethod - def set_provider(kind: str, login: str, key: str): - provider = Backups.construct_provider(kind, login, key) + def set_provider(kind: str, login: str, key: str, location: str, repo_id: str = ""): + provider = Backups.construct_provider(kind, login, key, location, repo_id) Storage.store_provider(provider) @staticmethod - def construct_provider(kind: str, login: str, key: str): + def construct_provider( + kind: str, login: str, key: str, location: str, repo_id: str = "" + ): provider_class = get_provider(BackupProvider[kind]) if kind == "FILE": path = Storage.get_testrepo_path() return provider_class(path) - return provider_class(login=login, key=key) + return provider_class(login=login, key=key, location=location, repo_id=repo_id) @staticmethod def reset(): @@ -169,17 +174,19 @@ class Backups: if "backblaze" in user_data.keys(): account = user_data["backblaze"]["accountId"] key = user_data["backblaze"]["accountKey"] + location = user_data["backblaze"]["bucket"] provider_string = "BACKBLAZE" return Backups.construct_provider( - kind=provider_string, login=account, key=key + kind=provider_string, login=account, key=key, location=location ) return None account = user_data["backup"]["accountId"] key = user_data["backup"]["accountKey"] provider_string = user_data["backup"]["provider"] + location = user_data["backup"]["bucket"] return Backups.construct_provider( - kind=provider_string, login=account, key=key + kind=provider_string, login=account, key=key, location=location ) @staticmethod @@ -188,7 +195,11 @@ class Backups: if provider_model is None: return None return Backups.construct_provider( - provider_model.kind, provider_model.login, provider_model.key + provider_model.kind, + provider_model.login, + provider_model.key, + provider_model.location, + provider_model.repo_id, ) @staticmethod @@ -214,10 +225,13 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) @staticmethod - def init_repo(service: Service): - repo_name = service.get_id() + def init_repo(service: Optional[Service] = None): + if service is not None: + repo_name = service.get_id() + Backups.provider().backuper.init(repo_name) - Storage.mark_as_init(service) + if service is not None: + Storage.mark_as_init(service) @staticmethod def is_initted(service: Service) -> bool: diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index ce9d055..c303d4e 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -12,7 +12,11 @@ class AbstractBackupProvider(ABC): def backuper(self) -> AbstractBackuper: raise NotImplementedError - def __init__(self, login="", key="", location=""): + def __init__(self, login="", key="", location="", repo_id=""): self.backuper.set_creds(login, key, location) self.login = login self.key = key + self.location = location + # We do not need to do anything with this one + # Just remember in case the app forgets + self.repo_id = repo_id diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 7ca5f18..072c80f 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -146,7 +146,11 @@ class Storage: redis, REDIS_PROVIDER_KEY, BackupProviderModel( - kind=get_kind(provider), login=provider.login, key=provider.key + kind=get_kind(provider), + login=provider.login, + key=provider.key, + location=provider.location, +
repo_id=provider.repo_id, ), ) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py new file mode 100644 index 0000000..4b61f43 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -0,0 +1,53 @@ +import datetime +import typing +import strawberry +from strawberry.types import Info + +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericMutationReturn, + MutationReturnInterface, +) +from selfprivacy_api.graphql.queries.backup import BackupConfiguration +from selfprivacy_api.graphql.queries.providers import BackupProvider + +from selfprivacy_api.backup import Backups + + +@strawberry.input +class InitializeRepositoryInput: + """Initialize repository input""" + + provider: BackupProvider + # The following field may become optional for other providers? + # Backblaze takes bucket id and name + location_id: str + location_name: str + # Key ID and key for Backblaze + login: str + password: str + + +@strawberry.type +class GenericBackupConfigReturn(MutationReturnInterface): + """Generic backup config return""" + + configuration: typing.Optional[BackupConfiguration] + + +@strawberry.type +class BackupMutations: + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def initialize_repository( + self, repository: InitializeRepositoryInput + ) -> GenericBackupConfigReturn: + """Initialize a new repository""" + provider = Backups.construct_provider( + kind=repository.provider, + login=repository.login, + key=repository.password, + location=repository.location_name, + repo_id=repository.location_id, + ) + Backups.set_provider(provider) + Backups.init_repo() diff --git a/selfprivacy_api/models/backup/provider.py b/selfprivacy_api/models/backup/provider.py index e454c39..e05a7f7 100644 --- a/selfprivacy_api/models/backup/provider.py +++ b/selfprivacy_api/models/backup/provider.py @@ -7,3 +7,5 @@ class BackupProviderModel(BaseModel): kind: str login: str key: str + location: str + repo_id: str # for app usage, not for us From 345e2c10c44eeb615c44aeb3f8b0c967485c6f8e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 29 May 2023 16:12:22 +0000 Subject: [PATCH 122/537] feature(backups): a graphql query to get provider info --- .../graphql/common_types/service.py | 7 ++++ selfprivacy_api/graphql/queries/backup.py | 32 +++++++++++++++++-- 2 files changed, 36 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 8f27386..9e04254 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -108,6 +108,13 @@ class Service: return None +@strawberry.type +class SnapshotInfo: + id: str + service: "Service" + created_at: datetime.datetime + + def service_to_graphql_service(service: ServiceInterface) -> Service: """Convert service to graphql service""" return Service( diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index ef61b10..80b08e9 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -2,13 +2,39 @@ # pylint: disable=too-few-public-methods import typing import strawberry -from selfprivacy_api.graphql.common_types.backup_snapshot import SnapshotInfo + + +from selfprivacy_api.backup import Backups +from selfprivacy_api.backup.local_secret import LocalBackupSecret +from 
selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.graphql.common_types.service import SnapshotInfo + + +@strawberry.type +class BackupConfiguration: + provider: BackupProvider + # When server is lost, the app should have the key to decrypt backups on a new server + encryption_key: str + # If none, autobackups are disabled + autobackup_period: typing.Optional[int] = None + # Bucket name for Backblaze, path for some other providers + location_name: typing.Optional[str] = None + location_id: typing.Optional[str] = None + # False when repo is not initialized and not ready to be used + is_initialized: bool @strawberry.type class Backup: - backend: str + @strawberry.field + def configuration() -> BackupConfiguration: + config = BackupConfiguration() + config.encryption_key = LocalBackupSecret.get() + config.is_initialized = Backups.is_initted() + config.autobackup_period = Backups.autobackup_period_minutes() + config.location_name = Backups.provider().location + config.location_id = Backups.provider().repo_id + return config @strawberry.field - def get_backups(self) -> typing.List[SnapshotInfo]: + def all_snapshots(self) -> typing.List[SnapshotInfo]: return [] From c3696d3a4b6a6f3715045a0d0cb1b1659f589c54 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 29 May 2023 16:50:14 +0000 Subject: [PATCH 123/537] feature(backups): global init instead of per-service --- selfprivacy_api/backup/__init__.py | 14 ++++++------- selfprivacy_api/backup/restic_backuper.py | 4 ++-- selfprivacy_api/backup/storage.py | 10 ++++------ tests/test_graphql/test_backup.py | 24 +++++++++++------------ 4 files changed, 24 insertions(+), 28 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index a5d5416..b10ee39 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -229,19 +229,17 @@ class Backups: if service is not None: repo_name = service.get_id() -
service.get_id() - if redis.exists(REDIS_INITTED_CACHE_PREFIX + repo_name): + def has_init_mark() -> bool: + if redis.exists(REDIS_INITTED_CACHE_PREFIX): return True return False @staticmethod - def mark_as_init(service: Service): - repo_name = service.get_id() - redis.set(REDIS_INITTED_CACHE_PREFIX + repo_name, 1) + def mark_as_init(): + redis.set(REDIS_INITTED_CACHE_PREFIX, 1) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 95001cd..3e1536e 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -120,7 +120,7 @@ def test_select_backend(): def test_file_backend_init(file_backup): - file_backup.backuper.init("somerepo") + file_backup.backuper.init() def test_backup_simple_file(raw_dummy_service, file_backup): @@ -130,7 +130,7 @@ def test_backup_simple_file(raw_dummy_service, file_backup): assert file_backup is not None name = service.get_id() - file_backup.backuper.init(name) + file_backup.backuper.init() def test_backup_service(dummy_service, backups): @@ -217,11 +217,11 @@ def test_sizing(backups, dummy_service): def test_init_tracking(backups, raw_dummy_service): - assert Backups.is_initted(raw_dummy_service) is False + assert Backups.is_initted() is False - Backups.init_repo(raw_dummy_service) + Backups.init_repo() - assert Backups.is_initted(raw_dummy_service) is True + assert Backups.is_initted() is True def finished_jobs(): @@ -414,21 +414,21 @@ def test_snapshots_caching(backups, dummy_service): # Storage def test_init_tracking_caching(backups, raw_dummy_service): - assert Storage.has_init_mark(raw_dummy_service) is False + assert Storage.has_init_mark() is False - Storage.mark_as_init(raw_dummy_service) + Storage.mark_as_init() - assert Storage.has_init_mark(raw_dummy_service) is True - assert Backups.is_initted(raw_dummy_service) is True + assert Storage.has_init_mark() is True + assert Backups.is_initted() is True # Storage def test_init_tracking_caching2(backups, raw_dummy_service): - assert Storage.has_init_mark(raw_dummy_service) is False + assert Storage.has_init_mark() is False - Backups.init_repo(raw_dummy_service) + Backups.init_repo() - assert Storage.has_init_mark(raw_dummy_service) is True + assert Storage.has_init_mark() is True # Storage From 9c4d2a0ba517775cb335d8f8b933d1907e5ac3a2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 29 May 2023 18:24:38 +0000 Subject: [PATCH 124/537] feature(backups): drop repository call --- selfprivacy_api/graphql/mutations/backup_mutations.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 4b61f43..8bb2d94 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -9,6 +9,7 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( MutationReturnInterface, ) from selfprivacy_api.graphql.queries.backup import BackupConfiguration +from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup import Backups @@ -51,3 +52,10 @@ class BackupMutations: ) Backups.set_provider(provider) Backups.init_repo() + + +@strawberry.mutation(permission_classes=[IsAuthenticated]) +def remove_repository(self) -> GenericBackupConfigReturn: + """Remove repository""" + Backups.reset() + return Backup.configuration() From 603ed2ddf96d666bc0d089d4d8287a61757c5a8c Mon Sep 17 00:00:00 
2001 From: Houkime <> Date: Wed, 31 May 2023 11:30:09 +0000 Subject: [PATCH 125/537] feature(backups): start backup graphql API --- .../graphql/mutations/backup_mutations.py | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 8bb2d94..4704df2 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -13,6 +13,8 @@ from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup import Backups +from selfprivacy_api.services import get_all_services, get_service_by_id +from selfprivacy_api.backup.tasks import start_backup @strawberry.input @@ -36,6 +38,10 @@ class GenericBackupConfigReturn(MutationReturnInterface): configuration: typing.Optional[BackupConfiguration] +class GenericJobMutationReturn: + pass + + @strawberry.type class BackupMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -53,9 +59,24 @@ class BackupMutations: Backups.set_provider(provider) Backups.init_repo() + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def remove_repository(self) -> GenericBackupConfigReturn: + """Remove repository""" + Backups.reset() + return Backup.configuration() -@strawberry.mutation(permission_classes=[IsAuthenticated]) -def remove_repository(self) -> GenericBackupConfigReturn: - """Remove repository""" - Backups.reset() - return Backup.configuration() + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def start_backup( + self, service_id: typing.Optional[str] = None + ) -> GenericJobMutationReturn: + """Start backup. If service not provided, backup all services""" + if service_id is None: + for service in get_all_services(): + start_backup(service) + else: + service = get_service_by_id(service_id) + if service is None: + raise ValueError(f"nonexistent service: {service_id}") + start_backup(service) + + return GenericJobMutationReturn() From 4b07d4de4141d6a606bcb46e8c63fdf642376c92 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 31 May 2023 13:16:08 +0000 Subject: [PATCH 126/537] refactor(backups): global snapshots --- selfprivacy_api/backup/__init__.py | 2 +- selfprivacy_api/backup/restic_backuper.py | 6 +++--- tests/test_graphql/test_backup.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b10ee39..4e13ea9 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -253,7 +253,7 @@ class Backups: # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing? 
- upstream_snapshots = Backups.provider().backuper.get_snapshots(service_id) + upstream_snapshots = Backups.provider().backuper.get_snapshots() Backups.sync_service_snapshots(service_id, upstream_snapshots) return upstream_snapshots diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index c16f444..2c98b46 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -212,7 +212,7 @@ class ResticBackuper(AbstractBackuper): if "restoring" not in output: raise ValueError("cannot restore a snapshot: " + output) - def _load_snapshots(self, repo_name) -> object: + def _load_snapshots(self) -> object: """ Load list of snapshots from repository raises Value Error if repo does not exist @@ -237,10 +237,10 @@ class ResticBackuper(AbstractBackuper): except ValueError as e: raise ValueError("Cannot load snapshots: ") from e - def get_snapshots(self, repo_name) -> List[Snapshot]: + def get_snapshots(self) -> List[Snapshot]: """Get all snapshots from the repo""" snapshots = [] - for restic_snapshot in self._load_snapshots(repo_name): + for restic_snapshot in self._load_snapshots(): snapshot = Snapshot( id=restic_snapshot["short_id"], created_at=restic_snapshot["time"], diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 3e1536e..a50fed4 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -151,7 +151,7 @@ def test_backup_service(dummy_service, backups): def test_no_repo(memory_backup): with pytest.raises(ValueError): - assert memory_backup.backuper.get_snapshots("") == [] + assert memory_backup.backuper.get_snapshots() == [] def test_one_snapshot(backups, dummy_service): From f7c0821675b68ecdc53704286d7f8569f8b329ea Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 1 Jun 2023 12:44:14 +0000 Subject: [PATCH 127/537] fix(backups): return correct snapshots per service --- selfprivacy_api/backup/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 4e13ea9..7a3b37d 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -255,7 +255,7 @@ class Backups: upstream_snapshots = Backups.provider().backuper.get_snapshots() Backups.sync_service_snapshots(service_id, upstream_snapshots) - return upstream_snapshots + return [snap for snap in upstream_snapshots if snap.service_name == service_id] @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): From 50952d688aee00c940e064246bf13344cbece58f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 1 Jun 2023 14:03:26 +0000 Subject: [PATCH 128/537] feature(backups): graphql mutation for restore --- selfprivacy_api/backup/__init__.py | 19 +++++++++++++++++++ selfprivacy_api/backup/storage.py | 12 ++++++++++++ .../graphql/mutations/backup_mutations.py | 12 +++++++++++- 3 files changed, 42 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7a3b37d..7001d03 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -257,6 +257,25 @@ class Backups: Backups.sync_service_snapshots(service_id, upstream_snapshots) return [snap for snap in upstream_snapshots if snap.service_name == service_id] + @staticmethod + def get_snapshot_by_id(id: str) -> Optional[Snapshot]: + snap = Storage.get_cached_snapshot_by_id(id) + if snap is not None: + return snap + 
+ # Possibly our cache entry got invalidated, let's try one more time + Backups.sync_all_snapshots() + snap = Storage.get_cached_snapshot_by_id(id) + + return snap + + @staticmethod + def sync_all_snapshots(): + upstream_snapshots = Backups.provider().backuper.get_snapshots() + Storage.invalidate_snapshot_storage() + for snapshot in upstream_snapshots: + Storage.cache_snapshot(snapshot) + @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index dd23210..38155e6 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -47,6 +47,11 @@ class Storage: for key in redis.keys(prefix + "*"): redis.delete(key) + @staticmethod + def invalidate_snapshot_storage(): + for key in redis.keys(REDIS_SNAPSHOTS_PREFIX + "*"): + redis.delete(key) + @staticmethod def store_testrepo_path(path: str): redis.set(REDIS_REPO_PATH_KEY, path) @@ -97,6 +102,13 @@ class Storage: snapshot_key = Storage.__snapshot_key(snapshot) redis.delete(snapshot_key) + @staticmethod + def get_cached_snapshot_by_id(snapshot_id: str) -> Optional[Snapshot]: + key = redis.keys(REDIS_SNAPSHOTS_PREFIX + snapshot_id) + if not redis.exists(key): + return None + return hash_as_model(redis, key, Snapshot) + @staticmethod def get_cached_snapshots() -> List[Snapshot]: keys = redis.keys(REDIS_SNAPSHOTS_PREFIX + "*") diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 4704df2..8ae19bb 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -14,7 +14,7 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_all_services, get_service_by_id -from selfprivacy_api.backup.tasks import start_backup +from selfprivacy_api.backup.tasks import start_backup, restore_snapshot @strawberry.input @@ -80,3 +80,13 @@ class BackupMutations: start_backup(service) return GenericJobMutationReturn() + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def restore_backup(self, snapshot_id: str) -> GenericJobMutationReturn: + """Restore backup""" + snap = Backups.get_snapshot_by_id(snapshot_id) + if snap in None: + raise ValueError(f"No such snapshot: {snapshot_id}") + restore_snapshot(snap) + + return GenericJobMutationReturn() From 9a94f7624e4dad4f6ea41a63ce09179169bbef08 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 1 Jun 2023 16:12:32 +0000 Subject: [PATCH 129/537] feature(backups): a graphql call to invalidate cache --- selfprivacy_api/backup/__init__.py | 4 ++++ selfprivacy_api/graphql/mutations/backup_mutations.py | 6 ++++++ 2 files changed, 10 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7001d03..86445ba 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -269,6 +269,10 @@ class Backups: return snap + @staticmethod + def force_snapshot_reload(): + Backups.sync_all_snapshots() + @staticmethod def sync_all_snapshots(): upstream_snapshots = Backups.provider().backuper.get_snapshots() diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 8ae19bb..c5c63bf 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ 
b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -90,3 +90,9 @@ class BackupMutations: restore_snapshot(snap) return GenericJobMutationReturn() + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def force_snapshots_reload(self) -> GenericMutationReturn: + """Force snapshots reload""" + Backups.force_snapshot_reload() + return GenericMutationReturn() From 9d5335f62c62dbd202308930698dc91dd3f58c74 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 5 Jun 2023 11:19:01 +0000 Subject: [PATCH 130/537] feature(backups): return a snapshot from start_backup --- selfprivacy_api/backup/__init__.py | 1 + selfprivacy_api/backup/storage.py | 2 +- tests/test_graphql/test_backup.py | 12 ++++++++++++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 86445ba..37b9517 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -223,6 +223,7 @@ class Backups: raise e Jobs.update(job, status=JobStatus.FINISHED) + return snapshot @staticmethod def init_repo(service: Optional[Service] = None): diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 38155e6..680f39f 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -104,7 +104,7 @@ class Storage: @staticmethod def get_cached_snapshot_by_id(snapshot_id: str) -> Optional[Snapshot]: - key = redis.keys(REDIS_SNAPSHOTS_PREFIX + snapshot_id) + key = REDIS_SNAPSHOTS_PREFIX + snapshot_id if not redis.exists(key): return None return hash_as_model(redis, key, Snapshot) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index a50fed4..5ac024f 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -243,6 +243,18 @@ def assert_job_had_progress(job_type): assert len(Jobs.progress_updates(job)) > 0 +def test_snapshots_by_id(backups, dummy_service): + snap1 = Backups.back_up(dummy_service) + snap2 = Backups.back_up(dummy_service) + snap3 = Backups.back_up(dummy_service) + + assert snap2.id is not None + assert snap2.id != "" + + assert len(Backups.get_snapshots(dummy_service)) == 3 + assert Backups.get_snapshot_by_id(snap2.id).id == snap2.id + + def test_backup_service_task(backups, dummy_service): handle = start_backup(dummy_service) handle(blocking=True) From 2168037a103c565d5d9616afd21884e2b5123126 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 5 Jun 2023 11:28:53 +0000 Subject: [PATCH 131/537] refactor(backups): introduce get_all_snapshots() --- selfprivacy_api/backup/__init__.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 37b9517..7678258 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -247,16 +247,20 @@ class Backups: @staticmethod def get_snapshots(service: Service) -> List[Snapshot]: - service_id = service.get_id() - cached_snapshots = Backups.get_cached_snapshots_service(service_id) + snapshots = Backups.get_all_snapshots() + return [snap for snap in snapshots if snap.service_name == service.get_id()] + + @staticmethod + def get_all_snapshots() -> List[Snapshot]: + cached_snapshots = Storage.get_cached_snapshots() + if cached_snapshots != []: + return cached_snapshots + # TODO: the oldest snapshots will get expired faster than the new ones. + # How to detect that the end is missing?
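# (Illustrative aside, not part of the patch: get_all_snapshots() is a classic
# read-through cache. Condensed to its skeleton, with `storage` and `backuper`
# standing in for the Redis-backed Storage and the restic backuper used in
# this series:)

def cached_or_upstream(storage, backuper) -> list:
    cached = storage.get_cached_snapshots()  # cheap Redis read
    if cached:                               # hit: skip restic entirely
        return cached
    # miss: fall through to the slow restic query (the real code then
    # recaches the result via sync_all_snapshots())
    return backuper.get_snapshots()

# A cache suspected to be stale is handled separately: force_snapshot_reload()
# from the previous patch drops the cached keys and repopulates them from
# upstream.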
upstream_snapshots = Backups.provider().backuper.get_snapshots() - Backups.sync_service_snapshots(service_id, upstream_snapshots) - return [snap for snap in upstream_snapshots if snap.service_name == service_id] + Backups.sync_all_snapshots() + return upstream_snapshots @staticmethod def get_snapshot_by_id(id: str) -> Optional[Snapshot]: From d6609b29e82c3c21cb2367037775336263cc0616 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 5 Jun 2023 11:36:58 +0000 Subject: [PATCH 132/537] feature(backups): get all snapshots if requested by api --- selfprivacy_api/graphql/queries/backup.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 80b08e9..4d41c36 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -37,4 +37,11 @@ class Backup: @strawberry.field def all_snapshots(self) -> typing.List[SnapshotInfo]: - return [] + result = [] + snapshots = Backups.get_all_snapshots() + for snap in snapshots: + graphql_snap = SnapshotInfo( + id=snap.id, service=snap.service_name, created_at=snap.created_at + ) + result.append(graphql_snap) + return result From ff58bdccbb26e01d94996cca7ecb00afbe61d727 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 5 Jun 2023 11:49:37 +0000 Subject: [PATCH 133/537] feature(backups): set autobackup period from gql --- selfprivacy_api/graphql/mutations/backup_mutations.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index c5c63bf..a227106 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -65,6 +65,14 @@ class BackupMutations: Backups.reset() return Backup.configuration() + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def set_autobackup_period( + self, period: typing.Optional[int] = None + ) -> GenericBackupConfigReturn: + """Set autobackup period. 
None is to disable autobackup""" + Backups.set_autobackup_period_minutes(period) + return Backup.configuration() + @strawberry.mutation(permission_classes=[IsAuthenticated]) def start_backup( self, service_id: typing.Optional[str] = None From 0c95c5913b5068ee21118eb488f7912ae11cc9a4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 14:11:22 +0000 Subject: [PATCH 134/537] BREAKING CHANGE(backups): support only individual service backup requests(combinable) --- selfprivacy_api/backup/jobs.py | 8 +++ .../graphql/mutations/backup_mutations.py | 54 ++++++++++++------- 2 files changed, 43 insertions(+), 19 deletions(-) diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py index ebc2ea0..2293da0 100644 --- a/selfprivacy_api/backup/jobs.py +++ b/selfprivacy_api/backup/jobs.py @@ -12,6 +12,10 @@ def backup_job_type(service: Service) -> str: return f"{job_type_prefix(service)}.backup" +def restore_job_type(service: Service) -> str: + return f"{job_type_prefix(service)}.restore" + + def get_jobs_by_service(service: Service) -> List[Job]: result = [] for job in Jobs.get_jobs(): @@ -54,3 +58,7 @@ def get_job_by_type(type_id: str) -> Optional[Job]: def get_backup_job(service: Service) -> Optional[Job]: return get_job_by_type(backup_job_type(service)) + + +def get_restore_job(service: Service) -> Optional[Job]: + return get_job_by_type(restore_job_type(service)) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index a227106..8ddd9f6 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -6,15 +6,18 @@ from strawberry.types import Info from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, + GenericJobButationReturn, MutationReturnInterface, ) from selfprivacy_api.graphql.queries.backup import BackupConfiguration from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.graphql.common_types.jobs import job_to_api_job from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_all_services, get_service_by_id from selfprivacy_api.backup.tasks import start_backup, restore_snapshot +from selfprivacy_api.backup.jobs import get_backup_job, get_restore_job @strawberry.input @@ -38,10 +41,6 @@ class GenericBackupConfigReturn(MutationReturnInterface): configuration: typing.Optional[BackupConfiguration] -class GenericJobMutationReturn: - pass - - @strawberry.type class BackupMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -76,28 +75,45 @@ class BackupMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def start_backup( self, service_id: typing.Optional[str] = None - ) -> GenericJobMutationReturn: - """Start backup. 
If service not provided, backup all services""" - if service_id is None: - for service in get_all_services(): - start_backup(service) - else: - service = get_service_by_id(service_id) - if service is None: - raise ValueError(f"nonexistent service: {service_id}") - start_backup(service) + ) -> GenericJobButationReturn: + """Start backup""" - return GenericJobMutationReturn() + service = get_service_by_id(service_id) + if service is None: + return GenericJobButationReturn( + success=False, + code=300, + message=f"nonexistent service: {service_id}", + job=None, + ) + start_backup(service) + job = get_backup_job(service) + + return GenericJobButationReturn( + success=True, code=200, message="Backup job queued", job=job_to_api_job(job) + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def restore_backup(self, snapshot_id: str) -> GenericJobMutationReturn: + def restore_backup(self, snapshot_id: str) -> GenericJobButationReturn: """Restore backup""" snap = Backups.get_snapshot_by_id(snapshot_id) - if snap in None: - raise ValueError(f"No such snapshot: {snapshot_id}") + service = get_service_by_id(snap.service_name) + if snap is None: + return GenericJobButationReturn( + success=False, + code=300, + message=f"No such snapshot: {snapshot_id}", + job=None, + ) + restore_snapshot(snap) - return GenericJobMutationReturn() + return GenericJobButationReturn( + success=True, + code=200, + message="restore job created", + jobs=[get_restore_job(service)], + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def force_snapshots_reload(self) -> GenericMutationReturn: From 4b8abb6a66fd99b5603535ccc9ef5630df23a48e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 14:25:17 +0000 Subject: [PATCH 135/537] fix(backups): return 400, not 300 --- selfprivacy_api/graphql/mutations/backup_mutations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 8ddd9f6..9c2f567 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -101,7 +101,7 @@ class BackupMutations: if snap is None: return GenericJobButationReturn( success=False, - code=300, + code=400, message=f"No such snapshot: {snapshot_id}", job=None, ) From 98de85e569854e1c310e03a94a38c86a10a04335 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 14:27:37 +0000 Subject: [PATCH 136/537] fix(backups): return one job, not an array of one --- selfprivacy_api/graphql/mutations/backup_mutations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 9c2f567..2a916f5 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -112,7 +112,7 @@ class BackupMutations: success=True, code=200, message="restore job created", - jobs=[get_restore_job(service)], + job=get_restore_job(service), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) From 7f984b678f4161245945269de3cab6cb2a06778c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 15:05:58 +0000 Subject: [PATCH 137/537] feature(backups): integration between restore and jobs --- selfprivacy_api/backup/__init__.py | 26 ++++++++++++++++++++++---- selfprivacy_api/backup/jobs.py | 19 +++++++++++++++++++ selfprivacy_api/backup/tasks.py | 5 ++++- 3 files 
changed, 45 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7678258..b313165 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -13,7 +13,12 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.backup.storage import Storage -from selfprivacy_api.backup.jobs import get_backup_job, add_backup_job +from selfprivacy_api.backup.jobs import ( + get_backup_job, + add_backup_job, + get_restore_job, + add_restore_job, +) from selfprivacy_api.jobs import Jobs, JobStatus @@ -285,6 +290,7 @@ class Backups: for snapshot in upstream_snapshots: Storage.cache_snapshot(snapshot) + # to be deprecated/internalized in favor of restore_snapshot() @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() @@ -294,9 +300,21 @@ class Backups: @staticmethod def restore_snapshot(snapshot: Snapshot): - Backups.restore_service_from_snapshot( - get_service_by_id(snapshot.service_name), snapshot.id - ) + service = get_service_by_id(snapshot.service_name) + + job = get_restore_job(service) + if job is None: + job = add_restore_job(snapshot) + + Jobs.update(job, status=JobStatus.RUNNING) + try: + Backups.restore_service_from_snapshot(service, snapshot.id) + service.post_restore() + except Exception as e: + Jobs.update(job, status=JobStatus.ERROR) + raise e + + Jobs.update(job, status=JobStatus.FINISHED) @staticmethod def service_snapshot_size(service: Service, snapshot_id: str) -> float: diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py index 2293da0..5a9cb0d 100644 --- a/selfprivacy_api/backup/jobs.py +++ b/selfprivacy_api/backup/jobs.py @@ -1,7 +1,9 @@ from typing import Optional, List +from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.jobs import Jobs, Job, JobStatus from selfprivacy_api.services.service import Service +from selfprivacy_api.services import get_service_by_id def job_type_prefix(service: Service) -> str: @@ -47,6 +49,23 @@ def add_backup_job(service: Service) -> Job: return job +def add_restore_job(snapshot: Snapshot) -> Job: + service = get_service_by_id(snapshot.service_name) + if is_something_queued_for(service): + message = ( + f"Cannot start a restore of {service.get_id()}, another operation is queued: " + + get_jobs_by_service(service)[0].type_id + ) + raise ValueError(message) + display_name = service.get_display_name() + job = Jobs.add( + type_id=restore_job_type(service), + name=f"Restore {display_name}", + description=f"restoring {display_name} from {snapshot.id}", + ) + return job + + def get_job_by_type(type_id: str) -> Optional[Job]: for job in Jobs.get_jobs(): if job.type_id == type_id and job.status in [ diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index d92a926..e88f651 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -2,9 +2,10 @@ from datetime import datetime from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey +from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service from selfprivacy_api.backup import Backups -from selfprivacy_api.backup.jobs import add_backup_job +from selfprivacy_api.backup.jobs import add_backup_job, 
add_restore_job def validate_datetime(dt: datetime): @@ -32,6 +33,8 @@ def start_backup(service: Service) -> bool: @huey.task() def restore_snapshot(snapshot: Snapshot) -> bool: + add_restore_job(snapshot) + Backups.restore_snapshot(snapshot) return True From 981445d594fb897e4254833143d33b83e2e4f31e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 15:55:38 +0000 Subject: [PATCH 138/537] refactor(backups): expect one more error of restic json output parsing --- selfprivacy_api/backup/restic_backuper.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 2c98b46..69bdea9 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -4,6 +4,7 @@ import datetime from typing import List from collections.abc import Iterable +from json.decoder import JSONDecodeError from selfprivacy_api.backup.backuper import AbstractBackuper from selfprivacy_api.models.backup.snapshot import Snapshot @@ -208,6 +209,7 @@ class ResticBackuper(AbstractBackuper): restore_command, stdout=subprocess.PIPE, shell=False ) as handle: + # for some reason restore does not support nice reporting of progress via json output = handle.communicate()[0].decode("utf-8") if "restoring" not in output: raise ValueError("cannot restore a snapshot: " + output) @@ -259,7 +261,12 @@ class ResticBackuper(AbstractBackuper): truncated_output = output[starting_index:] json_messages = truncated_output.splitlines() if len(json_messages) == 1: - return json.loads(truncated_output) + try: + return json.loads(truncated_output) + except JSONDecodeError as e: + raise ValueError( + "There is no json in the restic output : " + output + ) from e result_array = [] for message in json_messages: From 549b149aaf41d8e02727f5c3741168814c19aad6 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 16:33:13 +0000 Subject: [PATCH 139/537] feature(backups): check available space before restoring --- selfprivacy_api/backup/__init__.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b313165..d496758 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,5 +1,6 @@ from typing import List, Optional from datetime import datetime, timedelta +from os import statvfs from selfprivacy_api.models.backup.snapshot import Snapshot @@ -298,6 +299,21 @@ class Backups: Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folders) + @staticmethod + def assert_restorable(snapshot: Snapshot): + service = get_service_by_id(snapshot.service_name) + if service is None: + raise ValueError( + f"snapshot has a nonexistent service: {snapshot.service_name}" + ) + + needed_space = Backups.snapshot_restored_size(snapshot) + available_space = Backups.space_usable_for_service(service) + if needed_space > available_space: + raise ValueError( + f"we only have {available_space} bytes but snapshot needs{ needed_space}" + ) + @staticmethod def restore_snapshot(snapshot: Snapshot): service = get_service_by_id(snapshot.service_name) @@ -308,6 +324,7 @@ class Backups: Jobs.update(job, status=JobStatus.RUNNING) try: + Backups.assert_restorable(snapshot) Backups.restore_service_from_snapshot(service, snapshot.id) service.post_restore() except Exception as e: @@ -327,6 +344,16 @@ class Backups: get_service_by_id(snapshot.service_name), snapshot.id ) + 
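# (Illustrative aside, not part of the patch: the space check added below boils
# down to one statvfs computation — the fragment size times the block count
# available to unprivileged processes gives the bytes a restore can actually
# use. The same arithmetic as a standalone sketch:)

import os

def usable_bytes(path: str) -> int:
    fs_info = os.statvfs(path)  # stats of the filesystem holding `path`
    return fs_info.f_frsize * fs_info.f_bavail  # bytes usable by non-root callers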
@staticmethod + def space_usable_for_service(service: Service) -> bool: + folders = service.get_folders() + if folders == []: + raise ValueError("unallocated service", service.get_id()) + + fs_info = statvfs(folders[0]) + usable_bytes = fs_info.f_frsize * fs_info.f_bavail + return usable_bytes + @staticmethod def _store_last_snapshot(service_id: str, snapshot: Snapshot): """What do we do with a snapshot that is just made?""" From d02302c7b236d8fbb230e16923ceef31a0d2c2a4 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 13 Jun 2023 23:54:02 +0300 Subject: [PATCH 140/537] fix: BackupConfiguration argument order --- selfprivacy_api/graphql/queries/backup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 4d41c36..97b4682 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -15,13 +15,13 @@ class BackupConfiguration: provider: BackupProvider # When server is lost, the app should have the key to decrypt backups on a new server encryption_key: str + # False when repo is not initialized and not ready to be used + is_initialized: bool # If none, autobackups are disabled autobackup_period: typing.Optional[int] = None # Bucket name for Backblaze, path for some other providers location_name: typing.Optional[str] = None location_id: typing.Optional[str] = None - # False when repo is not initialized and not ready to be used - is_initialized: bool @strawberry.type From f950dd1e9334ab93439989c5ebe5b47daae46155 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 14 Jun 2023 00:00:29 +0300 Subject: [PATCH 141/537] feat(backups): register backups in GraphQL schema --- selfprivacy_api/graphql/schema.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index dff9304..e364dd1 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -6,6 +6,7 @@ from typing import AsyncGenerator import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations +from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations from selfprivacy_api.graphql.mutations.job_mutations import JobMutations from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations @@ -14,6 +15,7 @@ from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from selfprivacy_api.graphql.queries.api_queries import Api +from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.jobs import Job from selfprivacy_api.graphql.queries.services import Services from selfprivacy_api.graphql.queries.storage import Storage @@ -58,6 +60,11 @@ class Query: """Services queries""" return Services() + @strawberry.field(permission_classes=[IsAuthenticated]) + def backup(self) -> Backup: + """Backup queries""" + return Backup() + @strawberry.type class Mutation( @@ -68,6 +75,7 @@ class Mutation( StorageMutations, ServicesMutations, JobMutations, + BackupMutations, ): """Root schema for mutations""" From 0ef6569d976fee8ce6f26babb0cf49cde1a2b805 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 14 Jun 2023 00:43:01 +0300 Subject: [PATCH 142/537] fix(backups): try to 
actually get backup configuration --- selfprivacy_api/backup/providers/backblaze.py | 2 ++ .../backup/providers/local_file.py | 2 ++ selfprivacy_api/backup/providers/memory.py | 2 ++ selfprivacy_api/graphql/queries/backup.py | 23 +++++++++++-------- selfprivacy_api/graphql/queries/providers.py | 1 + 5 files changed, 20 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index 9ec5eba..f474a99 100644 --- a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -4,3 +4,5 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class Backblaze(AbstractBackupProvider): backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:") + + name = "BACKBLAZE" diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index a20f615..95075dd 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -5,6 +5,8 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class LocalFileBackup(AbstractBackupProvider): backuper = ResticBackuper("", "", "memory") + name = "FILE" + # login and key args are for compatibility with generic provider methods. They are ignored. def __init__(self, filename: str, login: str = "", key: str = ""): super().__init__() diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py index 3f257bf..a481559 100644 --- a/selfprivacy_api/backup/providers/memory.py +++ b/selfprivacy_api/backup/providers/memory.py @@ -4,3 +4,5 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class InMemoryBackup(AbstractBackupProvider): backuper = ResticBackuper("", "", ":memory:") + + name = "MEMORY" diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 97b4682..c20be35 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -18,22 +18,25 @@ class BackupConfiguration: # False when repo is not initialized and not ready to be used is_initialized: bool # If none, autobackups are disabled - autobackup_period: typing.Optional[int] = None + autobackup_period: typing.Optional[int] # Bucket name for Backblaze, path for some other providers - location_name: typing.Optional[str] = None - location_id: typing.Optional[str] = None + location_name: typing.Optional[str] + location_id: typing.Optional[str] @strawberry.type class Backup: @strawberry.field - def configuration() -> BackupConfiguration: - config = BackupConfiguration() - config.encryption_key = LocalBackupSecret.get() - config.is_initialized = Backups.is_initted() - config.autobackup_period = Backups.autobackup_period_minutes() - config.location_name = Backups.provider().location - config.location_id = Backups.provider().repo_id + def configuration(self) -> BackupConfiguration: + encryption_key = LocalBackupSecret.get() + return BackupConfiguration( + provider=BackupProvider[Backups.provider().name], + encryption_key=encryption_key.decode() if encryption_key else "", + is_initialized=Backups.is_initted(), + autobackup_period=Backups.autobackup_period_minutes(), + location_name=Backups.provider().location, + location_id=Backups.provider().repo_id, + ) @strawberry.field def all_snapshots(self) -> typing.List[SnapshotInfo]: diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py index 
2a9fcec..b9ca7ef 100644 --- a/selfprivacy_api/graphql/queries/providers.py +++ b/selfprivacy_api/graphql/queries/providers.py @@ -19,6 +19,7 @@ class ServerProvider(Enum): @strawberry.enum class BackupProvider(Enum): BACKBLAZE = "BACKBLAZE" + NONE = "NONE" # for testing purposes, make sure not selectable in prod. MEMORY = "MEMORY" FILE = "FILE" From f4df1f6a6263b459c6b367b766c0318a2aa5b532 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 14 Jun 2023 01:40:53 +0300 Subject: [PATCH 143/537] fix(backups): return type of encryption key --- selfprivacy_api/backup/local_secret.py | 4 ++-- selfprivacy_api/backup/providers/provider.py | 2 ++ selfprivacy_api/graphql/queries/backup.py | 3 +-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py index 76237f3..389f3a3 100644 --- a/selfprivacy_api/backup/local_secret.py +++ b/selfprivacy_api/backup/local_secret.py @@ -15,13 +15,13 @@ redis = RedisPool().get_connection() class LocalBackupSecret: @staticmethod - def get(): + def get() -> str: """A secret string which backblaze/other clouds do not know. Serves as encryption key. """ if not LocalBackupSecret.exists(): LocalBackupSecret.reset() - return redis.get(REDIS_KEY) + return redis.get(REDIS_KEY) # type: ignore @staticmethod def set(secret: str): diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index c303d4e..fcf179b 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -12,6 +12,8 @@ class AbstractBackupProvider(ABC): def backuper(self) -> AbstractBackuper: raise NotImplementedError + name = "NONE" + def __init__(self, login="", key="", location="", repo_id=""): self.backuper.set_creds(login, key, location) self.login = login diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index c20be35..2cc8d83 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -28,10 +28,9 @@ class BackupConfiguration: class Backup: @strawberry.field def configuration(self) -> BackupConfiguration: - encryption_key = LocalBackupSecret.get() return BackupConfiguration( provider=BackupProvider[Backups.provider().name], - encryption_key=encryption_key.decode() if encryption_key else "", + encryption_key=LocalBackupSecret.get(), is_initialized=Backups.is_initted(), autobackup_period=Backups.autobackup_period_minutes(), location_name=Backups.provider().location, From d356fad5347ba72dc2cc88578d17c85e72bf6a32 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 14 Jun 2023 02:52:10 +0300 Subject: [PATCH 144/537] fix(backups): Handle orphaned snapshots --- selfprivacy_api/backup/local_secret.py | 2 +- .../graphql/common_types/backup_snapshot.py | 9 ----- .../graphql/common_types/service.py | 5 ++- selfprivacy_api/graphql/queries/backup.py | 33 +++++++++++++++++-- 4 files changed, 33 insertions(+), 16 deletions(-) delete mode 100644 selfprivacy_api/graphql/common_types/backup_snapshot.py diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py index 389f3a3..ea2afec 100644 --- a/selfprivacy_api/backup/local_secret.py +++ b/selfprivacy_api/backup/local_secret.py @@ -38,7 +38,7 @@ class LocalBackupSecret: @staticmethod def exists() -> bool: - return redis.exists(REDIS_KEY) + return redis.exists(REDIS_KEY) == 1 @staticmethod def _generate() -> str: diff --git 
a/selfprivacy_api/graphql/common_types/backup_snapshot.py b/selfprivacy_api/graphql/common_types/backup_snapshot.py deleted file mode 100644 index 3256e0c..0000000 --- a/selfprivacy_api/graphql/common_types/backup_snapshot.py +++ /dev/null @@ -1,9 +0,0 @@ -import datetime -import strawberry - - -@strawberry.type -class SnapshotInfo: - id: str - service_name: str - created_at: datetime.datetime diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 9e04254..b3403e9 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -3,7 +3,6 @@ import typing import strawberry import datetime from selfprivacy_api.graphql.common_types.dns import DnsRecord -from selfprivacy_api.graphql.common_types.backup_snapshot import SnapshotInfo from selfprivacy_api.services import get_service_by_id, get_services_by_location from selfprivacy_api.services import Service as ServiceInterface @@ -104,14 +103,14 @@ class Service: return get_storage_usage(self) @strawberry.field - def backup_snapshots(self) -> typing.Optional[typing.List[SnapshotInfo]]: + def backup_snapshots(self) -> typing.Optional[typing.List["SnapshotInfo"]]: return None @strawberry.type class SnapshotInfo: id: str - service: "Service" + service: Service created_at: datetime.datetime diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 2cc8d83..9858543 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -7,13 +7,20 @@ import strawberry from selfprivacy_api.backup import Backups from selfprivacy_api.backup.local_secret import LocalBackupSecret from selfprivacy_api.graphql.queries.providers import BackupProvider -from selfprivacy_api.graphql.common_types.service import SnapshotInfo +from selfprivacy_api.graphql.common_types.service import ( + Service, + ServiceStatusEnum, + SnapshotInfo, + service_to_graphql_service, +) +from selfprivacy_api.services import get_service_by_id @strawberry.type class BackupConfiguration: provider: BackupProvider - # When server is lost, the app should have the key to decrypt backups on a new server + # When server is lost, the app should have the key to decrypt backups + # on a new server encryption_key: str # False when repo is not initialized and not ready to be used is_initialized: bool @@ -39,11 +46,31 @@ class Backup: @strawberry.field def all_snapshots(self) -> typing.List[SnapshotInfo]: + if not Backups.is_initted(): + return [] result = [] snapshots = Backups.get_all_snapshots() for snap in snapshots: + service = get_service_by_id(snap.service_name) + if service is None: + service = Service( + id=snap.service_name, + display_name=f"{snap.service_name} (Orphaned)", + description="", + svg_icon="", + is_movable=False, + is_required=False, + is_enabled=False, + status=ServiceStatusEnum.OFF, + url=None, + dns_records=None, + ) + else: + service = service_to_graphql_service(service) graphql_snap = SnapshotInfo( - id=snap.id, service=snap.service_name, created_at=snap.created_at + id=snap.id, + service=service, + created_at=snap.created_at, ) result.append(graphql_snap) return result From 865e304f429bf0151feba79f0569e696ac581964 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 09:52:44 +0000 Subject: [PATCH 145/537] test(backups): test backup API - backing up --- selfprivacy_api/backup/tasks.py | 6 ------ .../graphql/mutations/backup_mutations.py | 14 ++++++++++---- 
selfprivacy_api/graphql/schema.py | 1 + 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index e88f651..bd3925d 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -23,18 +23,12 @@ def validate_datetime(dt: datetime): # huey tasks need to return something @huey.task() def start_backup(service: Service) -> bool: - # Backups can create the job, but doing this here - # allows us to see the job as queued before it is actually executed - add_backup_job(service) - Backups.back_up(service) return True @huey.task() def restore_snapshot(snapshot: Snapshot) -> bool: - add_restore_job(snapshot) - Backups.restore_snapshot(snapshot) return True diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 2a916f5..898702a 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -17,7 +17,7 @@ from selfprivacy_api.graphql.common_types.jobs import job_to_api_job from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_all_services, get_service_by_id from selfprivacy_api.backup.tasks import start_backup, restore_snapshot -from selfprivacy_api.backup.jobs import get_backup_job, get_restore_job +from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job @strawberry.input @@ -86,11 +86,16 @@ class BackupMutations: message=f"nonexistent service: {service_id}", job=None, ) + + job = add_backup_job(service) start_backup(service) - job = get_backup_job(service) + job = job_to_api_job(job) return GenericJobButationReturn( - success=True, code=200, message="Backup job queued", job=job_to_api_job(job) + success=True, + code=200, + message="Backup job queued", + job=job, ) @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -106,13 +111,14 @@ class BackupMutations: job=None, ) + job = add_restore_job(snap) restore_snapshot(snap) return GenericJobButationReturn( success=True, code=200, message="restore job created", - job=get_restore_job(service), + job=job, ) @strawberry.mutation(permission_classes=[IsAuthenticated]) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index e364dd1..9e40d7c 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -13,6 +13,7 @@ from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutatio from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations +from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations from selfprivacy_api.graphql.queries.api_queries import Api from selfprivacy_api.graphql.queries.backup import Backup From d9bde6930b70369606bed22629a3e5f7005e0c89 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 10:06:01 +0000 Subject: [PATCH 146/537] fix(backups): register queries --- selfprivacy_api/graphql/schema.py | 1 + tests/common.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 9e40d7c..7107e20 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -21,6 +21,7 @@ from selfprivacy_api.graphql.queries.jobs import Job from selfprivacy_api.graphql.queries.services import 
Services from selfprivacy_api.graphql.queries.storage import Storage from selfprivacy_api.graphql.queries.system import System +from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.mutations.users_mutations import UserMutations from selfprivacy_api.graphql.queries.users import Users diff --git a/tests/common.py b/tests/common.py index 18e065c..e4a283d 100644 --- a/tests/common.py +++ b/tests/common.py @@ -24,5 +24,9 @@ def generate_users_query(query_array): return "query TestUsers {\n users {" + "\n".join(query_array) + "}\n}" +def generate_backup_query(query_array): + return "query TestBackup {\n backup {" + "\n".join(query_array) + "}\n}" + + def mnemonic_to_hex(mnemonic): return Mnemonic(language="english").to_entropy(mnemonic).hex() From c47977c1002d7275277b24fc7f8acdf358df0756 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 10:09:38 +0000 Subject: [PATCH 147/537] test(backups): actual testfile --- tests/test_graphql/test_api_backup.py | 38 +++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 tests/test_graphql/test_api_backup.py diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py new file mode 100644 index 0000000..1d944f4 --- /dev/null +++ b/tests/test_graphql/test_api_backup.py @@ -0,0 +1,38 @@ +from tests.test_graphql.test_backup import dummy_service, backups, raw_dummy_service + +# from tests.common import generate_api_query + +# from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations +from selfprivacy_api.jobs import Jobs, JobStatus + +API_BACK_UP_MUTATION = """ +mutation TestBackupService($service_id: String) { + startBackup(serviceId: $service_id) { + success + message + code + job { + uid + status + } + } +} +""" + + +def api_backup(authorized_client, service): + response = authorized_client.post( + "/graphql", + json={ + "query": API_BACK_UP_MUTATION, + "variables": {"service_id": service.get_id()}, + }, + ).json() + return response + + +def test_start_backup(authorized_client, dummy_service): + response = api_backup(authorized_client, dummy_service) + assert response["data"]["startBackup"]["success"] is True + job = response["data"]["startBackup"]["job"] + assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED From 928f026e7ce82bf3785c504a4c57cd02f993a0b2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 11:14:52 +0000 Subject: [PATCH 148/537] test(backups): snapshot query --- tests/test_graphql/test_api_backup.py | 38 ++++++++++++++++++++++++--- 1 file changed, 35 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 1d944f4..c0961b7 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -1,10 +1,18 @@ from tests.test_graphql.test_backup import dummy_service, backups, raw_dummy_service +from tests.common import generate_backup_query -# from tests.common import generate_api_query - -# from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations from selfprivacy_api.jobs import Jobs, JobStatus +API_SNAPSHOTS_QUERY = """ +allSnapshots { + id + service { + id + } + createdAt +} +""" + API_BACK_UP_MUTATION = """ mutation TestBackupService($service_id: String) { startBackup(serviceId: $service_id) { @@ -31,6 +39,30 @@ def api_backup(authorized_client, service): return response +def get_data(response): + assert response.status_code == 200 + response = response.json() + assert 
response["data"] is not None + data = response["data"] + return data + + +def api_snapshots(authorized_client, service): + response = authorized_client.post( + "/graphql", + json={"query": generate_backup_query([API_SNAPSHOTS_QUERY])}, + ) + data = get_data(response) + result = data["backup"]["allSnapshots"] + assert result is not None + return result + + +def test_snapshots_empty(authorized_client, dummy_service): + snaps = api_snapshots(authorized_client, dummy_service) + assert snaps == [] + + def test_start_backup(authorized_client, dummy_service): response = api_backup(authorized_client, dummy_service) assert response["data"]["startBackup"]["success"] is True From f1654c699cdfca1ce1464f36947c062b660e70f4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 11:52:57 +0000 Subject: [PATCH 149/537] fix(backups): shorten snapshot query signature --- tests/test_graphql/test_api_backup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index c0961b7..5b9aabf 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -47,7 +47,7 @@ def get_data(response): return data -def api_snapshots(authorized_client, service): +def api_snapshots(authorized_client): response = authorized_client.post( "/graphql", json={"query": generate_backup_query([API_SNAPSHOTS_QUERY])}, @@ -59,7 +59,7 @@ def api_snapshots(authorized_client, service): def test_snapshots_empty(authorized_client, dummy_service): - snaps = api_snapshots(authorized_client, dummy_service) + snaps = api_snapshots(authorized_client) assert snaps == [] From aa9d082adc2620ca5100e6cd4127cdcb7e634c2f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 11:54:24 +0000 Subject: [PATCH 150/537] test(backups): display errors from api --- tests/test_graphql/test_api_backup.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 5b9aabf..40c81fa 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -42,6 +42,10 @@ def api_backup(authorized_client, service): def get_data(response): assert response.status_code == 200 response = response.json() + if ( + "errors" in response.keys() + ): # convenience for debugging, this will display error + assert response["errors"] == [] assert response["data"] is not None data = response["data"] return data From 21791f20a2ec8c338822e020c911875012954bda Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 11:55:46 +0000 Subject: [PATCH 151/537] test(backups): make dummy service more compliant --- selfprivacy_api/services/test_service/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index c14feca..b1c2924 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -42,7 +42,7 @@ class DummyService(Service): @staticmethod def get_url() -> typing.Optional[str]: """Return service url.""" - domain = get_domain() + domain = "test.com" return f"https://password.{domain}" @staticmethod @@ -68,7 +68,7 @@ class DummyService(Service): Return code 3 means service is stopped. Return code 4 means service is off. 
""" - return 0 + return ServiceStatus.ACTIVE @staticmethod def enable(): From 92322d8fada298a4b2c1b7961fa0a63ce8315cea Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 12:08:07 +0000 Subject: [PATCH 152/537] test(backups): test dummy service compliance --- tests/test_graphql/test_api_backup.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 40c81fa..2f11077 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -1,6 +1,8 @@ from tests.test_graphql.test_backup import dummy_service, backups, raw_dummy_service from tests.common import generate_backup_query + +from selfprivacy_api.graphql.common_types.service import service_to_graphql_service from selfprivacy_api.jobs import Jobs, JobStatus API_SNAPSHOTS_QUERY = """ @@ -62,6 +64,11 @@ def api_snapshots(authorized_client): return result +def test_dummy_service_convertible_to_gql(dummy_service): + gql_service = service_to_graphql_service(dummy_service) + assert gql_service is not None + + def test_snapshots_empty(authorized_client, dummy_service): snaps = api_snapshots(authorized_client) assert snaps == [] From 39cd4b714be734e8bee76827b9d06210a5093c38 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 12:35:41 +0000 Subject: [PATCH 153/537] test(backups): check snapshots getting created --- tests/test_graphql/test_api_backup.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 2f11077..4c13398 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -78,4 +78,9 @@ def test_start_backup(authorized_client, dummy_service): response = api_backup(authorized_client, dummy_service) assert response["data"]["startBackup"]["success"] is True job = response["data"]["startBackup"]["job"] + assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED + snaps = api_snapshots(authorized_client) + assert len(snaps) == 1 + snap = snaps[0] + assert snap["service"]["id"] == "testservice" From 17c7dffb07783c4c72537f4fa9bd8737cb83e829 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 12:41:45 +0000 Subject: [PATCH 154/537] test(backups): use get_data --- tests/test_graphql/test_api_backup.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 4c13398..7de6401 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -37,7 +37,7 @@ def api_backup(authorized_client, service): "query": API_BACK_UP_MUTATION, "variables": {"service_id": service.get_id()}, }, - ).json() + ) return response @@ -76,8 +76,9 @@ def test_snapshots_empty(authorized_client, dummy_service): def test_start_backup(authorized_client, dummy_service): response = api_backup(authorized_client, dummy_service) - assert response["data"]["startBackup"]["success"] is True - job = response["data"]["startBackup"]["job"] + data = get_data(response)["startBackup"] + assert data["success"] is True + job = data["job"] assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED snaps = api_snapshots(authorized_client) From 4862cdc72fa51a97f03a2b5372f4dfe846d24756 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 13:00:23 +0000 Subject: [PATCH 155/537] test(backups): test restore --- tests/test_graphql/test_api_backup.py | 42 +++++++++++++++++++++++++++ 
1 file changed, 42 insertions(+) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 7de6401..f78bb59 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -5,6 +5,21 @@ from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service from selfprivacy_api.jobs import Jobs, JobStatus + +API_RESTORE_MUTATION = """ +mutation TestRestoreService($snapshot_id: String!) { + restoreBackup(snapshotId: $snapshot_id) { + success + message + code + job { + uid + status + } + } +} +""" + API_SNAPSHOTS_QUERY = """ allSnapshots { id @@ -30,6 +45,17 @@ mutation TestBackupService($service_id: String) { """ +def api_restore(authorized_client, snapshot_id): + response = authorized_client.post( + "/graphql", + json={ + "query": API_RESTORE_MUTATION, + "variables": {"snapshot_id": snapshot_id}, + }, + ) + return response + + def api_backup(authorized_client, service): response = authorized_client.post( "/graphql", @@ -84,4 +110,20 @@ def test_start_backup(authorized_client, dummy_service): snaps = api_snapshots(authorized_client) assert len(snaps) == 1 snap = snaps[0] + + assert snap["id"] is not None + assert snap["id"] != "" assert snap["service"]["id"] == "testservice" + + +def test_restore(authorized_client, dummy_service): + api_backup(authorized_client, dummy_service) + snap = api_snapshots(authorized_client)[0] + assert snap["id"] is not None + + response = api_restore(authorized_client, snap["id"]) + data = get_data(response)["restoreBackup"] + assert data["success"] is True + job = data["job"] + + assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED From cbedd196e452d6049a10aa81429591baf35c1ef0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 13:06:32 +0000 Subject: [PATCH 156/537] fix(backups): non-nullable service when backing up --- tests/test_graphql/test_api_backup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index f78bb59..660df95 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -31,7 +31,7 @@ allSnapshots { """ API_BACK_UP_MUTATION = """ -mutation TestBackupService($service_id: String) { +mutation TestBackupService($service_id: String!) 
{ startBackup(serviceId: $service_id) { success message From 9e0b0d8caae9c39402c70915592382c8a8d98588 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 14:07:51 +0000 Subject: [PATCH 157/537] refactor(backups): make localfile repos normal --- selfprivacy_api/backup/__init__.py | 7 +------ selfprivacy_api/backup/providers/local_file.py | 11 ++--------- tests/test_graphql/test_backup.py | 2 +- 3 files changed, 4 insertions(+), 16 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index d496758..f6e2bfc 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -31,8 +31,7 @@ class Backups: @staticmethod def set_localfile_repo(file_path: str): ProviderClass = get_provider(BackupProvider.FILE) - provider = ProviderClass(file_path) - Storage.store_testrepo_path(file_path) + provider = ProviderClass(login="", key="", location=file_path, repo_id="") Storage.store_provider(provider) def set_provider(provider: AbstractBackupProvider): @@ -145,10 +144,6 @@ class Backups: ): provider_class = get_provider(BackupProvider[kind]) - if kind == "FILE": - path = Storage.get_testrepo_path() - return provider_class(path) - return provider_class(login=login, key=key, location=location, repo_id=repo_id) @staticmethod diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index 95075dd..77b0c92 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -3,12 +3,5 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class LocalFileBackup(AbstractBackupProvider): - backuper = ResticBackuper("", "", "memory") - - name = "FILE" - - # login and key args are for compatibility with generic provider methods. They are ignored. 
- def __init__(self, filename: str, login: str = "", key: str = ""): - super().__init__() - self.backuper = ResticBackuper("", "", ":local:") - self.backuper.set_creds("", "", filename) + backuper = ResticBackuper("", "", ":local:") + name = "FILE" \ No newline at end of file diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 5ac024f..f0462c8 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -95,7 +95,7 @@ def file_backup(tmpdir) -> AbstractBackupProvider: test_repo_path = path.join(tmpdir, "test_repo") ProviderClass = providers.get_provider(BackupProvider.FILE) assert ProviderClass is not None - provider = ProviderClass(test_repo_path) + provider = ProviderClass(location=test_repo_path) assert provider is not None return provider From b3724e240e405b3b8af8dab144f5be6a01e2b4db Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 14:17:04 +0000 Subject: [PATCH 158/537] refactor(backups): cleanup localfile-specific logic --- selfprivacy_api/backup/storage.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 680f39f..bff4047 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -21,7 +21,6 @@ REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" -REDIS_REPO_PATH_KEY = "backups:test_repo_path" REDIS_PROVIDER_KEY = "backups:provider" REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" @@ -33,7 +32,6 @@ class Storage: @staticmethod def reset(): redis.delete(REDIS_PROVIDER_KEY) - redis.delete(REDIS_REPO_PATH_KEY) redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) prefixes_to_clean = [ @@ -52,18 +50,6 @@ class Storage: for key in redis.keys(REDIS_SNAPSHOTS_PREFIX + "*"): redis.delete(key) - @staticmethod - def store_testrepo_path(path: str): - redis.set(REDIS_REPO_PATH_KEY, path) - - @staticmethod - def get_testrepo_path() -> str: - if not redis.exists(REDIS_REPO_PATH_KEY): - raise ValueError( - "No test repository filepath is set, but we tried to access it" - ) - return redis.get(REDIS_REPO_PATH_KEY) - @staticmethod def services_with_autobackup() -> List[str]: keys = redis.keys(REDIS_AUTOBACKUP_ENABLED_PREFIX + "*") From b3a37e8b1fd50457b4d48b9421d77603437e6f2f Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 14 Jun 2023 19:27:11 +0300 Subject: [PATCH 159/537] fix: Migrate to SP channel from 22.11 installations --- .gitignore | 1 + api.nix | 64 ------------------- default.nix | 2 - selfprivacy_api/dependencies.py | 2 +- selfprivacy_api/migrations/__init__.py | 4 ++ .../migrations/prepare_for_nixos_2305.py | 58 +++++++++++++++++ setup.py | 2 +- 7 files changed, 65 insertions(+), 68 deletions(-) delete mode 100644 api.nix delete mode 100644 default.nix create mode 100644 selfprivacy_api/migrations/prepare_for_nixos_2305.py diff --git a/.gitignore b/.gitignore index 7941396..7f93e02 100755 --- a/.gitignore +++ b/.gitignore @@ -147,3 +147,4 @@ cython_debug/ # End of https://www.toptal.com/developers/gitignore/api/flask *.db +*.rdb diff --git a/api.nix b/api.nix deleted file mode 100644 index 83bc695..0000000 --- a/api.nix +++ /dev/null @@ -1,64 +0,0 @@ -{ lib, python39Packages }: -with python39Packages; -buildPythonApplication { - pname = "selfprivacy-api"; - version = "2.0.0"; - - propagatedBuildInputs = [ - setuptools - portalocker - pytz - pytest - pytest-mock - pytest-datadir - huey - 
gevent - mnemonic - pydantic - typing-extensions - psutil - fastapi - uvicorn - (buildPythonPackage rec { - pname = "strawberry-graphql"; - version = "0.123.0"; - format = "pyproject"; - patches = [ - ./strawberry-graphql.patch - ]; - propagatedBuildInputs = [ - typing-extensions - python-multipart - python-dateutil - # flask - pydantic - pygments - poetry - # flask-cors - (buildPythonPackage rec { - pname = "graphql-core"; - version = "3.2.0"; - format = "setuptools"; - src = fetchPypi { - inherit pname version; - sha256 = "sha256-huKgvgCL/eGe94OI3opyWh2UKpGQykMcJKYIN5c4A84="; - }; - checkInputs = [ - pytest-asyncio - pytest-benchmark - pytestCheckHook - ]; - pythonImportsCheck = [ - "graphql" - ]; - }) - ]; - src = fetchPypi { - inherit pname version; - sha256 = "KsmZ5Xv8tUg6yBxieAEtvoKoRG60VS+iVGV0X6oCExo="; - }; - }) - ]; - - src = ./.; -} diff --git a/default.nix b/default.nix deleted file mode 100644 index 740c7ce..0000000 --- a/default.nix +++ /dev/null @@ -1,2 +0,0 @@ -{ pkgs ? import {} }: -pkgs.callPackage ./api.nix {} diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 3284fd8..d7b12fe 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.1.2" + return "2.1.3" diff --git a/selfprivacy_api/migrations/__init__.py b/selfprivacy_api/migrations/__init__.py index adb7d24..33472b9 100644 --- a/selfprivacy_api/migrations/__init__.py +++ b/selfprivacy_api/migrations/__init__.py @@ -22,6 +22,9 @@ from selfprivacy_api.migrations.providers import CreateProviderFields from selfprivacy_api.migrations.prepare_for_nixos_2211 import ( MigrateToSelfprivacyChannelFrom2205, ) +from selfprivacy_api.migrations.prepare_for_nixos_2305 import ( + MigrateToSelfprivacyChannelFrom2211, +) migrations = [ FixNixosConfigBranch(), @@ -31,6 +34,7 @@ migrations = [ CheckForFailedBindsMigration(), CreateProviderFields(), MigrateToSelfprivacyChannelFrom2205(), + MigrateToSelfprivacyChannelFrom2211(), ] diff --git a/selfprivacy_api/migrations/prepare_for_nixos_2305.py b/selfprivacy_api/migrations/prepare_for_nixos_2305.py new file mode 100644 index 0000000..d9fed28 --- /dev/null +++ b/selfprivacy_api/migrations/prepare_for_nixos_2305.py @@ -0,0 +1,58 @@ +import os +import subprocess + +from selfprivacy_api.migrations.migration import Migration + + +class MigrateToSelfprivacyChannelFrom2211(Migration): + """Migrate to selfprivacy Nix channel. + For some reason NixOS 22.11 servers initialized with the nixos channel instead of selfprivacy. + This stops us from upgrading to NixOS 23.05 + """ + + def get_migration_name(self): + return "migrate_to_selfprivacy_channel_from_2211" + + def get_migration_description(self): + return "Migrate to selfprivacy Nix channel from NixOS 22.11." + + def is_migration_needed(self): + try: + output = subprocess.check_output( + ["nix-channel", "--list"], start_new_session=True + ) + output = output.decode("utf-8") + first_line = output.split("\n", maxsplit=1)[0] + return first_line.startswith("nixos") and ( + first_line.endswith("nixos-22.11") + ) + except subprocess.CalledProcessError: + return False + + def migrate(self): + # Change the channel and update them. 
+ # Also, go to /etc/nixos directory and make a git pull + current_working_directory = os.getcwd() + try: + print("Changing channel") + os.chdir("/etc/nixos") + subprocess.check_output( + [ + "nix-channel", + "--add", + "https://channel.selfprivacy.org/nixos-selfpricacy", + "nixos", + ] + ) + subprocess.check_output(["nix-channel", "--update"]) + nixos_config_branch = subprocess.check_output( + ["git", "rev-parse", "--abbrev-ref", "HEAD"], start_new_session=True + ) + if nixos_config_branch.decode("utf-8").strip() == "api-redis": + print("Also changing nixos-config branch from api-redis to master") + subprocess.check_output(["git", "checkout", "master"]) + subprocess.check_output(["git", "pull"]) + os.chdir(current_working_directory) + except subprocess.CalledProcessError: + os.chdir(current_working_directory) + print("Error") diff --git a/setup.py b/setup.py index 51606b6..d20bf9a 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.1.2", + version="2.1.3", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From 33c60f971d9a2dd5bbd8e8fbb16ed659a7a763dc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 16 Jun 2023 13:43:41 +0000 Subject: [PATCH 160/537] test(backups): test reinitting repository --- selfprivacy_api/backup/__init__.py | 2 +- .../graphql/mutations/backup_mutations.py | 12 ++-- tests/test_graphql/test_api_backup.py | 66 +++++++++++++++++++ 3 files changed, 73 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f6e2bfc..b340034 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -135,7 +135,7 @@ class Backups: @staticmethod def set_provider(kind: str, login: str, key: str, location: str, repo_id: str = ""): - provider = Backups.construct_provider(kind, login, key, location, id) + provider = Backups.construct_provider(kind, login, key, location, repo_id) Storage.store_provider(provider) @staticmethod diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 898702a..ad43686 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -48,15 +48,17 @@ class BackupMutations: self, repository: InitializeRepositoryInput ) -> GenericBackupConfigReturn: """Initialize a new repository""" - provider = Backups.construct_provider( - kind=repository.provider, + Backups.set_provider( + kind=repository.provider.value, login=repository.login, key=repository.password, location=repository.location_name, repo_id=repository.location_id, ) - Backups.set_provider(provider) Backups.init_repo() + return GenericBackupConfigReturn( + success=True, message="", code="200", configuration=Backup().configuration() + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def remove_repository(self) -> GenericBackupConfigReturn: @@ -73,9 +75,7 @@ class BackupMutations: return Backup.configuration() @strawberry.mutation(permission_classes=[IsAuthenticated]) - def start_backup( - self, service_id: typing.Optional[str] = None - ) -> GenericJobButationReturn: + def start_backup(self, service_id: str) -> GenericJobButationReturn: """Start backup""" service = get_service_by_id(service_id) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 660df95..3c9c7f2 100644 --- a/tests/test_graphql/test_api_backup.py +++ 
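The migration above only declares its check and its action; the runner that invokes the `migrations` list is not part of this patch. A minimal sketch of how that list is presumably consumed (the `run_migrations` name is an assumption for illustration, not taken from the diff):

    # Hypothetical driver loop, assuming the Migration interface shown above:
    def run_migrations():
        for migration in migrations:
            if migration.is_migration_needed():  # e.g. first channel line ends with "nixos-22.11"
                migration.migrate()              # re-point the channel and pull nixos-config

Under this reading each migration stays idempotent: once the channel has been switched, `is_migration_needed()` returns False, so running the whole list on every start is safe.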
From 33c60f971d9a2dd5bbd8e8fbb16ed659a7a763dc Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 16 Jun 2023 13:43:41 +0000
Subject: [PATCH 160/537] test(backups): test reinitting repository

---
 selfprivacy_api/backup/__init__.py        |  2 +-
 .../graphql/mutations/backup_mutations.py | 12 ++--
 tests/test_graphql/test_api_backup.py     | 66 +++++++++++++++++++
 3 files changed, 73 insertions(+), 7 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index f6e2bfc..b340034 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -135,7 +135,7 @@ class Backups:

     @staticmethod
     def set_provider(kind: str, login: str, key: str, location: str, repo_id: str = ""):
-        provider = Backups.construct_provider(kind, login, key, location, id)
+        provider = Backups.construct_provider(kind, login, key, location, repo_id)
         Storage.store_provider(provider)

     @staticmethod
diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py
index 898702a..ad43686 100644
--- a/selfprivacy_api/graphql/mutations/backup_mutations.py
+++ b/selfprivacy_api/graphql/mutations/backup_mutations.py
@@ -48,15 +48,17 @@ class BackupMutations:
         self, repository: InitializeRepositoryInput
     ) -> GenericBackupConfigReturn:
         """Initialize a new repository"""
-        provider = Backups.construct_provider(
-            kind=repository.provider,
+        Backups.set_provider(
+            kind=repository.provider.value,
             login=repository.login,
             key=repository.password,
             location=repository.location_name,
             repo_id=repository.location_id,
         )
-        Backups.set_provider(provider)
         Backups.init_repo()
+        return GenericBackupConfigReturn(
+            success=True, message="", code="200", configuration=Backup().configuration()
+        )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def remove_repository(self) -> GenericBackupConfigReturn:
@@ -73,9 +75,7 @@ class BackupMutations:
         return Backup.configuration()

     @strawberry.mutation(permission_classes=[IsAuthenticated])
-    def start_backup(
-        self, service_id: typing.Optional[str] = None
-    ) -> GenericJobButationReturn:
+    def start_backup(self, service_id: str) -> GenericJobButationReturn:
         """Start backup"""

         service = get_service_by_id(service_id)
diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py
index 660df95..3c9c7f2 100644
--- a/tests/test_graphql/test_api_backup.py
+++ b/tests/test_graphql/test_api_backup.py
@@ -1,3 +1,4 @@
+from os import path
 from tests.test_graphql.test_backup import dummy_service, backups, raw_dummy_service
 from tests.common import generate_backup_query

@@ -5,6 +6,23 @@ from tests.common import generate_backup_query
 from selfprivacy_api.graphql.common_types.service import service_to_graphql_service
 from selfprivacy_api.jobs import Jobs, JobStatus

+API_INIT_MUTATION = """
+mutation TestInitRepo($input: InitializeRepositoryInput!) {
+    initializeRepository(repository: $input) {
+        success
+        message
+        code
+        configuration {
+            provider
+            encryptionKey
+            isInitialized
+            autobackupPeriod
+            locationName
+            locationId
+        }
+    }
+}
+"""

 API_RESTORE_MUTATION = """
 mutation TestRestoreService($snapshot_id: String!) {
@@ -67,6 +85,32 @@ def api_backup(authorized_client, service):
     return response


+def api_init_without_key(
+    authorized_client, kind, login, password, location_name, location_id
+):
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_INIT_MUTATION,
+            "variables": {
+                "input": {
+                    "provider": kind,
+                    "locationId": location_id,
+                    "locationName": location_name,
+                    "login": login,
+                    "password": password,
+                }
+            },
+        },
+    )
+    return response
+
+
+def assert_ok(data):
+    assert data["code"] == 200
+    assert data["success"] is True
+
+
 def get_data(response):
     assert response.status_code == 200
     response = response.json()
@@ -127,3 +171,25 @@ def test_restore(authorized_client, dummy_service):
     job = data["job"]

     assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED
+
+
+def test_reinit(authorized_client, dummy_service, tmpdir):
+    test_repo_path = path.join(tmpdir, "not_at_all_sus")
+    response = api_init_without_key(
+        authorized_client, "FILE", "", "", test_repo_path, ""
+    )
+    data = get_data(response)["initializeRepository"]
+    assert_ok(data)
+    configuration = data["configuration"]
+    assert configuration["provider"] == "FILE"
+    assert configuration["locationId"] == ""
+    assert configuration["locationName"] == test_repo_path
+    assert len(configuration["encryptionKey"]) > 1
+    assert configuration["isInitialized"] is True
+
+    response = api_backup(authorized_client, dummy_service)
+    data = get_data(response)["startBackup"]
+    assert data["success"] is True
+    job = data["job"]
+
+    assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED

From 8e1e37c76699fbe236467fced40b99726b2811d5 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 16 Jun 2023 14:33:48 +0000
Subject: [PATCH 161/537] test(backups): preliminary test of repo reset

---
 .../graphql/mutations/backup_mutations.py |  4 +-
 tests/test_graphql/test_api_backup.py     | 43 +++++++++++++++++++
 2 files changed, 46 insertions(+), 1 deletion(-)

diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py
index ad43686..110e731 100644
--- a/selfprivacy_api/graphql/mutations/backup_mutations.py
+++ b/selfprivacy_api/graphql/mutations/backup_mutations.py
@@ -64,7 +64,9 @@ class BackupMutations:
     def remove_repository(self) -> GenericBackupConfigReturn:
         """Remove repository"""
         Backups.reset()
-        return Backup.configuration()
+        return GenericBackupConfigReturn(
+            success=True, message="", code="200", configuration=Backup().configuration()
+        )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def set_autobackup_period(
diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py
index 3c9c7f2..7d23902 100644
--- a/tests/test_graphql/test_api_backup.py
+++ b/tests/test_graphql/test_api_backup.py
@@ -6,6 +6,24 @@ from tests.common import generate_backup_query
 from selfprivacy_api.graphql.common_types.service import service_to_graphql_service
 from selfprivacy_api.jobs import Jobs, JobStatus

+API_REMOVE_REPOSITORY_MUTATION = """
+mutation TestRemoveRepo {
+    removeRepository {
+        success
+        message
+        code
+        configuration {
+            provider
+            encryptionKey
+            isInitialized
+            autobackupPeriod
+            locationName
+            locationId
+        }
+    }
+}
+"""
+
 API_INIT_MUTATION = """
 mutation TestInitRepo($input: InitializeRepositoryInput!) {
     initializeRepository(repository: $input) {
@@ -85,6 +103,17 @@ def api_backup(authorized_client, service):
     return response


+def api_remove(authorized_client):
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_REMOVE_REPOSITORY_MUTATION,
+            "variables": {},
+        },
+    )
+    return response
+
+
 def api_init_without_key(
     authorized_client, kind, login, password, location_name, location_id
 ):
@@ -193,3 +222,17 @@ def test_reinit(authorized_client, dummy_service, tmpdir):
     job = data["job"]

     assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED
+
+
+def test_remove(authorized_client, generic_userdata):
+    response = api_remove(authorized_client)
+    data = get_data(response)["removeRepository"]
+    assert_ok(data)
+
+    configuration = data["configuration"]
+    assert configuration["provider"] == "BACKBLAZE"
+    assert configuration["locationId"] == ""
+    assert configuration["locationName"] == "selfprivacy"
+    # still generated every time it is missing
+    assert len(configuration["encryptionKey"]) > 1
+    assert configuration["isInitialized"] is False

From 47aee3c1f10655ec6e1996f5d1099a295327d2cf Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 16 Jun 2023 14:39:00 +0000
Subject: [PATCH 162/537] fix(backups): fix output API return types for
 configuration

---
 selfprivacy_api/graphql/mutations/backup_mutations.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py
index 110e731..1488a2c 100644
--- a/selfprivacy_api/graphql/mutations/backup_mutations.py
+++ b/selfprivacy_api/graphql/mutations/backup_mutations.py
@@ -74,7 +74,9 @@ class BackupMutations:
     ) -> GenericBackupConfigReturn:
         """Set autobackup period. None is to disable autobackup"""
         Backups.set_autobackup_period_minutes(period)
-        return Backup.configuration()
+        return GenericBackupConfigReturn(
+            success=True, message="", code="200", configuration=Backup().configuration()
+        )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def start_backup(self, service_id: str) -> GenericJobButationReturn:
@@ -127,4 +129,8 @@ class BackupMutations:
     def force_snapshots_reload(self) -> GenericMutationReturn:
         """Force snapshots reload"""
         Backups.force_snapshot_reload()
-        return GenericMutationReturn()
+        return GenericMutationReturn(
+            success=True,
+            code=200,
+            message="",
+        )

From 79c3b9598ce01af48c29209f10b5e18c8dedc735 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 16 Jun 2023 15:09:39 +0000
Subject: [PATCH 163/537] feature(backups): resetting json config too

---
 selfprivacy_api/backup/__init__.py    | 23 +++++++++++++++++++++--
 tests/test_graphql/test_api_backup.py |  2 +-
 tests/test_graphql/test_backup.py     |  5 ++---
 3 files changed, 24 insertions(+), 6 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index b340034..329f81f 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -4,7 +4,7 @@ from os import statvfs

 from selfprivacy_api.models.backup.snapshot import Snapshot

-from selfprivacy_api.utils import ReadUserData
+from selfprivacy_api.utils import ReadUserData, WriteUserData

 from selfprivacy_api.services import get_service_by_id
 from selfprivacy_api.services.service import Service
@@ -22,6 +22,13 @@ from selfprivacy_api.backup.jobs import (
 )
 from selfprivacy_api.jobs import Jobs, JobStatus

+DEFAULT_JSON_PROVIDER = {
+    "provider": "BACKBLAZE",
+    "accountId": "",
+    "accountKey": "",
+    "bucket": "",
+}
+

 class Backups:
     """A singleton controller for backups"""
@@ -147,8 +154,13 @@ class Backups:
         return provider_class(login=login, key=key, location=location, repo_id=repo_id)

     @staticmethod
-    def reset():
+    def reset(reset_json=True):
         Storage.reset()
+        if reset_json:
+            try:
+                Backups.reset_provider_json()
+            except FileNotFoundError:  # if there is no userdata file, we do not need to reset it
+                pass

     @staticmethod
     def lookup_provider() -> AbstractBackupProvider:
@@ -190,6 +202,13 @@ class Backups:
             kind=provider_string, login=account, key=key, location=location
         )

+    def reset_provider_json() -> AbstractBackupProvider:
+        with WriteUserData() as user_data:
+            if "backblaze" in user_data.keys():
+                del user_data["backblaze"]
+
+            user_data["backup"] = DEFAULT_JSON_PROVIDER
+
     @staticmethod
     def load_provider_redis() -> AbstractBackupProvider:
         provider_model = Storage.load_provider()
diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py
index 7d23902..90381c4 100644
--- a/tests/test_graphql/test_api_backup.py
+++ b/tests/test_graphql/test_api_backup.py
@@ -232,7 +232,7 @@ def test_remove(authorized_client, generic_userdata):
     configuration = data["configuration"]
     assert configuration["provider"] == "BACKBLAZE"
     assert configuration["locationId"] == ""
-    assert configuration["locationName"] == "selfprivacy"
+    assert configuration["locationName"] == ""
     # still generated every time it is missing
     assert len(configuration["encryptionKey"]) > 1
     assert configuration["isInitialized"] is False
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index f0462c8..a70cdb8 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -37,7 +37,7 @@ def backups(tmpdir):

 @pytest.fixture()
 def backups_backblaze(generic_userdata):
-    Backups.reset()
+    Backups.reset(reset_json=False)


 @pytest.fixture()
@@ -101,7 +101,7 @@ def file_backup(tmpdir) -> AbstractBackupProvider:


 def test_config_load(generic_userdata):
-    Backups.reset()
+    Backups.reset(reset_json=False)
     provider = Backups.provider()

     assert provider is not None
@@ -445,7 +445,6 @@ def test_init_tracking_caching2(backups, raw_dummy_service):

 # Storage
 def test_provider_storage(backups_backblaze):
-    Backups.reset()
     provider = Backups.provider()

     assert provider is not None
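To make the effect of the new `reset()` concrete: after a full reset, the userdata file is expected to hold the blank provider block from `DEFAULT_JSON_PROVIDER`, with any legacy section removed. A sketch mirroring `reset_provider_json()` above (illustrative, not part of the patch):

    # Expected userdata state after Backups.reset():
    assert "backblaze" not in user_data
    assert user_data["backup"] == {
        "provider": "BACKBLAZE",
        "accountId": "",
        "accountKey": "",
        "bucket": "",
    }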
From f02e27cf068f00fdb46fa4aa5f88de1fef1ae652 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 16 Jun 2023 15:19:22 +0000
Subject: [PATCH 164/537] test(backups): add a backend json reset test

---
 tests/test_graphql/test_backup.py | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index a70cdb8..645b2fa 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -108,11 +108,31 @@ def test_config_load(generic_userdata):
     assert isinstance(provider, Backblaze)
     assert provider.login == "ID"
     assert provider.key == "KEY"
+    assert provider.location == "selfprivacy"

     assert provider.backuper.account == "ID"
     assert provider.backuper.key == "KEY"


+def test_json_reset(generic_userdata):
+    Backups.reset(reset_json=False)
+    provider = Backups.provider()
+    assert provider is not None
+    assert isinstance(provider, Backblaze)
+    assert provider.login == "ID"
+    assert provider.key == "KEY"
+    assert provider.location == "selfprivacy"
+
+    Backups.reset()
+    provider = Backups.provider()
+    assert provider is not None
+    assert isinstance(provider, Backblaze)
+    assert provider.login == ""
+    assert provider.key == ""
+    assert provider.location == ""
+    assert provider.repo_id == ""
+
+
 def test_select_backend():
     provider = providers.get_provider(BackupProvider.BACKBLAZE)
     assert provider is not None

From 3449837de95b5d1a33c0433712240dc276231eb8 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 16 Jun 2023 15:48:58 +0000
Subject: [PATCH 165/537] refactor(backups): make a dir for backuppers

---
 selfprivacy_api/backup/{backuper.py => backuppers/__init__.py} | 0
 .../{restic_backuper.py => backuppers/restic_backupper.py}    | 2 +-
 selfprivacy_api/backup/providers/backblaze.py                 | 2 +-
 selfprivacy_api/backup/providers/local_file.py                | 2 +-
 selfprivacy_api/backup/providers/memory.py                    | 2 +-
 selfprivacy_api/backup/providers/provider.py                  | 2 +-
 6 files changed, 5 insertions(+), 5 deletions(-)
 rename selfprivacy_api/backup/{backuper.py => backuppers/__init__.py} (100%)
 rename selfprivacy_api/backup/{restic_backuper.py => backuppers/restic_backupper.py} (99%)

diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuppers/__init__.py
similarity index 100%
rename from selfprivacy_api/backup/backuper.py
rename to selfprivacy_api/backup/backuppers/__init__.py
diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
similarity index 99%
rename from selfprivacy_api/backup/restic_backuper.py
rename to selfprivacy_api/backup/backuppers/restic_backupper.py
index 69bdea9..7b58a2c 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -6,7 +6,7 @@ from typing import List
 from collections.abc import Iterable
 from json.decoder import JSONDecodeError

-from selfprivacy_api.backup.backuper import AbstractBackuper
+from selfprivacy_api.backup.backuppers import AbstractBackuper
 from selfprivacy_api.models.backup.snapshot import Snapshot
 from selfprivacy_api.backup.jobs import get_backup_job
 from selfprivacy_api.services import get_service_by_id
diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py
index f474a99..3f2d873 100644
--- a/selfprivacy_api/backup/providers/backblaze.py
+++ b/selfprivacy_api/backup/providers/backblaze.py
@@ -1,5 +1,5 @@
 from .provider import AbstractBackupProvider
-from selfprivacy_api.backup.restic_backuper import ResticBackuper
+from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper


 class Backblaze(AbstractBackupProvider):
diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py
index 77b0c92..bdb2113 100644
--- a/selfprivacy_api/backup/providers/local_file.py
+++ b/selfprivacy_api/backup/providers/local_file.py
@@ -1,5 +1,5 @@
 from .provider import AbstractBackupProvider
-from selfprivacy_api.backup.restic_backuper import ResticBackuper
+from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper


 class LocalFileBackup(AbstractBackupProvider):
diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py
index a481559..e73af51 100644
--- a/selfprivacy_api/backup/providers/memory.py
+++ b/selfprivacy_api/backup/providers/memory.py
@@ -1,5 +1,5 @@
 from .provider import AbstractBackupProvider
-from selfprivacy_api.backup.restic_backuper import ResticBackuper
+from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper


 class InMemoryBackup(AbstractBackupProvider):
diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py
index fcf179b..c14f1e7 100644
--- a/selfprivacy_api/backup/providers/provider.py
+++ b/selfprivacy_api/backup/providers/provider.py
@@ -4,7 +4,7 @@ It assumes that while some providers are supported via restic/rclone, others
 may require different backends
 """
 from abc import ABC
-from selfprivacy_api.backup.backuper import AbstractBackuper
+from selfprivacy_api.backup.backuppers import AbstractBackuper


 class AbstractBackupProvider(ABC):

From 4f2f24daace9caa7856d93251498720c76689922 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 16 Jun 2023 16:03:09 +0000
Subject: [PATCH 166/537] refactor(backups): NoneBackupper class for those
 cases when we do not know

---
 .../backup/backuppers/none_backupper.py      | 26 +++++++++++++++++++
 selfprivacy_api/backup/providers/__init__.py |  1 +
 selfprivacy_api/backup/providers/provider.py |  3 ++-
 3 files changed, 29 insertions(+), 1 deletion(-)
 create mode 100644 selfprivacy_api/backup/backuppers/none_backupper.py

diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py
new file mode 100644
index 0000000..0c9509e
--- /dev/null
+++ b/selfprivacy_api/backup/backuppers/none_backupper.py
@@ -0,0 +1,26 @@
+from typing import List
+
+from selfprivacy_api.models.backup.snapshot import Snapshot
+from selfprivacy_api.backup.backuppers import AbstractBackuper
+
+
+class NoneBackupper(AbstractBackuper):
+    def is_initted(self, repo_name: str) -> bool:
+        return False
+
+    def start_backup(self, folders: List[str], repo_name: str):
+        raise NotImplementedError
+
+    def get_snapshots(self, repo_name) -> List[Snapshot]:
+        """Get all snapshots from the repo"""
+        raise NotImplementedError
+
+    def init(self, repo_name):
+        raise NotImplementedError
+
+    def restore_from_backup(self, repo_name: str, snapshot_id: str, folders: List[str]):
+        """Restore a target folder using a snapshot"""
+        raise NotImplementedError
+
+    def restored_size(self, repo_name, snapshot_id) -> float:
+        raise NotImplementedError
diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py
index 5428e80..bac51e5 100644
--- a/selfprivacy_api/backup/providers/__init__.py
+++ b/selfprivacy_api/backup/providers/__init__.py
@@ -9,6 +9,7 @@ PROVIDER_MAPPING = {
     BackupProvider.BACKBLAZE: Backblaze,
     BackupProvider.MEMORY: InMemoryBackup,
     BackupProvider.FILE: LocalFileBackup,
+    BackupProvider.NONE: AbstractBackupProvider,
 }


diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py
index c14f1e7..c6da12d 100644
--- a/selfprivacy_api/backup/providers/provider.py
+++ b/selfprivacy_api/backup/providers/provider.py
@@ -5,12 +5,13 @@ require different backends
 """
 from abc import ABC
 from selfprivacy_api.backup.backuppers import AbstractBackuper
+from selfprivacy_api.backup.backuppers.none_backupper import NoneBackupper


 class AbstractBackupProvider(ABC):
     @property
     def backuper(self) -> AbstractBackuper:
-        raise NotImplementedError
+        return NoneBackupper

     name = "NONE"
user_data["backup"]["accountId"] - key = user_data["backup"]["accountKey"] - provider_string = user_data["backup"]["provider"] - location = user_data["backup"]["bucket"] return Backups.construct_provider( - kind=provider_string, login=account, key=key, location=location + kind=provider_dict["provider"], + login=provider_dict["accountId"], + key=provider_dict["accountKey"], + location=provider_dict["bucket"], ) def reset_provider_json() -> AbstractBackupProvider: diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 908c1fc..61ef1ba 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -9,7 +9,11 @@ class AbstractBackuper(ABC): pass @abstractmethod - def is_initted(self, repo_name: str) -> bool: + def is_initted(self) -> bool: + raise NotImplementedError + + @abstractmethod + def set_creds(self, account: str, key: str, repo: str): raise NotImplementedError @abstractmethod diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index 0c9509e..de51d6a 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -5,9 +5,12 @@ from selfprivacy_api.backup.backuppers import AbstractBackuper class NoneBackupper(AbstractBackuper): - def is_initted(self, repo_name: str) -> bool: + def is_initted(self, repo_name: str = "") -> bool: return False + def set_creds(self, account: str, key: str, repo: str): + pass + def start_backup(self, folders: List[str], repo_name: str): raise NotImplementedError diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index c6da12d..dddc53f 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -11,7 +11,7 @@ from selfprivacy_api.backup.backuppers.none_backupper import NoneBackupper class AbstractBackupProvider(ABC): @property def backuper(self) -> AbstractBackuper: - return NoneBackupper + return NoneBackupper() name = "NONE" diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 90381c4..d65d6f1 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -230,7 +230,7 @@ def test_remove(authorized_client, generic_userdata): assert_ok(data) configuration = data["configuration"] - assert configuration["provider"] == "BACKBLAZE" + assert configuration["provider"] == "NONE" assert configuration["locationId"] == "" assert configuration["locationName"] == "" # still generated every time it is missing diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 645b2fa..428e3dd 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -15,6 +15,7 @@ from selfprivacy_api.backup import Backups import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze + from selfprivacy_api.backup.tasks import start_backup, restore_snapshot from selfprivacy_api.backup.storage import Storage from selfprivacy_api.backup.jobs import get_backup_job @@ -126,7 +127,7 @@ def test_json_reset(generic_userdata): Backups.reset() provider = Backups.provider() assert provider is not None - assert isinstance(provider, Backblaze) + assert isinstance(provider, AbstractBackupProvider) assert 
provider.login == "" assert provider.key == "" assert provider.location == "" From b5e2499a30c72ebc03e428d87d490796228eaed0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 19 Jun 2023 11:17:09 +0000 Subject: [PATCH 168/537] refactor(backups): delete legacy provider setting --- selfprivacy_api/backup/__init__.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 29fac73..bb17254 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -41,9 +41,6 @@ class Backups: provider = ProviderClass(login="", key="", location=file_path, repo_id="") Storage.store_provider(provider) - def set_provider(provider: AbstractBackupProvider): - Storage.store_provider(provider) - @staticmethod def get_last_backed_up(service: Service) -> Optional[datetime]: """Get a timezone-aware time of the last backup of a service""" From 25f3115c0554c92930cce83dbb6f5e4224680be3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 19 Jun 2023 13:28:02 +0000 Subject: [PATCH 169/537] test(backups): setting autobackup period --- .../graphql/mutations/backup_mutations.py | 6 +- tests/test_graphql/test_api_backup.py | 76 +++++++++++++++++++ 2 files changed, 81 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 1488a2c..ad7c0c6 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -73,7 +73,11 @@ class BackupMutations: self, period: typing.Optional[int] = None ) -> GenericBackupConfigReturn: """Set autobackup period. None is to disable autobackup""" - Backups.set_autobackup_period_minutes(period) + if period is not None: + Backups.set_autobackup_period_minutes(period) + else: + Backups.set_autobackup_period_minutes(0) + return GenericBackupConfigReturn( success=True, message="", code="200", configuration=Backup().configuration() ) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index d65d6f1..b8c09dc 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -6,6 +6,24 @@ from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service from selfprivacy_api.jobs import Jobs, JobStatus +API_SET_AUTOBACKUP_PERIOD_MUTATION = """ +mutation TestAutobackupPeriod($period: Int) { + setAutobackupPeriod(period: $period) { + success + message + code + configuration { + provider + encryptionKey + isInitialized + autobackupPeriod + locationName + locationId + } + } +} +""" + API_REMOVE_REPOSITORY_MUTATION = """ mutation TestRemoveRepo { removeRepository { @@ -103,6 +121,17 @@ def api_backup(authorized_client, service): return response +def api_set_period(authorized_client, period): + response = authorized_client.post( + "/graphql", + json={ + "query": API_SET_AUTOBACKUP_PERIOD_MUTATION, + "variables": {"period": period}, + }, + ) + return response + + def api_remove(authorized_client): response = authorized_client.post( "/graphql", @@ -236,3 +265,50 @@ def test_remove(authorized_client, generic_userdata): # still generated every time it is missing assert len(configuration["encryptionKey"]) > 1 assert configuration["isInitialized"] is False + + +def test_autobackup_period_nonzero(authorized_client): + new_period = 11 + response = api_set_period(authorized_client, new_period) + data = 
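Condensed, the lookup now degrades to an explicit NONE provider instead of silently inventing an in-memory repository. A paraphrase of `lookup_provider()` after this patch (no new behaviour, just the fallback chain spelled out):

    provider = Backups.load_provider_redis()            # 1. Redis cache
    if provider is None:
        try:
            provider = Backups.load_provider_json()     # 2. userdata: "backup", or legacy "backblaze"
        except FileNotFoundError:
            provider = None
    if provider is None:                                # 3. last resort: the NONE provider
        provider = Backups.construct_provider("NONE", login="", key="", location="")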
From 25f3115c0554c92930cce83dbb6f5e4224680be3 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 19 Jun 2023 13:28:02 +0000
Subject: [PATCH 169/537] test(backups): setting autobackup period

---
 .../graphql/mutations/backup_mutations.py |  6 +-
 tests/test_graphql/test_api_backup.py     | 76 +++++++++++++++++++
 2 files changed, 81 insertions(+), 1 deletion(-)

diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py
index 1488a2c..ad7c0c6 100644
--- a/selfprivacy_api/graphql/mutations/backup_mutations.py
+++ b/selfprivacy_api/graphql/mutations/backup_mutations.py
@@ -73,7 +73,11 @@ class BackupMutations:
         self, period: typing.Optional[int] = None
     ) -> GenericBackupConfigReturn:
         """Set autobackup period. None is to disable autobackup"""
-        Backups.set_autobackup_period_minutes(period)
+        if period is not None:
+            Backups.set_autobackup_period_minutes(period)
+        else:
+            Backups.set_autobackup_period_minutes(0)
+
         return GenericBackupConfigReturn(
             success=True, message="", code="200", configuration=Backup().configuration()
         )
diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py
index d65d6f1..b8c09dc 100644
--- a/tests/test_graphql/test_api_backup.py
+++ b/tests/test_graphql/test_api_backup.py
@@ -6,6 +6,24 @@ from tests.common import generate_backup_query
 from selfprivacy_api.graphql.common_types.service import service_to_graphql_service
 from selfprivacy_api.jobs import Jobs, JobStatus

+API_SET_AUTOBACKUP_PERIOD_MUTATION = """
+mutation TestAutobackupPeriod($period: Int) {
+    setAutobackupPeriod(period: $period) {
+        success
+        message
+        code
+        configuration {
+            provider
+            encryptionKey
+            isInitialized
+            autobackupPeriod
+            locationName
+            locationId
+        }
+    }
+}
+"""
+
 API_REMOVE_REPOSITORY_MUTATION = """
 mutation TestRemoveRepo {
     removeRepository {
@@ -103,6 +121,17 @@ def api_backup(authorized_client, service):
     return response


+def api_set_period(authorized_client, period):
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_SET_AUTOBACKUP_PERIOD_MUTATION,
+            "variables": {"period": period},
+        },
+    )
+    return response
+
+
 def api_remove(authorized_client):
     response = authorized_client.post(
         "/graphql",
@@ -236,3 +265,50 @@ def test_remove(authorized_client, generic_userdata):
     # still generated every time it is missing
     assert len(configuration["encryptionKey"]) > 1
     assert configuration["isInitialized"] is False
+
+
+def test_autobackup_period_nonzero(authorized_client):
+    new_period = 11
+    response = api_set_period(authorized_client, new_period)
+    data = get_data(response)["setAutobackupPeriod"]
+    assert_ok(data)
+
+    configuration = data["configuration"]
+    assert configuration["autobackupPeriod"] == new_period
+
+
+def test_autobackup_period_zero(authorized_client):
+    new_period = 0
+    # since it is none by default, we first set it to something non-negative
+    response = api_set_period(authorized_client, 11)
+    # and now we nullify it
+    response = api_set_period(authorized_client, new_period)
+    data = get_data(response)["setAutobackupPeriod"]
+    assert_ok(data)
+
+    configuration = data["configuration"]
+    assert configuration["autobackupPeriod"] == None
+
+
+def test_autobackup_period_none(authorized_client):
+    # since it is none by default, we first set it to something non-negative
+    response = api_set_period(authorized_client, 11)
+    # and now we nullify it
+    response = api_set_period(authorized_client, None)
+    data = get_data(response)["setAutobackupPeriod"]
+    assert_ok(data)
+
+    configuration = data["configuration"]
+    assert configuration["autobackupPeriod"] == None
+
+
+def test_autobackup_period_negative(authorized_client):
+    # since it is none by default, we first set it to something non-negative
+    response = api_set_period(authorized_client, 11)
+    # and now we nullify it
+    response = api_set_period(authorized_client, -12)
+    data = get_data(response)["setAutobackupPeriod"]
+    assert_ok(data)
+
+    configuration = data["configuration"]
+    assert configuration["autobackupPeriod"] == None

From e3a87f1d982df083f72ee06f26bc77556b01f1f0 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 19 Jun 2023 14:12:40 +0000
Subject: [PATCH 170/537] test(backups): ensure asking to reload snaps does
 not explode the server

---
 selfprivacy_api/backup/backuppers/__init__.py |  2 +-
 .../backup/backuppers/none_backupper.py       |  4 +-
 tests/test_graphql/test_api_backup.py         | 46 +++++++++++++++++++
 3 files changed, 49 insertions(+), 3 deletions(-)

diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py
index 61ef1ba..f20496d 100644
--- a/selfprivacy_api/backup/backuppers/__init__.py
+++ b/selfprivacy_api/backup/backuppers/__init__.py
@@ -21,7 +21,7 @@ class AbstractBackuper(ABC):
         raise NotImplementedError

     @abstractmethod
-    def get_snapshots(self, repo_name) -> List[Snapshot]:
+    def get_snapshots(self) -> List[Snapshot]:
         """Get all snapshots from the repo"""
         raise NotImplementedError

diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py
index de51d6a..e687323 100644
--- a/selfprivacy_api/backup/backuppers/none_backupper.py
+++ b/selfprivacy_api/backup/backuppers/none_backupper.py
@@ -14,9 +14,9 @@ class NoneBackupper(AbstractBackuper):
     def start_backup(self, folders: List[str], repo_name: str):
         raise NotImplementedError

-    def get_snapshots(self, repo_name) -> List[Snapshot]:
+    def get_snapshots(self) -> List[Snapshot]:
         """Get all snapshots from the repo"""
-        raise NotImplementedError
+        return []

     def init(self, repo_name):
         raise NotImplementedError
diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py
index b8c09dc..3eed12a 100644
--- a/tests/test_graphql/test_api_backup.py
+++ b/tests/test_graphql/test_api_backup.py
@@ -6,6 +6,16 @@ from tests.common import generate_backup_query
 from selfprivacy_api.graphql.common_types.service import service_to_graphql_service
 from selfprivacy_api.jobs import Jobs, JobStatus

+API_RELOAD_SNAPSHOTS = """
+mutation TestSnapshotsReload {
+    forceSnapshotsReload {
+        success
+        message
+        code
+    }
+}
+"""
+
 API_SET_AUTOBACKUP_PERIOD_MUTATION = """
 mutation TestAutobackupPeriod($period: Int) {
     setAutobackupPeriod(period: $period) {
@@ -143,6 +153,17 @@ def api_remove(authorized_client):
     return response


+def api_reload_snapshots(authorized_client):
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_RELOAD_SNAPSHOTS,
+            "variables": {},
+        },
+    )
+    return response
+
+
 def api_init_without_key(
     authorized_client, kind, login, password, location_name, location_id
 ):
@@ -312,3 +333,28 @@ def test_autobackup_period_negative(authorized_client):

     configuration = data["configuration"]
     assert configuration["autobackupPeriod"] == None
+
+
+# We cannot really check the effect at this level, we leave it to backend tests
+# But we still make it run in both empty and full scenarios and ask for snaps afterwards
+def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service):
+    api_remove(authorized_client)
+
+    response = api_reload_snapshots(authorized_client)
+    data = get_data(response)["forceSnapshotsReload"]
+    assert_ok(data)
+
+    snaps = api_snapshots(authorized_client)
+    assert snaps == []
+
+
+def test_reload_snapshots(authorized_client, dummy_service):
+    response = api_backup(authorized_client, dummy_service)
+    data = get_data(response)["startBackup"]
+
+    response = api_reload_snapshots(authorized_client)
+    data = get_data(response)["forceSnapshotsReload"]
+    assert_ok(data)
+
+    snaps = api_snapshots(authorized_client)
+    assert len(snaps) == 1

From c6919293b6659cfeba210b826b5c268f68f2ae34 Mon Sep 17 00:00:00 2001
From: Inex Code
Date: Wed, 21 Jun 2023 06:46:56 +0300
Subject: [PATCH 171/537] refactor(api): Group mutations

I've learned that there is no problem in grouping mutations like we do
with queries. This was a big mistake on my side; now we have legacy,
not-so-conveniently-placed endpoints.

I've grouped all mutations and left copies of the old ones flattened in
the root for backwards compatibility. We will migrate to mutation groups
on the client side, and backups now only use grouped mutations.

Tests are updated.
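Unrolled, the `deprecated_mutation()` helper introduced below produces, for example, this root-level field (an illustration of what the helper evaluates to, not extra code in the patch):

    # What DeprecatedJobMutations.remove_job amounts to:
    remove_job: GenericMutationReturn = strawberry.mutation(
        resolver=JobMutations.remove_job,
        permission_classes=[IsAuthenticated],
        deprecation_reason="Use `jobs.remove_job` instead",
    )

Clients therefore still see the flat field, but introspection marks it deprecated and points them at the grouped path.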
---
 .../backup/providers/local_file.py            |   2 +-
 .../graphql/mutations/backup_mutations.py     |  14 +-
 .../graphql/mutations/deprecated_mutations.py | 215 +++++++++++++++
 .../graphql/mutations/mutation_interface.py   |   2 +-
 .../graphql/mutations/services_mutations.py   |   4 +-
 .../graphql/mutations/ssh_mutations.py        | 102 -------
 .../graphql/mutations/storage_mutations.py    |   8 +-
 .../graphql/mutations/users_mutations.py      |  88 +++++-
 selfprivacy_api/graphql/schema.py             |  76 +++--
 tests/test_graphql/test_api_backup.py         |  46 ++--
 tests/test_graphql/test_api_devices.py        | 259 +++++++++++-------
 tests/test_graphql/test_api_recovery.py       | 158 ++++++-----
 tests/test_graphql/test_localsecret.py        |   2 +-
 tests/test_graphql/test_ssh.py                | 122 +++++----
 tests/test_graphql/test_system.py             | 241 +++++++++++-----
 tests/test_graphql/test_system_nixos_tasks.py |  64 +++--
 tests/test_graphql/test_users.py              | 168 ++++++------
 tests/test_model_storage.py                   |  17 +-
 18 files changed, 1023 insertions(+), 565 deletions(-)
 create mode 100644 selfprivacy_api/graphql/mutations/deprecated_mutations.py
 delete mode 100644 selfprivacy_api/graphql/mutations/ssh_mutations.py

diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py
index bdb2113..d31417e 100644
--- a/selfprivacy_api/backup/providers/local_file.py
+++ b/selfprivacy_api/backup/providers/local_file.py
@@ -4,4 +4,4 @@ from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper

 class LocalFileBackup(AbstractBackupProvider):
     backuper = ResticBackuper("", "", ":local:")
-    name = "FILE"
\ No newline at end of file
+    name = "FILE"
diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py
index ad7c0c6..f2bade0 100644
--- a/selfprivacy_api/graphql/mutations/backup_mutations.py
+++ b/selfprivacy_api/graphql/mutations/backup_mutations.py
@@ -6,7 +6,7 @@ from strawberry.types import Info
 from selfprivacy_api.graphql import IsAuthenticated
 from selfprivacy_api.graphql.mutations.mutation_interface import (
     GenericMutationReturn,
-    GenericJobButationReturn,
+    GenericJobMutationReturn,
     MutationReturnInterface,
 )
 from selfprivacy_api.graphql.queries.backup import BackupConfiguration
@@ -83,12 +83,12 @@ class BackupMutations:
         )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
-    def start_backup(self, service_id: str) -> GenericJobButationReturn:
+    def start_backup(self, service_id: str) -> GenericJobMutationReturn:
         """Start backup"""

         service = get_service_by_id(service_id)
         if service is None:
-            return GenericJobButationReturn(
+            return GenericJobMutationReturn(
                 success=False,
                 code=300,
                 message=f"nonexistent service: {service_id}",
@@ -99,7 +99,7 @@ class BackupMutations:
         start_backup(service)
         job = job_to_api_job(job)

-        return GenericJobButationReturn(
+        return GenericJobMutationReturn(
             success=True,
             code=200,
             message="Backup job queued",
@@ -107,12 +107,12 @@ class BackupMutations:
         )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
-    def restore_backup(self, snapshot_id: str) -> GenericJobButationReturn:
+    def restore_backup(self, snapshot_id: str) -> GenericJobMutationReturn:
         """Restore backup"""
         snap = Backups.get_snapshot_by_id(snapshot_id)
         service = get_service_by_id(snap.service_name)
         if snap is None:
-            return GenericJobButationReturn(
+            return GenericJobMutationReturn(
                 success=False,
                 code=400,
                 message=f"No such snapshot: {snapshot_id}",
@@ -122,7 +122,7 @@ class BackupMutations:
         job = add_restore_job(snap)
         restore_snapshot(snap)

-        return GenericJobButationReturn(
+        return GenericJobMutationReturn(
             success=True,
             code=200,
             message="restore job created",
diff --git a/selfprivacy_api/graphql/mutations/deprecated_mutations.py b/selfprivacy_api/graphql/mutations/deprecated_mutations.py
new file mode 100644
index 0000000..6d187c6
--- /dev/null
+++ b/selfprivacy_api/graphql/mutations/deprecated_mutations.py
@@ -0,0 +1,215 @@
+"""Deprecated mutations
+
+A mistake was made: mutations were not grouped, and were instead
+placed in the root of the mutations schema. In this file, we import all the
+mutations from their groups and provide them to the root for backwards compatibility.
+"""
+
+import strawberry
+from selfprivacy_api.graphql import IsAuthenticated
+from selfprivacy_api.graphql.common_types.user import UserMutationReturn
+from selfprivacy_api.graphql.mutations.api_mutations import (
+    ApiKeyMutationReturn,
+    ApiMutations,
+    DeviceApiTokenMutationReturn,
+)
+from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations
+from selfprivacy_api.graphql.mutations.job_mutations import JobMutations
+from selfprivacy_api.graphql.mutations.mutation_interface import (
+    GenericJobMutationReturn,
+    GenericMutationReturn,
+)
+from selfprivacy_api.graphql.mutations.services_mutations import (
+    ServiceMutationReturn,
+    ServicesMutations,
+)
+from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations
+from selfprivacy_api.graphql.mutations.system_mutations import (
+    AutoUpgradeSettingsMutationReturn,
+    SystemMutations,
+    TimezoneMutationReturn,
+)
+from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations
+from selfprivacy_api.graphql.mutations.users_mutations import UsersMutations
+
+
+def deprecated_mutation(func, group, auth=True):
+    return strawberry.mutation(
+        resolver=func,
+        permission_classes=[IsAuthenticated] if auth else [],
+        deprecation_reason=f"Use `{group}.{func.__name__}` instead",
+    )
+
+
+@strawberry.type
+class DeprecatedApiMutations:
+    get_new_recovery_api_key: ApiKeyMutationReturn = deprecated_mutation(
+        ApiMutations.get_new_recovery_api_key,
+        "api",
+    )
+
+    use_recovery_api_key: DeviceApiTokenMutationReturn = deprecated_mutation(
+        ApiMutations.use_recovery_api_key,
+        "api",
+        auth=False,
+    )
+
+    refresh_device_api_token: DeviceApiTokenMutationReturn = deprecated_mutation(
+        ApiMutations.refresh_device_api_token,
+        "api",
+    )
+
+    delete_device_api_token: GenericMutationReturn = deprecated_mutation(
+        ApiMutations.delete_device_api_token,
+        "api",
+    )
+
+    get_new_device_api_key: ApiKeyMutationReturn = deprecated_mutation(
+        ApiMutations.get_new_device_api_key,
+        "api",
+    )
+
+    invalidate_new_device_api_key: GenericMutationReturn = deprecated_mutation(
+        ApiMutations.invalidate_new_device_api_key,
+        "api",
+    )
+
+    authorize_with_new_device_api_key: DeviceApiTokenMutationReturn = (
+        deprecated_mutation(
+            ApiMutations.authorize_with_new_device_api_key,
+            "api",
+            auth=False,
+        )
+    )
+
+
+@strawberry.type
+class DeprecatedSystemMutations:
+    change_timezone: TimezoneMutationReturn = deprecated_mutation(
+        SystemMutations.change_timezone,
+        "system",
+    )
+
+    change_auto_upgrade_settings: AutoUpgradeSettingsMutationReturn = (
+        deprecated_mutation(
+            SystemMutations.change_auto_upgrade_settings,
+            "system",
+        )
+    )
+
+    run_system_rebuild: GenericMutationReturn = deprecated_mutation(
+        SystemMutations.run_system_rebuild,
+        "system",
+    )
+
+    run_system_rollback: GenericMutationReturn = deprecated_mutation(
+        SystemMutations.run_system_rollback,
+        "system",
+    )
+
+    run_system_upgrade: GenericMutationReturn = deprecated_mutation(
+        SystemMutations.run_system_upgrade,
+        "system",
+    )
+
+    reboot_system: GenericMutationReturn = deprecated_mutation(
+        SystemMutations.reboot_system,
+        "system",
+    )
+
+    pull_repository_changes: GenericMutationReturn = deprecated_mutation(
+        SystemMutations.pull_repository_changes,
+        "system",
+    )
+
+
+@strawberry.type
+class DeprecatedUsersMutations:
+    create_user: UserMutationReturn = deprecated_mutation(
+        UsersMutations.create_user,
+        "users",
+    )
+
+    delete_user: GenericMutationReturn = deprecated_mutation(
+        UsersMutations.delete_user,
+        "users",
+    )
+
+    update_user: UserMutationReturn = deprecated_mutation(
+        UsersMutations.update_user,
+        "users",
+    )
+
+    add_ssh_key: UserMutationReturn = deprecated_mutation(
+        UsersMutations.add_ssh_key,
+        "users",
+    )
+
+    remove_ssh_key: UserMutationReturn = deprecated_mutation(
+        UsersMutations.remove_ssh_key,
+        "users",
+    )
+
+
+@strawberry.type
+class DeprecatedStorageMutations:
+    resize_volume: GenericMutationReturn = deprecated_mutation(
+        StorageMutations.resize_volume,
+        "storage",
+    )
+
+    mount_volume: GenericMutationReturn = deprecated_mutation(
+        StorageMutations.mount_volume,
+        "storage",
+    )
+
+    unmount_volume: GenericMutationReturn = deprecated_mutation(
+        StorageMutations.unmount_volume,
+        "storage",
+    )
+
+    migrate_to_binds: GenericJobMutationReturn = deprecated_mutation(
+        StorageMutations.migrate_to_binds,
+        "storage",
+    )
+
+
+@strawberry.type
+class DeprecatedServicesMutations:
+    enable_service: ServiceMutationReturn = deprecated_mutation(
+        ServicesMutations.enable_service,
+        "services",
+    )
+
+    disable_service: ServiceMutationReturn = deprecated_mutation(
+        ServicesMutations.disable_service,
+        "services",
+    )
+
+    stop_service: ServiceMutationReturn = deprecated_mutation(
+        ServicesMutations.stop_service,
+        "services",
+    )
+
+    start_service: ServiceMutationReturn = deprecated_mutation(
+        ServicesMutations.start_service,
+        "services",
+    )
+
+    restart_service: ServiceMutationReturn = deprecated_mutation(
+        ServicesMutations.restart_service,
+        "services",
+    )
+
+    move_service: ServiceMutationReturn = deprecated_mutation(
+        ServicesMutations.move_service,
+        "services",
+    )
+
+
+@strawberry.type
+class DeprecatedJobMutations:
+    remove_job: GenericMutationReturn = deprecated_mutation(
+        JobMutations.remove_job,
+        "jobs",
+    )
diff --git a/selfprivacy_api/graphql/mutations/mutation_interface.py b/selfprivacy_api/graphql/mutations/mutation_interface.py
index 33a6b02..94fde2f 100644
--- a/selfprivacy_api/graphql/mutations/mutation_interface.py
+++ b/selfprivacy_api/graphql/mutations/mutation_interface.py
@@ -17,5 +17,5 @@ class GenericMutationReturn(MutationReturnInterface):


 @strawberry.type
-class GenericJobButationReturn(MutationReturnInterface):
+class GenericJobMutationReturn(MutationReturnInterface):
     job: typing.Optional[ApiJob] = None
diff --git a/selfprivacy_api/graphql/mutations/services_mutations.py b/selfprivacy_api/graphql/mutations/services_mutations.py
index 38a0d7f..86cab10 100644
--- a/selfprivacy_api/graphql/mutations/services_mutations.py
+++ b/selfprivacy_api/graphql/mutations/services_mutations.py
@@ -10,7 +10,7 @@ from selfprivacy_api.graphql.common_types.service import (
     service_to_graphql_service,
 )
 from selfprivacy_api.graphql.mutations.mutation_interface import (
-    GenericJobButationReturn,
+    GenericJobMutationReturn,
     GenericMutationReturn,
 )

@@ -34,7 +34,7 @@ class MoveServiceInput:


 @strawberry.type
-class ServiceJobMutationReturn(GenericJobButationReturn):
+class ServiceJobMutationReturn(GenericJobMutationReturn):
     """Service job mutation return type."""

     service: typing.Optional[Service] = None
diff --git a/selfprivacy_api/graphql/mutations/ssh_mutations.py b/selfprivacy_api/graphql/mutations/ssh_mutations.py
deleted file mode 100644
index 60f81a8..0000000
--- a/selfprivacy_api/graphql/mutations/ssh_mutations.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/usr/bin/env python3
-"""Users management module"""
-# pylint: disable=too-few-public-methods
-
-import strawberry
-from selfprivacy_api.actions.users import UserNotFound
-
-from selfprivacy_api.graphql import IsAuthenticated
-from selfprivacy_api.actions.ssh import (
-    InvalidPublicKey,
-    KeyAlreadyExists,
-    KeyNotFound,
-    create_ssh_key,
-    remove_ssh_key,
-)
-from selfprivacy_api.graphql.common_types.user import (
-    UserMutationReturn,
-    get_user_by_username,
-)
-
-
-@strawberry.input
-class SshMutationInput:
-    """Input type for ssh mutation"""
-
-    username: str
-    ssh_key: str
-
-
-@strawberry.type
-class SshMutations:
-    """Mutations ssh"""
-
-    @strawberry.mutation(permission_classes=[IsAuthenticated])
-    def add_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn:
-        """Add a new ssh key"""
-
-        try:
-            create_ssh_key(ssh_input.username, ssh_input.ssh_key)
-        except KeyAlreadyExists:
-            return UserMutationReturn(
-                success=False,
-                message="Key already exists",
-                code=409,
-            )
-        except InvalidPublicKey:
-            return UserMutationReturn(
-                success=False,
-                message="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported",
-                code=400,
-            )
-        except UserNotFound:
-            return UserMutationReturn(
-                success=False,
-                message="User not found",
-                code=404,
-            )
-        except Exception as e:
-            return UserMutationReturn(
-                success=False,
-                message=str(e),
-                code=500,
-            )
-
-        return UserMutationReturn(
-            success=True,
-            message="New SSH key successfully written",
-            code=201,
-            user=get_user_by_username(ssh_input.username),
-        )
-
-    @strawberry.mutation(permission_classes=[IsAuthenticated])
-    def remove_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn:
-        """Remove ssh key from user"""
-
-        try:
-            remove_ssh_key(ssh_input.username, ssh_input.ssh_key)
-        except KeyNotFound:
-            return UserMutationReturn(
-                success=False,
-                message="Key not found",
-                code=404,
-            )
-        except UserNotFound:
-            return UserMutationReturn(
-                success=False,
-                message="User not found",
-                code=404,
-            )
-        except Exception as e:
-            return UserMutationReturn(
-                success=False,
-                message=str(e),
-                code=500,
-            )
-
-        return UserMutationReturn(
-            success=True,
-            message="SSH key successfully removed",
-            code=200,
-            user=get_user_by_username(ssh_input.username),
-        )
diff --git a/selfprivacy_api/graphql/mutations/storage_mutations.py b/selfprivacy_api/graphql/mutations/storage_mutations.py
index 1b6d74e..243220b 100644
--- a/selfprivacy_api/graphql/mutations/storage_mutations.py
+++ b/selfprivacy_api/graphql/mutations/storage_mutations.py
@@ -4,7 +4,7 @@ from selfprivacy_api.graphql import IsAuthenticated
 from selfprivacy_api.graphql.common_types.jobs import job_to_api_job
 from selfprivacy_api.utils.block_devices import BlockDevices
 from selfprivacy_api.graphql.mutations.mutation_interface import (
-    GenericJobButationReturn,
+    GenericJobMutationReturn,
     GenericMutationReturn,
 )
 from selfprivacy_api.jobs.migrate_to_binds import (
@@ -79,10 +79,10 @@ class StorageMutations:
         )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
-    def migrate_to_binds(self, input: MigrateToBindsInput) -> GenericJobButationReturn:
+    def migrate_to_binds(self, input: MigrateToBindsInput) -> GenericJobMutationReturn:
         """Migrate to binds"""
         if is_bind_migrated():
-            return GenericJobButationReturn(
+            return GenericJobMutationReturn(
                 success=False, code=409, message="Already migrated to binds"
             )
         job = start_bind_migration(
@@ -94,7 +94,7 @@ class StorageMutations:
                 pleroma_block_device=input.pleroma_block_device,
             )
         )
-        return GenericJobButationReturn(
+        return GenericJobMutationReturn(
             success=True,
             code=200,
             message="Migration to binds started, rebuild the system to apply changes",
diff --git a/selfprivacy_api/graphql/mutations/users_mutations.py b/selfprivacy_api/graphql/mutations/users_mutations.py
index 27be1d7..f7317fb 100644
--- a/selfprivacy_api/graphql/mutations/users_mutations.py
+++ b/selfprivacy_api/graphql/mutations/users_mutations.py
@@ -3,10 +3,18 @@
 # pylint: disable=too-few-public-methods
 import strawberry
 from selfprivacy_api.graphql import IsAuthenticated
+from selfprivacy_api.actions.users import UserNotFound
 from selfprivacy_api.graphql.common_types.user import (
     UserMutationReturn,
     get_user_by_username,
 )
+from selfprivacy_api.actions.ssh import (
+    InvalidPublicKey,
+    KeyAlreadyExists,
+    KeyNotFound,
+    create_ssh_key,
+    remove_ssh_key,
+)
 from selfprivacy_api.graphql.mutations.mutation_interface import (
     GenericMutationReturn,
 )
@@ -21,8 +29,16 @@ class UserMutationInput:
     password: str


+@strawberry.input
+class SshMutationInput:
+    """Input type for ssh mutation"""
+
+    username: str
+    ssh_key: str
+
+
 @strawberry.type
-class UserMutations:
+class UsersMutations:
     """Mutations change user settings"""

     @strawberry.mutation(permission_classes=[IsAuthenticated])
@@ -115,3 +131,73 @@ class UserMutations:
             code=200,
             user=get_user_by_username(user.username),
         )
+
+    @strawberry.mutation(permission_classes=[IsAuthenticated])
+    def add_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn:
+        """Add a new ssh key"""
+
+        try:
+            create_ssh_key(ssh_input.username, ssh_input.ssh_key)
+        except KeyAlreadyExists:
+            return UserMutationReturn(
+                success=False,
+                message="Key already exists",
+                code=409,
+            )
+        except InvalidPublicKey:
+            return UserMutationReturn(
+                success=False,
+                message="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported",
+                code=400,
+            )
+        except UserNotFound:
+            return UserMutationReturn(
+                success=False,
+                message="User not found",
+                code=404,
+            )
+        except Exception as e:
+            return UserMutationReturn(
+                success=False,
+                message=str(e),
+                code=500,
+            )
+
+        return UserMutationReturn(
+            success=True,
+            message="New SSH key successfully written",
+            code=201,
+            user=get_user_by_username(ssh_input.username),
+        )
+
+    @strawberry.mutation(permission_classes=[IsAuthenticated])
+    def remove_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn:
+        """Remove ssh key from user"""
+
+        try:
+            remove_ssh_key(ssh_input.username, ssh_input.ssh_key)
+        except KeyNotFound:
+            return UserMutationReturn(
+                success=False,
+                message="Key not found",
+                code=404,
+            )
+        except UserNotFound:
+            return UserMutationReturn(
+                success=False,
+                message="User not found",
+                code=404,
+            )
+        except Exception as e:
+            return UserMutationReturn(
+                success=False,
+                message=str(e),
+                code=500,
+            )
+
+        return UserMutationReturn(
+            success=True,
+            message="SSH key successfully removed",
+            code=200,
+            user=get_user_by_username(ssh_input.username),
+        )
diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py
index 7107e20..9a6c82c 100644
--- a/selfprivacy_api/graphql/schema.py
+++ b/selfprivacy_api/graphql/schema.py
@@ -7,10 +7,17 @@ import strawberry
 from selfprivacy_api.graphql import IsAuthenticated
 from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations
 from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations
+from selfprivacy_api.graphql.mutations.deprecated_mutations import (
+    DeprecatedApiMutations,
+    DeprecatedJobMutations,
+    DeprecatedServicesMutations,
+    DeprecatedStorageMutations,
+    DeprecatedSystemMutations,
+    DeprecatedUsersMutations,
+)
 from selfprivacy_api.graphql.mutations.job_mutations import JobMutations
 from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn
 from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations
-from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations
 from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations
 from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations
 from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations
@@ -23,7 +30,7 @@ from selfprivacy_api.graphql.queries.storage import Storage
 from selfprivacy_api.graphql.queries.system import System
 from selfprivacy_api.graphql.queries.backup import Backup

-from selfprivacy_api.graphql.mutations.users_mutations import UserMutations
+from selfprivacy_api.graphql.mutations.users_mutations import UsersMutations
 from selfprivacy_api.graphql.queries.users import Users

 from selfprivacy_api.jobs.test import test_job
@@ -32,16 +39,16 @@ from selfprivacy_api.jobs.test import test_job
 class Query:
     """Root schema for queries"""

-    @strawberry.field(permission_classes=[IsAuthenticated])
-    def system(self) -> System:
-        """System queries"""
-        return System()
-
     @strawberry.field
     def api(self) -> Api:
         """API access status"""
         return Api()

+    @strawberry.field(permission_classes=[IsAuthenticated])
+    def system(self) -> System:
+        """System queries"""
+        return System()
+
     @strawberry.field(permission_classes=[IsAuthenticated])
     def users(self) -> Users:
         """Users queries"""
@@ -70,17 +77,50 @@ class Query:

 @strawberry.type
 class Mutation(
-    ApiMutations,
-    SystemMutations,
-    UserMutations,
-    SshMutations,
-    StorageMutations,
ServicesMutations, - JobMutations, - BackupMutations, + DeprecatedApiMutations, + DeprecatedSystemMutations, + DeprecatedUsersMutations, + DeprecatedStorageMutations, + DeprecatedServicesMutations, + DeprecatedJobMutations, ): """Root schema for mutations""" + @strawberry.field + def api(self) -> ApiMutations: + """API mutations""" + return ApiMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def system(self) -> SystemMutations: + """System mutations""" + return SystemMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def users(self) -> UsersMutations: + """Users mutations""" + return UsersMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def storage(self) -> StorageMutations: + """Storage mutations""" + return StorageMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def services(self) -> ServicesMutations: + """Services mutations""" + return ServicesMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def jobs(self) -> JobMutations: + """Jobs mutations""" + return JobMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def backup(self) -> BackupMutations: + """Backup mutations""" + return BackupMutations() + @strawberry.mutation(permission_classes=[IsAuthenticated]) def test_mutation(self) -> GenericMutationReturn: """Test mutation""" @@ -105,4 +145,8 @@ class Subscription: await asyncio.sleep(0.5) -schema = strawberry.Schema(query=Query, mutation=Mutation, subscription=Subscription) +schema = strawberry.Schema( + query=Query, + mutation=Mutation, + subscription=Subscription, +) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 3eed12a..bfa315b 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -8,21 +8,24 @@ from selfprivacy_api.jobs import Jobs, JobStatus API_RELOAD_SNAPSHOTS = """ mutation TestSnapshotsReload { + backup { forceSnapshotsReload { success message code } + } } """ API_SET_AUTOBACKUP_PERIOD_MUTATION = """ mutation TestAutobackupPeriod($period: Int) { + backup { setAutobackupPeriod(period: $period) { success message code - configuration { + configuration { provider encryptionKey isInitialized @@ -31,16 +34,18 @@ mutation TestAutobackupPeriod($period: Int) { locationId } } + } } """ API_REMOVE_REPOSITORY_MUTATION = """ mutation TestRemoveRepo { + backup { removeRepository { success message code - configuration { + configuration { provider encryptionKey isInitialized @@ -49,16 +54,18 @@ mutation TestRemoveRepo { locationId } } + } } """ API_INIT_MUTATION = """ mutation TestInitRepo($input: InitializeRepositoryInput!) { + backup { initializeRepository(repository: $input) { success message code - configuration { + configuration { provider encryptionKey isInitialized @@ -67,20 +74,23 @@ mutation TestInitRepo($input: InitializeRepositoryInput!) { locationId } } + } } """ API_RESTORE_MUTATION = """ mutation TestRestoreService($snapshot_id: String!) { + backup { restoreBackup(snapshotId: $snapshot_id) { success message code - job { + job { uid status } } + } } """ @@ -96,15 +106,17 @@ allSnapshots { API_BACK_UP_MUTATION = """ mutation TestBackupService($service_id: String!) 
{ + backup { startBackup(serviceId: $service_id) { success message code - job { + job { uid status } } + } } """ @@ -225,7 +237,7 @@ def test_snapshots_empty(authorized_client, dummy_service): def test_start_backup(authorized_client, dummy_service): response = api_backup(authorized_client, dummy_service) - data = get_data(response)["startBackup"] + data = get_data(response)["backup"]["startBackup"] assert data["success"] is True job = data["job"] @@ -245,7 +257,7 @@ def test_restore(authorized_client, dummy_service): assert snap["id"] is not None response = api_restore(authorized_client, snap["id"]) - data = get_data(response)["restoreBackup"] + data = get_data(response)["backup"]["restoreBackup"] assert data["success"] is True job = data["job"] @@ -257,7 +269,7 @@ def test_reinit(authorized_client, dummy_service, tmpdir): response = api_init_without_key( authorized_client, "FILE", "", "", test_repo_path, "" ) - data = get_data(response)["initializeRepository"] + data = get_data(response)["backup"]["initializeRepository"] assert_ok(data) configuration = data["configuration"] assert configuration["provider"] == "FILE" @@ -267,7 +279,7 @@ def test_reinit(authorized_client, dummy_service, tmpdir): assert configuration["isInitialized"] is True response = api_backup(authorized_client, dummy_service) - data = get_data(response)["startBackup"] + data = get_data(response)["backup"]["startBackup"] assert data["success"] is True job = data["job"] @@ -276,7 +288,7 @@ def test_reinit(authorized_client, dummy_service, tmpdir): def test_remove(authorized_client, generic_userdata): response = api_remove(authorized_client) - data = get_data(response)["removeRepository"] + data = get_data(response)["backup"]["removeRepository"] assert_ok(data) configuration = data["configuration"] @@ -291,7 +303,7 @@ def test_remove(authorized_client, generic_userdata): def test_autobackup_period_nonzero(authorized_client): new_period = 11 response = api_set_period(authorized_client, new_period) - data = get_data(response)["setAutobackupPeriod"] + data = get_data(response)["backup"]["setAutobackupPeriod"] assert_ok(data) configuration = data["configuration"] @@ -304,7 +316,7 @@ def test_autobackup_period_zero(authorized_client): response = api_set_period(authorized_client, 11) # and now we nullify it response = api_set_period(authorized_client, new_period) - data = get_data(response)["setAutobackupPeriod"] + data = get_data(response)["backup"]["setAutobackupPeriod"] assert_ok(data) configuration = data["configuration"] @@ -316,7 +328,7 @@ def test_autobackup_period_none(authorized_client): response = api_set_period(authorized_client, 11) # and now we nullify it response = api_set_period(authorized_client, None) - data = get_data(response)["setAutobackupPeriod"] + data = get_data(response)["backup"]["setAutobackupPeriod"] assert_ok(data) configuration = data["configuration"] @@ -328,7 +340,7 @@ def test_autobackup_period_negative(authorized_client): response = api_set_period(authorized_client, 11) # and now we nullify it response = api_set_period(authorized_client, -12) - data = get_data(response)["setAutobackupPeriod"] + data = get_data(response)["backup"]["setAutobackupPeriod"] assert_ok(data) configuration = data["configuration"] @@ -341,7 +353,7 @@ def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service): api_remove(authorized_client) response = api_reload_snapshots(authorized_client) - data = get_data(response)["forceSnapshotsReload"] + data = get_data(response)["backup"]["forceSnapshotsReload"] 
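
The extra ["backup"] level unwrapped here mirrors the composition in schema.py above: the root Mutation type no longer mixes the concrete mutation classes in directly, but exposes one strawberry field per group, and each group object owns the actual resolvers. A minimal, self-contained sketch of that pattern, with illustrative names rather than the project's full types:

import strawberry


@strawberry.type
class BackupMutations:
    @strawberry.mutation
    def force_snapshots_reload(self) -> bool:
        # Stand-in resolver; the real one returns a GenericMutationReturn.
        return True


@strawberry.type
class Mutation:
    @strawberry.field
    def backup(self) -> BackupMutations:
        # Selecting `backup { forceSnapshotsReload }` resolves this field first.
        return BackupMutations()


@strawberry.type
class Query:
    ok: bool = True  # placeholder: a valid schema needs at least one query field


schema = strawberry.Schema(query=Query, mutation=Mutation)

One consequence worth noting: the GraphQL spec only serializes execution of top-level mutation fields, so grouped mutations resolve like ordinary nested fields; that is harmless here because each request selects a single mutation.
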
assert_ok(data) snaps = api_snapshots(authorized_client) @@ -350,10 +362,10 @@ def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service): def test_reload_snapshots(authorized_client, dummy_service): response = api_backup(authorized_client, dummy_service) - data = get_data(response)["startBackup"] + data = get_data(response)["backup"]["startBackup"] response = api_reload_snapshots(authorized_client) - data = get_data(response)["forceSnapshotsReload"] + data = get_data(response)["backup"]["forceSnapshotsReload"] assert_ok(data) snaps = api_snapshots(authorized_client) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 07cf42a..cd76ef7 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -75,10 +75,12 @@ def test_graphql_tokens_info_unauthorized(client, tokens_file): DELETE_TOKEN_MUTATION = """ mutation DeleteToken($device: String!) { - deleteDeviceApiToken(device: $device) { - success - message - code + api { + deleteDeviceApiToken(device: $device) { + success + message + code + } } } """ @@ -110,9 +112,9 @@ def test_graphql_delete_token(authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteDeviceApiToken"]["success"] is True - assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 200 + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["success"] is True + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["code"] == 200 assert read_json(tokens_file) == { "tokens": [ { @@ -136,13 +138,16 @@ def test_graphql_delete_self_token(authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False - assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 400 + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["code"] == 400 assert read_json(tokens_file) == TOKENS_FILE_CONTETS -def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): +def test_graphql_delete_nonexistent_token( + authorized_client, + tokens_file, +): response = authorized_client.post( "/graphql", json={ @@ -154,19 +159,21 @@ def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False - assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 404 + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["code"] == 404 assert read_json(tokens_file) == TOKENS_FILE_CONTETS REFRESH_TOKEN_MUTATION = """ mutation RefreshToken { - refreshDeviceApiToken { - success - message - code - token 
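
The device-token assertions below repeat the same two levels of unwrapping many times over. A small helper in the spirit of get_data from test_api_backup.py would centralize that; this is a hypothetical convenience, not part of the patch:

def get_mutation_data(response, group: str, field: str) -> dict:
    """Unwrap {"data": {group: {field: payload}}} from a namespaced mutation."""
    body = response.json()
    assert body.get("data") is not None
    return body["data"][group][field]


# Usage, equivalent to the chained indexing in the assertions that follow:
# payload = get_mutation_data(response, "api", "refreshDeviceApiToken")
# assert payload["success"] is True and payload["code"] == 200
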
+ api { + refreshDeviceApiToken { + success + message + code + token + } } } """ @@ -181,19 +188,25 @@ def test_graphql_refresh_token_unauthorized(client, tokens_file): assert response.json()["data"] is None -def test_graphql_refresh_token(authorized_client, tokens_file, token_repo): +def test_graphql_refresh_token( + authorized_client, + tokens_file, + token_repo, +): response = authorized_client.post( "/graphql", json={"query": REFRESH_TOKEN_MUTATION}, ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["refreshDeviceApiToken"]["success"] is True - assert response.json()["data"]["refreshDeviceApiToken"]["message"] is not None - assert response.json()["data"]["refreshDeviceApiToken"]["code"] == 200 + assert response.json()["data"]["api"]["refreshDeviceApiToken"]["success"] is True + assert ( + response.json()["data"]["api"]["refreshDeviceApiToken"]["message"] is not None + ) + assert response.json()["data"]["api"]["refreshDeviceApiToken"]["code"] == 200 token = token_repo.get_token_by_name("test_token") assert token == Token( - token=response.json()["data"]["refreshDeviceApiToken"]["token"], + token=response.json()["data"]["api"]["refreshDeviceApiToken"]["token"], device_name="test_token", created_at=datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), ) @@ -201,17 +214,22 @@ def test_graphql_refresh_token(authorized_client, tokens_file, token_repo): NEW_DEVICE_KEY_MUTATION = """ mutation NewDeviceKey { - getNewDeviceApiKey { - success - message - code - key + api { + getNewDeviceApiKey { + success + message + code + key + } } } """ -def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): +def test_graphql_get_new_device_auth_key_unauthorized( + client, + tokens_file, +): response = client.post( "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, @@ -220,22 +238,26 @@ def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): assert response.json()["data"] is None -def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): +def test_graphql_get_new_device_auth_key( + authorized_client, + tokens_file, +): response = authorized_client.post( "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200 assert ( - response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + response.json()["data"]["api"]["getNewDeviceApiKey"]["key"].split(" ").__len__() + == 12 ) token = ( Mnemonic(language="english") - .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["api"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == token @@ -243,20 +265,25 @@ def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): INVALIDATE_NEW_DEVICE_KEY_MUTATION = """ mutation InvalidateNewDeviceKey { - invalidateNewDeviceApiKey { - success - message - code + api { + invalidateNewDeviceApiKey { + success + message + code + } } } """ -def 
test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): +def test_graphql_invalidate_new_device_token_unauthorized( + client, + tokens_file, +): response = client.post( "/graphql", json={ - "query": DELETE_TOKEN_MUTATION, + "query": INVALIDATE_NEW_DEVICE_KEY_MUTATION, "variables": { "device": "test_token", }, @@ -266,22 +293,26 @@ def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): assert response.json()["data"] is None -def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): +def test_graphql_get_and_delete_new_device_key( + authorized_client, + tokens_file, +): response = authorized_client.post( "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200 assert ( - response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + response.json()["data"]["api"]["getNewDeviceApiKey"]["key"].split(" ").__len__() + == 12 ) token = ( Mnemonic(language="english") - .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["api"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == token @@ -291,35 +322,46 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["invalidateNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["invalidateNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["invalidateNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["api"]["invalidateNewDeviceApiKey"]["success"] is True + ) + assert ( + response.json()["data"]["api"]["invalidateNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["invalidateNewDeviceApiKey"]["code"] == 200 assert read_json(tokens_file) == TOKENS_FILE_CONTETS AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """ mutation AuthorizeWithNewDeviceKey($input: UseNewDeviceKeyInput!) 
{ - authorizeWithNewDeviceApiKey(input: $input) { - success - message - code - token + api { + authorizeWithNewDeviceApiKey(input: $input) { + success + message + code + token + } } } """ -def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_file): +def test_graphql_get_and_authorize_new_device( + client, + authorized_client, + tokens_file, +): response = authorized_client.post( "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 - mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["api"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key @@ -337,17 +379,24 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"] + is True ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 - token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] + assert ( + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + token = response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["token"] assert read_json(tokens_file)["tokens"][2]["token"] == token assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" -def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): +def test_graphql_authorize_new_device_with_invalid_key( + client, + tokens_file, +): response = client.post( "/graphql", json={ @@ -362,25 +411,33 @@ def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"] + is False ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert ( + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file) == TOKENS_FILE_CONTETS -def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_file): +def test_graphql_get_and_authorize_used_key( + client, + authorized_client, + tokens_file, +): response = authorized_client.post( "/graphql", 
json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 - mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["api"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key @@ -398,14 +455,18 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"] + is True ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 200 assert ( read_json(tokens_file)["tokens"][2]["token"] - == response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] + == response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["token"] ) assert read_json(tokens_file)["tokens"][2]["name"] == "new_token" @@ -415,7 +476,7 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, "variables": { "input": { - "key": mnemonic_key, + "key": NEW_DEVICE_KEY_MUTATION, "deviceName": "test_token2", } }, @@ -423,16 +484,22 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"] + is False ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert ( + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file)["tokens"].__len__() == 3 def test_graphql_get_and_authorize_key_after_12_minutes( - client, authorized_client, tokens_file + client, + authorized_client, + tokens_file, ): response = authorized_client.post( "/graphql", @@ -440,15 +507,16 @@ def test_graphql_get_and_authorize_key_after_12_minutes( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert 
response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200 assert ( - response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + response.json()["data"]["api"]["getNewDeviceApiKey"]["key"].split(" ").__len__() + == 12 ) key = ( Mnemonic(language="english") - .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["api"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == key @@ -473,14 +541,21 @@ def test_graphql_get_and_authorize_key_after_12_minutes( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"] + is False ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert ( + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 404 -def test_graphql_authorize_without_token(client, tokens_file): +def test_graphql_authorize_without_token( + client, + tokens_file, +): response = client.post( "/graphql", json={ diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index c5e229e..87df666 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -57,22 +57,26 @@ def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_ API_RECOVERY_KEY_GENERATE_MUTATION = """ mutation TestGenerateRecoveryKey($limits: RecoveryKeyLimitsInput) { - getNewRecoveryApiKey(limits: $limits) { - success - message - code - key + api { + getNewRecoveryApiKey(limits: $limits) { + success + message + code + key + } } } """ API_RECOVERY_KEY_USE_MUTATION = """ mutation TestUseRecoveryKey($input: UseRecoveryKeyInput!) 
{ - useRecoveryApiKey(input: $input) { - success - message - code - token + api { + useRecoveryApiKey(input: $input) { + success + message + code + token + } } } """ @@ -87,18 +91,20 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is not None assert ( - response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] + .split(" ") + .__len__() == 18 ) assert read_json(tokens_file)["recovery_token"] is not None time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None - key = response.json()["data"]["getNewRecoveryApiKey"]["key"] + key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] assert ( datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) @@ -136,12 +142,12 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["api"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] ) assert read_json(tokens_file)["tokens"][2]["name"] == "new_test_token" @@ -161,12 +167,12 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["api"]["useRecoveryApiKey"]["token"] 
== read_json(tokens_file)["tokens"][3]["token"] ) assert read_json(tokens_file)["tokens"][3]["name"] == "new_test_token2" @@ -190,17 +196,19 @@ def test_graphql_generate_recovery_key_with_expiration_date( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is not None assert ( - response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] + .split(" ") + .__len__() == 18 ) assert read_json(tokens_file)["recovery_token"] is not None - key = response.json()["data"]["getNewRecoveryApiKey"]["key"] + key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str assert read_json(tokens_file)["recovery_token"]["token"] == mnemonic_to_hex(key) @@ -246,12 +254,12 @@ def test_graphql_generate_recovery_key_with_expiration_date( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["api"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] ) @@ -270,12 +278,12 @@ def test_graphql_generate_recovery_key_with_expiration_date( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["api"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] ) @@ -299,10 +307,10 @@ def test_graphql_generate_recovery_key_with_expiration_date( ) assert response.status_code == 200 assert response.json().get("data") is not 
None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is False - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is False + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is None assert read_json(tokens_file)["tokens"] == new_data["tokens"] @@ -345,10 +353,10 @@ def test_graphql_generate_recovery_key_with_expiration_in_the_past( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None assert "recovery_token" not in read_json(tokens_file) @@ -393,12 +401,12 @@ def test_graphql_generate_recovery_key_with_limited_uses( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is not None - mnemonic_key = response.json()["data"]["getNewRecoveryApiKey"]["key"] + mnemonic_key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] key = mnemonic_to_hex(mnemonic_key) assert read_json(tokens_file)["recovery_token"]["token"] == key @@ -433,10 +441,10 @@ def test_graphql_generate_recovery_key_with_limited_uses( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None # Try to get token status response = authorized_client.post( @@ -467,10 +475,10 @@ def test_graphql_generate_recovery_key_with_limited_uses( ) assert response.status_code == 200 assert 
response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None # Try to get token status response = authorized_client.post( @@ -501,10 +509,10 @@ def test_graphql_generate_recovery_key_with_limited_uses( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is False - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is False + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is None def test_graphql_generate_recovery_key_with_negative_uses( @@ -524,10 +532,10 @@ def test_graphql_generate_recovery_key_with_negative_uses( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): @@ -545,7 +553,7 @@ def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_ ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None diff --git a/tests/test_graphql/test_localsecret.py b/tests/test_graphql/test_localsecret.py index d4b637a..91c2e26 100644 --- a/tests/test_graphql/test_localsecret.py +++ b/tests/test_graphql/test_localsecret.py @@ -35,4 +35,4 @@ def test_local_secret_set(localsecret): assert oldsecret != newsecret 
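
Besides the trailing-newline fix, this hunk shows the whole contract test_local_secret_set relies on: LocalBackupSecret.get returns whatever was last passed to set. A deliberately hypothetical sketch of that contract (the real class may persist the secret rather than hold it in memory):

from typing import Optional


class LocalBackupSecret:
    """Hypothetical stand-in illustrating the get/set contract only."""

    _secret: Optional[str] = None

    @classmethod
    def get(cls) -> Optional[str]:
        return cls._secret

    @classmethod
    def set(cls, new_secret: str) -> None:
        cls._secret = new_secret
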
LocalBackupSecret.set(newsecret) - assert LocalBackupSecret.get() == newsecret \ No newline at end of file + assert LocalBackupSecret.get() == newsecret diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 4831692..5f888c8 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -44,13 +44,15 @@ def some_users(mocker, datadir): API_CREATE_SSH_KEY_MUTATION = """ mutation addSshKey($sshInput: SshMutationInput!) { - addSshKey(sshInput: $sshInput) { - success - message - code - user { - username - sshKeys + users { + addSshKey(sshInput: $sshInput) { + success + message + code + user { + username + sshKeys + } } } } @@ -90,12 +92,12 @@ def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_pope assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["addSshKey"]["message"] is not None - assert response.json()["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["users"]["addSshKey"]["message"] is not None + assert response.json()["data"]["users"]["addSshKey"]["success"] is True - assert response.json()["data"]["addSshKey"]["user"]["username"] == "user1" - assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "user1" + assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-rsa KEY user1@pc", "ssh-rsa KEY test_key@pc", ] @@ -117,12 +119,12 @@ def test_graphql_add_root_ssh_key(authorized_client, some_users, mock_subprocess assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["addSshKey"]["message"] is not None - assert response.json()["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["users"]["addSshKey"]["message"] is not None + assert response.json()["data"]["users"]["addSshKey"]["success"] is True - assert response.json()["data"]["addSshKey"]["user"]["username"] == "root" - assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "root" + assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-ed25519 KEY test@pc", "ssh-rsa KEY test_key@pc", ] @@ -144,12 +146,12 @@ def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["addSshKey"]["message"] is not None - assert response.json()["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["users"]["addSshKey"]["message"] is not None + assert response.json()["data"]["users"]["addSshKey"]["success"] is True - assert response.json()["data"]["addSshKey"]["user"]["username"] == "tester" - assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "tester" + assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-rsa KEY test@pc", 
"ssh-rsa KEY test_key@pc", ] @@ -171,9 +173,9 @@ def test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_ assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["addSshKey"]["code"] == 400 - assert response.json()["data"]["addSshKey"]["message"] is not None - assert response.json()["data"]["addSshKey"]["success"] is False + assert response.json()["data"]["users"]["addSshKey"]["code"] == 400 + assert response.json()["data"]["users"]["addSshKey"]["message"] is not None + assert response.json()["data"]["users"]["addSshKey"]["success"] is False def test_graphql_add_ssh_key_nonexistent_user( @@ -194,20 +196,22 @@ def test_graphql_add_ssh_key_nonexistent_user( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["addSshKey"]["code"] == 404 - assert response.json()["data"]["addSshKey"]["message"] is not None - assert response.json()["data"]["addSshKey"]["success"] is False + assert response.json()["data"]["users"]["addSshKey"]["code"] == 404 + assert response.json()["data"]["users"]["addSshKey"]["message"] is not None + assert response.json()["data"]["users"]["addSshKey"]["success"] is False API_REMOVE_SSH_KEY_MUTATION = """ mutation removeSshKey($sshInput: SshMutationInput!) { - removeSshKey(sshInput: $sshInput) { - success - message - code - user { - username - sshKeys + users { + removeSshKey(sshInput: $sshInput) { + success + message + code + user { + username + sshKeys + } } } } @@ -247,12 +251,14 @@ def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_p assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["users"]["removeSshKey"]["success"] is True - assert response.json()["data"]["removeSshKey"]["user"]["username"] == "user1" - assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert ( + response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "user1" + ) + assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_root_ssh_key( @@ -273,12 +279,14 @@ def test_graphql_remove_root_ssh_key( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["users"]["removeSshKey"]["success"] is True - assert response.json()["data"]["removeSshKey"]["user"]["username"] == "root" - assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert ( + response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "root" + ) + assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_main_ssh_key( @@ -299,12 +307,14 @@ def test_graphql_remove_main_ssh_key( assert 
response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["users"]["removeSshKey"]["success"] is True - assert response.json()["data"]["removeSshKey"]["user"]["username"] == "tester" - assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert ( + response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "tester" + ) + assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_nonexistent_ssh_key( @@ -325,9 +335,9 @@ def test_graphql_remove_nonexistent_ssh_key( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["removeSshKey"]["code"] == 404 - assert response.json()["data"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["removeSshKey"]["success"] is False + assert response.json()["data"]["users"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["users"]["removeSshKey"]["success"] is False def test_graphql_remove_ssh_key_nonexistent_user( @@ -348,6 +358,6 @@ def test_graphql_remove_ssh_key_nonexistent_user( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["removeSshKey"]["code"] == 404 - assert response.json()["data"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["removeSshKey"]["success"] is False + assert response.json()["data"]["users"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["users"]["removeSshKey"]["success"] is False diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index a021a16..3de4816 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -382,11 +382,13 @@ def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): API_CHANGE_TIMEZONE_MUTATION = """ mutation changeTimezone($timezone: String!) 
{ - changeTimezone(timezone: $timezone) { - success - message - code - timezone + system { + changeTimezone(timezone: $timezone) { + success + message + code + timezone + } } } """ @@ -420,10 +422,13 @@ def test_graphql_change_timezone(authorized_client, turned_on): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeTimezone"]["success"] is True - assert response.json()["data"]["changeTimezone"]["message"] is not None - assert response.json()["data"]["changeTimezone"]["code"] == 200 - assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert response.json()["data"]["system"]["changeTimezone"]["success"] is True + assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["system"]["changeTimezone"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeTimezone"]["timezone"] + == "Europe/Helsinki" + ) assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Helsinki" @@ -440,10 +445,13 @@ def test_graphql_change_timezone_on_undefined(authorized_client, undefined_confi ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeTimezone"]["success"] is True - assert response.json()["data"]["changeTimezone"]["message"] is not None - assert response.json()["data"]["changeTimezone"]["code"] == 200 - assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert response.json()["data"]["system"]["changeTimezone"]["success"] is True + assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["system"]["changeTimezone"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeTimezone"]["timezone"] + == "Europe/Helsinki" + ) assert ( read_json(undefined_config / "undefined.json")["timezone"] == "Europe/Helsinki" ) @@ -462,10 +470,10 @@ def test_graphql_change_timezone_without_timezone(authorized_client, turned_on): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeTimezone"]["success"] is False - assert response.json()["data"]["changeTimezone"]["message"] is not None - assert response.json()["data"]["changeTimezone"]["code"] == 400 - assert response.json()["data"]["changeTimezone"]["timezone"] is None + assert response.json()["data"]["system"]["changeTimezone"]["success"] is False + assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["system"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["system"]["changeTimezone"]["timezone"] is None assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -482,10 +490,10 @@ def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeTimezone"]["success"] is False - assert response.json()["data"]["changeTimezone"]["message"] is not None - assert response.json()["data"]["changeTimezone"]["code"] == 400 - assert response.json()["data"]["changeTimezone"]["timezone"] is None + assert response.json()["data"]["system"]["changeTimezone"]["success"] is False + assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None + assert 
response.json()["data"]["system"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["system"]["changeTimezone"]["timezone"] is None assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -589,12 +597,14 @@ def test_graphql_get_auto_upgrade_turned_off(authorized_client, turned_off): API_CHANGE_AUTO_UPGRADE_SETTINGS = """ mutation changeServerSettings($settings: AutoUpgradeSettingsInput!) { - changeAutoUpgradeSettings(settings: $settings) { - success - message - code - enableAutoUpgrade - allowReboot + system { + changeAutoUpgradeSettings(settings: $settings) { + success + message + code + enableAutoUpgrade + allowReboot + } } } """ @@ -634,14 +644,25 @@ def test_graphql_change_auto_upgrade(authorized_client, turned_on): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] is False ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] + is True + ) assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["enable"] is False assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["allowReboot"] is True @@ -662,14 +683,25 @@ def test_graphql_change_auto_upgrade_on_undefined(authorized_client, undefined_c ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] is False ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] + is True + ) assert ( read_json(undefined_config / "undefined.json")["autoUpgrade"]["enable"] is False ) @@ -695,14 +727,25 @@ def test_graphql_change_auto_upgrade_without_vlaues(authorized_client, no_values ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 
200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] is True ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(no_values / "no_values.json")["autoUpgrade"]["enable"] is True assert read_json(no_values / "no_values.json")["autoUpgrade"]["allowReboot"] is True @@ -723,14 +766,25 @@ def test_graphql_change_auto_upgrade_turned_off(authorized_client, turned_off): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] is True ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True @@ -752,14 +806,25 @@ def test_grphql_change_auto_upgrade_without_enable(authorized_client, turned_off ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] is False ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] + is True + ) assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True @@ -783,14 +848,25 @@ def test_graphql_change_auto_upgrade_without_allow_reboot( ) assert response.status_code == 200 assert response.json().get("data") is not None - 
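The changeAutoUpgradeSettings tests repeat the same five checks with only the expected flags varying. A hypothetical helper (not in the patch) makes that shared shape explicit:

def assert_autoupgrade(response, enable: bool, reboot: bool) -> None:
    # Every happy-path case returns code 200; only the flags differ.
    result = response.json()["data"]["system"]["changeAutoUpgradeSettings"]
    assert result["success"] is True
    assert result["message"] is not None
    assert result["code"] == 200
    assert result["enableAutoUpgrade"] is enable
    assert result["allowReboot"] is reboot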
assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] is True ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] + is False + ) assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False @@ -810,14 +886,25 @@ def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] + is False + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] is False ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False @@ -826,10 +913,12 @@ def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ API_PULL_SYSTEM_CONFIGURATION_MUTATION = """ mutation testPullSystemConfiguration { - pullRepositoryChanges { - success - message - code + system { + pullRepositoryChanges { + success + message + code + } } } """ @@ -861,9 +950,12 @@ def test_graphql_pull_system_configuration( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["pullRepositoryChanges"]["success"] is True - assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None - assert response.json()["data"]["pullRepositoryChanges"]["code"] == 200 + assert response.json()["data"]["system"]["pullRepositoryChanges"]["success"] is True + assert ( + response.json()["data"]["system"]["pullRepositoryChanges"]["message"] + is not None + ) + assert response.json()["data"]["system"]["pullRepositoryChanges"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == ["git", "pull"] @@ -886,9 +978,14 @@ def test_graphql_pull_system_broken_repo( assert response.status_code == 200 assert 
response.json().get("data") is not None - assert response.json()["data"]["pullRepositoryChanges"]["success"] is False - assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None - assert response.json()["data"]["pullRepositoryChanges"]["code"] == 500 + assert ( + response.json()["data"]["system"]["pullRepositoryChanges"]["success"] is False + ) + assert ( + response.json()["data"]["system"]["pullRepositoryChanges"]["message"] + is not None + ) + assert response.json()["data"]["system"]["pullRepositoryChanges"]["code"] == 500 assert mock_broken_service.call_count == 1 assert mock_os_chdir.call_count == 2 diff --git a/tests/test_graphql/test_system_nixos_tasks.py b/tests/test_graphql/test_system_nixos_tasks.py index 3e823b6..b292fda 100644 --- a/tests/test_graphql/test_system_nixos_tasks.py +++ b/tests/test_graphql/test_system_nixos_tasks.py @@ -54,10 +54,12 @@ def mock_subprocess_check_output(mocker): API_REBUILD_SYSTEM_MUTATION = """ mutation rebuildSystem { - runSystemRebuild { - success - message - code + system { + runSystemRebuild { + success + message + code + } } } """ @@ -86,9 +88,9 @@ def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["runSystemRebuild"]["success"] is True - assert response.json()["data"]["runSystemRebuild"]["message"] is not None - assert response.json()["data"]["runSystemRebuild"]["code"] == 200 + assert response.json()["data"]["system"]["runSystemRebuild"]["success"] is True + assert response.json()["data"]["system"]["runSystemRebuild"]["message"] is not None + assert response.json()["data"]["system"]["runSystemRebuild"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -99,10 +101,12 @@ def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): API_UPGRADE_SYSTEM_MUTATION = """ mutation upgradeSystem { - runSystemUpgrade { - success - message - code + system { + runSystemUpgrade { + success + message + code + } } } """ @@ -131,9 +135,9 @@ def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["runSystemUpgrade"]["success"] is True - assert response.json()["data"]["runSystemUpgrade"]["message"] is not None - assert response.json()["data"]["runSystemUpgrade"]["code"] == 200 + assert response.json()["data"]["system"]["runSystemUpgrade"]["success"] is True + assert response.json()["data"]["system"]["runSystemUpgrade"]["message"] is not None + assert response.json()["data"]["system"]["runSystemUpgrade"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -144,10 +148,12 @@ def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): API_ROLLBACK_SYSTEM_MUTATION = """ mutation rollbackSystem { - runSystemRollback { - success - message - code + system { + runSystemRollback { + success + message + code + } } } """ @@ -176,9 +182,9 @@ def test_graphql_system_rollback(authorized_client, mock_subprocess_popen): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["runSystemRollback"]["success"] is True - assert response.json()["data"]["runSystemRollback"]["message"] is not None - assert response.json()["data"]["runSystemRollback"]["code"] 
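These system-action tests never run systemctl or reboot for real; they patch subprocess.Popen and then inspect the recorded command line. A sketch of such a fixture, assuming pytest-mock's mocker (the project's actual fixture lives in the test module and may differ):

import pytest

@pytest.fixture
def mock_subprocess_popen(mocker):
    # Patch Popen globally; tests then read mock.call_args[0][0]
    # to see the exact command, e.g. ["systemctl", "start", ...].
    return mocker.patch("subprocess.Popen", autospec=True)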
== 200 + assert response.json()["data"]["system"]["runSystemRollback"]["success"] is True + assert response.json()["data"]["system"]["runSystemRollback"]["message"] is not None + assert response.json()["data"]["system"]["runSystemRollback"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -189,10 +195,12 @@ def test_graphql_system_rollback(authorized_client, mock_subprocess_popen): API_REBOOT_SYSTEM_MUTATION = """ mutation system { - rebootSystem { - success - message - code + system { + rebootSystem { + success + message + code + } } } """ @@ -223,9 +231,9 @@ def test_graphql_reboot_system(authorized_client, mock_subprocess_popen): assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["rebootSystem"]["success"] is True - assert response.json()["data"]["rebootSystem"]["message"] is not None - assert response.json()["data"]["rebootSystem"]["code"] == 200 + assert response.json()["data"]["system"]["rebootSystem"]["success"] is True + assert response.json()["data"]["system"]["rebootSystem"]["message"] is not None + assert response.json()["data"]["system"]["rebootSystem"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == ["reboot"] diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index 7a65736..9554195 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -295,13 +295,15 @@ def test_graphql_get_nonexistent_user( API_CREATE_USERS_MUTATION = """ mutation createUser($user: UserMutationInput!) { - createUser(user: $user) { - success - message - code - user { - username - sshKeys + users { + createUser(user: $user) { + success + message + code + user { + username + sshKeys + } } } } @@ -341,12 +343,12 @@ def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 201 - assert response.json()["data"]["createUser"]["success"] is True + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 201 + assert response.json()["data"]["users"]["createUser"]["success"] is True - assert response.json()["data"]["createUser"]["user"]["username"] == "user2" - assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] + assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["users"]["createUser"]["user"]["sshKeys"] == [] def test_graphql_add_undefined_settings( @@ -367,12 +369,12 @@ def test_graphql_add_undefined_settings( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 201 - assert response.json()["data"]["createUser"]["success"] is True + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 201 + assert response.json()["data"]["users"]["createUser"]["success"] is True - assert response.json()["data"]["createUser"]["user"]["username"] == "user2" - assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] + assert 
response.json()["data"]["users"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["users"]["createUser"]["user"]["sshKeys"] == [] def test_graphql_add_without_password( @@ -393,11 +395,11 @@ def test_graphql_add_without_password( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 400 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 400 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"] is None + assert response.json()["data"]["users"]["createUser"]["user"] is None def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_popen): @@ -416,11 +418,11 @@ def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_p assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 400 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 400 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"] is None + assert response.json()["data"]["users"]["createUser"]["user"] is None @pytest.mark.parametrize("username", invalid_usernames) @@ -442,11 +444,11 @@ def test_graphql_add_system_username( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 409 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 409 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"] is None + assert response.json()["data"]["users"]["createUser"]["user"] is None def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_popen): @@ -465,13 +467,13 @@ def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_ assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 409 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 409 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"]["username"] == "user1" + assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user1" assert ( - response.json()["data"]["createUser"]["user"]["sshKeys"][0] + response.json()["data"]["users"]["createUser"]["user"]["sshKeys"][0] == "ssh-rsa KEY user1@pc" ) @@ -492,13 +494,15 @@ def 
test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_pope assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 409 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 409 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"]["username"] == "tester" assert ( - response.json()["data"]["createUser"]["user"]["sshKeys"][0] + response.json()["data"]["users"]["createUser"]["user"]["username"] == "tester" + ) + assert ( + response.json()["data"]["users"]["createUser"]["user"]["sshKeys"][0] == "ssh-rsa KEY test@pc" ) @@ -518,11 +522,11 @@ def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_ ) assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 400 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 400 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"] is None + assert response.json()["data"]["users"]["createUser"]["user"] is None @pytest.mark.parametrize("username", ["", "1", "Ñ„Ñ‹Ñ€", "user1@", "^-^"]) @@ -544,19 +548,21 @@ def test_graphql_add_invalid_username( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 400 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 400 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"] is None + assert response.json()["data"]["users"]["createUser"]["user"] is None API_DELETE_USER_MUTATION = """ mutation deleteUser($username: String!) 
{ - deleteUser(username: $username) { - success - message - code + users { + deleteUser(username: $username) { + success + message + code + } } } """ @@ -585,9 +591,9 @@ def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_pope assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteUser"]["code"] == 200 - assert response.json()["data"]["deleteUser"]["message"] is not None - assert response.json()["data"]["deleteUser"]["success"] is True + assert response.json()["data"]["users"]["deleteUser"]["code"] == 200 + assert response.json()["data"]["users"]["deleteUser"]["message"] is not None + assert response.json()["data"]["users"]["deleteUser"]["success"] is True @pytest.mark.parametrize("username", ["", "def"]) @@ -604,9 +610,9 @@ def test_graphql_delete_nonexistent_users( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteUser"]["code"] == 404 - assert response.json()["data"]["deleteUser"]["message"] is not None - assert response.json()["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["users"]["deleteUser"]["code"] == 404 + assert response.json()["data"]["users"]["deleteUser"]["message"] is not None + assert response.json()["data"]["users"]["deleteUser"]["success"] is False @pytest.mark.parametrize("username", invalid_usernames) @@ -624,11 +630,11 @@ def test_graphql_delete_system_users( assert response.json().get("data") is not None assert ( - response.json()["data"]["deleteUser"]["code"] == 404 - or response.json()["data"]["deleteUser"]["code"] == 400 + response.json()["data"]["users"]["deleteUser"]["code"] == 404 + or response.json()["data"]["users"]["deleteUser"]["code"] == 400 ) - assert response.json()["data"]["deleteUser"]["message"] is not None - assert response.json()["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["users"]["deleteUser"]["message"] is not None + assert response.json()["data"]["users"]["deleteUser"]["success"] is False def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess_popen): @@ -642,20 +648,22 @@ def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteUser"]["code"] == 400 - assert response.json()["data"]["deleteUser"]["message"] is not None - assert response.json()["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["users"]["deleteUser"]["code"] == 400 + assert response.json()["data"]["users"]["deleteUser"]["message"] is not None + assert response.json()["data"]["users"]["deleteUser"]["success"] is False API_UPDATE_USER_MUTATION = """ mutation updateUser($user: UserMutationInput!) 
{ - updateUser(user: $user) { - success - message - code - user { - username - sshKeys + users { + updateUser(user: $user) { + success + message + code + user { + username + sshKeys + } } } } @@ -695,12 +703,12 @@ def test_graphql_update_user(authorized_client, some_users, mock_subprocess_pope assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["updateUser"]["code"] == 200 - assert response.json()["data"]["updateUser"]["message"] is not None - assert response.json()["data"]["updateUser"]["success"] is True + assert response.json()["data"]["users"]["updateUser"]["code"] == 200 + assert response.json()["data"]["users"]["updateUser"]["message"] is not None + assert response.json()["data"]["users"]["updateUser"]["success"] is True - assert response.json()["data"]["updateUser"]["user"]["username"] == "user1" - assert response.json()["data"]["updateUser"]["user"]["sshKeys"] == [ + assert response.json()["data"]["users"]["updateUser"]["user"]["username"] == "user1" + assert response.json()["data"]["users"]["updateUser"]["user"]["sshKeys"] == [ "ssh-rsa KEY user1@pc" ] assert mock_subprocess_popen.call_count == 1 @@ -724,9 +732,9 @@ def test_graphql_update_nonexistent_user( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["updateUser"]["code"] == 404 - assert response.json()["data"]["updateUser"]["message"] is not None - assert response.json()["data"]["updateUser"]["success"] is False + assert response.json()["data"]["users"]["updateUser"]["code"] == 404 + assert response.json()["data"]["users"]["updateUser"]["message"] is not None + assert response.json()["data"]["users"]["updateUser"]["success"] is False - assert response.json()["data"]["updateUser"]["user"] is None + assert response.json()["data"]["users"]["updateUser"]["user"] is None assert mock_subprocess_popen.call_count == 1 diff --git a/tests/test_model_storage.py b/tests/test_model_storage.py index d26fabb..c9ab582 100644 --- a/tests/test_model_storage.py +++ b/tests/test_model_storage.py @@ -10,6 +10,7 @@ from selfprivacy_api.utils.redis_pool import RedisPool TEST_KEY = "model_storage" redis = RedisPool().get_connection() + @pytest.fixture() def clean_redis(): redis.delete(TEST_KEY) @@ -19,18 +20,14 @@ class DummyModel(BaseModel): name: str date: Optional[datetime] + def test_store_retrieve(): - model = DummyModel( - name= "test", - date= datetime.now() - ) + model = DummyModel(name="test", date=datetime.now()) store_model_as_hash(redis, TEST_KEY, model) - assert hash_as_model(redis, TEST_KEY, DummyModel) == model + assert hash_as_model(redis, TEST_KEY, DummyModel) == model + def test_store_retrieve_none(): - model = DummyModel( - name= "test", - date= None - ) + model = DummyModel(name="test", date=None) store_model_as_hash(redis, TEST_KEY, model) - assert hash_as_model(redis, TEST_KEY, DummyModel) == model + assert hash_as_model(redis, TEST_KEY, DummyModel) == model From c9cfb7d7bc83597d165779fdd45d4c9d01917575 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 23 Jun 2023 04:50:00 +0300 Subject: [PATCH 172/537] ci: fix killing redis-server --- .drone.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.drone.yml b/.drone.yml index 24ab5da..ffef56a 100644 --- a/.drone.yml +++ b/.drone.yml @@ -5,7 +5,7 @@ name: default steps: - name: Run Tests and Generate Coverage Report commands: - - kill $(ps aux | grep '[r]edis-server 127.0.0.1:6389' | awk '{print $2}') + - kill $(ps aux | grep 
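The model-storage tests above pin down a round-trip with two subtleties: a Redis hash holds only flat string fields, and None cannot be stored in one at all. A simplified sketch of what helpers like store_model_as_hash and hash_as_model have to do (the real implementations live in selfprivacy_api.utils.redis_model_storage and may differ in detail; a client with decode_responses=True is assumed):

from datetime import datetime
from typing import Optional, Type, TypeVar
from pydantic import BaseModel

T = TypeVar("T", bound=BaseModel)

def store_model_as_hash(redis, key: str, model: BaseModel) -> None:
    for field, value in model.dict().items():
        if value is None:
            continue  # Redis has no None; omit the field entirely
        if isinstance(value, datetime):
            value = value.isoformat()
        redis.hset(key, field, str(value))

def hash_as_model(redis, key: str, model_class: Type[T]) -> Optional[T]:
    data = redis.hgetall(key)
    if not data:
        return None
    # pydantic parses the ISO strings back into datetimes; omitted
    # Optional fields fall back to their default of None.
    return model_class(**data)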
'redis-server 127.0.0.1:6389' | awk '{print $2}') - redis-server --bind 127.0.0.1 --port 6389 >/dev/null & - coverage run -m pytest -q - coverage xml From e7e0fdc4a1f18d4fc92f6f552fa71830c7155da8 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 23 Jun 2023 12:40:10 +0300 Subject: [PATCH 173/537] refactor(backups): fix typing errors --- selfprivacy_api/backup/__init__.py | 179 ++++++++++++------ selfprivacy_api/backup/backuppers/__init__.py | 6 +- .../backup/backuppers/none_backupper.py | 6 +- .../backup/backuppers/restic_backupper.py | 47 +++-- selfprivacy_api/backup/jobs.py | 2 + selfprivacy_api/backup/providers/__init__.py | 26 ++- selfprivacy_api/backup/providers/backblaze.py | 9 +- .../backup/providers/local_file.py | 10 +- selfprivacy_api/backup/providers/memory.py | 9 +- selfprivacy_api/backup/providers/none.py | 13 ++ selfprivacy_api/backup/providers/provider.py | 15 +- selfprivacy_api/backup/storage.py | 13 +- .../graphql/mutations/backup_mutations.py | 45 ++++- tests/test_graphql/test_backup.py | 4 +- 14 files changed, 265 insertions(+), 119 deletions(-) create mode 100644 selfprivacy_api/backup/providers/none.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index bb17254..35b98eb 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,3 +1,4 @@ +from operator import add from typing import List, Optional from datetime import datetime, timedelta from os import statvfs @@ -9,7 +10,9 @@ from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service -from selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider @@ -33,12 +36,15 @@ DEFAULT_JSON_PROVIDER = { class Backups: """A singleton controller for backups""" - provider: AbstractBackupProvider - @staticmethod def set_localfile_repo(file_path: str): - ProviderClass = get_provider(BackupProvider.FILE) - provider = ProviderClass(login="", key="", location=file_path, repo_id="") + ProviderClass = get_provider(BackupProviderEnum.FILE) + provider = ProviderClass( + login="", + key="", + location=file_path, + repo_id="", + ) Storage.store_provider(provider) @staticmethod @@ -67,7 +73,14 @@ class Backups: @staticmethod def _service_ids_to_back_up(time: datetime) -> List[str]: services = Storage.services_with_autobackup() - return [id for id in services if Backups.is_time_to_backup_service(id, time)] + return [ + id + for id in services + if Backups.is_time_to_backup_service( + id, + time, + ) + ] @staticmethod def services_to_back_up(time: datetime) -> List[Service]: @@ -75,14 +88,17 @@ class Backups: for id in Backups._service_ids_to_back_up(time): service = get_service_by_id(id) if service is None: - raise ValueError("Cannot look up a service scheduled for backup!") + raise ValueError( + "Cannot look up a service scheduled for backup!", + ) result.append(service) return result @staticmethod def is_time_to_backup(time: datetime) -> bool: """ - Intended as a time validator for huey cron scheduler of automatic backups + Intended as a time validator for huey cron scheduler + of automatic backups """ return Backups._service_ids_to_back_up(time) != [] @@ -97,7 +113,8 @@ class Backups: last_backup = 
Storage.get_last_backup_time(service_id) if last_backup is None: - return True # queue a backup immediately if there are no previous backups + # queue a backup immediately if there are no previous backups + return True if time > last_backup + timedelta(minutes=period): return True @@ -121,7 +138,8 @@ class Backups: def set_autobackup_period_minutes(minutes: int): """ 0 and negative numbers are equivalent to disable. - Setting to a positive number may result in a backup very soon if some services are not backed up. + Setting to a positive number may result in a backup very soon + if some services are not backed up. """ if minutes <= 0: Backups.disable_all_autobackup() @@ -130,7 +148,10 @@ class Backups: @staticmethod def disable_all_autobackup(): - """disables all automatic backing up, but does not change per-service settings""" + """ + Disables all automatic backing up, + but does not change per-service settings + """ Storage.delete_backup_period() @staticmethod @@ -138,17 +159,38 @@ class Backups: return Backups.lookup_provider() @staticmethod - def set_provider(kind: str, login: str, key: str, location: str, repo_id: str = ""): - provider = Backups.construct_provider(kind, login, key, location, repo_id) + def set_provider( + kind: BackupProviderEnum, + login: str, + key: str, + location: str, + repo_id: str = "", + ): + provider = Backups.construct_provider( + kind, + login, + key, + location, + repo_id, + ) Storage.store_provider(provider) @staticmethod def construct_provider( - kind: str, login: str, key: str, location: str, repo_id: str = "" - ): - provider_class = get_provider(BackupProvider[kind]) + kind: BackupProviderEnum, + login: str, + key: str, + location: str, + repo_id: str = "", + ) -> AbstractBackupProvider: + provider_class = get_provider(kind) - return provider_class(login=login, key=key, location=location, repo_id=repo_id) + return provider_class( + login=login, + key=key, + location=location, + repo_id=repo_id, + ) @staticmethod def reset(reset_json=True): @@ -156,7 +198,8 @@ class Backups: if reset_json: try: Backups.reset_provider_json() - except FileNotFoundError: # if there is no userdata file, we do not need to reset it + except FileNotFoundError: + # if there is no userdata file, we do not need to reset it pass @staticmethod @@ -175,7 +218,7 @@ class Backups: return json_provider none_provider = Backups.construct_provider( - "NONE", login="", key="", location="" + BackupProviderEnum.NONE, login="", key="", location="" ) Storage.store_provider(none_provider) return none_provider @@ -200,15 +243,18 @@ class Backups: if provider_dict == DEFAULT_JSON_PROVIDER: return None + try: + return Backups.construct_provider( + kind=BackupProviderEnum[provider_dict["provider"]], + login=provider_dict["accountId"], + key=provider_dict["accountKey"], + location=provider_dict["bucket"], + ) + except KeyError: + return None - return Backups.construct_provider( - kind=provider_dict["provider"], - login=provider_dict["accountId"], - key=provider_dict["accountKey"], - location=provider_dict["bucket"], - ) - - def reset_provider_json() -> AbstractBackupProvider: + @staticmethod + def reset_provider_json() -> None: with WriteUserData() as user_data: if "backblaze" in user_data.keys(): del user_data["backblaze"] @@ -216,12 +262,12 @@ class Backups: user_data["backup"] = DEFAULT_JSON_PROVIDER @staticmethod - def load_provider_redis() -> AbstractBackupProvider: + def load_provider_redis() -> Optional[AbstractBackupProvider]: provider_model = Storage.load_provider() if provider_model is 
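Restating just the time arithmetic of the due-check above as a standalone function (a sketch; the real method also reads its period and timestamps from storage):

from datetime import datetime, timedelta
from typing import Optional

def is_due(last_backup: Optional[datetime], period_minutes: int,
           now: datetime) -> bool:
    if period_minutes <= 0:
        return False  # zero or negative period means autobackup is off
    if last_backup is None:
        return True  # no previous backup: queue one immediately
    return now > last_backup + timedelta(minutes=period_minutes)

assert is_due(None, 60, datetime(2023, 6, 23, 13, 0))
assert not is_due(datetime(2023, 6, 23, 12, 30), 60,
                  datetime(2023, 6, 23, 13, 0))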
None: return None return Backups.construct_provider( - provider_model.kind, + BackupProviderEnum[provider_model.kind], provider_model.login, provider_model.key, provider_model.location, @@ -232,7 +278,7 @@ class Backups: def back_up(service: Service): """The top-level function to back up a service""" folders = service.get_folders() - repo_name = service.get_id() + tag = service.get_id() job = get_backup_job(service) if job is None: @@ -241,8 +287,11 @@ class Backups: try: service.pre_backup() - snapshot = Backups.provider().backuper.start_backup(folders, repo_name) - Backups._store_last_snapshot(repo_name, snapshot) + snapshot = Backups.provider().backuper.start_backup( + folders, + tag, + ) + Backups._store_last_snapshot(tag, snapshot) service.post_restore() except Exception as e: Jobs.update(job, status=JobStatus.ERROR) @@ -252,10 +301,7 @@ class Backups: return snapshot @staticmethod - def init_repo(service: Optional[Service] = None): - if service is not None: - repo_name = service.get_id() - + def init_repo(): Backups.provider().backuper.init() Storage.mark_as_init() @@ -274,7 +320,13 @@ class Backups: @staticmethod def get_snapshots(service: Service) -> List[Snapshot]: snapshots = Backups.get_all_snapshots() - return [snap for snap in snapshots if snap.service_name == service.get_id()] + service_id = service.get_id() + return list( + filter( + lambda snap: snap.service_name == service_id, + snapshots, + ) + ) @staticmethod def get_all_snapshots() -> List[Snapshot]: @@ -314,10 +366,12 @@ class Backups: # to be deprecated/internalized in favor of restore_snapshot() @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): - repo_name = service.get_id() folders = service.get_folders() - Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folders) + Backups.provider().backuper.restore_from_backup( + snapshot_id, + folders, + ) @staticmethod def assert_restorable(snapshot: Snapshot): @@ -327,45 +381,58 @@ class Backups: f"snapshot has a nonexistent service: {snapshot.service_name}" ) - needed_space = Backups.snapshot_restored_size(snapshot) + needed_space = Backups.service_snapshot_size(snapshot.id) available_space = Backups.space_usable_for_service(service) if needed_space > available_space: raise ValueError( - f"we only have {available_space} bytes but snapshot needs{ needed_space}" + f"we only have {available_space} bytes " + f"but snapshot needs {needed_space}" ) @staticmethod def restore_snapshot(snapshot: Snapshot): service = get_service_by_id(snapshot.service_name) + if service is None: + raise ValueError( + f"snapshot has a nonexistent service: {snapshot.service_name}" + ) + job = get_restore_job(service) if job is None: job = add_restore_job(snapshot) - Jobs.update(job, status=JobStatus.RUNNING) + Jobs.update( + job, + status=JobStatus.RUNNING, + ) try: Backups.assert_restorable(snapshot) - Backups.restore_service_from_snapshot(service, snapshot.id) + Backups.restore_service_from_snapshot( + service, + snapshot.id, + ) service.post_restore() except Exception as e: - Jobs.update(job, status=JobStatus.ERROR) + Jobs.update( + job, + status=JobStatus.ERROR, + ) raise e - Jobs.update(job, status=JobStatus.FINISHED) - - @staticmethod - def service_snapshot_size(service: Service, snapshot_id: str) -> float: - repo_name = service.get_id() - return Backups.provider().backuper.restored_size(repo_name, snapshot_id) - - @staticmethod - def snapshot_restored_size(snapshot: Snapshot) -> float: - return Backups.service_snapshot_size( - 
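restore_snapshot() marks its job RUNNING, flips it to ERROR and re-raises on failure, and marks it FINISHED on success; back_up() follows the same ERROR/FINISHED pattern. Distilled into one function (a sketch; the import path for the jobs framework is assumed, not shown in the diff):

from selfprivacy_api.jobs import Jobs, JobStatus  # assumed import path

def run_as_job(job, work):
    Jobs.update(job, status=JobStatus.RUNNING)
    try:
        result = work()
    except Exception as error:
        Jobs.update(job, status=JobStatus.ERROR)
        raise error
    Jobs.update(job, status=JobStatus.FINISHED)
    return result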
get_service_by_id(snapshot.service_name), snapshot.id + Jobs.update( + job, + status=JobStatus.FINISHED, ) @staticmethod - def space_usable_for_service(service: Service) -> bool: + def service_snapshot_size(snapshot_id: str) -> int: + return Backups.provider().backuper.restored_size( + snapshot_id, + ) + + @staticmethod + def space_usable_for_service(service: Service) -> int: folders = service.get_folders() if folders == []: raise ValueError("unallocated service", service.get_id()) diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index f20496d..169a502 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -26,14 +26,14 @@ class AbstractBackuper(ABC): raise NotImplementedError @abstractmethod - def init(self, repo_name): + def init(self): raise NotImplementedError @abstractmethod - def restore_from_backup(self, repo_name: str, snapshot_id: str, folders: List[str]): + def restore_from_backup(self, snapshot_id: str, folders: List[str]): """Restore a target folder using a snapshot""" raise NotImplementedError @abstractmethod - def restored_size(self, repo_name, snapshot_id) -> float: + def restored_size(self, snapshot_id: str) -> int: raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index e687323..d0f0dda 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -18,12 +18,12 @@ class NoneBackupper(AbstractBackuper): """Get all snapshots from the repo""" return [] - def init(self, repo_name): + def init(self): raise NotImplementedError - def restore_from_backup(self, repo_name: str, snapshot_id: str, folders: List[str]): + def restore_from_backup(self, snapshot_id: str, folders: List[str]): """Restore a target folder using a snapshot""" raise NotImplementedError - def restored_size(self, repo_name, snapshot_id) -> float: + def restored_size(self, snapshot_id: str) -> int: raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 7b58a2c..5a16812 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -50,7 +50,7 @@ class ResticBackuper(AbstractBackuper): def _password_command(self): return f"echo {LocalBackupSecret.get()}" - def restic_command(self, *args, branch_name: str = ""): + def restic_command(self, *args, tag: str = ""): command = [ "restic", "-o", @@ -60,11 +60,11 @@ class ResticBackuper(AbstractBackuper): "--password-command", self._password_command(), ] - if branch_name != "": + if tag != "": command.extend( [ "--tag", - branch_name, + tag, ] ) if args != []: @@ -92,10 +92,10 @@ class ResticBackuper(AbstractBackuper): universal_newlines=True, ) as handle: for line in iter(handle.stdout.readline, ""): - if not "NOTICE:" in line: + if "NOTICE:" not in line: yield line - def start_backup(self, folders: List[str], repo_name: str): + def start_backup(self, folders: List[str], tag: str): """ Start backup with restic """ @@ -107,16 +107,16 @@ class ResticBackuper(AbstractBackuper): "backup", "--json", folders, - branch_name=repo_name, + tag=tag, ) messages = [] - job = get_backup_job(get_service_by_id(repo_name)) + job = get_backup_job(get_service_by_id(tag)) try: for raw_message in ResticBackuper.output_yielder(backup_command): message = 
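The diff context ends before the body of space_usable_for_service(), so only its guard clause is visible here. Independent of what the real implementation does, the module imports statvfs, and the standard statvfs arithmetic for "bytes usable on the filesystem holding this folder" looks like this (a generic example, not the project's code):

from os import statvfs

def usable_bytes(folder: str) -> int:
    stat = statvfs(folder)
    # f_frsize is the fragment (block) size; f_bavail counts the
    # free blocks available to unprivileged processes.
    return stat.f_frsize * stat.f_bavail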
self.parse_message(raw_message, job) messages.append(message) - return ResticBackuper._snapshot_from_backup_messages(messages, repo_name) + return ResticBackuper._snapshot_from_backup_messages(messages, tag) except ValueError as e: raise ValueError("could not create a snapshot: ", messages) from e @@ -128,7 +128,7 @@ class ResticBackuper(AbstractBackuper): raise ValueError("no summary message in restic json output") def parse_message(self, raw_message, job=None) -> object: - message = self.parse_json_output(raw_message) + message = ResticBackuper.parse_json_output(raw_message) if message["message_type"] == "status": if job is not None: # only update status if we run under some job Jobs.update( @@ -168,12 +168,12 @@ class ResticBackuper(AbstractBackuper): with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle: output = handle.communicate()[0].decode("utf-8") - if not self.has_json(output): + if not ResticBackuper.has_json(output): return False # raise NotImplementedError("error(big): " + output) return True - def restored_size(self, repo_name, snapshot_id) -> float: + def restored_size(self, snapshot_id: str) -> int: """ Size of a snapshot """ @@ -183,15 +183,19 @@ class ResticBackuper(AbstractBackuper): "--json", ) - with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle: + with subprocess.Popen( + command, + stdout=subprocess.PIPE, + shell=False, + ) as handle: output = handle.communicate()[0].decode("utf-8") try: - parsed_output = self.parse_json_output(output) + parsed_output = ResticBackuper.parse_json_output(output) return parsed_output["total_size"] except ValueError as e: raise ValueError("cannot restore a snapshot: " + output) from e - def restore_from_backup(self, repo_name, snapshot_id, folders): + def restore_from_backup(self, snapshot_id, folders): """ Restore from backup with restic """ @@ -235,7 +239,7 @@ class ResticBackuper(AbstractBackuper): if "Is there a repository at the following location?" in output: raise ValueError("No repository! 
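restored_size() above depends on pulling JSON out of output that restic may prefix with warnings. The approach, boiled down to a standalone example (simplified: the real parse_json_output also splits multiple newline-separated JSON messages):

import json

def extract_first_json(output: str):
    # Parse from the first '[' or '{', whichever comes first.
    indices = [i for i in (output.find("["), output.find("{")) if i != -1]
    if not indices:
        raise ValueError("no JSON in restic output: " + output)
    return json.loads(output[min(indices):])

assert extract_first_json('warning: repo locked\n{"total_size": 42}') == {
    "total_size": 42
}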
: " + output) try: - return self.parse_json_output(output) + return ResticBackuper.parse_json_output(output) except ValueError as e: raise ValueError("Cannot load snapshots: ") from e @@ -252,8 +256,9 @@ class ResticBackuper(AbstractBackuper): snapshots.append(snapshot) return snapshots - def parse_json_output(self, output: str) -> object: - starting_index = self.json_start(output) + @staticmethod + def parse_json_output(output: str) -> object: + starting_index = ResticBackuper.json_start(output) if starting_index == -1: raise ValueError("There is no json in the restic output : " + output) @@ -273,7 +278,8 @@ class ResticBackuper(AbstractBackuper): result_array.append(json.loads(message)) return result_array - def json_start(self, output: str) -> int: + @staticmethod + def json_start(output: str) -> int: indices = [ output.find("["), output.find("{"), @@ -284,7 +290,8 @@ class ResticBackuper(AbstractBackuper): return -1 return min(indices) - def has_json(self, output: str) -> bool: - if self.json_start(output) == -1: + @staticmethod + def has_json(output: str) -> bool: + if ResticBackuper.json_start(output) == -1: return False return True diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py index 5a9cb0d..38e9ad1 100644 --- a/selfprivacy_api/backup/jobs.py +++ b/selfprivacy_api/backup/jobs.py @@ -51,6 +51,8 @@ def add_backup_job(service: Service) -> Job: def add_restore_job(snapshot: Snapshot) -> Job: service = get_service_by_id(snapshot.service_name) + if service is None: + raise ValueError(f"no such service: {snapshot.service_name}") if is_something_queued_for(service): message = ( f"Cannot start a restore of {service.get_id()}, another operation is queued: " diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py index bac51e5..4f8bb75 100644 --- a/selfprivacy_api/backup/providers/__init__.py +++ b/selfprivacy_api/backup/providers/__init__.py @@ -1,23 +1,29 @@ -from selfprivacy_api.graphql.queries.providers import BackupProvider +from typing import Type + +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.backup.providers.memory import InMemoryBackup from selfprivacy_api.backup.providers.local_file import LocalFileBackup +from selfprivacy_api.backup.providers.none import NoBackups -PROVIDER_MAPPING = { - BackupProvider.BACKBLAZE: Backblaze, - BackupProvider.MEMORY: InMemoryBackup, - BackupProvider.FILE: LocalFileBackup, - BackupProvider.NONE: AbstractBackupProvider, +PROVIDER_MAPPING: dict[BackupProviderEnum, Type[AbstractBackupProvider]] = { + BackupProviderEnum.BACKBLAZE: Backblaze, + BackupProviderEnum.MEMORY: InMemoryBackup, + BackupProviderEnum.FILE: LocalFileBackup, + BackupProviderEnum.NONE: NoBackups, } -def get_provider(provider_type: BackupProvider) -> AbstractBackupProvider: +def get_provider( + provider_type: BackupProviderEnum, +) -> Type[AbstractBackupProvider]: return PROVIDER_MAPPING[provider_type] def get_kind(provider: AbstractBackupProvider) -> str: - for key, value in PROVIDER_MAPPING.items(): - if isinstance(provider, value): - return key.value + """Get the kind of the provider in the form of a string""" + return provider.name.value diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index 3f2d873..b826bdd 100644 --- 
a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -1,8 +1,13 @@ from .provider import AbstractBackupProvider from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) class Backblaze(AbstractBackupProvider): - backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:") + @property + def backuper(self): + return ResticBackuper("--b2-account", "--b2-key", ":b2:") - name = "BACKBLAZE" + name = BackupProviderEnum.BACKBLAZE diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index d31417e..f4fda29 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -1,7 +1,13 @@ from .provider import AbstractBackupProvider from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) class LocalFileBackup(AbstractBackupProvider): - backuper = ResticBackuper("", "", ":local:") - name = "FILE" + @property + def backuper(self): + return ResticBackuper("", "", ":local:") + + name = BackupProviderEnum.FILE diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py index e73af51..6d1ba48 100644 --- a/selfprivacy_api/backup/providers/memory.py +++ b/selfprivacy_api/backup/providers/memory.py @@ -1,8 +1,13 @@ from .provider import AbstractBackupProvider from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) class InMemoryBackup(AbstractBackupProvider): - backuper = ResticBackuper("", "", ":memory:") + @property + def backuper(self): + return ResticBackuper("", "", ":memory:") - name = "MEMORY" + name = BackupProviderEnum.MEMORY diff --git a/selfprivacy_api/backup/providers/none.py b/selfprivacy_api/backup/providers/none.py new file mode 100644 index 0000000..8c8bfba --- /dev/null +++ b/selfprivacy_api/backup/providers/none.py @@ -0,0 +1,13 @@ +from .provider import AbstractBackupProvider +from selfprivacy_api.backup.backuppers.none_backupper import NoneBackupper +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) + + +class NoBackups(AbstractBackupProvider): + @property + def backuper(self): + return NoneBackupper() + + name = BackupProviderEnum.NONE diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index dddc53f..8ee1ced 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -1,19 +1,22 @@ """ An abstract class for BackBlaze, S3 etc. 
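Taken together with the mapping in providers/__init__.py, the classes above form a small registry keyed by the GraphQL enum. A usage sketch (assuming the enum member's value matches its name, as the "MEMORY" string suggests):

from selfprivacy_api.backup.providers import get_provider, get_kind
from selfprivacy_api.graphql.queries.providers import BackupProvider

ProviderClass = get_provider(BackupProvider.MEMORY)  # -> InMemoryBackup
provider = ProviderClass(login="", key="", location="")
assert get_kind(provider) == "MEMORY"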
-It assumes that while some providers are supported via restic/rclone, others may -require different backends +It assumes that while some providers are supported via restic/rclone, others +may require different backends """ -from abc import ABC +from abc import ABC, abstractmethod from selfprivacy_api.backup.backuppers import AbstractBackuper -from selfprivacy_api.backup.backuppers.none_backupper import NoneBackupper +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) class AbstractBackupProvider(ABC): @property + @abstractmethod def backuper(self) -> AbstractBackuper: - return NoneBackupper() + raise NotImplementedError - name = "NONE" + name: BackupProviderEnum def __init__(self, login="", key="", location="", repo_id=""): self.backuper.set_creds(login, key, location) diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index bff4047..29a5462 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -5,7 +5,10 @@ from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.models.backup.provider import BackupProviderModel from selfprivacy_api.utils.redis_pool import RedisPool -from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model +from selfprivacy_api.utils.redis_model_storage import ( + store_model_as_hash, + hash_as_model, +) from selfprivacy_api.services.service import Service @@ -153,8 +156,12 @@ class Storage: ) @staticmethod - def load_provider() -> BackupProviderModel: - provider_model = hash_as_model(redis, REDIS_PROVIDER_KEY, BackupProviderModel) + def load_provider() -> Optional[BackupProviderModel]: + provider_model = hash_as_model( + redis, + REDIS_PROVIDER_KEY, + BackupProviderModel, + ) return provider_model @staticmethod diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index f2bade0..6ab3e1a 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -49,7 +49,7 @@ class BackupMutations: ) -> GenericBackupConfigReturn: """Initialize a new repository""" Backups.set_provider( - kind=repository.provider.value, + kind=repository.provider, login=repository.login, key=repository.password, location=repository.location_name, @@ -57,7 +57,10 @@ class BackupMutations: ) Backups.init_repo() return GenericBackupConfigReturn( - success=True, message="", code="200", configuration=Backup().configuration() + success=True, + message="", + code="200", + configuration=Backup().configuration(), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -65,7 +68,10 @@ class BackupMutations: """Remove repository""" Backups.reset() return GenericBackupConfigReturn( - success=True, message="", code="200", configuration=Backup().configuration() + success=True, + message="", + code="200", + configuration=Backup().configuration(), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -79,7 +85,10 @@ class BackupMutations: Backups.set_autobackup_period_minutes(0) return GenericBackupConfigReturn( - success=True, message="", code="200", configuration=Backup().configuration() + success=True, + message="", + code="200", + configuration=Backup().configuration(), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -97,36 +106,52 @@ class BackupMutations: job = add_backup_job(service) start_backup(service) - job = job_to_api_job(job) return GenericJobMutationReturn( 
success=True, code=200, message="Backup job queued", - job=job, + job=job_to_api_job(job), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def restore_backup(self, snapshot_id: str) -> GenericJobMutationReturn: """Restore backup""" snap = Backups.get_snapshot_by_id(snapshot_id) - service = get_service_by_id(snap.service_name) if snap is None: return GenericJobMutationReturn( success=False, - code=400, + code=404, message=f"No such snapshot: {snapshot_id}", job=None, ) - job = add_restore_job(snap) + service = get_service_by_id(snap.service_name) + if service is None: + return GenericJobMutationReturn( + success=False, + code=404, + message=f"nonexistent service: {snap.service_name}", + job=None, + ) + + try: + job = add_restore_job(snap) + except ValueError as e: + return GenericJobMutationReturn( + success=False, + code=400, + message=str(e), + job=None, + ) + restore_snapshot(snap) return GenericJobMutationReturn( success=True, code=200, message="restore job created", - job=job, + job=job_to_api_job(job), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 428e3dd..c1d668e 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -73,7 +73,7 @@ def dummy_service(tmpdir, backups, raw_dummy_service): assert not path.exists(repo_path) # assert not repo_path - Backups.init_repo(service) + Backups.init_repo() # register our service services.services.append(service) @@ -232,7 +232,7 @@ def test_restore(backups, dummy_service): def test_sizing(backups, dummy_service): Backups.back_up(dummy_service) snap = Backups.get_snapshots(dummy_service)[0] - size = Backups.service_snapshot_size(dummy_service, snap.id) + size = Backups.service_snapshot_size(snap.id) assert size is not None assert size > 0 From 0f1d8e22f228f9e25cc79b3558ffd16e49ea6cbf Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 23 Jun 2023 12:57:39 +0300 Subject: [PATCH 174/537] ci: ignore the failure when trying to kill redis --- .drone.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.drone.yml b/.drone.yml index ffef56a..8d99fd5 100644 --- a/.drone.yml +++ b/.drone.yml @@ -5,7 +5,7 @@ name: default steps: - name: Run Tests and Generate Coverage Report commands: - - kill $(ps aux | grep 'redis-server 127.0.0.1:6389' | awk '{print $2}') + - kill $(ps aux | grep 'redis-server 127.0.0.1:6389' | awk '{print $2}') || true - redis-server --bind 127.0.0.1 --port 6389 >/dev/null & - coverage run -m pytest -q - coverage xml From 1f558d6cf9a59f040aa17e4f2a1662aa4bbe006d Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 23 Jun 2023 13:02:52 +0300 Subject: [PATCH 175/537] ci: only run on push event --- .drone.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.drone.yml b/.drone.yml index 8d99fd5..a1bd384 100644 --- a/.drone.yml +++ b/.drone.yml @@ -26,3 +26,7 @@ steps: node: server: builder + +trigger: + event: + - push From b86d0cd8508bcc831613ff7ca8c8a3107cca962e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Apr 2023 13:20:41 +0000 Subject: [PATCH 176/537] fix(redis): Do not shut down redis on ctrl c see https://github.com/NixOS/nix/issues/2141 --- shell.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/shell.nix b/shell.nix index d7f08b4..b94b71d 100644 --- a/shell.nix +++ b/shell.nix @@ -35,7 +35,8 @@ pkgs.mkShell { # for example. printenv will not fetch the value of an attribute. 
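This fix wraps redis-server in setsid (see the hunk that follows), so the Ctrl-C that interrupts a foreground command, and signals its whole process group, no longer takes Redis down with it. For comparison, the same idea in Python (illustrative only; the project applies it in the shell hook, not in Python):

import subprocess

redis = subprocess.Popen(
    ["redis-server", "--bind", "127.0.0.1", "--port", "6379"],
    stdout=subprocess.DEVNULL,
    stderr=subprocess.DEVNULL,
    start_new_session=True,  # run the child in its own session:
)                            # the equivalent of wrapping it in setsid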
export USE_REDIS_PORT=6379 pkill redis-server - redis-server --bind 127.0.0.1 --port $USE_REDIS_PORT >/dev/null & + sleep 2 + setsid redis-server --bind 127.0.0.1 --port $USE_REDIS_PORT >/dev/null 2>/dev/null & # maybe set more env-vars ''; } From 1664f857ea86bc48e658ee5bec16de6c9b35bdb3 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 23 Jun 2023 14:36:16 +0300 Subject: [PATCH 177/537] fix(backups): Providers were not initialized correctly --- selfprivacy_api/backup/providers/backblaze.py | 4 +--- selfprivacy_api/backup/providers/local_file.py | 4 +--- selfprivacy_api/backup/providers/memory.py | 4 +--- selfprivacy_api/backup/providers/none.py | 4 +--- selfprivacy_api/backup/providers/provider.py | 5 +---- selfprivacy_api/graphql/mutations/backup_mutations.py | 6 +++--- selfprivacy_api/graphql/queries/backup.py | 2 +- 7 files changed, 9 insertions(+), 20 deletions(-) diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index b826bdd..349c5c7 100644 --- a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -6,8 +6,6 @@ from selfprivacy_api.graphql.queries.providers import ( class Backblaze(AbstractBackupProvider): - @property - def backuper(self): - return ResticBackuper("--b2-account", "--b2-key", ":b2:") + backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:") name = BackupProviderEnum.BACKBLAZE diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index f4fda29..9afc61a 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -6,8 +6,6 @@ from selfprivacy_api.graphql.queries.providers import ( class LocalFileBackup(AbstractBackupProvider): - @property - def backuper(self): - return ResticBackuper("", "", ":local:") + backuper = ResticBackuper("", "", ":local:") name = BackupProviderEnum.FILE diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py index 6d1ba48..31a4cbb 100644 --- a/selfprivacy_api/backup/providers/memory.py +++ b/selfprivacy_api/backup/providers/memory.py @@ -6,8 +6,6 @@ from selfprivacy_api.graphql.queries.providers import ( class InMemoryBackup(AbstractBackupProvider): - @property - def backuper(self): - return ResticBackuper("", "", ":memory:") + backuper = ResticBackuper("", "", ":memory:") name = BackupProviderEnum.MEMORY diff --git a/selfprivacy_api/backup/providers/none.py b/selfprivacy_api/backup/providers/none.py index 8c8bfba..f190324 100644 --- a/selfprivacy_api/backup/providers/none.py +++ b/selfprivacy_api/backup/providers/none.py @@ -6,8 +6,6 @@ from selfprivacy_api.graphql.queries.providers import ( class NoBackups(AbstractBackupProvider): - @property - def backuper(self): - return NoneBackupper() + backuper = NoneBackupper() name = BackupProviderEnum.NONE diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index 8ee1ced..3c4fc43 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -11,10 +11,7 @@ from selfprivacy_api.graphql.queries.providers import ( class AbstractBackupProvider(ABC): - @property - @abstractmethod - def backuper(self) -> AbstractBackuper: - raise NotImplementedError + backuper: AbstractBackuper name: BackupProviderEnum diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index
6ab3e1a..b7720c5 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -59,7 +59,7 @@ class BackupMutations: return GenericBackupConfigReturn( success=True, message="", - code="200", + code=200, configuration=Backup().configuration(), ) @@ -70,7 +70,7 @@ class BackupMutations: return GenericBackupConfigReturn( success=True, message="", - code="200", + code=200, configuration=Backup().configuration(), ) @@ -87,7 +87,7 @@ class BackupMutations: return GenericBackupConfigReturn( success=True, message="", - code="200", + code=200, configuration=Backup().configuration(), ) diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 9858543..322dab0 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -36,7 +36,7 @@ class Backup: @strawberry.field def configuration(self) -> BackupConfiguration: return BackupConfiguration( - provider=BackupProvider[Backups.provider().name], + provider=Backups.provider().name, encryption_key=LocalBackupSecret.get(), is_initialized=Backups.is_initted(), autobackup_period=Backups.autobackup_period_minutes(), From 6ca68fae171e18518b2e8caa0f1f0fc237506f97 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Jun 2023 12:15:33 +0000 Subject: [PATCH 178/537] fix(tokens-repo): persistent hashing --- .../tokens/redis_tokens_repository.py | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index c72e231..8baa16d 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -3,6 +3,7 @@ Token repository using Redis as backend. 
""" from typing import Optional from datetime import datetime +from hashlib import md5 from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, @@ -28,7 +29,10 @@ class RedisTokensRepository(AbstractTokensRepository): @staticmethod def token_key_for_device(device_name: str): - return TOKENS_PREFIX + str(hash(device_name)) + hash = md5() + hash.update(bytes(device_name, "utf-8")) + digest = hash.hexdigest() + return TOKENS_PREFIX + digest def get_tokens(self) -> list[Token]: """Get the tokens""" @@ -41,11 +45,20 @@ class RedisTokensRepository(AbstractTokensRepository): tokens.append(token) return tokens + def _discover_token_key(self, input_token: Token) -> str: + """brute-force searching for tokens, for robust deletion""" + redis = self.connection + token_keys = redis.keys(TOKENS_PREFIX + "*") + for key in token_keys: + token = self._token_from_hash(key) + if token == input_token: + return key + def delete_token(self, input_token: Token) -> None: """Delete the token""" redis = self.connection - key = RedisTokensRepository._token_redis_key(input_token) - if input_token not in self.get_tokens(): + key = self._discover_token_key(input_token) + if key is None: raise TokenNotFound redis.delete(key) @@ -138,7 +151,10 @@ class RedisTokensRepository(AbstractTokensRepository): return None def _token_from_hash(self, redis_key: str) -> Optional[Token]: - return self._hash_as_model(redis_key, Token) + token = self._hash_as_model(redis_key, Token) + if token is not None: + token.created_at = token.created_at.replace(tzinfo=None) + return token def _recovery_key_from_hash(self, redis_key: str) -> Optional[RecoveryKey]: return self._hash_as_model(redis_key, RecoveryKey) From 2f71469f39db715229389d6dcf72d392ed5b3c25 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 23 Jun 2023 12:04:33 +0000 Subject: [PATCH 179/537] refactoring(backups): backuper -> backupper --- selfprivacy_api/backup/__init__.py | 14 +++++----- selfprivacy_api/backup/backuppers/__init__.py | 2 +- .../backup/backuppers/none_backupper.py | 4 +-- .../backup/backuppers/restic_backupper.py | 26 +++++++++---------- selfprivacy_api/backup/providers/backblaze.py | 4 +-- .../backup/providers/local_file.py | 4 +-- selfprivacy_api/backup/providers/memory.py | 4 +-- selfprivacy_api/backup/providers/none.py | 2 +- selfprivacy_api/backup/providers/provider.py | 6 ++--- tests/test_graphql/test_backup.py | 12 ++++----- 10 files changed, 39 insertions(+), 39 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 35b98eb..eaed6f8 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -287,7 +287,7 @@ class Backups: try: service.pre_backup() - snapshot = Backups.provider().backuper.start_backup( + snapshot = Backups.provider().backupper.start_backup( folders, tag, ) @@ -302,7 +302,7 @@ class Backups: @staticmethod def init_repo(): - Backups.provider().backuper.init() + Backups.provider().backupper.init() Storage.mark_as_init() @staticmethod @@ -310,7 +310,7 @@ class Backups: if Storage.has_init_mark(): return True - initted = Backups.provider().backuper.is_initted() + initted = Backups.provider().backupper.is_initted() if initted: Storage.mark_as_init() return True @@ -336,7 +336,7 @@ class Backups: # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing? 
- upstream_snapshots = Backups.provider().backuper.get_snapshots() + upstream_snapshots = Backups.provider().backupper.get_snapshots() Backups.sync_all_snapshots() return upstream_snapshots @@ -358,7 +358,7 @@ class Backups: @staticmethod def sync_all_snapshots(): - upstream_snapshots = Backups.provider().backuper.get_snapshots() + upstream_snapshots = Backups.provider().backupper.get_snapshots() Storage.invalidate_snapshot_storage() for snapshot in upstream_snapshots: Storage.cache_snapshot(snapshot) @@ -368,7 +368,7 @@ class Backups: def restore_service_from_snapshot(service: Service, snapshot_id: str): folders = service.get_folders() - Backups.provider().backuper.restore_from_backup( + Backups.provider().backupper.restore_from_backup( snapshot_id, folders, ) @@ -427,7 +427,7 @@ class Backups: @staticmethod def service_snapshot_size(snapshot_id: str) -> int: - return Backups.provider().backuper.restored_size( + return Backups.provider().backupper.restored_size( snapshot_id, ) diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 169a502..16cde07 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -4,7 +4,7 @@ from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot -class AbstractBackuper(ABC): +class AbstractBackupper(ABC): def __init__(self): pass diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index d0f0dda..014f755 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -1,10 +1,10 @@ from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot -from selfprivacy_api.backup.backuppers import AbstractBackuper +from selfprivacy_api.backup.backuppers import AbstractBackupper -class NoneBackupper(AbstractBackuper): +class NoneBackupper(AbstractBackupper): def is_initted(self, repo_name: str = "") -> bool: return False diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 5a16812..8ec2cc5 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -6,7 +6,7 @@ from typing import List from collections.abc import Iterable from json.decoder import JSONDecodeError -from selfprivacy_api.backup.backuppers import AbstractBackuper +from selfprivacy_api.backup.backuppers import AbstractBackupper from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.backup.jobs import get_backup_job from selfprivacy_api.services import get_service_by_id @@ -15,7 +15,7 @@ from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.backup.local_secret import LocalBackupSecret -class ResticBackuper(AbstractBackuper): +class ResticBackupper(AbstractBackupper): def __init__(self, login_flag: str, key_flag: str, type: str): self.login_flag = login_flag self.key_flag = key_flag @@ -68,7 +68,7 @@ class ResticBackuper(AbstractBackuper): ] ) if args != []: - command.extend(ResticBackuper.__flatten_list(args)) + command.extend(ResticBackupper.__flatten_list(args)) return command @staticmethod @@ -77,7 +77,7 @@ class ResticBackuper(AbstractBackuper): result = [] for item in list: if isinstance(item, Iterable) and not isinstance(item, str): - result.extend(ResticBackuper.__flatten_list(item)) + 
result.extend(ResticBackupper.__flatten_list(item)) continue result.append(item) return result @@ -113,10 +113,10 @@ class ResticBackuper(AbstractBackuper): messages = [] job = get_backup_job(get_service_by_id(tag)) try: - for raw_message in ResticBackuper.output_yielder(backup_command): + for raw_message in ResticBackupper.output_yielder(backup_command): message = self.parse_message(raw_message, job) messages.append(message) - return ResticBackuper._snapshot_from_backup_messages(messages, tag) + return ResticBackupper._snapshot_from_backup_messages(messages, tag) except ValueError as e: raise ValueError("could not create a snapshot: ", messages) from e @@ -124,11 +124,11 @@ class ResticBackuper(AbstractBackuper): def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: for message in messages: if message["message_type"] == "summary": - return ResticBackuper._snapshot_from_fresh_summary(message, repo_name) + return ResticBackupper._snapshot_from_fresh_summary(message, repo_name) raise ValueError("no summary message in restic json output") def parse_message(self, raw_message, job=None) -> object: - message = ResticBackuper.parse_json_output(raw_message) + message = ResticBackupper.parse_json_output(raw_message) if message["message_type"] == "status": if job is not None: # only update status if we run under some job Jobs.update( @@ -168,7 +168,7 @@ class ResticBackuper(AbstractBackuper): with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle: output = handle.communicate()[0].decode("utf-8") - if not ResticBackuper.has_json(output): + if not ResticBackupper.has_json(output): return False # raise NotImplementedError("error(big): " + output) return True @@ -190,7 +190,7 @@ class ResticBackuper(AbstractBackuper): ) as handle: output = handle.communicate()[0].decode("utf-8") try: - parsed_output = ResticBackuper.parse_json_output(output) + parsed_output = ResticBackupper.parse_json_output(output) return parsed_output["total_size"] except ValueError as e: raise ValueError("cannot restore a snapshot: " + output) from e @@ -239,7 +239,7 @@ class ResticBackuper(AbstractBackuper): if "Is there a repository at the following location?" in output: raise ValueError("No repository! 
: " + output) try: - return ResticBackuper.parse_json_output(output) + return ResticBackupper.parse_json_output(output) except ValueError as e: raise ValueError("Cannot load snapshots: ") from e @@ -258,7 +258,7 @@ class ResticBackuper(AbstractBackuper): @staticmethod def parse_json_output(output: str) -> object: - starting_index = ResticBackuper.json_start(output) + starting_index = ResticBackupper.json_start(output) if starting_index == -1: raise ValueError("There is no json in the restic output : " + output) @@ -292,6 +292,6 @@ class ResticBackuper(AbstractBackuper): @staticmethod def has_json(output: str) -> bool: - if ResticBackuper.json_start(output) == -1: + if ResticBackupper.json_start(output) == -1: return False return True diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index 349c5c7..74f3411 100644 --- a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -1,11 +1,11 @@ from .provider import AbstractBackupProvider -from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) class Backblaze(AbstractBackupProvider): - backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:") + backupper = ResticBackupper("--b2-account", "--b2-key", ":b2:") name = BackupProviderEnum.BACKBLAZE diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index 9afc61a..af38579 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -1,11 +1,11 @@ from .provider import AbstractBackupProvider -from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) class LocalFileBackup(AbstractBackupProvider): - backuper = ResticBackuper("", "", ":local:") + backupper = ResticBackupper("", "", ":local:") name = BackupProviderEnum.FILE diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py index 31a4cbb..18cdee5 100644 --- a/selfprivacy_api/backup/providers/memory.py +++ b/selfprivacy_api/backup/providers/memory.py @@ -1,11 +1,11 @@ from .provider import AbstractBackupProvider -from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) class InMemoryBackup(AbstractBackupProvider): - backuper = ResticBackuper("", "", ":memory:") + backupper = ResticBackupper("", "", ":memory:") name = BackupProviderEnum.MEMORY diff --git a/selfprivacy_api/backup/providers/none.py b/selfprivacy_api/backup/providers/none.py index f190324..474d0a2 100644 --- a/selfprivacy_api/backup/providers/none.py +++ b/selfprivacy_api/backup/providers/none.py @@ -6,6 +6,6 @@ from selfprivacy_api.graphql.queries.providers import ( class NoBackups(AbstractBackupProvider): - backuper = NoneBackupper() + backupper = NoneBackupper() name = BackupProviderEnum.NONE diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index 3c4fc43..077e920 100644 --- 
a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -4,19 +4,19 @@ It assumes that while some providers are supported via restic/rclone, others may require different backends """ from abc import ABC, abstractmethod -from selfprivacy_api.backup.backuppers import AbstractBackuper +from selfprivacy_api.backup.backuppers import AbstractBackupper from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) class AbstractBackupProvider(ABC): - backuper: AbstractBackuper + backupper: AbstractBackupper name: BackupProviderEnum def __init__(self, login="", key="", location="", repo_id=""): - self.backuper.set_creds(login, key, location) + self.backupper.set_creds(login, key, location) self.login = login self.key = key self.location = location diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index c1d668e..e3bf681 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -111,8 +111,8 @@ def test_config_load(generic_userdata): assert provider.key == "KEY" assert provider.location == "selfprivacy" - assert provider.backuper.account == "ID" - assert provider.backuper.key == "KEY" + assert provider.backupper.account == "ID" + assert provider.backupper.key == "KEY" def test_json_reset(generic_userdata): @@ -141,7 +141,7 @@ def test_select_backend(): def test_file_backend_init(file_backup): - file_backup.backuper.init() + file_backup.backupper.init() def test_backup_simple_file(raw_dummy_service, file_backup): @@ -151,7 +151,7 @@ def test_backup_simple_file(raw_dummy_service, file_backup): assert file_backup is not None name = service.get_id() - file_backup.backuper.init() + file_backup.backupper.init() def test_backup_service(dummy_service, backups): @@ -172,7 +172,7 @@ def test_backup_service(dummy_service, backups): def test_no_repo(memory_backup): with pytest.raises(ValueError): - assert memory_backup.backuper.get_snapshots() == [] + assert memory_backup.backupper.get_snapshots() == [] def test_one_snapshot(backups, dummy_service): @@ -188,7 +188,7 @@ def test_backup_returns_snapshot(backups, dummy_service): service_folders = dummy_service.get_folders() provider = Backups.provider() name = dummy_service.get_id() - snapshot = provider.backuper.start_backup(service_folders, name) + snapshot = provider.backupper.start_backup(service_folders, name) assert snapshot.id is not None assert snapshot.service_name == name From 273a1935a85501dfd559577bcef8007941eaa836 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 23 Jun 2023 12:17:48 +0000 Subject: [PATCH 180/537] feature(dev_qol): mypy type checking and rope refactoring support --- shell.nix | 3 +++ 1 file changed, 3 insertions(+) diff --git a/shell.nix b/shell.nix index b94b71d..f1ce9b2 100644 --- a/shell.nix +++ b/shell.nix @@ -12,6 +12,9 @@ let mnemonic coverage pylint + rope + mypy + pylsp-mypy pydantic typing-extensions psutil From 8604caa3314f94d78cd7aa6387006b88ad002779 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:00:42 +0000 Subject: [PATCH 181/537] refactor(backups): api readability reorg --- selfprivacy_api/backup/__init__.py | 348 +++++++++++++++-------------- 1 file changed, 183 insertions(+), 165 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index eaed6f8..2957832 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -34,125 +34,9 @@ DEFAULT_JSON_PROVIDER = { class Backups: - """A 
singleton controller for backups""" + """A stateless controller class for backups""" - @staticmethod - def set_localfile_repo(file_path: str): - ProviderClass = get_provider(BackupProviderEnum.FILE) - provider = ProviderClass( - login="", - key="", - location=file_path, - repo_id="", - ) - Storage.store_provider(provider) - - @staticmethod - def get_last_backed_up(service: Service) -> Optional[datetime]: - """Get a timezone-aware time of the last backup of a service""" - return Storage.get_last_backup_time(service.get_id()) - - @staticmethod - def get_cached_snapshots_service(service_id: str) -> List[Snapshot]: - snapshots = Storage.get_cached_snapshots() - return [snap for snap in snapshots if snap.service_name == service_id] - - @staticmethod - def sync_service_snapshots(service_id: str, snapshots: List[Snapshot]): - for snapshot in snapshots: - if snapshot.service_name == service_id: - Storage.cache_snapshot(snapshot) - for snapshot in Backups.get_cached_snapshots_service(service_id): - if snapshot.id not in [snap.id for snap in snapshots]: - Storage.delete_cached_snapshot(snapshot) - - @staticmethod - def enable_autobackup(service: Service): - Storage.set_autobackup(service) - - @staticmethod - def _service_ids_to_back_up(time: datetime) -> List[str]: - services = Storage.services_with_autobackup() - return [ - id - for id in services - if Backups.is_time_to_backup_service( - id, - time, - ) - ] - - @staticmethod - def services_to_back_up(time: datetime) -> List[Service]: - result = [] - for id in Backups._service_ids_to_back_up(time): - service = get_service_by_id(id) - if service is None: - raise ValueError( - "Cannot look up a service scheduled for backup!", - ) - result.append(service) - return result - - @staticmethod - def is_time_to_backup(time: datetime) -> bool: - """ - Intended as a time validator for huey cron scheduler - of automatic backups - """ - - return Backups._service_ids_to_back_up(time) != [] - - @staticmethod - def is_time_to_backup_service(service_id: str, time: datetime): - period = Backups.autobackup_period_minutes() - if period is None: - return False - if not Storage.is_autobackup_set(service_id): - return False - - last_backup = Storage.get_last_backup_time(service_id) - if last_backup is None: - # queue a backup immediately if there are no previous backups - return True - - if time > last_backup + timedelta(minutes=period): - return True - return False - - @staticmethod - def disable_autobackup(service: Service): - """also see disable_all_autobackup()""" - Storage.unset_autobackup(service) - - @staticmethod - def is_autobackup_enabled(service: Service) -> bool: - return Storage.is_autobackup_set(service.get_id()) - - @staticmethod - def autobackup_period_minutes() -> Optional[int]: - """None means autobackup is disabled""" - return Storage.autobackup_period_minutes() - - @staticmethod - def set_autobackup_period_minutes(minutes: int): - """ - 0 and negative numbers are equivalent to disable. - Setting to a positive number may result in a backup very soon - if some services are not backed up. 
- """ - if minutes <= 0: - Backups.disable_all_autobackup() - return - Storage.store_autobackup_period_minutes(minutes) - - @staticmethod - def disable_all_autobackup(): - """ - Disables all automatic backing up, - but does not change per-service settings - """ - Storage.delete_backup_period() +### Providers @staticmethod def provider(): @@ -175,32 +59,6 @@ class Backups: ) Storage.store_provider(provider) - @staticmethod - def construct_provider( - kind: BackupProviderEnum, - login: str, - key: str, - location: str, - repo_id: str = "", - ) -> AbstractBackupProvider: - provider_class = get_provider(kind) - - return provider_class( - login=login, - key=key, - location=location, - repo_id=repo_id, - ) - - @staticmethod - def reset(reset_json=True): - Storage.reset() - if reset_json: - try: - Backups.reset_provider_json() - except FileNotFoundError: - # if there is no userdata file, we do not need to reset it - pass @staticmethod def lookup_provider() -> AbstractBackupProvider: @@ -223,6 +81,36 @@ class Backups: Storage.store_provider(none_provider) return none_provider + @staticmethod + def construct_provider( + kind: BackupProviderEnum, + login: str, + key: str, + location: str, + repo_id: str = "", + ) -> AbstractBackupProvider: + provider_class = get_provider(kind) + + return provider_class( + login=login, + key=key, + location=location, + repo_id=repo_id, + ) + + @staticmethod + def load_provider_redis() -> Optional[AbstractBackupProvider]: + provider_model = Storage.load_provider() + if provider_model is None: + return None + return Backups.construct_provider( + BackupProviderEnum[provider_model.kind], + provider_model.login, + provider_model.key, + provider_model.location, + provider_model.repo_id, + ) + @staticmethod def load_provider_json() -> Optional[AbstractBackupProvider]: with ReadUserData() as user_data: @@ -261,18 +149,18 @@ class Backups: user_data["backup"] = DEFAULT_JSON_PROVIDER + @staticmethod - def load_provider_redis() -> Optional[AbstractBackupProvider]: - provider_model = Storage.load_provider() - if provider_model is None: - return None - return Backups.construct_provider( - BackupProviderEnum[provider_model.kind], - provider_model.login, - provider_model.key, - provider_model.location, - provider_model.repo_id, - ) + def reset(reset_json=True): + Storage.reset() + if reset_json: + try: + Backups.reset_provider_json() + except FileNotFoundError: + # if there is no userdata file, we do not need to reset it + pass + +### Backup @staticmethod def back_up(service: Service): @@ -300,6 +188,8 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) return snapshot +### Init + @staticmethod def init_repo(): Backups.provider().backupper.init() @@ -317,6 +207,8 @@ class Backups: return False +### Snapshots + @staticmethod def get_snapshots(service: Service) -> List[Snapshot]: snapshots = Backups.get_all_snapshots() @@ -363,6 +255,36 @@ class Backups: for snapshot in upstream_snapshots: Storage.cache_snapshot(snapshot) + @staticmethod + def service_snapshot_size(snapshot_id: str) -> int: + return Backups.provider().backupper.restored_size( + snapshot_id, + ) + + @staticmethod + def _store_last_snapshot(service_id: str, snapshot: Snapshot): + """What do we do with a snapshot that is just made?""" + # non-expiring timestamp of the last + Storage.store_last_timestamp(service_id, snapshot) + # expiring cache entry + Storage.cache_snapshot(snapshot) + + @staticmethod + def get_cached_snapshots_service(service_id: str) -> List[Snapshot]: + snapshots = 
Storage.get_cached_snapshots() + return [snap for snap in snapshots if snap.service_name == service_id] + + @staticmethod + def sync_service_snapshots(service_id: str, snapshots: List[Snapshot]): + for snapshot in snapshots: + if snapshot.service_name == service_id: + Storage.cache_snapshot(snapshot) + for snapshot in Backups.get_cached_snapshots_service(service_id): + if snapshot.id not in [snap.id for snap in snapshots]: + Storage.delete_cached_snapshot(snapshot) + +### Restoring + # to be deprecated/internalized in favor of restore_snapshot() @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): @@ -425,11 +347,101 @@ class Backups: status=JobStatus.FINISHED, ) +### Autobackup @staticmethod - def service_snapshot_size(snapshot_id: str) -> int: - return Backups.provider().backupper.restored_size( - snapshot_id, - ) + def is_autobackup_enabled(service: Service) -> bool: + return Storage.is_autobackup_set(service.get_id()) + + @staticmethod + def enable_autobackup(service: Service): + Storage.set_autobackup(service) + + @staticmethod + def disable_autobackup(service: Service): + """also see disable_all_autobackup()""" + Storage.unset_autobackup(service) + + @staticmethod + def disable_all_autobackup(): + """ + Disables all automatic backing up, + but does not change per-service settings + """ + Storage.delete_backup_period() + + @staticmethod + def autobackup_period_minutes() -> Optional[int]: + """None means autobackup is disabled""" + return Storage.autobackup_period_minutes() + + @staticmethod + def set_autobackup_period_minutes(minutes: int): + """ + 0 and negative numbers are equivalent to disable. + Setting to a positive number may result in a backup very soon + if some services are not backed up. + """ + if minutes <= 0: + Backups.disable_all_autobackup() + return + Storage.store_autobackup_period_minutes(minutes) + + @staticmethod + def is_time_to_backup(time: datetime) -> bool: + """ + Intended as a time validator for huey cron scheduler + of automatic backups + """ + + return Backups._service_ids_to_back_up(time) != [] + + @staticmethod + def services_to_back_up(time: datetime) -> List[Service]: + result = [] + for id in Backups._service_ids_to_back_up(time): + service = get_service_by_id(id) + if service is None: + raise ValueError( + "Cannot look up a service scheduled for backup!", + ) + result.append(service) + return result + + @staticmethod + def get_last_backed_up(service: Service) -> Optional[datetime]: + """Get a timezone-aware time of the last backup of a service""" + return Storage.get_last_backup_time(service.get_id()) + + @staticmethod + def is_time_to_backup_service(service_id: str, time: datetime): + period = Backups.autobackup_period_minutes() + if period is None: + return False + if not Storage.is_autobackup_set(service_id): + return False + + last_backup = Storage.get_last_backup_time(service_id) + if last_backup is None: + # queue a backup immediately if there are no previous backups + return True + + if time > last_backup + timedelta(minutes=period): + return True + return False + + @staticmethod + def _service_ids_to_back_up(time: datetime) -> List[str]: + services = Storage.services_with_autobackup() + return [ + id + for id in services + if Backups.is_time_to_backup_service( + id, + time, + ) + ] + +### Helpers @staticmethod def space_usable_for_service(service: Service) -> int: @@ -442,9 +454,15 @@ class Backups: return usable_bytes @staticmethod - def _store_last_snapshot(service_id: str, snapshot: Snapshot): - """What do we 
do with a snapshot that is just made?""" - # non-expiring timestamp of the last - Storage.store_last_timestamp(service_id, snapshot) - # expiring cache entry - Storage.cache_snapshot(snapshot) + def set_localfile_repo(file_path: str): + ProviderClass = get_provider(BackupProviderEnum.FILE) + provider = ProviderClass( + login="", + key="", + location=file_path, + repo_id="", + ) + Storage.store_provider(provider) + + + From e159d2f1a742630cc39ce303831990acfb7e95b5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:07:47 +0000 Subject: [PATCH 182/537] refactor(backups): reorder imports --- selfprivacy_api/backup/__init__.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 2957832..06c0842 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,19 +1,21 @@ -from operator import add -from typing import List, Optional from datetime import datetime, timedelta +from operator import add from os import statvfs - -from selfprivacy_api.models.backup.snapshot import Snapshot +from typing import List, Optional from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service +from selfprivacy_api.jobs import Jobs, JobStatus + from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) +from selfprivacy_api.models.backup.snapshot import Snapshot + from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.backup.storage import Storage @@ -23,7 +25,6 @@ from selfprivacy_api.backup.jobs import ( get_restore_job, add_restore_job, ) -from selfprivacy_api.jobs import Jobs, JobStatus DEFAULT_JSON_PROVIDER = { "provider": "BACKBLAZE", From 81d1762518b1f8846ffe4ddfff67cfe1fa6ce668 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:11:11 +0000 Subject: [PATCH 183/537] refactor(backups): make lookup_provider not public --- selfprivacy_api/backup/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 06c0842..380c399 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -41,7 +41,7 @@ class Backups: @staticmethod def provider(): - return Backups.lookup_provider() + return Backups._lookup_provider() @staticmethod def set_provider( @@ -62,7 +62,7 @@ class Backups: @staticmethod - def lookup_provider() -> AbstractBackupProvider: + def _lookup_provider() -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() if redis_provider is not None: return redis_provider From 71eeed926de326066c33b7361109176a27303a77 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:14:15 +0000 Subject: [PATCH 184/537] refactor(backups): make construct_provider not public --- selfprivacy_api/backup/__init__.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 380c399..23f5c77 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -51,7 +51,7 @@ class Backups: location: str, repo_id: str = "", ): - provider = Backups.construct_provider( + provider = Backups._construct_provider( kind, login, key, @@ -76,14 +76,14 @@ class 
Backups: Storage.store_provider(json_provider) return json_provider - none_provider = Backups.construct_provider( + none_provider = Backups._construct_provider( BackupProviderEnum.NONE, login="", key="", location="" ) Storage.store_provider(none_provider) return none_provider @staticmethod - def construct_provider( + def _construct_provider( kind: BackupProviderEnum, login: str, key: str, @@ -104,7 +104,7 @@ class Backups: provider_model = Storage.load_provider() if provider_model is None: return None - return Backups.construct_provider( + return Backups._construct_provider( BackupProviderEnum[provider_model.kind], provider_model.login, provider_model.key, @@ -133,7 +133,7 @@ class Backups: if provider_dict == DEFAULT_JSON_PROVIDER: return None try: - return Backups.construct_provider( + return Backups._construct_provider( kind=BackupProviderEnum[provider_dict["provider"]], login=provider_dict["accountId"], key=provider_dict["accountKey"], From 1593474dc18984470912d06bde393aecf05c2c4c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:20:22 +0000 Subject: [PATCH 185/537] refactor(backups): make redis and json provider related lowlevels private --- selfprivacy_api/backup/__init__.py | 12 ++++++------ tests/test_graphql/test_backup.py | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 23f5c77..6439fb2 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -63,12 +63,12 @@ class Backups: @staticmethod def _lookup_provider() -> AbstractBackupProvider: - redis_provider = Backups.load_provider_redis() + redis_provider = Backups._load_provider_redis() if redis_provider is not None: return redis_provider try: - json_provider = Backups.load_provider_json() + json_provider = Backups._load_provider_json() except FileNotFoundError: json_provider = None @@ -100,7 +100,7 @@ class Backups: ) @staticmethod - def load_provider_redis() -> Optional[AbstractBackupProvider]: + def _load_provider_redis() -> Optional[AbstractBackupProvider]: provider_model = Storage.load_provider() if provider_model is None: return None @@ -113,7 +113,7 @@ class Backups: ) @staticmethod - def load_provider_json() -> Optional[AbstractBackupProvider]: + def _load_provider_json() -> Optional[AbstractBackupProvider]: with ReadUserData() as user_data: provider_dict = { "provider": "", @@ -143,7 +143,7 @@ class Backups: return None @staticmethod - def reset_provider_json() -> None: + def _reset_provider_json() -> None: with WriteUserData() as user_data: if "backblaze" in user_data.keys(): del user_data["backblaze"] @@ -156,7 +156,7 @@ class Backups: Storage.reset() if reset_json: try: - Backups.reset_provider_json() + Backups._reset_provider_json() except FileNotFoundError: # if there is no userdata file, we do not need to reset it pass diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index e3bf681..23fa685 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -475,7 +475,7 @@ def test_provider_storage(backups_backblaze): assert provider.key == "KEY" Storage.store_provider(provider) - restored_provider = Backups.load_provider_redis() + restored_provider = Backups._load_provider_redis() assert isinstance(restored_provider, Backblaze) assert restored_provider.login == "ID" assert restored_provider.key == "KEY" From e6efd1b42dfb5504cd36ddca016f2590df386479 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 
18:21:50 +0000 Subject: [PATCH 186/537] refactor(backups): move reset() to top because toplevel interface --- selfprivacy_api/backup/__init__.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 6439fb2..fff9c66 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -60,6 +60,16 @@ class Backups: ) Storage.store_provider(provider) + @staticmethod + def reset(reset_json=True): + Storage.reset() + if reset_json: + try: + Backups._reset_provider_json() + except FileNotFoundError: + # if there is no userdata file, we do not need to reset it + pass + @staticmethod def _lookup_provider() -> AbstractBackupProvider: @@ -151,16 +161,6 @@ class Backups: user_data["backup"] = DEFAULT_JSON_PROVIDER - @staticmethod - def reset(reset_json=True): - Storage.reset() - if reset_json: - try: - Backups._reset_provider_json() - except FileNotFoundError: - # if there is no userdata file, we do not need to reset it - pass - ### Backup @staticmethod From 2f2c4f14afca41368912d7e15ac87d4a3aca2eba Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:30:31 +0000 Subject: [PATCH 187/537] refactor(backups): group operations together --- selfprivacy_api/backup/__init__.py | 152 ++++++++++++++--------------- 1 file changed, 76 insertions(+), 76 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index fff9c66..6878ce5 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -160,6 +160,24 @@ class Backups: user_data["backup"] = DEFAULT_JSON_PROVIDER +### Init + + @staticmethod + def init_repo(): + Backups.provider().backupper.init() + Storage.mark_as_init() + + @staticmethod + def is_initted() -> bool: + if Storage.has_init_mark(): + return True + + initted = Backups.provider().backupper.is_initted() + if initted: + Storage.mark_as_init() + return True + + return False ### Backup @@ -189,24 +207,69 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) return snapshot -### Init +### Restoring + + # to be deprecated/internalized in favor of restore_snapshot() + @staticmethod + def restore_service_from_snapshot(service: Service, snapshot_id: str): + folders = service.get_folders() + + Backups.provider().backupper.restore_from_backup( + snapshot_id, + folders, + ) @staticmethod - def init_repo(): - Backups.provider().backupper.init() - Storage.mark_as_init() + def assert_restorable(snapshot: Snapshot): + service = get_service_by_id(snapshot.service_name) + if service is None: + raise ValueError( + f"snapshot has a nonexistent service: {snapshot.service_name}" + ) + + needed_space = Backups.service_snapshot_size(snapshot.id) + available_space = Backups.space_usable_for_service(service) + if needed_space > available_space: + raise ValueError( + f"we only have {available_space} bytes " + f"but snapshot needs {needed_space}" + ) @staticmethod - def is_initted() -> bool: - if Storage.has_init_mark(): - return True + def restore_snapshot(snapshot: Snapshot): + service = get_service_by_id(snapshot.service_name) - initted = Backups.provider().backupper.is_initted() - if initted: - Storage.mark_as_init() - return True + if service is None: + raise ValueError( + f"snapshot has a nonexistent service: {snapshot.service_name}" + ) - return False + job = get_restore_job(service) + if job is None: + job = add_restore_job(snapshot) + + Jobs.update( + job, + status=JobStatus.RUNNING, + ) 
+ try: + Backups.assert_restorable(snapshot) + Backups.restore_service_from_snapshot( + service, + snapshot.id, + ) + service.post_restore() + except Exception as e: + Jobs.update( + job, + status=JobStatus.ERROR, + ) + raise e + + Jobs.update( + job, + status=JobStatus.FINISHED, + ) ### Snapshots @@ -284,71 +347,8 @@ class Backups: if snapshot.id not in [snap.id for snap in snapshots]: Storage.delete_cached_snapshot(snapshot) -### Restoring - - # to be deprecated/internalized in favor of restore_snapshot() - @staticmethod - def restore_service_from_snapshot(service: Service, snapshot_id: str): - folders = service.get_folders() - - Backups.provider().backupper.restore_from_backup( - snapshot_id, - folders, - ) - - @staticmethod - def assert_restorable(snapshot: Snapshot): - service = get_service_by_id(snapshot.service_name) - if service is None: - raise ValueError( - f"snapshot has a nonexistent service: {snapshot.service_name}" - ) - - needed_space = Backups.service_snapshot_size(snapshot.id) - available_space = Backups.space_usable_for_service(service) - if needed_space > available_space: - raise ValueError( - f"we only have {available_space} bytes " - f"but snapshot needs {needed_space}" - ) - - @staticmethod - def restore_snapshot(snapshot: Snapshot): - service = get_service_by_id(snapshot.service_name) - - if service is None: - raise ValueError( - f"snapshot has a nonexistent service: {snapshot.service_name}" - ) - - job = get_restore_job(service) - if job is None: - job = add_restore_job(snapshot) - - Jobs.update( - job, - status=JobStatus.RUNNING, - ) - try: - Backups.assert_restorable(snapshot) - Backups.restore_service_from_snapshot( - service, - snapshot.id, - ) - service.post_restore() - except Exception as e: - Jobs.update( - job, - status=JobStatus.ERROR, - ) - raise e - - Jobs.update( - job, - status=JobStatus.FINISHED, - ) - ### Autobackup + @staticmethod def is_autobackup_enabled(service: Service) -> bool: return Storage.is_autobackup_set(service.get_id()) From b9be0be6a2944bb8934cdcc6ca0d65b996c7b526 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:42:26 +0000 Subject: [PATCH 188/537] refactor(backups): privatize assert_restorable and restore_snapshot_from_id --- selfprivacy_api/backup/__init__.py | 54 +++++++++++++++--------------- tests/test_graphql/test_backup.py | 2 +- 2 files changed, 28 insertions(+), 28 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 6878ce5..5fe46ef 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -209,31 +209,6 @@ class Backups: ### Restoring - # to be deprecated/internalized in favor of restore_snapshot() - @staticmethod - def restore_service_from_snapshot(service: Service, snapshot_id: str): - folders = service.get_folders() - - Backups.provider().backupper.restore_from_backup( - snapshot_id, - folders, - ) - - @staticmethod - def assert_restorable(snapshot: Snapshot): - service = get_service_by_id(snapshot.service_name) - if service is None: - raise ValueError( - f"snapshot has a nonexistent service: {snapshot.service_name}" - ) - - needed_space = Backups.service_snapshot_size(snapshot.id) - available_space = Backups.space_usable_for_service(service) - if needed_space > available_space: - raise ValueError( - f"we only have {available_space} bytes " - f"but snapshot needs {needed_space}" - ) @staticmethod def restore_snapshot(snapshot: Snapshot): @@ -253,8 +228,8 @@ class Backups: status=JobStatus.RUNNING, ) try: - 
Backups.assert_restorable(snapshot) - Backups.restore_service_from_snapshot( + Backups._assert_restorable(snapshot) + Backups._restore_service_from_snapshot( service, snapshot.id, ) @@ -271,6 +246,31 @@ class Backups: status=JobStatus.FINISHED, ) + @staticmethod + def _assert_restorable(snapshot: Snapshot): + service = get_service_by_id(snapshot.service_name) + if service is None: + raise ValueError( + f"snapshot has a nonexistent service: {snapshot.service_name}" + ) + + needed_space = Backups.service_snapshot_size(snapshot.id) + available_space = Backups.space_usable_for_service(service) + if needed_space > available_space: + raise ValueError( + f"we only have {available_space} bytes " + f"but snapshot needs {needed_space}" + ) + + @staticmethod + def _restore_service_from_snapshot(service: Service, snapshot_id: str): + folders = service.get_folders() + + Backups.provider().backupper.restore_from_backup( + snapshot_id, + folders, + ) + ### Snapshots @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 23fa685..c882372 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -222,7 +222,7 @@ def test_restore(backups, dummy_service): remove(p) assert not path.exists(p) - Backups.restore_service_from_snapshot(dummy_service, snap.id) + Backups._restore_service_from_snapshot(dummy_service, snap.id) for p, content in zip(paths_to_nuke, contents): assert path.exists(p) with open(p, "r") as file: From 4ad4c3cc67493cb799d5ffeaa900044dd2d758f9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:50:37 +0000 Subject: [PATCH 189/537] refactor(backups): delete sync_service_snapshots --- selfprivacy_api/backup/__init__.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 5fe46ef..b378ef0 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -338,15 +338,6 @@ class Backups: snapshots = Storage.get_cached_snapshots() return [snap for snap in snapshots if snap.service_name == service_id] - @staticmethod - def sync_service_snapshots(service_id: str, snapshots: List[Snapshot]): - for snapshot in snapshots: - if snapshot.service_name == service_id: - Storage.cache_snapshot(snapshot) - for snapshot in Backups.get_cached_snapshots_service(service_id): - if snapshot.id not in [snap.id for snap in snapshots]: - Storage.delete_cached_snapshot(snapshot) - ### Autobackup @staticmethod From f8029d133a9e9c3f0c72aa9cf6652bd474c22808 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 19:01:26 +0000 Subject: [PATCH 190/537] refactor(backups): straighten get_all_snapshots --- selfprivacy_api/backup/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b378ef0..f6d9e0b 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -292,9 +292,8 @@ class Backups: # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing? 
- upstream_snapshots = Backups.provider().backupper.get_snapshots() Backups.sync_all_snapshots() - return upstream_snapshots + return Storage.get_cached_snapshots() @staticmethod def get_snapshot_by_id(id: str) -> Optional[Snapshot]: From a76834d1baa41155887c08049d6d3871a7c5f240 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 19:16:18 +0000 Subject: [PATCH 191/537] refactor(backups): merge sync_all_snapshots with force_snapshot_reload --- selfprivacy_api/backup/__init__.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f6d9e0b..7edba6f 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -292,7 +292,7 @@ class Backups: # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing? - Backups.sync_all_snapshots() + Backups.force_snapshot_reload() return Storage.get_cached_snapshots() @staticmethod @@ -302,17 +302,13 @@ class Backups: return snap # Possibly our cache entry got invalidated, let's try one more time - Backups.sync_all_snapshots() + Backups.force_snapshot_reload() snap = Storage.get_cached_snapshot_by_id(id) return snap @staticmethod def force_snapshot_reload(): - Backups.sync_all_snapshots() - - @staticmethod - def sync_all_snapshots(): upstream_snapshots = Backups.provider().backupper.get_snapshots() Storage.invalidate_snapshot_storage() for snapshot in upstream_snapshots: From bae81b2f69393419c2b5edf90d2c26d3fa7d2664 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 19:20:49 +0000 Subject: [PATCH 192/537] refactor(backups): rename force_snapshot_reload to force_snapshot_cache_reload --- selfprivacy_api/backup/__init__.py | 6 +++--- selfprivacy_api/graphql/mutations/backup_mutations.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7edba6f..b76d483 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -292,7 +292,7 @@ class Backups: # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing? 
-        Backups.force_snapshot_reload()
+        Backups.force_snapshot_cache_reload()
         return Storage.get_cached_snapshots()

     @staticmethod
@@ -302,13 +302,13 @@ class Backups:
             return snap

         # Possibly our cache entry got invalidated, let's try one more time
-        Backups.force_snapshot_reload()
+        Backups.force_snapshot_cache_reload()
         snap = Storage.get_cached_snapshot_by_id(id)
         return snap

     @staticmethod
-    def force_snapshot_reload():
+    def force_snapshot_cache_reload():
         upstream_snapshots = Backups.provider().backupper.get_snapshots()
         Storage.invalidate_snapshot_storage()
         for snapshot in upstream_snapshots:
diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py
index b7720c5..adc3873 100644
--- a/selfprivacy_api/graphql/mutations/backup_mutations.py
+++ b/selfprivacy_api/graphql/mutations/backup_mutations.py
@@ -157,7 +157,7 @@ class BackupMutations:
     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def force_snapshots_reload(self) -> GenericMutationReturn:
         """Force snapshots reload"""
-        Backups.force_snapshot_reload()
+        Backups.force_snapshot_cache_reload()
         return GenericMutationReturn(
             success=True,
             code=200,

From 7147f970779e94d1d521c13d1cb6e30b4cab3389 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 26 Jun 2023 19:29:20 +0000
Subject: [PATCH 193/537] refactor(backups): remove the by-service getting of cached snapshots

---
 selfprivacy_api/backup/__init__.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index b76d483..dac10f2 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -328,10 +328,6 @@ class Backups:
         # expiring cache entry
         Storage.cache_snapshot(snapshot)

-    @staticmethod
-    def get_cached_snapshots_service(service_id: str) -> List[Snapshot]:
-        snapshots = Storage.get_cached_snapshots()
-        return [snap for snap in snapshots if snap.service_name == service_id]

 ### Autobackup

From ae16a527a236368759532c4db41fb9de810e6df0 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 26 Jun 2023 19:41:18 +0000
Subject: [PATCH 194/537] refactor(backups): rename service_snapshot_size to snapshot_restored_size

---
 selfprivacy_api/backup/__init__.py | 4 ++--
 tests/test_graphql/test_backup.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index dac10f2..f4d7ab2 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -254,7 +254,7 @@ class Backups:
                 f"snapshot has a nonexistent service: {snapshot.service_name}"
             )

-        needed_space = Backups.service_snapshot_size(snapshot.id)
+        needed_space = Backups.snapshot_restored_size(snapshot.id)
         available_space = Backups.space_usable_for_service(service)
         if needed_space > available_space:
             raise ValueError(
@@ -315,7 +315,7 @@ class Backups:
         Storage.cache_snapshot(snapshot)

     @staticmethod
-    def service_snapshot_size(snapshot_id: str) -> int:
+    def snapshot_restored_size(snapshot_id: str) -> int:
         return Backups.provider().backupper.restored_size(
             snapshot_id,
         )
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index c882372..bf1be69 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -232,7 +232,7 @@ def test_restore(backups, dummy_service):
 def test_sizing(backups, dummy_service):
     Backups.back_up(dummy_service)
     snap = Backups.get_snapshots(dummy_service)[0]
-    size = Backups.service_snapshot_size(snap.id)
+    size = Backups.snapshot_restored_size(snap.id)
     assert size is not None
     assert size > 0

From d9b26e12e22b52fa491a0d0764f8f649e8b12651 Mon Sep 17 00:00:00 2001
From: Inex Code
Date: Wed, 28 Jun 2023 12:10:12 +0300
Subject: [PATCH 195/537] fix(backups): missing space in rclone args

---
 selfprivacy_api/backup/backuppers/restic_backupper.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index 8ec2cc5..e5d7955 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -35,7 +35,7 @@ class ResticBackupper(AbstractBackupper):
         return f"rclone:{self.type}{self.repo}"

     def rclone_args(self):
-        return "rclone.args=serve restic --stdio" + self.backend_rclone_args()
+        return "rclone.args=serve restic --stdio " + self.backend_rclone_args()

     def backend_rclone_args(self) -> str:
         acc_arg = ""

From 59fef1d0165517da34483ed974f1833f7f0c6732 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 28 Jun 2023 09:41:03 +0000
Subject: [PATCH 196/537] fix(backups): register the correct tasks

---
 selfprivacy_api/task_registry.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/selfprivacy_api/task_registry.py b/selfprivacy_api/task_registry.py
index 82eaf06..dfd329c 100644
--- a/selfprivacy_api/task_registry.py
+++ b/selfprivacy_api/task_registry.py
@@ -1,4 +1,4 @@
 from selfprivacy_api.utils.huey import huey
 from selfprivacy_api.jobs.test import test_job
-from selfprivacy_api.restic_controller.tasks import *
+from selfprivacy_api.backup.tasks import *
 from selfprivacy_api.services.generic_service_mover import move_service

From 35258bad38e4691938711317e4876c8dbf6edb8c Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 28 Jun 2023 09:48:32 +0000
Subject: [PATCH 197/537] fix(services): clean up a stray get_location

---
 selfprivacy_api/services/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py
index a688734..02bb1d3 100644
--- a/selfprivacy_api/services/__init__.py
+++ b/selfprivacy_api/services/__init__.py
@@ -42,7 +42,7 @@ def get_disabled_services() -> list[Service]:


 def get_services_by_location(location: str) -> list[Service]:
-    return [service for service in services if service.get_location() == location]
+    return [service for service in services if service.get_drive() == location]


 def get_all_required_dns_records() -> list[ServiceDnsRecord]:

From e4865aa09401052414c8a825c7309eb31ee39161 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 28 Jun 2023 11:45:07 +0000
Subject: [PATCH 198/537] fix(services): proper backup progress reporting

---
 .../backup/backuppers/restic_backupper.py | 10 +++---
 tests/test_graphql/test_backup.py | 32 +++++++++++++++++--
 2 files changed, 36 insertions(+), 6 deletions(-)

diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index e5d7955..ad163ea 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -127,19 +127,21 @@ class ResticBackupper(AbstractBackupper):
                 return ResticBackupper._snapshot_from_fresh_summary(message, repo_name)
         raise ValueError("no summary message in restic json output")

-    def parse_message(self, raw_message, job=None) -> object:
-        message = ResticBackupper.parse_json_output(raw_message)
+    def parse_message(self, raw_message_line: str, job=None) -> dict:
+        message = ResticBackupper.parse_json_output(raw_message_line)
+        if not isinstance(message, dict):
+            raise ValueError("we have too many messages on one line?")
         if message["message_type"] == "status":
             if job is not None:  # only update status if we run under some job
                 Jobs.update(
                     job,
                     JobStatus.RUNNING,
-                    progress=int(message["percent_done"]),
+                    progress=int(message["percent_done"] * 100),
                 )
         return message

     @staticmethod
-    def _snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot:
+    def _snapshot_from_fresh_summary(message: dict, repo_name) -> Snapshot:
         return Snapshot(
             id=message["snapshot_id"],
             created_at=datetime.datetime.now(datetime.timezone.utc),
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index bf1be69..0ab2136 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -3,6 +3,7 @@ import os.path as path
 from os import makedirs
 from os import remove
 from os import listdir
+from os import urandom
 from datetime import datetime, timedelta, timezone

 import selfprivacy_api.services as services
@@ -259,9 +260,18 @@ def assert_job_has_run(job_type):
     assert JobStatus.RUNNING in Jobs.status_updates(job)


-def assert_job_had_progress(job_type):
+def job_progress_updates(job_type):
     job = [job for job in finished_jobs() if job.type_id == job_type][0]
-    assert len(Jobs.progress_updates(job)) > 0
+    return Jobs.progress_updates(job)
+
+
+def assert_job_had_progress(job_type):
+    assert len(job_progress_updates(job_type)) > 0
+
+
+def make_large_file(path: str, bytes: int):
+    with open(path, "wb") as file:
+        file.write(urandom(bytes))


 def test_snapshots_by_id(backups, dummy_service):
@@ -290,6 +300,24 @@ def test_backup_service_task(backups, dummy_service):
     assert_job_had_progress(job_type_id)


+def test_backup_larger_file(backups, dummy_service):
+    dir = path.join(dummy_service.get_folders()[0], "LARGEFILE")
+    mega = 2**20
+    make_large_file(dir, 10 * mega)
+
+    handle = start_backup(dummy_service)
+    handle(blocking=True)
+
+    # Results will be slightly different on different machines. If someone has
+    # trouble with this test on their machine, consider dropping it.
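+    # (note: restic's JSON "status" messages report percent_done as a fraction
+    # in [0, 1]; parse_message() in this patch scales it by 100, e.g.
+    # int(0.42 * 100) == 42, so the updates checked below land in 0..100)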
+
+    id = dummy_service.get_id()
+    job_type_id = f"services.{id}.backup"
+    assert_job_finished(job_type_id, count=1)
+    assert_job_has_run(job_type_id)
+    updates = job_progress_updates(job_type_id)
+    assert len(updates) > 3
+    assert updates[1] > 10
+
+
 def test_restore_snapshot_task(backups, dummy_service):

From 11184a55e8d2f5baff606562715d6082129e06ae Mon Sep 17 00:00:00 2001
From: Inex Code
Date: Wed, 28 Jun 2023 15:56:30 +0300
Subject: [PATCH 199/537] feat(jobs): return type_id of the job in graphql api

---
 selfprivacy_api/graphql/common_types/jobs.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/selfprivacy_api/graphql/common_types/jobs.py b/selfprivacy_api/graphql/common_types/jobs.py
index 3019a70..1a644ec 100644
--- a/selfprivacy_api/graphql/common_types/jobs.py
+++ b/selfprivacy_api/graphql/common_types/jobs.py
@@ -12,6 +12,7 @@ class ApiJob:
     """Job type for GraphQL."""

     uid: str
+    type_id: str
     name: str
     description: str
     status: str
@@ -28,6 +29,7 @@ def job_to_api_job(job: Job) -> ApiJob:
     """Convert a Job from jobs controller to a GraphQL ApiJob."""
     return ApiJob(
         uid=str(job.uid),
+        type_id=job.type_id,
         name=job.name,
         description=job.description,
         status=job.status.name,

From 3ee90617ba934d4df225314e3cc15d23cf135a47 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 28 Jun 2023 13:04:57 +0000
Subject: [PATCH 200/537] test(backup): make large testfile larger

---
 tests/test_graphql/test_backup.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 0ab2136..cc5a1eb 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -303,7 +303,7 @@ def test_backup_service_task(backups, dummy_service):
 def test_backup_larger_file(backups, dummy_service):
     dir = path.join(dummy_service.get_folders()[0], "LARGEFILE")
     mega = 2**20
-    make_large_file(dir, 10 * mega)
+    make_large_file(dir, 100 * mega)

     handle = start_backup(dummy_service)
     handle(blocking=True)
@@ -315,7 +315,7 @@ def test_backup_larger_file(backups, dummy_service):
     assert_job_has_run(job_type_id)
     updates = job_progress_updates(job_type_id)
     assert len(updates) > 3
-    assert updates[1] > 10
+    assert updates[int((len(updates)-1)/2.0)] > 10

From cd2e9d3ba399c32b67a4c6814de30f5ecd70a193 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 28 Jun 2023 13:22:53 +0000
Subject: [PATCH 201/537] fix(jobs): make finishing the job set progress to 100

---
 selfprivacy_api/jobs/__init__.py | 3 +++
 tests/test_jobs.py | 23 +++++++++++++++++++++++
 2 files changed, 26 insertions(+)

diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py
index 5e86c5f..2551237 100644
--- a/selfprivacy_api/jobs/__init__.py
+++ b/selfprivacy_api/jobs/__init__.py
@@ -198,7 +198,10 @@ class Jobs:
             job.description = description
         if status_text is not None:
             job.status_text = status_text
+        if status == JobStatus.FINISHED:
+            job.progress = 100
         if progress is not None:
+            # explicitly provided progress has priority
             job.progress = progress
             Jobs.log_progress_update(job, progress)
         job.status = status
diff --git a/tests/test_jobs.py b/tests/test_jobs.py
index 56e4aa3..0a4271e 100644
--- a/tests/test_jobs.py
+++ b/tests/test_jobs.py
@@ -80,6 +80,29 @@ def test_jobs(jobs_with_one_job):
     jobsmodule.JOB_EXPIRATION_SECONDS = backup


+def test_finishing_equals_100(jobs_with_one_job):
+    jobs = jobs_with_one_job
+    test_job = jobs.get_jobs()[0]
+    assert not jobs.is_busy()
+    assert test_job.progress != 100
+
+    jobs.update(job=test_job, status=JobStatus.FINISHED)
+
+    assert test_job.progress == 100
+
+
+def test_finishing_equals_100_unless_stated_otherwise(jobs_with_one_job):
+    jobs = jobs_with_one_job
+    test_job = jobs.get_jobs()[0]
+    assert not jobs.is_busy()
+    assert test_job.progress != 100
+    assert test_job.progress != 23
+
+    jobs.update(job=test_job, status=JobStatus.FINISHED, progress=23)
+
+    assert test_job.progress == 23
+
+
 @pytest.fixture
 def jobs():
     j = Jobs()

From b480c840416a79540a58ea76dbd9e50e4af23e0d Mon Sep 17 00:00:00 2001
From: Inex Code
Date: Thu, 29 Jun 2023 13:44:29 +0300
Subject: [PATCH 202/537] style: fix styling

---
 selfprivacy_api/backup/__init__.py | 20 +++++++-------------
 tests/test_graphql/test_backup.py | 2 +-
 2 files changed, 8 insertions(+), 14 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index f4d7ab2..7a60ecb 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -37,7 +37,7 @@ DEFAULT_JSON_PROVIDER = {
 class Backups:
     """A stateless controller class for backups"""

-### Providers
+    ### Providers

     @staticmethod
     def provider():
@@ -70,7 +70,6 @@ class Backups:
             # if there is no userdata file, we do not need to reset it
             pass

-
     @staticmethod
     def _lookup_provider() -> AbstractBackupProvider:
         redis_provider = Backups._load_provider_redis()
@@ -160,7 +159,7 @@ class Backups:
             user_data["backup"] = DEFAULT_JSON_PROVIDER

-### Init
+    ### Init

     @staticmethod
     def init_repo():
@@ -179,7 +178,7 @@ class Backups:

         return False

-### Backup
+    ### Backup

     @staticmethod
     def back_up(service: Service):
@@ -207,8 +206,7 @@ class Backups:
         Jobs.update(job, status=JobStatus.FINISHED)
         return snapshot

-### Restoring
-
+    ### Restoring

     @staticmethod
     def restore_snapshot(snapshot: Snapshot):
@@ -271,7 +269,7 @@ class Backups:
             folders,
         )

-### Snapshots
+    ### Snapshots

     @staticmethod
     def get_snapshots(service: Service) -> List[Snapshot]:
@@ -328,9 +326,8 @@ class Backups:
         # expiring cache entry
         Storage.cache_snapshot(snapshot)

+    ### Autobackup

-### Autobackup
-
     @staticmethod
     def is_autobackup_enabled(service: Service) -> bool:
         return Storage.is_autobackup_set(service.get_id())
@@ -424,7 +421,7 @@ class Backups:
             )
         ]

-### Helpers
+    ### Helpers

     @staticmethod
     def space_usable_for_service(service: Service) -> int:
@@ -446,6 +443,3 @@ class Backups:
             repo_id="",
         )
         Storage.store_provider(provider)
-
-
-
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index cc5a1eb..a212ade 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -315,7 +315,7 @@ def test_backup_larger_file(backups, dummy_service):
     assert_job_has_run(job_type_id)
     updates = job_progress_updates(job_type_id)
     assert len(updates) > 3
-    assert updates[int((len(updates)-1)/2.0)] > 10
+    assert updates[int((len(updates) - 1) / 2.0)] > 10

From 15ce344bc88f13fac7b9ba4a0bd38400377d60f7 Mon Sep 17 00:00:00 2001
From: Inex Code
Date: Thu, 29 Jun 2023 13:45:00 +0300
Subject: [PATCH 203/537] feat(backups): expose if the service can be backed up

---
 selfprivacy_api/graphql/common_types/service.py | 2 ++
 selfprivacy_api/services/ocserv/__init__.py | 4 ++++
 selfprivacy_api/services/service.py | 4 ++++
 3 files changed, 10 insertions(+)

diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py
index b3403e9..fd671d4 100644
--- a/selfprivacy_api/graphql/common_types/service.py
+++ b/selfprivacy_api/graphql/common_types/service.py
@@ -93,6 +93,7 @@ class Service:
     is_movable: bool
     is_required: bool
     is_enabled: bool
+    can_be_backed_up: bool
     status: ServiceStatusEnum
     url: typing.Optional[str]
     dns_records: typing.Optional[typing.List[DnsRecord]]
@@ -124,6 +125,7 @@ def service_to_graphql_service(service: ServiceInterface) -> Service:
         is_movable=service.is_movable(),
         is_required=service.is_required(),
         is_enabled=service.is_enabled(),
+        can_be_backed_up=service.can_be_backed_up(),
         status=ServiceStatusEnum(service.get_status().value),
         url=service.get_url(),
         dns_records=[
diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py
index a15cb84..4f46692 100644
--- a/selfprivacy_api/services/ocserv/__init__.py
+++ b/selfprivacy_api/services/ocserv/__init__.py
@@ -44,6 +44,10 @@ class Ocserv(Service):
     def is_required() -> bool:
         return False

+    @staticmethod
+    def can_be_backed_up() -> bool:
+        return False
+
     @staticmethod
     def is_enabled() -> bool:
         with ReadUserData() as user_data:
diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py
index f804773..65337b4 100644
--- a/selfprivacy_api/services/service.py
+++ b/selfprivacy_api/services/service.py
@@ -81,6 +81,10 @@ class Service(ABC):
     def is_required() -> bool:
         pass

+    @staticmethod
+    def can_be_backed_up() -> bool:
+        return True
+
     @staticmethod
     @abstractmethod
     def is_enabled() -> bool:

From 64e7afe53e014ed010fb3a4a36e02a18323cfd9f Mon Sep 17 00:00:00 2001
From: Inex Code
Date: Thu, 29 Jun 2023 14:27:08 +0300
Subject: [PATCH 204/537] feat(backups): Add backup descriptions for UI

---
 .../graphql/common_types/service.py | 2 +
 .../services/bitwarden/__init__.py | 4 ++
 selfprivacy_api/services/gitea/__init__.py | 4 ++
 selfprivacy_api/services/jitsi/__init__.py | 4 ++
 .../services/mailserver/__init__.py | 4 ++
 .../services/nextcloud/__init__.py | 4 ++
 selfprivacy_api/services/ocserv/__init__.py | 4 ++
 selfprivacy_api/services/pleroma/__init__.py | 4 ++
 selfprivacy_api/services/service.py | 42 +++++++++++++++++++
 .../services/test_service/__init__.py | 4 ++
 10 files changed, 76 insertions(+)

diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py
index fd671d4..836a3df 100644
--- a/selfprivacy_api/graphql/common_types/service.py
+++ b/selfprivacy_api/graphql/common_types/service.py
@@ -94,6 +94,7 @@ class Service:
     is_required: bool
     is_enabled: bool
     can_be_backed_up: bool
+    backup_description: str
     status: ServiceStatusEnum
     url: typing.Optional[str]
     dns_records: typing.Optional[typing.List[DnsRecord]]
@@ -126,6 +127,7 @@ def service_to_graphql_service(service: ServiceInterface) -> Service:
         is_required=service.is_required(),
         is_enabled=service.is_enabled(),
         can_be_backed_up=service.can_be_backed_up(),
+        backup_description=service.get_backup_description(),
         status=ServiceStatusEnum(service.get_status().value),
         url=service.get_url(),
         dns_records=[
diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py
index 6842af6..98455d8 100644
--- a/selfprivacy_api/services/bitwarden/__init__.py
+++ b/selfprivacy_api/services/bitwarden/__init__.py
@@ -55,6 +55,10 @@ class Bitwarden(Service):
     def is_required() -> bool:
         return False

+    @staticmethod
+    def get_backup_description() -> str:
+        return "Password database, encryption certificate and attachments."
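+    # (note: this string is shown verbatim in the client UI; this same patch
+    # exposes it as the `backupDescription` field of the GraphQL Service type,
+    # so a query sketch along the lines of
+    #   { services { allServices { displayName backupDescription } } }
+    # would fetch it next to `canBeBackedUp`; the exact query root may differ)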
+
     @staticmethod
     def is_enabled() -> bool:
         with ReadUserData() as user_data:
diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py
index f9ff3d2..ce73dc6 100644
--- a/selfprivacy_api/services/gitea/__init__.py
+++ b/selfprivacy_api/services/gitea/__init__.py
@@ -51,6 +51,10 @@ class Gitea(Service):
     def is_required() -> bool:
         return False

+    @staticmethod
+    def get_backup_description() -> str:
+        return "Git repositories, database and user data."
+
     @staticmethod
     def is_enabled() -> bool:
         with ReadUserData() as user_data:
diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py
index a969eb2..2b54ae1 100644
--- a/selfprivacy_api/services/jitsi/__init__.py
+++ b/selfprivacy_api/services/jitsi/__init__.py
@@ -54,6 +54,10 @@ class Jitsi(Service):
     def is_required() -> bool:
         return False

+    @staticmethod
+    def get_backup_description() -> str:
+        return "Secrets that are used to encrypt the communication."
+
     @staticmethod
     def is_enabled() -> bool:
         with ReadUserData() as user_data:
diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py
index b0a6e30..d3600e5 100644
--- a/selfprivacy_api/services/mailserver/__init__.py
+++ b/selfprivacy_api/services/mailserver/__init__.py
@@ -54,6 +54,10 @@ class MailServer(Service):
     def is_required() -> bool:
         return True

+    @staticmethod
+    def get_backup_description() -> str:
+        return "Mailboxes and filters."
+
     @staticmethod
     def is_enabled() -> bool:
         return True
diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py
index ae81403..632c5d3 100644
--- a/selfprivacy_api/services/nextcloud/__init__.py
+++ b/selfprivacy_api/services/nextcloud/__init__.py
@@ -49,6 +49,10 @@ class Nextcloud(Service):
     def is_required() -> bool:
         return False

+    @staticmethod
+    def get_backup_description() -> str:
+        return "All the files and other data stored in Nextcloud."
+
     @staticmethod
     def is_enabled() -> bool:
         with ReadUserData() as user_data:
diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py
index 4f46692..3860b19 100644
--- a/selfprivacy_api/services/ocserv/__init__.py
+++ b/selfprivacy_api/services/ocserv/__init__.py
@@ -48,6 +48,10 @@ class Ocserv(Service):
     def can_be_backed_up() -> bool:
         return False

+    @staticmethod
+    def get_backup_description() -> str:
+        return "Nothing to backup."
+
     @staticmethod
     def is_enabled() -> bool:
         with ReadUserData() as user_data:
diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py
index 0d5b338..bac1cda 100644
--- a/selfprivacy_api/services/pleroma/__init__.py
+++ b/selfprivacy_api/services/pleroma/__init__.py
@@ -46,6 +46,10 @@ class Pleroma(Service):
     def is_required() -> bool:
         return False

+    @staticmethod
+    def get_backup_description() -> str:
+        return "Your Pleroma accounts, posts and media."
+
     @staticmethod
     def is_enabled() -> bool:
         with ReadUserData() as user_data:
diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py
index 65337b4..286fab7 100644
--- a/selfprivacy_api/services/service.py
+++ b/selfprivacy_api/services/service.py
@@ -41,83 +41,125 @@ class Service(ABC):
     @staticmethod
     @abstractmethod
     def get_id() -> str:
+        """
+        The unique id of the service.
+        """
         pass

     @staticmethod
     @abstractmethod
     def get_display_name() -> str:
+        """
+        The name of the service that is shown to the user.
+        """
         pass

     @staticmethod
     @abstractmethod
     def get_description() -> str:
+        """
+        The description of the service that is shown to the user.
+        """
         pass

     @staticmethod
     @abstractmethod
     def get_svg_icon() -> str:
+        """
+        The monochrome svg icon of the service.
+        """
         pass

     @staticmethod
     @abstractmethod
     def get_url() -> typing.Optional[str]:
+        """
+        The url of the service if it is accessible from a web browser.
+        """
         pass

     @classmethod
     def get_user(cls) -> typing.Optional[str]:
+        """
+        The user that owns the service's files.
+        Defaults to the service's id.
+        """
         return cls.get_id()

     @classmethod
     def get_group(cls) -> typing.Optional[str]:
+        """
+        The group that owns the service's files.
+        Defaults to the service's user.
+        """
         return cls.get_user()

     @staticmethod
     @abstractmethod
     def is_movable() -> bool:
+        """`True` if the service can be moved to the non-system volume."""
         pass

     @staticmethod
     @abstractmethod
     def is_required() -> bool:
+        """`True` if the service is required for the server to function."""
         pass

     @staticmethod
     def can_be_backed_up() -> bool:
+        """`True` if the service can be backed up."""
         return True

+    @staticmethod
+    @abstractmethod
+    def get_backup_description() -> str:
+        """
+        The text shown to the user that explains what data will be
+        backed up.
+        """
+        pass
+
     @staticmethod
     @abstractmethod
     def is_enabled() -> bool:
+        """`True` if the service is enabled."""
         pass

     @staticmethod
     @abstractmethod
     def get_status() -> ServiceStatus:
+        """The status of the service, reported by systemd."""
         pass

     @staticmethod
     @abstractmethod
     def enable():
+        """Enable the service. Usually this means enabling the systemd unit."""
         pass

     @staticmethod
     @abstractmethod
     def disable():
+        """Disable the service. Usually this means disabling the systemd unit."""
         pass

     @staticmethod
     @abstractmethod
     def stop():
+        """Stop the service. Usually this means stopping the systemd unit."""
         pass

     @staticmethod
     @abstractmethod
     def start():
+        """Start the service. Usually this means starting the systemd unit."""
         pass

     @staticmethod
     @abstractmethod
     def restart():
+        """Restart the service. Usually this means restarting the systemd unit."""
         pass

     @staticmethod
diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py
index b1c2924..af527a0 100644
--- a/selfprivacy_api/services/test_service/__init__.py
+++ b/selfprivacy_api/services/test_service/__init__.py
@@ -53,6 +53,10 @@ class DummyService(Service):
     def is_required() -> bool:
         return False

+    @staticmethod
+    def get_backup_description() -> str:
+        return "How did we get here?"
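+    # (note: services that cannot be backed up pair can_be_backed_up() -> False
+    # with a stub description; see Ocserv's "Nothing to backup." in this patch)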
+
     @staticmethod
     def is_enabled() -> bool:
         return True

From 9793201ca178cb91f7ae69a58e50dc90d116a645 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 3 Jul 2023 12:15:36 +0000
Subject: [PATCH 205/537] feature(backups): a wrapper for rclone sync

---
 .../backup/backuppers/restic_backupper.py | 15 ++++++++++
 tests/test_graphql/test_backup.py | 28 ++++++++++++++++++-
 2 files changed, 42 insertions(+), 1 deletion(-)

diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index ad163ea..826b336 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -5,6 +5,7 @@ import datetime
 from typing import List
 from collections.abc import Iterable
 from json.decoder import JSONDecodeError
+from os.path import exists

 from selfprivacy_api.backup.backuppers import AbstractBackupper
 from selfprivacy_api.models.backup.snapshot import Snapshot
@@ -95,6 +96,20 @@ class ResticBackupper(AbstractBackupper):
                 if "NOTICE:" not in line:
                     yield line

+    @staticmethod
+    def sync (src_path: str, dest_path:str):
+        """a wrapper around rclone sync"""
+
+        if not exists(src_path):
+            raise ValueError("source dir for rclone sync must exist")
+
+        rclone_command = ["rclone", "sync", "-P", src_path, dest_path]
+        for raw_message in ResticBackupper.output_yielder(rclone_command):
+            if "ERROR" in raw_message:
+                raise ValueError(raw_message)
+
+
     def start_backup(self, folders: List[str], tag: str):
         """
         Start backup with restic
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index a212ade..86310c7 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -7,6 +7,8 @@ from os import urandom
 from datetime import datetime, timedelta, timezone

 import selfprivacy_api.services as services
+from selfprivacy_api.services import Service
+
 from selfprivacy_api.services import get_service_by_id
 from selfprivacy_api.services.test_service import DummyService
 from selfprivacy_api.graphql.queries.providers import BackupProvider
@@ -17,6 +19,8 @@ import selfprivacy_api.backup.providers as providers
 from selfprivacy_api.backup.providers import AbstractBackupProvider
 from selfprivacy_api.backup.providers.backblaze import Backblaze

+from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper
+
 from selfprivacy_api.backup.tasks import start_backup, restore_snapshot
 from selfprivacy_api.backup.storage import Storage
 from selfprivacy_api.backup.jobs import get_backup_job
@@ -68,7 +72,7 @@ def raw_dummy_service(tmpdir, backups):


 @pytest.fixture()
-def dummy_service(tmpdir, backups, raw_dummy_service):
+def dummy_service(tmpdir, backups, raw_dummy_service) -> Service:
     service = raw_dummy_service
     repo_path = path.join(tmpdir, "test_repo")
     assert not path.exists(repo_path)
@@ -519,3 +523,25 @@ def test_services_to_back_up(backups, dummy_service):
     services = Backups.services_to_back_up(now)
     assert len(services) == 1
     assert services[0].get_id() == dummy_service.get_id()
+
+
+def test_sync(dummy_service):
+    src = dummy_service.get_folders()[0]
+    dst = dummy_service.get_folders()[1]
+    old_files_src = listdir(src)
+    old_files_dst = listdir(dst)
+    assert old_files_src != old_files_dst
+
+    ResticBackupper.sync(src, dst)
+    new_files_src = listdir(src)
+    new_files_dst = listdir(dst)
+    assert new_files_src == old_files_src
+    assert new_files_dst == new_files_src
+
+
+def test_sync_nonexistent_src(dummy_service):
+    src = "/var/lib/nonexistentFluffyBunniesOfUnix"
+    dst = dummy_service.get_folders()[1]
+
+    with pytest.raises(ValueError):
+        ResticBackupper.sync(src, dst)

From 08cc7740b3981e7468e9258517756f9a425cc02c Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 3 Jul 2023 12:54:43 +0000
Subject: [PATCH 206/537] test(backups): actually list folders

---
 tests/test_graphql/test_backup.py | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 86310c7..0a150a6 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -200,13 +200,18 @@ def test_backup_returns_snapshot(backups, dummy_service):
     assert snapshot.created_at is not None


+def folder_files(folder):
+    return [
+        path.join(folder, filename)
+        for filename in listdir(folder)
+        if filename is not None
+    ]
+
+
 def service_files(service):
     result = []
     for service_folder in service.get_folders():
-        service_filename = listdir(service_folder)[0]
-        assert service_filename is not None
-        service_file = path.join(service_folder, service_filename)
-        result.append(service_file)
+        result.extend(folder_files(service_folder))
     return result

From 6a00d3cff987bcd3257afff7c15996ad88a50ab1 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 3 Jul 2023 13:06:54 +0000
Subject: [PATCH 207/537] refactor(backups): move output yielding into backup utils

---
 .../backup/backuppers/restic_backupper.py | 22 ++++---------------
 selfprivacy_api/backup/util.py | 14 ++++++++++++
 2 files changed, 18 insertions(+), 18 deletions(-)
 create mode 100644 selfprivacy_api/backup/util.py

diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index 826b336..bbaf0c0 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -7,6 +7,7 @@ from collections.abc import Iterable
 from json.decoder import JSONDecodeError
 from os.path import exists

+from selfprivacy_api.backup.util import output_yielder
 from selfprivacy_api.backup.backuppers import AbstractBackupper
 from selfprivacy_api.models.backup.snapshot import Snapshot
 from selfprivacy_api.backup.jobs import get_backup_job
@@ -84,32 +85,17 @@ class ResticBackupper(AbstractBackupper):
                 result.append(item)
         return result

     @staticmethod
-    def output_yielder(command):
-        with subprocess.Popen(
-            command,
-            shell=False,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.STDOUT,
-            universal_newlines=True,
-        ) as handle:
-            for line in iter(handle.stdout.readline, ""):
-                if "NOTICE:" not in line:
-                    yield line
-
-
-    @staticmethod
-    def sync (src_path: str, dest_path:str):
+    def sync(src_path: str, dest_path: str):
         """a wrapper around rclone sync"""

         if not exists(src_path):
             raise ValueError("source dir for rclone sync must exist")

         rclone_command = ["rclone", "sync", "-P", src_path, dest_path]
-        for raw_message in ResticBackupper.output_yielder(rclone_command):
+        for raw_message in output_yielder(rclone_command):
             if "ERROR" in raw_message:
                 raise ValueError(raw_message)

     def start_backup(self, folders: List[str], tag: str):
         """
         Start backup with restic
@@ -128,7 +114,7 @@ class ResticBackupper(AbstractBackupper):
         messages = []
         job = get_backup_job(get_service_by_id(tag))
         try:
-            for raw_message in ResticBackupper.output_yielder(backup_command):
+            for raw_message in output_yielder(backup_command):
                 message = self.parse_message(raw_message, job)
                 messages.append(message)
             return ResticBackupper._snapshot_from_backup_messages(messages, tag)
diff --git a/selfprivacy_api/backup/util.py b/selfprivacy_api/backup/util.py
new file mode 100644
index 0000000..8af74e0
--- /dev/null
+++ b/selfprivacy_api/backup/util.py
@@ -0,0 +1,14 @@
+import subprocess
+
+
+def output_yielder(command):
+    with subprocess.Popen(
+        command,
+        shell=False,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+        universal_newlines=True,
+    ) as handle:
+        for line in iter(handle.stdout.readline, ""):
+            if "NOTICE:" not in line:
+                yield line

From 953860a02ce0c727982efd928b8ea08debe0cab7 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 3 Jul 2023 13:28:23 +0000
Subject: [PATCH 208/537] refactor(backups): move syncing (non-restic) into backup utils

---
 .../backup/backuppers/restic_backupper.py | 12 ------------
 selfprivacy_api/backup/util.py | 13 +++++++++++++
 tests/test_graphql/test_backup.py | 6 +++---
 3 files changed, 16 insertions(+), 15 deletions(-)

diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index bbaf0c0..a94c993 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -84,18 +84,6 @@ class ResticBackupper(AbstractBackupper):
                 result.append(item)
         return result

-    @staticmethod
-    def sync(src_path: str, dest_path: str):
-        """a wrapper around rclone sync"""
-
-        if not exists(src_path):
-            raise ValueError("source dir for rclone sync must exist")
-
-        rclone_command = ["rclone", "sync", "-P", src_path, dest_path]
-        for raw_message in output_yielder(rclone_command):
-            if "ERROR" in raw_message:
-                raise ValueError(raw_message)
-
     def start_backup(self, folders: List[str], tag: str):
         """
         Start backup with restic
diff --git a/selfprivacy_api/backup/util.py b/selfprivacy_api/backup/util.py
index 8af74e0..bda421e 100644
--- a/selfprivacy_api/backup/util.py
+++ b/selfprivacy_api/backup/util.py
@@ -1,4 +1,5 @@
 import subprocess
+from os.path import exists


 def output_yielder(command):
@@ -12,3 +13,15 @@ def output_yielder(command):
         for line in iter(handle.stdout.readline, ""):
             if "NOTICE:" not in line:
                 yield line
+
+
+def sync(src_path: str, dest_path: str):
+    """a wrapper around rclone sync"""
+
+    if not exists(src_path):
+        raise ValueError("source dir for rclone sync must exist")
+
+    rclone_command = ["rclone", "sync", "-P", src_path, dest_path]
+    for raw_message in output_yielder(rclone_command):
+        if "ERROR" in raw_message:
+            raise ValueError(raw_message)
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 0a150a6..e269cf1 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -18,8 +18,8 @@ from selfprivacy_api.backup import Backups
 import selfprivacy_api.backup.providers as providers
 from selfprivacy_api.backup.providers import AbstractBackupProvider
 from selfprivacy_api.backup.providers.backblaze import Backblaze
+from selfprivacy_api.backup.util import sync

-from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper

 from selfprivacy_api.backup.tasks import start_backup, restore_snapshot
 from selfprivacy_api.backup.storage import Storage
@@ -537,7 +537,7 @@ def test_sync(dummy_service):
     old_files_dst = listdir(dst)
     assert old_files_src != old_files_dst

-    ResticBackupper.sync(src, dst)
+    sync(src, dst)
     new_files_src = listdir(src)
     new_files_dst = listdir(dst)
     assert new_files_src == old_files_src
     assert new_files_dst == new_files_src
@@ -549,4 +549,4 @@ def test_sync_nonexistent_src(dummy_service):
     dst = dummy_service.get_folders()[1]

     with pytest.raises(ValueError):
-        ResticBackupper.sync(src, dst)
+        sync(src, dst)

From c5088e0e2cce3af34b10a1a6ce1e8c4eda0eb368 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 3 Jul 2023 13:29:31 +0000
Subject: [PATCH 209/537] test(backups): remove the 100mb file after test

---
 tests/test_graphql/test_backup.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index e269cf1..337ef86 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -325,6 +325,8 @@ def test_backup_larger_file(backups, dummy_service):
     updates = job_progress_updates(job_type_id)
     assert len(updates) > 3
     assert updates[int((len(updates) - 1) / 2.0)] > 10
+    #clean up a bit
+    remove(dir)


 def test_restore_snapshot_task(backups, dummy_service):

From b6eb27dc5e36a1a06fdf09dea4be08c619713fb3 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 3 Jul 2023 15:28:12 +0000
Subject: [PATCH 210/537] feature(backups): mounting a repo

---
 .../backup/backuppers/restic_backupper.py | 26 ++++++++++++++++++-
 tests/test_graphql/test_backup.py | 23 +++++++++++++++-
 2 files changed, 47 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index a94c993..d9f278c 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -6,6 +6,8 @@ from typing import List
 from collections.abc import Iterable
 from json.decoder import JSONDecodeError
 from os.path import exists
+from os import listdir
+from time import sleep

 from selfprivacy_api.backup.util import output_yielder
 from selfprivacy_api.backup.backuppers import AbstractBackupper
@@ -52,7 +54,7 @@ class ResticBackupper(AbstractBackupper):
     def _password_command(self):
         return f"echo {LocalBackupSecret.get()}"

-    def restic_command(self, *args, tag: str = ""):
+    def restic_command(self, *args, tag: str = "") -> List[str]:
         command = [
             "restic",
             "-o",
@@ -73,6 +75,28 @@ class ResticBackupper(AbstractBackupper):
             command.extend(ResticBackupper.__flatten_list(args))
         return command

+    def mount_repo(self, dir):
+        mount_command = self.restic_command("mount", dir)
+        mount_command.insert(0, "nohup")
+        handle = subprocess.Popen(mount_command, stdout=subprocess.DEVNULL, shell=False)
+        sleep(2)
+        if not "ids" in listdir(dir):
+            raise IOError("failed to mount dir ", dir)
+        return handle
+
+    def unmount_repo(self, dir):
+        mount_command = ["umount", "-l", dir]
+        with subprocess.Popen(
+            mount_command, stdout=subprocess.PIPE, shell=False
+        ) as handle:
+            output = handle.communicate()[0].decode("utf-8")
+            # TODO: check for exit code?
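+            # (sketch for the TODO: `umount` exits non-zero on failure, so
+            # checking `handle.returncode != 0` could replace string matching)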
+            if "error" in output.lower():
+                raise IOError("failed to unmount dir ", dir, ": ", output)
+
+            if not listdir(dir) == []:
+                raise IOError("failed to unmount dir ", dir)
+
     @staticmethod
     def __flatten_list(list):
         """string-aware list flattener"""
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 337ef86..8fe3c99 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -5,6 +5,7 @@ from os import remove
 from os import listdir
 from os import urandom
 from datetime import datetime, timedelta, timezone
+from subprocess import Popen

 import selfprivacy_api.services as services
 from selfprivacy_api.services import Service
@@ -19,6 +20,7 @@ import selfprivacy_api.backup.providers as providers
 from selfprivacy_api.backup.providers import AbstractBackupProvider
 from selfprivacy_api.backup.providers.backblaze import Backblaze
 from selfprivacy_api.backup.util import sync
+from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper

 from selfprivacy_api.backup.tasks import start_backup, restore_snapshot
@@ -325,7 +327,7 @@ def test_backup_larger_file(backups, dummy_service):
     updates = job_progress_updates(job_type_id)
     assert len(updates) > 3
     assert updates[int((len(updates) - 1) / 2.0)] > 10
-    #clean up a bit
+    # clean up a bit
     remove(dir)
@@ -552,3 +554,22 @@ def test_sync_nonexistent_src(dummy_service):

     with pytest.raises(ValueError):
         sync(src, dst)
+
+
+# Restic lowlevel
+def test_mount_umount(backups, dummy_service, tmpdir):
+    Backups.back_up(dummy_service)
+    backupper = Backups.provider().backupper
+    assert isinstance(backupper, ResticBackupper)
+
+    mountpoint = tmpdir / "mount"
+    makedirs(mountpoint)
+    assert path.exists(mountpoint)
+    assert len(listdir(mountpoint)) == 0
+
+    handle = backupper.mount_repo(mountpoint)
+    assert len(listdir(mountpoint)) != 0
+
+    backupper.unmount_repo(mountpoint)
+    # handle.terminate()
+    assert len(listdir(mountpoint)) == 0

From 3dc6fb91f2b9ca2d13e9448a94a6f7669fde2dbc Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 3 Jul 2023 20:41:52 +0000
Subject: [PATCH 211/537] refactor(backups): download a copy before replacing original

---
 .../backup/backuppers/restic_backupper.py | 30 ++++++++++++++-----
 tests/test_graphql/test_backup.py | 5 +++-
 2 files changed, 27 insertions(+), 8 deletions(-)

diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index d9f278c..14a8be8 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -1,15 +1,16 @@
 import subprocess
 import json
 import datetime
+import tempfile

 from typing import List
 from collections.abc import Iterable
 from json.decoder import JSONDecodeError
-from os.path import exists
+from os.path import exists, join
 from os import listdir
 from time import sleep

-from selfprivacy_api.backup.util import output_yielder
+from selfprivacy_api.backup.util import output_yielder, sync
 from selfprivacy_api.backup.backuppers import AbstractBackupper
 from selfprivacy_api.models.backup.snapshot import Snapshot
 from selfprivacy_api.backup.jobs import get_backup_job
@@ -210,19 +211,34 @@ class ResticBackupper(AbstractBackupper):
         except ValueError as e:
             raise ValueError("cannot restore a snapshot: " + output) from e

-    def restore_from_backup(self, snapshot_id, folders):
+    def restore_from_backup(self, snapshot_id, folders: List[str], verify=True):
         """
         Restore from backup with restic
         """
-        # snapshots save the path of the folder in the file system
-        # I do not alter the signature yet because maybe this can be
-        # changed with flags
+        if folders is None or folders == []:
+            raise ValueError("cannot restore without knowing where to!")
+
+        with tempfile.TemporaryDirectory() as dir:
+            self.do_restore(snapshot_id, target=dir)
+            for folder in folders:
+                src = join(dir, folder.strip("/"))
+                if not exists(src):
+                    raise ValueError(
+                        f"there is no such path: {src}. We tried to find {folder}"
+                    )
+                dst = folder
+                sync(src, dst)
+
+    def do_restore(self, snapshot_id, target="/", verify=False):
+        """barebones restic restore"""
         restore_command = self.restic_command(
             "restore",
             snapshot_id,
             "--target",
-            "/",
+            target,
         )
+        if verify:
+            restore_command.append("--verify")

         with subprocess.Popen(
             restore_command, stdout=subprocess.PIPE, shell=False
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 8fe3c99..872b6ad 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -86,7 +86,10 @@ def dummy_service(tmpdir, backups, raw_dummy_service) -> Service:
     services.services.append(service)

     assert get_service_by_id(service.get_id()) is not None
-    return service
+    yield service
+
+    # cleanup because apparently it matters wrt tasks
+    services.services.remove(service)


 @pytest.fixture()

From 03313b739a4c9849a909ac1b4a8e58102429d1be Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 3 Jul 2023 20:53:21 +0000
Subject: [PATCH 212/537] feature(backups): check restore exit code

---
 selfprivacy_api/backup/backuppers/restic_backupper.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index 14a8be8..ae86efc 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -219,7 +219,7 @@ class ResticBackupper(AbstractBackupper):
             raise ValueError("cannot restore without knowing where to!")

         with tempfile.TemporaryDirectory() as dir:
-            self.do_restore(snapshot_id, target=dir)
+            self.do_restore(snapshot_id, target=dir, verify=verify)
             for folder in folders:
                 src = join(dir, folder.strip("/"))
                 if not exists(src):
@@ -249,6 +249,14 @@ class ResticBackupper(AbstractBackupper):
             if "restoring" not in output:
                 raise ValueError("cannot restore a snapshot: " + output)

+            assert (
+                handle.returncode is not None
+            )  # none should be impossible after communicate
+            if handle.returncode != 0:
+                raise ValueError(
+                    "restore exited with errorcode", handle.returncode, ":", output
+                )
+
     def _load_snapshots(self) -> object:
         """
         Load list of snapshots from repository

From 53bb5cc4e20080e16146e5ebcd49b67a653a28cd Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 5 Jul 2023 13:13:30 +0000
Subject: [PATCH 213/537] feature(backups): forgetting snapshots

---
 selfprivacy_api/backup/__init__.py | 5 ++++
 selfprivacy_api/backup/backuppers/__init__.py | 4 +++
 .../backup/backuppers/none_backupper.py | 3 +++
 .../backup/backuppers/restic_backupper.py | 26 +++++++++++++++++++
 tests/test_graphql/test_backup.py | 26 +++++++++++++++++++
 5 files changed, 64 insertions(+)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 7a60ecb..216cf65 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -305,6 +305,11 @@ class Backups:

         return snap

+    @staticmethod
+    def forget_snapshot(snapshot: Snapshot):
+        Backups.provider().backupper.forget_snapshot(snapshot.id)
+        Storage.delete_cached_snapshot(snapshot)
+
     @staticmethod
     def force_snapshot_cache_reload():
         upstream_snapshots = Backups.provider().backupper.get_snapshots()
diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py
index 16cde07..335cdfd 100644
--- a/selfprivacy_api/backup/backuppers/__init__.py
+++ b/selfprivacy_api/backup/backuppers/__init__.py
@@ -37,3 +37,7 @@ class AbstractBackupper(ABC):
     @abstractmethod
     def restored_size(self, snapshot_id: str) -> int:
         raise NotImplementedError
+
+    @abstractmethod
+    def forget_snapshot(self, snapshot_id):
+        raise NotImplementedError
diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py
index 014f755..2ac2035 100644
--- a/selfprivacy_api/backup/backuppers/none_backupper.py
+++ b/selfprivacy_api/backup/backuppers/none_backupper.py
@@ -27,3 +27,6 @@ class NoneBackupper(AbstractBackupper):

     def restored_size(self, snapshot_id: str) -> int:
         raise NotImplementedError
+
+    def forget_snapshot(self, snapshot_id):
+        raise NotImplementedError
diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index ae86efc..7f16a91 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -257,6 +257,32 @@ class ResticBackupper(AbstractBackupper):
                     "restore exited with errorcode", handle.returncode, ":", output
                 )

+    def forget_snapshot(self, snapshot_id):
+        """either removes the snapshot or marks it for deletion later, depending on server settings"""
+        forget_command = self.restic_command(
+            "forget",
+            snapshot_id,
+        )
+
+        with subprocess.Popen(
+            forget_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False
+        ) as handle:
+            # for some reason forget does not support nice reporting of progress via json
+            output, err = [string.decode("utf-8") for string in handle.communicate()]
+
+            if "no matching ID found" in err:
+                raise ValueError(
+                    "trying to delete, but no such snapshot: ", snapshot_id
+                )
+
+            assert (
+                handle.returncode is not None
+            )  # none should be impossible after communicate
+            if handle.returncode != 0:
+                raise ValueError(
+                    "forget exited with errorcode", handle.returncode, ":", output
+                )
+
     def _load_snapshots(self) -> object:
         """
         Load list of snapshots from repository
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 872b6ad..928c1b7 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -15,6 +15,8 @@ from selfprivacy_api.services.test_service import DummyService
 from selfprivacy_api.graphql.queries.providers import BackupProvider
 from selfprivacy_api.jobs import Jobs, JobStatus

+from selfprivacy_api.models.backup.snapshot import Snapshot
+
 from selfprivacy_api.backup import Backups
 import selfprivacy_api.backup.providers as providers
@@ -314,6 +316,30 @@ def test_backup_service_task(backups, dummy_service):
     assert_job_had_progress(job_type_id)


+def test_forget_snapshot(backups, dummy_service):
+    snap1 = Backups.back_up(dummy_service)
+    snap2 = Backups.back_up(dummy_service)
+    assert len(Backups.get_snapshots(dummy_service)) == 2
+
+    Backups.forget_snapshot(snap2)
+    assert len(Backups.get_snapshots(dummy_service)) == 1
+    Backups.force_snapshot_cache_reload()
+    assert len(Backups.get_snapshots(dummy_service)) == 1
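+    # (note: the cache reload refetches snapshots from the backend, so this
+    # second length check proves the snapshot is gone upstream, not merely
+    # evicted from the local snapshot cache)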
+ assert Backups.get_snapshots(dummy_service)[0].id == snap1.id + + Backups.forget_snapshot(snap1) + assert len(Backups.get_snapshots(dummy_service)) == 0 + + +def test_forget_nonexistent_snapshot(backups, dummy_service): + bogus = Snapshot( + id="gibberjibber", service_name="nohoho", created_at=datetime.now(timezone.utc) + ) + with pytest.raises(ValueError): + Backups.forget_snapshot(bogus) + + def test_backup_larger_file(backups, dummy_service): dir = path.join(dummy_service.get_folders()[0], "LARGEFILE") mega = 2**20 From c74b3df32c75a338d43a999c4659b96ffb724778 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Jul 2023 10:50:59 +0000 Subject: [PATCH 214/537] feature(backups): restore strategies enum --- selfprivacy_api/backup/__init__.py | 39 ++++++++----------- .../graphql/common_types/backup.py | 10 +++++ 2 files changed, 27 insertions(+), 22 deletions(-) create mode 100644 selfprivacy_api/graphql/common_types/backup.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 216cf65..c3deee0 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -8,11 +8,12 @@ from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service -from selfprivacy_api.jobs import Jobs, JobStatus +from selfprivacy_api.jobs import Jobs, JobStatus, Job from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy from selfprivacy_api.models.backup.snapshot import Snapshot @@ -207,42 +208,36 @@ class Backups: return snapshot ### Restoring + @staticmethod + def _ensure_active_restore_job(service, snapshot) -> Job: + job = get_restore_job(service) + if job is None: + job = add_restore_job(snapshot) + + Jobs.update(job, status=JobStatus.RUNNING) + return job @staticmethod - def restore_snapshot(snapshot: Snapshot): + def restore_snapshot( + snapshot: Snapshot, strategy=RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE + ): service = get_service_by_id(snapshot.service_name) - if service is None: raise ValueError( f"snapshot has a nonexistent service: {snapshot.service_name}" ) - job = get_restore_job(service) - if job is None: - job = add_restore_job(snapshot) + job = Backups._ensure_active_restore_job(service, snapshot) - Jobs.update( - job, - status=JobStatus.RUNNING, - ) try: Backups._assert_restorable(snapshot) - Backups._restore_service_from_snapshot( - service, - snapshot.id, - ) + Backups._restore_service_from_snapshot(service, snapshot.id) service.post_restore() except Exception as e: - Jobs.update( - job, - status=JobStatus.ERROR, - ) + Jobs.update(job, status=JobStatus.ERROR) raise e - Jobs.update( - job, - status=JobStatus.FINISHED, - ) + Jobs.update(job, status=JobStatus.FINISHED) @staticmethod def _assert_restorable(snapshot: Snapshot): diff --git a/selfprivacy_api/graphql/common_types/backup.py b/selfprivacy_api/graphql/common_types/backup.py new file mode 100644 index 0000000..992363b --- /dev/null +++ b/selfprivacy_api/graphql/common_types/backup.py @@ -0,0 +1,10 @@ +"""Backup""" +# pylint: disable=too-few-public-methods +import strawberry +from enum import Enum + + +@strawberry.enum +class RestoreStrategy(Enum): + INPLACE = "INPLACE" + DOWNLOAD_VERIFY_OVERWRITE = "DOWNLOAD_VERIFY_OVERWRITE" From af5edb695ffa9bf10c3481d971b8026bfa8aec89 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Jul 2023 11:54:48 +0000 Subject: 
[PATCH 215/537] feature(backups): implement inplace restore strategy --- selfprivacy_api/backup/__init__.py | 34 +++++++++++++++---- selfprivacy_api/backup/backuppers/__init__.py | 2 +- 2 files changed, 29 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index c3deee0..ddfd6be 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -209,14 +209,28 @@ class Backups: ### Restoring @staticmethod - def _ensure_active_restore_job(service, snapshot) -> Job: + def _ensure_queued_restore_job(service, snapshot) -> Job: job = get_restore_job(service) if job is None: job = add_restore_job(snapshot) - Jobs.update(job, status=JobStatus.RUNNING) + Jobs.update(job, status=JobStatus.CREATED) return job + @staticmethod + def _inplace_restore(service: Service, snapshot: Snapshot, job: Job): + failsafe_snapshot = Backups.back_up(service) + + Jobs.update(job, status=JobStatus.RUNNING) + try: + Backups._restore_service_from_snapshot(service, snapshot.id, verify=False) + except Exception as e: + Backups._restore_service_from_snapshot( + service, failsafe_snapshot.id, verify=False + ) + raise e + Backups.forget_snapshot(failsafe_snapshot) + @staticmethod def restore_snapshot( snapshot: Snapshot, strategy=RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE @@ -226,13 +240,21 @@ class Backups: raise ValueError( f"snapshot has a nonexistent service: {snapshot.service_name}" ) - - job = Backups._ensure_active_restore_job(service, snapshot) + job = Backups._ensure_queued_restore_job(service, snapshot) try: Backups._assert_restorable(snapshot) - Backups._restore_service_from_snapshot(service, snapshot.id) + + if strategy == RestoreStrategy.INPLACE: + Backups._inplace_restore(service, snapshot, job) + else: # verify_before_download is our default + Jobs.update(job, status=JobStatus.RUNNING) + Backups._restore_service_from_snapshot( + service, snapshot.id, verify=True + ) + service.post_restore() + except Exception as e: Jobs.update(job, status=JobStatus.ERROR) raise e @@ -256,7 +278,7 @@ class Backups: ) @staticmethod - def _restore_service_from_snapshot(service: Service, snapshot_id: str): + def _restore_service_from_snapshot(service: Service, snapshot_id: str, verify=True): folders = service.get_folders() Backups.provider().backupper.restore_from_backup( diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 335cdfd..24eb108 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -30,7 +30,7 @@ class AbstractBackupper(ABC): raise NotImplementedError @abstractmethod - def restore_from_backup(self, snapshot_id: str, folders: List[str]): + def restore_from_backup(self, snapshot_id: str, folders: List[str], verify=True): """Restore a target folder using a snapshot""" raise NotImplementedError From 9075afd38a819a67b563a83ba40e5fa5e466652b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Jul 2023 12:49:52 +0000 Subject: [PATCH 216/537] test(backups): test out that pre-restore backup plays nice with jobs --- selfprivacy_api/backup/jobs.py | 15 +++++++++------ selfprivacy_api/backup/tasks.py | 9 +++++++-- tests/test_graphql/test_backup.py | 16 ++++++++++++++-- 3 files changed, 30 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py index 38e9ad1..ab4eaca 100644 --- a/selfprivacy_api/backup/jobs.py +++ b/selfprivacy_api/backup/jobs.py @@ -29,14 +29,17 @@ def 
get_jobs_by_service(service: Service) -> List[Job]: return result -def is_something_queued_for(service: Service) -> bool: - return len(get_jobs_by_service(service)) != 0 +def is_something_running_for(service: Service) -> bool: + running_jobs = [ + job for job in get_jobs_by_service(service) if job.status == JobStatus.RUNNING + ] + return len(running_jobs) != 0 def add_backup_job(service: Service) -> Job: - if is_something_queued_for(service): + if is_something_running_for(service): message = ( - f"Cannot start a backup of {service.get_id()}, another operation is queued: " + f"Cannot start a backup of {service.get_id()}, another operation is running: " + get_jobs_by_service(service)[0].type_id ) raise ValueError(message) @@ -53,9 +56,9 @@ def add_restore_job(snapshot: Snapshot) -> Job: service = get_service_by_id(snapshot.service_name) if service is None: raise ValueError(f"no such service: {snapshot.service_name}") - if is_something_queued_for(service): + if is_something_running_for(service): message = ( - f"Cannot start a restore of {service.get_id()}, another operation is queued: " + f"Cannot start a restore of {service.get_id()}, another operation is running: " + get_jobs_by_service(service)[0].type_id ) raise ValueError(message) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index bd3925d..ac8f2e2 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -1,5 +1,7 @@ from datetime import datetime +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy + from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey from selfprivacy_api.services import get_service_by_id @@ -28,8 +30,11 @@ def start_backup(service: Service) -> bool: @huey.task() -def restore_snapshot(snapshot: Snapshot) -> bool: - Backups.restore_snapshot(snapshot) +def restore_snapshot( + snapshot: Snapshot, + strategy: RestoreStrategy = RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE, +) -> bool: + Backups.restore_snapshot(snapshot, strategy) return True diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 928c1b7..e54be85 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -13,6 +13,7 @@ from selfprivacy_api.services import Service from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.models.backup.snapshot import Snapshot @@ -360,7 +361,15 @@ def test_backup_larger_file(backups, dummy_service): remove(dir) -def test_restore_snapshot_task(backups, dummy_service): +@pytest.fixture(params=["verify", "inplace"]) +def restore_strategy(request) -> RestoreStrategy: + if request.param == "verify": + return RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE + else: + return RestoreStrategy.INPLACE + + +def test_restore_snapshot_task(backups, dummy_service, restore_strategy): Backups.back_up(dummy_service) snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 @@ -375,7 +384,7 @@ def test_restore_snapshot_task(backups, dummy_service): for p in paths_to_nuke: remove(p) - handle = restore_snapshot(snaps[0]) + handle = restore_snapshot(snaps[0], restore_strategy) handle(blocking=True) for p, content in zip(paths_to_nuke, contents): @@ -383,6 +392,9 @@ def 
test_restore_snapshot_task(backups, dummy_service): with open(p, "r") as file: assert file.read() == content + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + def test_autobackup_enable_service(backups, dummy_service): assert not Backups.is_autobackup_enabled(dummy_service) From 2743441e1e25773b1cc076ec7163d48470b163be Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Jul 2023 13:14:30 +0000 Subject: [PATCH 217/537] fix(backups): actually mount if asked for an inplace restore --- selfprivacy_api/backup/backuppers/restic_backupper.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 7f16a91..565a084 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -219,9 +219,16 @@ class ResticBackupper(AbstractBackupper): raise ValueError("cannot restore without knowing where to!") with tempfile.TemporaryDirectory() as dir: - self.do_restore(snapshot_id, target=dir, verify=verify) + if verify: + self.do_restore(snapshot_id, target=dir, verify=verify) + snapshot_root = dir + else: # attempting inplace restore via mount + sync + self.mount_repo(dir) + snapshot_root = join(dir, "ids", snapshot_id) + + assert snapshot_root is not None for folder in folders: - src = join(dir, folder.strip("/")) + src = join(snapshot_root, folder.strip("/")) if not exists(src): raise ValueError( f"there is no such path: {src}. We tried to find {folder}" From 1cefaefa3bbb3c23cbc94ad7f18e57977641caba Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Jul 2023 13:24:05 +0000 Subject: [PATCH 218/537] feature(backups): expose restore strategies to the API --- selfprivacy_api/graphql/mutations/backup_mutations.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index adc3873..5c8163c 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -13,6 +13,7 @@ from selfprivacy_api.graphql.queries.backup import BackupConfiguration from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.graphql.common_types.jobs import job_to_api_job +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_all_services, get_service_by_id @@ -115,7 +116,11 @@ class BackupMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def restore_backup(self, snapshot_id: str) -> GenericJobMutationReturn: + def restore_backup( + self, + snapshot_id: str, + strategy: RestoreStrategy = RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE, + ) -> GenericJobMutationReturn: """Restore backup""" snap = Backups.get_snapshot_by_id(snapshot_id) if snap is None: @@ -145,7 +150,7 @@ class BackupMutations: job=None, ) - restore_snapshot(snap) + restore_snapshot(snap, strategy) return GenericJobMutationReturn( success=True, From 6523105d89c3bbc60aee8dba6245ee9d749086cc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 10 Jul 2023 10:33:01 +0000 Subject: [PATCH 219/537] feature(utils): a hopefully reusable waitloop --- selfprivacy_api/utils/waitloop.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 
selfprivacy_api/utils/waitloop.py diff --git a/selfprivacy_api/utils/waitloop.py b/selfprivacy_api/utils/waitloop.py new file mode 100644 index 0000000..a375b8e --- /dev/null +++ b/selfprivacy_api/utils/waitloop.py @@ -0,0 +1,15 @@ +from time import sleep +from typing import Callable +from typing import Optional + +def wait_until_true(readiness_checker: Callable[[],bool],*,interval: float =0.1, timeout_sec: Optional[float] = None): + elapsed = 0.0 + if timeout_sec is None: + timeout_sec = 10e16 + while not readiness_checker or elapsed > timeout_sec: + sleep(interval) + elapsed += interval + if elapsed > timeout_sec: + raise TimeoutError() + + From 169e9ad57d9396220af205617019d3e561622935 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 10 Jul 2023 17:03:10 +0000 Subject: [PATCH 220/537] test(backups): simulating async service start n stop --- .../services/test_service/__init__.py | 89 +++++++++++++------ 1 file changed, 64 insertions(+), 25 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index af527a0..da4960a 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -1,7 +1,12 @@ """Class representing Bitwarden service""" import base64 import typing +import subprocess + from typing import List +from os import path + +# from enum import Enum from selfprivacy_api.jobs import Job from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus @@ -11,13 +16,24 @@ import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.test_service.icon import BITWARDEN_ICON +DEFAULT_DELAY = 0 + class DummyService(Service): """A test service""" + folders: List[str] = [] + def __init_subclass__(cls, folders: List[str]): cls.folders = folders + def __init__(self): + super().__init__() + dir = self.folders[0] + status_file = path.join(dir, "service_status") + with open(status_file, "w") as file: + file.write(ServiceStatus.ACTIVE.value) + @staticmethod def get_id() -> str: """Return service id.""" @@ -61,38 +77,61 @@ class DummyService(Service): def is_enabled() -> bool: return True - @staticmethod - def get_status() -> ServiceStatus: - """ - Return Bitwarden status from systemd. - Use command return code to determine status. + @classmethod + def status_file(cls) -> str: + dir = cls.folders[0] + return path.join(dir, "service_status") - Return code 0 means service is running. - Return code 1 or 2 means service is in error stat. - Return code 3 means service is stopped. - Return code 4 means service is off. 
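A minimal usage sketch for the wait_until_true helper introduced above (assuming the corrected loop semantics from the follow-up patch: poll until the checker returns True, raising TimeoutError if timeout_sec elapses first); the background thread here merely stands in for a slow external state change:

```
import threading
from time import sleep

from selfprivacy_api.utils.waitloop import wait_until_true

ready = threading.Event()
threading.Thread(target=lambda: (sleep(0.2), ready.set())).start()

# polls every 0.1 seconds; raises TimeoutError if 5 seconds pass without readiness
wait_until_true(lambda: ready.is_set(), interval=0.1, timeout_sec=5.0)
```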
- """ - return ServiceStatus.ACTIVE + @classmethod + def set_status(cls, status: ServiceStatus): + with open(cls.status_file(), "w") as file: + status_string = file.write(status.value) - @staticmethod - def enable(): + @classmethod + def get_status(cls) -> ServiceStatus: + with open(cls.status_file(), "r") as file: + status_string = file.read().strip() + return ServiceStatus[status_string] + + @classmethod + def change_status_with_async_delay( + cls, new_status: ServiceStatus, delay_sec: float + ): + """simulating a delay on systemd side""" + dir = cls.folders[0] + status_file = path.join(dir, "service_status") + + command = [ + "bash", + "-c", + f" sleep {delay_sec} && echo {new_status.value} > {status_file}", + ] + handle = subprocess.Popen(command) + if delay_sec == 0: + handle.communicate() + + @classmethod + def enable(cls): pass - @staticmethod - def disable(): + @classmethod + def disable(cls, delay): pass - @staticmethod - def stop(): - pass + @classmethod + def stop(cls, delay=DEFAULT_DELAY): + cls.set_status(ServiceStatus.DEACTIVATING) + cls.change_status_with_async_delay(ServiceStatus.INACTIVE, delay) - @staticmethod - def start(): - pass + @classmethod + def start(cls, delay=DEFAULT_DELAY): + cls.set_status(ServiceStatus.ACTIVATING) + cls.change_status_with_async_delay(ServiceStatus.ACTIVE, delay) - @staticmethod - def restart(): - pass + @classmethod + def restart(cls, delay=DEFAULT_DELAY): + cls.set_status(ServiceStatus.RELOADING) # is a correct one? + cls.change_status_with_async_delay(ServiceStatus.ACTIVE, delay) @staticmethod def get_configuration(): @@ -112,7 +151,7 @@ class DummyService(Service): return storage_usage @staticmethod - def get_drive(cls) -> str: + def get_drive() -> str: return "sda1" @classmethod From e7a67005227a7c1a9d74b759460f7ca21e4512be Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 12:10:40 +0000 Subject: [PATCH 221/537] test(backups): test async service start n stop simulation --- selfprivacy_api/utils/waitloop.py | 13 +++++++++---- tests/test_graphql/test_backup.py | 10 +++++----- tests/test_services.py | 19 ++++++++++++++++++- 3 files changed, 32 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/utils/waitloop.py b/selfprivacy_api/utils/waitloop.py index a375b8e..9f71a37 100644 --- a/selfprivacy_api/utils/waitloop.py +++ b/selfprivacy_api/utils/waitloop.py @@ -2,14 +2,19 @@ from time import sleep from typing import Callable from typing import Optional -def wait_until_true(readiness_checker: Callable[[],bool],*,interval: float =0.1, timeout_sec: Optional[float] = None): + +def wait_until_true( + readiness_checker: Callable[[], bool], + *, + interval: float = 0.1, + timeout_sec: Optional[float] = None +): elapsed = 0.0 if timeout_sec is None: timeout_sec = 10e16 - while not readiness_checker or elapsed > timeout_sec: + + while (not readiness_checker()) and elapsed < timeout_sec: sleep(interval) elapsed += interval if elapsed > timeout_sec: raise TimeoutError() - - diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index e54be85..573480c 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -52,7 +52,7 @@ def backups_backblaze(generic_userdata): @pytest.fixture() -def raw_dummy_service(tmpdir, backups): +def raw_dummy_service(tmpdir): dirnames = ["test_service", "also_test_service"] service_dirs = [] for d in dirnames: @@ -578,13 +578,13 @@ def test_services_to_back_up(backups, dummy_service): def test_sync(dummy_service): src = 
dummy_service.get_folders()[0] dst = dummy_service.get_folders()[1] - old_files_src = listdir(src) - old_files_dst = listdir(dst) + old_files_src = set(listdir(src)) + old_files_dst = set(listdir(dst)) assert old_files_src != old_files_dst sync(src, dst) - new_files_src = listdir(src) - new_files_dst = listdir(dst) + new_files_src = set(listdir(src)) + new_files_dst = set(listdir(dst)) assert new_files_src == old_files_src assert new_files_dst == new_files_src diff --git a/tests/test_services.py b/tests/test_services.py index 5816140..4d4c8f4 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -9,7 +9,10 @@ from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api.services.generic_service_mover import FolderMoveNames from selfprivacy_api.services.test_service import DummyService -from selfprivacy_api.services.service import Service +from selfprivacy_api.services.service import Service, ServiceStatus +from selfprivacy_api.utils.waitloop import wait_until_true + +from tests.test_graphql.test_backup import raw_dummy_service def test_unimplemented_folders_raises(): @@ -25,6 +28,20 @@ def test_unimplemented_folders_raises(): assert owned_folders is not None +def test_delayed_start_stop(raw_dummy_service): + dummy = raw_dummy_service + + dummy.stop(delay=0.3) + assert dummy.get_status() == ServiceStatus.DEACTIVATING + wait_until_true(lambda: dummy.get_status() == ServiceStatus.INACTIVE) + assert dummy.get_status() == ServiceStatus.INACTIVE + + dummy.start(delay=0.3) + assert dummy.get_status() == ServiceStatus.ACTIVATING + wait_until_true(lambda: dummy.get_status() == ServiceStatus.ACTIVE) + assert dummy.get_status() == ServiceStatus.ACTIVE + + def test_owned_folders_from_not_owned(): assert Bitwarden.get_owned_folders() == [ OwnedPath( From 713296c5206da5c9287ff4d6b7507b6baa9f71c7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 12:27:55 +0000 Subject: [PATCH 222/537] test(backups): make delay settable per dummyservice --- .../services/test_service/__init__.py | 17 +++++++++++------ tests/test_services.py | 5 +++-- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index da4960a..07b460b 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -23,6 +23,7 @@ class DummyService(Service): """A test service""" folders: List[str] = [] + startstop_delay = 0 def __init_subclass__(cls, folders: List[str]): cls.folders = folders @@ -119,19 +120,23 @@ class DummyService(Service): pass @classmethod - def stop(cls, delay=DEFAULT_DELAY): + def set_delay(cls, new_delay): + cls.startstop_delay = new_delay + + @classmethod + def stop(cls): cls.set_status(ServiceStatus.DEACTIVATING) - cls.change_status_with_async_delay(ServiceStatus.INACTIVE, delay) + cls.change_status_with_async_delay(ServiceStatus.INACTIVE, cls.startstop_delay) @classmethod - def start(cls, delay=DEFAULT_DELAY): + def start(cls): cls.set_status(ServiceStatus.ACTIVATING) - cls.change_status_with_async_delay(ServiceStatus.ACTIVE, delay) + cls.change_status_with_async_delay(ServiceStatus.ACTIVE, cls.startstop_delay) @classmethod - def restart(cls, delay=DEFAULT_DELAY): + def restart(cls): cls.set_status(ServiceStatus.RELOADING) # is a correct one? 
- cls.change_status_with_async_delay(ServiceStatus.ACTIVE, delay) + cls.change_status_with_async_delay(ServiceStatus.ACTIVE, cls.startstop_delay) @staticmethod def get_configuration(): diff --git a/tests/test_services.py b/tests/test_services.py index 4d4c8f4..12889c3 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -30,13 +30,14 @@ def test_unimplemented_folders_raises(): def test_delayed_start_stop(raw_dummy_service): dummy = raw_dummy_service + dummy.set_delay(0.3) - dummy.stop(delay=0.3) + dummy.stop() assert dummy.get_status() == ServiceStatus.DEACTIVATING wait_until_true(lambda: dummy.get_status() == ServiceStatus.INACTIVE) assert dummy.get_status() == ServiceStatus.INACTIVE - dummy.start(delay=0.3) + dummy.start() assert dummy.get_status() == ServiceStatus.ACTIVATING wait_until_true(lambda: dummy.get_status() == ServiceStatus.ACTIVE) assert dummy.get_status() == ServiceStatus.ACTIVE From de8ef744ebbeb78961306c84ed87c5e53630dc98 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 15:02:45 +0000 Subject: [PATCH 223/537] refactor(backups): make a StoppedService context manager --- selfprivacy_api/services/service.py | 30 +++++++++++++++++++++++++++++ tests/test_services.py | 15 ++++++++++++++- 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 286fab7..e2c7c01 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -10,6 +10,7 @@ from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.owned_path import OwnedPath +from selfprivacy_api.utils.waitloop import wait_until_true class ServiceStatus(Enum): @@ -245,3 +246,32 @@ class Service(ABC): def post_restore(self): pass + + +class StoppedService: + """ + A context manager that stops the service if needed and reactivates it + after you are done if it was active + + Example: + ``` + assert service.get_status() == ServiceStatus.ACTIVE + with StoppedService(service) [as stopped_service]: + assert service.get_status() == ServiceStatus.INACTIVE + ``` + """ + def __init__(self, service: Service): + self.service = service + self.original_status = service.get_status() + + def __enter__(self) -> Service: + self.original_status = self.service.get_status() + if self.original_status != ServiceStatus.INACTIVE: + self.service.stop() + wait_until_true(lambda: self.service.get_status() == ServiceStatus.INACTIVE) + return self.service + + def __exit__(self, type, value, traceback): + if self.original_status in [ServiceStatus.ACTIVATING, ServiceStatus.ACTIVE]: + self.service.start() + wait_until_true(lambda: self.service.get_status() == ServiceStatus.ACTIVE) diff --git a/tests/test_services.py b/tests/test_services.py index 12889c3..b83a7f2 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -9,7 +9,7 @@ from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api.services.generic_service_mover import FolderMoveNames from selfprivacy_api.services.test_service import DummyService -from selfprivacy_api.services.service import Service, ServiceStatus +from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService from selfprivacy_api.utils.waitloop import wait_until_true from tests.test_graphql.test_backup import raw_dummy_service @@ -28,6 +28,19 @@ def test_unimplemented_folders_raises(): assert owned_folders is not None +def 
test_service_stopper(raw_dummy_service): + dummy: Service = raw_dummy_service + dummy.set_delay(0.3) + + assert dummy.get_status() == ServiceStatus.ACTIVE + + with StoppedService(dummy) as stopped_dummy: + assert stopped_dummy.get_status() == ServiceStatus.INACTIVE + assert dummy.get_status() == ServiceStatus.INACTIVE + + assert dummy.get_status() == ServiceStatus.ACTIVE + + def test_delayed_start_stop(raw_dummy_service): dummy = raw_dummy_service dummy.set_delay(0.3) From 326e3d3b0c9324499ba78b005903b59eb1d19829 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 16:00:36 +0000 Subject: [PATCH 224/537] test(backups): do not store the status file in backed up folders --- selfprivacy_api/services/test_service/__init__.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 07b460b..a0fb02a 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -30,8 +30,7 @@ class DummyService(Service): def __init__(self): super().__init__() - dir = self.folders[0] - status_file = path.join(dir, "service_status") + status_file = self.status_file() with open(status_file, "w") as file: file.write(ServiceStatus.ACTIVE.value) @@ -81,7 +80,8 @@ class DummyService(Service): @classmethod def status_file(cls) -> str: dir = cls.folders[0] - return path.join(dir, "service_status") + # we do not REALLY want to store our state in our declared folders + return path.join(dir, "..", "service_status") @classmethod def set_status(cls, status: ServiceStatus): @@ -99,8 +99,7 @@ class DummyService(Service): cls, new_status: ServiceStatus, delay_sec: float ): """simulating a delay on systemd side""" - dir = cls.folders[0] - status_file = path.join(dir, "service_status") + status_file = cls.status_file() command = [ "bash", From 7af76005992807a4a23952fbc200346c8a2af6da Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 16:43:26 +0000 Subject: [PATCH 225/537] feature(backups): stop services before backups --- selfprivacy_api/backup/__init__.py | 38 +++++++++++++++++++++++------- tests/test_graphql/test_backup.py | 12 +++++++++- 2 files changed, 41 insertions(+), 9 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index ddfd6be..3bbd721 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -6,7 +6,7 @@ from typing import List, Optional from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services import get_service_by_id -from selfprivacy_api.services.service import Service +from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService from selfprivacy_api.jobs import Jobs, JobStatus, Job @@ -35,6 +35,18 @@ DEFAULT_JSON_PROVIDER = { } +class NotDeadError(AssertionError): + def __init__(self, service: Service): + self.service_name = service.get_id() + + def __str__(self): + return f""" + Service {self.service_name} should be either stopped or dead from an error before we back up. + Normally, this error is unreachable because we do try to ensure this. + Apparently, not this time.
+ """ + + class Backups: """A stateless controller class for backups""" @@ -193,13 +205,15 @@ class Backups: Jobs.update(job, status=JobStatus.RUNNING) try: - service.pre_backup() - snapshot = Backups.provider().backupper.start_backup( - folders, - tag, - ) - Backups._store_last_snapshot(tag, snapshot) - service.post_restore() + with StoppedService(service): + Backups.assert_dead(service) # to be extra sure + service.pre_backup() + snapshot = Backups.provider().backupper.start_backup( + folders, + tag, + ) + Backups._store_last_snapshot(tag, snapshot) + service.post_restore() except Exception as e: Jobs.update(job, status=JobStatus.ERROR) raise e @@ -465,3 +479,11 @@ class Backups: repo_id="", ) Storage.store_provider(provider) + + @staticmethod + def assert_dead(service: Service): + # if we backup the service that is failing to restore it to the + # previous snapshot, its status can be FAILED + # And obviously restoring a failed service is the moun route + if service.get_status() not in [ServiceStatus.INACTIVE, ServiceStatus.FAILED]: + raise NotDeadError(service) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 573480c..319fb53 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -303,7 +303,17 @@ def test_snapshots_by_id(backups, dummy_service): assert Backups.get_snapshot_by_id(snap2.id).id == snap2.id -def test_backup_service_task(backups, dummy_service): +@pytest.fixture(params=["instant_server_stop", "delayed_server_stop"]) +def simulated_service_stopping_delay(request) -> float: + if request.param == "instant_server_stop": + return 0.0 + else: + return 0.3 + + +def test_backup_service_task(backups, dummy_service, simulated_service_stopping_delay): + dummy_service.set_delay(simulated_service_stopping_delay) + handle = start_backup(dummy_service) handle(blocking=True) From e43478d437b1aa747a3c3a347f6e8419cca35b2b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 16:53:49 +0000 Subject: [PATCH 226/537] feature(backups): stop services before restores --- selfprivacy_api/backup/__init__.py | 19 ++++++++++--------- tests/test_graphql/test_backup.py | 6 +++++- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 3bbd721..0f93667 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -258,16 +258,17 @@ class Backups: try: Backups._assert_restorable(snapshot) + with StoppedService(service): + Backups.assert_dead(service) + if strategy == RestoreStrategy.INPLACE: + Backups._inplace_restore(service, snapshot, job) + else: # verify_before_download is our default + Jobs.update(job, status=JobStatus.RUNNING) + Backups._restore_service_from_snapshot( + service, snapshot.id, verify=True + ) - if strategy == RestoreStrategy.INPLACE: - Backups._inplace_restore(service, snapshot, job) - else: # verify_before_download is our default - Jobs.update(job, status=JobStatus.RUNNING) - Backups._restore_service_from_snapshot( - service, snapshot.id, verify=True - ) - - service.post_restore() + service.post_restore() except Exception as e: Jobs.update(job, status=JobStatus.ERROR) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 319fb53..3709440 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -379,7 +379,11 @@ def restore_strategy(request) -> RestoreStrategy: return RestoreStrategy.INPLACE -def 
test_restore_snapshot_task(backups, dummy_service, restore_strategy): +def test_restore_snapshot_task( + backups, dummy_service, restore_strategy, simulated_service_stopping_delay +): + dummy_service.set_delay(simulated_service_stopping_delay) + Backups.back_up(dummy_service) snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 From 92cfd00f9376e1d4abbf421777b0afc39431afb7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 17:02:54 +0000 Subject: [PATCH 227/537] feature(servers): set default timeout of server operations to 10 min --- selfprivacy_api/services/service.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index e2c7c01..c1cc5be 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -12,6 +12,8 @@ from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api.utils.waitloop import wait_until_true +DEFAULT_START_STOP_TIMEOUT = 10 * 60 + class ServiceStatus(Enum): """Enum for service status""" @@ -250,16 +252,17 @@ class Service(ABC): class StoppedService: """ - A context manager that stops the service if needed and reactivates it - after you are done if it was active + A context manager that stops the service if needed and reactivates it + after you are done if it was active - Example: - ``` - assert service.get_status() == ServiceStatus.ACTIVE - with StoppedService(service) [as stopped_service]: - assert service.get_status() == ServiceStatus.INACTIVE - ``` + Example: + ``` + assert service.get_status() == ServiceStatus.ACTIVE + with StoppedService(service) [as stopped_service]: + assert service.get_status() == ServiceStatus.INACTIVE + ``` """ + def __init__(self, service: Service): self.service = service self.original_status = service.get_status() @@ -268,10 +271,16 @@ class StoppedService: self.original_status = self.service.get_status() if self.original_status != ServiceStatus.INACTIVE: self.service.stop() - wait_until_true(lambda: self.service.get_status() == ServiceStatus.INACTIVE) + wait_until_true( + lambda: self.service.get_status() == ServiceStatus.INACTIVE, + timeout_sec=DEFAULT_START_STOP_TIMEOUT, + ) return self.service def __exit__(self, type, value, traceback): if self.original_status in [ServiceStatus.ACTIVATING, ServiceStatus.ACTIVE]: self.service.start() - wait_until_true(lambda: self.service.get_status() == ServiceStatus.ACTIVE) + wait_until_true( + lambda: self.service.get_status() == ServiceStatus.ACTIVE, + timeout_sec=DEFAULT_START_STOP_TIMEOUT, + ) From 4aa87edf470140a8cc75491712beb4a4f3777e15 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Jul 2023 10:58:31 +0000 Subject: [PATCH 228/537] fix(servers): hopefully fix moving --- selfprivacy_api/services/generic_service_mover.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index e2b26f4..d858b93 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -35,13 +35,11 @@ class FolderMoveNames(BaseModel): return path.split("/")[-1] @staticmethod - def default_foldermoves(service: Service): - return ( - [ - FolderMoveNames.from_owned_path(folder) - for folder in service.get_owned_folders() - ], - ) + def default_foldermoves(service: 
Service) -> list[FolderMoveNames]: + return [ + FolderMoveNames.from_owned_path(folder) + for folder in service.get_owned_folders() + ] @huey.task() From 926d0c27c5cfc798896dc29edee54511a2d62ac0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Jul 2023 11:41:03 +0000 Subject: [PATCH 229/537] test(backup): test moving preventing backups --- tests/test_graphql/test_backup.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 3709440..02d3487 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -24,6 +24,7 @@ from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.backup.util import sync from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper +from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job from selfprivacy_api.backup.tasks import start_backup, restore_snapshot @@ -628,3 +629,19 @@ def test_mount_umount(backups, dummy_service, tmpdir): backupper.unmount_repo(mountpoint) # handle.terminate() assert len(listdir(mountpoint)) == 0 + + +def test_move_blocks_backups(backups, dummy_service, restore_strategy): + snap = Backups.back_up(dummy_service) + job = Jobs.add( + type_id=f"services.{dummy_service.get_id()}.move", + name="Move Dummy", + description=f"Moving Dummy data to the Rainbow Land", + status=JobStatus.RUNNING, + ) + + with pytest.raises(ValueError): + Backups.back_up(dummy_service) + + with pytest.raises(ValueError): + Backups.restore_snapshot(snap, restore_strategy) From 9f19c677d09baef5cd63422c51b23a118996a499 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Jul 2023 12:34:45 +0000 Subject: [PATCH 230/537] feature(backup):calculate needed space for inplace restoration --- selfprivacy_api/backup/__init__.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 0f93667..56150db 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,6 +1,6 @@ from datetime import datetime, timedelta from operator import add -from os import statvfs +from os import statvfs, path, walk from typing import List, Optional from selfprivacy_api.utils import ReadUserData, WriteUserData @@ -277,14 +277,28 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) @staticmethod - def _assert_restorable(snapshot: Snapshot): + def _assert_restorable( + snapshot: Snapshot, strategy=RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE + ): service = get_service_by_id(snapshot.service_name) if service is None: raise ValueError( f"snapshot has a nonexistent service: {snapshot.service_name}" ) - needed_space = Backups.snapshot_restored_size(snapshot.id) + restored_snap_size = Backups.snapshot_restored_size(snapshot.id) + + if strategy == RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE: + needed_space = restored_snap_size + elif strategy == RestoreStrategy.INPLACE: + needed_space = restored_snap_size - service.get_storage_usage() + else: + raise NotImplementedError( + """ + We do not know if there is enough space for restoration because there is some novel restore strategy used! 
+ This is a developer's fault, open an issue please + """ ) available_space = Backups.space_usable_for_service(service) if needed_space > available_space: raise ValueError( @@ -466,6 +480,7 @@ if folders == []: raise ValueError("unallocated service", service.get_id()) + # We assume all folders of one service live on the same volume fs_info = statvfs(folders[0]) usable_bytes = fs_info.f_frsize * fs_info.f_bavail return usable_bytes From 70cf0306a9b93e447a7082529d8ab01f7964c5fb Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 18 Jan 2023 09:33:14 +0000 Subject: [PATCH 231/537] refactor(backup): delete unused import --- selfprivacy_api/restic_controller/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py index b4efba2..803d469 100644 --- a/selfprivacy_api/restic_controller/__init__.py +++ b/selfprivacy_api/restic_controller/__init__.py @@ -3,7 +3,6 @@ from datetime import datetime import json import subprocess import os -from threading import Lock from enum import Enum import portalocker from selfprivacy_api.utils import ReadUserData From 8eab26d55228712801f774783d792251df225061 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 18 Jan 2023 09:40:04 +0000 Subject: [PATCH 232/537] refactor(backup): extract rclone args --- selfprivacy_api/restic_controller/__init__.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py index 803d469..a5a6634 100644 --- a/selfprivacy_api/restic_controller/__init__.py +++ b/selfprivacy_api/restic_controller/__init__.py @@ -90,7 +90,7 @@ class ResticController(metaclass=SingletonMetaclass): backup_listing_command = [ "restic", "-o", - "rclone.args=serve restic --stdio", + self.rclone_args(), "-r", f"rclone:backblaze:{self._repository_name}/sfbackup", "snapshots", "--json", ] @@ -122,6 +122,9 @@ class ResticController(metaclass=SingletonMetaclass): self.error_message = snapshots_list return + def rclone_args(self): + return "rclone.args=serve restic --stdio" + def initialize_repository(self): """ Initialize repository with restic """ initialize_repository_command = [ "restic", "-o", - "rclone.args=serve restic --stdio", + self.rclone_args(), "-r", f"rclone:backblaze:{self._repository_name}/sfbackup", "init", ] with subprocess.Popen( @@ -158,7 +161,7 @@ class ResticController(metaclass=SingletonMetaclass): backup_command = [ "restic", "-o", - "rclone.args=serve restic --stdio", + self.rclone_args(), "-r", f"rclone:backblaze:{self._repository_name}/sfbackup", "--verbose", "--json", "backup", @@ -227,7 +230,7 @@ class ResticController(metaclass=SingletonMetaclass): backup_restoration_command = [ "restic", "-o", - "rclone.args=serve restic --stdio", + self.rclone_args(), "-r", f"rclone:backblaze:{self._repository_name}/sfbackup", "restore", From 6bf5ee4b64d958ccb46cbb2c6617923011a1f14e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 18 Jan 2023 09:49:02 +0000 Subject: [PATCH 233/537] refactor(backup): extract restic repo --- selfprivacy_api/restic_controller/__init__.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py index a5a6634..592b651 100644 --- a/selfprivacy_api/restic_controller/__init__.py +++ b/selfprivacy_api/restic_controller/__init__.py @@ -92,7 +92,7 @@ class
ResticController(metaclass=SingletonMetaclass): "-o", self.rclone_args(), "-r", - f"rclone:backblaze:{self._repository_name}/sfbackup", + self.restic_repo(), "snapshots", "--json", ] @@ -122,6 +122,9 @@ class ResticController(metaclass=SingletonMetaclass): self.error_message = snapshots_list return + def restic_repo(self): + return f"rclone:backblaze:{self._repository_name}/sfbackup" + def rclone_args(self): return "rclone.args=serve restic --stdio" @@ -134,7 +137,7 @@ class ResticController(metaclass=SingletonMetaclass): "-o", self.rclone_args(), "-r", - f"rclone:backblaze:{self._repository_name}/sfbackup", + self.restic_repo(), "init", ] with subprocess.Popen( @@ -163,7 +166,7 @@ class ResticController(metaclass=SingletonMetaclass): "-o", self.rclone_args(), "-r", - f"rclone:backblaze:{self._repository_name}/sfbackup", + self.restic_repo(), "--verbose", "--json", "backup", @@ -232,7 +235,7 @@ class ResticController(metaclass=SingletonMetaclass): "-o", self.rclone_args(), "-r", - f"rclone:backblaze:{self._repository_name}/sfbackup", + self.restic_repo(), "restore", snapshot_id, "--target", From f65c0522b0c46eb669989780331b7b9cae9e7bf3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 18 Jan 2023 10:07:04 +0000 Subject: [PATCH 234/537] refactor(backup): pass key and account to exec --- selfprivacy_api/restic_controller/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py index 592b651..0f6ad61 100644 --- a/selfprivacy_api/restic_controller/__init__.py +++ b/selfprivacy_api/restic_controller/__init__.py @@ -126,7 +126,10 @@ class ResticController(metaclass=SingletonMetaclass): return f"rclone:backblaze:{self._repository_name}/sfbackup" def rclone_args(self): - return "rclone.args=serve restic --stdio" + return "rclone.args=serve restic --stdio" + self.backend_rclone_args() + + def backend_rclone_args(self): + return f"--b2-account {self._backblaze_account} --b2-key {self._backblaze_key}" def initialize_repository(self): """ From 7b7f7821857d81f174364e8dc94637209af0d4f4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 18 Jan 2023 12:43:45 +0000 Subject: [PATCH 235/537] refactor(backup): do not use config file --- selfprivacy_api/restic_controller/__init__.py | 25 +++---------------- 1 file changed, 3 insertions(+), 22 deletions(-) diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py index 0f6ad61..4ac84e8 100644 --- a/selfprivacy_api/restic_controller/__init__.py +++ b/selfprivacy_api/restic_controller/__init__.py @@ -4,7 +4,6 @@ import json import subprocess import os from enum import Enum -import portalocker from selfprivacy_api.utils import ReadUserData from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass @@ -50,7 +49,6 @@ class ResticController(metaclass=SingletonMetaclass): self.error_message = None self._initialized = True self.load_configuration() - self.write_rclone_config() self.load_snapshots() def load_configuration(self): @@ -64,25 +62,6 @@ class ResticController(metaclass=SingletonMetaclass): else: self.state = ResticStates.NO_KEY - def write_rclone_config(self): - """ - Open /root/.config/rclone/rclone.conf with portalocker - and write configuration in the following format: - [backblaze] - type = b2 - account = {self.backblaze_account} - key = {self.backblaze_key} - """ - with portalocker.Lock( - "/root/.config/rclone/rclone.conf", "w", timeout=None - ) as rclone_config: - 
rclone_config.write( - f"[backblaze]\n" - f"type = b2\n" - f"account = {self._backblaze_account}\n" - f"key = {self._backblaze_key}\n" - ) - def load_snapshots(self): """ Load list of snapshots from repository @@ -123,7 +102,9 @@ class ResticController(metaclass=SingletonMetaclass): return def restic_repo(self): - return f"rclone:backblaze:{self._repository_name}/sfbackup" + # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone + # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5 + return f"rclone::b2:{self._repository_name}/sfbackup" def rclone_args(self): return "rclone.args=serve restic --stdio" + self.backend_rclone_args() From a1071fd2c9b78d4d616673919b2201b9e3542375 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 23 Jan 2023 11:15:05 +0000 Subject: [PATCH 236/537] feature(backups): add backup structures and queries --- .../graphql/common_types/backup_snapshot.py | 9 +++++++++ selfprivacy_api/graphql/common_types/service.py | 6 ++++++ selfprivacy_api/graphql/queries/backup.py | 14 ++++++++++++++ 3 files changed, 29 insertions(+) create mode 100644 selfprivacy_api/graphql/common_types/backup_snapshot.py create mode 100644 selfprivacy_api/graphql/queries/backup.py diff --git a/selfprivacy_api/graphql/common_types/backup_snapshot.py b/selfprivacy_api/graphql/common_types/backup_snapshot.py new file mode 100644 index 0000000..3256e0c --- /dev/null +++ b/selfprivacy_api/graphql/common_types/backup_snapshot.py @@ -0,0 +1,9 @@ +import datetime +import strawberry + + +@strawberry.type +class SnapshotInfo: + id: str + service_name: str + created_at: datetime.datetime diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index c1246ca..61ed5af 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -1,7 +1,9 @@ from enum import Enum import typing import strawberry +import datetime from selfprivacy_api.graphql.common_types.dns import DnsRecord +from selfprivacy_api.graphql.common_types.backup_snapshot import SnapshotInfo from selfprivacy_api.services import get_service_by_id, get_services_by_location from selfprivacy_api.services import Service as ServiceInterface @@ -101,6 +103,10 @@ class Service: """Get storage usage for a service""" return get_storage_usage(self) + @strawberry.field + def backup_snapshots(self) -> typing.Optional[typing.List[SnapshotInfo]]: + return None + def service_to_graphql_service(service: ServiceInterface) -> Service: """Convert service to graphql service""" diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py new file mode 100644 index 0000000..ef61b10 --- /dev/null +++ b/selfprivacy_api/graphql/queries/backup.py @@ -0,0 +1,14 @@ +"""Backup""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from selfprivacy_api.graphql.common_types.backup_snapshot import SnapshotInfo + + +@strawberry.type +class Backup: + backend: str + + @strawberry.field + def get_backups(self) -> typing.List[SnapshotInfo]: + return [] From a3d58be0d5897f69786b7e1856cd01d6f555fe24 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 23 Jan 2023 12:08:17 +0000 Subject: [PATCH 237/537] feature(backups): placeholders for the modules of the new backup system --- selfprivacy_api/backup/__init__.py | 0 selfprivacy_api/backup/providers/__init__.py | 0 selfprivacy_api/backup/providers/backblaze.py | 0 
selfprivacy_api/backup/providers/provider.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 selfprivacy_api/backup/__init__.py create mode 100644 selfprivacy_api/backup/providers/__init__.py create mode 100644 selfprivacy_api/backup/providers/backblaze.py create mode 100644 selfprivacy_api/backup/providers/provider.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py new file mode 100644 index 0000000..e69de29 From 1e5fb67374c761fe05480800e43ecb09a09e6fd8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 23 Jan 2023 13:43:18 +0000 Subject: [PATCH 238/537] feature(backups): placeholders for the backupers and backup providers --- selfprivacy_api/backup/__init__.py | 5 +++++ selfprivacy_api/backup/providers/backblaze.py | 6 ++++++ selfprivacy_api/backup/providers/provider.py | 13 +++++++++++++ selfprivacy_api/backup/restic_backuper.py | 6 ++++++ 4 files changed, 30 insertions(+) create mode 100644 selfprivacy_api/backup/restic_backuper.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index e69de29..ff9bb2d 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -0,0 +1,5 @@ +from abc import ABC + +class AbstractBackuper(ABC): + def __init__(self): + pass \ No newline at end of file diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index e69de29..6dfa1a7 100644 --- a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -0,0 +1,6 @@ +from .provider import AbstractBackupProvider +from selfprivacy_api.backup.restic_backuper import ResticBackuper + + +class Backblaze(AbstractBackupProvider): + backuper = ResticBackuper() diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index e69de29..9bd8a60 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -0,0 +1,13 @@ +""" +An abstract class for BackBlaze, S3 etc. 
+It assumes that while some providers are supported via restic/rclone, others may +require different backends +""" +from abc import ABC +from selfprivacy_api.backup import AbstractBackuper + + +class AbstractBackupProvider(ABC): + @property + def backuper(self) -> AbstractBackuper: + raise NotImplementedError diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py new file mode 100644 index 0000000..f1775ff --- /dev/null +++ b/selfprivacy_api/backup/restic_backuper.py @@ -0,0 +1,6 @@ +from selfprivacy_api.backup import AbstractBackuper + + +class ResticBackuper(AbstractBackuper): + def __init__(self): + pass From 7d76b74dbc246916734cb4bf648b65fdb7306664 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 23 Jan 2023 14:21:43 +0000 Subject: [PATCH 239/537] feature(backups): copy cli logic to new restic backuper --- selfprivacy_api/backup/restic_backuper.py | 26 +++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index f1775ff..99a29ab 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -2,5 +2,27 @@ from selfprivacy_api.backup import AbstractBackuper class ResticBackuper(AbstractBackuper): - def __init__(self): - pass + def __init__(self, login_flag: str, key_flag: str, type: str): + self.login_flag = login_flag + self.key_flag = key_flag + self.type = type + + def restic_repo(self, repository_name: str) -> str: + # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone + # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5 + return f"rclone::{self.type}:{self._repository_name}/sfbackup" + + def rclone_args(self): + return "rclone.args=serve restic --stdio" + self.backend_rclone_args() + + def backend_rclone_args(self, account: str, key: str): + return f"{self.login_flag} {account} {self.key_flag} {key}" + + def restic_command(self, account: str, key: str, *args): + return [ + "restic", + "-o", + self.rclone_args(), + "-r", + self.restic_repo(account, key), + ].extend(args) From 9097ba02d784a4bc8992c94ec113aa396ba98432 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 1 Feb 2023 11:58:55 +0000 Subject: [PATCH 240/537] test(backup): provider class selection --- selfprivacy_api/backup/providers/__init__.py | 11 +++++++++++ selfprivacy_api/backup/providers/backblaze.py | 2 +- selfprivacy_api/backup/providers/provider.py | 4 ++++ tests/test_graphql/test_backup.py | 11 +++++++++++ 4 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 tests/test_graphql/test_backup.py diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py index e69de29..be09cb2 100644 --- a/selfprivacy_api/backup/providers/__init__.py +++ b/selfprivacy_api/backup/providers/__init__.py @@ -0,0 +1,11 @@ +from selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.backup.providers.provider import AbstractBackupProvider + +from selfprivacy_api.backup.providers.backblaze import Backblaze + +PROVIDER_MAPPING = { + BackupProvider.BACKBLAZE: Backblaze +} + +def get_provider(provider_type : BackupProvider) -> AbstractBackupProvider: + return PROVIDER_MAPPING[provider_type] diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index 6dfa1a7..e16e9d3 100644 --- 
a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -3,4 +3,4 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class Backblaze(AbstractBackupProvider): - backuper = ResticBackuper() + backuper = ResticBackuper("--b2-account", "--b2-key", "b2") diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index 9bd8a60..dd41e9a 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -11,3 +11,7 @@ class AbstractBackupProvider(ABC): @property def backuper(self) -> AbstractBackuper: raise NotImplementedError + + def __init__(self, login, key): + self.login = login + self.key = key diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py new file mode 100644 index 0000000..70b3ce7 --- /dev/null +++ b/tests/test_graphql/test_backup.py @@ -0,0 +1,11 @@ +import selfprivacy_api.backup.providers as providers +from selfprivacy_api.backup.providers import AbstractBackupProvider + +from selfprivacy_api.backup.providers.backblaze import Backblaze +from selfprivacy_api.graphql.queries.providers import BackupProvider + + +def test_select_backend(): + provider = providers.get_provider(BackupProvider.BACKBLAZE) + assert provider is not None + assert provider == Backblaze From 45ab9423b94189025204297e9077fb9b61f5b8e7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 3 Feb 2023 17:04:35 +0000 Subject: [PATCH 241/537] test(backup): dummy service --- .../services/test_service/__init__.py | 136 ++++++++++++++++++ .../services/test_service/bitwarden.svg | 3 + selfprivacy_api/services/test_service/icon.py | 5 + tests/test_graphql/test_backup.py | 14 ++ 4 files changed, 158 insertions(+) create mode 100644 selfprivacy_api/services/test_service/__init__.py create mode 100644 selfprivacy_api/services/test_service/bitwarden.svg create mode 100644 selfprivacy_api/services/test_service/icon.py diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py new file mode 100644 index 0000000..53fe0cf --- /dev/null +++ b/selfprivacy_api/services/test_service/__init__.py @@ -0,0 +1,136 @@ +"""Class representing Bitwarden service""" +import base64 +import typing + +from selfprivacy_api.jobs import Job +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, get_domain +from selfprivacy_api.utils.block_devices import BlockDevice +import selfprivacy_api.utils.network as network_utils + +from selfprivacy_api.services.test_service.icon import BITWARDEN_ICON + + +class DummyService(Service): + """A test service""" + + def __init__(self, location): + self.loccation = location + + @staticmethod + def get_id() -> str: + """Return service id.""" + return "testservice" + + @staticmethod + def get_display_name() -> str: + """Return service display name.""" + return "Test Service" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "A small service used for test purposes. Does nothing." 
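A short sketch of the provider wiring established by the provider-selection patch above (credentials are hypothetical placeholders): get_provider maps the GraphQL BackupProvider enum to a provider class, and each concrete provider carries a ResticBackuper preconfigured with its backend's rclone flags:

```
import selfprivacy_api.backup.providers as providers
from selfprivacy_api.backup.restic_backuper import ResticBackuper
from selfprivacy_api.graphql.queries.providers import BackupProvider

ProviderClass = providers.get_provider(BackupProvider.BACKBLAZE)
provider = ProviderClass(login="b2-account-id", key="b2-application-key")  # placeholders
assert isinstance(provider.backuper, ResticBackuper)
```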
+ + @staticmethod + def get_svg_icon() -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + # return "" + return base64.b64encode(BITWARDEN_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://password.{domain}" + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + return True + + @staticmethod + def get_status() -> ServiceStatus: + """ + Return Bitwarden status from systemd. + Use command return code to determine status. + + Return code 0 means service is running. + Return code 1 or 2 means service is in error stat. + Return code 3 means service is stopped. + Return code 4 means service is off. + """ + return 0 + + @staticmethod + def enable(): + pass + + @staticmethod + def disable(): + pass + + @staticmethod + def stop(): + pass + + @staticmethod + def start(): + pass + + @staticmethod + def restart(): + pass + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + return storage_usage + + @staticmethod + def get_location() -> str: + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("bitwarden", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + """Return list of DNS records for Bitwarden service.""" + return [ + ServiceDnsRecord( + type="A", + name="password", + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="password", + content=network_utils.get_ip6(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + pass diff --git a/selfprivacy_api/services/test_service/bitwarden.svg b/selfprivacy_api/services/test_service/bitwarden.svg new file mode 100644 index 0000000..ced270c --- /dev/null +++ b/selfprivacy_api/services/test_service/bitwarden.svg @@ -0,0 +1,3 @@ + + + diff --git a/selfprivacy_api/services/test_service/icon.py b/selfprivacy_api/services/test_service/icon.py new file mode 100644 index 0000000..f9280e0 --- /dev/null +++ b/selfprivacy_api/services/test_service/icon.py @@ -0,0 +1,5 @@ +BITWARDEN_ICON = """ + + + +""" diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 70b3ce7..b63097c 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -1,3 +1,7 @@ +import pytest + +from selfprivacy_api.services.test_service import DummyService + import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider @@ -5,7 +9,17 @@ from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.graphql.queries.providers import BackupProvider +@pytest.fixture() +def test_service(tmpdir): + return DummyService(tmpdir) + + def test_select_backend(): provider = providers.get_provider(BackupProvider.BACKBLAZE) assert provider is not None assert provider == Backblaze + + +def test_backup(test_service): + # temporarily incomplete + assert test_service is not None From e5a965ea2960228b484c61bfac7326b39d9e0c95 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 3 Feb 2023 18:03:13 
+0000 Subject: [PATCH 242/537] feat(backup): allow no auth --- selfprivacy_api/backup/restic_backuper.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 99a29ab..fd42791 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -15,8 +15,15 @@ class ResticBackuper(AbstractBackuper): def rclone_args(self): return "rclone.args=serve restic --stdio" + self.backend_rclone_args() - def backend_rclone_args(self, account: str, key: str): - return f"{self.login_flag} {account} {self.key_flag} {key}" + def backend_rclone_args(self, account: str, key: str) -> str: + acc_arg = "" + key_arg = "" + if account != "": + acc_arg = f"{self.login_flag} {account}" + if key != "": + key_arg = f"{self.key_flag} {key}" + + return f"{acc_arg} {key_arg}" def restic_command(self, account: str, key: str, *args): return [ From 37c18ead99399ad34784edbefd6ddf08f826fe31 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 3 Feb 2023 18:29:45 +0000 Subject: [PATCH 243/537] feat(backup): add in-memory backup --- selfprivacy_api/backup/providers/__init__.py | 7 +++++-- selfprivacy_api/graphql/queries/providers.py | 2 ++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py index be09cb2..66fb9e6 100644 --- a/selfprivacy_api/backup/providers/__init__.py +++ b/selfprivacy_api/backup/providers/__init__.py @@ -2,10 +2,13 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze +from selfprivacy_api.backup.providers.memory import InMemoryBackup PROVIDER_MAPPING = { - BackupProvider.BACKBLAZE: Backblaze + BackupProvider.BACKBLAZE: Backblaze, + BackupProvider.MEMORY: InMemoryBackup, } -def get_provider(provider_type : BackupProvider) -> AbstractBackupProvider: + +def get_provider(provider_type: BackupProvider) -> AbstractBackupProvider: return PROVIDER_MAPPING[provider_type] diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py index 1759d7b..ecc7f11 100644 --- a/selfprivacy_api/graphql/queries/providers.py +++ b/selfprivacy_api/graphql/queries/providers.py @@ -19,3 +19,5 @@ class ServerProvider(Enum): @strawberry.enum class BackupProvider(Enum): BACKBLAZE = "BACKBLAZE" + # for testing purposes, make sure not selectable in prod. 
+ MEMORY = "MEMORY" From fc7483a6f25b1e9c8fb4faa020dad184c8cef15d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 3 Feb 2023 18:49:24 +0000 Subject: [PATCH 244/537] test(backup): init an in-memory backup class --- tests/test_graphql/test_backup.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index b63097c..abd05c2 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -14,12 +14,22 @@ def test_service(tmpdir): return DummyService(tmpdir) +@pytest.fixture() +def memory_backup(): + ProviderClass = providers.get_provider(BackupProvider.MEMORY) + assert ProviderClass is not None + memory_provider = ProviderClass(login="", key="") + assert memory_provider is not None + return memory_provider + + def test_select_backend(): provider = providers.get_provider(BackupProvider.BACKBLAZE) assert provider is not None assert provider == Backblaze -def test_backup(test_service): +def test_backup_service(test_service, memory_backup): # temporarily incomplete assert test_service is not None + assert memory_backup is not None From 3f2c1e0593769c8116a6add9d657d792fe32eeed Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 3 Feb 2023 19:09:24 +0000 Subject: [PATCH 245/537] test(backup): make a testfile to backup --- tests/test_graphql/test_backup.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index abd05c2..264a9bf 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -1,4 +1,5 @@ import pytest +import os.path as path from selfprivacy_api.services.test_service import DummyService @@ -9,8 +10,14 @@ from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.graphql.queries.providers import BackupProvider +TESTFILE_BODY = "testytest!" 
+ + @pytest.fixture() def test_service(tmpdir): + testile_path = path.join(tmpdir, "testfile.txt") + with open(testile_path, "w") as file: + file.write(TESTFILE_BODY) return DummyService(tmpdir) From 86c99c0be8a46dc91feb33e2477c0163e59935d9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 3 Feb 2023 20:28:55 +0000 Subject: [PATCH 246/537] feat(backup): add backuping to restic backuper --- selfprivacy_api/backup/restic_backuper.py | 26 +++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index fd42791..dbfa0a9 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -1,3 +1,5 @@ +import subprocess + from selfprivacy_api.backup import AbstractBackuper @@ -6,6 +8,12 @@ class ResticBackuper(AbstractBackuper): self.login_flag = login_flag self.key_flag = key_flag self.type = type + self.account = "" + self.key = "" + + def set_creds(self, account: str, key: str): + self.account = account + self.key = key def restic_repo(self, repository_name: str) -> str: # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone @@ -33,3 +41,21 @@ class ResticBackuper(AbstractBackuper): "-r", self.restic_repo(account, key), ].extend(args) + + def start_backup(self, folder: str): + """ + Start backup with restic + """ + backup_command = self.restic_command( + self.account, + self.key, + "backup", + folder, + ) + with open("/var/backup.log", "w", encoding="utf-8") as log_file: + subprocess.Popen( + backup_command, + shell=False, + stdout=log_file, + stderr=subprocess.STDOUT, + ) From a9cd8dda375c347379823cb5aca801c1b5957ab9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 8 Feb 2023 14:04:55 +0000 Subject: [PATCH 247/537] fix(backup): add memory backup class,forgot to add to git --- selfprivacy_api/backup/providers/memory.py | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 selfprivacy_api/backup/providers/memory.py diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py new file mode 100644 index 0000000..4ddf571 --- /dev/null +++ b/selfprivacy_api/backup/providers/memory.py @@ -0,0 +1,6 @@ +from .provider import AbstractBackupProvider +from selfprivacy_api.backup.restic_backuper import ResticBackuper + + +class InMemoryBackup(AbstractBackupProvider): + backuper = ResticBackuper("", "", "memory") From 54103973bce51cb4bef0509dcdc8a54eb2503319 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 8 Feb 2023 14:05:25 +0000 Subject: [PATCH 248/537] test(backup): try to back up! 
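For orientation, once the fixes in this patch land, the helpers above assemble a restic invocation roughly like the sketch below (using the test-only memory backend, so no credentials are set; the double colon in rclone::memory:... is rclone's on-the-fly backend syntax, which is what lets the repository be addressed without an rclone config file):

```
from selfprivacy_api.backup.restic_backuper import ResticBackuper

backuper = ResticBackuper("", "", "memory")
command = backuper.restic_command("test_backup", "snapshots", "--json")
# roughly: ["restic",
#           "-o", "rclone.args=serve restic --stdio",
#           "-r", "rclone::memory:test_backup/sfbackup",
#           "snapshots", "--json"]
```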
--- selfprivacy_api/backup/__init__.py | 9 +++- selfprivacy_api/backup/restic_backuper.py | 50 +++++++++++-------- .../services/test_service/__init__.py | 14 ++---- tests/test_graphql/test_backup.py | 11 +++- 4 files changed, 51 insertions(+), 33 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index ff9bb2d..024beee 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,5 +1,10 @@ -from abc import ABC +from abc import ABC, abstractmethod + class AbstractBackuper(ABC): def __init__(self): - pass \ No newline at end of file + pass + + @abstractmethod + def start_backup(self, folder: str): + raise NotImplementedError diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index dbfa0a9..7c379a7 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -18,44 +18,54 @@ class ResticBackuper(AbstractBackuper): def restic_repo(self, repository_name: str) -> str: # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5 - return f"rclone::{self.type}:{self._repository_name}/sfbackup" + return f"rclone::{self.type}:{repository_name}/sfbackup" def rclone_args(self): return "rclone.args=serve restic --stdio" + self.backend_rclone_args() - def backend_rclone_args(self, account: str, key: str) -> str: + def backend_rclone_args(self) -> str: acc_arg = "" key_arg = "" - if account != "": - acc_arg = f"{self.login_flag} {account}" - if key != "": - key_arg = f"{self.key_flag} {key}" + if self.account != "": + acc_arg = f"{self.login_flag} {self.account}" + if self.key != "": + key_arg = f"{self.key_flag} {self.key}" return f"{acc_arg} {key_arg}" - def restic_command(self, account: str, key: str, *args): - return [ + def restic_command(self, repo_name: str, *args): + command = [ "restic", "-o", self.rclone_args(), "-r", - self.restic_repo(account, key), - ].extend(args) + self.restic_repo(repo_name), + ] + if args != []: + command.extend(args) + return command - def start_backup(self, folder: str): + def start_backup(self, folder: str, repo_name: str): """ Start backup with restic """ backup_command = self.restic_command( - self.account, - self.key, + repo_name, "backup", folder, ) - with open("/var/backup.log", "w", encoding="utf-8") as log_file: - subprocess.Popen( - backup_command, - shell=False, - stdout=log_file, - stderr=subprocess.STDOUT, - ) + subprocess.Popen( + backup_command, + shell=False, + stderr=subprocess.STDOUT, + ) + + # TODO: we might want to provide logging facilities + # that are reroutable for testing + # with open("/var/backup.log", "w", encoding="utf-8") as log_file: + # subprocess.Popen( + # backup_command, + # shell=False, + # stdout=log_file, + # stderr=subprocess.STDOUT, + # ) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 53fe0cf..0118dbc 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -14,8 +14,8 @@ from selfprivacy_api.services.test_service.icon import BITWARDEN_ICON class DummyService(Service): """A test service""" - def __init__(self, location): - self.loccation = location + def __init_subclass__(cls, location): + cls.location = location @staticmethod def get_id() -> str: @@ -106,13 +106,9 @@ class 
DummyService(Service): storage_usage = 0 return storage_usage - @staticmethod - def get_location() -> str: - with ReadUserData() as user_data: - if user_data.get("useBinds", False): - return user_data.get("bitwarden", {}).get("location", "sda1") - else: - return "sda1" + @classmethod + def get_location(cls) -> str: + return cls.location @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 264a9bf..8fc821a 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -11,6 +11,7 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider TESTFILE_BODY = "testytest!" +REPO_NAME = "test_backup" @pytest.fixture() @@ -18,11 +19,16 @@ def test_service(tmpdir): testile_path = path.join(tmpdir, "testfile.txt") with open(testile_path, "w") as file: file.write(TESTFILE_BODY) - return DummyService(tmpdir) + + # we need this to not change get_location() much + class TestDummyService (DummyService, location=tmpdir): + pass + + return TestDummyService() @pytest.fixture() -def memory_backup(): +def memory_backup() -> AbstractBackupProvider: ProviderClass = providers.get_provider(BackupProvider.MEMORY) assert ProviderClass is not None memory_provider = ProviderClass(login="", key="") @@ -40,3 +46,4 @@ def test_backup_service(test_service, memory_backup): # temporarily incomplete assert test_service is not None assert memory_backup is not None + memory_backup.backuper.start_backup(test_service.get_location(), REPO_NAME) \ No newline at end of file From ff72d4124e0a8253efc1cd271328000db8c2965f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 8 Feb 2023 14:18:45 +0000 Subject: [PATCH 249/537] refactor(backup): add a placeholder Backups singleton class --- selfprivacy_api/backup/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 024beee..3054f2e 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,6 +1,10 @@ from abc import ABC, abstractmethod +class Backups: + """A singleton controller for backups""" + + class AbstractBackuper(ABC): def __init__(self): pass From 178c456593a1c00fbe2fb2e8adb0adb9c2799150 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 8 Feb 2023 14:57:34 +0000 Subject: [PATCH 250/537] refactor(backup): add a backup function to Backups singleton class --- selfprivacy_api/backup/__init__.py | 38 ++++++++++++++++---- selfprivacy_api/backup/backuper.py | 10 ++++++ selfprivacy_api/backup/providers/provider.py | 2 +- selfprivacy_api/backup/restic_backuper.py | 2 +- 4 files changed, 44 insertions(+), 8 deletions(-) create mode 100644 selfprivacy_api/backup/backuper.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 3054f2e..79125ef 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,14 +1,40 @@ -from abc import ABC, abstractmethod +from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass + +from selfprivacy_api.services.service import Service +from selfprivacy_api.backup.providers.provider import AbstractBackupProvider +from selfprivacy_api.backup.providers import get_provider +from selfprivacy_api.graphql.queries.providers import BackupProvider -class Backups: +class Backups(metaclass=SingletonMetaclass): """A singleton controller for backups""" + provider: AbstractBackupProvider -class AbstractBackuper(ABC): def 
__init__(self): + self.lookup_provider() + + def lookup_provider(self): + redis_provider = Backups.load_provider_redis() + if redis_provider is not None: + self.provider = redis_provider + + json_provider = Backups.load_provider_json() + if json_provider is not None: + self.provider = json_provider + + provider_class = get_provider(BackupProvider.MEMORY) + self.provider = provider_class(login="", key="") + + @staticmethod + def load_provider_redis() -> AbstractBackupProvider: pass - @abstractmethod - def start_backup(self, folder: str): - raise NotImplementedError + @staticmethod + def load_provider_json() -> AbstractBackupProvider: + pass + + def back_up(self, service: Service): + folder = service.get_location() + repo_name = service.get_id() + self.provider.backuper.start_backup(folder, repo_name) diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py new file mode 100644 index 0000000..f428145 --- /dev/null +++ b/selfprivacy_api/backup/backuper.py @@ -0,0 +1,10 @@ +from abc import ABC, abstractmethod + + +class AbstractBackuper(ABC): + def __init__(self): + pass + + @abstractmethod + def start_backup(self, folder: str, repo_name: str): + raise NotImplementedError diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index dd41e9a..0b57528 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -4,7 +4,7 @@ It assumes that while some providers are supported via restic/rclone, others may require different backends """ from abc import ABC -from selfprivacy_api.backup import AbstractBackuper +from selfprivacy_api.backup.backuper import AbstractBackuper class AbstractBackupProvider(ABC): diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 7c379a7..5098e97 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -1,6 +1,6 @@ import subprocess -from selfprivacy_api.backup import AbstractBackuper +from selfprivacy_api.backup.backuper import AbstractBackuper class ResticBackuper(AbstractBackuper): From 95e2032c6366f52ba63cb38e34215419a6986a0b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 8 Feb 2023 15:14:08 +0000 Subject: [PATCH 251/537] test(backup): use a backup service function --- tests/test_graphql/test_backup.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 8fc821a..64efe05 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -3,6 +3,7 @@ import os.path as path from selfprivacy_api.services.test_service import DummyService +from selfprivacy_api.backup import Backups import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider @@ -21,7 +22,7 @@ def test_service(tmpdir): file.write(TESTFILE_BODY) # we need this to not change get_location() much - class TestDummyService (DummyService, location=tmpdir): + class TestDummyService(DummyService, location=tmpdir): pass return TestDummyService() @@ -36,14 +37,23 @@ def memory_backup() -> AbstractBackupProvider: return memory_provider +@pytest.fixture() +def backups(): + return Backups() + + def test_select_backend(): provider = providers.get_provider(BackupProvider.BACKBLAZE) assert provider is not None assert provider == Backblaze -def test_backup_service(test_service, memory_backup): +def 
test_backup_simple(test_service, memory_backup): # temporarily incomplete assert test_service is not None assert memory_backup is not None - memory_backup.backuper.start_backup(test_service.get_location(), REPO_NAME) \ No newline at end of file + memory_backup.backuper.start_backup(test_service.get_location(), REPO_NAME) + + +def test_backup_service(test_service, backups): + backups.back_up(test_service) From a0a0e1fb3bc9a223b04aa2bfc20540cf260c71ce Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 8 Feb 2023 15:27:49 +0000 Subject: [PATCH 252/537] feat(backup): hooks --- selfprivacy_api/backup/__init__.py | 3 +++ selfprivacy_api/services/service.py | 6 ++++++ 2 files changed, 9 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 79125ef..5ddd378 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -37,4 +37,7 @@ class Backups(metaclass=SingletonMetaclass): def back_up(self, service: Service): folder = service.get_location() repo_name = service.get_id() + + service.pre_backup() self.provider.backuper.start_backup(folder, repo_name) + service.post_restore() diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 515e28f..f191149 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -138,3 +138,9 @@ class Service(ABC): @abstractmethod def move_to_volume(self, volume: BlockDevice) -> Job: pass + + def pre_backup(self): + pass + + def post_restore(self): + pass From a42294b706545dedc0609affc995710f98352a0c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 8 Feb 2023 15:40:45 +0000 Subject: [PATCH 253/537] feature(backup): add a restore function to restic backuper --- selfprivacy_api/backup/restic_backuper.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 5098e97..be17473 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -69,3 +69,13 @@ class ResticBackuper(AbstractBackuper): # stdout=log_file, # stderr=subprocess.STDOUT, # ) + + def restore_from_backup(self, repo_name, snapshot_id, folder): + """ + Restore from backup with restic + """ + restore_command = self.restic_command( + repo_name, "restore", snapshot_id, "--target", folder + ) + + subprocess.run(restore_command, shell=False) From 4ca2e62b5c7cdb6b7ef957e0675c6930d632a528 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 8 Feb 2023 16:28:05 +0000 Subject: [PATCH 254/537] feature(backup): loading snapshots --- selfprivacy_api/backup/restic_backuper.py | 40 +++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index be17473..fdecf1b 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -1,4 +1,5 @@ import subprocess +import json from selfprivacy_api.backup.backuper import AbstractBackuper @@ -79,3 +80,42 @@ class ResticBackuper(AbstractBackuper): ) subprocess.run(restore_command, shell=False) + + def _load_snapshots(self, repo_name) -> object: + """ + Load list of snapshots from repository + """ + listing_command = self.restic_command( + repo_name, + "snapshots", + "--json", + ) + + with subprocess.Popen( + listing_command, + shell=False, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) as backup_listing_process_descriptor: + output = 
backup_listing_process_descriptor.communicate()[0].decode("utf-8") + + try: + return self.parse_snapshot_output(output) + except ValueError: + if "Is there a repository at the following location?" in output: + return [] + self.error_message = output + return [] + + def get_snapshots(self): + # No transformation for now + snapshots = [] + for snapshot in self._load_snapshots(): + snapshots.append(snapshot) + return snapshots + + def parse_snapshot_output(self, output: str) -> object: + starting_index = output.find("[") + json.loads(output[starting_index:]) + self.snapshot_list = json.loads(output[starting_index:]) + print(output) From 83b24f5fcd92d0182142c9d873ed58356507cc1e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 13 Feb 2023 10:33:39 +0000 Subject: [PATCH 255/537] refactor(backup): snapshot model --- selfprivacy_api/models/backup/__init__.py | 0 selfprivacy_api/models/backup/snapshot.py | 6 ++++++ 2 files changed, 6 insertions(+) create mode 100644 selfprivacy_api/models/backup/__init__.py create mode 100644 selfprivacy_api/models/backup/snapshot.py diff --git a/selfprivacy_api/models/backup/__init__.py b/selfprivacy_api/models/backup/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/models/backup/snapshot.py b/selfprivacy_api/models/backup/snapshot.py new file mode 100644 index 0000000..b9c9e6e --- /dev/null +++ b/selfprivacy_api/models/backup/snapshot.py @@ -0,0 +1,6 @@ +from pydantic import BaseModel + +class Snapshot(BaseModel): + id: str + service_name: str + created_at: datetime.datetime From e156e9cd584dc2e6d40c319ecc6afe2a3bf55eff Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 13 Feb 2023 11:16:35 +0000 Subject: [PATCH 256/537] test(backup): no snapshots --- selfprivacy_api/backup/backuper.py | 8 ++++++++ selfprivacy_api/backup/restic_backuper.py | 8 ++++++-- selfprivacy_api/models/backup/snapshot.py | 2 ++ tests/test_graphql/test_backup.py | 4 ++++ 4 files changed, 20 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py index f428145..c2353ad 100644 --- a/selfprivacy_api/backup/backuper.py +++ b/selfprivacy_api/backup/backuper.py @@ -1,4 +1,7 @@ from abc import ABC, abstractmethod +from typing import List + +from selfprivacy_api.models.backup.snapshot import Snapshot class AbstractBackuper(ABC): @@ -8,3 +11,8 @@ class AbstractBackuper(ABC): @abstractmethod def start_backup(self, folder: str, repo_name: str): raise NotImplementedError + + @abstractmethod + def get_snapshots(self, repo_name) -> List[Snapshot]: + """Get all snapshots from the repo""" + raise NotImplementedError diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index fdecf1b..2c120f1 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -1,7 +1,10 @@ import subprocess import json +from typing import List + from selfprivacy_api.backup.backuper import AbstractBackuper +from selfprivacy_api.models.backup.snapshot import Snapshot class ResticBackuper(AbstractBackuper): @@ -107,10 +110,11 @@ class ResticBackuper(AbstractBackuper): self.error_message = output return [] - def get_snapshots(self): + def get_snapshots(self, repo_name) -> List[Snapshot]: + """Get all snapshots from the repo""" # No transformation for now snapshots = [] - for snapshot in self._load_snapshots(): + for snapshot in self._load_snapshots(repo_name): snapshots.append(snapshot) return snapshots diff --git 
a/selfprivacy_api/models/backup/snapshot.py b/selfprivacy_api/models/backup/snapshot.py index b9c9e6e..9893f03 100644 --- a/selfprivacy_api/models/backup/snapshot.py +++ b/selfprivacy_api/models/backup/snapshot.py @@ -1,5 +1,7 @@ +import datetime from pydantic import BaseModel + class Snapshot(BaseModel): id: str service_name: str diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 64efe05..ee8ee0e 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -57,3 +57,7 @@ def test_backup_simple(test_service, memory_backup): def test_backup_service(test_service, backups): backups.back_up(test_service) + + +def test_no_snapshots(memory_backup): + assert memory_backup.backuper.get_snapshots("") == [] From 5371c7feef9d7c1e1f758336c540c8665839dee1 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 17 Feb 2023 15:55:19 +0000 Subject: [PATCH 257/537] refactor(backups): snapshotlist and local secret groundwork --- selfprivacy_api/backup/__init__.py | 9 +++++++ selfprivacy_api/backup/local_secret.py | 30 +++++++++++++++++++++++ selfprivacy_api/backup/restic_backuper.py | 23 +++++++++++------ tests/test_graphql/test_backup.py | 10 ++++++-- 4 files changed, 62 insertions(+), 10 deletions(-) create mode 100644 selfprivacy_api/backup/local_secret.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 5ddd378..4410809 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,3 +1,7 @@ +from typing import List + +from selfprivacy_api.models.backup.snapshot import Snapshot + from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass from selfprivacy_api.services.service import Service @@ -41,3 +45,8 @@ class Backups(metaclass=SingletonMetaclass): service.pre_backup() self.provider.backuper.start_backup(folder, repo_name) service.post_restore() + + def get_snapshots(self, service: Service) -> List[Snapshot]: + repo_name = service.get_id() + + return self.provider.backuper.get_snapshots(repo_name) diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py new file mode 100644 index 0000000..f2ebf06 --- /dev/null +++ b/selfprivacy_api/backup/local_secret.py @@ -0,0 +1,30 @@ +"""Handling of local secret used for encrypted backups. +Separated out for circular dependency reasons +""" + +REDIS_KEY = "backup:local_secret" + + +class LocalBackupSecret: + @staticmethod + def get(): + """A secret string which backblaze/other clouds do not know. + Serves as encryption key. 
+ TODO: generate and save in redis + """ + return "TEMPORARY_SECRET" + + @staticmethod + def reset(): + pass + + def exists(): + pass + + @staticmethod + def _generate(): + pass + + @staticmethod + def _store(secret: str): + pass diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 2c120f1..3d02d07 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -6,6 +6,8 @@ from typing import List from selfprivacy_api.backup.backuper import AbstractBackuper from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.backup.local_secret import LocalBackupSecret + class ResticBackuper(AbstractBackuper): def __init__(self, login_flag: str, key_flag: str, type: str): @@ -37,6 +39,9 @@ class ResticBackuper(AbstractBackuper): return f"{acc_arg} {key_arg}" + def _password_command(self): + return f"echo {LocalBackupSecret.get()}" + def restic_command(self, repo_name: str, *args): command = [ "restic", @@ -44,6 +49,8 @@ class ResticBackuper(AbstractBackuper): self.rclone_args(), "-r", self.restic_repo(repo_name), + "--password-command", + self._password_command(), ] if args != []: command.extend(args) @@ -87,6 +94,7 @@ class ResticBackuper(AbstractBackuper): def _load_snapshots(self, repo_name) -> object: """ Load list of snapshots from repository + raises Value Error if repo does not exist """ listing_command = self.restic_command( repo_name, @@ -102,13 +110,12 @@ class ResticBackuper(AbstractBackuper): ) as backup_listing_process_descriptor: output = backup_listing_process_descriptor.communicate()[0].decode("utf-8") + if "Is there a repository at the following location?" in output: + raise ValueError("No repository! : " + output) try: return self.parse_snapshot_output(output) - except ValueError: - if "Is there a repository at the following location?" 
in output: - return [] - self.error_message = output - return [] + except ValueError as e: + raise ValueError("Cannot load snapshots: ") from e def get_snapshots(self, repo_name) -> List[Snapshot]: """Get all snapshots from the repo""" @@ -119,7 +126,7 @@ class ResticBackuper(AbstractBackuper): return snapshots def parse_snapshot_output(self, output: str) -> object: + if "[" not in output: + raise ValueError("There is no json in the restic snapshot output") starting_index = output.find("[") - json.loads(output[starting_index:]) - self.snapshot_list = json.loads(output[starting_index:]) - print(output) + return json.loads(output[starting_index:]) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index ee8ee0e..bb3b624 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -59,5 +59,11 @@ def test_backup_service(test_service, backups): backups.back_up(test_service) -def test_no_snapshots(memory_backup): - assert memory_backup.backuper.get_snapshots("") == [] +def test_no_repo(memory_backup): + with pytest.raises(ValueError): + assert memory_backup.backuper.get_snapshots("") == [] + + +# def test_one_snapshot(backups, test_service): +# backups.back_up(test_service) +# assert len(backups.get_snapshots(test_service)) == 1 From a405eddbcf4dba13dffcb67dda9a2a7272a6a5e1 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 17 Feb 2023 15:59:27 +0000 Subject: [PATCH 258/537] refactor(backups): add repo init --- selfprivacy_api/backup/restic_backuper.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 3d02d07..0e36f4d 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -81,6 +81,17 @@ class ResticBackuper(AbstractBackuper): # stderr=subprocess.STDOUT, # ) + def init(self, repo_name): + init_command = self.restic_command( + repo_name, + "init", + ) + subprocess.Popen( + init_command, + shell=False, + stderr=subprocess.STDOUT, + ) + def restore_from_backup(self, repo_name, snapshot_id, folder): """ Restore from backup with restic From 3f30469532a89af2e6f025c6e7d53f5774ad71d4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 17 Feb 2023 16:11:17 +0000 Subject: [PATCH 259/537] refactor(backups): repo init service method --- selfprivacy_api/backup/__init__.py | 4 ++++ selfprivacy_api/backup/backuper.py | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 4410809..210c998 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -46,6 +46,10 @@ class Backups(metaclass=SingletonMetaclass): self.provider.backuper.start_backup(folder, repo_name) service.post_restore() + def init_repo(self, service: Service): + repo_name = service.get_id() + self.provider.backuper.init(repo_name) + def get_snapshots(self, service: Service) -> List[Snapshot]: repo_name = service.get_id() diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py index c2353ad..676a0a1 100644 --- a/selfprivacy_api/backup/backuper.py +++ b/selfprivacy_api/backup/backuper.py @@ -16,3 +16,7 @@ class AbstractBackuper(ABC): def get_snapshots(self, repo_name) -> List[Snapshot]: """Get all snapshots from the repo""" raise NotImplementedError + + @abstractmethod + def init(self, repo_name): + raise NotImplementedError From 29c4b74a86d013f3cad1d8e6e23ae030a4efb306 Mon Sep 17 00:00:00 2001 
From: Houkime <> Date: Mon, 20 Feb 2023 10:35:51 +0000 Subject: [PATCH 260/537] test(backups): test repo init --- tests/test_graphql/test_backup.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index bb3b624..5e702b3 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -16,7 +16,7 @@ REPO_NAME = "test_backup" @pytest.fixture() -def test_service(tmpdir): +def test_service(tmpdir, backups): testile_path = path.join(tmpdir, "testfile.txt") with open(testile_path, "w") as file: file.write(TESTFILE_BODY) @@ -25,7 +25,9 @@ def test_service(tmpdir): class TestDummyService(DummyService, location=tmpdir): pass - return TestDummyService() + service = TestDummyService() + backups.init_repo(service) + return service @pytest.fixture() From 529608d52e0f0d8e853b26bbf3f769e496184b1f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Feb 2023 11:32:25 +0000 Subject: [PATCH 261/537] feature(backups): localfile repo --- selfprivacy_api/backup/providers/backblaze.py | 2 +- selfprivacy_api/backup/providers/local_file.py | 9 +++++++++ selfprivacy_api/backup/providers/memory.py | 2 +- selfprivacy_api/backup/restic_backuper.py | 2 +- 4 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 selfprivacy_api/backup/providers/local_file.py diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index e16e9d3..9ec5eba 100644 --- a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -3,4 +3,4 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class Backblaze(AbstractBackupProvider): - backuper = ResticBackuper("--b2-account", "--b2-key", "b2") + backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:") diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py new file mode 100644 index 0000000..5ae45bd --- /dev/null +++ b/selfprivacy_api/backup/providers/local_file.py @@ -0,0 +1,9 @@ +from .provider import AbstractBackupProvider +from selfprivacy_api.backup.restic_backuper import ResticBackuper + + +class LocalFileBackup(AbstractBackupProvider): + backuper = ResticBackuper("", "", "memory") + + def __init__(self, filename: str): + self.backuper = ResticBackuper("", "", f":local:{filename}/") diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py index 4ddf571..3f257bf 100644 --- a/selfprivacy_api/backup/providers/memory.py +++ b/selfprivacy_api/backup/providers/memory.py @@ -3,4 +3,4 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class InMemoryBackup(AbstractBackupProvider): - backuper = ResticBackuper("", "", "memory") + backuper = ResticBackuper("", "", ":memory:") diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 0e36f4d..7980e46 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -24,7 +24,7 @@ class ResticBackuper(AbstractBackuper): def restic_repo(self, repository_name: str) -> str: # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5 - return f"rclone::{self.type}:{repository_name}/sfbackup" + return f"rclone:{self.type}{repository_name}/sfbackup" def rclone_args(self): 
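        # (Editor's note on the change just above, not part of the original
        # patch: the provider "type" string now carries its own colon
        # separators, so restic_repo() produces, for example,
        #   ":memory:" + "<repo>"   -> "rclone::memory:<repo>/sfbackup"
        #   ":b2:" + "<bucket>"     -> "rclone::b2:<bucket>/sfbackup"
        # where <repo>/<bucket> stand in for whatever repository_name
        # is passed in.)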
return "rclone.args=serve restic --stdio" + self.backend_rclone_args() From 5efb351159387c195bf7e6e2102c4d7e98157712 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Feb 2023 11:44:59 +0000 Subject: [PATCH 262/537] feature(backups): register localfile backend --- selfprivacy_api/backup/providers/__init__.py | 2 ++ selfprivacy_api/graphql/queries/providers.py | 1 + 2 files changed, 3 insertions(+) diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py index 66fb9e6..21c4467 100644 --- a/selfprivacy_api/backup/providers/__init__.py +++ b/selfprivacy_api/backup/providers/__init__.py @@ -3,10 +3,12 @@ from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.backup.providers.memory import InMemoryBackup +from selfprivacy_api.backup.providers.local_file import LocalFileBackup PROVIDER_MAPPING = { BackupProvider.BACKBLAZE: Backblaze, BackupProvider.MEMORY: InMemoryBackup, + BackupProvider.FILE: LocalFileBackup, } diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py index ecc7f11..2a9fcec 100644 --- a/selfprivacy_api/graphql/queries/providers.py +++ b/selfprivacy_api/graphql/queries/providers.py @@ -21,3 +21,4 @@ class BackupProvider(Enum): BACKBLAZE = "BACKBLAZE" # for testing purposes, make sure not selectable in prod. MEMORY = "MEMORY" + FILE = "FILE" From b27f19b201013734eb65f1d61624d4f97fd7260f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Feb 2023 11:50:52 +0000 Subject: [PATCH 263/537] test(backups): basic file backend init test --- tests/test_graphql/test_backup.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 5e702b3..33ecc65 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -39,6 +39,16 @@ def memory_backup() -> AbstractBackupProvider: return memory_provider +@pytest.fixture() +def file_backup(tmpdir) -> AbstractBackupProvider: + test_repo_path = path.join(tmpdir, "test_repo") + ProviderClass = providers.get_provider(BackupProvider.FILE) + assert ProviderClass is not None + provider = ProviderClass(test_repo_path) + assert provider is not None + return provider + + @pytest.fixture() def backups(): return Backups() @@ -50,6 +60,10 @@ def test_select_backend(): assert provider == Backblaze +def test_file_backend_init(file_backup): + file_backup.backuper.init("somerepo") + + def test_backup_simple(test_service, memory_backup): # temporarily incomplete assert test_service is not None From add4e21f3969537a2ed81a7752ef214266e7978e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Feb 2023 13:04:39 +0000 Subject: [PATCH 264/537] feature(backups): throw an error if repo init fails --- selfprivacy_api/backup/restic_backuper.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 7980e46..5cf99cd 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -86,11 +86,15 @@ class ResticBackuper(AbstractBackuper): repo_name, "init", ) - subprocess.Popen( + with subprocess.Popen( init_command, shell=False, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - ) + ) as process_handle: + output = process_handle.communicate()[0].decode("utf-8") + if not "created restic repository" in 
output: + raise ValueError("cannot init a repo: " + output) def restore_from_backup(self, repo_name, snapshot_id, folder): """ From a280e5c999e7b861b5a72761cb2bb99eb6faee41 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Feb 2023 13:51:06 +0000 Subject: [PATCH 265/537] test(backups): localfile repo by default in tests --- selfprivacy_api/backup/__init__.py | 9 ++++++++- tests/test_graphql/test_backup.py | 5 +++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 210c998..d948d34 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -15,9 +15,16 @@ class Backups(metaclass=SingletonMetaclass): provider: AbstractBackupProvider - def __init__(self): + def __init__(self, test_repo_file: str = ""): + if test_repo_file != "": + self.set_localfile_repo(test_repo_file) self.lookup_provider() + def set_localfile_repo(self, file_path: str): + ProviderClass = get_provider(BackupProvider.FILE) + provider = ProviderClass(file_path) + self.provider = provider + def lookup_provider(self): redis_provider = Backups.load_provider_redis() if redis_provider is not None: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 33ecc65..28ef828 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -50,8 +50,9 @@ def file_backup(tmpdir) -> AbstractBackupProvider: @pytest.fixture() -def backups(): - return Backups() +def backups(tmpdir): + test_repo_path = path.join(tmpdir, "test_repo") + return Backups(test_repo_path) def test_select_backend(): From 348ece8b9c47d4494a8bfa2808be727d4ca3bc1c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Feb 2023 16:09:01 +0000 Subject: [PATCH 266/537] fix(backups): singleton metaclass was screwing with tests --- selfprivacy_api/backup/__init__.py | 6 ++-- tests/test_graphql/test_backup.py | 55 +++++++++++++++++++----------- 2 files changed, 40 insertions(+), 21 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index d948d34..b328831 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -10,7 +10,8 @@ from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.graphql.queries.providers import BackupProvider -class Backups(metaclass=SingletonMetaclass): +# class Backups(metaclass=SingletonMetaclass): +class Backups: """A singleton controller for backups""" provider: AbstractBackupProvider @@ -18,7 +19,8 @@ class Backups(metaclass=SingletonMetaclass): def __init__(self, test_repo_file: str = ""): if test_repo_file != "": self.set_localfile_repo(test_repo_file) - self.lookup_provider() + else: + self.lookup_provider() def set_localfile_repo(self, file_path: str): ProviderClass = get_provider(BackupProvider.FILE) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 28ef828..9753217 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -1,5 +1,6 @@ import pytest import os.path as path +from os import makedirs from selfprivacy_api.services.test_service import DummyService @@ -15,17 +16,36 @@ TESTFILE_BODY = "testytest!" 
REPO_NAME = "test_backup" +@pytest.fixture(scope="function") +def backups(tmpdir): + test_repo_path = path.join(tmpdir, "totallyunrelated") + return Backups(test_repo_path) + + @pytest.fixture() -def test_service(tmpdir, backups): - testile_path = path.join(tmpdir, "testfile.txt") - with open(testile_path, "w") as file: +def raw_dummy_service(tmpdir, backups): + service_dir = path.join(tmpdir, "test_service") + makedirs(service_dir) + + testfile_path = path.join(service_dir, "testfile.txt") + with open(testfile_path, "w") as file: file.write(TESTFILE_BODY) # we need this to not change get_location() much - class TestDummyService(DummyService, location=tmpdir): + class TestDummyService(DummyService, location=service_dir): pass service = TestDummyService() + return service + + +@pytest.fixture() +def dummy_service(tmpdir, backups, raw_dummy_service): + service = raw_dummy_service + repo_path = path.join(tmpdir, "test_repo") + assert not path.exists(repo_path) + # assert not repo_path + backups.init_repo(service) return service @@ -49,12 +69,6 @@ def file_backup(tmpdir) -> AbstractBackupProvider: return provider -@pytest.fixture() -def backups(tmpdir): - test_repo_path = path.join(tmpdir, "test_repo") - return Backups(test_repo_path) - - def test_select_backend(): provider = providers.get_provider(BackupProvider.BACKBLAZE) assert provider is not None @@ -65,15 +79,18 @@ def test_file_backend_init(file_backup): file_backup.backuper.init("somerepo") -def test_backup_simple(test_service, memory_backup): +def test_backup_simple_file(raw_dummy_service, file_backup): # temporarily incomplete - assert test_service is not None - assert memory_backup is not None - memory_backup.backuper.start_backup(test_service.get_location(), REPO_NAME) + service = raw_dummy_service + assert service is not None + assert file_backup is not None + + name = service.get_id() + file_backup.backuper.init(name) -def test_backup_service(test_service, backups): - backups.back_up(test_service) +def test_backup_service(dummy_service, backups): + backups.back_up(dummy_service) def test_no_repo(memory_backup): @@ -81,6 +98,6 @@ def test_no_repo(memory_backup): assert memory_backup.backuper.get_snapshots("") == [] -# def test_one_snapshot(backups, test_service): -# backups.back_up(test_service) -# assert len(backups.get_snapshots(test_service)) == 1 +# def test_one_snapshot(backups, dummy_service): +# backups.back_up(dummy_service) +# assert len(backups.get_snapshots(dummy_service)) == 1 From 228eab44bbc650c0fb681378c1e3fde52f186a7d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 10:07:05 +0000 Subject: [PATCH 267/537] feat(backups): throw an error on a failed backup --- selfprivacy_api/backup/restic_backuper.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 5cf99cd..99d6a81 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -65,21 +65,15 @@ class ResticBackuper(AbstractBackuper): "backup", folder, ) - subprocess.Popen( + with subprocess.Popen( backup_command, shell=False, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - ) - - # TODO: we might want to provide logging facilities - # that are reroutable for testing - # with open("/var/backup.log", "w", encoding="utf-8") as log_file: - # subprocess.Popen( - # backup_command, - # shell=False, - # stdout=log_file, - # stderr=subprocess.STDOUT, - # ) + ) as handle: + output = 
handle.communicate()[0].decode("utf-8") + if "saved" not in output: + raise ValueError("could not create a new snapshot: " + output) def init(self, repo_name): init_command = self.restic_command( From a0a32a7f37d0bbb5868925787fffe12f6492b43f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 10:25:51 +0000 Subject: [PATCH 268/537] test(backups): reenable snapshot testing --- tests/test_graphql/test_backup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 9753217..193e8ab 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -98,6 +98,6 @@ def test_no_repo(memory_backup): assert memory_backup.backuper.get_snapshots("") == [] -# def test_one_snapshot(backups, dummy_service): -# backups.back_up(dummy_service) -# assert len(backups.get_snapshots(dummy_service)) == 1 +def test_one_snapshot(backups, dummy_service): + backups.back_up(dummy_service) + assert len(backups.get_snapshots(dummy_service)) == 1 From e56907f2cd3db4cde72891734aa0b11a226680da Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 13:35:55 +0000 Subject: [PATCH 269/537] feat(backups): return proper snapshot structs when listing --- selfprivacy_api/backup/restic_backuper.py | 9 +++++++-- tests/test_graphql/test_backup.py | 8 +++++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 99d6a81..2a41967 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -128,9 +128,14 @@ class ResticBackuper(AbstractBackuper): def get_snapshots(self, repo_name) -> List[Snapshot]: """Get all snapshots from the repo""" - # No transformation for now snapshots = [] - for snapshot in self._load_snapshots(repo_name): + for restic_snapshot in self._load_snapshots(repo_name): + snapshot = Snapshot( + id=restic_snapshot["short_id"], + created_at=restic_snapshot["time"], + service_name=repo_name, + ) + snapshots.append(snapshot) return snapshots diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 193e8ab..2ed2f67 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -4,6 +4,8 @@ from os import makedirs from selfprivacy_api.services.test_service import DummyService +from selfprivacy_api.models.backup.snapshot import Snapshot + from selfprivacy_api.backup import Backups import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider @@ -100,4 +102,8 @@ def test_no_repo(memory_backup): def test_one_snapshot(backups, dummy_service): backups.back_up(dummy_service) - assert len(backups.get_snapshots(dummy_service)) == 1 + + snaps = backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + snap = snaps[0] + assert snap.service_name == dummy_service.get_id() From ff6bc2a14227cc5b1193f6a5e61ac45a04c3f3b4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 13:46:28 +0000 Subject: [PATCH 270/537] feat(backups): a better error on failed snapshot retrieval --- selfprivacy_api/backup/restic_backuper.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 2a41967..e485e01 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -141,6 +141,8 @@ class 
ResticBackuper(AbstractBackuper): def parse_snapshot_output(self, output: str) -> object: if "[" not in output: - raise ValueError("There is no json in the restic snapshot output") + raise ValueError( + "There is no json in the restic snapshot output : " + output + ) starting_index = output.find("[") return json.loads(output[starting_index:]) From c8a8d45110f433d3f4380393ea36c6811a597dce Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 14:45:11 +0000 Subject: [PATCH 271/537] feat(backups): add restore_snapshot and restore_service_from_snapshot --- selfprivacy_api/backup/__init__.py | 16 +++++++++++++++- selfprivacy_api/backup/backuper.py | 5 +++++ selfprivacy_api/backup/restic_backuper.py | 8 +++++++- 3 files changed, 27 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b328831..4261e35 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -4,12 +4,14 @@ from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass +from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service + from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.graphql.queries.providers import BackupProvider - +# Singleton has a property of being persistent between tests. I don't know what to do with this yet # class Backups(metaclass=SingletonMetaclass): class Backups: """A singleton controller for backups""" @@ -63,3 +65,15 @@ class Backups: repo_name = service.get_id() return self.provider.backuper.get_snapshots(repo_name) + + def restore_service_from_snapshot(self, service: Service, snapshot_id: str): + repo_name = service.get_id() + folder = service.get_location() + + self.provider.backuper.restore_from_backup(repo_name, snapshot_id, folder) + + # Our dummy service is not yet globally registered so this is not testable yet + def restore_snapshot(self, snapshot: Snapshot): + self.restore_service_from_snapshot( + get_service_by_id(snapshot.service_name), snapshot.id + ) diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py index 676a0a1..f4c25a8 100644 --- a/selfprivacy_api/backup/backuper.py +++ b/selfprivacy_api/backup/backuper.py @@ -20,3 +20,8 @@ class AbstractBackuper(ABC): @abstractmethod def init(self, repo_name): raise NotImplementedError + + @abstractmethod + def restore_from_backup(self, repo_name: str, snapshot_id: str, folder: str): + """Restore a target folder using a snapshot""" + raise NotImplementedError diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index e485e01..0db5a42 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -98,7 +98,13 @@ class ResticBackuper(AbstractBackuper): repo_name, "restore", snapshot_id, "--target", folder ) - subprocess.run(restore_command, shell=False) + with subprocess.Popen( + restore_command, stdout=subprocess.PIPE, shell=False + ) as handle: + + output = handle.communicate()[0].decode("utf-8") + if "restored" not in output: + raise ValueError("cannot restore a snapshot: " + output) def _load_snapshots(self, repo_name) -> object: """ From 1d403b0e948ae758b75692b21cd7805c9c501025 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 15:58:36 +0000 Subject: [PATCH 272/537] test(backups): 
test restoring a file --- selfprivacy_api/backup/restic_backuper.py | 11 +++++++++-- tests/test_graphql/test_backup.py | 20 ++++++++++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 0db5a42..04461ca 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -94,8 +94,15 @@ class ResticBackuper(AbstractBackuper): """ Restore from backup with restic """ + # snapshots save the path of the folder in the file system + # I do not alter the signature yet because maybe this can be + # changed with flags restore_command = self.restic_command( - repo_name, "restore", snapshot_id, "--target", folder + repo_name, + "restore", + snapshot_id, + "--target", + "/", ) with subprocess.Popen( @@ -103,7 +110,7 @@ class ResticBackuper(AbstractBackuper): ) as handle: output = handle.communicate()[0].decode("utf-8") - if "restored" not in output: + if "restoring" not in output: raise ValueError("cannot restore a snapshot: " + output) def _load_snapshots(self, repo_name) -> object: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 2ed2f67..0e8e246 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -1,6 +1,8 @@ import pytest import os.path as path from os import makedirs +from os import remove +from os import listdir from selfprivacy_api.services.test_service import DummyService @@ -107,3 +109,21 @@ def test_one_snapshot(backups, dummy_service): assert len(snaps) == 1 snap = snaps[0] assert snap.service_name == dummy_service.get_id() + + +def test_restore(backups, dummy_service): + service_folder = dummy_service.get_location() + file_to_nuke = listdir(service_folder)[0] + assert file_to_nuke is not None + path_to_nuke = path.join(service_folder, file_to_nuke) + + backups.back_up(dummy_service) + snap = backups.get_snapshots(dummy_service)[0] + assert snap is not None + + assert path.exists(path_to_nuke) + remove(path_to_nuke) + assert not path.exists(path_to_nuke) + + backups.restore_service_from_snapshot(dummy_service, snap.id) + assert path.exists(path_to_nuke) From 60dcde458c59935771fc7df94345abb5f0dcba7a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 18:48:08 +0000 Subject: [PATCH 273/537] feat(backups): sizing up snapshots --- selfprivacy_api/backup/__init__.py | 10 +++++++ selfprivacy_api/backup/backuper.py | 4 +++ selfprivacy_api/backup/restic_backuper.py | 35 ++++++++++++++++++++--- tests/test_graphql/test_backup.py | 8 ++++++ 4 files changed, 53 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 4261e35..903e38b 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -77,3 +77,13 @@ class Backups: self.restore_service_from_snapshot( get_service_by_id(snapshot.service_name), snapshot.id ) + + def service_snapshot_size(self, service: Service, snapshot_id: str) -> float: + repo_name = service.get_id() + return self.provider.backuper.restored_size(repo_name, snapshot_id) + + # Our dummy service is not yet globally registered so this is not testable yet + def snapshot_restored_size(self, snapshot: Snapshot) -> float: + return self.service_snapshot_size( + get_service_by_id(snapshot.service_name), snapshot.id + ) diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py index f4c25a8..5d9b1c3 100644 --- 
a/selfprivacy_api/backup/backuper.py +++ b/selfprivacy_api/backup/backuper.py @@ -25,3 +25,7 @@ class AbstractBackuper(ABC): def restore_from_backup(self, repo_name: str, snapshot_id: str, folder: str): """Restore a target folder using a snapshot""" raise NotImplementedError + + @abstractmethod + def restored_size(self, repo_name, snapshot_id) -> float: + raise NotImplementedError diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 04461ca..a4a4830 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -90,6 +90,25 @@ class ResticBackuper(AbstractBackuper): if not "created restic repository" in output: raise ValueError("cannot init a repo: " + output) + def restored_size(self, repo_name, snapshot_id) -> float: + """ + Size of a snapshot + """ + command = self.restic_command( + repo_name, + "stats", + snapshot_id, + "--json", + ) + + with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle: + output = handle.communicate()[0].decode("utf-8") + try: + parsed_output = self.parse_json_output(output) + return parsed_output["total_size"] + except ValueError as e: + raise ValueError("cannot restore a snapshot: " + output) from e + def restore_from_backup(self, repo_name, snapshot_id, folder): """ Restore from backup with restic @@ -135,7 +154,7 @@ class ResticBackuper(AbstractBackuper): if "Is there a repository at the following location?" in output: raise ValueError("No repository! : " + output) try: - return self.parse_snapshot_output(output) + return self.parse_json_output(output) except ValueError as e: raise ValueError("Cannot load snapshots: ") from e @@ -152,10 +171,18 @@ class ResticBackuper(AbstractBackuper): snapshots.append(snapshot) return snapshots - def parse_snapshot_output(self, output: str) -> object: - if "[" not in output: + def parse_json_output(self, output: str) -> object: + indices = [ + output.find("["), + output.find("{"), + ] + indices = [x for x in indices if x != -1] + + if indices == []: raise ValueError( "There is no json in the restic snapshot output : " + output ) - starting_index = output.find("[") + + starting_index = min(indices) + return json.loads(output[starting_index:]) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 0e8e246..4c6b2dd 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -127,3 +127,11 @@ def test_restore(backups, dummy_service): backups.restore_service_from_snapshot(dummy_service, snap.id) assert path.exists(path_to_nuke) + + +def test_sizing(backups, dummy_service): + backups.back_up(dummy_service) + snap = backups.get_snapshots(dummy_service)[0] + size = backups.service_snapshot_size(dummy_service, snap.id) + assert size is not None + assert size > 0 From 0847e16089c5b108d46bd3854780a057dc57ba97 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Feb 2023 19:28:04 +0000 Subject: [PATCH 274/537] feat(backups): local secret generation and storage --- selfprivacy_api/backup/local_secret.py | 29 ++++++++++++++++++-------- 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py index f2ebf06..02d78a4 100644 --- a/selfprivacy_api/backup/local_secret.py +++ b/selfprivacy_api/backup/local_secret.py @@ -2,29 +2,40 @@ Separated out for circular dependency reasons """ +from __future__ import annotations +import secrets + +from selfprivacy_api.utils.redis_pool 
import RedisPool + + REDIS_KEY = "backup:local_secret" +redis = RedisPool().get_connection() + class LocalBackupSecret: @staticmethod def get(): """A secret string which backblaze/other clouds do not know. Serves as encryption key. - TODO: generate and save in redis """ - return "TEMPORARY_SECRET" + if not LocalBackupSecret.exists(): + LocalBackupSecret.reset() + return redis.get(REDIS_KEY) @staticmethod def reset(): - pass - - def exists(): - pass + new_secret = LocalBackupSecret._generate() + LocalBackupSecret._store(new_secret) @staticmethod - def _generate(): - pass + def exists() -> bool: + return redis.exists(REDIS_KEY) + + @staticmethod + def _generate() -> str: + return secrets.token_urlsafe(256) @staticmethod def _store(secret: str): - pass + redis.set(REDIS_KEY, secret) From c928263fcee46658b32e25c146f865c981bbc6e5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 10 Mar 2023 14:14:41 +0000 Subject: [PATCH 275/537] feature(backups): load from json --- selfprivacy_api/backup/__init__.py | 42 +++++++++++++++++---- tests/conftest.py | 20 ++++++++++ tests/data/turned_on.json | 60 ++++++++++++++++++++++++++++++ tests/test_graphql/test_backup.py | 10 +++++ 4 files changed, 124 insertions(+), 8 deletions(-) create mode 100644 tests/data/turned_on.json diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 903e38b..f34f43d 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -3,6 +3,7 @@ from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass +from selfprivacy_api.utils import ReadUserData from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service @@ -11,7 +12,9 @@ from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.graphql.queries.providers import BackupProvider -# Singleton has a property of being persistent between tests. I don't know what to do with this yet + +# Singleton has a property of being persistent between tests. 
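+# (Editor's note, not part of the original patch: "persistent" here means
+# that with SingletonMetaclass the first Backups() ever constructed is
+# returned by every later Backups(...) call, so tests that build a Backups
+# around a fresh tmpdir repository would silently keep reusing the first
+# test's provider and state; PATCH 266 above disabled the metaclass for
+# exactly this reason.)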
+# I don't know what to do with this yet # class Backups(metaclass=SingletonMetaclass): class Backups: """A singleton controller for backups""" @@ -22,24 +25,28 @@ class Backups: if test_repo_file != "": self.set_localfile_repo(test_repo_file) else: - self.lookup_provider() + self.provider = self.lookup_provider() def set_localfile_repo(self, file_path: str): ProviderClass = get_provider(BackupProvider.FILE) provider = ProviderClass(file_path) self.provider = provider - def lookup_provider(self): + @staticmethod + def construct_provider(kind: str, login: str, key: str): + provider_class = get_provider(BackupProvider[kind]) + return provider_class(login=login, key=key) + + def lookup_provider(self) -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() if redis_provider is not None: - self.provider = redis_provider + return redis_provider json_provider = Backups.load_provider_json() if json_provider is not None: - self.provider = json_provider + return json_provider - provider_class = get_provider(BackupProvider.MEMORY) - self.provider = provider_class(login="", key="") + return Backups.construct_provider("MEMORY", login="", key="") @staticmethod def load_provider_redis() -> AbstractBackupProvider: @@ -47,7 +54,26 @@ class Backups: @staticmethod def load_provider_json() -> AbstractBackupProvider: - pass + with ReadUserData() as user_data: + account = "" + key = "" + + if "backup" not in user_data.keys(): + if "backblaze" in user_data.keys(): + account = user_data["backblaze"]["accountId"] + key = user_data["backblaze"]["accountKey"] + provider_string = "BACKBLAZE" + return Backups.construct_provider( + kind=provider_string, login=account, key=key + ) + return None + + account = user_data["backup"]["accountId"] + key = user_data["backup"]["accountKey"] + provider_string = user_data["backup"]["provider"] + return Backups.construct_provider( + kind=provider_string, login=account, key=key + ) def back_up(self, service: Service): folder = service.get_location() diff --git a/tests/conftest.py b/tests/conftest.py index ea7a66a..7e8ae11 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,6 +3,8 @@ # pylint: disable=unused-argument import os import pytest +from os import path + from fastapi.testclient import TestClient @@ -10,6 +12,10 @@ def pytest_generate_tests(metafunc): os.environ["TEST_MODE"] = "true" +def global_data_dir(): + return path.join(path.dirname(__file__), "data") + + @pytest.fixture def tokens_file(mocker, shared_datadir): """Mock tokens file.""" @@ -26,6 +32,20 @@ def jobs_file(mocker, shared_datadir): return mock +@pytest.fixture +def generic_userdata(mocker, tmpdir): + filename = "turned_on.json" + source_path = path.join(global_data_dir(), filename) + userdata_path = path.join(tmpdir, filename) + + with open(userdata_path, "w") as file: + with open(source_path, "r") as source: + file.write(source.read()) + + mock = mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=userdata_path) + return mock + + @pytest.fixture def huey_database(mocker, shared_datadir): """Mock huey database.""" diff --git a/tests/data/turned_on.json b/tests/data/turned_on.json new file mode 100644 index 0000000..c6b758b --- /dev/null +++ b/tests/data/turned_on.json @@ -0,0 +1,60 @@ +{ + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + 
"databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": true + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "jitsi": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + } +} diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 4c6b2dd..6d21c15 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -73,6 +73,16 @@ def file_backup(tmpdir) -> AbstractBackupProvider: return provider +def test_config_load(generic_userdata): + backups = Backups() + provider = backups.provider + + assert provider is not None + assert isinstance(provider, Backblaze) + assert provider.login == "ID" + assert provider.key == "KEY" + + def test_select_backend(): provider = providers.get_provider(BackupProvider.BACKBLAZE) assert provider is not None From 873bc8282e367701247115bc3ed6b1ce09d1864a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 13 Mar 2023 19:00:39 +0000 Subject: [PATCH 276/537] refactor(backups): redis model storage utils --- selfprivacy_api/utils/redis_model_storage.py | 30 ++++++++++++++++ tests/test_model_storage.py | 36 ++++++++++++++++++++ 2 files changed, 66 insertions(+) create mode 100644 selfprivacy_api/utils/redis_model_storage.py create mode 100644 tests/test_model_storage.py diff --git a/selfprivacy_api/utils/redis_model_storage.py b/selfprivacy_api/utils/redis_model_storage.py new file mode 100644 index 0000000..51faff7 --- /dev/null +++ b/selfprivacy_api/utils/redis_model_storage.py @@ -0,0 +1,30 @@ +from datetime import datetime +from typing import Optional + + +def store_model_as_hash(redis, redis_key, model): + for key, value in model.dict().items(): + if isinstance(value, datetime): + value = value.isoformat() + redis.hset(redis_key, key, str(value)) + + +def hash_as_model(redis, redis_key: str, model_class): + token_dict = _model_dict_from_hash(redis, redis_key) + if token_dict is not None: + return model_class(**token_dict) + return None + + +def _prepare_model_dict(d: dict): + for key in d.keys(): + if d[key] == "None": + d[key] = None + + +def _model_dict_from_hash(redis, redis_key: str) -> Optional[dict]: + if redis.exists(redis_key): + token_dict = redis.hgetall(redis_key) + _prepare_model_dict(token_dict) + return token_dict + return None diff --git a/tests/test_model_storage.py b/tests/test_model_storage.py new file mode 100644 index 0000000..d26fabb --- /dev/null +++ b/tests/test_model_storage.py @@ -0,0 +1,36 @@ +import pytest + +from pydantic import BaseModel +from datetime import datetime +from typing import Optional + +from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model +from selfprivacy_api.utils.redis_pool import RedisPool + +TEST_KEY = "model_storage" +redis = RedisPool().get_connection() + +@pytest.fixture() +def clean_redis(): + redis.delete(TEST_KEY) + + +class DummyModel(BaseModel): + name: str + date: Optional[datetime] + +def test_store_retrieve(): + model = DummyModel( + name= "test", + date= datetime.now() + ) 
+ store_model_as_hash(redis, TEST_KEY, model) + assert hash_as_model(redis, TEST_KEY, DummyModel) == model + +def test_store_retrieve_none(): + model = DummyModel( + name= "test", + date= None + ) + store_model_as_hash(redis, TEST_KEY, model) + assert hash_as_model(redis, TEST_KEY, DummyModel) == model From 48dc63a590f156ebdb3694dc5e2ece74bee3047a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 13 Mar 2023 19:02:03 +0000 Subject: [PATCH 277/537] refactor(backups): add a provider model for redis storage --- selfprivacy_api/models/backup/provider.py | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 selfprivacy_api/models/backup/provider.py diff --git a/selfprivacy_api/models/backup/provider.py b/selfprivacy_api/models/backup/provider.py new file mode 100644 index 0000000..e454c39 --- /dev/null +++ b/selfprivacy_api/models/backup/provider.py @@ -0,0 +1,9 @@ +from pydantic import BaseModel + +"""for storage in Redis""" + + +class BackupProviderModel(BaseModel): + kind: str + login: str + key: str From 02deae217d5696ff19b77ad48fc60d398a791aa5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 13 Mar 2023 19:03:41 +0000 Subject: [PATCH 278/537] feature(backups): provider storage and retrieval --- selfprivacy_api/backup/__init__.py | 37 +++++++++++++++++--- selfprivacy_api/backup/providers/__init__.py | 6 ++++ tests/test_graphql/test_backup.py | 30 +++++++++++++++- 3 files changed, 67 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f34f43d..d24872d 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,17 +1,25 @@ from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.models.backup.provider import BackupProviderModel from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass from selfprivacy_api.utils import ReadUserData +from selfprivacy_api.utils.redis_pool import RedisPool +from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model + from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service from selfprivacy_api.backup.providers.provider import AbstractBackupProvider -from selfprivacy_api.backup.providers import get_provider +from selfprivacy_api.backup.providers import get_provider, get_kind from selfprivacy_api.graphql.queries.providers import BackupProvider +REDIS_PROVIDER_KEY = "backups:provider" + +redis = RedisPool().get_connection() + # Singleton has a property of being persistent between tests. 
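Combining the BackupProviderModel above with the hash-storage helpers from two patches back makes provider persistence a couple of calls; roughly (a sketch, taking the connection from the project's RedisPool as elsewhere in the series):

    from selfprivacy_api.models.backup.provider import BackupProviderModel
    from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model
    from selfprivacy_api.utils.redis_pool import RedisPool

    redis = RedisPool().get_connection()
    model = BackupProviderModel(kind="BACKBLAZE", login="ID", key="KEY")

    store_model_as_hash(redis, "backups:provider", model)
    assert hash_as_model(redis, "backups:provider", BackupProviderModel) == model

This is exactly the shape store_provider_redis and load_provider_redis take in the diff that follows.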
# I don't know what to do with this yet @@ -37,6 +45,29 @@ class Backups: provider_class = get_provider(BackupProvider[kind]) return provider_class(login=login, key=key) + @staticmethod + def store_provider_redis(provider: AbstractBackupProvider): + store_model_as_hash( + redis, + REDIS_PROVIDER_KEY, + BackupProviderModel( + kind=get_kind(provider), login=provider.login, key=provider.key + ), + ) + + @staticmethod + def load_provider_redis() -> AbstractBackupProvider: + provider_model = hash_as_model(redis, REDIS_PROVIDER_KEY, BackupProviderModel) + if provider_model is None: + return None + return Backups.construct_provider( + provider_model.kind, provider_model.login, provider_model.key + ) + + @staticmethod + def reset(): + redis.delete(REDIS_PROVIDER_KEY) + def lookup_provider(self) -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() if redis_provider is not None: @@ -48,10 +79,6 @@ class Backups: return Backups.construct_provider("MEMORY", login="", key="") - @staticmethod - def load_provider_redis() -> AbstractBackupProvider: - pass - @staticmethod def load_provider_json() -> AbstractBackupProvider: with ReadUserData() as user_data: diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py index 21c4467..5428e80 100644 --- a/selfprivacy_api/backup/providers/__init__.py +++ b/selfprivacy_api/backup/providers/__init__.py @@ -14,3 +14,9 @@ PROVIDER_MAPPING = { def get_provider(provider_type: BackupProvider) -> AbstractBackupProvider: return PROVIDER_MAPPING[provider_type] + + +def get_kind(provider: AbstractBackupProvider) -> str: + for key, value in PROVIDER_MAPPING.items(): + if isinstance(provider, value): + return key.value diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 6d21c15..f6f3526 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -23,7 +23,16 @@ REPO_NAME = "test_backup" @pytest.fixture(scope="function") def backups(tmpdir): test_repo_path = path.join(tmpdir, "totallyunrelated") - return Backups(test_repo_path) + backups = Backups(test_repo_path) + backups.reset() + return backups + + +@pytest.fixture() +def backups_backblaze(generic_userdata): + backups = Backups() + backups.reset() + return backups @pytest.fixture() @@ -75,6 +84,7 @@ def file_backup(tmpdir) -> AbstractBackupProvider: def test_config_load(generic_userdata): backups = Backups() + backups.reset() provider = backups.provider assert provider is not None @@ -145,3 +155,21 @@ def test_sizing(backups, dummy_service): size = backups.service_snapshot_size(dummy_service, snap.id) assert size is not None assert size > 0 + + +def test_redis_storage(backups_backblaze): + backups = Backups() + backups.reset() + provider = backups.provider + + assert provider is not None + + assert isinstance(provider, Backblaze) + assert provider.login == "ID" + assert provider.key == "KEY" + + backups.store_provider_redis(provider) + restored_provider = backups.load_provider_redis() + assert isinstance(restored_provider, Backblaze) + assert restored_provider.login == "ID" + assert restored_provider.key == "KEY" From 6f8f5cbb9e3448e8747ee864c235b98f1566a931 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Tue, 14 Mar 2023 00:39:15 +0000 Subject: [PATCH 279/537] feature(backups): repo init tracking --- selfprivacy_api/backup/__init__.py | 26 ++++++++++++++++ selfprivacy_api/backup/backuper.py | 4 +++ selfprivacy_api/backup/restic_backuper.py | 36 +++++++++++++++++++---- 
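The reset() added here exists because Redis, unlike the discarded singleton, really is persistent between tests, so every backup fixture wipes the relevant keys before the test body runs. As a pytest sketch (the teardown call is added for illustration; the fixtures in this series only reset up front):

    import pytest
    from selfprivacy_api.backup import Backups

    @pytest.fixture
    def backups():
        Backups.reset()   # wipe backup-related keys left over from earlier tests
        yield
        Backups.reset()   # optional: leave a clean slate behind as well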
tests/test_graphql/test_backup.py | 28 ++++++++++++++++++ 4 files changed, 88 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index d24872d..550555d 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -17,6 +17,7 @@ from selfprivacy_api.backup.providers import get_provider, get_kind from selfprivacy_api.graphql.queries.providers import BackupProvider REDIS_PROVIDER_KEY = "backups:provider" +REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" redis = RedisPool().get_connection() @@ -67,6 +68,8 @@ class Backups: @staticmethod def reset(): redis.delete(REDIS_PROVIDER_KEY) + for key in redis.keys(REDIS_INITTED_CACHE_PREFIX + "*"): + redis.delete(key) def lookup_provider(self) -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() @@ -113,6 +116,29 @@ class Backups: def init_repo(self, service: Service): repo_name = service.get_id() self.provider.backuper.init(repo_name) + self._redis_mark_as_init(service) + + def _has_redis_init_mark(self, service: Service) -> bool: + repo_name = service.get_id() + if redis.exists(REDIS_INITTED_CACHE_PREFIX + repo_name): + return True + return False + + def _redis_mark_as_init(self, service: Service): + repo_name = service.get_id() + redis.set(REDIS_INITTED_CACHE_PREFIX + repo_name, 1) + + def is_initted(self, service: Service) -> bool: + repo_name = service.get_id() + if self._has_redis_init_mark(service): + return True + + initted = self.provider.backuper.is_initted(repo_name) + if initted: + self._redis_mark_as_init(service) + return True + + return False def get_snapshots(self, service: Service) -> List[Snapshot]: repo_name = service.get_id() diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py index 5d9b1c3..5bba9d5 100644 --- a/selfprivacy_api/backup/backuper.py +++ b/selfprivacy_api/backup/backuper.py @@ -8,6 +8,10 @@ class AbstractBackuper(ABC): def __init__(self): pass + @abstractmethod + def is_initted(self, repo_name: str) -> bool: + raise NotImplementedError + @abstractmethod def start_backup(self, folder: str, repo_name: str): raise NotImplementedError diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index a4a4830..8d9ac99 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -90,6 +90,20 @@ class ResticBackuper(AbstractBackuper): if not "created restic repository" in output: raise ValueError("cannot init a repo: " + output) + def is_initted(self, repo_name: str) -> bool: + command = self.restic_command( + repo_name, + "check", + "--json", + ) + + with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle: + output = handle.communicate()[0].decode("utf-8") + if not self.has_json(output): + return False + # raise NotImplementedError("error(big): " + output) + return True + def restored_size(self, repo_name, snapshot_id) -> float: """ Size of a snapshot @@ -172,6 +186,16 @@ class ResticBackuper(AbstractBackuper): return snapshots def parse_json_output(self, output: str) -> object: + starting_index = self.json_start(output) + + if starting_index == -1: + raise ValueError( + "There is no json in the restic snapshot output : " + output + ) + + return json.loads(output[starting_index:]) + + def json_start(self, output: str) -> int: indices = [ output.find("["), output.find("{"), @@ -179,10 +203,10 @@ class ResticBackuper(AbstractBackuper): indices = [x for x in indices if x 
!= -1] if indices == []: - raise ValueError( - "There is no json in the restic snapshot output : " + output - ) + return -1 + return min(indices) - starting_index = min(indices) - - return json.loads(output[starting_index:]) + def has_json(self, output: str) -> bool: + if self.json_start(output) == -1: + return False + return True diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index f6f3526..233014f 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -173,3 +173,31 @@ def test_redis_storage(backups_backblaze): assert isinstance(restored_provider, Backblaze) assert restored_provider.login == "ID" assert restored_provider.key == "KEY" + + +# lowlevel +def test_init_tracking_caching(backups, raw_dummy_service): + assert backups._has_redis_init_mark(raw_dummy_service) is False + + backups._redis_mark_as_init(raw_dummy_service) + + assert backups._has_redis_init_mark(raw_dummy_service) is True + assert backups.is_initted(raw_dummy_service) is True + + +# lowlevel +def test_init_tracking_caching2(backups, raw_dummy_service): + assert backups._has_redis_init_mark(raw_dummy_service) is False + + backups.init_repo(raw_dummy_service) + + assert backups._has_redis_init_mark(raw_dummy_service) is True + + +# only public API +def test_init_tracking(backups, raw_dummy_service): + assert backups.is_initted(raw_dummy_service) is False + + backups.init_repo(raw_dummy_service) + + assert backups.is_initted(raw_dummy_service) is True From d972fdc3cce5e1524752bce46024137b564b2884 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 29 Mar 2023 11:15:38 +0000 Subject: [PATCH 280/537] refactor(backups): make backups stateless --- selfprivacy_api/backup/__init__.py | 88 ++++++++++++------- .../backup/providers/local_file.py | 4 +- selfprivacy_api/backup/providers/provider.py | 2 +- tests/test_graphql/test_backup.py | 64 +++++++------- 4 files changed, 92 insertions(+), 66 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 550555d..9ac8895 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -16,6 +16,9 @@ from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider, get_kind from selfprivacy_api.graphql.queries.providers import BackupProvider +# a hack to store file path. 
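The json_start/has_json helpers added in the repo-init patch above handle restic sometimes prefixing its JSON with plain-text noise (rclone chatter, warnings): they scan for the first "[" or "{" and parse from there. A standalone illustration (sketch):

    import json

    def json_start(output: str) -> int:
        indices = [i for i in (output.find("["), output.find("{")) if i != -1]
        return min(indices) if indices else -1

    raw = 'rclone serve restic: ready\n[{"short_id": "abc123"}]'
    start = json_start(raw)
    assert start != -1
    snapshots = json.loads(raw[start:])  # -> [{'short_id': 'abc123'}]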
+REDIS_REPO_PATH_KEY = "backups:test_repo_path" + REDIS_PROVIDER_KEY = "backups:provider" REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" @@ -30,20 +33,30 @@ class Backups: provider: AbstractBackupProvider - def __init__(self, test_repo_file: str = ""): - if test_repo_file != "": - self.set_localfile_repo(test_repo_file) - else: - self.provider = self.lookup_provider() - - def set_localfile_repo(self, file_path: str): + @staticmethod + def set_localfile_repo(file_path: str): ProviderClass = get_provider(BackupProvider.FILE) provider = ProviderClass(file_path) - self.provider = provider + redis.set(REDIS_REPO_PATH_KEY, file_path) + Backups.store_provider_redis(provider) + + @staticmethod + def provider(): + return Backups.lookup_provider() + + @staticmethod + def set_provider(kind: str, login: str, key: str): + provider = Backups.construct_provider(kind, login, key) + Backups.store_provider_redis(provider) @staticmethod def construct_provider(kind: str, login: str, key: str): provider_class = get_provider(BackupProvider[kind]) + + if kind == "FILE": + path = redis.get(REDIS_REPO_PATH_KEY) + return provider_class(path) + return provider_class(login=login, key=key) @staticmethod @@ -68,19 +81,24 @@ class Backups: @staticmethod def reset(): redis.delete(REDIS_PROVIDER_KEY) + redis.delete(REDIS_REPO_PATH_KEY) for key in redis.keys(REDIS_INITTED_CACHE_PREFIX + "*"): redis.delete(key) - def lookup_provider(self) -> AbstractBackupProvider: + @staticmethod + def lookup_provider() -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() if redis_provider is not None: return redis_provider json_provider = Backups.load_provider_json() if json_provider is not None: + Backups.store_provider_redis(json_provider) return json_provider - return Backups.construct_provider("MEMORY", login="", key="") + memory_provider = Backups.construct_provider("MEMORY", login="", key="") + Backups.store_provider_redis(memory_provider) + return memory_provider @staticmethod def load_provider_json() -> AbstractBackupProvider: @@ -105,64 +123,74 @@ class Backups: kind=provider_string, login=account, key=key ) - def back_up(self, service: Service): + @staticmethod + def back_up(service: Service): folder = service.get_location() repo_name = service.get_id() service.pre_backup() - self.provider.backuper.start_backup(folder, repo_name) + Backups.provider().backuper.start_backup(folder, repo_name) service.post_restore() - def init_repo(self, service: Service): + @staticmethod + def init_repo(service: Service): repo_name = service.get_id() - self.provider.backuper.init(repo_name) - self._redis_mark_as_init(service) + Backups.provider().backuper.init(repo_name) + Backups._redis_mark_as_init(service) - def _has_redis_init_mark(self, service: Service) -> bool: + @staticmethod + def _has_redis_init_mark(service: Service) -> bool: repo_name = service.get_id() if redis.exists(REDIS_INITTED_CACHE_PREFIX + repo_name): return True return False - def _redis_mark_as_init(self, service: Service): + @staticmethod + def _redis_mark_as_init(service: Service): repo_name = service.get_id() redis.set(REDIS_INITTED_CACHE_PREFIX + repo_name, 1) - def is_initted(self, service: Service) -> bool: + @staticmethod + def is_initted(service: Service) -> bool: repo_name = service.get_id() - if self._has_redis_init_mark(service): + if Backups._has_redis_init_mark(service): return True - initted = self.provider.backuper.is_initted(repo_name) + initted = Backups.provider().backuper.is_initted(repo_name) if initted: - 
self._redis_mark_as_init(service) + Backups._redis_mark_as_init(service) return True return False - def get_snapshots(self, service: Service) -> List[Snapshot]: + @staticmethod + def get_snapshots(service: Service) -> List[Snapshot]: repo_name = service.get_id() - return self.provider.backuper.get_snapshots(repo_name) + return Backups.provider().backuper.get_snapshots(repo_name) - def restore_service_from_snapshot(self, service: Service, snapshot_id: str): + @staticmethod + def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() folder = service.get_location() - self.provider.backuper.restore_from_backup(repo_name, snapshot_id, folder) + Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folder) # Our dummy service is not yet globally registered so this is not testable yet - def restore_snapshot(self, snapshot: Snapshot): - self.restore_service_from_snapshot( + @staticmethod + def restore_snapshot(snapshot: Snapshot): + Backups.restore_service_from_snapshot( get_service_by_id(snapshot.service_name), snapshot.id ) - def service_snapshot_size(self, service: Service, snapshot_id: str) -> float: + @staticmethod + def service_snapshot_size(service: Service, snapshot_id: str) -> float: repo_name = service.get_id() - return self.provider.backuper.restored_size(repo_name, snapshot_id) + return Backups.provider().backuper.restored_size(repo_name, snapshot_id) # Our dummy service is not yet globally registered so this is not testable yet - def snapshot_restored_size(self, snapshot: Snapshot) -> float: + @staticmethod + def snapshot_restored_size(snapshot: Snapshot) -> float: return self.service_snapshot_size( get_service_by_id(snapshot.service_name), snapshot.id ) diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index 5ae45bd..bdd9213 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -5,5 +5,7 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class LocalFileBackup(AbstractBackupProvider): backuper = ResticBackuper("", "", "memory") - def __init__(self, filename: str): + # login and key args are for compatibility with generic provider methods. They are ignored. 
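is_initted above is a cache-aside check: trust the Redis mark when present, otherwise run the expensive restic check through the backuper and memoize only a positive answer, so a repository initialized later is still noticed on the next call. The pattern in isolation (a sketch; redis is an assumed client and backend_is_initted stands in for the real backuper call):

    INIT_PREFIX = "backups:initted_services:"

    def is_initted(repo_name: str) -> bool:
        if redis.exists(INIT_PREFIX + repo_name):
            return True                            # cheap path: cached mark
        if backend_is_initted(repo_name):          # slow path: actual `restic check`
            redis.set(INIT_PREFIX + repo_name, 1)  # memoize positive results only
            return True
        return False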
+ def __init__(self, filename: str, login: str = "", key: str = ""): + super().__init__() self.backuper = ResticBackuper("", "", f":local:{filename}/") diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index 0b57528..017c03d 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -12,6 +12,6 @@ class AbstractBackupProvider(ABC): def backuper(self) -> AbstractBackuper: raise NotImplementedError - def __init__(self, login, key): + def __init__(self, login="", key=""): self.login = login self.key = key diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 233014f..2e6c6d6 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -22,17 +22,15 @@ REPO_NAME = "test_backup" @pytest.fixture(scope="function") def backups(tmpdir): + Backups.reset() + test_repo_path = path.join(tmpdir, "totallyunrelated") - backups = Backups(test_repo_path) - backups.reset() - return backups + Backups.set_localfile_repo(test_repo_path) @pytest.fixture() def backups_backblaze(generic_userdata): - backups = Backups() - backups.reset() - return backups + Backups.reset() @pytest.fixture() @@ -59,7 +57,7 @@ def dummy_service(tmpdir, backups, raw_dummy_service): assert not path.exists(repo_path) # assert not repo_path - backups.init_repo(service) + Backups.init_repo(service) return service @@ -83,9 +81,8 @@ def file_backup(tmpdir) -> AbstractBackupProvider: def test_config_load(generic_userdata): - backups = Backups() - backups.reset() - provider = backups.provider + Backups.reset() + provider = Backups.provider() assert provider is not None assert isinstance(provider, Backblaze) @@ -114,7 +111,7 @@ def test_backup_simple_file(raw_dummy_service, file_backup): def test_backup_service(dummy_service, backups): - backups.back_up(dummy_service) + Backups.back_up(dummy_service) def test_no_repo(memory_backup): @@ -123,9 +120,9 @@ def test_no_repo(memory_backup): def test_one_snapshot(backups, dummy_service): - backups.back_up(dummy_service) + Backups.back_up(dummy_service) - snaps = backups.get_snapshots(dummy_service) + snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 snap = snaps[0] assert snap.service_name == dummy_service.get_id() @@ -137,30 +134,29 @@ def test_restore(backups, dummy_service): assert file_to_nuke is not None path_to_nuke = path.join(service_folder, file_to_nuke) - backups.back_up(dummy_service) - snap = backups.get_snapshots(dummy_service)[0] + Backups.back_up(dummy_service) + snap = Backups.get_snapshots(dummy_service)[0] assert snap is not None assert path.exists(path_to_nuke) remove(path_to_nuke) assert not path.exists(path_to_nuke) - backups.restore_service_from_snapshot(dummy_service, snap.id) + Backups.restore_service_from_snapshot(dummy_service, snap.id) assert path.exists(path_to_nuke) def test_sizing(backups, dummy_service): - backups.back_up(dummy_service) - snap = backups.get_snapshots(dummy_service)[0] - size = backups.service_snapshot_size(dummy_service, snap.id) + Backups.back_up(dummy_service) + snap = Backups.get_snapshots(dummy_service)[0] + size = Backups.service_snapshot_size(dummy_service, snap.id) assert size is not None assert size > 0 def test_redis_storage(backups_backblaze): - backups = Backups() - backups.reset() - provider = backups.provider + Backups.reset() + provider = Backups.provider() assert provider is not None @@ -168,8 +164,8 @@ def test_redis_storage(backups_backblaze): assert 
provider.login == "ID" assert provider.key == "KEY" - backups.store_provider_redis(provider) - restored_provider = backups.load_provider_redis() + Backups.store_provider_redis(provider) + restored_provider = Backups.load_provider_redis() assert isinstance(restored_provider, Backblaze) assert restored_provider.login == "ID" assert restored_provider.key == "KEY" @@ -177,27 +173,27 @@ def test_redis_storage(backups_backblaze): # lowlevel def test_init_tracking_caching(backups, raw_dummy_service): - assert backups._has_redis_init_mark(raw_dummy_service) is False + assert Backups._has_redis_init_mark(raw_dummy_service) is False - backups._redis_mark_as_init(raw_dummy_service) + Backups._redis_mark_as_init(raw_dummy_service) - assert backups._has_redis_init_mark(raw_dummy_service) is True - assert backups.is_initted(raw_dummy_service) is True + assert Backups._has_redis_init_mark(raw_dummy_service) is True + assert Backups.is_initted(raw_dummy_service) is True # lowlevel def test_init_tracking_caching2(backups, raw_dummy_service): - assert backups._has_redis_init_mark(raw_dummy_service) is False + assert Backups._has_redis_init_mark(raw_dummy_service) is False - backups.init_repo(raw_dummy_service) + Backups.init_repo(raw_dummy_service) - assert backups._has_redis_init_mark(raw_dummy_service) is True + assert Backups._has_redis_init_mark(raw_dummy_service) is True # only public API def test_init_tracking(backups, raw_dummy_service): - assert backups.is_initted(raw_dummy_service) is False + assert Backups.is_initted(raw_dummy_service) is False - backups.init_repo(raw_dummy_service) + Backups.init_repo(raw_dummy_service) - assert backups.is_initted(raw_dummy_service) is True + assert Backups.is_initted(raw_dummy_service) is True From a1340091657a0080a9fd5912a0ced6ebf2d24fc2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 29 Mar 2023 11:45:52 +0000 Subject: [PATCH 281/537] feature(backups): huey task to back up --- selfprivacy_api/backup/tasks.py | 9 +++++++++ tests/test_graphql/test_backup.py | 10 ++++++++++ 2 files changed, 19 insertions(+) create mode 100644 selfprivacy_api/backup/tasks.py diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py new file mode 100644 index 0000000..f1715cc --- /dev/null +++ b/selfprivacy_api/backup/tasks.py @@ -0,0 +1,9 @@ +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.services.service import Service +from selfprivacy_api.backup import Backups + +# huey tasks need to return something +@huey.task() +def start_backup(service: Service) -> bool: + Backups.back_up(service) + return True diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 2e6c6d6..7683d4d 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -13,8 +13,10 @@ import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze + from selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.backup.tasks import start_backup TESTFILE_BODY = "testytest!" 
REPO_NAME = "test_backup" @@ -197,3 +199,11 @@ def test_init_tracking(backups, raw_dummy_service): Backups.init_repo(raw_dummy_service) assert Backups.is_initted(raw_dummy_service) is True + + +def test_backup_service_task(backups, dummy_service): + handle = start_backup(dummy_service) + handle(blocking=True) + + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 From 35a4fec9d4023c1db9dea99bea3dffdc14ff5081 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 16:29:06 +0000 Subject: [PATCH 282/537] feature(backups): return snapshot info from backup function --- selfprivacy_api/backup/restic_backuper.py | 39 +++++++++++++++++++---- 1 file changed, 33 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 8d9ac99..73292f0 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -1,5 +1,6 @@ import subprocess import json +import datetime from typing import List @@ -63,6 +64,7 @@ class ResticBackuper(AbstractBackuper): backup_command = self.restic_command( repo_name, "backup", + "--json", folder, ) with subprocess.Popen( @@ -72,8 +74,27 @@ class ResticBackuper(AbstractBackuper): stderr=subprocess.STDOUT, ) as handle: output = handle.communicate()[0].decode("utf-8") - if "saved" not in output: - raise ValueError("could not create a new snapshot: " + output) + try: + messages = self.parse_json_output(output) + return ResticBackuper._snapshot_from_backup_messages( + messages, repo_name + ) + except ValueError as e: + raise ValueError("could not create a snapshot: ") from e + + @staticmethod + def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: + for message in messages: + if message["message_type"] == "summary": + return ResticBackuper._snapshot_from_fresh_summary(message, repo_name) + + @staticmethod + def _snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot: + return Snapshot( + id=message["snapshot_id"], + created_at=datetime.datetime.now(datetime.timezone.utc), + service_name=repo_name, + ) def init(self, repo_name): init_command = self.restic_command( @@ -189,11 +210,17 @@ class ResticBackuper(AbstractBackuper): starting_index = self.json_start(output) if starting_index == -1: - raise ValueError( - "There is no json in the restic snapshot output : " + output - ) + raise ValueError("There is no json in the restic output : " + output) - return json.loads(output[starting_index:]) + truncated_output = output[starting_index:] + json_messages = truncated_output.splitlines() + if len(json_messages) == 1: + return json.loads(truncated_output) + + result_array = [] + for message in json_messages: + result_array.append(json.loads(message)) + return result_array def json_start(self, output: str) -> int: indices = [ From d57dc3f7d2a084be3b92f9c054bd140bd97fd3e0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 17:23:16 +0000 Subject: [PATCH 283/537] test(backups): test that we do return snapshot on backup --- selfprivacy_api/backup/restic_backuper.py | 1 + tests/test_graphql/test_backup.py | 11 +++++++++++ 2 files changed, 12 insertions(+) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 73292f0..cc81361 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -87,6 +87,7 @@ class ResticBackuper(AbstractBackuper): for message in messages: if message["message_type"] == "summary": return 
ResticBackuper._snapshot_from_fresh_summary(message, repo_name) + raise ValueError("no summary message in restic json output") @staticmethod def _snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 7683d4d..ec16306 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -130,6 +130,17 @@ def test_one_snapshot(backups, dummy_service): assert snap.service_name == dummy_service.get_id() +def test_backup_returns_snapshot(backups, dummy_service): + service_folder = dummy_service.get_location() + provider = Backups.provider() + name = dummy_service.get_id() + snapshot = provider.backuper.start_backup(service_folder, name) + + assert snapshot.id is not None + assert snapshot.service_name == name + assert snapshot.created_at is not None + + def test_restore(backups, dummy_service): service_folder = dummy_service.get_location() file_to_nuke = listdir(service_folder)[0] From 43b6ebd04d32190e0e07d990878900b9f49060b8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 18:18:23 +0000 Subject: [PATCH 284/537] feature(backups): cache snapshots and last backup timestamps --- selfprivacy_api/backup/__init__.py | 47 ++++++++++++++++++++++++++++-- 1 file changed, 45 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 9ac8895..9bf87a0 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,4 +1,5 @@ -from typing import List +from typing import List, Optional +from datetime import datetime from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.models.backup.provider import BackupProviderModel @@ -17,6 +18,10 @@ from selfprivacy_api.backup.providers import get_provider, get_kind from selfprivacy_api.graphql.queries.providers import BackupProvider # a hack to store file path. 
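For context on the message scanning above: with --json, restic backup emits newline-delimited JSON, a stream of status objects followed by a single summary object carrying message_type "summary" and the new snapshot_id. Picking it out reduces to (standalone sketch):

    import json

    raw = "\n".join([
        '{"message_type": "status", "percent_done": 1.0}',
        '{"message_type": "summary", "snapshot_id": "abc123"}',
    ])

    messages = [json.loads(line) for line in raw.splitlines()]
    summary = next(m for m in messages if m["message_type"] == "summary")
    assert summary["snapshot_id"] == "abc123"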
+REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day + +REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" +REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" REDIS_REPO_PATH_KEY = "backups:test_repo_path" REDIS_PROVIDER_KEY = "backups:provider" @@ -40,6 +45,35 @@ class Backups: redis.set(REDIS_REPO_PATH_KEY, file_path) Backups.store_provider_redis(provider) + @staticmethod + def _redis_last_backup_key(service_id): + return REDIS_LAST_BACKUP_PREFIX + service_id + + @staticmethod + def _redis_snapshot_key(snapshot: Snapshot): + return REDIS_SNAPSHOTS_PREFIX + snapshot.id + + @staticmethod + def get_last_backed_up(service: Service) -> Optional[datetime]: + return Backups._get_last_backup_time_redis(service.get_id()) + + @staticmethod + def _get_last_backup_time_redis(service_id: str) -> Optional[datetime]: + key = Backups._redis_last_backup_key(service_id) + if not redis.exists(key): + return None + + snapshot = hash_as_model(redis, key) + return snapshot.created_at + + @staticmethod + def _store_last_snapshot(service_id: str, snapshot: Snapshot): + store_model_as_hash(redis, Backups._redis_last_backup_key(service_id), snapshot) + + snapshot_key = Backups._redis_snapshot_key(snapshot) + store_model_as_hash(redis, snapshot_key, snapshot) + redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) + @staticmethod def provider(): return Backups.lookup_provider() @@ -82,9 +116,16 @@ class Backups: def reset(): redis.delete(REDIS_PROVIDER_KEY) redis.delete(REDIS_REPO_PATH_KEY) + for key in redis.keys(REDIS_INITTED_CACHE_PREFIX + "*"): redis.delete(key) + for key in redis.keys(REDIS_SNAPSHOTS_PREFIX + "*"): + redis.delete(key) + + for key in redis.keys(REDIS_LAST_BACKUP_PREFIX + "*"): + redis.delete(key) + @staticmethod def lookup_provider() -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() @@ -129,7 +170,9 @@ class Backups: repo_name = service.get_id() service.pre_backup() - Backups.provider().backuper.start_backup(folder, repo_name) + snapshot = Backups.provider().backuper.start_backup(folder, repo_name) + Backups._store_last_snapshot(repo_name, snapshot) + service.post_restore() @staticmethod From d354f4ac0bb04e579ca3f412fe56ee6b9e560940 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 18:37:12 +0000 Subject: [PATCH 285/537] feature(backups): check, set and unset service autobackup status --- selfprivacy_api/backup/__init__.py | 33 +++++++++++++++++++++++------- 1 file changed, 26 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 9bf87a0..79844a3 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -20,6 +20,8 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider # a hack to store file path. 
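The design here keeps two records per fresh snapshot: a permanent last-backed-up hash (the autobackup scheduler must know the last backup time indefinitely) and a snapshot cache entry that expires after a day, so listings eventually get re-fetched from the repository. At the Redis level that is just a hash plus a TTL (a sketch, assumed client):

    DAY = 24 * 60 * 60

    # permanent: overwritten on every backup, never expires
    redis.hset("backups:last-backed-up:test_service",
               mapping={"id": "abc123", "created_at": "2023-04-03T18:00:00+00:00"})

    # cache: ages out, so a stale snapshot listing cannot live forever
    redis.hset("backups:snapshots:abc123",
               mapping={"id": "abc123", "service_name": "test_service"})
    redis.expire("backups:snapshots:abc123", DAY)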
REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day +REDIS_AUTOBACKUP_ENABLED_PREFIX = "backup:autobackup:services:" + REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" REDIS_REPO_PATH_KEY = "backups:test_repo_path" @@ -74,6 +76,22 @@ class Backups: store_model_as_hash(redis, snapshot_key, snapshot) redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) + @staticmethod + def _redis_autobackup_key(service: Service): + return REDIS_AUTOBACKUP_ENABLED_PREFIX + service.get_id() + + @staticmethod + def enable_autobackup(service: Service): + redis.set(Backups._redis_autobackup_key(service), True) + + @staticmethod + def disable_autobackup(service: Service): + redis.delete(Backups._redis_autobackup_key(service)) + + @staticmethod + def is_autobackup_enabled(service: Service) -> bool: + return redis.exists(Backups._redis_autobackup_key(service)) + @staticmethod def provider(): return Backups.lookup_provider() @@ -117,14 +135,15 @@ class Backups: redis.delete(REDIS_PROVIDER_KEY) redis.delete(REDIS_REPO_PATH_KEY) - for key in redis.keys(REDIS_INITTED_CACHE_PREFIX + "*"): - redis.delete(key) + prefixes_to_clean = [ + REDIS_INITTED_CACHE_PREFIX, + REDIS_SNAPSHOTS_PREFIX, + REDIS_LAST_BACKUP_PREFIX, + ] - for key in redis.keys(REDIS_SNAPSHOTS_PREFIX + "*"): - redis.delete(key) - - for key in redis.keys(REDIS_LAST_BACKUP_PREFIX + "*"): - redis.delete(key) + for prefix in prefixes_to_clean: + for key in redis.keys(prefix + "*"): + redis.delete(key) @staticmethod def lookup_provider() -> AbstractBackupProvider: From 951bb8d5ec71e70515046f931e26dab351c0ced8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 18:39:55 +0000 Subject: [PATCH 286/537] fix(backups): remove self from static method --- selfprivacy_api/backup/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 79844a3..2c1e250 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -253,6 +253,6 @@ class Backups: # Our dummy service is not yet globally registered so this is not testable yet @staticmethod def snapshot_restored_size(snapshot: Snapshot) -> float: - return self.service_snapshot_size( + return Backups.service_snapshot_size( get_service_by_id(snapshot.service_name), snapshot.id ) From 79b9bb352ad9115953b1569c1754592f8189fafe Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 18:54:27 +0000 Subject: [PATCH 287/537] feature(backups): methods for autobackup period setting and getting --- selfprivacy_api/backup/__init__.py | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 2c1e250..b3a72c6 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -21,14 +21,15 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day REDIS_AUTOBACKUP_ENABLED_PREFIX = "backup:autobackup:services:" - REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" -REDIS_REPO_PATH_KEY = "backups:test_repo_path" - -REDIS_PROVIDER_KEY = "backups:provider" REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" +REDIS_REPO_PATH_KEY = "backups:test_repo_path" +REDIS_PROVIDER_KEY = "backups:provider" +REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" + + redis = 
RedisPool().get_connection() @@ -86,12 +87,30 @@ class Backups: @staticmethod def disable_autobackup(service: Service): + """also see disable_all_autobackup()""" redis.delete(Backups._redis_autobackup_key(service)) @staticmethod def is_autobackup_enabled(service: Service) -> bool: return redis.exists(Backups._redis_autobackup_key(service)) + @staticmethod + def autobackup_period_minutes() -> Optional[int]: + """None means autobackup is disabled""" + if not redis.exists(REDIS_AUTOBACKUP_PERIOD_KEY): + return None + return redis.get(REDIS_AUTOBACKUP_PERIOD_KEY) + + @staticmethod + def set_autobackup_period_minutes(minutes: int): + """This initiates backup very soon if some services are not backed up""" + redis.set(REDIS_AUTOBACKUP_PERIOD_KEY, minutes) + + @staticmethod + def disable_all_autobackup(): + """disables all automatic backing up, but does not change per-service settings""" + redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) + @staticmethod def provider(): return Backups.lookup_provider() @@ -134,6 +153,7 @@ class Backups: def reset(): redis.delete(REDIS_PROVIDER_KEY) redis.delete(REDIS_REPO_PATH_KEY) + redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) prefixes_to_clean = [ REDIS_INITTED_CACHE_PREFIX, From 0a4338596b0a01e046b9dd0463866227229628d0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 21:59:14 +0000 Subject: [PATCH 288/537] test(backups): test setting services as enabled for autobackups --- selfprivacy_api/backup/__init__.py | 5 +++-- tests/test_graphql/test_backup.py | 10 ++++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b3a72c6..e3143b5 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -78,12 +78,12 @@ class Backups: redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) @staticmethod - def _redis_autobackup_key(service: Service): + def _redis_autobackup_key(service: Service) -> str: return REDIS_AUTOBACKUP_ENABLED_PREFIX + service.get_id() @staticmethod def enable_autobackup(service: Service): - redis.set(Backups._redis_autobackup_key(service), True) + redis.set(Backups._redis_autobackup_key(service), 1) @staticmethod def disable_autobackup(service: Service): @@ -159,6 +159,7 @@ class Backups: REDIS_INITTED_CACHE_PREFIX, REDIS_SNAPSHOTS_PREFIX, REDIS_LAST_BACKUP_PREFIX, + REDIS_AUTOBACKUP_ENABLED_PREFIX, ] for prefix in prefixes_to_clean: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index ec16306..97dd6af 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -218,3 +218,13 @@ def test_backup_service_task(backups, dummy_service): snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 + + +def test_autobackup_enable_service(backups, dummy_service): + assert not Backups.is_autobackup_enabled(dummy_service) + + Backups.enable_autobackup(dummy_service) + assert Backups.is_autobackup_enabled(dummy_service) + + Backups.disable_autobackup(dummy_service) + assert not Backups.is_autobackup_enabled(dummy_service) From 343fda0630a67e1bb622bcd731c372d1d77f60c2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 22:39:04 +0000 Subject: [PATCH 289/537] test(backups): test setting autobackup period --- selfprivacy_api/backup/__init__.py | 10 ++++++++-- tests/test_graphql/test_backup.py | 22 ++++++++++++++++++++++ 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py 
b/selfprivacy_api/backup/__init__.py index e3143b5..b54482b 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -99,11 +99,17 @@ class Backups: """None means autobackup is disabled""" if not redis.exists(REDIS_AUTOBACKUP_PERIOD_KEY): return None - return redis.get(REDIS_AUTOBACKUP_PERIOD_KEY) + return int(redis.get(REDIS_AUTOBACKUP_PERIOD_KEY)) @staticmethod def set_autobackup_period_minutes(minutes: int): - """This initiates backup very soon if some services are not backed up""" + """ + 0 and negative numbers are equivalent to disable. + Setting to a positive number may result in a backup very soon if some services are not backed up. + """ + if minutes <= 0: + Backups.disable_all_autobackup() + return redis.set(REDIS_AUTOBACKUP_PERIOD_KEY, minutes) @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 97dd6af..31ecefa 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -228,3 +228,25 @@ def test_autobackup_enable_service(backups, dummy_service): Backups.disable_autobackup(dummy_service) assert not Backups.is_autobackup_enabled(dummy_service) + + +def test_set_autobackup_period(backups): + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(2) + assert Backups.autobackup_period_minutes() == 2 + + Backups.disable_all_autobackup() + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(3) + assert Backups.autobackup_period_minutes() == 3 + + Backups.set_autobackup_period_minutes(0) + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(3) + assert Backups.autobackup_period_minutes() == 3 + + Backups.set_autobackup_period_minutes(-1) + assert Backups.autobackup_period_minutes() is None From 054b07baa3b32658a5fe911cef9e9bdd789c43f0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Apr 2023 23:23:47 +0000 Subject: [PATCH 290/537] feature(backups): add a datetime validator function for huey autobackups --- selfprivacy_api/backup/__init__.py | 43 +++++++++++++++++++++++++----- 1 file changed, 37 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b54482b..f58d4c4 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,5 +1,5 @@ from typing import List, Optional -from datetime import datetime +from datetime import datetime, timezone, timedelta from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.models.backup.provider import BackupProviderModel @@ -78,21 +78,52 @@ class Backups: redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) @staticmethod - def _redis_autobackup_key(service: Service) -> str: - return REDIS_AUTOBACKUP_ENABLED_PREFIX + service.get_id() + def _redis_autobackup_key(service_name: str) -> str: + return REDIS_AUTOBACKUP_ENABLED_PREFIX + service_name @staticmethod def enable_autobackup(service: Service): - redis.set(Backups._redis_autobackup_key(service), 1) + redis.set(Backups._redis_autobackup_key(service.get_id()), 1) + + @staticmethod + def is_time_to_backup(time: datetime) -> bool: + """ + Intended as a time validator for huey cron scheduler of automatic backups + """ + for key in redis.keys(REDIS_AUTOBACKUP_ENABLED_PREFIX + "*"): + service_id = key.split(":")[-1] + if Backups.is_time_to_backup_service(service_id, time): + return True + return False + + @staticmethod + def 
is_time_to_backup_service(service_id: str, time: datetime): + period = Backups.autobackup_period_minutes() + if period is None: + return False + if not Backups._is_autobackup_enabled_by_name(service_id) is None: + return False + + last_backup = Backups._get_last_backup_time_redis(service_id) + if last_backup is None: + return True # queue a backup immediately if there are no previous backups + + if time > last_backup + timedelta(minutes=period): + return True + return False @staticmethod def disable_autobackup(service: Service): """also see disable_all_autobackup()""" - redis.delete(Backups._redis_autobackup_key(service)) + redis.delete(Backups._redis_autobackup_key(service.get_id())) @staticmethod def is_autobackup_enabled(service: Service) -> bool: - return redis.exists(Backups._redis_autobackup_key(service)) + return Backups._is_autobackup_enabled_by_name(service.get_id()) + + @staticmethod + def _is_autobackup_enabled_by_name(service_name: str): + return redis.exists(Backups._redis_autobackup_key(service_name)) @staticmethod def autobackup_period_minutes() -> Optional[int]: From ed0861aacc230ba1040c2ef1da6ce22f14cf10d4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Apr 2023 15:18:54 +0000 Subject: [PATCH 291/537] test(backups): test last backup date retrieval --- selfprivacy_api/backup/__init__.py | 3 ++- tests/test_graphql/test_backup.py | 8 ++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f58d4c4..7e73ebd 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -58,6 +58,7 @@ class Backups: @staticmethod def get_last_backed_up(service: Service) -> Optional[datetime]: + """Get a timezone-aware time of the last backup of a service""" return Backups._get_last_backup_time_redis(service.get_id()) @staticmethod @@ -66,7 +67,7 @@ class Backups: if not redis.exists(key): return None - snapshot = hash_as_model(redis, key) + snapshot = hash_as_model(redis, key, Snapshot) return snapshot.created_at @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 31ecefa..f6ad24a 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -3,6 +3,7 @@ import os.path as path from os import makedirs from os import remove from os import listdir +from datetime import datetime, timedelta, timezone from selfprivacy_api.services.test_service import DummyService @@ -113,8 +114,15 @@ def test_backup_simple_file(raw_dummy_service, file_backup): def test_backup_service(dummy_service, backups): + assert Backups.get_last_backed_up(dummy_service) is None Backups.back_up(dummy_service) + now = datetime.now(timezone.utc) + date = Backups.get_last_backed_up(dummy_service) + assert date is not None + assert now > date + assert now - date < timedelta(minutes=1) + def test_no_repo(memory_backup): with pytest.raises(ValueError): From 2c510ae8842d2710d53a893d61c420ceae5988ba Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Apr 2023 15:41:02 +0000 Subject: [PATCH 292/537] feature(backups): add snapshot cache sync functions --- selfprivacy_api/backup/__init__.py | 35 ++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7e73ebd..13eb7de 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -72,12 +72,46 @@ class Backups: @staticmethod def 
_store_last_snapshot(service_id: str, snapshot: Snapshot): + # non-expiring timestamp of the last store_model_as_hash(redis, Backups._redis_last_backup_key(service_id), snapshot) + # expiring cache entry + Backups.cache_snapshot(snapshot) + @staticmethod + def cache_snapshot(snapshot: Snapshot): snapshot_key = Backups._redis_snapshot_key(snapshot) store_model_as_hash(redis, snapshot_key, snapshot) redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) + @staticmethod + def delete_cached_snapshot(snapshot: Snapshot): + snapshot_key = Backups._redis_snapshot_key(snapshot) + redis.delete(snapshot_key) + + @staticmethod + def get_cached_snapshots() -> List[Snapshot]: + keys = redis.keys(REDIS_SNAPSHOTS_PREFIX + "*") + result = [] + + for key in keys: + snapshot = hash_as_model(redis, key, Snapshot) + result.append(snapshot) + return result + + @staticmethod + def get_cached_snapshots_service(service_id: str) -> List[Snapshot]: + snapshots = Backups.get_cached_snapshots() + return [snap for snap in snapshots if snap.service_name == service_id] + + @staticmethod + def sync_service_snapshots(service_id: str, snapshots: List[Snapshot]): + for snapshot in snapshots: + if snapshot.service_name == service_id: + Backups.cache_snapshot(snapshot) + for snapshot in Backups.get_cached_snapshots_service(service_id): + if snapshot.id not in [snap.id for snap in snapshots]: + Backups.delete_cached_snapshot(snapshot) + @staticmethod def _redis_autobackup_key(service_name: str) -> str: return REDIS_AUTOBACKUP_ENABLED_PREFIX + service_name @@ -244,6 +278,7 @@ class Backups: @staticmethod def back_up(service: Service): + """The top-level function to back up a service""" folder = service.get_location() repo_name = service.get_id() From f9eaaab929583d52e17d8b0fd3f0c0434feb4928 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Apr 2023 17:24:53 +0000 Subject: [PATCH 293/537] feature(backups): enable snapshot cache usage --- selfprivacy_api/backup/__init__.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 13eb7de..d22c4c9 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -321,9 +321,16 @@ class Backups: @staticmethod def get_snapshots(service: Service) -> List[Snapshot]: - repo_name = service.get_id() + service_id = service.get_id() + cached_snapshots = Backups.get_cached_snapshots_service(service_id) + if cached_snapshots != []: + return cached_snapshots + # TODO: the oldest snapshots will get expired faster than the new ones. + # How to detect that the end is missing? 
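sync_service_snapshots above is a small cache reconciliation: upsert every snapshot the repository reports, then evict cached entries the repository no longer has. Over plain dicts the same logic reads (sketch):

    def sync(cache, upstream):
        for snap in upstream:
            cache[snap["id"]] = snap           # upsert fresh entries
        upstream_ids = {snap["id"] for snap in upstream}
        for snap_id in list(cache):            # copy keys: we mutate while iterating
            if snap_id not in upstream_ids:
                del cache[snap_id]             # evict snapshots deleted upstream

    cache = {"old": {"id": "old"}}
    sync(cache, [{"id": "new"}])
    assert list(cache) == ["new"]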
- return Backups.provider().backuper.get_snapshots(repo_name) + upstream_snapshots = Backups.provider().backuper.get_snapshots(service_id) + Backups.sync_service_snapshots(service_id, upstream_snapshots) + return upstream_snapshots @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): From ef57e25a2629524feacf878cd645bad92647cc6c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Apr 2023 18:12:05 +0000 Subject: [PATCH 294/537] test(backups): test that we do use cache --- tests/test_graphql/test_backup.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index f6ad24a..619656d 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -192,6 +192,17 @@ def test_redis_storage(backups_backblaze): assert restored_provider.key == "KEY" +def test_snapshots_caching(backups, dummy_service): + Backups.back_up(dummy_service) + + # we test indirectly that we do redis calls instead of shell calls + start = datetime.now() + for i in range(10): + snapshots = Backups.get_snapshots(dummy_service) + assert len(snapshots) == 1 + assert datetime.now() - start < timedelta(seconds=0.5) + + # lowlevel def test_init_tracking_caching(backups, raw_dummy_service): assert Backups._has_redis_init_mark(raw_dummy_service) is False From b305c19559d619df60d302e3f405a18f363f0264 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 10 Apr 2023 13:22:33 +0000 Subject: [PATCH 295/537] refactor(backups): split out storage --- selfprivacy_api/backup/__init__.py | 192 +++++++---------------------- selfprivacy_api/backup/storage.py | 168 +++++++++++++++++++++++++ tests/test_graphql/test_backup.py | 118 ++++++++++-------- 3 files changed, 278 insertions(+), 200 deletions(-) create mode 100644 selfprivacy_api/backup/storage.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index d22c4c9..c931e57 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,41 +1,22 @@ from typing import List, Optional -from datetime import datetime, timezone, timedelta +from datetime import datetime, timedelta from selfprivacy_api.models.backup.snapshot import Snapshot -from selfprivacy_api.models.backup.provider import BackupProviderModel -from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass from selfprivacy_api.utils import ReadUserData from selfprivacy_api.utils.redis_pool import RedisPool -from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service -from selfprivacy_api.backup.providers.provider import AbstractBackupProvider -from selfprivacy_api.backup.providers import get_provider, get_kind from selfprivacy_api.graphql.queries.providers import BackupProvider -# a hack to store file path. 
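The refactor beginning here pushes every Redis access behind a dedicated Storage module, leaving Backups with policy only; each redis.* call in the controller becomes a named Storage method. In miniature (sketch, with a dict standing in for Redis):

    class Storage:
        """All persistence in one place (Redis in the real module)."""
        _kv: dict = {}

        @staticmethod
        def store_testrepo_path(path: str) -> None:
            Storage._kv["repo_path"] = path

        @staticmethod
        def get_testrepo_path() -> str:
            return Storage._kv["repo_path"]

    class Backups:
        """Policy layer: decides what to persist, never how."""
        @staticmethod
        def set_localfile_repo(path: str) -> None:
            Storage.store_testrepo_path(path)

This split is also what makes the all-static Backups testable: tests wipe Storage wholesale instead of juggling singleton instances.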
-REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day - -REDIS_AUTOBACKUP_ENABLED_PREFIX = "backup:autobackup:services:" -REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" -REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" -REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" - -REDIS_REPO_PATH_KEY = "backups:test_repo_path" -REDIS_PROVIDER_KEY = "backups:provider" -REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" +from selfprivacy_api.backup.providers.provider import AbstractBackupProvider +from selfprivacy_api.backup.providers import get_provider +from selfprivacy_api.backup.storage import Storage -redis = RedisPool().get_connection() - - -# Singleton has a property of being persistent between tests. -# I don't know what to do with this yet -# class Backups(metaclass=SingletonMetaclass): class Backups: """A singleton controller for backups""" @@ -45,88 +26,40 @@ class Backups: def set_localfile_repo(file_path: str): ProviderClass = get_provider(BackupProvider.FILE) provider = ProviderClass(file_path) - redis.set(REDIS_REPO_PATH_KEY, file_path) - Backups.store_provider_redis(provider) - - @staticmethod - def _redis_last_backup_key(service_id): - return REDIS_LAST_BACKUP_PREFIX + service_id - - @staticmethod - def _redis_snapshot_key(snapshot: Snapshot): - return REDIS_SNAPSHOTS_PREFIX + snapshot.id + Storage.store_testrepo_path(file_path) + Storage.store_provider(provider) @staticmethod def get_last_backed_up(service: Service) -> Optional[datetime]: """Get a timezone-aware time of the last backup of a service""" - return Backups._get_last_backup_time_redis(service.get_id()) - - @staticmethod - def _get_last_backup_time_redis(service_id: str) -> Optional[datetime]: - key = Backups._redis_last_backup_key(service_id) - if not redis.exists(key): - return None - - snapshot = hash_as_model(redis, key, Snapshot) - return snapshot.created_at - - @staticmethod - def _store_last_snapshot(service_id: str, snapshot: Snapshot): - # non-expiring timestamp of the last - store_model_as_hash(redis, Backups._redis_last_backup_key(service_id), snapshot) - # expiring cache entry - Backups.cache_snapshot(snapshot) - - @staticmethod - def cache_snapshot(snapshot: Snapshot): - snapshot_key = Backups._redis_snapshot_key(snapshot) - store_model_as_hash(redis, snapshot_key, snapshot) - redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) - - @staticmethod - def delete_cached_snapshot(snapshot: Snapshot): - snapshot_key = Backups._redis_snapshot_key(snapshot) - redis.delete(snapshot_key) - - @staticmethod - def get_cached_snapshots() -> List[Snapshot]: - keys = redis.keys(REDIS_SNAPSHOTS_PREFIX + "*") - result = [] - - for key in keys: - snapshot = hash_as_model(redis, key, Snapshot) - result.append(snapshot) - return result + return Storage.get_last_backup_time(service.get_id()) @staticmethod def get_cached_snapshots_service(service_id: str) -> List[Snapshot]: - snapshots = Backups.get_cached_snapshots() + snapshots = Storage.get_cached_snapshots() return [snap for snap in snapshots if snap.service_name == service_id] @staticmethod def sync_service_snapshots(service_id: str, snapshots: List[Snapshot]): for snapshot in snapshots: if snapshot.service_name == service_id: - Backups.cache_snapshot(snapshot) + Storage.cache_snapshot(snapshot) for snapshot in Backups.get_cached_snapshots_service(service_id): if snapshot.id not in [snap.id for snap in snapshots]: - Backups.delete_cached_snapshot(snapshot) - - @staticmethod - def _redis_autobackup_key(service_name: str) -> str: - return 
REDIS_AUTOBACKUP_ENABLED_PREFIX + service_name + Storage.delete_cached_snapshot(snapshot) @staticmethod def enable_autobackup(service: Service): - redis.set(Backups._redis_autobackup_key(service.get_id()), 1) + Storage.set_autobackup(service) @staticmethod def is_time_to_backup(time: datetime) -> bool: """ Intended as a time validator for huey cron scheduler of automatic backups """ - for key in redis.keys(REDIS_AUTOBACKUP_ENABLED_PREFIX + "*"): - service_id = key.split(":")[-1] + + enabled_services = Storage.services_with_autobackup() + for service_id in enabled_services: if Backups.is_time_to_backup_service(service_id, time): return True return False @@ -136,10 +69,10 @@ class Backups: period = Backups.autobackup_period_minutes() if period is None: return False - if not Backups._is_autobackup_enabled_by_name(service_id) is None: + if not Storage.is_autobackup_set_by_name(service_id) is None: return False - last_backup = Backups._get_last_backup_time_redis(service_id) + last_backup = Storage.get_last_backup_time(service_id) if last_backup is None: return True # queue a backup immediately if there are no previous backups @@ -150,22 +83,16 @@ class Backups: @staticmethod def disable_autobackup(service: Service): """also see disable_all_autobackup()""" - redis.delete(Backups._redis_autobackup_key(service.get_id())) + Storage.unset_autobackup(service) @staticmethod def is_autobackup_enabled(service: Service) -> bool: - return Backups._is_autobackup_enabled_by_name(service.get_id()) - - @staticmethod - def _is_autobackup_enabled_by_name(service_name: str): - return redis.exists(Backups._redis_autobackup_key(service_name)) + return Storage.is_autobackup_set(service.get_id()) @staticmethod def autobackup_period_minutes() -> Optional[int]: """None means autobackup is disabled""" - if not redis.exists(REDIS_AUTOBACKUP_PERIOD_KEY): - return None - return int(redis.get(REDIS_AUTOBACKUP_PERIOD_KEY)) + return Storage.autobackup_period_minutes() @staticmethod def set_autobackup_period_minutes(minutes: int): @@ -176,12 +103,12 @@ class Backups: if minutes <= 0: Backups.disable_all_autobackup() return - redis.set(REDIS_AUTOBACKUP_PERIOD_KEY, minutes) + Storage.store_autobackup_period_minutes(minutes) @staticmethod def disable_all_autobackup(): """disables all automatic backing up, but does not change per-service settings""" - redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) + Storage.delete_backup_period() @staticmethod def provider(): @@ -190,53 +117,21 @@ class Backups: @staticmethod def set_provider(kind: str, login: str, key: str): provider = Backups.construct_provider(kind, login, key) - Backups.store_provider_redis(provider) + Storage.store_provider(provider) @staticmethod def construct_provider(kind: str, login: str, key: str): provider_class = get_provider(BackupProvider[kind]) if kind == "FILE": - path = redis.get(REDIS_REPO_PATH_KEY) + path = Storage.get_testrepo_path() return provider_class(path) return provider_class(login=login, key=key) - @staticmethod - def store_provider_redis(provider: AbstractBackupProvider): - store_model_as_hash( - redis, - REDIS_PROVIDER_KEY, - BackupProviderModel( - kind=get_kind(provider), login=provider.login, key=provider.key - ), - ) - - @staticmethod - def load_provider_redis() -> AbstractBackupProvider: - provider_model = hash_as_model(redis, REDIS_PROVIDER_KEY, BackupProviderModel) - if provider_model is None: - return None - return Backups.construct_provider( - provider_model.kind, provider_model.login, provider_model.key - ) - @staticmethod def reset(): - 
redis.delete(REDIS_PROVIDER_KEY) - redis.delete(REDIS_REPO_PATH_KEY) - redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) - - prefixes_to_clean = [ - REDIS_INITTED_CACHE_PREFIX, - REDIS_SNAPSHOTS_PREFIX, - REDIS_LAST_BACKUP_PREFIX, - REDIS_AUTOBACKUP_ENABLED_PREFIX, - ] - - for prefix in prefixes_to_clean: - for key in redis.keys(prefix + "*"): - redis.delete(key) + Storage.reset() @staticmethod def lookup_provider() -> AbstractBackupProvider: @@ -246,11 +141,11 @@ class Backups: json_provider = Backups.load_provider_json() if json_provider is not None: - Backups.store_provider_redis(json_provider) + Storage.store_provider(json_provider) return json_provider memory_provider = Backups.construct_provider("MEMORY", login="", key="") - Backups.store_provider_redis(memory_provider) + Storage.store_provider(memory_provider) return memory_provider @staticmethod @@ -276,6 +171,15 @@ class Backups: kind=provider_string, login=account, key=key ) + @staticmethod + def load_provider_redis() -> AbstractBackupProvider: + provider_model = Storage.load_provider() + if provider_model is None: + return None + return Backups.construct_provider( + provider_model.kind, provider_model.login, provider_model.key + ) + @staticmethod def back_up(service: Service): """The top-level function to back up a service""" @@ -292,29 +196,17 @@ class Backups: def init_repo(service: Service): repo_name = service.get_id() Backups.provider().backuper.init(repo_name) - Backups._redis_mark_as_init(service) - - @staticmethod - def _has_redis_init_mark(service: Service) -> bool: - repo_name = service.get_id() - if redis.exists(REDIS_INITTED_CACHE_PREFIX + repo_name): - return True - return False - - @staticmethod - def _redis_mark_as_init(service: Service): - repo_name = service.get_id() - redis.set(REDIS_INITTED_CACHE_PREFIX + repo_name, 1) + Storage.mark_as_init(service) @staticmethod def is_initted(service: Service) -> bool: repo_name = service.get_id() - if Backups._has_redis_init_mark(service): + if Storage.has_init_mark(service): return True initted = Backups.provider().backuper.is_initted(repo_name) if initted: - Backups._redis_mark_as_init(service) + Storage.mark_as_init(service) return True return False @@ -357,3 +249,11 @@ class Backups: return Backups.service_snapshot_size( get_service_by_id(snapshot.service_name), snapshot.id ) + + @staticmethod + def _store_last_snapshot(service_id: str, snapshot: Snapshot): + """What do we do with a snapshot that is just made?""" + # non-expiring timestamp of the last + Storage.store_last_timestamp(service_id, snapshot) + # expiring cache entry + Storage.cache_snapshot(snapshot) diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py new file mode 100644 index 0000000..cf7d93a --- /dev/null +++ b/selfprivacy_api/backup/storage.py @@ -0,0 +1,168 @@ +from typing import List, Optional +from datetime import datetime + +from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.models.backup.provider import BackupProviderModel + +from selfprivacy_api.utils.redis_pool import RedisPool +from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model + + +from selfprivacy_api.services.service import Service + +from selfprivacy_api.backup.providers.provider import AbstractBackupProvider +from selfprivacy_api.backup.providers import get_kind + +# a hack to store file path. 
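+# Redis key schema used by the Storage class below (summarizing the
+# constants that follow, as they are used by the methods of Storage):
+#   - the *_PREFIX constants are namespaces that get a service id or a
+#     snapshot id appended to them, the *_KEY constants are standalone keys;
+#   - only the cached snapshots expire (REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS),
+#     everything else lives until deleted explicitly (see Storage.reset()).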
+REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day + +REDIS_AUTOBACKUP_ENABLED_PREFIX = "backup:autobackup:services:" +REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" +REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" +REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" + +REDIS_REPO_PATH_KEY = "backups:test_repo_path" +REDIS_PROVIDER_KEY = "backups:provider" +REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" + + +redis = RedisPool().get_connection() + + +class Storage: + @staticmethod + def reset(): + redis.delete(REDIS_PROVIDER_KEY) + redis.delete(REDIS_REPO_PATH_KEY) + redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) + + prefixes_to_clean = [ + REDIS_INITTED_CACHE_PREFIX, + REDIS_SNAPSHOTS_PREFIX, + REDIS_LAST_BACKUP_PREFIX, + REDIS_AUTOBACKUP_ENABLED_PREFIX, + ] + + for prefix in prefixes_to_clean: + for key in redis.keys(prefix + "*"): + redis.delete(key) + + @staticmethod + def store_testrepo_path(path: str): + redis.set(REDIS_REPO_PATH_KEY, path) + + @staticmethod + def get_testrepo_path() -> str: + if not redis.exists(REDIS_REPO_PATH_KEY): + raise ValueError( + "No test repository filepath is set, but we tried to access it" + ) + return redis.get(REDIS_REPO_PATH_KEY) + + @staticmethod + def services_with_autobackup() -> List[str]: + keys = redis.keys(REDIS_AUTOBACKUP_ENABLED_PREFIX + "*") + service_ids = [key.split(":")[-1] for key in keys] + return service_ids + + @staticmethod + def __last_backup_key(service_id): + return REDIS_LAST_BACKUP_PREFIX + service_id + + @staticmethod + def __snapshot_key(snapshot: Snapshot): + return REDIS_SNAPSHOTS_PREFIX + snapshot.id + + @staticmethod + def get_last_backup_time(service_id: str) -> Optional[datetime]: + key = Storage.__last_backup_key(service_id) + if not redis.exists(key): + return None + + snapshot = hash_as_model(redis, key, Snapshot) + return snapshot.created_at + + @staticmethod + def store_last_timestamp(service_id: str, snapshot: Snapshot): + store_model_as_hash(redis, Storage.__last_backup_key(service_id), snapshot) + + @staticmethod + def cache_snapshot(snapshot: Snapshot): + snapshot_key = Storage.__snapshot_key(snapshot) + store_model_as_hash(redis, snapshot_key, snapshot) + redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) + + @staticmethod + def delete_cached_snapshot(snapshot: Snapshot): + snapshot_key = Storage.__snapshot_key(snapshot) + redis.delete(snapshot_key) + + @staticmethod + def get_cached_snapshots() -> List[Snapshot]: + keys = redis.keys(REDIS_SNAPSHOTS_PREFIX + "*") + result = [] + + for key in keys: + snapshot = hash_as_model(redis, key, Snapshot) + result.append(snapshot) + return result + + @staticmethod + def __autobackup_key(service_name: str) -> str: + return REDIS_AUTOBACKUP_ENABLED_PREFIX + service_name + + @staticmethod + def set_autobackup(service: Service): + # shortcut this + redis.set(Storage.__autobackup_key(service.get_id()), 1) + + @staticmethod + def unset_autobackup(service: Service): + """also see disable_all_autobackup()""" + redis.delete(Storage.__autobackup_key(service.get_id())) + + @staticmethod + def is_autobackup_set(service_name: str): + return redis.exists(Storage.__autobackup_key(service_name)) + + @staticmethod + def autobackup_period_minutes() -> Optional[int]: + """None means autobackup is disabled""" + if not redis.exists(REDIS_AUTOBACKUP_PERIOD_KEY): + return None + return int(redis.get(REDIS_AUTOBACKUP_PERIOD_KEY)) + + @staticmethod + def store_autobackup_period_minutes(minutes: int): + redis.set(REDIS_AUTOBACKUP_PERIOD_KEY, minutes) + + 
@staticmethod + def delete_backup_period(): + redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) + + @staticmethod + def store_provider(provider: AbstractBackupProvider): + store_model_as_hash( + redis, + REDIS_PROVIDER_KEY, + BackupProviderModel( + kind=get_kind(provider), login=provider.login, key=provider.key + ), + ) + + @staticmethod + def load_provider() -> BackupProviderModel: + provider_model = hash_as_model(redis, REDIS_PROVIDER_KEY, BackupProviderModel) + return provider_model + + @staticmethod + def has_init_mark(service: Service) -> bool: + repo_name = service.get_id() + if redis.exists(REDIS_INITTED_CACHE_PREFIX + repo_name): + return True + return False + + @staticmethod + def mark_as_init(service: Service): + repo_name = service.get_id() + redis.set(REDIS_INITTED_CACHE_PREFIX + repo_name, 1) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 619656d..54701b1 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -6,18 +6,15 @@ from os import listdir from datetime import datetime, timedelta, timezone from selfprivacy_api.services.test_service import DummyService - -from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup import Backups import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider - from selfprivacy_api.backup.providers.backblaze import Backblaze - -from selfprivacy_api.graphql.queries.providers import BackupProvider - from selfprivacy_api.backup.tasks import start_backup +from selfprivacy_api.backup.storage import Storage + TESTFILE_BODY = "testytest!" REPO_NAME = "test_backup" @@ -175,54 +172,6 @@ def test_sizing(backups, dummy_service): assert size > 0 -def test_redis_storage(backups_backblaze): - Backups.reset() - provider = Backups.provider() - - assert provider is not None - - assert isinstance(provider, Backblaze) - assert provider.login == "ID" - assert provider.key == "KEY" - - Backups.store_provider_redis(provider) - restored_provider = Backups.load_provider_redis() - assert isinstance(restored_provider, Backblaze) - assert restored_provider.login == "ID" - assert restored_provider.key == "KEY" - - -def test_snapshots_caching(backups, dummy_service): - Backups.back_up(dummy_service) - - # we test indirectly that we do redis calls instead of shell calls - start = datetime.now() - for i in range(10): - snapshots = Backups.get_snapshots(dummy_service) - assert len(snapshots) == 1 - assert datetime.now() - start < timedelta(seconds=0.5) - - -# lowlevel -def test_init_tracking_caching(backups, raw_dummy_service): - assert Backups._has_redis_init_mark(raw_dummy_service) is False - - Backups._redis_mark_as_init(raw_dummy_service) - - assert Backups._has_redis_init_mark(raw_dummy_service) is True - assert Backups.is_initted(raw_dummy_service) is True - - -# lowlevel -def test_init_tracking_caching2(backups, raw_dummy_service): - assert Backups._has_redis_init_mark(raw_dummy_service) is False - - Backups.init_repo(raw_dummy_service) - - assert Backups._has_redis_init_mark(raw_dummy_service) is True - - -# only public API def test_init_tracking(backups, raw_dummy_service): assert Backups.is_initted(raw_dummy_service) is False @@ -269,3 +218,64 @@ def test_set_autobackup_period(backups): Backups.set_autobackup_period_minutes(-1) assert Backups.autobackup_period_minutes() is None + + +# Storage +def test_snapshots_caching(backups, 
dummy_service): + Backups.back_up(dummy_service) + + # we test indirectly that we do redis calls instead of shell calls + start = datetime.now() + for i in range(10): + snapshots = Backups.get_snapshots(dummy_service) + assert len(snapshots) == 1 + assert datetime.now() - start < timedelta(seconds=0.5) + + cached_snapshots = Storage.get_cached_snapshots() + assert len(cached_snapshots) == 1 + + Storage.delete_cached_snapshot(cached_snapshots[0]) + cached_snapshots = Storage.get_cached_snapshots() + assert len(cached_snapshots) == 0 + + snapshots = Backups.get_snapshots(dummy_service) + assert len(snapshots) == 1 + cached_snapshots = Storage.get_cached_snapshots() + assert len(cached_snapshots) == 1 + + +# Storage +def test_init_tracking_caching(backups, raw_dummy_service): + assert Storage.has_init_mark(raw_dummy_service) is False + + Storage.mark_as_init(raw_dummy_service) + + assert Storage.has_init_mark(raw_dummy_service) is True + assert Backups.is_initted(raw_dummy_service) is True + + +# Storage +def test_init_tracking_caching2(backups, raw_dummy_service): + assert Storage.has_init_mark(raw_dummy_service) is False + + Backups.init_repo(raw_dummy_service) + + assert Storage.has_init_mark(raw_dummy_service) is True + + +# Storage +def test_provider_storage(backups_backblaze): + Backups.reset() + provider = Backups.provider() + + assert provider is not None + + assert isinstance(provider, Backblaze) + assert provider.login == "ID" + assert provider.key == "KEY" + + Storage.store_provider(provider) + restored_provider = Backups.load_provider_redis() + assert isinstance(restored_provider, Backblaze) + assert restored_provider.login == "ID" + assert restored_provider.key == "KEY" From 5e9c651c65bdb99d38396d49efd784fd8655a00e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 10 Apr 2023 15:51:54 +0000 Subject: [PATCH 296/537] test(backups): test autobackup timing --- selfprivacy_api/backup/__init__.py | 2 +- selfprivacy_api/backup/storage.py | 2 +- tests/test_graphql/test_backup.py | 75 ++++++++++++++++++++++++++++++ 3 files changed, 77 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index c931e57..1676ca5 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -69,7 +69,7 @@ class Backups: period = Backups.autobackup_period_minutes() if period is None: return False - if not Storage.is_autobackup_set_by_name(service_id) is None: + if not Storage.is_autobackup_set(service_id): return False last_backup = Storage.get_last_backup_time(service_id) diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index cf7d93a..7ca5f18 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -122,7 +122,7 @@ class Storage: redis.delete(Storage.__autobackup_key(service.get_id())) @staticmethod - def is_autobackup_set(service_name: str): + def is_autobackup_set(service_name: str) -> bool: return redis.exists(Storage.__autobackup_key(service_name)) @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 54701b1..710e7b8 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -198,6 +198,17 @@ def test_autobackup_enable_service(backups, dummy_service): assert not Backups.is_autobackup_enabled(dummy_service) +def test_autobackup_enable_service_storage(backups, dummy_service): + assert len(Storage.services_with_autobackup()) == 0 + + 
Backups.enable_autobackup(dummy_service) + assert len(Storage.services_with_autobackup()) == 1 + assert Storage.services_with_autobackup()[0] == dummy_service.get_id() + + Backups.disable_autobackup(dummy_service) + assert len(Storage.services_with_autobackup()) == 0 + + def test_set_autobackup_period(backups): assert Backups.autobackup_period_minutes() is None @@ -220,6 +231,70 @@ def test_set_autobackup_period(backups): assert Backups.autobackup_period_minutes() is None +def test_no_default_autobackup(backups, dummy_service): + now = datetime.now(timezone.utc) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + +def test_autobackup_timer_periods(backups, dummy_service): + now = datetime.now(timezone.utc) + backup_period = 13 # minutes + + Backups.enable_autobackup(dummy_service) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert Backups.is_time_to_backup(now) + + Backups.set_autobackup_period_minutes(0) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + +def test_autobackup_timer_enabling(backups, dummy_service): + now = datetime.now(timezone.utc) + backup_period = 13 # minutes + + Backups.set_autobackup_period_minutes(backup_period) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + Backups.enable_autobackup(dummy_service) + assert Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert Backups.is_time_to_backup(now) + + Backups.disable_autobackup(dummy_service) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + +def test_autobackup_timing(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + Backups.enable_autobackup(dummy_service) + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert Backups.is_time_to_backup(now) + + Backups.back_up(dummy_service) + + now = datetime.now(timezone.utc) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup(now) + + past = datetime.now(timezone.utc) - timedelta(minutes=1) + assert not Backups.is_time_to_backup_service(dummy_service.get_id(), past) + assert not Backups.is_time_to_backup(past) + + future = datetime.now(timezone.utc) + timedelta(minutes=backup_period + 2) + assert Backups.is_time_to_backup_service(dummy_service.get_id(), future) + assert Backups.is_time_to_backup(future) + + # Storage def test_snapshots_caching(backups, dummy_service): Backups.back_up(dummy_service) From 1940b291619604936ca6e674afa8cd26df312c2b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 10 Apr 2023 16:35:35 +0000 Subject: [PATCH 297/537] feature(backups): automatic backup --- selfprivacy_api/backup/__init__.py | 22 +++++++++++++++++----- selfprivacy_api/backup/tasks.py | 22 ++++++++++++++++++++++ 2 files changed, 39 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 1676ca5..d9b811c 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -52,17 +52,29 @@ class Backups: def 
enable_autobackup(service: Service):
         Storage.set_autobackup(service)
 
+    @staticmethod
+    def _service_ids_to_back_up(time: datetime) -> List[str]:
+        services = Storage.services_with_autobackup()
+        return [id for id in services if Backups.is_time_to_backup_service(id, time)]
+
+    # untestable until the dummy service is registered
+    @staticmethod
+    def services_to_back_up(time: datetime) -> List[Service]:
+        result = []
+        for id in Backups._service_ids_to_back_up(time):
+            service = get_service_by_id(id)
+            if service is None:
+                raise ValueError("Cannot look up a service scheduled for backup!")
+            result.append(service)
+        return result
+
     @staticmethod
     def is_time_to_backup(time: datetime) -> bool:
         """
         Intended as a time validator for huey cron scheduler
         of automatic backups
         """
-        enabled_services = Storage.services_with_autobackup()
-        for service_id in enabled_services:
-            if Backups.is_time_to_backup_service(service_id, time):
-                return True
-        return False
+
+        return Backups._service_ids_to_back_up(time) != []
 
     @staticmethod
     def is_time_to_backup_service(service_id: str, time: datetime):
diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py
index f1715cc..4f6ab16 100644
--- a/selfprivacy_api/backup/tasks.py
+++ b/selfprivacy_api/backup/tasks.py
@@ -1,9 +1,31 @@
+from datetime import datetime
+
 from selfprivacy_api.utils.huey import huey
 from selfprivacy_api.services.service import Service
 from selfprivacy_api.backup import Backups
 
+
+def validate_datetime(dt: datetime):
+    # dt = datetime.now(timezone.utc)
+    if dt.tzinfo is None:
+        raise ValueError(
+            """
+            huey passed in the timezone-unaware time!
+            Post it in support chat or maybe try uncommenting a line above
+            """
+        )
+    return Backups.is_time_to_backup(dt)
+
+
 # huey tasks need to return something
 @huey.task()
 def start_backup(service: Service) -> bool:
     Backups.back_up(service)
     return True
+
+
+@huey.periodic_task(validate_datetime=validate_datetime)
+def automatic_backup():
+    time = datetime.now()
+    for service in Backups.services_to_back_up(time):
+        start_backup(service)
From 6132f1bb4cb90fdd0e7edbc3a9d1271b9581c72e Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 12 Apr 2023 17:18:12 +0000
Subject: [PATCH 298/537] test(backups): register dummy service

---
 selfprivacy_api/backup/__init__.py |  3 ---
 tests/test_graphql/test_backup.py  | 19 +++++++++++++++++++
 2 files changed, 19 insertions(+), 3 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index d9b811c..674abf0 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -57,7 +57,6 @@ class Backups:
         services = Storage.services_with_autobackup()
         return [id for id in services if Backups.is_time_to_backup_service(id, time)]
 
-    # untestable until the dummy service is registered
     @staticmethod
     def services_to_back_up(time: datetime) -> List[Service]:
         result = []
@@ -243,7 +242,6 @@ class Backups:
 
         Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folder)
 
-    # Our dummy service is not yet globally registered so this is not testable yet
     @staticmethod
     def restore_snapshot(snapshot: Snapshot):
         Backups.restore_service_from_snapshot(
@@ -255,7 +253,6 @@ class Backups:
         repo_name = service.get_id()
         return Backups.provider().backuper.restored_size(repo_name, snapshot_id)
 
-    # Our dummy service is not yet globally registered so this is not testable yet
     @staticmethod
     def snapshot_restored_size(snapshot: Snapshot) -> float:
         return Backups.service_snapshot_size(
diff --git 
a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 710e7b8..4d4f421 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -5,6 +5,8 @@ from os import remove from os import listdir from datetime import datetime, timedelta, timezone +import selfprivacy_api.services as services +from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.graphql.queries.providers import BackupProvider @@ -58,6 +60,11 @@ def dummy_service(tmpdir, backups, raw_dummy_service): # assert not repo_path Backups.init_repo(service) + + # register our service + services.services.append(service) + + assert get_service_by_id(service.get_id()) is not None return service @@ -354,3 +361,15 @@ def test_provider_storage(backups_backblaze): assert isinstance(restored_provider, Backblaze) assert restored_provider.login == "ID" assert restored_provider.key == "KEY" + + +def test_services_to_back_up(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + Backups.enable_autobackup(dummy_service) + Backups.set_autobackup_period_minutes(backup_period) + + services = Backups.services_to_back_up(now) + assert len(services) == 1 + assert services[0].get_id() == dummy_service.get_id() \ No newline at end of file From 507cdb3bbd97f7b137575e7056aa29db71bd57b7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 10:18:21 +0000 Subject: [PATCH 299/537] refactor(services): rename get_location() to get_drive() --- selfprivacy_api/backup/__init__.py | 4 ++-- selfprivacy_api/graphql/common_types/service.py | 4 ++-- selfprivacy_api/services/bitwarden/__init__.py | 2 +- selfprivacy_api/services/generic_service_mover.py | 2 +- selfprivacy_api/services/gitea/__init__.py | 2 +- selfprivacy_api/services/jitsi/__init__.py | 2 +- selfprivacy_api/services/mailserver/__init__.py | 2 +- selfprivacy_api/services/nextcloud/__init__.py | 2 +- selfprivacy_api/services/ocserv/__init__.py | 2 +- selfprivacy_api/services/pleroma/__init__.py | 2 +- selfprivacy_api/services/service.py | 2 +- selfprivacy_api/services/test_service/__init__.py | 2 +- tests/test_graphql/test_backup.py | 6 +++--- 13 files changed, 17 insertions(+), 17 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 674abf0..72d6168 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -194,7 +194,7 @@ class Backups: @staticmethod def back_up(service: Service): """The top-level function to back up a service""" - folder = service.get_location() + folder = service.get_drive() repo_name = service.get_id() service.pre_backup() @@ -238,7 +238,7 @@ class Backups: @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() - folder = service.get_location() + folder = service.get_drive() Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folder) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 61ed5af..8f27386 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -17,7 +17,7 @@ def get_usages(root: "StorageVolume") -> list["StorageUsageInterface"]: service=service_to_graphql_service(service), title=service.get_display_name(), used_space=str(service.get_storage_usage()), - volume=get_volume_by_id(service.get_location()), + 
volume=get_volume_by_id(service.get_location()),
+
volume=get_volume_by_id(service.get_drive()), ) for service in get_services_by_location(root.name) ] @@ -81,7 +81,7 @@ def get_storage_usage(root: "Service") -> ServiceStorageUsage: service=service_to_graphql_service(service), title=service.get_display_name(), used_space=str(service.get_storage_usage()), - volume=get_volume_by_id(service.get_location()), + volume=get_volume_by_id(service.get_drive()), ) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 16d7746..702eca6 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -118,7 +118,7 @@ class Bitwarden(Service): return storage_usage @staticmethod - def get_location() -> str: + def get_drive() -> str: with ReadUserData() as user_data: if user_data.get("useBinds", False): return user_data.get("bitwarden", {}).get("location", "sda1") diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index 6c1b426..148ac1a 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -44,7 +44,7 @@ def move_service( ) return # Check if we are on the same volume - old_volume = service.get_location() + old_volume = service.get_drive() if old_volume == volume.name: Jobs.update( job=job, diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index aacda5f..12b3b19 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -116,7 +116,7 @@ class Gitea(Service): return storage_usage @staticmethod - def get_location() -> str: + def get_drive() -> str: with ReadUserData() as user_data: if user_data.get("useBinds", False): return user_data.get("gitea", {}).get("location", "sda1") diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py index 6b3a973..f156f5a 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -116,7 +116,7 @@ class Jitsi(Service): return storage_usage @staticmethod - def get_location() -> str: + def get_drive() -> str: return "sda1" @staticmethod diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index 78a2441..ed04e73 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -101,7 +101,7 @@ class MailServer(Service): return get_storage_usage("/var/vmail") @staticmethod - def get_location() -> str: + def get_drive() -> str: with utils.ReadUserData() as user_data: if user_data.get("useBinds", False): return user_data.get("mailserver", {}).get("location", "sda1") diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index ad74354..37b4742 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -123,7 +123,7 @@ class Nextcloud(Service): return get_storage_usage("/var/lib/nextcloud") @staticmethod - def get_location() -> str: + def get_drive() -> str: """Get the name of disk where Nextcloud is installed.""" with ReadUserData() as user_data: if user_data.get("useBinds", False): diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index dcfacaa..bad3ad4 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ 
b/selfprivacy_api/services/ocserv/__init__.py @@ -93,7 +93,7 @@ class Ocserv(Service): return "" @staticmethod - def get_location() -> str: + def get_drive() -> str: return "sda1" @staticmethod diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index 4d2b85e..2b8c4cb 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -104,7 +104,7 @@ class Pleroma(Service): return storage_usage @staticmethod - def get_location() -> str: + def get_drive() -> str: with ReadUserData() as user_data: if user_data.get("useBinds", False): return user_data.get("pleroma", {}).get("location", "sda1") diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index f191149..3f1f4af 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -132,7 +132,7 @@ class Service(ABC): @staticmethod @abstractmethod - def get_location() -> str: + def get_drive() -> str: pass @abstractmethod diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 0118dbc..822348c 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -107,7 +107,7 @@ class DummyService(Service): return storage_usage @classmethod - def get_location(cls) -> str: + def get_drive(cls) -> str: return cls.location @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 4d4f421..d33cde1 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -44,7 +44,7 @@ def raw_dummy_service(tmpdir, backups): with open(testfile_path, "w") as file: file.write(TESTFILE_BODY) - # we need this to not change get_location() much + # we need this to not change get_drive() much class TestDummyService(DummyService, location=service_dir): pass @@ -143,7 +143,7 @@ def test_one_snapshot(backups, dummy_service): def test_backup_returns_snapshot(backups, dummy_service): - service_folder = dummy_service.get_location() + service_folder = dummy_service.get_drive() provider = Backups.provider() name = dummy_service.get_id() snapshot = provider.backuper.start_backup(service_folder, name) @@ -154,7 +154,7 @@ def test_backup_returns_snapshot(backups, dummy_service): def test_restore(backups, dummy_service): - service_folder = dummy_service.get_location() + service_folder = dummy_service.get_drive() file_to_nuke = listdir(service_folder)[0] assert file_to_nuke is not None path_to_nuke = path.join(service_folder, file_to_nuke) From d1e1039519ed5e726d19f69b41a2980c11fa9e3b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 10:32:14 +0000 Subject: [PATCH 300/537] refactor(backups): make a dedicated get_folders() function --- selfprivacy_api/backup/__init__.py | 4 ++-- selfprivacy_api/services/service.py | 5 +++++ selfprivacy_api/services/test_service/__init__.py | 7 ++++++- tests/test_graphql/test_backup.py | 6 +++--- 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 72d6168..708d4a5 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -194,7 +194,7 @@ class Backups: @staticmethod def back_up(service: Service): """The top-level function to back up a service""" - folder = service.get_drive() + folder = service.get_folders() repo_name = service.get_id() service.pre_backup() @@ -238,7 
+238,7 @@ class Backups: @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() - folder = service.get_drive() + folder = service.get_folders() Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folder) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 3f1f4af..9a7663a 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -135,6 +135,11 @@ class Service(ABC): def get_drive() -> str: pass + # @staticmethod + # @abstractmethod + # def get_folders() -> str: + # pass + @abstractmethod def move_to_volume(self, volume: BlockDevice) -> Job: pass diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 822348c..2906244 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -106,8 +106,13 @@ class DummyService(Service): storage_usage = 0 return storage_usage - @classmethod + @staticmethod def get_drive(cls) -> str: + return "sda1" + + @classmethod + def get_folders(cls) -> str: + # for now only a single folder return cls.location @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index d33cde1..9c8af3f 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -44,7 +44,7 @@ def raw_dummy_service(tmpdir, backups): with open(testfile_path, "w") as file: file.write(TESTFILE_BODY) - # we need this to not change get_drive() much + # we need this to not change get_folders() much class TestDummyService(DummyService, location=service_dir): pass @@ -143,7 +143,7 @@ def test_one_snapshot(backups, dummy_service): def test_backup_returns_snapshot(backups, dummy_service): - service_folder = dummy_service.get_drive() + service_folder = dummy_service.get_folders() provider = Backups.provider() name = dummy_service.get_id() snapshot = provider.backuper.start_backup(service_folder, name) @@ -154,7 +154,7 @@ def test_backup_returns_snapshot(backups, dummy_service): def test_restore(backups, dummy_service): - service_folder = dummy_service.get_drive() + service_folder = dummy_service.get_folders() file_to_nuke = listdir(service_folder)[0] assert file_to_nuke is not None path_to_nuke = path.join(service_folder, file_to_nuke) From f0aabec947873842daa736c0bb967c71e1baf311 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 11:20:03 +0000 Subject: [PATCH 301/537] refactor(backups): make api accept a list of folders --- selfprivacy_api/backup/__init__.py | 8 ++++---- selfprivacy_api/backup/backuper.py | 4 ++-- selfprivacy_api/backup/restic_backuper.py | 10 +++++++--- selfprivacy_api/services/test_service/__init__.py | 6 +++--- tests/test_graphql/test_backup.py | 6 +++--- 5 files changed, 19 insertions(+), 15 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 708d4a5..f89a54e 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -194,11 +194,11 @@ class Backups: @staticmethod def back_up(service: Service): """The top-level function to back up a service""" - folder = service.get_folders() + folders = service.get_folders() repo_name = service.get_id() service.pre_backup() - snapshot = Backups.provider().backuper.start_backup(folder, repo_name) + snapshot = Backups.provider().backuper.start_backup(folders, repo_name) Backups._store_last_snapshot(repo_name, 
snapshot) service.post_restore() @@ -238,9 +238,9 @@ class Backups: @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() - folder = service.get_folders() + folders = service.get_folders() - Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folder) + Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folders) @staticmethod def restore_snapshot(snapshot: Snapshot): diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuper.py index 5bba9d5..908c1fc 100644 --- a/selfprivacy_api/backup/backuper.py +++ b/selfprivacy_api/backup/backuper.py @@ -13,7 +13,7 @@ class AbstractBackuper(ABC): raise NotImplementedError @abstractmethod - def start_backup(self, folder: str, repo_name: str): + def start_backup(self, folders: List[str], repo_name: str): raise NotImplementedError @abstractmethod @@ -26,7 +26,7 @@ class AbstractBackuper(ABC): raise NotImplementedError @abstractmethod - def restore_from_backup(self, repo_name: str, snapshot_id: str, folder: str): + def restore_from_backup(self, repo_name: str, snapshot_id: str, folders: List[str]): """Restore a target folder using a snapshot""" raise NotImplementedError diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index cc81361..2af7d44 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -57,15 +57,19 @@ class ResticBackuper(AbstractBackuper): command.extend(args) return command - def start_backup(self, folder: str, repo_name: str): + def start_backup(self, folders: List[str], repo_name: str): """ Start backup with restic """ + + # but maybe it is ok to accept a union of a string and an array of strings + assert not isinstance(folders, str) + backup_command = self.restic_command( repo_name, "backup", "--json", - folder, + folders[0], ) with subprocess.Popen( backup_command, @@ -145,7 +149,7 @@ class ResticBackuper(AbstractBackuper): except ValueError as e: raise ValueError("cannot restore a snapshot: " + output) from e - def restore_from_backup(self, repo_name, snapshot_id, folder): + def restore_from_backup(self, repo_name, snapshot_id, folders): """ Restore from backup with restic """ diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 2906244..e790be9 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -1,6 +1,7 @@ """Class representing Bitwarden service""" import base64 import typing +from typing import List from selfprivacy_api.jobs import Job from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus @@ -111,9 +112,8 @@ class DummyService(Service): return "sda1" @classmethod - def get_folders(cls) -> str: - # for now only a single folder - return cls.location + def get_folders(cls) -> List[str]: + return [cls.location] @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 9c8af3f..460fa44 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -143,10 +143,10 @@ def test_one_snapshot(backups, dummy_service): def test_backup_returns_snapshot(backups, dummy_service): - service_folder = dummy_service.get_folders() + service_folders = dummy_service.get_folders() provider = Backups.provider() name = dummy_service.get_id() - 
snapshot = provider.backuper.start_backup(service_folder, name) + snapshot = provider.backuper.start_backup(service_folders, name) assert snapshot.id is not None assert snapshot.service_name == name @@ -154,7 +154,7 @@ def test_backup_returns_snapshot(backups, dummy_service): def test_restore(backups, dummy_service): - service_folder = dummy_service.get_folders() + service_folder = dummy_service.get_folders()[0] file_to_nuke = listdir(service_folder)[0] assert file_to_nuke is not None path_to_nuke = path.join(service_folder, file_to_nuke) From 3aefbaaf0ba46c0d48a00c6a21bab3f8151820d7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 11:58:39 +0000 Subject: [PATCH 302/537] refactor(backups): actually accept a list of folders --- selfprivacy_api/backup/restic_backuper.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 2af7d44..896f68d 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -3,6 +3,7 @@ import json import datetime from typing import List +from collections.abc import Iterable from selfprivacy_api.backup.backuper import AbstractBackuper from selfprivacy_api.models.backup.snapshot import Snapshot @@ -54,9 +55,20 @@ class ResticBackuper(AbstractBackuper): self._password_command(), ] if args != []: - command.extend(args) + command.extend(ResticBackuper.__flatten_list(args)) return command + @staticmethod + def __flatten_list(list): + """string-aware list flattener""" + result = [] + for item in list: + if isinstance(item, Iterable) and not isinstance(item, str): + result.extend(ResticBackuper.__flatten_list(item)) + continue + result.append(item) + return result + def start_backup(self, folders: List[str], repo_name: str): """ Start backup with restic @@ -69,7 +81,7 @@ class ResticBackuper(AbstractBackuper): repo_name, "backup", "--json", - folders[0], + folders, ) with subprocess.Popen( backup_command, From b21d63be637e61eafb568b6807a4c7eca018f65d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 12:29:23 +0000 Subject: [PATCH 303/537] refactor(backups): set a list of folders for our dummy service --- selfprivacy_api/services/test_service/__init__.py | 6 +++--- tests/test_graphql/test_backup.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index e790be9..c14feca 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -15,8 +15,8 @@ from selfprivacy_api.services.test_service.icon import BITWARDEN_ICON class DummyService(Service): """A test service""" - def __init_subclass__(cls, location): - cls.location = location + def __init_subclass__(cls, folders: List[str]): + cls.folders = folders @staticmethod def get_id() -> str: @@ -113,7 +113,7 @@ class DummyService(Service): @classmethod def get_folders(cls) -> List[str]: - return [cls.location] + return cls.folders @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 460fa44..ff89ccb 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -45,7 +45,7 @@ def raw_dummy_service(tmpdir, backups): file.write(TESTFILE_BODY) # we need this to not change get_folders() much - class TestDummyService(DummyService, 
location=service_dir): + class TestDummyService(DummyService, folders=[service_dir]): pass service = TestDummyService() From f3bfa2293cf97f14c3f60f359db701aef72dcee9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 12:40:41 +0000 Subject: [PATCH 304/537] test(backups): actually back up 2 folders --- tests/test_graphql/test_backup.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index ff89ccb..a14d268 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -19,6 +19,7 @@ from selfprivacy_api.backup.storage import Storage TESTFILE_BODY = "testytest!" +TESTFILE_2_BODY = "testissimo!" REPO_NAME = "test_backup" @@ -37,15 +38,23 @@ def backups_backblaze(generic_userdata): @pytest.fixture() def raw_dummy_service(tmpdir, backups): - service_dir = path.join(tmpdir, "test_service") - makedirs(service_dir) + dirnames = ["test_service", "also_test_service"] + service_dirs = [] + for d in dirnames: + service_dir = path.join(tmpdir, d) + makedirs(service_dir) + service_dirs.append(service_dir) - testfile_path = path.join(service_dir, "testfile.txt") - with open(testfile_path, "w") as file: + testfile_path_1 = path.join(service_dirs[0], "testfile.txt") + with open(testfile_path_1, "w") as file: file.write(TESTFILE_BODY) + testfile_path_2 = path.join(service_dirs[1], "testfile2.txt") + with open(testfile_path_2, "w") as file: + file.write(TESTFILE_2_BODY) + # we need this to not change get_folders() much - class TestDummyService(DummyService, folders=[service_dir]): + class TestDummyService(DummyService, folders=service_dirs): pass service = TestDummyService() From 840572f82c9a7e41b93aee32e179ccb409812228 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Apr 2023 13:06:17 +0000 Subject: [PATCH 305/537] test(backups): test 2-folder restoration --- tests/test_graphql/test_backup.py | 28 +++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index a14d268..d0f5d00 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -163,21 +163,31 @@ def test_backup_returns_snapshot(backups, dummy_service): def test_restore(backups, dummy_service): - service_folder = dummy_service.get_folders()[0] - file_to_nuke = listdir(service_folder)[0] - assert file_to_nuke is not None - path_to_nuke = path.join(service_folder, file_to_nuke) + paths_to_nuke = [] + contents = [] + + for service_folder in dummy_service.get_folders(): + file_to_nuke = listdir(service_folder)[0] + assert file_to_nuke is not None + path_to_nuke = path.join(service_folder, file_to_nuke) + paths_to_nuke.append(path_to_nuke) + with open(path_to_nuke, "r") as file: + contents.append(file.read()) Backups.back_up(dummy_service) snap = Backups.get_snapshots(dummy_service)[0] assert snap is not None - assert path.exists(path_to_nuke) - remove(path_to_nuke) - assert not path.exists(path_to_nuke) + for p in paths_to_nuke: + assert path.exists(p) + remove(p) + assert not path.exists(p) Backups.restore_service_from_snapshot(dummy_service, snap.id) - assert path.exists(path_to_nuke) + for p, content in zip(paths_to_nuke, contents): + assert path.exists(p) + with open(p, "r") as file: + assert file.read() == content def test_sizing(backups, dummy_service): @@ -381,4 +391,4 @@ def test_services_to_back_up(backups, dummy_service): services = 
Backups.services_to_back_up(now) assert len(services) == 1 - assert services[0].get_id() == dummy_service.get_id() \ No newline at end of file + assert services[0].get_id() == dummy_service.get_id() From 498208f083c0d15e4591230b8503dc96e6193c86 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 12:33:27 +0000 Subject: [PATCH 306/537] test(backups): implement get_folders() for bitwarden --- .../services/bitwarden/__init__.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 702eca6..a3ed94e 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -113,10 +113,14 @@ class Bitwarden(Service): @staticmethod def get_storage_usage() -> int: storage_usage = 0 - storage_usage += get_storage_usage("/var/lib/bitwarden") - storage_usage += get_storage_usage("/var/lib/bitwarden_rs") + for folder in Bitwarden.get_folders(): + storage_usage += get_storage_usage(folder) return storage_usage + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/lib/bitwarden", "/var/lib/bitwarden_rs"] + @staticmethod def get_drive() -> str: with ReadUserData() as user_data: @@ -157,16 +161,11 @@ class Bitwarden(Service): [ FolderMoveNames( name="bitwarden", - bind_location="/var/lib/bitwarden", + bind_location=folder, group="vaultwarden", owner="vaultwarden", - ), - FolderMoveNames( - name="bitwarden_rs", - bind_location="/var/lib/bitwarden_rs", - group="vaultwarden", - owner="vaultwarden", - ), + ) + for folder in Bitwarden.get_folders() ], "bitwarden", ) From 95b88ea2e4a64c7bf493982b0f53fe49d71c5c0a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 12:48:11 +0000 Subject: [PATCH 307/537] test(backups): implement get_folders() for gitea --- selfprivacy_api/services/gitea/__init__.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index 12b3b19..de7d858 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -112,9 +112,14 @@ class Gitea(Service): @staticmethod def get_storage_usage() -> int: storage_usage = 0 - storage_usage += get_storage_usage("/var/lib/gitea") + for folder in Gitea.get_folders(): + storage_usage += get_storage_usage() return storage_usage + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/lib/gitea"] + @staticmethod def get_drive() -> str: with ReadUserData() as user_data: @@ -154,10 +159,11 @@ class Gitea(Service): [ FolderMoveNames( name="gitea", - bind_location="/var/lib/gitea", + bind_location=folder, group="gitea", owner="gitea", - ), + ) + for folder in Gitea.get_folders() ], "gitea", ) From 1019031b5b7aeb3f41e4de7de377e6ab001006f1 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 12:54:21 +0000 Subject: [PATCH 308/537] fix(services): use get_foldername() for moving around --- selfprivacy_api/services/bitwarden/__init__.py | 2 +- selfprivacy_api/services/gitea/__init__.py | 2 +- selfprivacy_api/services/service.py | 4 ++++ 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index a3ed94e..90763bd 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -160,7 +160,7 @@ class 
Bitwarden(Service): job, [ FolderMoveNames( - name="bitwarden", + name=Bitwarden.get_foldername(folder), bind_location=folder, group="vaultwarden", owner="vaultwarden", diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index de7d858..a07ccd1 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -158,7 +158,7 @@ class Gitea(Service): job, [ FolderMoveNames( - name="gitea", + name=Gitea.get_foldername(folder), bind_location=folder, group="gitea", owner="gitea", diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 9a7663a..9ca8eba 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -140,6 +140,10 @@ class Service(ABC): # def get_folders() -> str: # pass + @staticmethod + def get_foldername(path: str) -> str: + return path.split("/")[-1] + @abstractmethod def move_to_volume(self, volume: BlockDevice) -> Job: pass From ac04425221d25fd5a8fcd5e6db004ff4c2863cb7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 13:47:49 +0000 Subject: [PATCH 309/537] refactor(services): add get_folders() to the rest of the services --- selfprivacy_api/services/jitsi/__init__.py | 4 ++++ selfprivacy_api/services/mailserver/__init__.py | 4 ++++ selfprivacy_api/services/nextcloud/__init__.py | 4 ++++ selfprivacy_api/services/ocserv/__init__.py | 4 ++++ selfprivacy_api/services/pleroma/__init__.py | 4 ++++ 5 files changed, 20 insertions(+) diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py index f156f5a..60a94b3 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -115,6 +115,10 @@ class Jitsi(Service): storage_usage += get_storage_usage("/var/lib/jitsi-meet") return storage_usage + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/lib/jitsi-meet"] + @staticmethod def get_drive() -> str: return "sda1" diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index ed04e73..9c61b7a 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -100,6 +100,10 @@ class MailServer(Service): def get_storage_usage() -> int: return get_storage_usage("/var/vmail") + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/vmail", "/var/sieve"] + @staticmethod def get_drive() -> str: with utils.ReadUserData() as user_data: diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 37b4742..eb0f83f 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -122,6 +122,10 @@ class Nextcloud(Service): """ return get_storage_usage("/var/lib/nextcloud") + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/lib/nextcloud"] + @staticmethod def get_drive() -> str: """Get the name of disk where Nextcloud is installed.""" diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index bad3ad4..54a4b98 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -117,5 +117,9 @@ class Ocserv(Service): def get_storage_usage() -> int: return 0 + @staticmethod + def get_folders() -> typing.List[str]: + return [] + def move_to_volume(self, volume: BlockDevice) -> Job: raise 
NotImplementedError("ocserv service is not movable") diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index 2b8c4cb..f3656a6 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -103,6 +103,10 @@ class Pleroma(Service): storage_usage += get_storage_usage("/var/lib/postgresql") return storage_usage + @staticmethod + def get_folders() -> typing.List[str]: + return ["/var/lib/pleroma", "/var/lib/postgresql"] + @staticmethod def get_drive() -> str: with ReadUserData() as user_data: From 0a9848be4707aeda8d0c416608fc020e9a30e29c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 13:53:51 +0000 Subject: [PATCH 310/537] refactor(services): make get_folders() a mandatory part of Service interface --- selfprivacy_api/services/service.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 9ca8eba..192f98e 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -135,10 +135,10 @@ class Service(ABC): def get_drive() -> str: pass - # @staticmethod - # @abstractmethod - # def get_folders() -> str: - # pass + @staticmethod + @abstractmethod + def get_folders() -> str: + pass @staticmethod def get_foldername(path: str) -> str: From c2cd97280583e662d72d1773e277322a2c66d4c0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:02:07 +0000 Subject: [PATCH 311/537] refactor(services): add a generic storage counter --- selfprivacy_api/services/gitea/__init__.py | 2 +- selfprivacy_api/services/service.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index a07ccd1..bb885a5 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -113,7 +113,7 @@ class Gitea(Service): def get_storage_usage() -> int: storage_usage = 0 for folder in Gitea.get_folders(): - storage_usage += get_storage_usage() + storage_usage += get_storage_usage(folder) return storage_usage @staticmethod diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 192f98e..c5a7527 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -8,6 +8,8 @@ from selfprivacy_api.jobs import Job from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.services.generic_size_counter import get_storage_usage + class ServiceStatus(Enum): """Enum for service status""" @@ -120,10 +122,12 @@ class Service(ABC): def get_logs(): pass - @staticmethod - @abstractmethod - def get_storage_usage() -> int: - pass + @classmethod + def get_storage_usage(cls) -> int: + storage_used = 0 + for folder in cls.get_folders(): + storage_used += get_storage_usage(folder) + return storage_used @staticmethod @abstractmethod From 3b8168c25de7baded8a8c58f42c82456f2524278 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:05:17 +0000 Subject: [PATCH 312/537] refactor(services): remove special storage counting from bitwarden --- selfprivacy_api/services/bitwarden/__init__.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 90763bd..8d2a903 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ 
From 3b8168c25de7baded8a8c58f42c82456f2524278 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 17 Apr 2023 14:05:17 +0000
Subject: [PATCH 312/537] refactor(services): remove special storage counting
 from bitwarden

---
 selfprivacy_api/services/bitwarden/__init__.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py
index 90763bd..8d2a903 100644
--- a/selfprivacy_api/services/bitwarden/__init__.py
+++ b/selfprivacy_api/services/bitwarden/__init__.py
@@ -5,7 +5,6 @@ import typing
 
 from selfprivacy_api.jobs import Job, JobStatus, Jobs
 from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
-from selfprivacy_api.services.generic_size_counter import get_storage_usage
 from selfprivacy_api.services.generic_status_getter import get_service_status
 from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
 from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
@@ -110,13 +109,6 @@ class Bitwarden(Service):
     def get_logs():
         return ""
 
-    @staticmethod
-    def get_storage_usage() -> int:
-        storage_usage = 0
-        for folder in Bitwarden.get_folders():
-            storage_usage += get_storage_usage(folder)
-        return storage_usage
-
     @staticmethod
     def get_folders() -> typing.List[str]:
         return ["/var/lib/bitwarden", "/var/lib/bitwarden_rs"]

From 44041662c2be2a30054e1173a32a82e8e768504e Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 17 Apr 2023 14:07:04 +0000
Subject: [PATCH 313/537] refactor(services): remove special storage counting
 from gitea

---
 selfprivacy_api/services/gitea/__init__.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py
index bb885a5..707cdec 100644
--- a/selfprivacy_api/services/gitea/__init__.py
+++ b/selfprivacy_api/services/gitea/__init__.py
@@ -5,7 +5,6 @@ import typing
 
 from selfprivacy_api.jobs import Job, Jobs
 from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
-from selfprivacy_api.services.generic_size_counter import get_storage_usage
 from selfprivacy_api.services.generic_status_getter import get_service_status
 from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
 from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
@@ -109,13 +108,6 @@ class Gitea(Service):
     def get_logs():
         return ""
 
-    @staticmethod
-    def get_storage_usage() -> int:
-        storage_usage = 0
-        for folder in Gitea.get_folders():
-            storage_usage += get_storage_usage(folder)
-        return storage_usage
-
     @staticmethod
     def get_folders() -> typing.List[str]:
         return ["/var/lib/gitea"]

From 208e256c0fab5ff6b0801a2e6c2bdf50ba6f124f Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 17 Apr 2023 14:11:47 +0000
Subject: [PATCH 314/537] refactor(services): remove special storage counting
 from jitsi

---
 selfprivacy_api/services/jitsi/__init__.py | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py
index 60a94b3..a969eb2 100644
--- a/selfprivacy_api/services/jitsi/__init__.py
+++ b/selfprivacy_api/services/jitsi/__init__.py
@@ -5,7 +5,6 @@ import typing
 
 from selfprivacy_api.jobs import Job, Jobs
 from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
-from selfprivacy_api.services.generic_size_counter import get_storage_usage
 from selfprivacy_api.services.generic_status_getter import (
     get_service_status,
     get_service_status_from_several_units,
@@ -109,12 +108,6 @@ class Jitsi(Service):
     def get_logs():
         return ""
 
-    @staticmethod
-    def get_storage_usage() -> int:
-        storage_usage = 0
-        storage_usage += get_storage_usage("/var/lib/jitsi-meet")
-        return storage_usage
-
     @staticmethod
     def get_folders() -> typing.List[str]:
         return ["/var/lib/jitsi-meet"]
From f5de4974e727e8dc5ef97ad741c29d60762a36f3 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 17 Apr 2023 14:14:48 +0000
Subject: [PATCH 315/537] refactor(services): remove special storage counting
 from mail

---
 selfprivacy_api/services/mailserver/__init__.py | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py
index 9c61b7a..eb69ae9 100644
--- a/selfprivacy_api/services/mailserver/__init__.py
+++ b/selfprivacy_api/services/mailserver/__init__.py
@@ -6,7 +6,6 @@ import typing
 
 from selfprivacy_api.jobs import Job, JobStatus, Jobs
 from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
-from selfprivacy_api.services.generic_size_counter import get_storage_usage
 from selfprivacy_api.services.generic_status_getter import (
     get_service_status,
     get_service_status_from_several_units,
@@ -96,10 +95,6 @@ class MailServer(Service):
     def get_logs():
         return ""
 
-    @staticmethod
-    def get_storage_usage() -> int:
-        return get_storage_usage("/var/vmail")
-
     @staticmethod
     def get_folders() -> typing.List[str]:
         return ["/var/vmail", "/var/sieve"]

From 34854b51188e8c05888f04a657c9664a3f598cae Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 17 Apr 2023 14:18:44 +0000
Subject: [PATCH 316/537] documentation(services): move the storage count
 docstring to parent service class

---
 selfprivacy_api/services/service.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py
index c5a7527..1a1cb48 100644
--- a/selfprivacy_api/services/service.py
+++ b/selfprivacy_api/services/service.py
@@ -124,6 +124,11 @@ class Service(ABC):
 
     @classmethod
     def get_storage_usage(cls) -> int:
+        """
+        Calculate the real storage usage of folders occupied by the service
+        Calculate using pathlib.
+        Do not follow symlinks.
+        """
         storage_used = 0
         for folder in cls.get_folders():
             storage_used += get_storage_usage(folder)
- """ - return get_storage_usage("/var/lib/nextcloud") - @staticmethod def get_folders() -> typing.List[str]: return ["/var/lib/nextcloud"] From f0d6ac624df878a1f7ad23d8f711099e9b855ae4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:28:12 +0000 Subject: [PATCH 318/537] refactor(services): remove special storage counting from ocserv --- selfprivacy_api/services/ocserv/__init__.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index 54a4b98..a15cb84 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -4,7 +4,6 @@ import subprocess import typing from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service -from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData @@ -113,10 +112,6 @@ class Ocserv(Service): ), ] - @staticmethod - def get_storage_usage() -> int: - return 0 - @staticmethod def get_folders() -> typing.List[str]: return [] From df5b318fff2567d6deec651e728e525f88bffe0b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:31:04 +0000 Subject: [PATCH 319/537] refactor(services): remove special storage counting from pleroma --- selfprivacy_api/services/pleroma/__init__.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index f3656a6..0191c18 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -4,7 +4,6 @@ import subprocess import typing from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service -from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain @@ -96,13 +95,6 @@ class Pleroma(Service): def get_logs(): return "" - @staticmethod - def get_storage_usage() -> int: - storage_usage = 0 - storage_usage += get_storage_usage("/var/lib/pleroma") - storage_usage += get_storage_usage("/var/lib/postgresql") - return storage_usage - @staticmethod def get_folders() -> typing.List[str]: return ["/var/lib/pleroma", "/var/lib/postgresql"] From 6057e350ef63bf2bb11c819afa826a4486498728 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 14:54:42 +0000 Subject: [PATCH 320/537] refactor(services): add OwnedPath struct --- selfprivacy_api/services/owned_path.py | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 selfprivacy_api/services/owned_path.py diff --git a/selfprivacy_api/services/owned_path.py b/selfprivacy_api/services/owned_path.py new file mode 100644 index 0000000..23542dc --- /dev/null +++ b/selfprivacy_api/services/owned_path.py @@ -0,0 +1,7 @@ +from pydantic import BaseModel + + +class OwnedPath(BaseModel): + path: str + owner: str + group: str From 9f2dbaa98debdf9618ece4488ce456edeb8ccb35 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 17 Apr 2023 15:00:36 +0000 Subject: [PATCH 
From 9f2dbaa98debdf9618ece4488ce456edeb8ccb35 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 17 Apr 2023 15:00:36 +0000
Subject: [PATCH 321/537] refactor(services): add overridable get owner and
 get group

---
 selfprivacy_api/services/service.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py
index 1a1cb48..cc34aa2 100644
--- a/selfprivacy_api/services/service.py
+++ b/selfprivacy_api/services/service.py
@@ -62,6 +62,14 @@ class Service(ABC):
     def get_url() -> typing.Optional[str]:
         pass
 
+    @classmethod
+    def get_user(cls) -> typing.Optional[str]:
+        return cls.get_id()
+
+    @classmethod
+    def get_group(cls) -> typing.Optional[str]:
+        return cls.get_user()
+
     @staticmethod
     @abstractmethod
     def is_movable() -> bool:

From 71b987da57de62f7cb4ffe47905965bcc4eb4ccf Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 17 Apr 2023 15:37:11 +0000
Subject: [PATCH 322/537] refactor(services): add folder owner derivation

---
 selfprivacy_api/services/service.py | 37 +++++++++++++++++++++++++----
 tests/test_services.py              | 20 ++++++++++++++++
 2 files changed, 53 insertions(+), 4 deletions(-)
 create mode 100644 tests/test_services.py

diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py
index cc34aa2..1a1d56f 100644
--- a/selfprivacy_api/services/service.py
+++ b/selfprivacy_api/services/service.py
@@ -9,6 +9,7 @@ from selfprivacy_api.jobs import Job
 from selfprivacy_api.utils.block_devices import BlockDevice
 
 from selfprivacy_api.services.generic_size_counter import get_storage_usage
+from selfprivacy_api.services.owned_path import OwnedPath
 
 
 class ServiceStatus(Enum):
@@ -152,10 +153,29 @@ class Service(ABC):
     def get_drive() -> str:
         pass
 
-    @staticmethod
-    @abstractmethod
-    def get_folders() -> str:
-        pass
+    @classmethod
+    def get_folders(cls) -> str:
+        """
+        get a plain list of occupied directories
+        Default extracts info from overridden get_owned_folders()
+        """
+        if cls.get_owned_folders == Service.get_owned_folders:
+            raise NotImplementedError(
+                "you need to implement at least one of get_folders() or get_owned_folders()"
+            )
+        return [owned_folder.path for owned_folder in cls.get_owned_folders()]
+
+    @classmethod
+    def get_owned_folders(cls) -> str:
+        """
+        Get a list of occupied directories with ownership info
+        Default extracts info from overridden get_folders()
+        """
+        if cls.get_folders == Service.get_folders:
+            raise NotImplementedError(
+                "you need to implement at least one of get_folders() or get_owned_folders()"
+            )
+        return [cls.owned_path(path) for path in cls.get_folders()]
 
     @staticmethod
     def get_foldername(path: str) -> str:
@@ -165,6 +185,15 @@ class Service(ABC):
     def move_to_volume(self, volume: BlockDevice) -> Job:
         pass
 
+    @classmethod
+    def owned_path(cls, path: str):
+        """A default guess on folder ownership"""
+        return OwnedPath(
+            path=path,
+            owner=cls.get_user(),
+            group=cls.get_group(),
+        )
+
     def pre_backup(self):
         pass
 
diff --git a/tests/test_services.py b/tests/test_services.py
new file mode 100644
index 0000000..52164a9
--- /dev/null
+++ b/tests/test_services.py
@@ -0,0 +1,20 @@
+"""
+    Tests for generic service methods
+"""
+from pytest import raises
+
+from selfprivacy_api.services.test_service import DummyService
+from selfprivacy_api.services.service import Service
+
+
+def test_unimplemented_folders_raises():
+    with raises(NotImplementedError):
+        Service.get_folders()
+    with raises(NotImplementedError):
+        Service.get_owned_folders()
+
+    class OurDummy(DummyService, folders=["testydir", "dirtessimo"]):
+        pass
+
+    owned_folders = OurDummy.get_owned_folders()
+    assert owned_folders is not None
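The two folder methods are deliberately defined in terms of each other, with a method-identity check so that calling either one on a class that overrides neither fails loudly instead of recursing. A sketch of the intended usage, with a hypothetical subclass that overrides only the plain variant (class name and folder are illustrative):

    import typing
    from selfprivacy_api.services.service import Service

    class PlainFoldersService(Service):
        # Overriding only get_folders(); get_owned_folders() is then
        # derived via owned_path(), which guesses owner and group
        # from get_user() and get_group().
        @staticmethod
        def get_folders() -> typing.List[str]:
            return ["/var/lib/example"]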
From 92be699031138e5100971329186b03b779bef32c Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 17 Apr 2023 16:01:51 +0000
Subject: [PATCH 323/537] refactor(services): make a foldermove from owned
 path

---
 selfprivacy_api/services/generic_service_mover.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py
index 148ac1a..cf353cd 100644
--- a/selfprivacy_api/services/generic_service_mover.py
+++ b/selfprivacy_api/services/generic_service_mover.py
@@ -1,5 +1,6 @@
 """Generic handler for moving services"""
 
+from __future__ import annotations
 import subprocess
 import time
 import pathlib
@@ -11,6 +12,7 @@ from selfprivacy_api.utils.huey import huey
 from selfprivacy_api.utils.block_devices import BlockDevice
 from selfprivacy_api.utils import ReadUserData, WriteUserData
 from selfprivacy_api.services.service import Service, ServiceStatus
+from selfprivacy_api.services.owned_path import OwnedPath
 
 
 class FolderMoveNames(BaseModel):
@@ -19,6 +21,19 @@ class FolderMoveNames(BaseModel):
     owner: str
     group: str
 
+    @staticmethod
+    def from_owned_path(path: OwnedPath) -> FolderMoveNames:
+        return FolderMoveNames(
+            name=FolderMoveNames.get_foldername(),
+            bind_location=path.path,
+            owner=path.owner,
+            group=path.group,
+        )
+
+    @staticmethod
+    def get_foldername(path: str) -> str:
+        return path.split("/")[-1]
+
 
 @huey.task()
 def move_service(

From aca05f26ea4830959bf53e9f887404cfe4a7fed9 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 17 Apr 2023 16:04:21 +0000
Subject: [PATCH 324/537] fix(services): folder methods typing

---
 selfprivacy_api/services/service.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py
index 1a1d56f..f804773 100644
--- a/selfprivacy_api/services/service.py
+++ b/selfprivacy_api/services/service.py
@@ -154,7 +154,7 @@ class Service(ABC):
         pass
 
     @classmethod
-    def get_folders(cls) -> str:
+    def get_folders(cls) -> typing.List[str]:
         """
         get a plain list of occupied directories
         Default extracts info from overridden get_owned_folders()
@@ -166,7 +166,7 @@ class Service(ABC):
         return [owned_folder.path for owned_folder in cls.get_owned_folders()]
 
     @classmethod
-    def get_owned_folders(cls) -> str:
+    def get_owned_folders(cls) -> typing.List[OwnedPath]:
         """
         Get a list of occupied directories with ownership info
         Default extracts info from overridden get_folders()
From 7a5af6af99453056afc2dceffb8cbc9695ad8250 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 19 Apr 2023 11:34:14 +0000
Subject: [PATCH 325/537] test(services): test that we indeed return correct
 folders and owned folders from real services

---
 .../services/bitwarden/__init__.py           |  4 ++++
 selfprivacy_api/services/pleroma/__init__.py | 20 +++++++++++++++--
 tests/test_services.py                       | 22 +++++++++++++++++++
 3 files changed, 44 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py
index 8d2a903..56eb0fd 100644
--- a/selfprivacy_api/services/bitwarden/__init__.py
+++ b/selfprivacy_api/services/bitwarden/__init__.py
@@ -37,6 +37,10 @@ class Bitwarden(Service):
         """Read SVG icon from file and return it as base64 encoded string."""
         return base64.b64encode(BITWARDEN_ICON.encode("utf-8")).decode("utf-8")
 
+    @staticmethod
+    def get_user() -> str:
+        return "vaultwarden"
+
     @staticmethod
     def get_url() -> typing.Optional[str]:
         """Return service url."""
diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py
index 0191c18..f7e185b 100644
--- a/selfprivacy_api/services/pleroma/__init__.py
+++ b/selfprivacy_api/services/pleroma/__init__.py
@@ -6,6 +6,7 @@ from selfprivacy_api.jobs import Job, Jobs
 from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
 from selfprivacy_api.services.generic_status_getter import get_service_status
 from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
+from selfprivacy_api.services.owned_path import OwnedPath
 from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
 from selfprivacy_api.utils.block_devices import BlockDevice
 import selfprivacy_api.utils.network as network_utils
@@ -96,8 +97,23 @@ class Pleroma(Service):
         return ""
 
     @staticmethod
-    def get_folders() -> typing.List[str]:
-        return ["/var/lib/pleroma", "/var/lib/postgresql"]
+    def get_owned_folders() -> typing.List[OwnedPath]:
+        """
+        Get a list of occupied directories with ownership info
+        pleroma has folders that are owned by different users
+        """
+        return [
+            OwnedPath(
+                path="/var/lib/pleroma",
+                owner="pleroma",
+                group="pleroma",
+            ),
+            OwnedPath(
+                path="/var/lib/postgresql",
+                owner="postgres",
+                group="postgres",
+            ),
+        ]
 
     @staticmethod
     def get_drive() -> str:
diff --git a/tests/test_services.py b/tests/test_services.py
index 52164a9..fbbae09 100644
--- a/tests/test_services.py
+++ b/tests/test_services.py
@@ -3,6 +3,10 @@
 """
 from pytest import raises
 
+from selfprivacy_api.services.bitwarden import Bitwarden
+from selfprivacy_api.services.pleroma import Pleroma
+from selfprivacy_api.services.owned_path import OwnedPath
+
 from selfprivacy_api.services.test_service import DummyService
 from selfprivacy_api.services.service import Service
 
@@ -18,3 +22,21 @@ def test_unimplemented_folders_raises():
 
     owned_folders = OurDummy.get_owned_folders()
     assert owned_folders is not None
+
+
+def test_owned_folders_from_not_owned():
+    assert Bitwarden.get_owned_folders() == [
+        OwnedPath(
+            path=folder,
+            group="vaultwarden",
+            owner="vaultwarden",
+        )
+        for folder in Bitwarden.get_folders()
+    ]
+
+
+def test_paths_from_owned_paths():
+    assert len(Pleroma.get_folders()) == 2
+    assert Pleroma.get_folders() == [
+        ownedpath.path for ownedpath in Pleroma.get_owned_folders()
+    ]
From f09d21a031fe223d16f1e579976283c0eced1709 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 19 Apr 2023 12:12:46 +0000
Subject: [PATCH 326/537] test(services): test derivation of foldermoves

---
 .../services/generic_service_mover.py |  2 +-
 tests/test_services.py                | 16 ++++++++++++++++
 2 files changed, 17 insertions(+), 1 deletion(-)

diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py
index cf353cd..1e5efe6 100644
--- a/selfprivacy_api/services/generic_service_mover.py
+++ b/selfprivacy_api/services/generic_service_mover.py
@@ -24,7 +24,7 @@ class FolderMoveNames(BaseModel):
     @staticmethod
     def from_owned_path(path: OwnedPath) -> FolderMoveNames:
         return FolderMoveNames(
-            name=FolderMoveNames.get_foldername(),
+            name=FolderMoveNames.get_foldername(path.path),
             bind_location=path.path,
             owner=path.owner,
             group=path.group,
diff --git a/tests/test_services.py b/tests/test_services.py
index fbbae09..5816140 100644
--- a/tests/test_services.py
+++ b/tests/test_services.py
@@ -6,6 +6,7 @@ from pytest import raises
 from selfprivacy_api.services.bitwarden import Bitwarden
 from selfprivacy_api.services.pleroma import Pleroma
 from selfprivacy_api.services.owned_path import OwnedPath
+from selfprivacy_api.services.generic_service_mover import FolderMoveNames
 
 from selfprivacy_api.services.test_service import DummyService
 from selfprivacy_api.services.service import Service
@@ -40,3 +41,18 @@ def test_paths_from_owned_paths():
     assert Pleroma.get_folders() == [
         ownedpath.path for ownedpath in Pleroma.get_owned_folders()
     ]
+
+
+def test_foldermoves_from_ownedpaths():
+    owned = OwnedPath(
+        path="var/lib/bitwarden",
+        group="vaultwarden",
+        owner="vaultwarden",
+    )
+
+    assert FolderMoveNames.from_owned_path(owned) == FolderMoveNames(
+        name="bitwarden",
+        bind_location="var/lib/bitwarden",
+        group="vaultwarden",
+        owner="vaultwarden",
+    )
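With the missing argument fixed, a FolderMoveNames can be derived from any OwnedPath, which is exactly what the new test pins down. A usage sketch:

    from selfprivacy_api.services.owned_path import OwnedPath
    from selfprivacy_api.services.generic_service_mover import FolderMoveNames

    owned = OwnedPath(path="/var/lib/gitea", owner="gitea", group="gitea")
    move = FolderMoveNames.from_owned_path(owned)
    # The mover wants the basename for naming the move and the
    # full path as the bind location.
    assert move.name == "gitea"
    assert move.bind_location == "/var/lib/gitea"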
From 592eb1a1f8a306c13c252be8fb56f8f2508e5379 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 19 Apr 2023 12:43:47 +0000
Subject: [PATCH 327/537] refactor(services): use fully generic foldermoves

---
 .../services/bitwarden/__init__.py           | 10 +---------
 .../services/generic_service_mover.py        |  9 +++++++++
 selfprivacy_api/services/gitea/__init__.py   | 10 +---------
 .../services/mailserver/__init__.py          | 19 +++++--------------
 .../services/nextcloud/__init__.py           |  9 +--------
 selfprivacy_api/services/pleroma/__init__.py | 15 +--------------
 6 files changed, 18 insertions(+), 54 deletions(-)

diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py
index 56eb0fd..6842af6 100644
--- a/selfprivacy_api/services/bitwarden/__init__.py
+++ b/selfprivacy_api/services/bitwarden/__init__.py
@@ -154,15 +154,7 @@ class Bitwarden(Service):
             self,
             volume,
             job,
-            [
-                FolderMoveNames(
-                    name=Bitwarden.get_foldername(folder),
-                    bind_location=folder,
-                    group="vaultwarden",
-                    owner="vaultwarden",
-                )
-                for folder in Bitwarden.get_folders()
-            ],
+            FolderMoveNames.default_foldermoves(self),
             "bitwarden",
         )
 
diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py
index 1e5efe6..e2b26f4 100644
--- a/selfprivacy_api/services/generic_service_mover.py
+++ b/selfprivacy_api/services/generic_service_mover.py
@@ -34,6 +34,15 @@ class FolderMoveNames(BaseModel):
     def get_foldername(path: str) -> str:
         return path.split("/")[-1]
 
+    @staticmethod
+    def default_foldermoves(service: Service):
+        return (
+            [
+                FolderMoveNames.from_owned_path(folder)
+                for folder in service.get_owned_folders()
+            ]
+        )
+
 
 @huey.task()
 def move_service(
diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py
index 707cdec..f9ff3d2 100644
--- a/selfprivacy_api/services/gitea/__init__.py
+++ b/selfprivacy_api/services/gitea/__init__.py
@@ -148,15 +148,7 @@ class Gitea(Service):
             self,
             volume,
             job,
-            [
-                FolderMoveNames(
-                    name=Gitea.get_foldername(folder),
-                    bind_location=folder,
-                    group="gitea",
-                    owner="gitea",
-                )
-                for folder in Gitea.get_folders()
-            ],
+            FolderMoveNames.default_foldermoves(self),
             "gitea",
         )
 
diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py
index eb69ae9..b0a6e30 100644
--- a/selfprivacy_api/services/mailserver/__init__.py
+++ b/selfprivacy_api/services/mailserver/__init__.py
@@ -37,6 +37,10 @@ class MailServer(Service):
     def get_svg_icon() -> str:
         return base64.b64encode(MAILSERVER_ICON.encode("utf-8")).decode("utf-8")
 
+    @staticmethod
+    def get_user() -> str:
+        return "virtualMail"
+
     @staticmethod
     def get_url() -> typing.Optional[str]:
         """Return service url."""
@@ -158,20 +162,7 @@ class MailServer(Service):
             self,
             volume,
             job,
-            [
-                FolderMoveNames(
-                    name="vmail",
-                    bind_location="/var/vmail",
-                    group="virtualMail",
-                    owner="virtualMail",
-                ),
-                FolderMoveNames(
-                    name="sieve",
-                    bind_location="/var/sieve",
-                    group="virtualMail",
-                    owner="virtualMail",
-                ),
-            ],
+            FolderMoveNames.default_foldermoves(self),
             "mailserver",
         )
 
diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py
index 0036c77..ae81403 100644
--- a/selfprivacy_api/services/nextcloud/__init__.py
+++ b/selfprivacy_api/services/nextcloud/__init__.py
@@ -152,14 +152,7 @@ class Nextcloud(Service):
             self,
             volume,
             job,
-            [
-                FolderMoveNames(
-                    name="nextcloud",
-                    bind_location="/var/lib/nextcloud",
-                    owner="nextcloud",
-                    group="nextcloud",
-                ),
-            ],
+            FolderMoveNames.default_foldermoves(self),
             "nextcloud",
         )
         return job
diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py
index f7e185b..0d5b338 100644
--- a/selfprivacy_api/services/pleroma/__init__.py
+++ b/selfprivacy_api/services/pleroma/__init__.py
@@ -150,20 +150,7 @@ class Pleroma(Service):
             self,
             volume,
             job,
-            [
-                FolderMoveNames(
-                    name="pleroma",
-                    bind_location="/var/lib/pleroma",
-                    owner="pleroma",
-                    group="pleroma",
-                ),
-                FolderMoveNames(
-                    name="postgresql",
-                    bind_location="/var/lib/postgresql",
-                    owner="postgres",
-                    group="postgres",
-                ),
-            ],
+            FolderMoveNames.default_foldermoves(self),
             "pleroma",
         )
         return job

From ebeb76149bef6d5debf69132dcfbd5e1a6075647 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 19 Apr 2023 13:59:03 +0000
Subject: [PATCH 328/537] refactor(backups): make local secret setting public

---
 selfprivacy_api/backup/local_secret.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py
index 02d78a4..e04733c 100644
--- a/selfprivacy_api/backup/local_secret.py
+++ b/selfprivacy_api/backup/local_secret.py
@@ -23,10 +23,14 @@ class LocalBackupSecret:
             LocalBackupSecret.reset()
         return redis.get(REDIS_KEY)
 
+    @staticmethod
+    def set(secret: str):
+        redis.set(REDIS_KEY, secret)
+
     @staticmethod
     def reset():
         new_secret = LocalBackupSecret._generate()
-        LocalBackupSecret._store(new_secret)
+        LocalBackupSecret.set(new_secret)
 
     @staticmethod
     def exists() -> bool:
@@ -35,7 +39,3 @@ class LocalBackupSecret:
     @staticmethod
     def _generate() -> str:
         return secrets.token_urlsafe(256)
-
-    @staticmethod
-    def _store(secret: str):
-        redis.set(REDIS_KEY, secret)
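Promoting set() to the public API means a backup secret can now be injected, for example one brought over from another machine, instead of only being auto-generated. A sketch of the resulting surface:

    from selfprivacy_api.backup.local_secret import LocalBackupSecret

    secret = LocalBackupSecret.get()          # generated and stored on first use
    LocalBackupSecret.set("imported secret")  # overwrite, e.g. during migration
    LocalBackupSecret.reset()                 # rotate to a fresh random secret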
From d3f9ce7bf51c2623e062741f4f69a915f5af1ba0 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 19 Apr 2023 14:24:53 +0000
Subject: [PATCH 329/537] test(backups): test local secrets

---
 selfprivacy_api/backup/local_secret.py |  4 +++
 tests/test_graphql/test_localsecret.py | 38 ++++++++++++++++++++++++++
 2 files changed, 42 insertions(+)
 create mode 100644 tests/test_graphql/test_localsecret.py

diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py
index e04733c..76237f3 100644
--- a/selfprivacy_api/backup/local_secret.py
+++ b/selfprivacy_api/backup/local_secret.py
@@ -32,6 +32,10 @@ class LocalBackupSecret:
         new_secret = LocalBackupSecret._generate()
         LocalBackupSecret.set(new_secret)
 
+    @staticmethod
+    def _full_reset():
+        redis.delete(REDIS_KEY)
+
     @staticmethod
     def exists() -> bool:
         return redis.exists(REDIS_KEY)
diff --git a/tests/test_graphql/test_localsecret.py b/tests/test_graphql/test_localsecret.py
new file mode 100644
index 0000000..d4b637a
--- /dev/null
+++ b/tests/test_graphql/test_localsecret.py
@@ -0,0 +1,38 @@
+from selfprivacy_api.backup.local_secret import LocalBackupSecret
+from pytest import fixture
+
+
+@fixture()
+def localsecret():
+    LocalBackupSecret._full_reset()
+    return LocalBackupSecret
+
+
+def test_local_secret_firstget(localsecret):
+    assert not LocalBackupSecret.exists()
+    secret = LocalBackupSecret.get()
+    assert LocalBackupSecret.exists()
+    assert secret is not None
+
+    # making sure it does not reset again
+    secret2 = LocalBackupSecret.get()
+    assert LocalBackupSecret.exists()
+    assert secret2 == secret
+
+
+def test_local_secret_reset(localsecret):
+    secret1 = LocalBackupSecret.get()
+
+    LocalBackupSecret.reset()
+    secret2 = LocalBackupSecret.get()
+    assert secret2 is not None
+    assert secret2 != secret1
+
+
+def test_local_secret_set(localsecret):
+    newsecret = "great and totally safe secret"
+    oldsecret = LocalBackupSecret.get()
+    assert oldsecret != newsecret
+
+    LocalBackupSecret.set(newsecret)
+    assert LocalBackupSecret.get() == newsecret
\ No newline at end of file
"r") as file: + for service_file in paths_to_nuke: + with open(service_file, "r") as file: contents.append(file.read()) Backups.back_up(dummy_service) @@ -214,6 +220,30 @@ def test_backup_service_task(backups, dummy_service): assert len(snaps) == 1 +def test_restore_snapshot_task(backups, dummy_service): + Backups.back_up(dummy_service) + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + + paths_to_nuke = service_files(dummy_service) + contents = [] + + for service_file in paths_to_nuke: + with open(service_file, "r") as file: + contents.append(file.read()) + + for p in paths_to_nuke: + remove(p) + + handle = restore_snapshot(snaps[0]) + handle(blocking=True) + + for p, content in zip(paths_to_nuke, contents): + assert path.exists(p) + with open(p, "r") as file: + assert file.read() == content + + def test_autobackup_enable_service(backups, dummy_service): assert not Backups.is_autobackup_enabled(dummy_service) From fa86c45bd089d5d6593f40c6038601b80cca03bc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 21 Apr 2023 11:59:15 +0000 Subject: [PATCH 331/537] feature(backups): simplest jobs intergration in tasks: created and finished --- selfprivacy_api/backup/jobs.py | 31 +++++++++++++++++++++++++++++++ selfprivacy_api/backup/tasks.py | 5 +++++ 2 files changed, 36 insertions(+) create mode 100644 selfprivacy_api/backup/jobs.py diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py new file mode 100644 index 0000000..a90c4bd --- /dev/null +++ b/selfprivacy_api/backup/jobs.py @@ -0,0 +1,31 @@ +from typing import Optional + +from selfprivacy_api.jobs import Jobs, Job, JobStatus +from selfprivacy_api.services.service import Service + + +def backup_job_type(service: Service): + return f"services.{service.get_id()}.backup" + + +def add_backup_job(service: Service) -> Job: + display_name = service.get_display_name() + job = Jobs.add( + type_id=backup_job_type(service), + name=f"Backup {display_name}", + description=f"Backing up {display_name}", + ) + return job + + +def get_job_by_type(type_id: str) -> Optional[Job]: + for job in Jobs.get_jobs(): + if job.type_id == type_id and job.status in [ + JobStatus.CREATED, + JobStatus.RUNNING, + ]: + return job + + +def get_backup_job(service: Service) -> Optional[Job]: + return get_job_by_type(backup_job_type(service)) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index 3236029..111f255 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -4,6 +4,8 @@ from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey from selfprivacy_api.services.service import Service from selfprivacy_api.backup import Backups +from selfprivacy_api.backup.jobs import get_backup_job, add_backup_job +from selfprivacy_api.jobs import Jobs, JobStatus def validate_datetime(dt: datetime): @@ -21,7 +23,10 @@ def validate_datetime(dt: datetime): # huey tasks need to return something @huey.task() def start_backup(service: Service) -> bool: + add_backup_job(service) Backups.back_up(service) + job = get_backup_job(service) + Jobs.update(job, status=JobStatus.FINISHED) return True From b32ca3b11ae609b40f6c7f118f343f7830414a7d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 21 Apr 2023 12:19:59 +0000 Subject: [PATCH 332/537] test(backups): assure that jobs are created and not duplicated --- tests/test_graphql/test_backup.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_graphql/test_backup.py 
From b32ca3b11ae609b40f6c7f118f343f7830414a7d Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 21 Apr 2023 12:19:59 +0000
Subject: [PATCH 332/537] test(backups): ensure that jobs are created and not
 duplicated

---
 tests/test_graphql/test_backup.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 5e064fa..420e14e 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -9,6 +9,7 @@ import selfprivacy_api.services as services
 from selfprivacy_api.services import get_service_by_id
 from selfprivacy_api.services.test_service import DummyService
 from selfprivacy_api.graphql.queries.providers import BackupProvider
+from selfprivacy_api.jobs import Jobs, JobStatus
 
 from selfprivacy_api.backup import Backups
 import selfprivacy_api.backup.providers as providers
@@ -16,6 +17,7 @@ from selfprivacy_api.backup.providers import AbstractBackupProvider
 from selfprivacy_api.backup.providers.backblaze import Backblaze
 from selfprivacy_api.backup.tasks import start_backup, restore_snapshot
 from selfprivacy_api.backup.storage import Storage
+from selfprivacy_api.backup.jobs import get_backup_job
 
 
 TESTFILE_BODY = "testytest!"
@@ -30,6 +32,8 @@ def backups(tmpdir):
     test_repo_path = path.join(tmpdir, "totallyunrelated")
     Backups.set_localfile_repo(test_repo_path)
 
+    Jobs.reset()
+
 
 @pytest.fixture()
 def backups_backblaze(generic_userdata):
@@ -219,6 +223,11 @@ def test_backup_service_task(backups, dummy_service):
     snaps = Backups.get_snapshots(dummy_service)
     assert len(snaps) == 1
 
+    id = dummy_service.get_id()
+    finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED]
+    finished_types = [job.type_id for job in finished_jobs]
+    assert finished_types.count(f"services.{id}.backup") == 1
+

From 029cb47db62a5e2dd320c83788365e10e7f08ee0 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 24 Apr 2023 16:15:12 +0000
Subject: [PATCH 333/537] feature(backups): also create a job if not called
 from a task

---
 selfprivacy_api/backup/__init__.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index f89a54e..0f35456 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -15,6 +15,7 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider
 from selfprivacy_api.backup.providers.provider import AbstractBackupProvider
 from selfprivacy_api.backup.providers import get_provider
 from selfprivacy_api.backup.storage import Storage
+from selfprivacy_api.backup.jobs import get_backup_job, add_backup_job
 
 
 class Backups:
@@ -197,6 +198,9 @@ class Backups:
         folders = service.get_folders()
         repo_name = service.get_id()
 
+        if get_backup_job(service) is None:
+            add_backup_job(service)
+
         service.pre_backup()
         snapshot = Backups.provider().backuper.start_backup(folders, repo_name)
         Backups._store_last_snapshot(repo_name, snapshot)

From 026d72b551f5b6ce47b4e9a04047d1a33a2929a5 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 24 Apr 2023 16:16:45 +0000
Subject: [PATCH 334/537] refactor(backups): delete unused redis import from
 backups class

---
 selfprivacy_api/backup/__init__.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 0f35456..e74877d 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -4,8 +4,6 @@ from datetime import datetime, timedelta
 from selfprivacy_api.models.backup.snapshot import Snapshot
 from selfprivacy_api.utils import ReadUserData
 
-from selfprivacy_api.utils.redis_pool import RedisPool
-
 from selfprivacy_api.services import get_service_by_id
 from selfprivacy_api.services.service import Service
From ac6d25c4c1ccd9ac34be1a6c9afee45a631d288b Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 24 Apr 2023 16:37:07 +0000
Subject: [PATCH 335/537] refactor(backups): make a backup job running when
 the backup code itself is executed

---
 selfprivacy_api/backup/__init__.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index e74877d..ec1180e 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -14,6 +14,7 @@ from selfprivacy_api.backup.providers.provider import AbstractBackupProvider
 from selfprivacy_api.backup.providers import get_provider
 from selfprivacy_api.backup.storage import Storage
 from selfprivacy_api.backup.jobs import get_backup_job, add_backup_job
+from selfprivacy_api.jobs import Jobs, JobStatus
 
 
 class Backups:
@@ -196,8 +197,10 @@ class Backups:
         folders = service.get_folders()
         repo_name = service.get_id()
 
-        if get_backup_job(service) is None:
-            add_backup_job(service)
+        job = get_backup_job(service)
+        if job is None:
+            job = add_backup_job(service)
+        Jobs.update(job, status=JobStatus.RUNNING)
 
         service.pre_backup()
         snapshot = Backups.provider().backuper.start_backup(folders, repo_name)

From 312fceeb9cbbc004b1cccbcee7801035ec527481 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 24 Apr 2023 16:50:22 +0000
Subject: [PATCH 336/537] test(backups): break out a finished job checker

---
 tests/test_graphql/test_backup.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 420e14e..cb52e88 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -216,6 +216,12 @@ def test_init_tracking(backups, raw_dummy_service):
     assert Backups.is_initted(raw_dummy_service) is True
 
 
+def assert_job_finished(job_type, count):
+    finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED]
+    finished_types = [job.type_id for job in finished_jobs]
+    assert finished_types.count(job_type) == count
+
+
 def test_backup_service_task(backups, dummy_service):
     handle = start_backup(dummy_service)
     handle(blocking=True)
@@ -224,9 +230,7 @@ def test_backup_service_task(backups, dummy_service):
     snaps = Backups.get_snapshots(dummy_service)
     assert len(snaps) == 1
 
     id = dummy_service.get_id()
-    finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED]
-    finished_types = [job.type_id for job in finished_jobs]
-    assert finished_types.count(f"services.{id}.backup") == 1
+    assert_job_finished(f"services.{id}.backup", count=1)

From f622d617cf61f4527c893df3dbd4f7218a0aa9fd Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 24 Apr 2023 17:03:56 +0000
Subject: [PATCH 337/537] test(backups): test jobs starting and finishing when
 from Backups

---
 selfprivacy_api/backup/__init__.py | 1 +
 selfprivacy_api/backup/tasks.py    | 5 +++--
 tests/test_graphql/test_backup.py  | 5 +++++
 3 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index ec1180e..1992648 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -207,6 +207,7 @@ class Backups:
         Backups._store_last_snapshot(repo_name, snapshot)
 
         service.post_restore()
+        Jobs.update(job, status=JobStatus.FINISHED)
 
     @staticmethod
     def init_repo(service: Service):
diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py
index 111f255..3b3051e 100644
--- a/selfprivacy_api/backup/tasks.py
+++ b/selfprivacy_api/backup/tasks.py
@@ -23,10 +23,11 @@ def validate_datetime(dt: datetime):
 # huey tasks need to return something
 @huey.task()
 def start_backup(service: Service) -> bool:
+    # Backups can create the job, but doing this here
+    # allows us to see the job as queued before it is actually executed
    add_backup_job(service)
+
     Backups.back_up(service)
-    job = get_backup_job(service)
-    Jobs.update(job, status=JobStatus.FINISHED)
     return True
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index cb52e88..a185492 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -131,7 +131,10 @@ def test_backup_simple_file(raw_dummy_service, file_backup):
 
 
 def test_backup_service(dummy_service, backups):
+    id = dummy_service.get_id()
+    assert_job_finished(f"services.{id}.backup", count=0)
     assert Backups.get_last_backed_up(dummy_service) is None
+
     Backups.back_up(dummy_service)
 
     now = datetime.now(timezone.utc)
@@ -140,6 +143,8 @@ def test_backup_service(dummy_service, backups):
     assert now > date
     assert now - date < timedelta(minutes=1)
 
+    assert_job_finished(f"services.{id}.backup", count=1)
+
 
 def test_no_repo(memory_backup):
     with pytest.raises(ValueError):
From 05f2cc3f149bc3a4a0db64d5cbb13d00f5835997 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 8 May 2023 10:49:18 +0000
Subject: [PATCH 338/537] refactor(backups): cleanup unused imports in tasks

---
 selfprivacy_api/backup/tasks.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py
index 3b3051e..d92a926 100644
--- a/selfprivacy_api/backup/tasks.py
+++ b/selfprivacy_api/backup/tasks.py
@@ -4,8 +4,7 @@ from selfprivacy_api.models.backup.snapshot import Snapshot
 from selfprivacy_api.utils.huey import huey
 from selfprivacy_api.services.service import Service
 from selfprivacy_api.backup import Backups
-from selfprivacy_api.backup.jobs import get_backup_job, add_backup_job
-from selfprivacy_api.jobs import Jobs, JobStatus
+from selfprivacy_api.backup.jobs import add_backup_job
 
 
 def validate_datetime(dt: datetime):

From f116ce1bdbfb65ac9f931b52af8de17238175e06 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 8 May 2023 10:55:22 +0000
Subject: [PATCH 339/537] feature(backups): set job status to error if backup
 fails

---
 selfprivacy_api/backup/__init__.py | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 1992648..f3e2ba5 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -202,11 +202,15 @@ class Backups:
             job = add_backup_job(service)
         Jobs.update(job, status=JobStatus.RUNNING)
 
-        service.pre_backup()
-        snapshot = Backups.provider().backuper.start_backup(folders, repo_name)
-        Backups._store_last_snapshot(repo_name, snapshot)
+        try:
+            service.pre_backup()
+            snapshot = Backups.provider().backuper.start_backup(folders, repo_name)
+            Backups._store_last_snapshot(repo_name, snapshot)
+            service.post_restore()
+        except Exception as e:
+            Jobs.update(job, status=JobStatus.ERROR)
+            raise e
 
-        service.post_restore()
         Jobs.update(job, status=JobStatus.FINISHED)
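With the try/except in place, a failure anywhere in pre_backup(), the restic invocation, or snapshot bookkeeping flips the job to ERROR before the exception propagates, so a polling client never sees it stuck in RUNNING. A sketch of the observable behavior (assuming some service whose backup raises):

    from selfprivacy_api.backup import Backups

    try:
        Backups.back_up(broken_service)
    except Exception:
        pass  # the exception still reaches the caller...
    # ...but the associated job has been marked JobStatus.ERROR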
 selfprivacy_api/jobs/__init__.py | 23 ++++++++++++++++++++++-
 1 file changed, 22 insertions(+), 1 deletion(-)

diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py
index fe4a053..211a2ab 100644
--- a/selfprivacy_api/jobs/__init__.py
+++ b/selfprivacy_api/jobs/__init__.py
@@ -27,7 +27,7 @@ from selfprivacy_api.utils.redis_pool import RedisPool
 JOB_EXPIRATION_SECONDS = 10 * 24 * 60 * 60  # ten days
 
 
-class JobStatus(Enum):
+class JobStatus(str, Enum):
     """
     Status of a job.
     """
@@ -70,6 +70,7 @@ class Jobs:
         jobs = Jobs.get_jobs()
         for job in jobs:
             Jobs.remove(job)
+        Jobs.reset_logs()
 
     @staticmethod
     def add(
@@ -120,6 +121,21 @@ class Jobs:
                 return True
         return False
 
+    @staticmethod
+    def reset_logs():
+        redis = RedisPool().get_connection()
+        for key in redis.keys("jobs_logs:" + "*"):
+            redis.delete(key)
+
+    @staticmethod
+    def log_status_update(job: Job, status: JobStatus):
+        redis = RedisPool().get_connection()
+        key = _redis_log_key_from_uuid(job.uid)
+        if redis.exists(key):
+            assert redis.type(key) == "list"
+        redis.lpush(key, str(status))
+        redis.expire(key, 10)
+
     @staticmethod
     def update(
         job: Job,
@@ -143,6 +159,7 @@ class Jobs:
         if progress is not None:
             job.progress = progress
         job.status = status
+        Jobs.log_status_update(job, status)
         job.updated_at = datetime.datetime.now()
         job.error = error
         job.result = result
@@ -198,6 +215,10 @@ def _redis_key_from_uuid(uuid_string):
     return "jobs:" + str(uuid_string)
 
 
+def _redis_log_key_from_uuid(uuid_string):
+    return "jobs_logs:" + str(uuid_string)
+
+
 def _store_job_as_hash(redis, redis_key, model):
     for key, value in model.dict().items():
         if isinstance(value, uuid.UUID):

From ebff2b308ade4d8308f998e8369397376ef86a4b Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 8 May 2023 12:43:11 +0000
Subject: [PATCH 341/537] test(backups): test that the job has run

---
 selfprivacy_api/jobs/__init__.py  | 19 ++++++++++++++++++-
 tests/test_graphql/test_backup.py | 10 +++++++++-
 2 files changed, 27 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py
index 211a2ab..16306a7 100644
--- a/selfprivacy_api/jobs/__init__.py
+++ b/selfprivacy_api/jobs/__init__.py
@@ -133,9 +133,26 @@ class Jobs:
         key = _redis_log_key_from_uuid(job.uid)
         if redis.exists(key):
             assert redis.type(key) == "list"
-        redis.lpush(key, str(status))
+        redis.lpush(key, status.value)
         redis.expire(key, 10)
 
+    @staticmethod
+    def status_updates(job: Job) -> typing.List[JobStatus]:
+        result = []
+
+        redis = RedisPool().get_connection()
+        key = _redis_log_key_from_uuid(job.uid)
+        if not redis.exists(key):
+            return []
+
+        status_strings = redis.lrange(key, 0, -1)
+        for status in status_strings:
+            try:
+                result.append(JobStatus[status])
+            except KeyError as e:
+                raise ValueError("impossible job status: " + status) from e
+        return result
+
     @staticmethod
     def update(
         job: Job,
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index a185492..ec12506 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -227,6 +227,12 @@ def assert_job_finished(job_type, count):
     assert finished_types.count(job_type) == count
 
 
+def assert_job_has_run(job_type):
+    finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED]
+    job = [job for job in finished_jobs if job.type_id == job_type][0]
+    assert JobStatus.RUNNING in Jobs.status_updates(job)
+
+
 def test_backup_service_task(backups, dummy_service):
     handle = start_backup(dummy_service)
     handle(blocking=True)
@@ -235,7 +241,9 @@ def test_backup_service_task(backups, dummy_service):
     snaps = Backups.get_snapshots(dummy_service)
     assert len(snaps) == 1
 
     id = dummy_service.get_id()
-    assert_job_finished(f"services.{id}.backup", count=1)
+    job_type_id = f"services.{id}.backup"
+    assert_job_finished(job_type_id, count=1)
+    assert_job_has_run(job_type_id)
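The status log is nothing more than a redis list with a short TTL: each update LPUSHes the new status and refreshes a 10-second expiry, so histories self-destruct shortly after a job stops updating. The same pattern in isolation (key name illustrative):

    import redis

    r = redis.Redis()
    key = "jobs_logs:example-uid"
    r.lpush(key, "RUNNING")   # newest entry first
    r.expire(key, 10)         # whole list vanishes 10s after the last push
    updates = r.lrange(key, 0, -1)  # read back while it still exists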
From ecf44e5169213188365d78f000de526c862aa96e Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Fri, 12 May 2023 11:07:55 +0000
Subject: [PATCH 342/537] feature(backups): deny adding a backup job if
 another one is already queued

---
 selfprivacy_api/backup/jobs.py | 31 ++++++++++++++++++++++++++++---
 1 file changed, 28 insertions(+), 3 deletions(-)

diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py
index a90c4bd..ebc2ea0 100644
--- a/selfprivacy_api/backup/jobs.py
+++ b/selfprivacy_api/backup/jobs.py
@@ -1,14 +1,39 @@
-from typing import Optional
+from typing import Optional, List
 
 from selfprivacy_api.jobs import Jobs, Job, JobStatus
 from selfprivacy_api.services.service import Service
 
 
-def backup_job_type(service: Service):
-    return f"services.{service.get_id()}.backup"
+def job_type_prefix(service: Service) -> str:
+    return f"services.{service.get_id()}"
+
+
+def backup_job_type(service: Service) -> str:
+    return f"{job_type_prefix(service)}.backup"
+
+
+def get_jobs_by_service(service: Service) -> List[Job]:
+    result = []
+    for job in Jobs.get_jobs():
+        if job.type_id.startswith(job_type_prefix(service)) and job.status in [
+            JobStatus.CREATED,
+            JobStatus.RUNNING,
+        ]:
+            result.append(job)
+    return result
+
+
+def is_something_queued_for(service: Service) -> bool:
+    return len(get_jobs_by_service(service)) != 0
 
 
 def add_backup_job(service: Service) -> Job:
+    if is_something_queued_for(service):
+        message = (
+            f"Cannot start a backup of {service.get_id()}, another operation is queued: "
+            + get_jobs_by_service(service)[0].type_id
+        )
+        raise ValueError(message)
     display_name = service.get_display_name()
     job = Jobs.add(
         type_id=backup_job_type(service),
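Since get_jobs_by_service() matches any unfinished job whose type_id starts with the services.<id> prefix, a second backup, or any other queued operation on the same service, is refused up front. A usage sketch:

    from selfprivacy_api.backup.jobs import add_backup_job

    first = add_backup_job(service)  # queued normally
    try:
        add_backup_job(service)      # denied while the first is unfinished
    except ValueError as error:
        # "Cannot start a backup of <id>, another operation is queued: ..."
        print(error)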
    def __init__(self, filename: str, login: str = "", key: str = ""):
         super().__init__()
-        self.backuper = ResticBackuper("", "", f":local:{filename}/")
+        self.backuper = ResticBackuper("", "", ":local:")
+        self.backuper.set_creds("", "", filename)
diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py
index 896f68d..07ddb1c 100644
--- a/selfprivacy_api/backup/restic_backuper.py
+++ b/selfprivacy_api/backup/restic_backuper.py
@@ -18,15 +18,17 @@ class ResticBackuper(AbstractBackuper):
         self.type = type
         self.account = ""
         self.key = ""
+        self.repo = ""
 
-    def set_creds(self, account: str, key: str):
+    def set_creds(self, account: str, key: str, repo: str):
         self.account = account
         self.key = key
+        self.repo = repo
 
-    def restic_repo(self, repository_name: str) -> str:
+    def restic_repo(self) -> str:
         # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone
         # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5
-        return f"rclone:{self.type}{repository_name}/sfbackup"
+        return f"rclone:{self.type}{self.repo}"
 
     def rclone_args(self):
         return "rclone.args=serve restic --stdio" + self.backend_rclone_args()
@@ -44,16 +46,23 @@ class ResticBackuper(AbstractBackuper):
     def _password_command(self):
         return f"echo {LocalBackupSecret.get()}"
 
-    def restic_command(self, repo_name: str, *args):
+    def restic_command(self, *args, branch_name: str = ""):
         command = [
             "restic",
             "-o",
             self.rclone_args(),
             "-r",
-            self.restic_repo(repo_name),
+            self.restic_repo(),
             "--password-command",
             self._password_command(),
         ]
+        if branch_name != "":
+            command.extend(
+                [
+                    "--tag",
+                    branch_name,
+                ]
+            )
         if args != []:
             command.extend(ResticBackuper.__flatten_list(args))
         return command
@@ -78,10 +87,10 @@ class ResticBackuper(AbstractBackuper):
         assert not isinstance(folders, str)
 
         backup_command = self.restic_command(
-            repo_name,
             "backup",
             "--json",
             folders,
+            branch_name=repo_name,
         )
         with subprocess.Popen(
             backup_command,
@@ -115,7 +124,6 @@ class ResticBackuper(AbstractBackuper):
 
     def init(self, repo_name):
         init_command = self.restic_command(
-            repo_name,
             "init",
         )
         with subprocess.Popen(
@@ -130,7 +138,6 @@ class ResticBackuper(AbstractBackuper):
 
     def is_initted(self, repo_name: str) -> bool:
         command = self.restic_command(
-            repo_name,
             "check",
             "--json",
         )
@@ -147,7 +154,6 @@ class ResticBackuper(AbstractBackuper):
         Size of a snapshot
         """
         command = self.restic_command(
-            repo_name,
             "stats",
             snapshot_id,
             "--json",
@@ -169,7 +175,6 @@ class ResticBackuper(AbstractBackuper):
         # I do not alter the signature yet because maybe this can be
         # changed with flags
         restore_command = self.restic_command(
-            repo_name,
             "restore",
             snapshot_id,
             "--target",
@@ -190,7 +195,6 @@ class ResticBackuper(AbstractBackuper):
         raises Value Error if repo does not exist
         """
         listing_command = self.restic_command(
-            repo_name,
             "snapshots",
             "--json",
         )
@@ -217,7 +221,7 @@ class ResticBackuper(AbstractBackuper):
         snapshot = Snapshot(
             id=restic_snapshot["short_id"],
             created_at=restic_snapshot["time"],
-            service_name=repo_name,
+            service_name=restic_snapshot["tags"][0],
         )
 
         snapshots.append(snapshot)
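After this refactor every service shares one restic repository and snapshots are distinguished by tag, so the repo string comes from set_creds() and the service id only appears as --tag. A sketch of how the pieces combine for the local backend (paths illustrative):

    from selfprivacy_api.backup.restic_backuper import ResticBackuper

    backuper = ResticBackuper("", "", ":local:")
    backuper.set_creds("", "", "/var/backup/repo")

    # One shared repo; the service id only shows up as a tag.
    command = backuper.restic_command(
        "backup", "--json", ["/var/lib/bitwarden"],
        branch_name="bitwarden",
    )
    # -> [..., "-r", "rclone::local:/var/backup/repo",
    #     "--tag", "bitwarden", "backup", "--json", "/var/lib/bitwarden"]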
From ca036b294abcf5bac16c589757b041d1e2b36a49 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 17 May 2023 17:59:56 +0000
Subject: [PATCH 344/537] refactor(backups): break out job logs status prefix

---
 selfprivacy_api/jobs/__init__.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py
index 16306a7..d7e4f31 100644
--- a/selfprivacy_api/jobs/__init__.py
+++ b/selfprivacy_api/jobs/__init__.py
@@ -26,6 +26,9 @@ from selfprivacy_api.utils.redis_pool import RedisPool
 
 JOB_EXPIRATION_SECONDS = 10 * 24 * 60 * 60  # ten days
 
+STATUS_LOGS_PREFIX = "jobs_logs:status:"
+PROGRESS_LOGS_PREFIX = "jobs_logs:progress:"
+
 
 class JobStatus(str, Enum):
     """
@@ -124,7 +127,7 @@ class Jobs:
     @staticmethod
     def reset_logs():
         redis = RedisPool().get_connection()
-        for key in redis.keys("jobs_logs:" + "*"):
+        for key in redis.keys(STATUS_LOGS_PREFIX + "*"):
             redis.delete(key)
 
     @staticmethod
@@ -233,7 +236,7 @@ def _redis_key_from_uuid(uuid_string):
 
 
 def _redis_log_key_from_uuid(uuid_string):
-    return "jobs_logs:" + str(uuid_string)
+    return STATUS_LOGS_PREFIX + str(uuid_string)
 
 
 def _store_job_as_hash(redis, redis_key, model):

From 135fb0c42dd06c9a2dd85924e7bebc1190e30a37 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 17 May 2023 18:36:39 +0000
Subject: [PATCH 345/537] feature(backups): job progress logs

---
 selfprivacy_api/jobs/__init__.py | 37 +++++++++++++++++++++++++++++---
 1 file changed, 34 insertions(+), 3 deletions(-)

diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py
index d7e4f31..5c3ea62 100644
--- a/selfprivacy_api/jobs/__init__.py
+++ b/selfprivacy_api/jobs/__init__.py
@@ -133,18 +133,27 @@ class Jobs:
     @staticmethod
     def log_status_update(job: Job, status: JobStatus):
         redis = RedisPool().get_connection()
-        key = _redis_log_key_from_uuid(job.uid)
+        key = _status_log_key_from_uuid(job.uid)
         if redis.exists(key):
             assert redis.type(key) == "list"
         redis.lpush(key, status.value)
         redis.expire(key, 10)
 
+    @staticmethod
+    def log_progress_update(job: Job, progress: int):
+        redis = RedisPool().get_connection()
+        key = _progress_log_key_from_uuid(job.uid)
+        if redis.exists(key):
+            assert redis.type(key) == "list"
+        redis.lpush(key, progress)
+        redis.expire(key, 10)
+
     @staticmethod
     def status_updates(job: Job) -> typing.List[JobStatus]:
         result = []
 
         redis = RedisPool().get_connection()
-        key = _redis_log_key_from_uuid(job.uid)
+        key = _status_log_key_from_uuid(job.uid)
         if not redis.exists(key):
             return []
 
@@ -156,6 +165,23 @@ class Jobs:
                 raise ValueError("impossible job status: " + status) from e
         return result
 
+    @staticmethod
+    def progress_updates(job: Job) -> typing.List[int]:
+        result = []
+
+        redis = RedisPool().get_connection()
+        key = _progress_log_key_from_uuid(job.uid)
+        if not redis.exists(key):
+            return []
+
+        progress_strings = redis.lrange(key, 0, -1)
+        for progress in progress_strings:
+            try:
+                result.append(int(progress))
+            except KeyError as e:
+                raise ValueError("impossible job progress: " + progress) from e
+        return result
+
     @staticmethod
     def update(
         job: Job,
@@ -178,6 +204,7 @@ class Jobs:
             job.status_text = status_text
         if progress is not None:
             job.progress = progress
+            Jobs.log_progress_update(job, progress)
         job.status = status
         Jobs.log_status_update(job, status)
         job.updated_at = datetime.datetime.now()
@@ -235,10 +262,14 @@ def _redis_key_from_uuid(uuid_string):
     return "jobs:" + str(uuid_string)
 
 
-def _redis_log_key_from_uuid(uuid_string):
+def _status_log_key_from_uuid(uuid_string):
     return STATUS_LOGS_PREFIX + str(uuid_string)
 
 
+def _progress_log_key_from_uuid(uuid_string):
+    return PROGRESS_LOGS_PREFIX + str(uuid_string)
+
+
 def _store_job_as_hash(redis, redis_key, model):
     for key, value in model.dict().items():
         if isinstance(value, uuid.UUID):
From: Houkime <> Date: Wed, 17 May 2023 20:02:21 +0000 Subject: [PATCH 346/537] test(backups): break out obtaining finished jobs --- tests/test_graphql/test_backup.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index ec12506..94d8ef3 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -221,15 +221,17 @@ def test_init_tracking(backups, raw_dummy_service): assert Backups.is_initted(raw_dummy_service) is True +def finished_jobs(): + return [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED] + + def assert_job_finished(job_type, count): - finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED] - finished_types = [job.type_id for job in finished_jobs] + finished_types = [job.type_id for job in finished_jobs()] assert finished_types.count(job_type) == count def assert_job_has_run(job_type): - finished_jobs = [job for job in Jobs.get_jobs() if job.status is JobStatus.FINISHED] - job = [job for job in finished_jobs if job.type_id == job_type][0] + job = [job for job in finished_jobs() if job.type_id == job_type][0] assert JobStatus.RUNNING in Jobs.status_updates(job) From d38b8180cbf737d171464d4b568f94c8e066de07 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 17 May 2023 20:09:29 +0000 Subject: [PATCH 347/537] feature(backups): realtime progress updates of backups --- selfprivacy_api/backup/restic_backuper.py | 53 +++++++++++++++++------ tests/test_graphql/test_backup.py | 6 +++ 2 files changed, 45 insertions(+), 14 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 07ddb1c..3e72561 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -7,6 +7,9 @@ from collections.abc import Iterable from selfprivacy_api.backup.backuper import AbstractBackuper from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.backup.jobs import get_backup_job +from selfprivacy_api.services import get_service_by_id +from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.backup.local_secret import LocalBackupSecret @@ -78,6 +81,19 @@ class ResticBackuper(AbstractBackuper): result.append(item) return result + @staticmethod + def output_yielder(command): + with subprocess.Popen( + command, + shell=False, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + ) as handle: + for line in iter(handle.stdout.readline, ""): + if not "NOTICE:" in line: + yield line + def start_backup(self, folders: List[str], repo_name: str): """ Start backup with restic @@ -92,20 +108,25 @@ class ResticBackuper(AbstractBackuper): folders, branch_name=repo_name, ) - with subprocess.Popen( - backup_command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) as handle: - output = handle.communicate()[0].decode("utf-8") - try: - messages = self.parse_json_output(output) - return ResticBackuper._snapshot_from_backup_messages( - messages, repo_name - ) - except ValueError as e: - raise ValueError("could not create a snapshot: ") from e + + messages = [] + try: + for raw_message in ResticBackuper.output_yielder(backup_command): + message = self.parse_json_output(raw_message) + if message["message_type"] == "status": + job = get_backup_job(get_service_by_id(repo_name)) + if job is not None: # only update status if we run under some job + Jobs.update( + job, + 
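+                                # restic's JSON status lines carry percent_done as a 0..1 fraction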
JobStatus.RUNNING, + progress=ResticBackuper.progress_from_status_message( + message + ), + ) + messages.append(message) + return ResticBackuper._snapshot_from_backup_messages(messages, repo_name) + except ValueError as e: + raise ValueError("could not create a snapshot: ", messages) from e @staticmethod def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: @@ -114,6 +135,10 @@ class ResticBackuper(AbstractBackuper): return ResticBackuper._snapshot_from_fresh_summary(message, repo_name) raise ValueError("no summary message in restic json output") + @staticmethod + def progress_from_status_message(message: object) -> int: + return int(message["percent_done"]) + @staticmethod def _snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot: return Snapshot( diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 94d8ef3..89978b1 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -235,6 +235,11 @@ def assert_job_has_run(job_type): assert JobStatus.RUNNING in Jobs.status_updates(job) +def assert_job_had_progress(job_type): + job = [job for job in finished_jobs() if job.type_id == job_type][0] + assert len(Jobs.progress_updates(job)) > 0 + + def test_backup_service_task(backups, dummy_service): handle = start_backup(dummy_service) handle(blocking=True) @@ -246,6 +251,7 @@ def test_backup_service_task(backups, dummy_service): job_type_id = f"services.{id}.backup" assert_job_finished(job_type_id, count=1) assert_job_has_run(job_type_id) + assert_job_had_progress(job_type_id) def test_restore_snapshot_task(backups, dummy_service): From c8512eacdc2cfc61a15b98059e48da054196b3ce Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 17 May 2023 20:21:37 +0000 Subject: [PATCH 348/537] refactor(backups): refactor realtime updating --- selfprivacy_api/backup/restic_backuper.py | 26 +++++++++++------------ 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 3e72561..bc92148 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -110,19 +110,10 @@ class ResticBackuper(AbstractBackuper): ) messages = [] + job = get_backup_job(get_service_by_id(repo_name)) try: for raw_message in ResticBackuper.output_yielder(backup_command): - message = self.parse_json_output(raw_message) - if message["message_type"] == "status": - job = get_backup_job(get_service_by_id(repo_name)) - if job is not None: # only update status if we run under some job - Jobs.update( - job, - JobStatus.RUNNING, - progress=ResticBackuper.progress_from_status_message( - message - ), - ) + message = self.parse_message(raw_message, job) messages.append(message) return ResticBackuper._snapshot_from_backup_messages(messages, repo_name) except ValueError as e: @@ -135,9 +126,16 @@ class ResticBackuper(AbstractBackuper): return ResticBackuper._snapshot_from_fresh_summary(message, repo_name) raise ValueError("no summary message in restic json output") - @staticmethod - def progress_from_status_message(message: object) -> int: - return int(message["percent_done"]) + def parse_message(self, raw_message, job=None) -> object: + message = self.parse_json_output(raw_message) + if message["message_type"] == "status": + if job is not None: # only update status if we run under some job + Jobs.update( + job, + JobStatus.RUNNING, + progress=int(message["percent_done"]), + ) + return message @staticmethod def 
_snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot: From c5c41b3ced570fd0180ee4bd209c562f0eb97feb Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 17 May 2023 20:24:29 +0000 Subject: [PATCH 349/537] refactor(backups): remove extraneous asserts from jobs --- selfprivacy_api/jobs/__init__.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 5c3ea62..5e86c5f 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -134,8 +134,6 @@ class Jobs: def log_status_update(job: Job, status: JobStatus): redis = RedisPool().get_connection() key = _status_log_key_from_uuid(job.uid) - if redis.exists(key): - assert redis.type(key) == "list" redis.lpush(key, status.value) redis.expire(key, 10) @@ -143,8 +141,6 @@ class Jobs: def log_progress_update(job: Job, progress: int): redis = RedisPool().get_connection() key = _progress_log_key_from_uuid(job.uid) - if redis.exists(key): - assert redis.type(key) == "list" redis.lpush(key, progress) redis.expire(key, 10) From d10bf9992765a7333c26b301b0768dc9f732d6b2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 22 May 2023 16:01:57 +0000 Subject: [PATCH 350/537] fix(backups): make sure location and credentials get properly passed around --- selfprivacy_api/backup/providers/provider.py | 3 ++- tests/test_graphql/test_backup.py | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index 017c03d..ce9d055 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -12,6 +12,7 @@ class AbstractBackupProvider(ABC): def backuper(self) -> AbstractBackuper: raise NotImplementedError - def __init__(self, login="", key=""): + def __init__(self, login="", key="", location=""): + self.backuper.set_creds(login, key, location) self.login = login self.key = key diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 89978b1..95001cd 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -109,6 +109,9 @@ def test_config_load(generic_userdata): assert provider.login == "ID" assert provider.key == "KEY" + assert provider.backuper.account == "ID" + assert provider.backuper.key == "KEY" + def test_select_backend(): provider = providers.get_provider(BackupProvider.BACKBLAZE) From d0b27da641b9e1c4613867c6f9fd6c14727640f8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 29 May 2023 15:34:26 +0000 Subject: [PATCH 351/537] feature(backups): init repo mutation --- selfprivacy_api/backup/__init__.py | 34 ++++++++---- selfprivacy_api/backup/providers/provider.py | 6 ++- selfprivacy_api/backup/storage.py | 6 ++- .../graphql/mutations/backup_mutations.py | 53 +++++++++++++++++++ selfprivacy_api/models/backup/provider.py | 2 + 5 files changed, 89 insertions(+), 12 deletions(-) create mode 100644 selfprivacy_api/graphql/mutations/backup_mutations.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f3e2ba5..a5d5416 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -29,6 +29,9 @@ class Backups: Storage.store_testrepo_path(file_path) Storage.store_provider(provider) + def set_provider(provider: AbstractBackupProvider): + Storage.store_provider(provider) + @staticmethod def get_last_backed_up(service: Service) -> Optional[datetime]: """Get a timezone-aware time 
of the last backup of a service""" @@ -126,19 +129,21 @@ class Backups: return Backups.lookup_provider() @staticmethod - def set_provider(kind: str, login: str, key: str): - provider = Backups.construct_provider(kind, login, key) + def set_provider(kind: str, login: str, key: str, location: str, repo_id: str = ""): + provider = Backups.construct_provider(kind, login, key, location, id) Storage.store_provider(provider) @staticmethod - def construct_provider(kind: str, login: str, key: str): + def construct_provider( + kind: str, login: str, key: str, location: str, repo_id: str = "" + ): provider_class = get_provider(BackupProvider[kind]) if kind == "FILE": path = Storage.get_testrepo_path() return provider_class(path) - return provider_class(login=login, key=key) + return provider_class(login=login, key=key, location=location, repo_id=repo_id) @staticmethod def reset(): @@ -169,17 +174,19 @@ class Backups: if "backblaze" in user_data.keys(): account = user_data["backblaze"]["accountId"] key = user_data["backblaze"]["accountKey"] + location = user_data["backblaze"]["bucket"] provider_string = "BACKBLAZE" return Backups.construct_provider( - kind=provider_string, login=account, key=key + kind=provider_string, login=account, key=key, location=location ) return None account = user_data["backup"]["accountId"] key = user_data["backup"]["accountKey"] provider_string = user_data["backup"]["provider"] + location = user_data["backup"]["bucket"] return Backups.construct_provider( - kind=provider_string, login=account, key=key + kind=provider_string, login=account, key=key, location=location ) @staticmethod @@ -188,7 +195,11 @@ class Backups: if provider_model is None: return None return Backups.construct_provider( - provider_model.kind, provider_model.login, provider_model.key + provider_model.kind, + provider_model.login, + provider_model.key, + provider_model.location, + provider_model.repo_id, ) @staticmethod @@ -214,10 +225,13 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) @staticmethod - def init_repo(service: Service): - repo_name = service.get_id() + def init_repo(service: Optional[Service] = None): + if service is not None: + repo_name = service.get_id() + Backups.provider().backuper.init(repo_name) - Storage.mark_as_init(service) + if service is not None: + Storage.mark_as_init(service) @staticmethod def is_initted(service: Service) -> bool: diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index ce9d055..c303d4e 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -12,7 +12,11 @@ class AbstractBackupProvider(ABC): def backuper(self) -> AbstractBackuper: raise NotImplementedError - def __init__(self, login="", key="", location=""): + def __init__(self, login="", key="", location="", repo_id=""): self.backuper.set_creds(login, key, location) self.login = login self.key = key + self.location = location + # We do not need to do anything with this one + # Just remember in case the app forgets + self.repo_id = repo_id diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 7ca5f18..072c80f 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -146,7 +146,11 @@ class Storage: redis, REDIS_PROVIDER_KEY, BackupProviderModel( - kind=get_kind(provider), login=provider.login, key=provider.key + kind=get_kind(provider), + login=provider.login, + key=provider.key, + location=provider.location, + 
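+                # repo_id is persisted only so the client app can read it back later; the API itself never uses it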
repo_id=provider.repo_id, ), ) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py new file mode 100644 index 0000000..4b61f43 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -0,0 +1,53 @@ +import datetime +import typing +import strawberry +from strawberry.types import Info + +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericMutationReturn, + MutationReturnInterface, +) +from selfprivacy_api.graphql.queries.backup import BackupConfiguration +from selfprivacy_api.graphql.queries.providers import BackupProvider + +from selfprivacy_api.backup import Backups + + +@strawberry.input +class InitializeRepositoryInput: + """Initialize repository input""" + + provider: BackupProvider + # The following field may become optional for other providers? + # Backblaze takes bucket id and name + location_id: str + location_name: str + # Key ID and key for Backblaze + login: str + password: str + + +@strawberry.type +class GenericBackupConfigReturn(MutationReturnInterface): + """Generic backup config return""" + + configuration: typing.Optional[BackupConfiguration] + + +@strawberry.type +class BackupMutations: + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def initialize_repository( + self, repository: InitializeRepositoryInput + ) -> GenericBackupConfigReturn: + """Initialize a new repository""" + provider = Backups.construct_provider( + kind=repository.provider, + login=repository.login, + key=repository.password, + location=repository.location_name, + repo_id=repository.location_id, + ) + Backups.set_provider(provider) + Backups.init_repo() diff --git a/selfprivacy_api/models/backup/provider.py b/selfprivacy_api/models/backup/provider.py index e454c39..e05a7f7 100644 --- a/selfprivacy_api/models/backup/provider.py +++ b/selfprivacy_api/models/backup/provider.py @@ -7,3 +7,5 @@ class BackupProviderModel(BaseModel): kind: str login: str key: str + location: str + repo_id: str # for app usage, not for us From e7683352cde5d3cef71d5c3816c8b937bf0a3787 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 29 May 2023 16:12:22 +0000 Subject: [PATCH 352/537] feature(backups): a graphql query to get provider info --- .../graphql/common_types/service.py | 7 ++++ selfprivacy_api/graphql/queries/backup.py | 32 +++++++++++++++++-- 2 files changed, 36 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 8f27386..9e04254 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -108,6 +108,13 @@ class Service: return None +@strawberry.type +class SnapshotInfo: + id: str + service: "Service" + created_at: datetime.datetime + + def service_to_graphql_service(service: ServiceInterface) -> Service: """Convert service to graphql service""" return Service( diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index ef61b10..80b08e9 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -2,13 +2,39 @@ # pylint: disable=too-few-public-methods import typing import strawberry -from selfprivacy_api.graphql.common_types.backup_snapshot import SnapshotInfo + + +from selfprivacy_api.backup import Backups +from selfprivacy_api.backup.local_secret import LocalBackupSecret +from 
selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.graphql.common_types.service import SnapshotInfo + + +@strawberry.type +class BackupConfiguration: + provider: BackupProvider + # When server is lost, the app should have the key to decrypt backups on a new server + encryption_key: str + # If none, autobackups are disabled + autobackup_period: typing.Optional[int] = None + # Bucket name for Backblaze, path for some other providers + location_name: typing.Optional[str] = None + location_id: typing.Optional[str] = None + # False when repo is not initialized and not ready to be used + is_initialized: bool @strawberry.type class Backup: - backend: str + @strawberry.field + def configuration() -> BackupConfiguration: + config = BackupConfiguration() + config.encryption_key = LocalBackupSecret.get() + config.is_initialized = Backups.is_initted() + config.autobackup_period = Backups.autobackup_period_minutes() + config.location_name = Backups.provider().location + config.location_id = Backups.provider().repo_id @strawberry.field - def get_backups(self) -> typing.List[SnapshotInfo]: + def all_snapshots(self) -> typing.List[SnapshotInfo]: return [] From bdae6cfb75f4d9c8b842687b68935b86d73a8098 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 29 May 2023 16:50:14 +0000 Subject: [PATCH 353/537] feature(backups): global init instead of per-service --- selfprivacy_api/backup/__init__.py | 14 ++++++------- selfprivacy_api/backup/restic_backuper.py | 4 ++-- selfprivacy_api/backup/storage.py | 10 ++++------ tests/test_graphql/test_backup.py | 24 +++++++++++------------ 4 files changed, 24 insertions(+), 28 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index a5d5416..b10ee39 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -229,19 +229,17 @@ class Backups: if service is not None: repo_name = service.get_id() - Backups.provider().backuper.init(repo_name) - if service is not None: - Storage.mark_as_init(service) + Backups.provider().backuper.init() + Storage.mark_as_init() @staticmethod - def is_initted(service: Service) -> bool: - repo_name = service.get_id() - if Storage.has_init_mark(service): + def is_initted() -> bool: + if Storage.has_init_mark(): return True - initted = Backups.provider().backuper.is_initted(repo_name) + initted = Backups.provider().backuper.is_initted() if initted: - Storage.mark_as_init(service) + Storage.mark_as_init() return True return False diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index bc92148..c16f444 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -145,7 +145,7 @@ class ResticBackuper(AbstractBackuper): service_name=repo_name, ) - def init(self, repo_name): + def init(self): init_command = self.restic_command( "init", ) @@ -159,7 +159,7 @@ class ResticBackuper(AbstractBackuper): if not "created restic repository" in output: raise ValueError("cannot init a repo: " + output) - def is_initted(self, repo_name: str) -> bool: + def is_initted(self) -> bool: command = self.restic_command( "check", "--json", diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 072c80f..dd23210 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -160,13 +160,11 @@ class Storage: return provider_model @staticmethod - def has_init_mark(service: Service) -> bool: - repo_name = 
service.get_id() - if redis.exists(REDIS_INITTED_CACHE_PREFIX + repo_name): + def has_init_mark() -> bool: + if redis.exists(REDIS_INITTED_CACHE_PREFIX): return True return False @staticmethod - def mark_as_init(service: Service): - repo_name = service.get_id() - redis.set(REDIS_INITTED_CACHE_PREFIX + repo_name, 1) + def mark_as_init(): + redis.set(REDIS_INITTED_CACHE_PREFIX, 1) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 95001cd..3e1536e 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -120,7 +120,7 @@ def test_select_backend(): def test_file_backend_init(file_backup): - file_backup.backuper.init("somerepo") + file_backup.backuper.init() def test_backup_simple_file(raw_dummy_service, file_backup): @@ -130,7 +130,7 @@ def test_backup_simple_file(raw_dummy_service, file_backup): assert file_backup is not None name = service.get_id() - file_backup.backuper.init(name) + file_backup.backuper.init() def test_backup_service(dummy_service, backups): @@ -217,11 +217,11 @@ def test_sizing(backups, dummy_service): def test_init_tracking(backups, raw_dummy_service): - assert Backups.is_initted(raw_dummy_service) is False + assert Backups.is_initted() is False - Backups.init_repo(raw_dummy_service) + Backups.init_repo() - assert Backups.is_initted(raw_dummy_service) is True + assert Backups.is_initted() is True def finished_jobs(): @@ -414,21 +414,21 @@ def test_snapshots_caching(backups, dummy_service): # Storage def test_init_tracking_caching(backups, raw_dummy_service): - assert Storage.has_init_mark(raw_dummy_service) is False + assert Storage.has_init_mark() is False - Storage.mark_as_init(raw_dummy_service) + Storage.mark_as_init() - assert Storage.has_init_mark(raw_dummy_service) is True - assert Backups.is_initted(raw_dummy_service) is True + assert Storage.has_init_mark() is True + assert Backups.is_initted() is True # Storage def test_init_tracking_caching2(backups, raw_dummy_service): - assert Storage.has_init_mark(raw_dummy_service) is False + assert Storage.has_init_mark() is False - Backups.init_repo(raw_dummy_service) + Backups.init_repo() - assert Storage.has_init_mark(raw_dummy_service) is True + assert Storage.has_init_mark() is True # Storage From ac9fbbff3e7301c7333c604590352b32f1036530 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 29 May 2023 18:24:38 +0000 Subject: [PATCH 354/537] feature(backups): drop repository call --- selfprivacy_api/graphql/mutations/backup_mutations.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 4b61f43..8bb2d94 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -9,6 +9,7 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( MutationReturnInterface, ) from selfprivacy_api.graphql.queries.backup import BackupConfiguration +from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup import Backups @@ -51,3 +52,10 @@ class BackupMutations: ) Backups.set_provider(provider) Backups.init_repo() + + +@strawberry.mutation(permission_classes=[IsAuthenticated]) +def remove_repository(self) -> GenericBackupConfigReturn: + """Remove repository""" + Backups.reset() + return Backup.configuration() From a76b4ac134967f92083122cf44974b5546f3df0f Mon Sep 17 00:00:00 
2001 From: Houkime <> Date: Wed, 31 May 2023 11:30:09 +0000 Subject: [PATCH 355/537] feature(backups): start backup graphql API --- .../graphql/mutations/backup_mutations.py | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 8bb2d94..4704df2 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -13,6 +13,8 @@ from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup import Backups +from selfprivacy_api.services import get_all_services, get_service_by_id +from selfprivacy_api.backup.tasks import start_backup @strawberry.input @@ -36,6 +38,10 @@ class GenericBackupConfigReturn(MutationReturnInterface): configuration: typing.Optional[BackupConfiguration] +class GenericJobMutationReturn: + pass + + @strawberry.type class BackupMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -53,9 +59,24 @@ class BackupMutations: Backups.set_provider(provider) Backups.init_repo() + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def remove_repository(self) -> GenericBackupConfigReturn: + """Remove repository""" + Backups.reset() + return Backup.configuration() -@strawberry.mutation(permission_classes=[IsAuthenticated]) -def remove_repository(self) -> GenericBackupConfigReturn: - """Remove repository""" - Backups.reset() - return Backup.configuration() + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def start_backup( + self, service_id: typing.Optional[str] = None + ) -> GenericJobMutationReturn: + """Start backup. If service not provided, backup all services""" + if service_id is None: + for service in get_all_services(): + start_backup(service) + else: + service = get_service_by_id(service_id) + if service is None: + raise ValueError(f"nonexistent service: {service_id}") + start_backup(service) + + return GenericJobMutationReturn() From 761b6be4e5f197bb9450df012d250c4b87363d20 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 31 May 2023 13:16:08 +0000 Subject: [PATCH 356/537] refactor(backups): global snapshots --- selfprivacy_api/backup/__init__.py | 2 +- selfprivacy_api/backup/restic_backuper.py | 6 +++--- tests/test_graphql/test_backup.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b10ee39..4e13ea9 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -253,7 +253,7 @@ class Backups: # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing? 
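        # (each cached entry expires on its own timer, so the cached list loses its oldest snapshots first)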
- upstream_snapshots = Backups.provider().backuper.get_snapshots(service_id) + upstream_snapshots = Backups.provider().backuper.get_snapshots() Backups.sync_service_snapshots(service_id, upstream_snapshots) return upstream_snapshots diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index c16f444..2c98b46 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -212,7 +212,7 @@ class ResticBackuper(AbstractBackuper): if "restoring" not in output: raise ValueError("cannot restore a snapshot: " + output) - def _load_snapshots(self, repo_name) -> object: + def _load_snapshots(self) -> object: """ Load list of snapshots from repository raises Value Error if repo does not exist @@ -237,10 +237,10 @@ class ResticBackuper(AbstractBackuper): except ValueError as e: raise ValueError("Cannot load snapshots: ") from e - def get_snapshots(self, repo_name) -> List[Snapshot]: + def get_snapshots(self) -> List[Snapshot]: """Get all snapshots from the repo""" snapshots = [] - for restic_snapshot in self._load_snapshots(repo_name): + for restic_snapshot in self._load_snapshots(): snapshot = Snapshot( id=restic_snapshot["short_id"], created_at=restic_snapshot["time"], diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 3e1536e..a50fed4 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -151,7 +151,7 @@ def test_backup_service(dummy_service, backups): def test_no_repo(memory_backup): with pytest.raises(ValueError): - assert memory_backup.backuper.get_snapshots("") == [] + assert memory_backup.backuper.get_snapshots() == [] def test_one_snapshot(backups, dummy_service): From 44ddd27e84789682df82c24a573fc048bae1dc1e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 1 Jun 2023 12:44:14 +0000 Subject: [PATCH 357/537] fix(backups): return correct snapshots per service --- selfprivacy_api/backup/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 4e13ea9..7a3b37d 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -255,7 +255,7 @@ class Backups: upstream_snapshots = Backups.provider().backuper.get_snapshots() Backups.sync_service_snapshots(service_id, upstream_snapshots) - return upstream_snapshots + return [snap for snap in upstream_snapshots if snap.service_name == service_id] @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): From 7e022e0cfe1ebaf5a389254ac8847739ddcfd4b5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 1 Jun 2023 14:03:26 +0000 Subject: [PATCH 358/537] feature(backups): graphql mutation for restore --- selfprivacy_api/backup/__init__.py | 19 +++++++++++++++++++ selfprivacy_api/backup/storage.py | 12 ++++++++++++ .../graphql/mutations/backup_mutations.py | 12 +++++++++++- 3 files changed, 42 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7a3b37d..7001d03 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -257,6 +257,25 @@ class Backups: Backups.sync_service_snapshots(service_id, upstream_snapshots) return [snap for snap in upstream_snapshots if snap.service_name == service_id] + @staticmethod + def get_snapshot_by_id(id: str) -> Optional[Snapshot]: + snap = Storage.get_cached_snapshot_by_id(id) + if snap is not None: + return snap + 
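+        # (a cache miss falls through to sync_all_snapshots(), which re-runs restic against the repository)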
+ # Possibly our cache entry got invalidated, let's try one more time + Backups.sync_all_snapshots() + snap = Storage.get_cached_snapshot_by_id(id) + + return snap + + @staticmethod + def sync_all_snapshots(): + upstream_snapshots = Backups.provider().backuper.get_snapshots() + Storage.invalidate_snapshot_storage() + for snapshot in upstream_snapshots: + Storage.cache_snapshot(snapshot) + @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index dd23210..38155e6 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -47,6 +47,11 @@ class Storage: for key in redis.keys(prefix + "*"): redis.delete(key) + @staticmethod + def invalidate_snapshot_storage(): + for key in redis.keys(REDIS_SNAPSHOTS_PREFIX + "*"): + redis.delete(key) + @staticmethod def store_testrepo_path(path: str): redis.set(REDIS_REPO_PATH_KEY, path) @@ -97,6 +102,13 @@ class Storage: snapshot_key = Storage.__snapshot_key(snapshot) redis.delete(snapshot_key) + @staticmethod + def get_cached_snapshot_by_id(snapshot_id: str) -> Optional[Snapshot]: + key = redis.keys(REDIS_SNAPSHOTS_PREFIX + snapshot_id) + if not redis.exists(key): + return None + return hash_as_model(redis, key, Snapshot) + @staticmethod def get_cached_snapshots() -> List[Snapshot]: keys = redis.keys(REDIS_SNAPSHOTS_PREFIX + "*") diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 4704df2..8ae19bb 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -14,7 +14,7 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_all_services, get_service_by_id -from selfprivacy_api.backup.tasks import start_backup +from selfprivacy_api.backup.tasks import start_backup, restore_snapshot @strawberry.input @@ -80,3 +80,13 @@ class BackupMutations: start_backup(service) return GenericJobMutationReturn() + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def restore_backup(self, snapshot_id: str) -> GenericJobMutationReturn: + """Restore backup""" + snap = Backups.get_snapshot_by_id(snapshot_id) + if snap in None: + raise ValueError(f"No such snapshot: {snapshot_id}") + restore_snapshot(snap) + + return GenericJobMutationReturn() From 891993e4cd68eb6aa514f17d9b1e30e6a720112d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 1 Jun 2023 16:12:32 +0000 Subject: [PATCH 359/537] feature(backups): a graphql call to invalidate cache --- selfprivacy_api/backup/__init__.py | 4 ++++ selfprivacy_api/graphql/mutations/backup_mutations.py | 6 ++++++ 2 files changed, 10 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7001d03..86445ba 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -269,6 +269,10 @@ class Backups: return snap + @staticmethod + def force_snapshot_reload(): + Backups.sync_all_snapshots() + @staticmethod def sync_all_snapshots(): upstream_snapshots = Backups.provider().backuper.get_snapshots() diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 8ae19bb..c5c63bf 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ 
b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -90,3 +90,9 @@ class BackupMutations: restore_snapshot(snap) return GenericJobMutationReturn() + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def force_snapshots_reload(self) -> GenericMutationReturn: + """Force snapshots reload""" + Backups.force_snapshot_reload() + return GenericMutationReturn() From cc073155db6d62554e33fc61b05862596f150741 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 5 Jun 2023 11:19:01 +0000 Subject: [PATCH 360/537] feature(backups): return a snapshot from start_backup --- selfprivacy_api/backup/__init__.py | 1 + selfprivacy_api/backup/storage.py | 2 +- tests/test_graphql/test_backup.py | 12 ++++++++++++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 86445ba..37b9517 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -223,6 +223,7 @@ class Backups: raise e Jobs.update(job, status=JobStatus.FINISHED) + return snapshot @staticmethod def init_repo(service: Optional[Service] = None): diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 38155e6..680f39f 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -104,7 +104,7 @@ class Storage: @staticmethod def get_cached_snapshot_by_id(snapshot_id: str) -> Optional[Snapshot]: - key = redis.keys(REDIS_SNAPSHOTS_PREFIX + snapshot_id) + key = REDIS_SNAPSHOTS_PREFIX + snapshot_id if not redis.exists(key): return None return hash_as_model(redis, key, Snapshot) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index a50fed4..5ac024f 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -243,6 +243,18 @@ def assert_job_had_progress(job_type): assert len(Jobs.progress_updates(job)) > 0 +def test_snapshots_by_id(backups, dummy_service): + snap1 = Backups.back_up(dummy_service) + snap2 = Backups.back_up(dummy_service) + snap3 = Backups.back_up(dummy_service) + + assert snap2.id is not None + assert snap2.id != "" + + assert len(Backups.get_snapshots(dummy_service)) == 3 + assert Backups.get_snapshot_by_id(snap2.id).id == snap2.id + + def test_backup_service_task(backups, dummy_service): handle = start_backup(dummy_service) handle(blocking=True) From 550f7fa620108bc891d3fac7ec8927d01765b65f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 5 Jun 2023 11:28:53 +0000 Subject: [PATCH 361/537] refactor(backups): introduce get_all_snapshots() --- selfprivacy_api/backup/__init__.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 37b9517..7678258 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -247,16 +247,20 @@ class Backups: @staticmethod def get_snapshots(service: Service) -> List[Snapshot]: - service_id = service.get_id() - cached_snapshots = Backups.get_cached_snapshots_service(service_id) + snapshots = Backups.get_all_snapshots() + return [snap for snap in snapshots if snap.service_name == service.get_id()] + + @staticmethod + def get_all_snapshots() -> List[Snapshot]: + cached_snapshots = Storage.get_cached_snapshots() + if cached_snapshots != []: + return cached_snapshots + # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing?
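        # sync_all_snapshots() invalidates the whole snapshot cache and re-caches every snapshot from the repository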
upstream_snapshots = Backups.provider().backuper.get_snapshots() - Backups.sync_service_snapshots(service_id, upstream_snapshots) - return [snap for snap in upstream_snapshots if snap.service_name == service_id] + Backups.sync_all_snapshots() + return upstream_snapshots @staticmethod def get_snapshot_by_id(id: str) -> Optional[Snapshot]: From e3545d4541b5ef60f1ea9789d8142e6a6aad7b45 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 5 Jun 2023 11:36:58 +0000 Subject: [PATCH 362/537] feature(backups): get all snapshots if requested by api --- selfprivacy_api/graphql/queries/backup.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 80b08e9..4d41c36 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -37,4 +37,11 @@ class Backup: @strawberry.field def all_snapshots(self) -> typing.List[SnapshotInfo]: - return [] + result = [] + snapshots = Backups.get_all_snapshots() + for snap in snapshots: + graphql_snap = SnapshotInfo( + id=snap.id, service=snap.service_name, created_at=snap.created_at + ) + result.append(graphql_snap) + return result From 0b8f77e6f75c20804057648f77c51f0e7a3edbd1 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 5 Jun 2023 11:49:37 +0000 Subject: [PATCH 363/537] feature(backups): set autobackup period from gql --- selfprivacy_api/graphql/mutations/backup_mutations.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index c5c63bf..a227106 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -65,6 +65,14 @@ class BackupMutations: Backups.reset() return Backup.configuration() + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def set_autobackup_period( + self, period: typing.Optional[int] = None + ) -> GenericBackupConfigReturn: + """Set autobackup period. 
None is to disable autobackup""" + Backups.set_autobackup_period_minutes(period) + return Backup.configuration() + @strawberry.mutation(permission_classes=[IsAuthenticated]) def start_backup( self, service_id: typing.Optional[str] = None From 44e45a5124d58e2b4716cc8649e002762f1ec909 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 14:11:22 +0000 Subject: [PATCH 364/537] BREAKING CHANGE(backups): support only individual service backup requests(combinable) --- selfprivacy_api/backup/jobs.py | 8 +++ .../graphql/mutations/backup_mutations.py | 54 ++++++++++++------- 2 files changed, 43 insertions(+), 19 deletions(-) diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py index ebc2ea0..2293da0 100644 --- a/selfprivacy_api/backup/jobs.py +++ b/selfprivacy_api/backup/jobs.py @@ -12,6 +12,10 @@ def backup_job_type(service: Service) -> str: return f"{job_type_prefix(service)}.backup" +def restore_job_type(service: Service) -> str: + return f"{job_type_prefix(service)}.restore" + + def get_jobs_by_service(service: Service) -> List[Job]: result = [] for job in Jobs.get_jobs(): @@ -54,3 +58,7 @@ def get_job_by_type(type_id: str) -> Optional[Job]: def get_backup_job(service: Service) -> Optional[Job]: return get_job_by_type(backup_job_type(service)) + + +def get_restore_job(service: Service) -> Optional[Job]: + return get_job_by_type(restore_job_type(service)) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index a227106..8ddd9f6 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -6,15 +6,18 @@ from strawberry.types import Info from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, + GenericJobButationReturn, MutationReturnInterface, ) from selfprivacy_api.graphql.queries.backup import BackupConfiguration from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.graphql.common_types.jobs import job_to_api_job from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_all_services, get_service_by_id from selfprivacy_api.backup.tasks import start_backup, restore_snapshot +from selfprivacy_api.backup.jobs import get_backup_job, get_restore_job @strawberry.input @@ -38,10 +41,6 @@ class GenericBackupConfigReturn(MutationReturnInterface): configuration: typing.Optional[BackupConfiguration] -class GenericJobMutationReturn: - pass - - @strawberry.type class BackupMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -76,28 +75,45 @@ class BackupMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def start_backup( self, service_id: typing.Optional[str] = None - ) -> GenericJobMutationReturn: - """Start backup. 
If service not provided, backup all services""" - if service_id is None: - for service in get_all_services(): - start_backup(service) - else: - service = get_service_by_id(service_id) - if service is None: - raise ValueError(f"nonexistent service: {service_id}") - start_backup(service) + ) -> GenericJobButationReturn: + """Start backup""" - return GenericJobMutationReturn() + service = get_service_by_id(service_id) + if service is None: + return GenericJobButationReturn( + success=False, + code=300, + message=f"nonexistent service: {service_id}", + job=None, + ) + start_backup(service) + job = get_backup_job(service) + + return GenericJobButationReturn( + success=True, code=200, message="Backup job queued", job=job_to_api_job(job) + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def restore_backup(self, snapshot_id: str) -> GenericJobMutationReturn: + def restore_backup(self, snapshot_id: str) -> GenericJobButationReturn: """Restore backup""" snap = Backups.get_snapshot_by_id(snapshot_id) - if snap in None: - raise ValueError(f"No such snapshot: {snapshot_id}") + service = get_service_by_id(snap.service_name) + if snap is None: + return GenericJobButationReturn( + success=False, + code=300, + message=f"No such snapshot: {snapshot_id}", + job=None, + ) + restore_snapshot(snap) - return GenericJobMutationReturn() + return GenericJobButationReturn( + success=True, + code=200, + message="restore job created", + jobs=[get_restore_job(service)], + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def force_snapshots_reload(self) -> GenericMutationReturn: From 5100f1a497d62b5e3bfaccf472a21df78d756d69 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 14:25:17 +0000 Subject: [PATCH 365/537] fix(backups): return 400, not 300 --- selfprivacy_api/graphql/mutations/backup_mutations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 8ddd9f6..9c2f567 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -101,7 +101,7 @@ class BackupMutations: if snap is None: return GenericJobButationReturn( success=False, - code=300, + code=400, message=f"No such snapshot: {snapshot_id}", job=None, ) From 792dcd459d19e7f58858024b9104ada8305ab137 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 14:27:37 +0000 Subject: [PATCH 366/537] fix(backups): return one job, not an array of one --- selfprivacy_api/graphql/mutations/backup_mutations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 9c2f567..2a916f5 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -112,7 +112,7 @@ class BackupMutations: success=True, code=200, message="restore job created", - jobs=[get_restore_job(service)], + job=get_restore_job(service), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) From 6da0791b4787f62dd1e104f5ed0f686a9033d392 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 15:05:58 +0000 Subject: [PATCH 367/537] feature(backups): integration between restore and jobs --- selfprivacy_api/backup/__init__.py | 26 ++++++++++++++++++++++---- selfprivacy_api/backup/jobs.py | 19 +++++++++++++++++++ selfprivacy_api/backup/tasks.py | 5 ++++- 3 files 
changed, 45 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7678258..b313165 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -13,7 +13,12 @@ from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.backup.storage import Storage -from selfprivacy_api.backup.jobs import get_backup_job, add_backup_job +from selfprivacy_api.backup.jobs import ( + get_backup_job, + add_backup_job, + get_restore_job, + add_restore_job, +) from selfprivacy_api.jobs import Jobs, JobStatus @@ -285,6 +290,7 @@ class Backups: for snapshot in upstream_snapshots: Storage.cache_snapshot(snapshot) + # to be deprecated/internalized in favor of restore_snapshot() @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): repo_name = service.get_id() @@ -294,9 +300,21 @@ class Backups: @staticmethod def restore_snapshot(snapshot: Snapshot): - Backups.restore_service_from_snapshot( - get_service_by_id(snapshot.service_name), snapshot.id - ) + service = get_service_by_id(snapshot.service_name) + + job = get_restore_job(service) + if job is None: + job = add_restore_job(snapshot) + + Jobs.update(job, status=JobStatus.RUNNING) + try: + Backups.restore_service_from_snapshot(service, snapshot.id) + service.post_restore() + except Exception as e: + Jobs.update(job, status=JobStatus.ERROR) + raise e + + Jobs.update(job, status=JobStatus.FINISHED) @staticmethod def service_snapshot_size(service: Service, snapshot_id: str) -> float: diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py index 2293da0..5a9cb0d 100644 --- a/selfprivacy_api/backup/jobs.py +++ b/selfprivacy_api/backup/jobs.py @@ -1,7 +1,9 @@ from typing import Optional, List +from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.jobs import Jobs, Job, JobStatus from selfprivacy_api.services.service import Service +from selfprivacy_api.services import get_service_by_id def job_type_prefix(service: Service) -> str: @@ -47,6 +49,23 @@ def add_backup_job(service: Service) -> Job: return job +def add_restore_job(snapshot: Snapshot) -> Job: + service = get_service_by_id(snapshot.service_name) + if is_something_queued_for(service): + message = ( + f"Cannot start a restore of {service.get_id()}, another operation is queued: " + + get_jobs_by_service(service)[0].type_id + ) + raise ValueError(message) + display_name = service.get_display_name() + job = Jobs.add( + type_id=restore_job_type(service), + name=f"Restore {display_name}", + description=f"restoring {display_name} from {snapshot.id}", + ) + return job + + def get_job_by_type(type_id: str) -> Optional[Job]: for job in Jobs.get_jobs(): if job.type_id == type_id and job.status in [ diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index d92a926..e88f651 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -2,9 +2,10 @@ from datetime import datetime from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey +from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service from selfprivacy_api.backup import Backups -from selfprivacy_api.backup.jobs import add_backup_job +from selfprivacy_api.backup.jobs import add_backup_job, 
add_restore_job def validate_datetime(dt: datetime): @@ -32,6 +33,8 @@ def start_backup(service: Service) -> bool: @huey.task() def restore_snapshot(snapshot: Snapshot) -> bool: + add_restore_job(snapshot) + Backups.restore_snapshot(snapshot) return True From 780c12df6c135b269c3e8f69961874b8b912908e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 15:55:38 +0000 Subject: [PATCH 368/537] refactor(backups): expect one more error of restic json output parsing --- selfprivacy_api/backup/restic_backuper.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/restic_backuper.py index 2c98b46..69bdea9 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/restic_backuper.py @@ -4,6 +4,7 @@ import datetime from typing import List from collections.abc import Iterable +from json.decoder import JSONDecodeError from selfprivacy_api.backup.backuper import AbstractBackuper from selfprivacy_api.models.backup.snapshot import Snapshot @@ -208,6 +209,7 @@ class ResticBackuper(AbstractBackuper): restore_command, stdout=subprocess.PIPE, shell=False ) as handle: + # for some reason restore does not support nice reporting of progress via json output = handle.communicate()[0].decode("utf-8") if "restoring" not in output: raise ValueError("cannot restore a snapshot: " + output) @@ -259,7 +261,12 @@ class ResticBackuper(AbstractBackuper): truncated_output = output[starting_index:] json_messages = truncated_output.splitlines() if len(json_messages) == 1: - return json.loads(truncated_output) + try: + return json.loads(truncated_output) + except JSONDecodeError as e: + raise ValueError( + "There is no json in the restic output : " + output + ) from e result_array = [] for message in json_messages: From ad130e392caea5eaca29a4fb0c02b24949068097 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Jun 2023 16:33:13 +0000 Subject: [PATCH 369/537] feature(backups): check available space before restoring --- selfprivacy_api/backup/__init__.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b313165..d496758 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,5 +1,6 @@ from typing import List, Optional from datetime import datetime, timedelta +from os import statvfs from selfprivacy_api.models.backup.snapshot import Snapshot @@ -298,6 +299,21 @@ class Backups: Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folders) + @staticmethod + def assert_restorable(snapshot: Snapshot): + service = get_service_by_id(snapshot.service_name) + if service is None: + raise ValueError( + f"snapshot has a nonexistent service: {snapshot.service_name}" + ) + + needed_space = Backups.snapshot_restored_size(snapshot) + available_space = Backups.space_usable_for_service(service) + if needed_space > available_space: + raise ValueError( + f"we only have {available_space} bytes but snapshot needs{ needed_space}" + ) + @staticmethod def restore_snapshot(snapshot: Snapshot): service = get_service_by_id(snapshot.service_name) @@ -308,6 +324,7 @@ class Backups: Jobs.update(job, status=JobStatus.RUNNING) try: + Backups.assert_restorable(snapshot) Backups.restore_service_from_snapshot(service, snapshot.id) service.post_restore() except Exception as e: @@ -327,6 +344,16 @@ class Backups: get_service_by_id(snapshot.service_name), snapshot.id ) + 
@staticmethod + def space_usable_for_service(service: Service) -> bool: + folders = service.get_folders() + if folders == []: + raise ValueError("unallocated service", service.get_id()) + + fs_info = statvfs(folders[0]) + usable_bytes = fs_info.f_frsize * fs_info.f_bavail + return usable_bytes + @staticmethod def _store_last_snapshot(service_id: str, snapshot: Snapshot): """What do we do with a snapshot that is just made?""" From a4b0e6f20843c3eb3d8beb0414b9496f5202c52d Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 13 Jun 2023 23:54:02 +0300 Subject: [PATCH 370/537] fix: BackupConfiguration argument order --- selfprivacy_api/graphql/queries/backup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 4d41c36..97b4682 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -15,13 +15,13 @@ class BackupConfiguration: provider: BackupProvider # When server is lost, the app should have the key to decrypt backups on a new server encryption_key: str + # False when repo is not initialized and not ready to be used + is_initialized: bool # If none, autobackups are disabled autobackup_period: typing.Optional[int] = None # Bucket name for Backblaze, path for some other providers location_name: typing.Optional[str] = None location_id: typing.Optional[str] = None - # False when repo is not initialized and not ready to be used - is_initialized: bool @strawberry.type From 32a242b560a721c7afd8b5ce3264f44a445aa685 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 14 Jun 2023 00:00:29 +0300 Subject: [PATCH 371/537] feat(backups): register backups in GraphQL schema --- selfprivacy_api/graphql/schema.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index dff9304..e364dd1 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -6,6 +6,7 @@ from typing import AsyncGenerator import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations +from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations from selfprivacy_api.graphql.mutations.job_mutations import JobMutations from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations @@ -14,6 +15,7 @@ from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from selfprivacy_api.graphql.queries.api_queries import Api +from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.jobs import Job from selfprivacy_api.graphql.queries.services import Services from selfprivacy_api.graphql.queries.storage import Storage @@ -58,6 +60,11 @@ class Query: """Services queries""" return Services() + @strawberry.field(permission_classes=[IsAuthenticated]) + def backup(self) -> Backup: + """Backup queries""" + return Backup() + @strawberry.type class Mutation( @@ -68,6 +75,7 @@ class Mutation( StorageMutations, ServicesMutations, JobMutations, + BackupMutations, ): """Root schema for mutations""" From 42a5b6f70acfd0041d8156f1734bd7221e467a52 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 09:52:44 +0000 Subject: [PATCH 372/537] test(backups): test 
backup API - backing up --- selfprivacy_api/backup/tasks.py | 6 ------ .../graphql/mutations/backup_mutations.py | 14 ++++++++++---- selfprivacy_api/graphql/schema.py | 1 + 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index e88f651..bd3925d 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -23,18 +23,12 @@ def validate_datetime(dt: datetime): # huey tasks need to return something @huey.task() def start_backup(service: Service) -> bool: - # Backups can create the job, but doing this here - # allows us to see the job as queued before it is actually executed - add_backup_job(service) - Backups.back_up(service) return True @huey.task() def restore_snapshot(snapshot: Snapshot) -> bool: - add_restore_job(snapshot) - Backups.restore_snapshot(snapshot) return True diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 2a916f5..898702a 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -17,7 +17,7 @@ from selfprivacy_api.graphql.common_types.jobs import job_to_api_job from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_all_services, get_service_by_id from selfprivacy_api.backup.tasks import start_backup, restore_snapshot -from selfprivacy_api.backup.jobs import get_backup_job, get_restore_job +from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job @strawberry.input @@ -86,11 +86,16 @@ class BackupMutations: message=f"nonexistent service: {service_id}", job=None, ) + + job = add_backup_job(service) start_backup(service) - job = get_backup_job(service) + job = job_to_api_job(job) return GenericJobButationReturn( - success=True, code=200, message="Backup job queued", job=job_to_api_job(job) + success=True, + code=200, + message="Backup job queued", + job=job, ) @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -106,13 +111,14 @@ class BackupMutations: job=None, ) + job = add_restore_job(snap) restore_snapshot(snap) return GenericJobButationReturn( success=True, code=200, message="restore job created", - job=get_restore_job(service), + job=job, ) @strawberry.mutation(permission_classes=[IsAuthenticated]) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index e364dd1..9e40d7c 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -13,6 +13,7 @@ from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutatio from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations +from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations from selfprivacy_api.graphql.queries.api_queries import Api from selfprivacy_api.graphql.queries.backup import Backup From b04dfc6c4e25e14b37db3b4458c05054f951da0e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 10:06:01 +0000 Subject: [PATCH 373/537] fix(backups): register queries --- selfprivacy_api/graphql/schema.py | 1 + tests/common.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 9e40d7c..7107e20 100644 --- a/selfprivacy_api/graphql/schema.py +++ 
b/selfprivacy_api/graphql/schema.py @@ -21,6 +21,7 @@ from selfprivacy_api.graphql.queries.jobs import Job from selfprivacy_api.graphql.queries.services import Services from selfprivacy_api.graphql.queries.storage import Storage from selfprivacy_api.graphql.queries.system import System +from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.mutations.users_mutations import UserMutations from selfprivacy_api.graphql.queries.users import Users diff --git a/tests/common.py b/tests/common.py index 18e065c..e4a283d 100644 --- a/tests/common.py +++ b/tests/common.py @@ -24,5 +24,9 @@ def generate_users_query(query_array): return "query TestUsers {\n users {" + "\n".join(query_array) + "}\n}" +def generate_backup_query(query_array): + return "query TestBackup {\n backup {" + "\n".join(query_array) + "}\n}" + + def mnemonic_to_hex(mnemonic): return Mnemonic(language="english").to_entropy(mnemonic).hex() From f77556b60ee89741e7d8a0e725a9819ef26b62d5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 10:09:38 +0000 Subject: [PATCH 374/537] test(backups): actual testfile --- tests/test_graphql/test_api_backup.py | 38 +++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 tests/test_graphql/test_api_backup.py diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py new file mode 100644 index 0000000..1d944f4 --- /dev/null +++ b/tests/test_graphql/test_api_backup.py @@ -0,0 +1,38 @@ +from tests.test_graphql.test_backup import dummy_service, backups, raw_dummy_service + +# from tests.common import generate_api_query + +# from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations +from selfprivacy_api.jobs import Jobs, JobStatus + +API_BACK_UP_MUTATION = """ +mutation TestBackupService($service_id: String) { + startBackup(serviceId: $service_id) { + success + message + code + job { + uid + status + } + } +} +""" + + +def api_backup(authorized_client, service): + response = authorized_client.post( + "/graphql", + json={ + "query": API_BACK_UP_MUTATION, + "variables": {"service_id": service.get_id()}, + }, + ).json() + return response + + +def test_start_backup(authorized_client, dummy_service): + response = api_backup(authorized_client, dummy_service) + assert response["data"]["startBackup"]["success"] is True + job = response["data"]["startBackup"]["job"] + assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED From c60339444944405a716bad5cf1b350dba5621973 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 14 Jun 2023 00:43:01 +0300 Subject: [PATCH 375/537] fix(backups): try to actually get backup configuration --- selfprivacy_api/backup/providers/backblaze.py | 2 ++ .../backup/providers/local_file.py | 2 ++ selfprivacy_api/backup/providers/memory.py | 2 ++ selfprivacy_api/graphql/queries/backup.py | 23 +++++++++++-------- selfprivacy_api/graphql/queries/providers.py | 1 + 5 files changed, 20 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index 9ec5eba..f474a99 100644 --- a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -4,3 +4,5 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class Backblaze(AbstractBackupProvider): backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:") + + name = "BACKBLAZE" diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py 
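
For readers skimming the test helpers above: `generate_backup_query` (PATCH 373) only wraps a list of field selections in a fixed query envelope. A quick self-contained illustration of what it emits; the function body is copied from the patch, and the comment shows the resulting string:

    def generate_backup_query(query_array):
        return "query TestBackup {\n backup {" + "\n".join(query_array) + "}\n}"

    print(generate_backup_query(["allSnapshots { id }"]))
    # query TestBackup {
    #  backup {allSnapshots { id }}
    # }
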
index a20f615..95075dd 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -5,6 +5,8 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class LocalFileBackup(AbstractBackupProvider): backuper = ResticBackuper("", "", "memory") + name = "FILE" + # login and key args are for compatibility with generic provider methods. They are ignored. def __init__(self, filename: str, login: str = "", key: str = ""): super().__init__() diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py index 3f257bf..a481559 100644 --- a/selfprivacy_api/backup/providers/memory.py +++ b/selfprivacy_api/backup/providers/memory.py @@ -4,3 +4,5 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class InMemoryBackup(AbstractBackupProvider): backuper = ResticBackuper("", "", ":memory:") + + name = "MEMORY" diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 97b4682..c20be35 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -18,22 +18,25 @@ class BackupConfiguration: # False when repo is not initialized and not ready to be used is_initialized: bool # If none, autobackups are disabled - autobackup_period: typing.Optional[int] = None + autobackup_period: typing.Optional[int] # Bucket name for Backblaze, path for some other providers - location_name: typing.Optional[str] = None - location_id: typing.Optional[str] = None + location_name: typing.Optional[str] + location_id: typing.Optional[str] @strawberry.type class Backup: @strawberry.field - def configuration() -> BackupConfiguration: - config = BackupConfiguration() - config.encryption_key = LocalBackupSecret.get() - config.is_initialized = Backups.is_initted() - config.autobackup_period = Backups.autobackup_period_minutes() - config.location_name = Backups.provider().location - config.location_id = Backups.provider().repo_id + def configuration(self) -> BackupConfiguration: + encryption_key = LocalBackupSecret.get() + return BackupConfiguration( + provider=BackupProvider[Backups.provider().name], + encryption_key=encryption_key.decode() if encryption_key else "", + is_initialized=Backups.is_initted(), + autobackup_period=Backups.autobackup_period_minutes(), + location_name=Backups.provider().location, + location_id=Backups.provider().repo_id, + ) @strawberry.field def all_snapshots(self) -> typing.List[SnapshotInfo]: diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py index 2a9fcec..b9ca7ef 100644 --- a/selfprivacy_api/graphql/queries/providers.py +++ b/selfprivacy_api/graphql/queries/providers.py @@ -19,6 +19,7 @@ class ServerProvider(Enum): @strawberry.enum class BackupProvider(Enum): BACKBLAZE = "BACKBLAZE" + NONE = "NONE" # for testing purposes, make sure not selectable in prod. 
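
The `name` class attributes added in PATCH 375 let the query layer map a provider instance back to the enum by key, as in `BackupProvider[Backups.provider().name]` above. A self-contained illustration of that `Enum[...]` lookup; the stub class here is an assumption for the example, not the project's Backblaze:

    from enum import Enum

    class BackupProvider(Enum):
        BACKBLAZE = "BACKBLAZE"
        MEMORY = "MEMORY"
        FILE = "FILE"

    class Backblaze:
        name = "BACKBLAZE"  # must match the enum member's key exactly

    provider = Backblaze()
    # Enum[...] indexes by member name and raises KeyError on a mismatch
    assert BackupProvider[provider.name] is BackupProvider.BACKBLAZE
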
MEMORY = "MEMORY" FILE = "FILE" From 421c92d12ea8627d4dece9180b63bf3d352d259e Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 14 Jun 2023 01:40:53 +0300 Subject: [PATCH 376/537] fix(backups): return type of encryption key --- selfprivacy_api/backup/local_secret.py | 4 ++-- selfprivacy_api/backup/providers/provider.py | 2 ++ selfprivacy_api/graphql/queries/backup.py | 3 +-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py index 76237f3..389f3a3 100644 --- a/selfprivacy_api/backup/local_secret.py +++ b/selfprivacy_api/backup/local_secret.py @@ -15,13 +15,13 @@ redis = RedisPool().get_connection() class LocalBackupSecret: @staticmethod - def get(): + def get() -> str: """A secret string which backblaze/other clouds do not know. Serves as encryption key. """ if not LocalBackupSecret.exists(): LocalBackupSecret.reset() - return redis.get(REDIS_KEY) + return redis.get(REDIS_KEY) # type: ignore @staticmethod def set(secret: str): diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index c303d4e..fcf179b 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -12,6 +12,8 @@ class AbstractBackupProvider(ABC): def backuper(self) -> AbstractBackuper: raise NotImplementedError + name = "NONE" + def __init__(self, login="", key="", location="", repo_id=""): self.backuper.set_creds(login, key, location) self.login = login diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index c20be35..2cc8d83 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -28,10 +28,9 @@ class BackupConfiguration: class Backup: @strawberry.field def configuration(self) -> BackupConfiguration: - encryption_key = LocalBackupSecret.get() return BackupConfiguration( provider=BackupProvider[Backups.provider().name], - encryption_key=encryption_key.decode() if encryption_key else "", + encryption_key=LocalBackupSecret.get(), is_initialized=Backups.is_initted(), autobackup_period=Backups.autobackup_period_minutes(), location_name=Backups.provider().location, From 93b98cd4fd42fbf4ab860d96ae6993a39012dfef Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 14 Jun 2023 02:52:10 +0300 Subject: [PATCH 377/537] fix(backups): Handle orphaned snapshots --- selfprivacy_api/backup/local_secret.py | 2 +- .../graphql/common_types/backup_snapshot.py | 9 ----- .../graphql/common_types/service.py | 5 ++- selfprivacy_api/graphql/queries/backup.py | 33 +++++++++++++++++-- 4 files changed, 33 insertions(+), 16 deletions(-) delete mode 100644 selfprivacy_api/graphql/common_types/backup_snapshot.py diff --git a/selfprivacy_api/backup/local_secret.py b/selfprivacy_api/backup/local_secret.py index 389f3a3..ea2afec 100644 --- a/selfprivacy_api/backup/local_secret.py +++ b/selfprivacy_api/backup/local_secret.py @@ -38,7 +38,7 @@ class LocalBackupSecret: @staticmethod def exists() -> bool: - return redis.exists(REDIS_KEY) + return redis.exists(REDIS_KEY) == 1 @staticmethod def _generate() -> str: diff --git a/selfprivacy_api/graphql/common_types/backup_snapshot.py b/selfprivacy_api/graphql/common_types/backup_snapshot.py deleted file mode 100644 index 3256e0c..0000000 --- a/selfprivacy_api/graphql/common_types/backup_snapshot.py +++ /dev/null @@ -1,9 +0,0 @@ -import datetime -import strawberry - - -@strawberry.type -class SnapshotInfo: - id: str - 
service_name: str - created_at: datetime.datetime diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 9e04254..b3403e9 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -3,7 +3,6 @@ import typing import strawberry import datetime from selfprivacy_api.graphql.common_types.dns import DnsRecord -from selfprivacy_api.graphql.common_types.backup_snapshot import SnapshotInfo from selfprivacy_api.services import get_service_by_id, get_services_by_location from selfprivacy_api.services import Service as ServiceInterface @@ -104,14 +103,14 @@ class Service: return get_storage_usage(self) @strawberry.field - def backup_snapshots(self) -> typing.Optional[typing.List[SnapshotInfo]]: + def backup_snapshots(self) -> typing.Optional[typing.List["SnapshotInfo"]]: return None @strawberry.type class SnapshotInfo: id: str - service: "Service" + service: Service created_at: datetime.datetime diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 2cc8d83..9858543 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -7,13 +7,20 @@ import strawberry from selfprivacy_api.backup import Backups from selfprivacy_api.backup.local_secret import LocalBackupSecret from selfprivacy_api.graphql.queries.providers import BackupProvider -from selfprivacy_api.graphql.common_types.service import SnapshotInfo +from selfprivacy_api.graphql.common_types.service import ( + Service, + ServiceStatusEnum, + SnapshotInfo, + service_to_graphql_service, +) +from selfprivacy_api.services import get_service_by_id @strawberry.type class BackupConfiguration: provider: BackupProvider - # When server is lost, the app should have the key to decrypt backups on a new server + # When server is lost, the app should have the key to decrypt backups + # on a new server encryption_key: str # False when repo is not initialized and not ready to be used is_initialized: bool @@ -39,11 +46,31 @@ class Backup: @strawberry.field def all_snapshots(self) -> typing.List[SnapshotInfo]: + if not Backups.is_initted(): + return [] result = [] snapshots = Backups.get_all_snapshots() for snap in snapshots: + service = get_service_by_id(snap.service_name) + if service is None: + service = Service( + id=snap.service_name, + display_name=f"{snap.service_name} (Orphaned)", + description="", + svg_icon="", + is_movable=False, + is_required=False, + is_enabled=False, + status=ServiceStatusEnum.OFF, + url=None, + dns_records=None, + ) + else: + service = service_to_graphql_service(service) graphql_snap = SnapshotInfo( - id=snap.id, service=snap.service_name, created_at=snap.created_at + id=snap.id, + service=service, + created_at=snap.created_at, ) result.append(graphql_snap) return result From 09c79b34776a87f96b977f60aeef3e610a7fd81d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 11:14:52 +0000 Subject: [PATCH 378/537] test(backups): snapshot query --- tests/test_graphql/test_api_backup.py | 38 ++++++++++++++++++++++++--- 1 file changed, 35 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 1d944f4..c0961b7 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -1,10 +1,18 @@ from tests.test_graphql.test_backup import dummy_service, backups, raw_dummy_service +from tests.common import generate_backup_query 
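
The orphan handling in PATCH 377 above substitutes a synthetic placeholder service when a snapshot outlives the service it belonged to, so `allSnapshots` never returns a null service. A condensed, runnable sketch of the fallback, using plain dicts in place of the strawberry types:

    from typing import Optional

    SERVICES = {"testservice": {"id": "testservice", "display_name": "Test Service"}}

    def get_service_by_id(name: str) -> Optional[dict]:
        return SERVICES.get(name)

    def snapshot_service(name: str) -> dict:
        service = get_service_by_id(name)
        if service is None:
            # the snapshot references a deleted service: return a stub, not None
            return {"id": name, "display_name": f"{name} (Orphaned)"}
        return service

    assert snapshot_service("gone")["display_name"] == "gone (Orphaned)"
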
-# from tests.common import generate_api_query - -# from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations from selfprivacy_api.jobs import Jobs, JobStatus +API_SNAPSHOTS_QUERY = """ +allSnapshots { + id + service { + id + } + createdAt +} +""" + API_BACK_UP_MUTATION = """ mutation TestBackupService($service_id: String) { startBackup(serviceId: $service_id) { @@ -31,6 +39,30 @@ def api_backup(authorized_client, service): return response +def get_data(response): + assert response.status_code == 200 + response = response.json() + assert response["data"] is not None + data = response["data"] + return data + + +def api_snapshots(authorized_client, service): + response = authorized_client.post( + "/graphql", + json={"query": generate_backup_query([API_SNAPSHOTS_QUERY])}, + ) + data = get_data(response) + result = data["backup"]["allSnapshots"] + assert result is not None + return result + + +def test_snapshots_empty(authorized_client, dummy_service): + snaps = api_snapshots(authorized_client, dummy_service) + assert snaps == [] + + def test_start_backup(authorized_client, dummy_service): response = api_backup(authorized_client, dummy_service) assert response["data"]["startBackup"]["success"] is True From cfda6b08103ef0f17aeee5c9c724b2da0e597113 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 11:52:57 +0000 Subject: [PATCH 379/537] fix(backups): shorten snapshot query signature --- tests/test_graphql/test_api_backup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index c0961b7..5b9aabf 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -47,7 +47,7 @@ def get_data(response): return data -def api_snapshots(authorized_client, service): +def api_snapshots(authorized_client): response = authorized_client.post( "/graphql", json={"query": generate_backup_query([API_SNAPSHOTS_QUERY])}, @@ -59,7 +59,7 @@ def api_snapshots(authorized_client, service): def test_snapshots_empty(authorized_client, dummy_service): - snaps = api_snapshots(authorized_client, dummy_service) + snaps = api_snapshots(authorized_client) assert snaps == [] From de1cbcb1caa962e24eeebcc7a3cbd88f33b97639 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 11:54:24 +0000 Subject: [PATCH 380/537] test(backups): display errors from api --- tests/test_graphql/test_api_backup.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 5b9aabf..40c81fa 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -42,6 +42,10 @@ def api_backup(authorized_client, service): def get_data(response): assert response.status_code == 200 response = response.json() + if ( + "errors" in response.keys() + ): # convenience for debugging, this will display error + assert response["errors"] == [] assert response["data"] is not None data = response["data"] return data From 53638b7e06686729ae3216ff51c4fbb5180fb125 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 11:55:46 +0000 Subject: [PATCH 381/537] test(backups): make dummy service more compliant --- selfprivacy_api/services/test_service/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index c14feca..b1c2924 100644 --- 
a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -42,7 +42,7 @@ class DummyService(Service): @staticmethod def get_url() -> typing.Optional[str]: """Return service url.""" - domain = get_domain() + domain = "test.com" return f"https://password.{domain}" @staticmethod @@ -68,7 +68,7 @@ class DummyService(Service): Return code 3 means service is stopped. Return code 4 means service is off. """ - return 0 + return ServiceStatus.ACTIVE @staticmethod def enable(): From eac561c57c7d8fff79f5cc8b97e1d8e568dee22f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 12:08:07 +0000 Subject: [PATCH 382/537] test(backups): test dummy service compliance --- tests/test_graphql/test_api_backup.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 40c81fa..2f11077 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -1,6 +1,8 @@ from tests.test_graphql.test_backup import dummy_service, backups, raw_dummy_service from tests.common import generate_backup_query + +from selfprivacy_api.graphql.common_types.service import service_to_graphql_service from selfprivacy_api.jobs import Jobs, JobStatus API_SNAPSHOTS_QUERY = """ @@ -62,6 +64,11 @@ def api_snapshots(authorized_client): return result +def test_dummy_service_convertible_to_gql(dummy_service): + gql_service = service_to_graphql_service(dummy_service) + assert gql_service is not None + + def test_snapshots_empty(authorized_client, dummy_service): snaps = api_snapshots(authorized_client) assert snaps == [] From a486825a4f1578f363bce6186a6a9267423f1cf2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 12:35:41 +0000 Subject: [PATCH 383/537] test(backups): check snapshots getting created --- tests/test_graphql/test_api_backup.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 2f11077..4c13398 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -78,4 +78,9 @@ def test_start_backup(authorized_client, dummy_service): response = api_backup(authorized_client, dummy_service) assert response["data"]["startBackup"]["success"] is True job = response["data"]["startBackup"]["job"] + assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED + snaps = api_snapshots(authorized_client) + assert len(snaps) == 1 + snap = snaps[0] + assert snap["service"]["id"] == "testservice" From cf2dc6795a8d3258fc1e1981e7afc68d27673ac7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 12:41:45 +0000 Subject: [PATCH 384/537] test(backups): use get_data --- tests/test_graphql/test_api_backup.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 4c13398..7de6401 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -37,7 +37,7 @@ def api_backup(authorized_client, service): "query": API_BACK_UP_MUTATION, "variables": {"service_id": service.get_id()}, }, - ).json() + ) return response @@ -76,8 +76,9 @@ def test_snapshots_empty(authorized_client, dummy_service): def test_start_backup(authorized_client, dummy_service): response = api_backup(authorized_client, dummy_service) - assert response["data"]["startBackup"]["success"] is True - job = 
response["data"]["startBackup"]["job"] + data = get_data(response)["startBackup"] + assert data["success"] is True + job = data["job"] assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED snaps = api_snapshots(authorized_client) From a8f72201a70239e386fc0a53bcd8ab99df7b199d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 13:00:23 +0000 Subject: [PATCH 385/537] test(backups): test restore --- tests/test_graphql/test_api_backup.py | 42 +++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 7de6401..f78bb59 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -5,6 +5,21 @@ from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service from selfprivacy_api.jobs import Jobs, JobStatus + +API_RESTORE_MUTATION = """ +mutation TestRestoreService($snapshot_id: String!) { + restoreBackup(snapshotId: $snapshot_id) { + success + message + code + job { + uid + status + } + } +} +""" + API_SNAPSHOTS_QUERY = """ allSnapshots { id @@ -30,6 +45,17 @@ mutation TestBackupService($service_id: String) { """ +def api_restore(authorized_client, snapshot_id): + response = authorized_client.post( + "/graphql", + json={ + "query": API_RESTORE_MUTATION, + "variables": {"snapshot_id": snapshot_id}, + }, + ) + return response + + def api_backup(authorized_client, service): response = authorized_client.post( "/graphql", @@ -84,4 +110,20 @@ def test_start_backup(authorized_client, dummy_service): snaps = api_snapshots(authorized_client) assert len(snaps) == 1 snap = snaps[0] + + assert snap["id"] is not None + assert snap["id"] != "" assert snap["service"]["id"] == "testservice" + + +def test_restore(authorized_client, dummy_service): + api_backup(authorized_client, dummy_service) + snap = api_snapshots(authorized_client)[0] + assert snap["id"] is not None + + response = api_restore(authorized_client, snap["id"]) + data = get_data(response)["restoreBackup"] + assert data["success"] is True + job = data["job"] + + assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED From a48856c9ad4a0b39c79a0b2951c0ac93b840aa86 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 13:06:32 +0000 Subject: [PATCH 386/537] fix(backups): non-nullable service when backing up --- tests/test_graphql/test_api_backup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index f78bb59..660df95 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -31,7 +31,7 @@ allSnapshots { """ API_BACK_UP_MUTATION = """ -mutation TestBackupService($service_id: String) { +mutation TestBackupService($service_id: String!) 
{ startBackup(serviceId: $service_id) { success message From 8475ae33756319598aa13568a1e2984ac3db0639 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 14:07:51 +0000 Subject: [PATCH 387/537] refactor(backups): make localfile repos normal --- selfprivacy_api/backup/__init__.py | 7 +------ selfprivacy_api/backup/providers/local_file.py | 11 ++--------- tests/test_graphql/test_backup.py | 2 +- 3 files changed, 4 insertions(+), 16 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index d496758..f6e2bfc 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -31,8 +31,7 @@ class Backups: @staticmethod def set_localfile_repo(file_path: str): ProviderClass = get_provider(BackupProvider.FILE) - provider = ProviderClass(file_path) - Storage.store_testrepo_path(file_path) + provider = ProviderClass(login="", key="", location=file_path, repo_id="") Storage.store_provider(provider) def set_provider(provider: AbstractBackupProvider): @@ -145,10 +144,6 @@ class Backups: ): provider_class = get_provider(BackupProvider[kind]) - if kind == "FILE": - path = Storage.get_testrepo_path() - return provider_class(path) - return provider_class(login=login, key=key, location=location, repo_id=repo_id) @staticmethod diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index 95075dd..77b0c92 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -3,12 +3,5 @@ from selfprivacy_api.backup.restic_backuper import ResticBackuper class LocalFileBackup(AbstractBackupProvider): - backuper = ResticBackuper("", "", "memory") - - name = "FILE" - - # login and key args are for compatibility with generic provider methods. They are ignored. 
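
On the `String!` fix in PATCH 386: a trailing `!` in GraphQL marks a type non-nullable, so a request missing the variable is rejected during validation, before any resolver runs. In strawberry (the library used throughout this series), a plain `str` annotation should render as `String!` while `typing.Optional[str]` renders as `String`. A minimal sketch:

    import strawberry
    from strawberry.printer import print_schema

    @strawberry.type
    class Query:
        @strawberry.field
        def echo(self, service_id: str) -> str:
            # non-Optional parameter => "serviceId: String!" in the schema
            return service_id

    schema = strawberry.Schema(query=Query)
    assert "serviceId: String!" in print_schema(schema)
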
- def __init__(self, filename: str, login: str = "", key: str = ""): - super().__init__() - self.backuper = ResticBackuper("", "", ":local:") - self.backuper.set_creds("", "", filename) + backuper = ResticBackuper("", "", ":local:") + name = "FILE" \ No newline at end of file diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 5ac024f..f0462c8 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -95,7 +95,7 @@ def file_backup(tmpdir) -> AbstractBackupProvider: test_repo_path = path.join(tmpdir, "test_repo") ProviderClass = providers.get_provider(BackupProvider.FILE) assert ProviderClass is not None - provider = ProviderClass(test_repo_path) + provider = ProviderClass(location=test_repo_path) assert provider is not None return provider From 38de01da8b626e194ab97094e8b25819d53b83bd Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Jun 2023 14:17:04 +0000 Subject: [PATCH 388/537] refactor(backups): cleanup localfile-specific logic --- selfprivacy_api/backup/storage.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 680f39f..bff4047 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -21,7 +21,6 @@ REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" -REDIS_REPO_PATH_KEY = "backups:test_repo_path" REDIS_PROVIDER_KEY = "backups:provider" REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" @@ -33,7 +32,6 @@ class Storage: @staticmethod def reset(): redis.delete(REDIS_PROVIDER_KEY) - redis.delete(REDIS_REPO_PATH_KEY) redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) prefixes_to_clean = [ @@ -52,18 +50,6 @@ class Storage: for key in redis.keys(REDIS_SNAPSHOTS_PREFIX + "*"): redis.delete(key) - @staticmethod - def store_testrepo_path(path: str): - redis.set(REDIS_REPO_PATH_KEY, path) - - @staticmethod - def get_testrepo_path() -> str: - if not redis.exists(REDIS_REPO_PATH_KEY): - raise ValueError( - "No test repository filepath is set, but we tried to access it" - ) - return redis.get(REDIS_REPO_PATH_KEY) - @staticmethod def services_with_autobackup() -> List[str]: keys = redis.keys(REDIS_AUTOBACKUP_ENABLED_PREFIX + "*") From 1c96743c5d7733f67c0aff64c6170ae068a43e2b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 16 Jun 2023 13:43:41 +0000 Subject: [PATCH 389/537] test(backups): test reinitting repository --- selfprivacy_api/backup/__init__.py | 2 +- .../graphql/mutations/backup_mutations.py | 12 ++-- tests/test_graphql/test_api_backup.py | 66 +++++++++++++++++++ 3 files changed, 73 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f6e2bfc..b340034 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -135,7 +135,7 @@ class Backups: @staticmethod def set_provider(kind: str, login: str, key: str, location: str, repo_id: str = ""): - provider = Backups.construct_provider(kind, login, key, location, id) + provider = Backups.construct_provider(kind, login, key, location, repo_id) Storage.store_provider(provider) @staticmethod diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 898702a..ad43686 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ 
b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -48,15 +48,17 @@ class BackupMutations: self, repository: InitializeRepositoryInput ) -> GenericBackupConfigReturn: """Initialize a new repository""" - provider = Backups.construct_provider( - kind=repository.provider, + Backups.set_provider( + kind=repository.provider.value, login=repository.login, key=repository.password, location=repository.location_name, repo_id=repository.location_id, ) - Backups.set_provider(provider) Backups.init_repo() + return GenericBackupConfigReturn( + success=True, message="", code="200", configuration=Backup().configuration() + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def remove_repository(self) -> GenericBackupConfigReturn: @@ -73,9 +75,7 @@ class BackupMutations: return Backup.configuration() @strawberry.mutation(permission_classes=[IsAuthenticated]) - def start_backup( - self, service_id: typing.Optional[str] = None - ) -> GenericJobButationReturn: + def start_backup(self, service_id: str) -> GenericJobButationReturn: """Start backup""" service = get_service_by_id(service_id) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 660df95..3c9c7f2 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -1,3 +1,4 @@ +from os import path from tests.test_graphql.test_backup import dummy_service, backups, raw_dummy_service from tests.common import generate_backup_query @@ -5,6 +6,23 @@ from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service from selfprivacy_api.jobs import Jobs, JobStatus +API_INIT_MUTATION = """ +mutation TestInitRepo($input: InitializeRepositoryInput!) { + initializeRepository(repository: $input) { + success + message + code + configuration { + provider + encryptionKey + isInitialized + autobackupPeriod + locationName + locationId + } + } +} +""" API_RESTORE_MUTATION = """ mutation TestRestoreService($snapshot_id: String!) 
{ @@ -67,6 +85,32 @@ def api_backup(authorized_client, service): return response +def api_init_without_key( + authorized_client, kind, login, password, location_name, location_id +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_INIT_MUTATION, + "variables": { + "input": { + "provider": kind, + "locationId": location_id, + "locationName": location_name, + "login": login, + "password": password, + } + }, + }, + ) + return response + + +def assert_ok(data): + assert data["code"] == 200 + assert data["success"] is True + + def get_data(response): assert response.status_code == 200 response = response.json() @@ -127,3 +171,25 @@ def test_restore(authorized_client, dummy_service): job = data["job"] assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED + + +def test_reinit(authorized_client, dummy_service, tmpdir): + test_repo_path = path.join(tmpdir, "not_at_all_sus") + response = api_init_without_key( + authorized_client, "FILE", "", "", test_repo_path, "" + ) + data = get_data(response)["initializeRepository"] + assert_ok(data) + configuration = data["configuration"] + assert configuration["provider"] == "FILE" + assert configuration["locationId"] == "" + assert configuration["locationName"] == test_repo_path + assert len(configuration["encryptionKey"]) > 1 + assert configuration["isInitialized"] is True + + response = api_backup(authorized_client, dummy_service) + data = get_data(response)["startBackup"] + assert data["success"] is True + job = data["job"] + + assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED From 5d95c1b44e95e787b698f111b7f8d1cd3168ac64 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 16 Jun 2023 14:33:48 +0000 Subject: [PATCH 390/537] test(backups): preliminary test of repo reset --- .../graphql/mutations/backup_mutations.py | 4 +- tests/test_graphql/test_api_backup.py | 43 +++++++++++++++++++ 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index ad43686..110e731 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -64,7 +64,9 @@ class BackupMutations: def remove_repository(self) -> GenericBackupConfigReturn: """Remove repository""" Backups.reset() - return Backup.configuration() + return GenericBackupConfigReturn( + success=True, message="", code="200", configuration=Backup().configuration() + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def set_autobackup_period( diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 3c9c7f2..7d23902 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -6,6 +6,24 @@ from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service from selfprivacy_api.jobs import Jobs, JobStatus +API_REMOVE_REPOSITORY_MUTATION = """ +mutation TestRemoveRepo { + removeRepository { + success + message + code + configuration { + provider + encryptionKey + isInitialized + autobackupPeriod + locationName + locationId + } + } +} +""" + API_INIT_MUTATION = """ mutation TestInitRepo($input: InitializeRepositoryInput!) 
{ initializeRepository(repository: $input) { @@ -85,6 +103,17 @@ def api_backup(authorized_client, service): return response +def api_remove(authorized_client): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_REPOSITORY_MUTATION, + "variables": {}, + }, + ) + return response + + def api_init_without_key( authorized_client, kind, login, password, location_name, location_id ): @@ -193,3 +222,17 @@ def test_reinit(authorized_client, dummy_service, tmpdir): job = data["job"] assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED + + +def test_remove(authorized_client, generic_userdata): + response = api_remove(authorized_client) + data = get_data(response)["removeRepository"] + assert_ok(data) + + configuration = data["configuration"] + assert configuration["provider"] == "BACKBLAZE" + assert configuration["locationId"] == "" + assert configuration["locationName"] == "selfprivacy" + # still generated every time it is missing + assert len(configuration["encryptionKey"]) > 1 + assert configuration["isInitialized"] is False From 1fd5db9ff3e277c9ad5be245cf0bab8be59bda5d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 16 Jun 2023 14:39:00 +0000 Subject: [PATCH 391/537] fix(backups): fix output API return types for configuration --- selfprivacy_api/graphql/mutations/backup_mutations.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 110e731..1488a2c 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -74,7 +74,9 @@ class BackupMutations: ) -> GenericBackupConfigReturn: """Set autobackup period. None is to disable autobackup""" Backups.set_autobackup_period_minutes(period) - return Backup.configuration() + return GenericBackupConfigReturn( + success=True, message="", code="200", configuration=Backup().configuration() + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def start_backup(self, service_id: str) -> GenericJobButationReturn: @@ -127,4 +129,8 @@ class BackupMutations: def force_snapshots_reload(self) -> GenericMutationReturn: """Force snapshots reload""" Backups.force_snapshot_reload() - return GenericMutationReturn() + return GenericMutationReturn( + success=True, + code=200, + message="", + ) From 806fb3c84b1f6f07529ef25fb6faffc84f1d5fd4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 16 Jun 2023 15:09:39 +0000 Subject: [PATCH 392/537] feature(backups): resetting json config too --- selfprivacy_api/backup/__init__.py | 23 +++++++++++++++++++++-- tests/test_graphql/test_api_backup.py | 2 +- tests/test_graphql/test_backup.py | 5 ++--- 3 files changed, 24 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b340034..329f81f 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -4,7 +4,7 @@ from os import statvfs from selfprivacy_api.models.backup.snapshot import Snapshot -from selfprivacy_api.utils import ReadUserData +from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service @@ -22,6 +22,13 @@ from selfprivacy_api.backup.jobs import ( ) from selfprivacy_api.jobs import Jobs, JobStatus +DEFAULT_JSON_PROVIDER = { + "provider": "BACKBLAZE", + "accountId": "", + "accountKey": "", + "bucket": "", +} + 
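
The defaults dict above feeds the `reset_provider_json` step shown a few lines below: drop the legacy top-level `backblaze` block, then write the defaults back under `backup`. The same transformation on a plain dict; `WriteUserData` is the project's locking context manager and is omitted here:

    DEFAULT_JSON_PROVIDER = {
        "provider": "BACKBLAZE",
        "accountId": "",
        "accountKey": "",
        "bucket": "",
    }

    def reset_provider(user_data: dict) -> None:
        user_data.pop("backblaze", None)  # legacy location of the credentials
        user_data["backup"] = dict(DEFAULT_JSON_PROVIDER)  # copy, not alias

    data = {"backblaze": {"accountId": "ID"}, "backup": {"provider": "FILE"}}
    reset_provider(data)
    assert "backblaze" not in data and data["backup"]["provider"] == "BACKBLAZE"
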
class Backups: """A singleton controller for backups""" @@ -147,8 +154,13 @@ class Backups: return provider_class(login=login, key=key, location=location, repo_id=repo_id) @staticmethod - def reset(): + def reset(reset_json=True): Storage.reset() + if reset_json: + try: + Backups.reset_provider_json() + except FileNotFoundError: # if there is no userdata file, we do not need to reset it + pass @staticmethod def lookup_provider() -> AbstractBackupProvider: @@ -190,6 +202,13 @@ class Backups: kind=provider_string, login=account, key=key, location=location ) + def reset_provider_json() -> AbstractBackupProvider: + with WriteUserData() as user_data: + if "backblaze" in user_data.keys(): + del user_data["backblaze"] + + user_data["backup"] = DEFAULT_JSON_PROVIDER + @staticmethod def load_provider_redis() -> AbstractBackupProvider: provider_model = Storage.load_provider() diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 7d23902..90381c4 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -232,7 +232,7 @@ def test_remove(authorized_client, generic_userdata): configuration = data["configuration"] assert configuration["provider"] == "BACKBLAZE" assert configuration["locationId"] == "" - assert configuration["locationName"] == "selfprivacy" + assert configuration["locationName"] == "" # still generated every time it is missing assert len(configuration["encryptionKey"]) > 1 assert configuration["isInitialized"] is False diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index f0462c8..a70cdb8 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -37,7 +37,7 @@ def backups(tmpdir): @pytest.fixture() def backups_backblaze(generic_userdata): - Backups.reset() + Backups.reset(reset_json=False) @pytest.fixture() @@ -101,7 +101,7 @@ def file_backup(tmpdir) -> AbstractBackupProvider: def test_config_load(generic_userdata): - Backups.reset() + Backups.reset(reset_json=False) provider = Backups.provider() assert provider is not None @@ -445,7 +445,6 @@ def test_init_tracking_caching2(backups, raw_dummy_service): # Storage def test_provider_storage(backups_backblaze): - Backups.reset() provider = Backups.provider() assert provider is not None From b346a283a47ca80a5ca4511c2f1c4b05e3c44237 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 16 Jun 2023 15:19:22 +0000 Subject: [PATCH 393/537] test(backups): add a backend json reset test --- tests/test_graphql/test_backup.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index a70cdb8..645b2fa 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -108,11 +108,31 @@ def test_config_load(generic_userdata): assert isinstance(provider, Backblaze) assert provider.login == "ID" assert provider.key == "KEY" + assert provider.location == "selfprivacy" assert provider.backuper.account == "ID" assert provider.backuper.key == "KEY" +def test_json_reset(generic_userdata): + Backups.reset(reset_json=False) + provider = Backups.provider() + assert provider is not None + assert isinstance(provider, Backblaze) + assert provider.login == "ID" + assert provider.key == "KEY" + assert provider.location == "selfprivacy" + + Backups.reset() + provider = Backups.provider() + assert provider is not None + assert isinstance(provider, Backblaze) + assert provider.login == "" + assert provider.key == 
"" + assert provider.location == "" + assert provider.repo_id == "" + + def test_select_backend(): provider = providers.get_provider(BackupProvider.BACKBLAZE) assert provider is not None From a56461fb9699af3269daeae62d3275e07865b9ac Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 16 Jun 2023 15:48:58 +0000 Subject: [PATCH 394/537] refactor(backups): make a dir for backuppers --- selfprivacy_api/backup/{backuper.py => backuppers/__init__.py} | 0 .../{restic_backuper.py => backuppers/restic_backupper.py} | 2 +- selfprivacy_api/backup/providers/backblaze.py | 2 +- selfprivacy_api/backup/providers/local_file.py | 2 +- selfprivacy_api/backup/providers/memory.py | 2 +- selfprivacy_api/backup/providers/provider.py | 2 +- 6 files changed, 5 insertions(+), 5 deletions(-) rename selfprivacy_api/backup/{backuper.py => backuppers/__init__.py} (100%) rename selfprivacy_api/backup/{restic_backuper.py => backuppers/restic_backupper.py} (99%) diff --git a/selfprivacy_api/backup/backuper.py b/selfprivacy_api/backup/backuppers/__init__.py similarity index 100% rename from selfprivacy_api/backup/backuper.py rename to selfprivacy_api/backup/backuppers/__init__.py diff --git a/selfprivacy_api/backup/restic_backuper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py similarity index 99% rename from selfprivacy_api/backup/restic_backuper.py rename to selfprivacy_api/backup/backuppers/restic_backupper.py index 69bdea9..7b58a2c 100644 --- a/selfprivacy_api/backup/restic_backuper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -6,7 +6,7 @@ from typing import List from collections.abc import Iterable from json.decoder import JSONDecodeError -from selfprivacy_api.backup.backuper import AbstractBackuper +from selfprivacy_api.backup.backuppers import AbstractBackuper from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.backup.jobs import get_backup_job from selfprivacy_api.services import get_service_by_id diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index f474a99..3f2d873 100644 --- a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -1,5 +1,5 @@ from .provider import AbstractBackupProvider -from selfprivacy_api.backup.restic_backuper import ResticBackuper +from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper class Backblaze(AbstractBackupProvider): diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index 77b0c92..bdb2113 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -1,5 +1,5 @@ from .provider import AbstractBackupProvider -from selfprivacy_api.backup.restic_backuper import ResticBackuper +from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper class LocalFileBackup(AbstractBackupProvider): diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py index a481559..e73af51 100644 --- a/selfprivacy_api/backup/providers/memory.py +++ b/selfprivacy_api/backup/providers/memory.py @@ -1,5 +1,5 @@ from .provider import AbstractBackupProvider -from selfprivacy_api.backup.restic_backuper import ResticBackuper +from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper class InMemoryBackup(AbstractBackupProvider): diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index 
fcf179b..c14f1e7 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -4,7 +4,7 @@ It assumes that while some providers are supported via restic/rclone, others may require different backends """ from abc import ABC -from selfprivacy_api.backup.backuper import AbstractBackuper +from selfprivacy_api.backup.backuppers import AbstractBackuper class AbstractBackupProvider(ABC): From cd32aa83b705dca32d89c19341ccdc2b0f3cf604 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 16 Jun 2023 16:03:09 +0000 Subject: [PATCH 395/537] refactor(backups): NoneBackupper class for those cases when we do not know --- .../backup/backuppers/none_backupper.py | 26 +++++++++++++++++++ selfprivacy_api/backup/providers/__init__.py | 1 + selfprivacy_api/backup/providers/provider.py | 3 ++- 3 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 selfprivacy_api/backup/backuppers/none_backupper.py diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py new file mode 100644 index 0000000..0c9509e --- /dev/null +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -0,0 +1,26 @@ +from typing import List + +from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.backup.backuppers import AbstractBackuper + + +class NoneBackupper(AbstractBackuper): + def is_initted(self, repo_name: str) -> bool: + return False + + def start_backup(self, folders: List[str], repo_name: str): + raise NotImplementedError + + def get_snapshots(self, repo_name) -> List[Snapshot]: + """Get all snapshots from the repo""" + raise NotImplementedError + + def init(self, repo_name): + raise NotImplementedError + + def restore_from_backup(self, repo_name: str, snapshot_id: str, folders: List[str]): + """Restore a target folder using a snapshot""" + raise NotImplementedError + + def restored_size(self, repo_name, snapshot_id) -> float: + raise NotImplementedError diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py index 5428e80..bac51e5 100644 --- a/selfprivacy_api/backup/providers/__init__.py +++ b/selfprivacy_api/backup/providers/__init__.py @@ -9,6 +9,7 @@ PROVIDER_MAPPING = { BackupProvider.BACKBLAZE: Backblaze, BackupProvider.MEMORY: InMemoryBackup, BackupProvider.FILE: LocalFileBackup, + BackupProvider.NONE: AbstractBackupProvider, } diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index c14f1e7..c6da12d 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -5,12 +5,13 @@ require different backends """ from abc import ABC from selfprivacy_api.backup.backuppers import AbstractBackuper +from selfprivacy_api.backup.backuppers.none_backupper import NoneBackupper class AbstractBackupProvider(ABC): @property def backuper(self) -> AbstractBackuper: - raise NotImplementedError + return NoneBackupper name = "NONE" From 9f096ed2c0d315d36cc66c24ad24ffda659340af Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 19 Jun 2023 11:09:10 +0000 Subject: [PATCH 396/537] feature(backups): actually dealing with situation when the provider is not configured --- selfprivacy_api/backup/__init__.py | 47 +++++++++++-------- selfprivacy_api/backup/backuppers/__init__.py | 6 ++- .../backup/backuppers/none_backupper.py | 5 +- selfprivacy_api/backup/providers/provider.py | 2 +- tests/test_graphql/test_api_backup.py | 2 +- 
tests/test_graphql/test_backup.py | 3 +- 6 files changed, 41 insertions(+), 24 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 329f81f..29fac73 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -168,38 +168,47 @@ class Backups: if redis_provider is not None: return redis_provider - json_provider = Backups.load_provider_json() + try: + json_provider = Backups.load_provider_json() + except FileNotFoundError: + json_provider = None + if json_provider is not None: Storage.store_provider(json_provider) return json_provider - memory_provider = Backups.construct_provider("MEMORY", login="", key="") - Storage.store_provider(memory_provider) - return memory_provider + none_provider = Backups.construct_provider( + "NONE", login="", key="", location="" + ) + Storage.store_provider(none_provider) + return none_provider @staticmethod - def load_provider_json() -> AbstractBackupProvider: + def load_provider_json() -> Optional[AbstractBackupProvider]: with ReadUserData() as user_data: - account = "" - key = "" + provider_dict = { + "provider": "", + "accountId": "", + "accountKey": "", + "bucket": "", + } if "backup" not in user_data.keys(): if "backblaze" in user_data.keys(): - account = user_data["backblaze"]["accountId"] - key = user_data["backblaze"]["accountKey"] - location = user_data["backblaze"]["bucket"] - provider_string = "BACKBLAZE" - return Backups.construct_provider( - kind=provider_string, login=account, key=key, location=location - ) + provider_dict.update(user_data["backblaze"]) + provider_dict["provider"] = "BACKBLAZE" + return None + else: + provider_dict.update(user_data["backup"]) + + if provider_dict == DEFAULT_JSON_PROVIDER: return None - account = user_data["backup"]["accountId"] - key = user_data["backup"]["accountKey"] - provider_string = user_data["backup"]["provider"] - location = user_data["backup"]["bucket"] return Backups.construct_provider( - kind=provider_string, login=account, key=key, location=location + kind=provider_dict["provider"], + login=provider_dict["accountId"], + key=provider_dict["accountKey"], + location=provider_dict["bucket"], ) def reset_provider_json() -> AbstractBackupProvider: diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 908c1fc..61ef1ba 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -9,7 +9,11 @@ class AbstractBackuper(ABC): pass @abstractmethod - def is_initted(self, repo_name: str) -> bool: + def is_initted(self) -> bool: + raise NotImplementedError + + @abstractmethod + def set_creds(self, account: str, key: str, repo: str): raise NotImplementedError @abstractmethod diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index 0c9509e..de51d6a 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -5,9 +5,12 @@ from selfprivacy_api.backup.backuppers import AbstractBackuper class NoneBackupper(AbstractBackuper): - def is_initted(self, repo_name: str) -> bool: + def is_initted(self, repo_name: str = "") -> bool: return False + def set_creds(self, account: str, key: str, repo: str): + pass + def start_backup(self, folders: List[str], repo_name: str): raise NotImplementedError diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index 
c6da12d..dddc53f 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -11,7 +11,7 @@ from selfprivacy_api.backup.backuppers.none_backupper import NoneBackupper class AbstractBackupProvider(ABC): @property def backuper(self) -> AbstractBackuper: - return NoneBackupper + return NoneBackupper() name = "NONE" diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 90381c4..d65d6f1 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -230,7 +230,7 @@ def test_remove(authorized_client, generic_userdata): assert_ok(data) configuration = data["configuration"] - assert configuration["provider"] == "BACKBLAZE" + assert configuration["provider"] == "NONE" assert configuration["locationId"] == "" assert configuration["locationName"] == "" # still generated every time it is missing diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 645b2fa..428e3dd 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -15,6 +15,7 @@ from selfprivacy_api.backup import Backups import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze + from selfprivacy_api.backup.tasks import start_backup, restore_snapshot from selfprivacy_api.backup.storage import Storage from selfprivacy_api.backup.jobs import get_backup_job @@ -126,7 +127,7 @@ def test_json_reset(generic_userdata): Backups.reset() provider = Backups.provider() assert provider is not None - assert isinstance(provider, Backblaze) + assert isinstance(provider, AbstractBackupProvider) assert provider.login == "" assert provider.key == "" assert provider.location == "" From f829a34dc7a5b3160a1ccafe1c508c643ae58411 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 19 Jun 2023 11:17:09 +0000 Subject: [PATCH 397/537] refactor(backups): delete legacy provider setting --- selfprivacy_api/backup/__init__.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 29fac73..bb17254 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -41,9 +41,6 @@ class Backups: provider = ProviderClass(login="", key="", location=file_path, repo_id="") Storage.store_provider(provider) - def set_provider(provider: AbstractBackupProvider): - Storage.store_provider(provider) - @staticmethod def get_last_backed_up(service: Service) -> Optional[datetime]: """Get a timezone-aware time of the last backup of a service""" From ecf72948b115bda697f04f93e258fb9378de8661 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 19 Jun 2023 13:28:02 +0000 Subject: [PATCH 398/537] test(backups): setting autobackup period --- .../graphql/mutations/backup_mutations.py | 6 +- tests/test_graphql/test_api_backup.py | 76 +++++++++++++++++++ 2 files changed, 81 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 1488a2c..ad7c0c6 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -73,7 +73,11 @@ class BackupMutations: self, period: typing.Optional[int] = None ) -> GenericBackupConfigReturn: """Set autobackup period. 
None is to disable autobackup""" - Backups.set_autobackup_period_minutes(period) + if period is not None: + Backups.set_autobackup_period_minutes(period) + else: + Backups.set_autobackup_period_minutes(0) + return GenericBackupConfigReturn( success=True, message="", code="200", configuration=Backup().configuration() ) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index d65d6f1..b8c09dc 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -6,6 +6,24 @@ from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service from selfprivacy_api.jobs import Jobs, JobStatus +API_SET_AUTOBACKUP_PERIOD_MUTATION = """ +mutation TestAutobackupPeriod($period: Int) { + setAutobackupPeriod(period: $period) { + success + message + code + configuration { + provider + encryptionKey + isInitialized + autobackupPeriod + locationName + locationId + } + } +} +""" + API_REMOVE_REPOSITORY_MUTATION = """ mutation TestRemoveRepo { removeRepository { @@ -103,6 +121,17 @@ def api_backup(authorized_client, service): return response +def api_set_period(authorized_client, period): + response = authorized_client.post( + "/graphql", + json={ + "query": API_SET_AUTOBACKUP_PERIOD_MUTATION, + "variables": {"period": period}, + }, + ) + return response + + def api_remove(authorized_client): response = authorized_client.post( "/graphql", @@ -236,3 +265,50 @@ def test_remove(authorized_client, generic_userdata): # still generated every time it is missing assert len(configuration["encryptionKey"]) > 1 assert configuration["isInitialized"] is False + + +def test_autobackup_period_nonzero(authorized_client): + new_period = 11 + response = api_set_period(authorized_client, new_period) + data = get_data(response)["setAutobackupPeriod"] + assert_ok(data) + + configuration = data["configuration"] + assert configuration["autobackupPeriod"] == new_period + + +def test_autobackup_period_zero(authorized_client): + new_period = 0 + # since it is none by default, we better first set it to something non-negative + response = api_set_period(authorized_client, 11) + # and now we nullify it + response = api_set_period(authorized_client, new_period) + data = get_data(response)["setAutobackupPeriod"] + assert_ok(data) + + configuration = data["configuration"] + assert configuration["autobackupPeriod"] == None + + +def test_autobackup_period_none(authorized_client): + # since it is none by default, we better first set it to something non-negative + response = api_set_period(authorized_client, 11) + # and now we nullify it + response = api_set_period(authorized_client, None) + data = get_data(response)["setAutobackupPeriod"] + assert_ok(data) + + configuration = data["configuration"] + assert configuration["autobackupPeriod"] == None + + +def test_autobackup_period_negative(authorized_client): + # since it is none by default, we better first set it to something non-negative + response = api_set_period(authorized_client, 11) + # and now we nullify it + response = api_set_period(authorized_client, -12) + data = get_data(response)["setAutobackupPeriod"] + assert_ok(data) + + configuration = data["configuration"] + assert configuration["autobackupPeriod"] == None From 53dfb382842db990287fc401d174d91f63fff540 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 19 Jun 2023 14:12:40 +0000 Subject: [PATCH 399/537] test(backups): ensure asking to reload snaps does not explode the server --- 
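The gist of this change, for reference: NoneBackupper.get_snapshots() now reports an empty repository instead of raising, so asking an unconfigured provider to reload snapshots returns [] all the way up through the API. A minimal, self-contained sketch of that contract — the class and method names are taken from the diff that follows, while the stand-in Snapshot type and the closing assert are illustrative only:

from abc import ABC, abstractmethod
from typing import List


class Snapshot:
    # Stand-in for the real snapshot model used by the backuppers.
    pass


class AbstractBackuper(ABC):
    @abstractmethod
    def get_snapshots(self) -> List[Snapshot]:
        """Get all snapshots from the repo"""
        raise NotImplementedError


class NoneBackupper(AbstractBackuper):
    def get_snapshots(self) -> List[Snapshot]:
        # Previously this raised NotImplementedError, which bubbled up
        # through the GraphQL resolver; an unconfigured repo simply has
        # no snapshots.
        return []


assert NoneBackupper().get_snapshots() == []  # no exception, just an empty list
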
selfprivacy_api/backup/backuppers/__init__.py | 2 +- .../backup/backuppers/none_backupper.py | 4 +- tests/test_graphql/test_api_backup.py | 46 +++++++++++++++++++ 3 files changed, 49 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 61ef1ba..f20496d 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -21,7 +21,7 @@ class AbstractBackuper(ABC): raise NotImplementedError @abstractmethod - def get_snapshots(self, repo_name) -> List[Snapshot]: + def get_snapshots(self) -> List[Snapshot]: """Get all snapshots from the repo""" raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index de51d6a..e687323 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -14,9 +14,9 @@ class NoneBackupper(AbstractBackuper): def start_backup(self, folders: List[str], repo_name: str): raise NotImplementedError - def get_snapshots(self, repo_name) -> List[Snapshot]: + def get_snapshots(self) -> List[Snapshot]: """Get all snapshots from the repo""" - raise NotImplementedError + return [] def init(self, repo_name): raise NotImplementedError diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index b8c09dc..3eed12a 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -6,6 +6,16 @@ from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service from selfprivacy_api.jobs import Jobs, JobStatus +API_RELOAD_SNAPSHOTS = """ +mutation TestSnapshotsReload { + forceSnapshotsReload { + success + message + code + } +} +""" + API_SET_AUTOBACKUP_PERIOD_MUTATION = """ mutation TestAutobackupPeriod($period: Int) { setAutobackupPeriod(period: $period) { @@ -143,6 +153,17 @@ def api_remove(authorized_client): return response +def api_reload_snapshots(authorized_client): + response = authorized_client.post( + "/graphql", + json={ + "query": API_RELOAD_SNAPSHOTS, + "variables": {}, + }, + ) + return response + + def api_init_without_key( authorized_client, kind, login, password, location_name, location_id ): @@ -312,3 +333,28 @@ def test_autobackup_period_negative(authorized_client): configuration = data["configuration"] assert configuration["autobackupPeriod"] == None + + +# We cannot really check the effect at this level, we leave it to backend tests +# But we still make it run in both empty and full scenarios and ask for snaps afterwards +def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service): + api_remove(authorized_client) + + response = api_reload_snapshots(authorized_client) + data = get_data(response)["forceSnapshotsReload"] + assert_ok(data) + + snaps = api_snapshots(authorized_client) + assert snaps == [] + + +def test_reload_snapshots(authorized_client, dummy_service): + response = api_backup(authorized_client, dummy_service) + data = get_data(response)["startBackup"] + + response = api_reload_snapshots(authorized_client) + data = get_data(response)["forceSnapshotsReload"] + assert_ok(data) + + snaps = api_snapshots(authorized_client) + assert len(snaps) == 1 From b78ee5fcca816aca67c0ef56cf68a3950ed9342f Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 21 Jun 2023 06:46:56 +0300 Subject: [PATCH 400/537] refactor(api): Group mutations I've 
learned that there is no problem in grouping mutations like we do with queries. This was a big mistake on my side; now we have legacy endpoints that are not so conveniently placed. I've grouped all mutations and left copies of the old ones flattened in the root for backwards compatibility. We will migrate to mutation groups on the client side, and backups now use only the grouped mutations. Tests are updated. --- .../backup/providers/local_file.py | 2 +- .../graphql/mutations/backup_mutations.py | 14 +- .../graphql/mutations/deprecated_mutations.py | 215 +++++++++++++++ .../graphql/mutations/mutation_interface.py | 2 +- .../graphql/mutations/services_mutations.py | 4 +- .../graphql/mutations/ssh_mutations.py | 102 ------- .../graphql/mutations/storage_mutations.py | 8 +- .../graphql/mutations/users_mutations.py | 88 +++++- selfprivacy_api/graphql/schema.py | 76 +++-- tests/test_graphql/test_api_backup.py | 46 ++-- tests/test_graphql/test_api_devices.py | 259 +++++++++++------- tests/test_graphql/test_api_recovery.py | 158 ++++++----- tests/test_graphql/test_localsecret.py | 2 +- tests/test_graphql/test_ssh.py | 122 +++++---- tests/test_graphql/test_system.py | 241 +++++++++++----- tests/test_graphql/test_system_nixos_tasks.py | 64 +++-- tests/test_graphql/test_users.py | 168 ++++++------ tests/test_model_storage.py | 17 +- 18 files changed, 1023 insertions(+), 565 deletions(-) create mode 100644 selfprivacy_api/graphql/mutations/deprecated_mutations.py delete mode 100644 selfprivacy_api/graphql/mutations/ssh_mutations.py diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index bdb2113..d31417e 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -4,4 +4,4 @@ from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper class LocalFileBackup(AbstractBackupProvider): backuper = ResticBackuper("", "", ":local:") - name = "FILE" \ No newline at end of file + name = "FILE" diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index ad7c0c6..f2bade0 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -6,7 +6,7 @@ from strawberry.types import Info from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, - GenericJobButationReturn, + GenericJobMutationReturn, MutationReturnInterface, ) from selfprivacy_api.graphql.queries.backup import BackupConfiguration @@ -83,12 +83,12 @@ class BackupMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def start_backup(self, service_id: str) -> GenericJobButationReturn: + def start_backup(self, service_id: str) -> GenericJobMutationReturn: """Start backup""" service = get_service_by_id(service_id) if service is None: - return GenericJobButationReturn( + return GenericJobMutationReturn( success=False, code=300, message=f"nonexistent service: {service_id}", @@ -99,7 +99,7 @@ class BackupMutations: start_backup(service) job = job_to_api_job(job) - return GenericJobButationReturn( + return GenericJobMutationReturn( success=True, code=200, message="Backup job queued", @@ -107,12 +107,12 @@ class BackupMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def restore_backup(self, snapshot_id: str) -> GenericJobButationReturn: + def restore_backup(self, snapshot_id: str) -> 
GenericJobMutationReturn: """Restore backup""" snap = Backups.get_snapshot_by_id(snapshot_id) service = get_service_by_id(snap.service_name) if snap is None: - return GenericJobButationReturn( + return GenericJobMutationReturn( success=False, code=400, message=f"No such snapshot: {snapshot_id}", @@ -122,7 +122,7 @@ class BackupMutations: job = add_restore_job(snap) restore_snapshot(snap) - return GenericJobButationReturn( + return GenericJobMutationReturn( success=True, code=200, message="restore job created", diff --git a/selfprivacy_api/graphql/mutations/deprecated_mutations.py b/selfprivacy_api/graphql/mutations/deprecated_mutations.py new file mode 100644 index 0000000..6d187c6 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/deprecated_mutations.py @@ -0,0 +1,215 @@ +"""Deprecated mutations + +A mistake was made: mutations were not grouped, and were instead +placed in the root of the mutations schema. In this file, we import all the +mutations from their modules and provide them to the root for backwards compatibility. +""" + +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.common_types.user import UserMutationReturn +from selfprivacy_api.graphql.mutations.api_mutations import ( + ApiKeyMutationReturn, + ApiMutations, + DeviceApiTokenMutationReturn, +) +from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations +from selfprivacy_api.graphql.mutations.job_mutations import JobMutations +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericJobMutationReturn, + GenericMutationReturn, +) +from selfprivacy_api.graphql.mutations.services_mutations import ( + ServiceMutationReturn, + ServicesMutations, +) +from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations +from selfprivacy_api.graphql.mutations.system_mutations import ( + AutoUpgradeSettingsMutationReturn, + SystemMutations, + TimezoneMutationReturn, +) +from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations +from selfprivacy_api.graphql.mutations.users_mutations import UsersMutations + + +def deprecated_mutation(func, group, auth=True): + return strawberry.mutation( + resolver=func, + permission_classes=[IsAuthenticated] if auth else [], + deprecation_reason=f"Use `{group}.{func.__name__}` instead", + ) + + +@strawberry.type +class DeprecatedApiMutations: + get_new_recovery_api_key: ApiKeyMutationReturn = deprecated_mutation( + ApiMutations.get_new_recovery_api_key, + "api", + ) + + use_recovery_api_key: DeviceApiTokenMutationReturn = deprecated_mutation( + ApiMutations.use_recovery_api_key, + "api", + auth=False, + ) + + refresh_device_api_token: DeviceApiTokenMutationReturn = deprecated_mutation( + ApiMutations.refresh_device_api_token, + "api", + ) + + delete_device_api_token: GenericMutationReturn = deprecated_mutation( + ApiMutations.delete_device_api_token, + "api", + ) + + get_new_device_api_key: ApiKeyMutationReturn = deprecated_mutation( + ApiMutations.get_new_device_api_key, + "api", + ) + + invalidate_new_device_api_key: GenericMutationReturn = deprecated_mutation( + ApiMutations.invalidate_new_device_api_key, + "api", + ) + + authorize_with_new_device_api_key: DeviceApiTokenMutationReturn = ( + deprecated_mutation( + ApiMutations.authorize_with_new_device_api_key, + "api", + auth=False, + ) + ) + + +@strawberry.type +class DeprecatedSystemMutations: + change_timezone: TimezoneMutationReturn = deprecated_mutation( + SystemMutations.change_timezone, + "system", + ) + + 
change_auto_upgrade_settings: AutoUpgradeSettingsMutationReturn = ( + deprecated_mutation( + SystemMutations.change_auto_upgrade_settings, + "system", + ) + ) + + run_system_rebuild: GenericMutationReturn = deprecated_mutation( + SystemMutations.run_system_rebuild, + "system", + ) + + run_system_rollback: GenericMutationReturn = deprecated_mutation( + SystemMutations.run_system_rollback, + "system", + ) + + run_system_upgrade: GenericMutationReturn = deprecated_mutation( + SystemMutations.run_system_upgrade, + "system", + ) + + reboot_system: GenericMutationReturn = deprecated_mutation( + SystemMutations.reboot_system, + "system", + ) + + pull_repository_changes: GenericMutationReturn = deprecated_mutation( + SystemMutations.pull_repository_changes, + "system", + ) + + +@strawberry.type +class DeprecatedUsersMutations: + create_user: UserMutationReturn = deprecated_mutation( + UsersMutations.create_user, + "users", + ) + + delete_user: GenericMutationReturn = deprecated_mutation( + UsersMutations.delete_user, + "users", + ) + + update_user: UserMutationReturn = deprecated_mutation( + UsersMutations.update_user, + "users", + ) + + add_ssh_key: UserMutationReturn = deprecated_mutation( + UsersMutations.add_ssh_key, + "users", + ) + + remove_ssh_key: UserMutationReturn = deprecated_mutation( + UsersMutations.remove_ssh_key, + "users", + ) + + +@strawberry.type +class DeprecatedStorageMutations: + resize_volume: GenericMutationReturn = deprecated_mutation( + StorageMutations.resize_volume, + "storage", + ) + + mount_volume: GenericMutationReturn = deprecated_mutation( + StorageMutations.mount_volume, + "storage", + ) + + unmount_volume: GenericMutationReturn = deprecated_mutation( + StorageMutations.unmount_volume, + "storage", + ) + + migrate_to_binds: GenericJobMutationReturn = deprecated_mutation( + StorageMutations.migrate_to_binds, + "storage", + ) + + +@strawberry.type +class DeprecatedServicesMutations: + enable_service: ServiceMutationReturn = deprecated_mutation( + ServicesMutations.enable_service, + "services", + ) + + disable_service: ServiceMutationReturn = deprecated_mutation( + ServicesMutations.disable_service, + "services", + ) + + stop_service: ServiceMutationReturn = deprecated_mutation( + ServicesMutations.stop_service, + "services", + ) + + start_service: ServiceMutationReturn = deprecated_mutation( + ServicesMutations.start_service, + "services", + ) + + restart_service: ServiceMutationReturn = deprecated_mutation( + ServicesMutations.restart_service, + "services", + ) + + move_service: ServiceMutationReturn = deprecated_mutation( + ServicesMutations.move_service, + "services", + ) + + +@strawberry.type +class DeprecatedJobMutations: + remove_job: GenericMutationReturn = deprecated_mutation( + JobMutations.remove_job, + "jobs", + ) diff --git a/selfprivacy_api/graphql/mutations/mutation_interface.py b/selfprivacy_api/graphql/mutations/mutation_interface.py index 33a6b02..94fde2f 100644 --- a/selfprivacy_api/graphql/mutations/mutation_interface.py +++ b/selfprivacy_api/graphql/mutations/mutation_interface.py @@ -17,5 +17,5 @@ class GenericMutationReturn(MutationReturnInterface): @strawberry.type -class GenericJobButationReturn(MutationReturnInterface): +class GenericJobMutationReturn(MutationReturnInterface): job: typing.Optional[ApiJob] = None diff --git a/selfprivacy_api/graphql/mutations/services_mutations.py b/selfprivacy_api/graphql/mutations/services_mutations.py index 38a0d7f..86cab10 100644 --- a/selfprivacy_api/graphql/mutations/services_mutations.py +++ 
b/selfprivacy_api/graphql/mutations/services_mutations.py @@ -10,7 +10,7 @@ from selfprivacy_api.graphql.common_types.service import ( service_to_graphql_service, ) from selfprivacy_api.graphql.mutations.mutation_interface import ( - GenericJobButationReturn, + GenericJobMutationReturn, GenericMutationReturn, ) @@ -34,7 +34,7 @@ class MoveServiceInput: @strawberry.type -class ServiceJobMutationReturn(GenericJobButationReturn): +class ServiceJobMutationReturn(GenericJobMutationReturn): """Service job mutation return type.""" service: typing.Optional[Service] = None diff --git a/selfprivacy_api/graphql/mutations/ssh_mutations.py b/selfprivacy_api/graphql/mutations/ssh_mutations.py deleted file mode 100644 index 60f81a8..0000000 --- a/selfprivacy_api/graphql/mutations/ssh_mutations.py +++ /dev/null @@ -1,102 +0,0 @@ -#!/usr/bin/env python3 -"""Users management module""" -# pylint: disable=too-few-public-methods - -import strawberry -from selfprivacy_api.actions.users import UserNotFound - -from selfprivacy_api.graphql import IsAuthenticated -from selfprivacy_api.actions.ssh import ( - InvalidPublicKey, - KeyAlreadyExists, - KeyNotFound, - create_ssh_key, - remove_ssh_key, -) -from selfprivacy_api.graphql.common_types.user import ( - UserMutationReturn, - get_user_by_username, -) - - -@strawberry.input -class SshMutationInput: - """Input type for ssh mutation""" - - username: str - ssh_key: str - - -@strawberry.type -class SshMutations: - """Mutations ssh""" - - @strawberry.mutation(permission_classes=[IsAuthenticated]) - def add_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: - """Add a new ssh key""" - - try: - create_ssh_key(ssh_input.username, ssh_input.ssh_key) - except KeyAlreadyExists: - return UserMutationReturn( - success=False, - message="Key already exists", - code=409, - ) - except InvalidPublicKey: - return UserMutationReturn( - success=False, - message="Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", - code=400, - ) - except UserNotFound: - return UserMutationReturn( - success=False, - message="User not found", - code=404, - ) - except Exception as e: - return UserMutationReturn( - success=False, - message=str(e), - code=500, - ) - - return UserMutationReturn( - success=True, - message="New SSH key successfully written", - code=201, - user=get_user_by_username(ssh_input.username), - ) - - @strawberry.mutation(permission_classes=[IsAuthenticated]) - def remove_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: - """Remove ssh key from user""" - - try: - remove_ssh_key(ssh_input.username, ssh_input.ssh_key) - except KeyNotFound: - return UserMutationReturn( - success=False, - message="Key not found", - code=404, - ) - except UserNotFound: - return UserMutationReturn( - success=False, - message="User not found", - code=404, - ) - except Exception as e: - return UserMutationReturn( - success=False, - message=str(e), - code=500, - ) - - return UserMutationReturn( - success=True, - message="SSH key successfully removed", - code=200, - user=get_user_by_username(ssh_input.username), - ) diff --git a/selfprivacy_api/graphql/mutations/storage_mutations.py b/selfprivacy_api/graphql/mutations/storage_mutations.py index 1b6d74e..243220b 100644 --- a/selfprivacy_api/graphql/mutations/storage_mutations.py +++ b/selfprivacy_api/graphql/mutations/storage_mutations.py @@ -4,7 +4,7 @@ from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.common_types.jobs import job_to_api_job from selfprivacy_api.utils.block_devices import BlockDevices from selfprivacy_api.graphql.mutations.mutation_interface import ( - GenericJobButationReturn, + GenericJobMutationReturn, GenericMutationReturn, ) from selfprivacy_api.jobs.migrate_to_binds import ( @@ -79,10 +79,10 @@ class StorageMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def migrate_to_binds(self, input: MigrateToBindsInput) -> GenericJobButationReturn: + def migrate_to_binds(self, input: MigrateToBindsInput) -> GenericJobMutationReturn: """Migrate to binds""" if is_bind_migrated(): - return GenericJobButationReturn( + return GenericJobMutationReturn( success=False, code=409, message="Already migrated to binds" ) job = start_bind_migration( @@ -94,7 +94,7 @@ class StorageMutations: pleroma_block_device=input.pleroma_block_device, ) ) - return GenericJobButationReturn( + return GenericJobMutationReturn( success=True, code=200, message="Migration to binds started, rebuild the system to apply changes", diff --git a/selfprivacy_api/graphql/mutations/users_mutations.py b/selfprivacy_api/graphql/mutations/users_mutations.py index 27be1d7..f7317fb 100644 --- a/selfprivacy_api/graphql/mutations/users_mutations.py +++ b/selfprivacy_api/graphql/mutations/users_mutations.py @@ -3,10 +3,18 @@ # pylint: disable=too-few-public-methods import strawberry from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.actions.users import UserNotFound from selfprivacy_api.graphql.common_types.user import ( UserMutationReturn, get_user_by_username, ) +from selfprivacy_api.actions.ssh import ( + InvalidPublicKey, + KeyAlreadyExists, + KeyNotFound, + create_ssh_key, + remove_ssh_key, +) from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, ) @@ -21,8 +29,16 @@ class UserMutationInput: password: str +@strawberry.input +class SshMutationInput: + """Input type for ssh mutation""" + + username: str + ssh_key: str + + 
@strawberry.type -class UserMutations: +class UsersMutations: """Mutations change user settings""" @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -115,3 +131,73 @@ class UserMutations: code=200, user=get_user_by_username(user.username), ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def add_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: + """Add a new ssh key""" + + try: + create_ssh_key(ssh_input.username, ssh_input.ssh_key) + except KeyAlreadyExists: + return UserMutationReturn( + success=False, + message="Key already exists", + code=409, + ) + except InvalidPublicKey: + return UserMutationReturn( + success=False, + message="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", + code=400, + ) + except UserNotFound: + return UserMutationReturn( + success=False, + message="User not found", + code=404, + ) + except Exception as e: + return UserMutationReturn( + success=False, + message=str(e), + code=500, + ) + + return UserMutationReturn( + success=True, + message="New SSH key successfully written", + code=201, + user=get_user_by_username(ssh_input.username), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def remove_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: + """Remove ssh key from user""" + + try: + remove_ssh_key(ssh_input.username, ssh_input.ssh_key) + except KeyNotFound: + return UserMutationReturn( + success=False, + message="Key not found", + code=404, + ) + except UserNotFound: + return UserMutationReturn( + success=False, + message="User not found", + code=404, + ) + except Exception as e: + return UserMutationReturn( + success=False, + message=str(e), + code=500, + ) + + return UserMutationReturn( + success=True, + message="SSH key successfully removed", + code=200, + user=get_user_by_username(ssh_input.username), + ) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 7107e20..9a6c82c 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -7,10 +7,17 @@ import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations +from selfprivacy_api.graphql.mutations.deprecated_mutations import ( + DeprecatedApiMutations, + DeprecatedJobMutations, + DeprecatedServicesMutations, + DeprecatedStorageMutations, + DeprecatedSystemMutations, + DeprecatedUsersMutations, +) from selfprivacy_api.graphql.mutations.job_mutations import JobMutations from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations -from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations @@ -23,7 +30,7 @@ from selfprivacy_api.graphql.queries.storage import Storage from selfprivacy_api.graphql.queries.system import System from selfprivacy_api.graphql.queries.backup import Backup -from selfprivacy_api.graphql.mutations.users_mutations import UserMutations +from selfprivacy_api.graphql.mutations.users_mutations import UsersMutations from selfprivacy_api.graphql.queries.users import Users from selfprivacy_api.jobs.test import test_job @@ 
-32,16 +39,16 @@ from selfprivacy_api.jobs.test import test_job class Query: """Root schema for queries""" - @strawberry.field(permission_classes=[IsAuthenticated]) - def system(self) -> System: - """System queries""" - return System() - @strawberry.field def api(self) -> Api: """API access status""" return Api() + @strawberry.field(permission_classes=[IsAuthenticated]) + def system(self) -> System: + """System queries""" + return System() + @strawberry.field(permission_classes=[IsAuthenticated]) def users(self) -> Users: """Users queries""" @@ -70,17 +77,50 @@ class Query: @strawberry.type class Mutation( - ApiMutations, - SystemMutations, - UserMutations, - SshMutations, - StorageMutations, - ServicesMutations, - JobMutations, - BackupMutations, + DeprecatedApiMutations, + DeprecatedSystemMutations, + DeprecatedUsersMutations, + DeprecatedStorageMutations, + DeprecatedServicesMutations, + DeprecatedJobMutations, ): """Root schema for mutations""" + @strawberry.field + def api(self) -> ApiMutations: + """API mutations""" + return ApiMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def system(self) -> SystemMutations: + """System mutations""" + return SystemMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def users(self) -> UsersMutations: + """Users mutations""" + return UsersMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def storage(self) -> StorageMutations: + """Storage mutations""" + return StorageMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def services(self) -> ServicesMutations: + """Services mutations""" + return ServicesMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def jobs(self) -> JobMutations: + """Jobs mutations""" + return JobMutations() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def backup(self) -> BackupMutations: + """Backup mutations""" + return BackupMutations() + @strawberry.mutation(permission_classes=[IsAuthenticated]) def test_mutation(self) -> GenericMutationReturn: """Test mutation""" @@ -105,4 +145,8 @@ class Subscription: await asyncio.sleep(0.5) -schema = strawberry.Schema(query=Query, mutation=Mutation, subscription=Subscription) +schema = strawberry.Schema( + query=Query, + mutation=Mutation, + subscription=Subscription, +) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 3eed12a..bfa315b 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -8,21 +8,24 @@ from selfprivacy_api.jobs import Jobs, JobStatus API_RELOAD_SNAPSHOTS = """ mutation TestSnapshotsReload { + backup { forceSnapshotsReload { success message code } + } } """ API_SET_AUTOBACKUP_PERIOD_MUTATION = """ mutation TestAutobackupPeriod($period: Int) { + backup { setAutobackupPeriod(period: $period) { success message code - configuration { + configuration { provider encryptionKey isInitialized @@ -31,16 +34,18 @@ mutation TestAutobackupPeriod($period: Int) { locationId } } + } } """ API_REMOVE_REPOSITORY_MUTATION = """ mutation TestRemoveRepo { + backup { removeRepository { success message code - configuration { + configuration { provider encryptionKey isInitialized @@ -49,16 +54,18 @@ mutation TestRemoveRepo { locationId } } + } } """ API_INIT_MUTATION = """ mutation TestInitRepo($input: InitializeRepositoryInput!) 
{ + backup { initializeRepository(repository: $input) { success message code - configuration { + configuration { provider encryptionKey isInitialized @@ -67,20 +74,23 @@ mutation TestInitRepo($input: InitializeRepositoryInput!) { locationId } } + } } """ API_RESTORE_MUTATION = """ mutation TestRestoreService($snapshot_id: String!) { + backup { restoreBackup(snapshotId: $snapshot_id) { success message code - job { + job { uid status } } + } } """ @@ -96,15 +106,17 @@ allSnapshots { API_BACK_UP_MUTATION = """ mutation TestBackupService($service_id: String!) { + backup { startBackup(serviceId: $service_id) { success message code - job { + job { uid status } } + } } """ @@ -225,7 +237,7 @@ def test_snapshots_empty(authorized_client, dummy_service): def test_start_backup(authorized_client, dummy_service): response = api_backup(authorized_client, dummy_service) - data = get_data(response)["startBackup"] + data = get_data(response)["backup"]["startBackup"] assert data["success"] is True job = data["job"] @@ -245,7 +257,7 @@ def test_restore(authorized_client, dummy_service): assert snap["id"] is not None response = api_restore(authorized_client, snap["id"]) - data = get_data(response)["restoreBackup"] + data = get_data(response)["backup"]["restoreBackup"] assert data["success"] is True job = data["job"] @@ -257,7 +269,7 @@ def test_reinit(authorized_client, dummy_service, tmpdir): response = api_init_without_key( authorized_client, "FILE", "", "", test_repo_path, "" ) - data = get_data(response)["initializeRepository"] + data = get_data(response)["backup"]["initializeRepository"] assert_ok(data) configuration = data["configuration"] assert configuration["provider"] == "FILE" @@ -267,7 +279,7 @@ def test_reinit(authorized_client, dummy_service, tmpdir): assert configuration["isInitialized"] is True response = api_backup(authorized_client, dummy_service) - data = get_data(response)["startBackup"] + data = get_data(response)["backup"]["startBackup"] assert data["success"] is True job = data["job"] @@ -276,7 +288,7 @@ def test_reinit(authorized_client, dummy_service, tmpdir): def test_remove(authorized_client, generic_userdata): response = api_remove(authorized_client) - data = get_data(response)["removeRepository"] + data = get_data(response)["backup"]["removeRepository"] assert_ok(data) configuration = data["configuration"] @@ -291,7 +303,7 @@ def test_remove(authorized_client, generic_userdata): def test_autobackup_period_nonzero(authorized_client): new_period = 11 response = api_set_period(authorized_client, new_period) - data = get_data(response)["setAutobackupPeriod"] + data = get_data(response)["backup"]["setAutobackupPeriod"] assert_ok(data) configuration = data["configuration"] @@ -304,7 +316,7 @@ def test_autobackup_period_zero(authorized_client): response = api_set_period(authorized_client, 11) # and now we nullify it response = api_set_period(authorized_client, new_period) - data = get_data(response)["setAutobackupPeriod"] + data = get_data(response)["backup"]["setAutobackupPeriod"] assert_ok(data) configuration = data["configuration"] @@ -316,7 +328,7 @@ def test_autobackup_period_none(authorized_client): response = api_set_period(authorized_client, 11) # and now we nullify it response = api_set_period(authorized_client, None) - data = get_data(response)["setAutobackupPeriod"] + data = get_data(response)["backup"]["setAutobackupPeriod"] assert_ok(data) configuration = data["configuration"] @@ -328,7 +340,7 @@ def test_autobackup_period_negative(authorized_client): response = 
api_set_period(authorized_client, 11) # and now we nullify it response = api_set_period(authorized_client, -12) - data = get_data(response)["setAutobackupPeriod"] + data = get_data(response)["backup"]["setAutobackupPeriod"] assert_ok(data) configuration = data["configuration"] @@ -341,7 +353,7 @@ def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service): api_remove(authorized_client) response = api_reload_snapshots(authorized_client) - data = get_data(response)["forceSnapshotsReload"] + data = get_data(response)["backup"]["forceSnapshotsReload"] assert_ok(data) snaps = api_snapshots(authorized_client) @@ -350,10 +362,10 @@ def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service): def test_reload_snapshots(authorized_client, dummy_service): response = api_backup(authorized_client, dummy_service) - data = get_data(response)["startBackup"] + data = get_data(response)["backup"]["startBackup"] response = api_reload_snapshots(authorized_client) - data = get_data(response)["forceSnapshotsReload"] + data = get_data(response)["backup"]["forceSnapshotsReload"] assert_ok(data) snaps = api_snapshots(authorized_client) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 07cf42a..cd76ef7 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -75,10 +75,12 @@ def test_graphql_tokens_info_unauthorized(client, tokens_file): DELETE_TOKEN_MUTATION = """ mutation DeleteToken($device: String!) { - deleteDeviceApiToken(device: $device) { - success - message - code + api { + deleteDeviceApiToken(device: $device) { + success + message + code + } } } """ @@ -110,9 +112,9 @@ def test_graphql_delete_token(authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteDeviceApiToken"]["success"] is True - assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 200 + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["success"] is True + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["code"] == 200 assert read_json(tokens_file) == { "tokens": [ { @@ -136,13 +138,16 @@ def test_graphql_delete_self_token(authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False - assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 400 + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["code"] == 400 assert read_json(tokens_file) == TOKENS_FILE_CONTETS -def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): +def test_graphql_delete_nonexistent_token( + authorized_client, + tokens_file, +): response = authorized_client.post( "/graphql", json={ @@ -154,19 +159,21 @@ def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False 
- assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 404 + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["api"]["deleteDeviceApiToken"]["code"] == 404 assert read_json(tokens_file) == TOKENS_FILE_CONTETS REFRESH_TOKEN_MUTATION = """ mutation RefreshToken { - refreshDeviceApiToken { - success - message - code - token + api { + refreshDeviceApiToken { + success + message + code + token + } } } """ @@ -181,19 +188,25 @@ def test_graphql_refresh_token_unauthorized(client, tokens_file): assert response.json()["data"] is None -def test_graphql_refresh_token(authorized_client, tokens_file, token_repo): +def test_graphql_refresh_token( + authorized_client, + tokens_file, + token_repo, +): response = authorized_client.post( "/graphql", json={"query": REFRESH_TOKEN_MUTATION}, ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["refreshDeviceApiToken"]["success"] is True - assert response.json()["data"]["refreshDeviceApiToken"]["message"] is not None - assert response.json()["data"]["refreshDeviceApiToken"]["code"] == 200 + assert response.json()["data"]["api"]["refreshDeviceApiToken"]["success"] is True + assert ( + response.json()["data"]["api"]["refreshDeviceApiToken"]["message"] is not None + ) + assert response.json()["data"]["api"]["refreshDeviceApiToken"]["code"] == 200 token = token_repo.get_token_by_name("test_token") assert token == Token( - token=response.json()["data"]["refreshDeviceApiToken"]["token"], + token=response.json()["data"]["api"]["refreshDeviceApiToken"]["token"], device_name="test_token", created_at=datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), ) @@ -201,17 +214,22 @@ def test_graphql_refresh_token(authorized_client, tokens_file, token_repo): NEW_DEVICE_KEY_MUTATION = """ mutation NewDeviceKey { - getNewDeviceApiKey { - success - message - code - key + api { + getNewDeviceApiKey { + success + message + code + key + } } } """ -def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): +def test_graphql_get_new_device_auth_key_unauthorized( + client, + tokens_file, +): response = client.post( "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, @@ -220,22 +238,26 @@ def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): assert response.json()["data"] is None -def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): +def test_graphql_get_new_device_auth_key( + authorized_client, + tokens_file, +): response = authorized_client.post( "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200 assert ( - response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + response.json()["data"]["api"]["getNewDeviceApiKey"]["key"].split(" ").__len__() + == 12 ) token = 
( Mnemonic(language="english") - .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["api"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == token @@ -243,20 +265,25 @@ def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): INVALIDATE_NEW_DEVICE_KEY_MUTATION = """ mutation InvalidateNewDeviceKey { - invalidateNewDeviceApiKey { - success - message - code + api { + invalidateNewDeviceApiKey { + success + message + code + } } } """ -def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): +def test_graphql_invalidate_new_device_token_unauthorized( + client, + tokens_file, +): response = client.post( "/graphql", json={ - "query": DELETE_TOKEN_MUTATION, + "query": INVALIDATE_NEW_DEVICE_KEY_MUTATION, "variables": { "device": "test_token", }, @@ -266,22 +293,26 @@ def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): assert response.json()["data"] is None -def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): +def test_graphql_get_and_delete_new_device_key( + authorized_client, + tokens_file, +): response = authorized_client.post( "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200 assert ( - response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + response.json()["data"]["api"]["getNewDeviceApiKey"]["key"].split(" ").__len__() + == 12 ) token = ( Mnemonic(language="english") - .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["api"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == token @@ -291,35 +322,46 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["invalidateNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["invalidateNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["invalidateNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["api"]["invalidateNewDeviceApiKey"]["success"] is True + ) + assert ( + response.json()["data"]["api"]["invalidateNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["invalidateNewDeviceApiKey"]["code"] == 200 assert read_json(tokens_file) == TOKENS_FILE_CONTETS AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """ mutation AuthorizeWithNewDeviceKey($input: UseNewDeviceKeyInput!) 
{ - authorizeWithNewDeviceApiKey(input: $input) { - success - message - code - token + api { + authorizeWithNewDeviceApiKey(input: $input) { + success + message + code + token + } } } """ -def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_file): +def test_graphql_get_and_authorize_new_device( + client, + authorized_client, + tokens_file, +): response = authorized_client.post( "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 - mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["api"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key @@ -337,17 +379,24 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"] + is True ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 - token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] + assert ( + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + token = response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["token"] assert read_json(tokens_file)["tokens"][2]["token"] == token assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" -def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): +def test_graphql_authorize_new_device_with_invalid_key( + client, + tokens_file, +): response = client.post( "/graphql", json={ @@ -362,25 +411,33 @@ def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"] + is False ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert ( + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file) == TOKENS_FILE_CONTETS -def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_file): +def test_graphql_get_and_authorize_used_key( + client, + authorized_client, + tokens_file, +): response = authorized_client.post( "/graphql", 
json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 - mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["api"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key @@ -398,14 +455,18 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"] + is True ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 200 assert ( read_json(tokens_file)["tokens"][2]["token"] - == response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] + == response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["token"] ) assert read_json(tokens_file)["tokens"][2]["name"] == "new_token" @@ -415,7 +476,7 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, "variables": { "input": { - "key": mnemonic_key, + "key": NEW_DEVICE_KEY_MUTATION, "deviceName": "test_token2", } }, @@ -423,16 +484,22 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"] + is False ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert ( + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file)["tokens"].__len__() == 3 def test_graphql_get_and_authorize_key_after_12_minutes( - client, authorized_client, tokens_file + client, + authorized_client, + tokens_file, ): response = authorized_client.post( "/graphql", @@ -440,15 +507,16 @@ def test_graphql_get_and_authorize_key_after_12_minutes( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert 
response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200 assert ( - response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + response.json()["data"]["api"]["getNewDeviceApiKey"]["key"].split(" ").__len__() + == 12 ) key = ( Mnemonic(language="english") - .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["api"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == key @@ -473,14 +541,21 @@ def test_graphql_get_and_authorize_key_after_12_minutes( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"] + is False ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert ( + response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"] + is not None + ) + assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 404 -def test_graphql_authorize_without_token(client, tokens_file): +def test_graphql_authorize_without_token( + client, + tokens_file, +): response = client.post( "/graphql", json={ diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index c5e229e..87df666 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -57,22 +57,26 @@ def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_ API_RECOVERY_KEY_GENERATE_MUTATION = """ mutation TestGenerateRecoveryKey($limits: RecoveryKeyLimitsInput) { - getNewRecoveryApiKey(limits: $limits) { - success - message - code - key + api { + getNewRecoveryApiKey(limits: $limits) { + success + message + code + key + } } } """ API_RECOVERY_KEY_USE_MUTATION = """ mutation TestUseRecoveryKey($input: UseRecoveryKeyInput!) 
{ - useRecoveryApiKey(input: $input) { - success - message - code - token + api { + useRecoveryApiKey(input: $input) { + success + message + code + token + } } } """ @@ -87,18 +91,20 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is not None assert ( - response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] + .split(" ") + .__len__() == 18 ) assert read_json(tokens_file)["recovery_token"] is not None time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None - key = response.json()["data"]["getNewRecoveryApiKey"]["key"] + key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] assert ( datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) @@ -136,12 +142,12 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["api"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] ) assert read_json(tokens_file)["tokens"][2]["name"] == "new_test_token" @@ -161,12 +167,12 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["api"]["useRecoveryApiKey"]["token"] 
== read_json(tokens_file)["tokens"][3]["token"] ) assert read_json(tokens_file)["tokens"][3]["name"] == "new_test_token2" @@ -190,17 +196,19 @@ def test_graphql_generate_recovery_key_with_expiration_date( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is not None assert ( - response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] + .split(" ") + .__len__() == 18 ) assert read_json(tokens_file)["recovery_token"] is not None - key = response.json()["data"]["getNewRecoveryApiKey"]["key"] + key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str assert read_json(tokens_file)["recovery_token"]["token"] == mnemonic_to_hex(key) @@ -246,12 +254,12 @@ def test_graphql_generate_recovery_key_with_expiration_date( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["api"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] ) @@ -270,12 +278,12 @@ def test_graphql_generate_recovery_key_with_expiration_date( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["api"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] ) @@ -299,10 +307,10 @@ def test_graphql_generate_recovery_key_with_expiration_date( ) assert response.status_code == 200 assert response.json().get("data") is not 
None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is False - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is False + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is None assert read_json(tokens_file)["tokens"] == new_data["tokens"] @@ -345,10 +353,10 @@ def test_graphql_generate_recovery_key_with_expiration_in_the_past( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None assert "recovery_token" not in read_json(tokens_file) @@ -393,12 +401,12 @@ def test_graphql_generate_recovery_key_with_limited_uses( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is not None - mnemonic_key = response.json()["data"]["getNewRecoveryApiKey"]["key"] + mnemonic_key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] key = mnemonic_to_hex(mnemonic_key) assert read_json(tokens_file)["recovery_token"]["token"] == key @@ -433,10 +441,10 @@ def test_graphql_generate_recovery_key_with_limited_uses( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None # Try to get token status response = authorized_client.post( @@ -467,10 +475,10 @@ def test_graphql_generate_recovery_key_with_limited_uses( ) assert response.status_code == 200 assert 
response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None # Try to get token status response = authorized_client.post( @@ -501,10 +509,10 @@ def test_graphql_generate_recovery_key_with_limited_uses( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is False - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is False + assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is None def test_graphql_generate_recovery_key_with_negative_uses( @@ -524,10 +532,10 @@ def test_graphql_generate_recovery_key_with_negative_uses( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): @@ -545,7 +553,7 @@ def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_ ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None diff --git a/tests/test_graphql/test_localsecret.py b/tests/test_graphql/test_localsecret.py index d4b637a..91c2e26 100644 --- a/tests/test_graphql/test_localsecret.py +++ b/tests/test_graphql/test_localsecret.py @@ -35,4 +35,4 @@ def test_local_secret_set(localsecret): assert oldsecret != newsecret 
LocalBackupSecret.set(newsecret) - assert LocalBackupSecret.get() == newsecret \ No newline at end of file + assert LocalBackupSecret.get() == newsecret diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 4831692..5f888c8 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -44,13 +44,15 @@ def some_users(mocker, datadir): API_CREATE_SSH_KEY_MUTATION = """ mutation addSshKey($sshInput: SshMutationInput!) { - addSshKey(sshInput: $sshInput) { - success - message - code - user { - username - sshKeys + users { + addSshKey(sshInput: $sshInput) { + success + message + code + user { + username + sshKeys + } } } } @@ -90,12 +92,12 @@ def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_pope assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["addSshKey"]["message"] is not None - assert response.json()["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["users"]["addSshKey"]["message"] is not None + assert response.json()["data"]["users"]["addSshKey"]["success"] is True - assert response.json()["data"]["addSshKey"]["user"]["username"] == "user1" - assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "user1" + assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-rsa KEY user1@pc", "ssh-rsa KEY test_key@pc", ] @@ -117,12 +119,12 @@ def test_graphql_add_root_ssh_key(authorized_client, some_users, mock_subprocess assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["addSshKey"]["message"] is not None - assert response.json()["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["users"]["addSshKey"]["message"] is not None + assert response.json()["data"]["users"]["addSshKey"]["success"] is True - assert response.json()["data"]["addSshKey"]["user"]["username"] == "root" - assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "root" + assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-ed25519 KEY test@pc", "ssh-rsa KEY test_key@pc", ] @@ -144,12 +146,12 @@ def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["addSshKey"]["message"] is not None - assert response.json()["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["users"]["addSshKey"]["message"] is not None + assert response.json()["data"]["users"]["addSshKey"]["success"] is True - assert response.json()["data"]["addSshKey"]["user"]["username"] == "tester" - assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "tester" + assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-rsa KEY test@pc", 
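# Existing key first, newly added key second: the expected lists in these
# addSshKey tests suggest that new keys are appended to the user's key list
# rather than replacing it (inferred from the assertions, not from the
# resolver implementation).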
"ssh-rsa KEY test_key@pc", ] @@ -171,9 +173,9 @@ def test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_ assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["addSshKey"]["code"] == 400 - assert response.json()["data"]["addSshKey"]["message"] is not None - assert response.json()["data"]["addSshKey"]["success"] is False + assert response.json()["data"]["users"]["addSshKey"]["code"] == 400 + assert response.json()["data"]["users"]["addSshKey"]["message"] is not None + assert response.json()["data"]["users"]["addSshKey"]["success"] is False def test_graphql_add_ssh_key_nonexistent_user( @@ -194,20 +196,22 @@ def test_graphql_add_ssh_key_nonexistent_user( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["addSshKey"]["code"] == 404 - assert response.json()["data"]["addSshKey"]["message"] is not None - assert response.json()["data"]["addSshKey"]["success"] is False + assert response.json()["data"]["users"]["addSshKey"]["code"] == 404 + assert response.json()["data"]["users"]["addSshKey"]["message"] is not None + assert response.json()["data"]["users"]["addSshKey"]["success"] is False API_REMOVE_SSH_KEY_MUTATION = """ mutation removeSshKey($sshInput: SshMutationInput!) { - removeSshKey(sshInput: $sshInput) { - success - message - code - user { - username - sshKeys + users { + removeSshKey(sshInput: $sshInput) { + success + message + code + user { + username + sshKeys + } } } } @@ -247,12 +251,14 @@ def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_p assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["users"]["removeSshKey"]["success"] is True - assert response.json()["data"]["removeSshKey"]["user"]["username"] == "user1" - assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert ( + response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "user1" + ) + assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_root_ssh_key( @@ -273,12 +279,14 @@ def test_graphql_remove_root_ssh_key( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["users"]["removeSshKey"]["success"] is True - assert response.json()["data"]["removeSshKey"]["user"]["username"] == "root" - assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert ( + response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "root" + ) + assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_main_ssh_key( @@ -299,12 +307,14 @@ def test_graphql_remove_main_ssh_key( assert 
response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["users"]["removeSshKey"]["success"] is True - assert response.json()["data"]["removeSshKey"]["user"]["username"] == "tester" - assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert ( + response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "tester" + ) + assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_nonexistent_ssh_key( @@ -325,9 +335,9 @@ def test_graphql_remove_nonexistent_ssh_key( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["removeSshKey"]["code"] == 404 - assert response.json()["data"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["removeSshKey"]["success"] is False + assert response.json()["data"]["users"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["users"]["removeSshKey"]["success"] is False def test_graphql_remove_ssh_key_nonexistent_user( @@ -348,6 +358,6 @@ def test_graphql_remove_ssh_key_nonexistent_user( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["removeSshKey"]["code"] == 404 - assert response.json()["data"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["removeSshKey"]["success"] is False + assert response.json()["data"]["users"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["users"]["removeSshKey"]["success"] is False diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index a021a16..3de4816 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -382,11 +382,13 @@ def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): API_CHANGE_TIMEZONE_MUTATION = """ mutation changeTimezone($timezone: String!) 
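# This patch nests every mutation under a namespace (system, users, api),
# so a client now sends, with illustrative values:
#
#   mutation {
#     system {
#       changeTimezone(timezone: "Europe/Helsinki") {
#         success
#         message
#         code
#         timezone
#       }
#     }
#   }
#
# and reads the result from data.system.changeTimezone rather than the old
# top-level data.changeTimezone.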
{ - changeTimezone(timezone: $timezone) { - success - message - code - timezone + system { + changeTimezone(timezone: $timezone) { + success + message + code + timezone + } } } """ @@ -420,10 +422,13 @@ def test_graphql_change_timezone(authorized_client, turned_on): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeTimezone"]["success"] is True - assert response.json()["data"]["changeTimezone"]["message"] is not None - assert response.json()["data"]["changeTimezone"]["code"] == 200 - assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert response.json()["data"]["system"]["changeTimezone"]["success"] is True + assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["system"]["changeTimezone"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeTimezone"]["timezone"] + == "Europe/Helsinki" + ) assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Helsinki" @@ -440,10 +445,13 @@ def test_graphql_change_timezone_on_undefined(authorized_client, undefined_confi ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeTimezone"]["success"] is True - assert response.json()["data"]["changeTimezone"]["message"] is not None - assert response.json()["data"]["changeTimezone"]["code"] == 200 - assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert response.json()["data"]["system"]["changeTimezone"]["success"] is True + assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["system"]["changeTimezone"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeTimezone"]["timezone"] + == "Europe/Helsinki" + ) assert ( read_json(undefined_config / "undefined.json")["timezone"] == "Europe/Helsinki" ) @@ -462,10 +470,10 @@ def test_graphql_change_timezone_without_timezone(authorized_client, turned_on): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeTimezone"]["success"] is False - assert response.json()["data"]["changeTimezone"]["message"] is not None - assert response.json()["data"]["changeTimezone"]["code"] == 400 - assert response.json()["data"]["changeTimezone"]["timezone"] is None + assert response.json()["data"]["system"]["changeTimezone"]["success"] is False + assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["system"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["system"]["changeTimezone"]["timezone"] is None assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -482,10 +490,10 @@ def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeTimezone"]["success"] is False - assert response.json()["data"]["changeTimezone"]["message"] is not None - assert response.json()["data"]["changeTimezone"]["code"] == 400 - assert response.json()["data"]["changeTimezone"]["timezone"] is None + assert response.json()["data"]["system"]["changeTimezone"]["success"] is False + assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None + assert 
response.json()["data"]["system"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["system"]["changeTimezone"]["timezone"] is None assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -589,12 +597,14 @@ def test_graphql_get_auto_upgrade_turned_off(authorized_client, turned_off): API_CHANGE_AUTO_UPGRADE_SETTINGS = """ mutation changeServerSettings($settings: AutoUpgradeSettingsInput!) { - changeAutoUpgradeSettings(settings: $settings) { - success - message - code - enableAutoUpgrade - allowReboot + system { + changeAutoUpgradeSettings(settings: $settings) { + success + message + code + enableAutoUpgrade + allowReboot + } } } """ @@ -634,14 +644,25 @@ def test_graphql_change_auto_upgrade(authorized_client, turned_on): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] is False ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] + is True + ) assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["enable"] is False assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["allowReboot"] is True @@ -662,14 +683,25 @@ def test_graphql_change_auto_upgrade_on_undefined(authorized_client, undefined_c ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] is False ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] + is True + ) assert ( read_json(undefined_config / "undefined.json")["autoUpgrade"]["enable"] is False ) @@ -695,14 +727,25 @@ def test_graphql_change_auto_upgrade_without_vlaues(authorized_client, no_values ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 
200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] is True ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(no_values / "no_values.json")["autoUpgrade"]["enable"] is True assert read_json(no_values / "no_values.json")["autoUpgrade"]["allowReboot"] is True @@ -723,14 +766,25 @@ def test_graphql_change_auto_upgrade_turned_off(authorized_client, turned_off): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] is True ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True @@ -752,14 +806,25 @@ def test_grphql_change_auto_upgrade_without_enable(authorized_client, turned_off ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] is False ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] + is True + ) assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True @@ -783,14 +848,25 @@ def test_graphql_change_auto_upgrade_without_allow_reboot( ) assert response.status_code == 200 assert response.json().get("data") is not None - 
assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] is True ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] + is False + ) assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False @@ -810,14 +886,25 @@ def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["success"] + is True + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["message"] + is not None + ) + assert response.json()["data"]["system"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"][ + "enableAutoUpgrade" + ] + is False + ) + assert ( + response.json()["data"]["system"]["changeAutoUpgradeSettings"]["allowReboot"] is False ) - assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False @@ -826,10 +913,12 @@ def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ API_PULL_SYSTEM_CONFIGURATION_MUTATION = """ mutation testPullSystemConfiguration { - pullRepositoryChanges { - success - message - code + system { + pullRepositoryChanges { + success + message + code + } } } """ @@ -861,9 +950,12 @@ def test_graphql_pull_system_configuration( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["pullRepositoryChanges"]["success"] is True - assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None - assert response.json()["data"]["pullRepositoryChanges"]["code"] == 200 + assert response.json()["data"]["system"]["pullRepositoryChanges"]["success"] is True + assert ( + response.json()["data"]["system"]["pullRepositoryChanges"]["message"] + is not None + ) + assert response.json()["data"]["system"]["pullRepositoryChanges"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == ["git", "pull"] @@ -886,9 +978,14 @@ def test_graphql_pull_system_broken_repo( assert response.status_code == 200 assert 
response.json().get("data") is not None - assert response.json()["data"]["pullRepositoryChanges"]["success"] is False - assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None - assert response.json()["data"]["pullRepositoryChanges"]["code"] == 500 + assert ( + response.json()["data"]["system"]["pullRepositoryChanges"]["success"] is False + ) + assert ( + response.json()["data"]["system"]["pullRepositoryChanges"]["message"] + is not None + ) + assert response.json()["data"]["system"]["pullRepositoryChanges"]["code"] == 500 assert mock_broken_service.call_count == 1 assert mock_os_chdir.call_count == 2 diff --git a/tests/test_graphql/test_system_nixos_tasks.py b/tests/test_graphql/test_system_nixos_tasks.py index 3e823b6..b292fda 100644 --- a/tests/test_graphql/test_system_nixos_tasks.py +++ b/tests/test_graphql/test_system_nixos_tasks.py @@ -54,10 +54,12 @@ def mock_subprocess_check_output(mocker): API_REBUILD_SYSTEM_MUTATION = """ mutation rebuildSystem { - runSystemRebuild { - success - message - code + system { + runSystemRebuild { + success + message + code + } } } """ @@ -86,9 +88,9 @@ def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["runSystemRebuild"]["success"] is True - assert response.json()["data"]["runSystemRebuild"]["message"] is not None - assert response.json()["data"]["runSystemRebuild"]["code"] == 200 + assert response.json()["data"]["system"]["runSystemRebuild"]["success"] is True + assert response.json()["data"]["system"]["runSystemRebuild"]["message"] is not None + assert response.json()["data"]["system"]["runSystemRebuild"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -99,10 +101,12 @@ def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): API_UPGRADE_SYSTEM_MUTATION = """ mutation upgradeSystem { - runSystemUpgrade { - success - message - code + system { + runSystemUpgrade { + success + message + code + } } } """ @@ -131,9 +135,9 @@ def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["runSystemUpgrade"]["success"] is True - assert response.json()["data"]["runSystemUpgrade"]["message"] is not None - assert response.json()["data"]["runSystemUpgrade"]["code"] == 200 + assert response.json()["data"]["system"]["runSystemUpgrade"]["success"] is True + assert response.json()["data"]["system"]["runSystemUpgrade"]["message"] is not None + assert response.json()["data"]["system"]["runSystemUpgrade"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -144,10 +148,12 @@ def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): API_ROLLBACK_SYSTEM_MUTATION = """ mutation rollbackSystem { - runSystemRollback { - success - message - code + system { + runSystemRollback { + success + message + code + } } } """ @@ -176,9 +182,9 @@ def test_graphql_system_rollback(authorized_client, mock_subprocess_popen): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["runSystemRollback"]["success"] is True - assert response.json()["data"]["runSystemRollback"]["message"] is not None - assert response.json()["data"]["runSystemRollback"]["code"] 
== 200 + assert response.json()["data"]["system"]["runSystemRollback"]["success"] is True + assert response.json()["data"]["system"]["runSystemRollback"]["message"] is not None + assert response.json()["data"]["system"]["runSystemRollback"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -189,10 +195,12 @@ def test_graphql_system_rollback(authorized_client, mock_subprocess_popen): API_REBOOT_SYSTEM_MUTATION = """ mutation system { - rebootSystem { - success - message - code + system { + rebootSystem { + success + message + code + } } } """ @@ -223,9 +231,9 @@ def test_graphql_reboot_system(authorized_client, mock_subprocess_popen): assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["rebootSystem"]["success"] is True - assert response.json()["data"]["rebootSystem"]["message"] is not None - assert response.json()["data"]["rebootSystem"]["code"] == 200 + assert response.json()["data"]["system"]["rebootSystem"]["success"] is True + assert response.json()["data"]["system"]["rebootSystem"]["message"] is not None + assert response.json()["data"]["system"]["rebootSystem"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == ["reboot"] diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index 7a65736..9554195 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -295,13 +295,15 @@ def test_graphql_get_nonexistent_user( API_CREATE_USERS_MUTATION = """ mutation createUser($user: UserMutationInput!) { - createUser(user: $user) { - success - message - code - user { - username - sshKeys + users { + createUser(user: $user) { + success + message + code + user { + username + sshKeys + } } } } @@ -341,12 +343,12 @@ def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 201 - assert response.json()["data"]["createUser"]["success"] is True + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 201 + assert response.json()["data"]["users"]["createUser"]["success"] is True - assert response.json()["data"]["createUser"]["user"]["username"] == "user2" - assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] + assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["users"]["createUser"]["user"]["sshKeys"] == [] def test_graphql_add_undefined_settings( @@ -367,12 +369,12 @@ def test_graphql_add_undefined_settings( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 201 - assert response.json()["data"]["createUser"]["success"] is True + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 201 + assert response.json()["data"]["users"]["createUser"]["success"] is True - assert response.json()["data"]["createUser"]["user"]["username"] == "user2" - assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] + assert 
response.json()["data"]["users"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["users"]["createUser"]["user"]["sshKeys"] == [] def test_graphql_add_without_password( @@ -393,11 +395,11 @@ def test_graphql_add_without_password( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 400 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 400 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"] is None + assert response.json()["data"]["users"]["createUser"]["user"] is None def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_popen): @@ -416,11 +418,11 @@ def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_p assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 400 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 400 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"] is None + assert response.json()["data"]["users"]["createUser"]["user"] is None @pytest.mark.parametrize("username", invalid_usernames) @@ -442,11 +444,11 @@ def test_graphql_add_system_username( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 409 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 409 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"] is None + assert response.json()["data"]["users"]["createUser"]["user"] is None def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_popen): @@ -465,13 +467,13 @@ def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_ assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 409 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 409 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"]["username"] == "user1" + assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user1" assert ( - response.json()["data"]["createUser"]["user"]["sshKeys"][0] + response.json()["data"]["users"]["createUser"]["user"]["sshKeys"][0] == "ssh-rsa KEY user1@pc" ) @@ -492,13 +494,15 @@ def 
test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_pope assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 409 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 409 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"]["username"] == "tester" assert ( - response.json()["data"]["createUser"]["user"]["sshKeys"][0] + response.json()["data"]["users"]["createUser"]["user"]["username"] == "tester" + ) + assert ( + response.json()["data"]["users"]["createUser"]["user"]["sshKeys"][0] == "ssh-rsa KEY test@pc" ) @@ -518,11 +522,11 @@ def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_ ) assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 400 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 400 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"] is None + assert response.json()["data"]["users"]["createUser"]["user"] is None @pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "^-^"]) @@ -544,19 +548,21 @@ def test_graphql_add_invalid_username( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["createUser"]["message"] is not None - assert response.json()["data"]["createUser"]["code"] == 400 - assert response.json()["data"]["createUser"]["success"] is False + assert response.json()["data"]["users"]["createUser"]["message"] is not None + assert response.json()["data"]["users"]["createUser"]["code"] == 400 + assert response.json()["data"]["users"]["createUser"]["success"] is False - assert response.json()["data"]["createUser"]["user"] is None + assert response.json()["data"]["users"]["createUser"]["user"] is None API_DELETE_USER_MUTATION = """ mutation deleteUser($username: String!) 
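# The deleteUser payload carries only the status fields (success, message,
# code) and, unlike createUser and updateUser, no user object, since there
# is no account left to describe after deletion.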
{ - deleteUser(username: $username) { - success - message - code + users { + deleteUser(username: $username) { + success + message + code + } } } """ @@ -585,9 +591,9 @@ def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_pope assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteUser"]["code"] == 200 - assert response.json()["data"]["deleteUser"]["message"] is not None - assert response.json()["data"]["deleteUser"]["success"] is True + assert response.json()["data"]["users"]["deleteUser"]["code"] == 200 + assert response.json()["data"]["users"]["deleteUser"]["message"] is not None + assert response.json()["data"]["users"]["deleteUser"]["success"] is True @pytest.mark.parametrize("username", ["", "def"]) @@ -604,9 +610,9 @@ def test_graphql_delete_nonexistent_users( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteUser"]["code"] == 404 - assert response.json()["data"]["deleteUser"]["message"] is not None - assert response.json()["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["users"]["deleteUser"]["code"] == 404 + assert response.json()["data"]["users"]["deleteUser"]["message"] is not None + assert response.json()["data"]["users"]["deleteUser"]["success"] is False @pytest.mark.parametrize("username", invalid_usernames) @@ -624,11 +630,11 @@ def test_graphql_delete_system_users( assert response.json().get("data") is not None assert ( - response.json()["data"]["deleteUser"]["code"] == 404 - or response.json()["data"]["deleteUser"]["code"] == 400 + response.json()["data"]["users"]["deleteUser"]["code"] == 404 + or response.json()["data"]["users"]["deleteUser"]["code"] == 400 ) - assert response.json()["data"]["deleteUser"]["message"] is not None - assert response.json()["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["users"]["deleteUser"]["message"] is not None + assert response.json()["data"]["users"]["deleteUser"]["success"] is False def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess_popen): @@ -642,20 +648,22 @@ def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["deleteUser"]["code"] == 400 - assert response.json()["data"]["deleteUser"]["message"] is not None - assert response.json()["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["users"]["deleteUser"]["code"] == 400 + assert response.json()["data"]["users"]["deleteUser"]["message"] is not None + assert response.json()["data"]["users"]["deleteUser"]["success"] is False API_UPDATE_USER_MUTATION = """ mutation updateUser($user: UserMutationInput!) 
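# updateUser mirrors createUser's payload shape (user { username sshKeys }).
# The tests below also assert mock_subprocess_popen.call_count == 1, which
# suggests a password update is applied through a single subprocess call
# (an inference from the mock, not from the resolver code).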
{ - updateUser(user: $user) { - success - message - code - user { - username - sshKeys + users { + updateUser(user: $user) { + success + message + code + user { + username + sshKeys + } } } } @@ -695,12 +703,12 @@ def test_graphql_update_user(authorized_client, some_users, mock_subprocess_pope assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["updateUser"]["code"] == 200 - assert response.json()["data"]["updateUser"]["message"] is not None - assert response.json()["data"]["updateUser"]["success"] is True + assert response.json()["data"]["users"]["updateUser"]["code"] == 200 + assert response.json()["data"]["users"]["updateUser"]["message"] is not None + assert response.json()["data"]["users"]["updateUser"]["success"] is True - assert response.json()["data"]["updateUser"]["user"]["username"] == "user1" - assert response.json()["data"]["updateUser"]["user"]["sshKeys"] == [ + assert response.json()["data"]["users"]["updateUser"]["user"]["username"] == "user1" + assert response.json()["data"]["users"]["updateUser"]["user"]["sshKeys"] == [ "ssh-rsa KEY user1@pc" ] assert mock_subprocess_popen.call_count == 1 @@ -724,9 +732,9 @@ def test_graphql_update_nonexistent_user( assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["updateUser"]["code"] == 404 - assert response.json()["data"]["updateUser"]["message"] is not None - assert response.json()["data"]["updateUser"]["success"] is False + assert response.json()["data"]["users"]["updateUser"]["code"] == 404 + assert response.json()["data"]["users"]["updateUser"]["message"] is not None + assert response.json()["data"]["users"]["updateUser"]["success"] is False - assert response.json()["data"]["updateUser"]["user"] is None + assert response.json()["data"]["users"]["updateUser"]["user"] is None assert mock_subprocess_popen.call_count == 1 diff --git a/tests/test_model_storage.py b/tests/test_model_storage.py index d26fabb..c9ab582 100644 --- a/tests/test_model_storage.py +++ b/tests/test_model_storage.py @@ -10,6 +10,7 @@ from selfprivacy_api.utils.redis_pool import RedisPool TEST_KEY = "model_storage" redis = RedisPool().get_connection() + @pytest.fixture() def clean_redis(): redis.delete(TEST_KEY) @@ -19,18 +20,14 @@ class DummyModel(BaseModel): name: str date: Optional[datetime] + def test_store_retrieve(): - model = DummyModel( - name= "test", - date= datetime.now() - ) + model = DummyModel(name="test", date=datetime.now()) store_model_as_hash(redis, TEST_KEY, model) - assert hash_as_model(redis, TEST_KEY, DummyModel) == model + assert hash_as_model(redis, TEST_KEY, DummyModel) == model + def test_store_retrieve_none(): - model = DummyModel( - name= "test", - date= None - ) + model = DummyModel(name="test", date=None) store_model_as_hash(redis, TEST_KEY, model) - assert hash_as_model(redis, TEST_KEY, DummyModel) == model + assert hash_as_model(redis, TEST_KEY, DummyModel) == model From 1e840f8cff527657c61fea342901114f63b7061f Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 23 Jun 2023 04:50:00 +0300 Subject: [PATCH 401/537] ci: fix killing redis-server --- .drone.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.drone.yml b/.drone.yml index 24ab5da..ffef56a 100644 --- a/.drone.yml +++ b/.drone.yml @@ -5,7 +5,7 @@ name: default steps: - name: Run Tests and Generate Coverage Report commands: - - kill $(ps aux | grep '[r]edis-server 127.0.0.1:6389' | awk '{print $2}') + - kill $(ps aux | grep 
'redis-server 127.0.0.1:6389' | awk '{print $2}') - redis-server --bind 127.0.0.1 --port 6389 >/dev/null & - coverage run -m pytest -q - coverage xml From f27a3df807d13e13363d88bcab5665ba8b7bdcd4 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 23 Jun 2023 12:40:10 +0300 Subject: [PATCH 402/537] refactor(backups): fix typing errors --- selfprivacy_api/backup/__init__.py | 179 ++++++++++++------ selfprivacy_api/backup/backuppers/__init__.py | 6 +- .../backup/backuppers/none_backupper.py | 6 +- .../backup/backuppers/restic_backupper.py | 47 +++-- selfprivacy_api/backup/jobs.py | 2 + selfprivacy_api/backup/providers/__init__.py | 26 ++- selfprivacy_api/backup/providers/backblaze.py | 9 +- .../backup/providers/local_file.py | 10 +- selfprivacy_api/backup/providers/memory.py | 9 +- selfprivacy_api/backup/providers/none.py | 13 ++ selfprivacy_api/backup/providers/provider.py | 15 +- selfprivacy_api/backup/storage.py | 13 +- .../graphql/mutations/backup_mutations.py | 45 ++++- tests/test_graphql/test_backup.py | 4 +- 14 files changed, 265 insertions(+), 119 deletions(-) create mode 100644 selfprivacy_api/backup/providers/none.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index bb17254..35b98eb 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,3 +1,4 @@ +from operator import add from typing import List, Optional from datetime import datetime, timedelta from os import statvfs @@ -9,7 +10,9 @@ from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service -from selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider @@ -33,12 +36,15 @@ DEFAULT_JSON_PROVIDER = { class Backups: """A singleton controller for backups""" - provider: AbstractBackupProvider - @staticmethod def set_localfile_repo(file_path: str): - ProviderClass = get_provider(BackupProvider.FILE) - provider = ProviderClass(login="", key="", location=file_path, repo_id="") + ProviderClass = get_provider(BackupProviderEnum.FILE) + provider = ProviderClass( + login="", + key="", + location=file_path, + repo_id="", + ) Storage.store_provider(provider) @staticmethod @@ -67,7 +73,14 @@ class Backups: @staticmethod def _service_ids_to_back_up(time: datetime) -> List[str]: services = Storage.services_with_autobackup() - return [id for id in services if Backups.is_time_to_backup_service(id, time)] + return [ + id + for id in services + if Backups.is_time_to_backup_service( + id, + time, + ) + ] @staticmethod def services_to_back_up(time: datetime) -> List[Service]: @@ -75,14 +88,17 @@ class Backups: for id in Backups._service_ids_to_back_up(time): service = get_service_by_id(id) if service is None: - raise ValueError("Cannot look up a service scheduled for backup!") + raise ValueError( + "Cannot look up a service scheduled for backup!", + ) result.append(service) return result @staticmethod def is_time_to_backup(time: datetime) -> bool: """ - Intended as a time validator for huey cron scheduler of automatic backups + Intended as a time validator for huey cron scheduler + of automatic backups """ return Backups._service_ids_to_back_up(time) != [] @@ -97,7 +113,8 @@ class Backups: last_backup = 
Storage.get_last_backup_time(service_id) if last_backup is None: - return True # queue a backup immediately if there are no previous backups + # queue a backup immediately if there are no previous backups + return True if time > last_backup + timedelta(minutes=period): return True @@ -121,7 +138,8 @@ class Backups: def set_autobackup_period_minutes(minutes: int): """ 0 and negative numbers are equivalent to disable. - Setting to a positive number may result in a backup very soon if some services are not backed up. + Setting to a positive number may result in a backup very soon + if some services are not backed up. """ if minutes <= 0: Backups.disable_all_autobackup() @@ -130,7 +148,10 @@ class Backups: @staticmethod def disable_all_autobackup(): - """disables all automatic backing up, but does not change per-service settings""" + """ + Disables all automatic backing up, + but does not change per-service settings + """ Storage.delete_backup_period() @staticmethod @@ -138,17 +159,38 @@ class Backups: return Backups.lookup_provider() @staticmethod - def set_provider(kind: str, login: str, key: str, location: str, repo_id: str = ""): - provider = Backups.construct_provider(kind, login, key, location, repo_id) + def set_provider( + kind: BackupProviderEnum, + login: str, + key: str, + location: str, + repo_id: str = "", + ): + provider = Backups.construct_provider( + kind, + login, + key, + location, + repo_id, + ) Storage.store_provider(provider) @staticmethod def construct_provider( - kind: str, login: str, key: str, location: str, repo_id: str = "" - ): - provider_class = get_provider(BackupProvider[kind]) + kind: BackupProviderEnum, + login: str, + key: str, + location: str, + repo_id: str = "", + ) -> AbstractBackupProvider: + provider_class = get_provider(kind) - return provider_class(login=login, key=key, location=location, repo_id=repo_id) + return provider_class( + login=login, + key=key, + location=location, + repo_id=repo_id, + ) @staticmethod def reset(reset_json=True): @@ -156,7 +198,8 @@ class Backups: if reset_json: try: Backups.reset_provider_json() - except FileNotFoundError: # if there is no userdata file, we do not need to reset it + except FileNotFoundError: + # if there is no userdata file, we do not need to reset it pass @staticmethod @@ -175,7 +218,7 @@ class Backups: return json_provider none_provider = Backups.construct_provider( - "NONE", login="", key="", location="" + BackupProviderEnum.NONE, login="", key="", location="" ) Storage.store_provider(none_provider) return none_provider @@ -200,15 +243,18 @@ class Backups: if provider_dict == DEFAULT_JSON_PROVIDER: return None + try: + return Backups.construct_provider( + kind=BackupProviderEnum[provider_dict["provider"]], + login=provider_dict["accountId"], + key=provider_dict["accountKey"], + location=provider_dict["bucket"], + ) + except KeyError: + return None - return Backups.construct_provider( - kind=provider_dict["provider"], - login=provider_dict["accountId"], - key=provider_dict["accountKey"], - location=provider_dict["bucket"], - ) - - def reset_provider_json() -> AbstractBackupProvider: + @staticmethod + def reset_provider_json() -> None: with WriteUserData() as user_data: if "backblaze" in user_data.keys(): del user_data["backblaze"] @@ -216,12 +262,12 @@ class Backups: user_data["backup"] = DEFAULT_JSON_PROVIDER @staticmethod - def load_provider_redis() -> AbstractBackupProvider: + def load_provider_redis() -> Optional[AbstractBackupProvider]: provider_model = Storage.load_provider() if provider_model is 
None: return None return Backups.construct_provider( - provider_model.kind, + BackupProviderEnum[provider_model.kind], provider_model.login, provider_model.key, provider_model.location, @@ -232,7 +278,7 @@ class Backups: def back_up(service: Service): """The top-level function to back up a service""" folders = service.get_folders() - repo_name = service.get_id() + tag = service.get_id() job = get_backup_job(service) if job is None: @@ -241,8 +287,11 @@ class Backups: try: service.pre_backup() - snapshot = Backups.provider().backuper.start_backup(folders, repo_name) - Backups._store_last_snapshot(repo_name, snapshot) + snapshot = Backups.provider().backuper.start_backup( + folders, + tag, + ) + Backups._store_last_snapshot(tag, snapshot) service.post_restore() except Exception as e: Jobs.update(job, status=JobStatus.ERROR) @@ -252,10 +301,7 @@ class Backups: return snapshot @staticmethod - def init_repo(service: Optional[Service] = None): - if service is not None: - repo_name = service.get_id() - + def init_repo(): Backups.provider().backuper.init() Storage.mark_as_init() @@ -274,7 +320,13 @@ class Backups: @staticmethod def get_snapshots(service: Service) -> List[Snapshot]: snapshots = Backups.get_all_snapshots() - return [snap for snap in snapshots if snap.service_name == service.get_id()] + service_id = service.get_id() + return list( + filter( + lambda snap: snap.service_name == service_id, + snapshots, + ) + ) @staticmethod def get_all_snapshots() -> List[Snapshot]: @@ -314,10 +366,12 @@ class Backups: # to be deprecated/internalized in favor of restore_snapshot() @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): - repo_name = service.get_id() folders = service.get_folders() - Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folders) + Backups.provider().backuper.restore_from_backup( + snapshot_id, + folders, + ) @staticmethod def assert_restorable(snapshot: Snapshot): @@ -327,45 +381,58 @@ class Backups: f"snapshot has a nonexistent service: {snapshot.service_name}" ) - needed_space = Backups.snapshot_restored_size(snapshot) + needed_space = Backups.service_snapshot_size(snapshot.id) available_space = Backups.space_usable_for_service(service) if needed_space > available_space: raise ValueError( - f"we only have {available_space} bytes but snapshot needs{ needed_space}" + f"we only have {available_space} bytes " + f"but snapshot needs {needed_space}" ) @staticmethod def restore_snapshot(snapshot: Snapshot): service = get_service_by_id(snapshot.service_name) + if service is None: + raise ValueError( + f"snapshot has a nonexistent service: {snapshot.service_name}" + ) + job = get_restore_job(service) if job is None: job = add_restore_job(snapshot) - Jobs.update(job, status=JobStatus.RUNNING) + Jobs.update( + job, + status=JobStatus.RUNNING, + ) try: Backups.assert_restorable(snapshot) - Backups.restore_service_from_snapshot(service, snapshot.id) + Backups.restore_service_from_snapshot( + service, + snapshot.id, + ) service.post_restore() except Exception as e: - Jobs.update(job, status=JobStatus.ERROR) + Jobs.update( + job, + status=JobStatus.ERROR, + ) raise e - Jobs.update(job, status=JobStatus.FINISHED) - - @staticmethod - def service_snapshot_size(service: Service, snapshot_id: str) -> float: - repo_name = service.get_id() - return Backups.provider().backuper.restored_size(repo_name, snapshot_id) - - @staticmethod - def snapshot_restored_size(snapshot: Snapshot) -> float: - return Backups.service_snapshot_size( - 
get_service_by_id(snapshot.service_name), snapshot.id + Jobs.update( + job, + status=JobStatus.FINISHED, ) @staticmethod - def space_usable_for_service(service: Service) -> bool: + def service_snapshot_size(snapshot_id: str) -> int: + return Backups.provider().backuper.restored_size( + snapshot_id, + ) + + @staticmethod + def space_usable_for_service(service: Service) -> int: folders = service.get_folders() if folders == []: raise ValueError("unallocated service", service.get_id()) diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index f20496d..169a502 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -26,14 +26,14 @@ class AbstractBackuper(ABC): raise NotImplementedError @abstractmethod - def init(self, repo_name): + def init(self): raise NotImplementedError @abstractmethod - def restore_from_backup(self, repo_name: str, snapshot_id: str, folders: List[str]): + def restore_from_backup(self, snapshot_id: str, folders: List[str]): """Restore a target folder using a snapshot""" raise NotImplementedError @abstractmethod - def restored_size(self, repo_name, snapshot_id) -> float: + def restored_size(self, snapshot_id: str) -> int: raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index e687323..d0f0dda 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -18,12 +18,12 @@ class NoneBackupper(AbstractBackuper): """Get all snapshots from the repo""" return [] - def init(self, repo_name): + def init(self): raise NotImplementedError - def restore_from_backup(self, repo_name: str, snapshot_id: str, folders: List[str]): + def restore_from_backup(self, snapshot_id: str, folders: List[str]): """Restore a target folder using a snapshot""" raise NotImplementedError - def restored_size(self, repo_name, snapshot_id) -> float: + def restored_size(self, snapshot_id: str) -> int: raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 7b58a2c..5a16812 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -50,7 +50,7 @@ class ResticBackuper(AbstractBackuper): def _password_command(self): return f"echo {LocalBackupSecret.get()}" - def restic_command(self, *args, branch_name: str = ""): + def restic_command(self, *args, tag: str = ""): command = [ "restic", "-o", @@ -60,11 +60,11 @@ class ResticBackuper(AbstractBackuper): "--password-command", self._password_command(), ] - if branch_name != "": + if tag != "": command.extend( [ "--tag", - branch_name, + tag, ] ) if args != []: @@ -92,10 +92,10 @@ class ResticBackuper(AbstractBackuper): universal_newlines=True, ) as handle: for line in iter(handle.stdout.readline, ""): - if not "NOTICE:" in line: + if "NOTICE:" not in line: yield line - def start_backup(self, folders: List[str], repo_name: str): + def start_backup(self, folders: List[str], tag: str): """ Start backup with restic """ @@ -107,16 +107,16 @@ class ResticBackuper(AbstractBackuper): "backup", "--json", folders, - branch_name=repo_name, + tag=tag, ) messages = [] - job = get_backup_job(get_service_by_id(repo_name)) + job = get_backup_job(get_service_by_id(tag)) try: for raw_message in ResticBackuper.output_yielder(backup_command): message = 
self.parse_message(raw_message, job) messages.append(message) - return ResticBackuper._snapshot_from_backup_messages(messages, repo_name) + return ResticBackuper._snapshot_from_backup_messages(messages, tag) except ValueError as e: raise ValueError("could not create a snapshot: ", messages) from e @@ -128,7 +128,7 @@ class ResticBackuper(AbstractBackuper): raise ValueError("no summary message in restic json output") def parse_message(self, raw_message, job=None) -> object: - message = self.parse_json_output(raw_message) + message = ResticBackuper.parse_json_output(raw_message) if message["message_type"] == "status": if job is not None: # only update status if we run under some job Jobs.update( @@ -168,12 +168,12 @@ class ResticBackuper(AbstractBackuper): with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle: output = handle.communicate()[0].decode("utf-8") - if not self.has_json(output): + if not ResticBackuper.has_json(output): return False # raise NotImplementedError("error(big): " + output) return True - def restored_size(self, repo_name, snapshot_id) -> float: + def restored_size(self, snapshot_id: str) -> int: """ Size of a snapshot """ @@ -183,15 +183,19 @@ class ResticBackuper(AbstractBackuper): "--json", ) - with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle: + with subprocess.Popen( + command, + stdout=subprocess.PIPE, + shell=False, + ) as handle: output = handle.communicate()[0].decode("utf-8") try: - parsed_output = self.parse_json_output(output) + parsed_output = ResticBackuper.parse_json_output(output) return parsed_output["total_size"] except ValueError as e: raise ValueError("cannot restore a snapshot: " + output) from e - def restore_from_backup(self, repo_name, snapshot_id, folders): + def restore_from_backup(self, snapshot_id, folders): """ Restore from backup with restic """ @@ -235,7 +239,7 @@ class ResticBackuper(AbstractBackuper): if "Is there a repository at the following location?" in output: raise ValueError("No repository! 
: " + output) try: - return self.parse_json_output(output) + return ResticBackuper.parse_json_output(output) except ValueError as e: raise ValueError("Cannot load snapshots: ") from e @@ -252,8 +256,9 @@ class ResticBackuper(AbstractBackuper): snapshots.append(snapshot) return snapshots - def parse_json_output(self, output: str) -> object: - starting_index = self.json_start(output) + @staticmethod + def parse_json_output(output: str) -> object: + starting_index = ResticBackuper.json_start(output) if starting_index == -1: raise ValueError("There is no json in the restic output : " + output) @@ -273,7 +278,8 @@ class ResticBackuper(AbstractBackuper): result_array.append(json.loads(message)) return result_array - def json_start(self, output: str) -> int: + @staticmethod + def json_start(output: str) -> int: indices = [ output.find("["), output.find("{"), @@ -284,7 +290,8 @@ class ResticBackuper(AbstractBackuper): return -1 return min(indices) - def has_json(self, output: str) -> bool: - if self.json_start(output) == -1: + @staticmethod + def has_json(output: str) -> bool: + if ResticBackuper.json_start(output) == -1: return False return True diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py index 5a9cb0d..38e9ad1 100644 --- a/selfprivacy_api/backup/jobs.py +++ b/selfprivacy_api/backup/jobs.py @@ -51,6 +51,8 @@ def add_backup_job(service: Service) -> Job: def add_restore_job(snapshot: Snapshot) -> Job: service = get_service_by_id(snapshot.service_name) + if service is None: + raise ValueError(f"no such service: {snapshot.service_name}") if is_something_queued_for(service): message = ( f"Cannot start a restore of {service.get_id()}, another operation is queued: " diff --git a/selfprivacy_api/backup/providers/__init__.py b/selfprivacy_api/backup/providers/__init__.py index bac51e5..4f8bb75 100644 --- a/selfprivacy_api/backup/providers/__init__.py +++ b/selfprivacy_api/backup/providers/__init__.py @@ -1,23 +1,29 @@ -from selfprivacy_api.graphql.queries.providers import BackupProvider +from typing import Type + +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.backup.providers.memory import InMemoryBackup from selfprivacy_api.backup.providers.local_file import LocalFileBackup +from selfprivacy_api.backup.providers.none import NoBackups -PROVIDER_MAPPING = { - BackupProvider.BACKBLAZE: Backblaze, - BackupProvider.MEMORY: InMemoryBackup, - BackupProvider.FILE: LocalFileBackup, - BackupProvider.NONE: AbstractBackupProvider, +PROVIDER_MAPPING: dict[BackupProviderEnum, Type[AbstractBackupProvider]] = { + BackupProviderEnum.BACKBLAZE: Backblaze, + BackupProviderEnum.MEMORY: InMemoryBackup, + BackupProviderEnum.FILE: LocalFileBackup, + BackupProviderEnum.NONE: NoBackups, } -def get_provider(provider_type: BackupProvider) -> AbstractBackupProvider: +def get_provider( + provider_type: BackupProviderEnum, +) -> Type[AbstractBackupProvider]: return PROVIDER_MAPPING[provider_type] def get_kind(provider: AbstractBackupProvider) -> str: - for key, value in PROVIDER_MAPPING.items(): - if isinstance(provider, value): - return key.value + """Get the kind of the provider in the form of a string""" + return provider.name.value diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index 3f2d873..b826bdd 100644 --- 
a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -1,8 +1,13 @@ from .provider import AbstractBackupProvider from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) class Backblaze(AbstractBackupProvider): - backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:") + @property + def backuper(self): + return ResticBackuper("--b2-account", "--b2-key", ":b2:") - name = "BACKBLAZE" + name = BackupProviderEnum.BACKBLAZE diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index d31417e..f4fda29 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -1,7 +1,13 @@ from .provider import AbstractBackupProvider from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) class LocalFileBackup(AbstractBackupProvider): - backuper = ResticBackuper("", "", ":local:") - name = "FILE" + @property + def backuper(self): + return ResticBackuper("", "", ":local:") + + name = BackupProviderEnum.FILE diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py index e73af51..6d1ba48 100644 --- a/selfprivacy_api/backup/providers/memory.py +++ b/selfprivacy_api/backup/providers/memory.py @@ -1,8 +1,13 @@ from .provider import AbstractBackupProvider from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) class InMemoryBackup(AbstractBackupProvider): - backuper = ResticBackuper("", "", ":memory:") + @property + def backuper(self): + return ResticBackuper("", "", ":memory:") - name = "MEMORY" + name = BackupProviderEnum.MEMORY diff --git a/selfprivacy_api/backup/providers/none.py b/selfprivacy_api/backup/providers/none.py new file mode 100644 index 0000000..8c8bfba --- /dev/null +++ b/selfprivacy_api/backup/providers/none.py @@ -0,0 +1,13 @@ +from .provider import AbstractBackupProvider +from selfprivacy_api.backup.backuppers.none_backupper import NoneBackupper +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) + + +class NoBackups(AbstractBackupProvider): + @property + def backuper(self): + return NoneBackupper() + + name = BackupProviderEnum.NONE diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index dddc53f..8ee1ced 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -1,19 +1,22 @@ """ An abstract class for BackBlaze, S3 etc. 
-It assumes that while some providers are supported via restic/rclone, others may -require different backends +It assumes that while some providers are supported via restic/rclone, others +may require different backends """ -from abc import ABC +from abc import ABC, abstractmethod from selfprivacy_api.backup.backuppers import AbstractBackuper -from selfprivacy_api.backup.backuppers.none_backupper import NoneBackupper +from selfprivacy_api.graphql.queries.providers import ( + BackupProvider as BackupProviderEnum, +) class AbstractBackupProvider(ABC): @property + @abstractmethod def backuper(self) -> AbstractBackuper: - return NoneBackupper() + raise NotImplementedError - name = "NONE" + name: BackupProviderEnum def __init__(self, login="", key="", location="", repo_id=""): self.backuper.set_creds(login, key, location) diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index bff4047..29a5462 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -5,7 +5,10 @@ from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.models.backup.provider import BackupProviderModel from selfprivacy_api.utils.redis_pool import RedisPool -from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model +from selfprivacy_api.utils.redis_model_storage import ( + store_model_as_hash, + hash_as_model, +) from selfprivacy_api.services.service import Service @@ -153,8 +156,12 @@ class Storage: ) @staticmethod - def load_provider() -> BackupProviderModel: - provider_model = hash_as_model(redis, REDIS_PROVIDER_KEY, BackupProviderModel) + def load_provider() -> Optional[BackupProviderModel]: + provider_model = hash_as_model( + redis, + REDIS_PROVIDER_KEY, + BackupProviderModel, + ) return provider_model @staticmethod diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index f2bade0..6ab3e1a 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -49,7 +49,7 @@ class BackupMutations: ) -> GenericBackupConfigReturn: """Initialize a new repository""" Backups.set_provider( - kind=repository.provider.value, + kind=repository.provider, login=repository.login, key=repository.password, location=repository.location_name, @@ -57,7 +57,10 @@ class BackupMutations: ) Backups.init_repo() return GenericBackupConfigReturn( - success=True, message="", code="200", configuration=Backup().configuration() + success=True, + message="", + code="200", + configuration=Backup().configuration(), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -65,7 +68,10 @@ class BackupMutations: """Remove repository""" Backups.reset() return GenericBackupConfigReturn( - success=True, message="", code="200", configuration=Backup().configuration() + success=True, + message="", + code="200", + configuration=Backup().configuration(), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -79,7 +85,10 @@ class BackupMutations: Backups.set_autobackup_period_minutes(0) return GenericBackupConfigReturn( - success=True, message="", code="200", configuration=Backup().configuration() + success=True, + message="", + code="200", + configuration=Backup().configuration(), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) @@ -97,36 +106,52 @@ class BackupMutations: job = add_backup_job(service) start_backup(service) - job = job_to_api_job(job) return GenericJobMutationReturn( 
success=True, code=200, message="Backup job queued", - job=job, + job=job_to_api_job(job), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def restore_backup(self, snapshot_id: str) -> GenericJobMutationReturn: """Restore backup""" snap = Backups.get_snapshot_by_id(snapshot_id) - service = get_service_by_id(snap.service_name) if snap is None: return GenericJobMutationReturn( success=False, - code=400, + code=404, message=f"No such snapshot: {snapshot_id}", job=None, ) - job = add_restore_job(snap) + service = get_service_by_id(snap.service_name) + if service is None: + return GenericJobMutationReturn( + success=False, + code=404, + message=f"nonexistent service: {snap.service_name}", + job=None, + ) + + try: + job = add_restore_job(snap) + except ValueError as e: + return GenericJobMutationReturn( + success=False, + code=400, + message=str(e), + job=None, + ) + restore_snapshot(snap) return GenericJobMutationReturn( success=True, code=200, message="restore job created", - job=job, + job=job_to_api_job(job), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 428e3dd..c1d668e 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -73,7 +73,7 @@ def dummy_service(tmpdir, backups, raw_dummy_service): assert not path.exists(repo_path) # assert not repo_path - Backups.init_repo(service) + Backups.init_repo() # register our service services.services.append(service) @@ -232,7 +232,7 @@ def test_restore(backups, dummy_service): def test_sizing(backups, dummy_service): Backups.back_up(dummy_service) snap = Backups.get_snapshots(dummy_service)[0] - size = Backups.service_snapshot_size(dummy_service, snap.id) + size = Backups.service_snapshot_size(snap.id) assert size is not None assert size > 0 From 0d622d431f1d05e66f32190717a4de359c0450e1 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 23 Jun 2023 12:57:39 +0300 Subject: [PATCH 403/537] ci: ignore the failure when trying to kill redis --- .drone.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.drone.yml b/.drone.yml index ffef56a..8d99fd5 100644 --- a/.drone.yml +++ b/.drone.yml @@ -5,7 +5,7 @@ name: default steps: - name: Run Tests and Generate Coverage Report commands: - - kill $(ps aux | grep 'redis-server 127.0.0.1:6389' | awk '{print $2}') + - kill $(ps aux | grep 'redis-server 127.0.0.1:6389' | awk '{print $2}') || true - redis-server --bind 127.0.0.1 --port 6389 >/dev/null & - coverage run -m pytest -q - coverage xml From f3dd18a830db8195aaf38f8ff72553ec32b9f3a1 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 23 Jun 2023 13:02:52 +0300 Subject: [PATCH 404/537] ci: only run on push event --- .drone.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.drone.yml b/.drone.yml index 8d99fd5..a1bd384 100644 --- a/.drone.yml +++ b/.drone.yml @@ -26,3 +26,7 @@ steps: node: server: builder + +trigger: + event: + - push From 20402728798aa36f1bbcd0faf8060cd6268d3944 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Apr 2023 13:20:41 +0000 Subject: [PATCH 405/537] fix(redis): Do not shut down redis on ctrl c see https://github.com/NixOS/nix/issues/2141 --- shell.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/shell.nix b/shell.nix index d7f08b4..b94b71d 100644 --- a/shell.nix +++ b/shell.nix @@ -35,7 +35,8 @@ pkgs.mkShell { # for example. printenv will not fetch the value of an attribute. 
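# Why the change below: Ctrl-C delivers SIGINT to the terminal's whole foreground process group, which could take the background redis-server down together with the shell (see the nix issue linked in the commit message). setsid starts redis in its own session, outside that process group, so the signal no longer reaches it; the added sleep presumably gives the pkill'ed old instance time to exit before the new one binds the port.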
export USE_REDIS_PORT=6379 pkill redis-server - redis-server --bind 127.0.0.1 --port $USE_REDIS_PORT >/dev/null & + sleep 2 + setsid redis-server --bind 127.0.0.1 --port $USE_REDIS_PORT >/dev/null 2>/dev/null & # maybe set more env-vars ''; } From 422577257302ae02c2a61b1f2ee7bd09de27e896 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 23 Jun 2023 14:36:16 +0300 Subject: [PATCH 406/537] fix(backups): Providers were not initialized correctly --- selfprivacy_api/backup/providers/backblaze.py | 4 +--- selfprivacy_api/backup/providers/local_file.py | 4 +--- selfprivacy_api/backup/providers/memory.py | 4 +--- selfprivacy_api/backup/providers/none.py | 4 +--- selfprivacy_api/backup/providers/provider.py | 5 +---- selfprivacy_api/graphql/mutations/backup_mutations.py | 6 +++--- selfprivacy_api/graphql/queries/backup.py | 2 +- 7 files changed, 9 insertions(+), 20 deletions(-) diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index b826bdd..349c5c7 100644 --- a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -6,8 +6,6 @@ from selfprivacy_api.graphql.queries.providers import ( class Backblaze(AbstractBackupProvider): - @property - def backuper(self): - return ResticBackuper("--b2-account", "--b2-key", ":b2:") + backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:") name = BackupProviderEnum.BACKBLAZE diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index f4fda29..9afc61a 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -6,8 +6,6 @@ from selfprivacy_api.graphql.queries.providers import ( class LocalFileBackup(AbstractBackupProvider): - @property - def backuper(self): - return ResticBackuper("", "", ":local:") + backuper = ResticBackuper("", "", ":local:") name = BackupProviderEnum.FILE diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py index 6d1ba48..31a4cbb 100644 --- a/selfprivacy_api/backup/providers/memory.py +++ b/selfprivacy_api/backup/providers/memory.py @@ -6,8 +6,6 @@ from selfprivacy_api.graphql.queries.providers import ( class InMemoryBackup(AbstractBackupProvider): - @property - def backuper(self): - return ResticBackuper("", "", ":memory:") + backuper = ResticBackuper("", "", ":memory:") name = BackupProviderEnum.MEMORY diff --git a/selfprivacy_api/backup/providers/none.py b/selfprivacy_api/backup/providers/none.py index 8c8bfba..f190324 100644 --- a/selfprivacy_api/backup/providers/none.py +++ b/selfprivacy_api/backup/providers/none.py @@ -6,8 +6,6 @@ from selfprivacy_api.graphql.queries.providers import ( class NoBackups(AbstractBackupProvider): - @property - def backuper(self): - return NoneBackupper() + backuper = NoneBackupper() name = BackupProviderEnum.NONE diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index 8ee1ced..3c4fc43 100644 --- a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -11,10 +11,7 @@ from selfprivacy_api.graphql.queries.providers import ( class AbstractBackupProvider(ABC): - @property - @abstractmethod - def backuper(self) -> AbstractBackuper: - raise NotImplementedError + backuper: AbstractBackuper name: BackupProviderEnum diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 
6ab3e1a..b7720c5 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -59,7 +59,7 @@ class BackupMutations: return GenericBackupConfigReturn( success=True, message="", - code="200", + code=200, configuration=Backup().configuration(), ) @@ -70,7 +70,7 @@ class BackupMutations: return GenericBackupConfigReturn( success=True, message="", - code="200", + code=200, configuration=Backup().configuration(), ) @@ -87,7 +87,7 @@ class BackupMutations: return GenericBackupConfigReturn( success=True, message="", - code="200", + code=200, configuration=Backup().configuration(), ) diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 9858543..322dab0 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -36,7 +36,7 @@ class Backup: @strawberry.field def configuration(self) -> BackupConfiguration: return BackupConfiguration( - provider=BackupProvider[Backups.provider().name], + provider=Backups.provider().name, encryption_key=LocalBackupSecret.get(), is_initialized=Backups.is_initted(), autobackup_period=Backups.autobackup_period_minutes(), From c94b4d07bfe2f7ddd10c7231fc1d85e4e44cf607 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Jun 2023 12:15:33 +0000 Subject: [PATCH 407/537] fix(tokens-repo): persistent hashing --- .../tokens/redis_tokens_repository.py | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index c72e231..8baa16d 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -3,6 +3,7 @@ Token repository using Redis as backend. 
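# Background for the hunk below: since Python 3.3, str hashes from the
# built-in hash() are salted per process (PYTHONHASHSEED), so a Redis key
# derived via str(hash(device_name)) changes on every API restart and
# previously stored tokens can no longer be looked up. Replacing it with a
# real digest makes the key stable. A minimal sketch of the idea, with
# `prefix` standing in for this module's TOKENS_PREFIX constant:
#
#     from hashlib import md5
#
#     def stable_token_key(prefix: str, device_name: str) -> str:
#         # md5 is fine here: the digest serves as a stable identifier,
#         # not a security boundary
#         return prefix + md5(device_name.encode("utf-8")).hexdigest()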
""" from typing import Optional from datetime import datetime +from hashlib import md5 from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, @@ -28,7 +29,10 @@ class RedisTokensRepository(AbstractTokensRepository): @staticmethod def token_key_for_device(device_name: str): - return TOKENS_PREFIX + str(hash(device_name)) + hash = md5() + hash.update(bytes(device_name, "utf-8")) + digest = hash.hexdigest() + return TOKENS_PREFIX + digest def get_tokens(self) -> list[Token]: """Get the tokens""" @@ -41,11 +45,20 @@ class RedisTokensRepository(AbstractTokensRepository): tokens.append(token) return tokens + def _discover_token_key(self, input_token: Token) -> str: + """brute-force searching for tokens, for robust deletion""" + redis = self.connection + token_keys = redis.keys(TOKENS_PREFIX + "*") + for key in token_keys: + token = self._token_from_hash(key) + if token == input_token: + return key + def delete_token(self, input_token: Token) -> None: """Delete the token""" redis = self.connection - key = RedisTokensRepository._token_redis_key(input_token) - if input_token not in self.get_tokens(): + key = self._discover_token_key(input_token) + if key is None: raise TokenNotFound redis.delete(key) @@ -138,7 +151,10 @@ class RedisTokensRepository(AbstractTokensRepository): return None def _token_from_hash(self, redis_key: str) -> Optional[Token]: - return self._hash_as_model(redis_key, Token) + token = self._hash_as_model(redis_key, Token) + if token is not None: + token.created_at = token.created_at.replace(tzinfo=None) + return token def _recovery_key_from_hash(self, redis_key: str) -> Optional[RecoveryKey]: return self._hash_as_model(redis_key, RecoveryKey) From 4b1594ca22b4f14b167f854a066c4d48bda53688 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 23 Jun 2023 12:04:33 +0000 Subject: [PATCH 408/537] refactoring(backups): backuper -> backupper --- selfprivacy_api/backup/__init__.py | 14 +++++----- selfprivacy_api/backup/backuppers/__init__.py | 2 +- .../backup/backuppers/none_backupper.py | 4 +-- .../backup/backuppers/restic_backupper.py | 26 +++++++++---------- selfprivacy_api/backup/providers/backblaze.py | 4 +-- .../backup/providers/local_file.py | 4 +-- selfprivacy_api/backup/providers/memory.py | 4 +-- selfprivacy_api/backup/providers/none.py | 2 +- selfprivacy_api/backup/providers/provider.py | 6 ++--- tests/test_graphql/test_backup.py | 12 ++++----- 10 files changed, 39 insertions(+), 39 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 35b98eb..eaed6f8 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -287,7 +287,7 @@ class Backups: try: service.pre_backup() - snapshot = Backups.provider().backuper.start_backup( + snapshot = Backups.provider().backupper.start_backup( folders, tag, ) @@ -302,7 +302,7 @@ class Backups: @staticmethod def init_repo(): - Backups.provider().backuper.init() + Backups.provider().backupper.init() Storage.mark_as_init() @staticmethod @@ -310,7 +310,7 @@ class Backups: if Storage.has_init_mark(): return True - initted = Backups.provider().backuper.is_initted() + initted = Backups.provider().backupper.is_initted() if initted: Storage.mark_as_init() return True @@ -336,7 +336,7 @@ class Backups: # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing? 
- upstream_snapshots = Backups.provider().backuper.get_snapshots() + upstream_snapshots = Backups.provider().backupper.get_snapshots() Backups.sync_all_snapshots() return upstream_snapshots @@ -358,7 +358,7 @@ class Backups: @staticmethod def sync_all_snapshots(): - upstream_snapshots = Backups.provider().backuper.get_snapshots() + upstream_snapshots = Backups.provider().backupper.get_snapshots() Storage.invalidate_snapshot_storage() for snapshot in upstream_snapshots: Storage.cache_snapshot(snapshot) @@ -368,7 +368,7 @@ class Backups: def restore_service_from_snapshot(service: Service, snapshot_id: str): folders = service.get_folders() - Backups.provider().backuper.restore_from_backup( + Backups.provider().backupper.restore_from_backup( snapshot_id, folders, ) @@ -427,7 +427,7 @@ class Backups: @staticmethod def service_snapshot_size(snapshot_id: str) -> int: - return Backups.provider().backuper.restored_size( + return Backups.provider().backupper.restored_size( snapshot_id, ) diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 169a502..16cde07 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -4,7 +4,7 @@ from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot -class AbstractBackuper(ABC): +class AbstractBackupper(ABC): def __init__(self): pass diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index d0f0dda..014f755 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -1,10 +1,10 @@ from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot -from selfprivacy_api.backup.backuppers import AbstractBackuper +from selfprivacy_api.backup.backuppers import AbstractBackupper -class NoneBackupper(AbstractBackuper): +class NoneBackupper(AbstractBackupper): def is_initted(self, repo_name: str = "") -> bool: return False diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 5a16812..8ec2cc5 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -6,7 +6,7 @@ from typing import List from collections.abc import Iterable from json.decoder import JSONDecodeError -from selfprivacy_api.backup.backuppers import AbstractBackuper +from selfprivacy_api.backup.backuppers import AbstractBackupper from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.backup.jobs import get_backup_job from selfprivacy_api.services import get_service_by_id @@ -15,7 +15,7 @@ from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.backup.local_secret import LocalBackupSecret -class ResticBackuper(AbstractBackuper): +class ResticBackupper(AbstractBackupper): def __init__(self, login_flag: str, key_flag: str, type: str): self.login_flag = login_flag self.key_flag = key_flag @@ -68,7 +68,7 @@ class ResticBackuper(AbstractBackuper): ] ) if args != []: - command.extend(ResticBackuper.__flatten_list(args)) + command.extend(ResticBackupper.__flatten_list(args)) return command @staticmethod @@ -77,7 +77,7 @@ class ResticBackuper(AbstractBackuper): result = [] for item in list: if isinstance(item, Iterable) and not isinstance(item, str): - result.extend(ResticBackuper.__flatten_list(item)) + 
result.extend(ResticBackupper.__flatten_list(item)) continue result.append(item) return result @@ -113,10 +113,10 @@ class ResticBackuper(AbstractBackuper): messages = [] job = get_backup_job(get_service_by_id(tag)) try: - for raw_message in ResticBackuper.output_yielder(backup_command): + for raw_message in ResticBackupper.output_yielder(backup_command): message = self.parse_message(raw_message, job) messages.append(message) - return ResticBackuper._snapshot_from_backup_messages(messages, tag) + return ResticBackupper._snapshot_from_backup_messages(messages, tag) except ValueError as e: raise ValueError("could not create a snapshot: ", messages) from e @@ -124,11 +124,11 @@ class ResticBackuper(AbstractBackuper): def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: for message in messages: if message["message_type"] == "summary": - return ResticBackuper._snapshot_from_fresh_summary(message, repo_name) + return ResticBackupper._snapshot_from_fresh_summary(message, repo_name) raise ValueError("no summary message in restic json output") def parse_message(self, raw_message, job=None) -> object: - message = ResticBackuper.parse_json_output(raw_message) + message = ResticBackupper.parse_json_output(raw_message) if message["message_type"] == "status": if job is not None: # only update status if we run under some job Jobs.update( @@ -168,7 +168,7 @@ class ResticBackuper(AbstractBackuper): with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle: output = handle.communicate()[0].decode("utf-8") - if not ResticBackuper.has_json(output): + if not ResticBackupper.has_json(output): return False # raise NotImplementedError("error(big): " + output) return True @@ -190,7 +190,7 @@ class ResticBackuper(AbstractBackuper): ) as handle: output = handle.communicate()[0].decode("utf-8") try: - parsed_output = ResticBackuper.parse_json_output(output) + parsed_output = ResticBackupper.parse_json_output(output) return parsed_output["total_size"] except ValueError as e: raise ValueError("cannot restore a snapshot: " + output) from e @@ -239,7 +239,7 @@ class ResticBackuper(AbstractBackuper): if "Is there a repository at the following location?" in output: raise ValueError("No repository! 
: " + output) try: - return ResticBackuper.parse_json_output(output) + return ResticBackupper.parse_json_output(output) except ValueError as e: raise ValueError("Cannot load snapshots: ") from e @@ -258,7 +258,7 @@ class ResticBackuper(AbstractBackuper): @staticmethod def parse_json_output(output: str) -> object: - starting_index = ResticBackuper.json_start(output) + starting_index = ResticBackupper.json_start(output) if starting_index == -1: raise ValueError("There is no json in the restic output : " + output) @@ -292,6 +292,6 @@ class ResticBackuper(AbstractBackuper): @staticmethod def has_json(output: str) -> bool: - if ResticBackuper.json_start(output) == -1: + if ResticBackupper.json_start(output) == -1: return False return True diff --git a/selfprivacy_api/backup/providers/backblaze.py b/selfprivacy_api/backup/providers/backblaze.py index 349c5c7..74f3411 100644 --- a/selfprivacy_api/backup/providers/backblaze.py +++ b/selfprivacy_api/backup/providers/backblaze.py @@ -1,11 +1,11 @@ from .provider import AbstractBackupProvider -from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) class Backblaze(AbstractBackupProvider): - backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:") + backupper = ResticBackupper("--b2-account", "--b2-key", ":b2:") name = BackupProviderEnum.BACKBLAZE diff --git a/selfprivacy_api/backup/providers/local_file.py b/selfprivacy_api/backup/providers/local_file.py index 9afc61a..af38579 100644 --- a/selfprivacy_api/backup/providers/local_file.py +++ b/selfprivacy_api/backup/providers/local_file.py @@ -1,11 +1,11 @@ from .provider import AbstractBackupProvider -from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) class LocalFileBackup(AbstractBackupProvider): - backuper = ResticBackuper("", "", ":local:") + backupper = ResticBackupper("", "", ":local:") name = BackupProviderEnum.FILE diff --git a/selfprivacy_api/backup/providers/memory.py b/selfprivacy_api/backup/providers/memory.py index 31a4cbb..18cdee5 100644 --- a/selfprivacy_api/backup/providers/memory.py +++ b/selfprivacy_api/backup/providers/memory.py @@ -1,11 +1,11 @@ from .provider import AbstractBackupProvider -from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper +from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) class InMemoryBackup(AbstractBackupProvider): - backuper = ResticBackuper("", "", ":memory:") + backupper = ResticBackupper("", "", ":memory:") name = BackupProviderEnum.MEMORY diff --git a/selfprivacy_api/backup/providers/none.py b/selfprivacy_api/backup/providers/none.py index f190324..474d0a2 100644 --- a/selfprivacy_api/backup/providers/none.py +++ b/selfprivacy_api/backup/providers/none.py @@ -6,6 +6,6 @@ from selfprivacy_api.graphql.queries.providers import ( class NoBackups(AbstractBackupProvider): - backuper = NoneBackupper() + backupper = NoneBackupper() name = BackupProviderEnum.NONE diff --git a/selfprivacy_api/backup/providers/provider.py b/selfprivacy_api/backup/providers/provider.py index 3c4fc43..077e920 100644 --- 
a/selfprivacy_api/backup/providers/provider.py +++ b/selfprivacy_api/backup/providers/provider.py @@ -4,19 +4,19 @@ It assumes that while some providers are supported via restic/rclone, others may require different backends """ from abc import ABC, abstractmethod -from selfprivacy_api.backup.backuppers import AbstractBackuper +from selfprivacy_api.backup.backuppers import AbstractBackupper from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) class AbstractBackupProvider(ABC): - backuper: AbstractBackuper + backupper: AbstractBackupper name: BackupProviderEnum def __init__(self, login="", key="", location="", repo_id=""): - self.backuper.set_creds(login, key, location) + self.backupper.set_creds(login, key, location) self.login = login self.key = key self.location = location diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index c1d668e..e3bf681 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -111,8 +111,8 @@ def test_config_load(generic_userdata): assert provider.key == "KEY" assert provider.location == "selfprivacy" - assert provider.backuper.account == "ID" - assert provider.backuper.key == "KEY" + assert provider.backupper.account == "ID" + assert provider.backupper.key == "KEY" def test_json_reset(generic_userdata): @@ -141,7 +141,7 @@ def test_select_backend(): def test_file_backend_init(file_backup): - file_backup.backuper.init() + file_backup.backupper.init() def test_backup_simple_file(raw_dummy_service, file_backup): @@ -151,7 +151,7 @@ def test_backup_simple_file(raw_dummy_service, file_backup): assert file_backup is not None name = service.get_id() - file_backup.backuper.init() + file_backup.backupper.init() def test_backup_service(dummy_service, backups): @@ -172,7 +172,7 @@ def test_backup_service(dummy_service, backups): def test_no_repo(memory_backup): with pytest.raises(ValueError): - assert memory_backup.backuper.get_snapshots() == [] + assert memory_backup.backupper.get_snapshots() == [] def test_one_snapshot(backups, dummy_service): @@ -188,7 +188,7 @@ def test_backup_returns_snapshot(backups, dummy_service): service_folders = dummy_service.get_folders() provider = Backups.provider() name = dummy_service.get_id() - snapshot = provider.backuper.start_backup(service_folders, name) + snapshot = provider.backupper.start_backup(service_folders, name) assert snapshot.id is not None assert snapshot.service_name == name From ce9b24b579c4fe35099762f22e8c838f0e752f79 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 23 Jun 2023 12:17:48 +0000 Subject: [PATCH 409/537] feature(dev_qol): mypy type checking and rope refactoring support --- shell.nix | 3 +++ 1 file changed, 3 insertions(+) diff --git a/shell.nix b/shell.nix index b94b71d..f1ce9b2 100644 --- a/shell.nix +++ b/shell.nix @@ -12,6 +12,9 @@ let mnemonic coverage pylint + rope + mypy + pylsp-mypy pydantic typing-extensions psutil From c09f2f393b2f206ab68e8f1e45d9cc0de549ecfa Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:00:42 +0000 Subject: [PATCH 410/537] refactor(backups): api readability reorg --- selfprivacy_api/backup/__init__.py | 348 +++++++++++++++-------------- 1 file changed, 183 insertions(+), 165 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index eaed6f8..2957832 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -34,125 +34,9 @@ DEFAULT_JSON_PROVIDER = { class Backups: - """A 
singleton controller for backups""" + """A stateless controller class for backups""" - @staticmethod - def set_localfile_repo(file_path: str): - ProviderClass = get_provider(BackupProviderEnum.FILE) - provider = ProviderClass( - login="", - key="", - location=file_path, - repo_id="", - ) - Storage.store_provider(provider) - - @staticmethod - def get_last_backed_up(service: Service) -> Optional[datetime]: - """Get a timezone-aware time of the last backup of a service""" - return Storage.get_last_backup_time(service.get_id()) - - @staticmethod - def get_cached_snapshots_service(service_id: str) -> List[Snapshot]: - snapshots = Storage.get_cached_snapshots() - return [snap for snap in snapshots if snap.service_name == service_id] - - @staticmethod - def sync_service_snapshots(service_id: str, snapshots: List[Snapshot]): - for snapshot in snapshots: - if snapshot.service_name == service_id: - Storage.cache_snapshot(snapshot) - for snapshot in Backups.get_cached_snapshots_service(service_id): - if snapshot.id not in [snap.id for snap in snapshots]: - Storage.delete_cached_snapshot(snapshot) - - @staticmethod - def enable_autobackup(service: Service): - Storage.set_autobackup(service) - - @staticmethod - def _service_ids_to_back_up(time: datetime) -> List[str]: - services = Storage.services_with_autobackup() - return [ - id - for id in services - if Backups.is_time_to_backup_service( - id, - time, - ) - ] - - @staticmethod - def services_to_back_up(time: datetime) -> List[Service]: - result = [] - for id in Backups._service_ids_to_back_up(time): - service = get_service_by_id(id) - if service is None: - raise ValueError( - "Cannot look up a service scheduled for backup!", - ) - result.append(service) - return result - - @staticmethod - def is_time_to_backup(time: datetime) -> bool: - """ - Intended as a time validator for huey cron scheduler - of automatic backups - """ - - return Backups._service_ids_to_back_up(time) != [] - - @staticmethod - def is_time_to_backup_service(service_id: str, time: datetime): - period = Backups.autobackup_period_minutes() - if period is None: - return False - if not Storage.is_autobackup_set(service_id): - return False - - last_backup = Storage.get_last_backup_time(service_id) - if last_backup is None: - # queue a backup immediately if there are no previous backups - return True - - if time > last_backup + timedelta(minutes=period): - return True - return False - - @staticmethod - def disable_autobackup(service: Service): - """also see disable_all_autobackup()""" - Storage.unset_autobackup(service) - - @staticmethod - def is_autobackup_enabled(service: Service) -> bool: - return Storage.is_autobackup_set(service.get_id()) - - @staticmethod - def autobackup_period_minutes() -> Optional[int]: - """None means autobackup is disabled""" - return Storage.autobackup_period_minutes() - - @staticmethod - def set_autobackup_period_minutes(minutes: int): - """ - 0 and negative numbers are equivalent to disable. - Setting to a positive number may result in a backup very soon - if some services are not backed up. 
- """ - if minutes <= 0: - Backups.disable_all_autobackup() - return - Storage.store_autobackup_period_minutes(minutes) - - @staticmethod - def disable_all_autobackup(): - """ - Disables all automatic backing up, - but does not change per-service settings - """ - Storage.delete_backup_period() +### Providers @staticmethod def provider(): @@ -175,32 +59,6 @@ class Backups: ) Storage.store_provider(provider) - @staticmethod - def construct_provider( - kind: BackupProviderEnum, - login: str, - key: str, - location: str, - repo_id: str = "", - ) -> AbstractBackupProvider: - provider_class = get_provider(kind) - - return provider_class( - login=login, - key=key, - location=location, - repo_id=repo_id, - ) - - @staticmethod - def reset(reset_json=True): - Storage.reset() - if reset_json: - try: - Backups.reset_provider_json() - except FileNotFoundError: - # if there is no userdata file, we do not need to reset it - pass @staticmethod def lookup_provider() -> AbstractBackupProvider: @@ -223,6 +81,36 @@ class Backups: Storage.store_provider(none_provider) return none_provider + @staticmethod + def construct_provider( + kind: BackupProviderEnum, + login: str, + key: str, + location: str, + repo_id: str = "", + ) -> AbstractBackupProvider: + provider_class = get_provider(kind) + + return provider_class( + login=login, + key=key, + location=location, + repo_id=repo_id, + ) + + @staticmethod + def load_provider_redis() -> Optional[AbstractBackupProvider]: + provider_model = Storage.load_provider() + if provider_model is None: + return None + return Backups.construct_provider( + BackupProviderEnum[provider_model.kind], + provider_model.login, + provider_model.key, + provider_model.location, + provider_model.repo_id, + ) + @staticmethod def load_provider_json() -> Optional[AbstractBackupProvider]: with ReadUserData() as user_data: @@ -261,18 +149,18 @@ class Backups: user_data["backup"] = DEFAULT_JSON_PROVIDER + @staticmethod - def load_provider_redis() -> Optional[AbstractBackupProvider]: - provider_model = Storage.load_provider() - if provider_model is None: - return None - return Backups.construct_provider( - BackupProviderEnum[provider_model.kind], - provider_model.login, - provider_model.key, - provider_model.location, - provider_model.repo_id, - ) + def reset(reset_json=True): + Storage.reset() + if reset_json: + try: + Backups.reset_provider_json() + except FileNotFoundError: + # if there is no userdata file, we do not need to reset it + pass + +### Backup @staticmethod def back_up(service: Service): @@ -300,6 +188,8 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) return snapshot +### Init + @staticmethod def init_repo(): Backups.provider().backupper.init() @@ -317,6 +207,8 @@ class Backups: return False +### Snapshots + @staticmethod def get_snapshots(service: Service) -> List[Snapshot]: snapshots = Backups.get_all_snapshots() @@ -363,6 +255,36 @@ class Backups: for snapshot in upstream_snapshots: Storage.cache_snapshot(snapshot) + @staticmethod + def service_snapshot_size(snapshot_id: str) -> int: + return Backups.provider().backupper.restored_size( + snapshot_id, + ) + + @staticmethod + def _store_last_snapshot(service_id: str, snapshot: Snapshot): + """What do we do with a snapshot that is just made?""" + # non-expiring timestamp of the last + Storage.store_last_timestamp(service_id, snapshot) + # expiring cache entry + Storage.cache_snapshot(snapshot) + + @staticmethod + def get_cached_snapshots_service(service_id: str) -> List[Snapshot]: + snapshots = 
Storage.get_cached_snapshots() + return [snap for snap in snapshots if snap.service_name == service_id] + + @staticmethod + def sync_service_snapshots(service_id: str, snapshots: List[Snapshot]): + for snapshot in snapshots: + if snapshot.service_name == service_id: + Storage.cache_snapshot(snapshot) + for snapshot in Backups.get_cached_snapshots_service(service_id): + if snapshot.id not in [snap.id for snap in snapshots]: + Storage.delete_cached_snapshot(snapshot) + +### Restoring + # to be deprecated/internalized in favor of restore_snapshot() @staticmethod def restore_service_from_snapshot(service: Service, snapshot_id: str): @@ -425,11 +347,101 @@ class Backups: status=JobStatus.FINISHED, ) +### Autobackup @staticmethod - def service_snapshot_size(snapshot_id: str) -> int: - return Backups.provider().backupper.restored_size( - snapshot_id, - ) + def is_autobackup_enabled(service: Service) -> bool: + return Storage.is_autobackup_set(service.get_id()) + + @staticmethod + def enable_autobackup(service: Service): + Storage.set_autobackup(service) + + @staticmethod + def disable_autobackup(service: Service): + """also see disable_all_autobackup()""" + Storage.unset_autobackup(service) + + @staticmethod + def disable_all_autobackup(): + """ + Disables all automatic backing up, + but does not change per-service settings + """ + Storage.delete_backup_period() + + @staticmethod + def autobackup_period_minutes() -> Optional[int]: + """None means autobackup is disabled""" + return Storage.autobackup_period_minutes() + + @staticmethod + def set_autobackup_period_minutes(minutes: int): + """ + 0 and negative numbers are equivalent to disable. + Setting to a positive number may result in a backup very soon + if some services are not backed up. + """ + if minutes <= 0: + Backups.disable_all_autobackup() + return + Storage.store_autobackup_period_minutes(minutes) + + @staticmethod + def is_time_to_backup(time: datetime) -> bool: + """ + Intended as a time validator for huey cron scheduler + of automatic backups + """ + + return Backups._service_ids_to_back_up(time) != [] + + @staticmethod + def services_to_back_up(time: datetime) -> List[Service]: + result = [] + for id in Backups._service_ids_to_back_up(time): + service = get_service_by_id(id) + if service is None: + raise ValueError( + "Cannot look up a service scheduled for backup!", + ) + result.append(service) + return result + + @staticmethod + def get_last_backed_up(service: Service) -> Optional[datetime]: + """Get a timezone-aware time of the last backup of a service""" + return Storage.get_last_backup_time(service.get_id()) + + @staticmethod + def is_time_to_backup_service(service_id: str, time: datetime): + period = Backups.autobackup_period_minutes() + if period is None: + return False + if not Storage.is_autobackup_set(service_id): + return False + + last_backup = Storage.get_last_backup_time(service_id) + if last_backup is None: + # queue a backup immediately if there are no previous backups + return True + + if time > last_backup + timedelta(minutes=period): + return True + return False + + @staticmethod + def _service_ids_to_back_up(time: datetime) -> List[str]: + services = Storage.services_with_autobackup() + return [ + id + for id in services + if Backups.is_time_to_backup_service( + id, + time, + ) + ] + +### Helpers @staticmethod def space_usable_for_service(service: Service) -> int: @@ -442,9 +454,15 @@ class Backups: return usable_bytes @staticmethod - def _store_last_snapshot(service_id: str, snapshot: Snapshot): - """What do we 
do with a snapshot that is just made?""" - # non-expiring timestamp of the last - Storage.store_last_timestamp(service_id, snapshot) - # expiring cache entry - Storage.cache_snapshot(snapshot) + def set_localfile_repo(file_path: str): + ProviderClass = get_provider(BackupProviderEnum.FILE) + provider = ProviderClass( + login="", + key="", + location=file_path, + repo_id="", + ) + Storage.store_provider(provider) + + + From dbac01030375c22d903ea6b81f1c983d8bed3018 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:07:47 +0000 Subject: [PATCH 411/537] refactor(backups): reorder imports --- selfprivacy_api/backup/__init__.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 2957832..06c0842 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,19 +1,21 @@ -from operator import add -from typing import List, Optional from datetime import datetime, timedelta +from operator import add from os import statvfs - -from selfprivacy_api.models.backup.snapshot import Snapshot +from typing import List, Optional from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service +from selfprivacy_api.jobs import Jobs, JobStatus + from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) +from selfprivacy_api.models.backup.snapshot import Snapshot + from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.backup.storage import Storage @@ -23,7 +25,6 @@ from selfprivacy_api.backup.jobs import ( get_restore_job, add_restore_job, ) -from selfprivacy_api.jobs import Jobs, JobStatus DEFAULT_JSON_PROVIDER = { "provider": "BACKBLAZE", From 6b0c55a78685b5f7511c92368b160f1010585903 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:11:11 +0000 Subject: [PATCH 412/537] refactor(backups): make lookup_provider not public --- selfprivacy_api/backup/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 06c0842..380c399 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -41,7 +41,7 @@ class Backups: @staticmethod def provider(): - return Backups.lookup_provider() + return Backups._lookup_provider() @staticmethod def set_provider( @@ -62,7 +62,7 @@ class Backups: @staticmethod - def lookup_provider() -> AbstractBackupProvider: + def _lookup_provider() -> AbstractBackupProvider: redis_provider = Backups.load_provider_redis() if redis_provider is not None: return redis_provider From 3684345c2dfabe7dcbdb3d7f0885423ad7629dba Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:14:15 +0000 Subject: [PATCH 413/537] refactor(backups): make construct_provider not public --- selfprivacy_api/backup/__init__.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 380c399..23f5c77 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -51,7 +51,7 @@ class Backups: location: str, repo_id: str = "", ): - provider = Backups.construct_provider( + provider = Backups._construct_provider( kind, login, key, @@ -76,14 +76,14 @@ class 
Backups: Storage.store_provider(json_provider) return json_provider - none_provider = Backups.construct_provider( + none_provider = Backups._construct_provider( BackupProviderEnum.NONE, login="", key="", location="" ) Storage.store_provider(none_provider) return none_provider @staticmethod - def construct_provider( + def _construct_provider( kind: BackupProviderEnum, login: str, key: str, @@ -104,7 +104,7 @@ class Backups: provider_model = Storage.load_provider() if provider_model is None: return None - return Backups.construct_provider( + return Backups._construct_provider( BackupProviderEnum[provider_model.kind], provider_model.login, provider_model.key, @@ -133,7 +133,7 @@ class Backups: if provider_dict == DEFAULT_JSON_PROVIDER: return None try: - return Backups.construct_provider( + return Backups._construct_provider( kind=BackupProviderEnum[provider_dict["provider"]], login=provider_dict["accountId"], key=provider_dict["accountKey"], From 3edb38262fbf37d35830edbe19abfefcce95a523 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:20:22 +0000 Subject: [PATCH 414/537] refactor(backups): make redis and json provider related lowlevels private --- selfprivacy_api/backup/__init__.py | 12 ++++++------ tests/test_graphql/test_backup.py | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 23f5c77..6439fb2 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -63,12 +63,12 @@ class Backups: @staticmethod def _lookup_provider() -> AbstractBackupProvider: - redis_provider = Backups.load_provider_redis() + redis_provider = Backups._load_provider_redis() if redis_provider is not None: return redis_provider try: - json_provider = Backups.load_provider_json() + json_provider = Backups._load_provider_json() except FileNotFoundError: json_provider = None @@ -100,7 +100,7 @@ class Backups: ) @staticmethod - def load_provider_redis() -> Optional[AbstractBackupProvider]: + def _load_provider_redis() -> Optional[AbstractBackupProvider]: provider_model = Storage.load_provider() if provider_model is None: return None @@ -113,7 +113,7 @@ class Backups: ) @staticmethod - def load_provider_json() -> Optional[AbstractBackupProvider]: + def _load_provider_json() -> Optional[AbstractBackupProvider]: with ReadUserData() as user_data: provider_dict = { "provider": "", @@ -143,7 +143,7 @@ class Backups: return None @staticmethod - def reset_provider_json() -> None: + def _reset_provider_json() -> None: with WriteUserData() as user_data: if "backblaze" in user_data.keys(): del user_data["backblaze"] @@ -156,7 +156,7 @@ class Backups: Storage.reset() if reset_json: try: - Backups.reset_provider_json() + Backups._reset_provider_json() except FileNotFoundError: # if there is no userdata file, we do not need to reset it pass diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index e3bf681..23fa685 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -475,7 +475,7 @@ def test_provider_storage(backups_backblaze): assert provider.key == "KEY" Storage.store_provider(provider) - restored_provider = Backups.load_provider_redis() + restored_provider = Backups._load_provider_redis() assert isinstance(restored_provider, Backblaze) assert restored_provider.login == "ID" assert restored_provider.key == "KEY" From 636904242009f07fc12f77a763b9293545691a6a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 
18:21:50 +0000 Subject: [PATCH 415/537] refactor(backups): move reset() to top because toplevel interface --- selfprivacy_api/backup/__init__.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 6439fb2..fff9c66 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -60,6 +60,16 @@ class Backups: ) Storage.store_provider(provider) + @staticmethod + def reset(reset_json=True): + Storage.reset() + if reset_json: + try: + Backups._reset_provider_json() + except FileNotFoundError: + # if there is no userdata file, we do not need to reset it + pass + @staticmethod def _lookup_provider() -> AbstractBackupProvider: @@ -151,16 +161,6 @@ class Backups: user_data["backup"] = DEFAULT_JSON_PROVIDER - @staticmethod - def reset(reset_json=True): - Storage.reset() - if reset_json: - try: - Backups._reset_provider_json() - except FileNotFoundError: - # if there is no userdata file, we do not need to reset it - pass - ### Backup @staticmethod From cb2273323f9f91356d71b3547467fdde1fcebde0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:30:31 +0000 Subject: [PATCH 416/537] refactor(backups): group operations together --- selfprivacy_api/backup/__init__.py | 152 ++++++++++++++--------------- 1 file changed, 76 insertions(+), 76 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index fff9c66..6878ce5 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -160,6 +160,24 @@ class Backups: user_data["backup"] = DEFAULT_JSON_PROVIDER +### Init + + @staticmethod + def init_repo(): + Backups.provider().backupper.init() + Storage.mark_as_init() + + @staticmethod + def is_initted() -> bool: + if Storage.has_init_mark(): + return True + + initted = Backups.provider().backupper.is_initted() + if initted: + Storage.mark_as_init() + return True + + return False ### Backup @@ -189,24 +207,69 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) return snapshot -### Init +### Restoring + + # to be deprecated/internalized in favor of restore_snapshot() + @staticmethod + def restore_service_from_snapshot(service: Service, snapshot_id: str): + folders = service.get_folders() + + Backups.provider().backupper.restore_from_backup( + snapshot_id, + folders, + ) @staticmethod - def init_repo(): - Backups.provider().backupper.init() - Storage.mark_as_init() + def assert_restorable(snapshot: Snapshot): + service = get_service_by_id(snapshot.service_name) + if service is None: + raise ValueError( + f"snapshot has a nonexistent service: {snapshot.service_name}" + ) + + needed_space = Backups.service_snapshot_size(snapshot.id) + available_space = Backups.space_usable_for_service(service) + if needed_space > available_space: + raise ValueError( + f"we only have {available_space} bytes " + f"but snapshot needs {needed_space}" + ) @staticmethod - def is_initted() -> bool: - if Storage.has_init_mark(): - return True + def restore_snapshot(snapshot: Snapshot): + service = get_service_by_id(snapshot.service_name) - initted = Backups.provider().backupper.is_initted() - if initted: - Storage.mark_as_init() - return True + if service is None: + raise ValueError( + f"snapshot has a nonexistent service: {snapshot.service_name}" + ) - return False + job = get_restore_job(service) + if job is None: + job = add_restore_job(snapshot) + + Jobs.update( + job, + status=JobStatus.RUNNING, + ) 
+ try: + Backups.assert_restorable(snapshot) + Backups.restore_service_from_snapshot( + service, + snapshot.id, + ) + service.post_restore() + except Exception as e: + Jobs.update( + job, + status=JobStatus.ERROR, + ) + raise e + + Jobs.update( + job, + status=JobStatus.FINISHED, + ) ### Snapshots @@ -284,71 +347,8 @@ class Backups: if snapshot.id not in [snap.id for snap in snapshots]: Storage.delete_cached_snapshot(snapshot) -### Restoring - - # to be deprecated/internalized in favor of restore_snapshot() - @staticmethod - def restore_service_from_snapshot(service: Service, snapshot_id: str): - folders = service.get_folders() - - Backups.provider().backupper.restore_from_backup( - snapshot_id, - folders, - ) - - @staticmethod - def assert_restorable(snapshot: Snapshot): - service = get_service_by_id(snapshot.service_name) - if service is None: - raise ValueError( - f"snapshot has a nonexistent service: {snapshot.service_name}" - ) - - needed_space = Backups.service_snapshot_size(snapshot.id) - available_space = Backups.space_usable_for_service(service) - if needed_space > available_space: - raise ValueError( - f"we only have {available_space} bytes " - f"but snapshot needs {needed_space}" - ) - - @staticmethod - def restore_snapshot(snapshot: Snapshot): - service = get_service_by_id(snapshot.service_name) - - if service is None: - raise ValueError( - f"snapshot has a nonexistent service: {snapshot.service_name}" - ) - - job = get_restore_job(service) - if job is None: - job = add_restore_job(snapshot) - - Jobs.update( - job, - status=JobStatus.RUNNING, - ) - try: - Backups.assert_restorable(snapshot) - Backups.restore_service_from_snapshot( - service, - snapshot.id, - ) - service.post_restore() - except Exception as e: - Jobs.update( - job, - status=JobStatus.ERROR, - ) - raise e - - Jobs.update( - job, - status=JobStatus.FINISHED, - ) - ### Autobackup + @staticmethod def is_autobackup_enabled(service: Service) -> bool: return Storage.is_autobackup_set(service.get_id()) From f2161f053262d25b2f5cc40ed3d1e4ab222b8411 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:42:26 +0000 Subject: [PATCH 417/537] refactor(backups): privatize assert_restorable and restore_snapshot_from_id --- selfprivacy_api/backup/__init__.py | 54 +++++++++++++++--------------- tests/test_graphql/test_backup.py | 2 +- 2 files changed, 28 insertions(+), 28 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 6878ce5..5fe46ef 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -209,31 +209,6 @@ class Backups: ### Restoring - # to be deprecated/internalized in favor of restore_snapshot() - @staticmethod - def restore_service_from_snapshot(service: Service, snapshot_id: str): - folders = service.get_folders() - - Backups.provider().backupper.restore_from_backup( - snapshot_id, - folders, - ) - - @staticmethod - def assert_restorable(snapshot: Snapshot): - service = get_service_by_id(snapshot.service_name) - if service is None: - raise ValueError( - f"snapshot has a nonexistent service: {snapshot.service_name}" - ) - - needed_space = Backups.service_snapshot_size(snapshot.id) - available_space = Backups.space_usable_for_service(service) - if needed_space > available_space: - raise ValueError( - f"we only have {available_space} bytes " - f"but snapshot needs {needed_space}" - ) @staticmethod def restore_snapshot(snapshot: Snapshot): @@ -253,8 +228,8 @@ class Backups: status=JobStatus.RUNNING, ) try: - 
Backups.assert_restorable(snapshot) - Backups.restore_service_from_snapshot( + Backups._assert_restorable(snapshot) + Backups._restore_service_from_snapshot( service, snapshot.id, ) @@ -271,6 +246,31 @@ class Backups: status=JobStatus.FINISHED, ) + @staticmethod + def _assert_restorable(snapshot: Snapshot): + service = get_service_by_id(snapshot.service_name) + if service is None: + raise ValueError( + f"snapshot has a nonexistent service: {snapshot.service_name}" + ) + + needed_space = Backups.service_snapshot_size(snapshot.id) + available_space = Backups.space_usable_for_service(service) + if needed_space > available_space: + raise ValueError( + f"we only have {available_space} bytes " + f"but snapshot needs {needed_space}" + ) + + @staticmethod + def _restore_service_from_snapshot(service: Service, snapshot_id: str): + folders = service.get_folders() + + Backups.provider().backupper.restore_from_backup( + snapshot_id, + folders, + ) + ### Snapshots @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 23fa685..c882372 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -222,7 +222,7 @@ def test_restore(backups, dummy_service): remove(p) assert not path.exists(p) - Backups.restore_service_from_snapshot(dummy_service, snap.id) + Backups._restore_service_from_snapshot(dummy_service, snap.id) for p, content in zip(paths_to_nuke, contents): assert path.exists(p) with open(p, "r") as file: From 16a96fe0fa91f86a4b9f18817e574f6ca4f9cb5b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 18:50:37 +0000 Subject: [PATCH 418/537] refactor(backups): delete sync_service_snapshots --- selfprivacy_api/backup/__init__.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 5fe46ef..b378ef0 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -338,15 +338,6 @@ class Backups: snapshots = Storage.get_cached_snapshots() return [snap for snap in snapshots if snap.service_name == service_id] - @staticmethod - def sync_service_snapshots(service_id: str, snapshots: List[Snapshot]): - for snapshot in snapshots: - if snapshot.service_name == service_id: - Storage.cache_snapshot(snapshot) - for snapshot in Backups.get_cached_snapshots_service(service_id): - if snapshot.id not in [snap.id for snap in snapshots]: - Storage.delete_cached_snapshot(snapshot) - ### Autobackup @staticmethod From ce55416b26b6994406f94a3311919f927d9887e8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 19:01:26 +0000 Subject: [PATCH 419/537] refactor(backups): straighten get_all_snapshots --- selfprivacy_api/backup/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b378ef0..f6d9e0b 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -292,9 +292,8 @@ class Backups: # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing? 
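Note: the TODO above exists because Storage.cache_snapshot() stores each snapshot as an expiring cache entry, so the oldest entries can quietly drop out of the cache before newer ones do. The change below sidesteps stale listings by resyncing from the provider and then serving only the cache. A hypothetical sketch of the expiring-entry pattern (the redis client, key name and TTL constant are invented for illustration):

    # each snapshot lives under its own key with a TTL, hence uneven expiry
    redis.set(f"backups:snapshots:{snap.id}", snap.json(), ex=SNAPSHOT_CACHE_TTL)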
- upstream_snapshots = Backups.provider().backupper.get_snapshots() Backups.sync_all_snapshots() - return upstream_snapshots + return Storage.get_cached_snapshots() @staticmethod def get_snapshot_by_id(id: str) -> Optional[Snapshot]: From 3551813b3449122e340439fc782cac3aafe2329d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 19:16:18 +0000 Subject: [PATCH 420/537] refactor(backups): merge sync_all_snapshots with force_snapshot_reload --- selfprivacy_api/backup/__init__.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f6d9e0b..7edba6f 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -292,7 +292,7 @@ class Backups: # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing? - Backups.sync_all_snapshots() + Backups.force_snapshot_reload() return Storage.get_cached_snapshots() @staticmethod @@ -302,17 +302,13 @@ class Backups: return snap # Possibly our cache entry got invalidated, let's try one more time - Backups.sync_all_snapshots() + Backups.force_snapshot_reload() snap = Storage.get_cached_snapshot_by_id(id) return snap @staticmethod def force_snapshot_reload(): - Backups.sync_all_snapshots() - - @staticmethod - def sync_all_snapshots(): upstream_snapshots = Backups.provider().backupper.get_snapshots() Storage.invalidate_snapshot_storage() for snapshot in upstream_snapshots: From 60049778457ff43371e1435a19313689ba1eab8f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 19:20:49 +0000 Subject: [PATCH 421/537] refactor(backups): rename force_snapshot_reload to force_snapshot_cache_reload --- selfprivacy_api/backup/__init__.py | 6 +++--- selfprivacy_api/graphql/mutations/backup_mutations.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7edba6f..b76d483 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -292,7 +292,7 @@ class Backups: # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing? 
- Backups.force_snapshot_reload() + Backups.force_snapshot_cache_reload() return Storage.get_cached_snapshots() @staticmethod @@ -302,13 +302,13 @@ class Backups: return snap # Possibly our cache entry got invalidated, let's try one more time - Backups.force_snapshot_reload() + Backups.force_snapshot_cache_reload() snap = Storage.get_cached_snapshot_by_id(id) return snap @staticmethod - def force_snapshot_reload(): + def force_snapshot_cache_reload(): upstream_snapshots = Backups.provider().backupper.get_snapshots() Storage.invalidate_snapshot_storage() for snapshot in upstream_snapshots: diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index b7720c5..adc3873 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -157,7 +157,7 @@ class BackupMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def force_snapshots_reload(self) -> GenericMutationReturn: """Force snapshots reload""" - Backups.force_snapshot_reload() + Backups.force_snapshot_cache_reload() return GenericMutationReturn( success=True, code=200, From f804c88fa69e774e7446ce86b55b5c288c4c094c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 19:29:20 +0000 Subject: [PATCH 422/537] refactor(backups): remove the by-service getting of cached snapshots --- selfprivacy_api/backup/__init__.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b76d483..dac10f2 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -328,10 +328,6 @@ class Backups: # expiring cache entry Storage.cache_snapshot(snapshot) - @staticmethod - def get_cached_snapshots_service(service_id: str) -> List[Snapshot]: - snapshots = Storage.get_cached_snapshots() - return [snap for snap in snapshots if snap.service_name == service_id] ### Autobackup From e8f1f39b180228001bd9308e2fe08e7c10245c76 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Jun 2023 19:41:18 +0000 Subject: [PATCH 423/537] refactor(backups): rename service_snapshot_size to snapshot_restored_size --- selfprivacy_api/backup/__init__.py | 4 ++-- tests/test_graphql/test_backup.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index dac10f2..f4d7ab2 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -254,7 +254,7 @@ class Backups: f"snapshot has a nonexistent service: {snapshot.service_name}" ) - needed_space = Backups.service_snapshot_size(snapshot.id) + needed_space = Backups.snapshot_restored_size(snapshot.id) available_space = Backups.space_usable_for_service(service) if needed_space > available_space: raise ValueError( @@ -315,7 +315,7 @@ class Backups: Storage.cache_snapshot(snapshot) @staticmethod - def service_snapshot_size(snapshot_id: str) -> int: + def snapshot_restored_size(snapshot_id: str) -> int: return Backups.provider().backupper.restored_size( snapshot_id, ) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index c882372..bf1be69 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -232,7 +232,7 @@ def test_restore(backups, dummy_service): def test_sizing(backups, dummy_service): Backups.back_up(dummy_service) snap = Backups.get_snapshots(dummy_service)[0] - size = 
Backups.service_snapshot_size(snap.id) + size = Backups.snapshot_restored_size(snap.id) assert size is not None assert size > 0 From a7d0f6226fcee364ebb17802993cf2bd85304633 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 28 Jun 2023 12:10:12 +0300 Subject: [PATCH 424/537] fix(backups): missing space in rclone args --- selfprivacy_api/backup/backuppers/restic_backupper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 8ec2cc5..e5d7955 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -35,7 +35,7 @@ class ResticBackupper(AbstractBackupper): return f"rclone:{self.type}{self.repo}" def rclone_args(self): - return "rclone.args=serve restic --stdio" + self.backend_rclone_args() + return "rclone.args=serve restic --stdio " + self.backend_rclone_args() def backend_rclone_args(self) -> str: acc_arg = "" From 2dd9da9a962e84179809694f9e6459c90b7e8661 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Jun 2023 09:41:03 +0000 Subject: [PATCH 425/537] fix(backups): register the correct tasks --- selfprivacy_api/task_registry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/task_registry.py b/selfprivacy_api/task_registry.py index 82eaf06..dfd329c 100644 --- a/selfprivacy_api/task_registry.py +++ b/selfprivacy_api/task_registry.py @@ -1,4 +1,4 @@ from selfprivacy_api.utils.huey import huey from selfprivacy_api.jobs.test import test_job -from selfprivacy_api.restic_controller.tasks import * +from selfprivacy_api.backup.tasks import * from selfprivacy_api.services.generic_service_mover import move_service From 1fb5e3af976665d0eb8792fddd593351def2c0c2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Jun 2023 09:48:32 +0000 Subject: [PATCH 426/537] fix(services): cleanup a stray get_location --- selfprivacy_api/services/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py index a688734..02bb1d3 100644 --- a/selfprivacy_api/services/__init__.py +++ b/selfprivacy_api/services/__init__.py @@ -42,7 +42,7 @@ def get_disabled_services() -> list[Service]: def get_services_by_location(location: str) -> list[Service]: - return [service for service in services if service.get_location() == location] + return [service for service in services if service.get_drive() == location] def get_all_required_dns_records() -> list[ServiceDnsRecord]: From 0e13e61b7318552c5ac57b6f8c59a712cedfaf5c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Jun 2023 11:45:07 +0000 Subject: [PATCH 427/537] fix(services): proper backup progress reporting --- .../backup/backuppers/restic_backupper.py | 10 +++--- tests/test_graphql/test_backup.py | 32 +++++++++++++++++-- 2 files changed, 36 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index e5d7955..ad163ea 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -127,19 +127,21 @@ class ResticBackupper(AbstractBackupper): return ResticBackupper._snapshot_from_fresh_summary(message, repo_name) raise ValueError("no summary message in restic json output") - def parse_message(self, raw_message, job=None) -> object: - message = 
ResticBackupper.parse_json_output(raw_message)
+    def parse_message(self, raw_message_line: str, job=None) -> dict:
+        message = ResticBackupper.parse_json_output(raw_message_line)
+        if not isinstance(message, dict):
+            raise ValueError("we have too many messages on one line?")
         if message["message_type"] == "status":
             if job is not None:  # only update status if we run under some job
                 Jobs.update(
                     job,
                     JobStatus.RUNNING,
-                    progress=int(message["percent_done"]),
+                    progress=int(message["percent_done"] * 100),
                 )
         return message
 
     @staticmethod
-    def _snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot:
+    def _snapshot_from_fresh_summary(message: dict, repo_name) -> Snapshot:
         return Snapshot(
             id=message["snapshot_id"],
             created_at=datetime.datetime.now(datetime.timezone.utc),
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index bf1be69..0ab2136 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -3,6 +3,7 @@ import os.path as path
 from os import makedirs
 from os import remove
 from os import listdir
+from os import urandom
 from datetime import datetime, timedelta, timezone
 
 import selfprivacy_api.services as services
@@ -259,9 +260,18 @@ def assert_job_has_run(job_type):
     assert JobStatus.RUNNING in Jobs.status_updates(job)
 
 
-def assert_job_had_progress(job_type):
+def job_progress_updates(job_type):
     job = [job for job in finished_jobs() if job.type_id == job_type][0]
-    assert len(Jobs.progress_updates(job)) > 0
+    return Jobs.progress_updates(job)
+
+
+def assert_job_had_progress(job_type):
+    assert len(job_progress_updates(job_type)) > 0
+
+
+def make_large_file(path: str, bytes: int):
+    with open(path, "wb") as file:
+        file.write(urandom(bytes))
 
 
 def test_snapshots_by_id(backups, dummy_service):
@@ -290,6 +300,24 @@ def test_backup_service_task(backups, dummy_service):
     assert_job_had_progress(job_type_id)
 
 
+def test_backup_larger_file(backups, dummy_service):
+    dir = path.join(dummy_service.get_folders()[0], "LARGEFILE")
+    mega = 2**20
+    make_large_file(dir, 10 * mega)
+
+    handle = start_backup(dummy_service)
+    handle(blocking=True)
+
+    # results will be slightly different on different machines. If someone has trouble with it on their machine, consider dropping this test.
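Note: the parse_message() change above relies on restic's JSON "status" messages reporting percent_done as a fraction in the 0..1 range, while Jobs tracks progress as an integer percentage, hence the new * 100 scaling. A minimal sketch of the assumed message shape:

    status_message = {"message_type": "status", "percent_done": 0.42}
    progress = int(status_message["percent_done"] * 100)  # 42; int(0.42) alone was always 0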
+ id = dummy_service.get_id() + job_type_id = f"services.{id}.backup" + assert_job_finished(job_type_id, count=1) + assert_job_has_run(job_type_id) + updates = job_progress_updates(job_type_id) + assert len(updates) > 3 + assert updates[1] > 10 + + def test_restore_snapshot_task(backups, dummy_service): Backups.back_up(dummy_service) snaps = Backups.get_snapshots(dummy_service) From ba9270755a6ed42d874fde9cd3a7b68870ec9999 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 28 Jun 2023 15:56:30 +0300 Subject: [PATCH 428/537] feat(jobs): return type_id of the job in graphql api --- selfprivacy_api/graphql/common_types/jobs.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/selfprivacy_api/graphql/common_types/jobs.py b/selfprivacy_api/graphql/common_types/jobs.py index 3019a70..1a644ec 100644 --- a/selfprivacy_api/graphql/common_types/jobs.py +++ b/selfprivacy_api/graphql/common_types/jobs.py @@ -12,6 +12,7 @@ class ApiJob: """Job type for GraphQL.""" uid: str + type_id: str name: str description: str status: str @@ -28,6 +29,7 @@ def job_to_api_job(job: Job) -> ApiJob: """Convert a Job from jobs controller to a GraphQL ApiJob.""" return ApiJob( uid=str(job.uid), + type_id=job.type_id, name=job.name, description=job.description, status=job.status.name, From 5ff89c21d5dc4fa792c1bd79f36c49df1feed968 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Jun 2023 13:04:57 +0000 Subject: [PATCH 429/537] test(backup): make large testfile larger --- tests/test_graphql/test_backup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 0ab2136..cc5a1eb 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -303,7 +303,7 @@ def test_backup_service_task(backups, dummy_service): def test_backup_larger_file(backups, dummy_service): dir = path.join(dummy_service.get_folders()[0], "LARGEFILE") mega = 2**20 - make_large_file(dir, 10 * mega) + make_large_file(dir, 100 * mega) handle = start_backup(dummy_service) handle(blocking=True) @@ -315,7 +315,7 @@ def test_backup_larger_file(backups, dummy_service): assert_job_has_run(job_type_id) updates = job_progress_updates(job_type_id) assert len(updates) > 3 - assert updates[1] > 10 + assert updates[int((len(updates)-1)/2.0)] > 10 def test_restore_snapshot_task(backups, dummy_service): From 559de632217811fa6c30eb2a9f4c4888cfea025c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Jun 2023 13:22:53 +0000 Subject: [PATCH 430/537] fix(jobs): make finishing the job set progress to 100 --- selfprivacy_api/jobs/__init__.py | 3 +++ tests/test_jobs.py | 23 +++++++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 5e86c5f..2551237 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -198,7 +198,10 @@ class Jobs: job.description = description if status_text is not None: job.status_text = status_text + if status == JobStatus.FINISHED: + job.progress = 100 if progress is not None: + # explicitly provided progress has priority job.progress = progress Jobs.log_progress_update(job, progress) job.status = status diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 56e4aa3..0a4271e 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -80,6 +80,29 @@ def test_jobs(jobs_with_one_job): jobsmodule.JOB_EXPIRATION_SECONDS = backup +def test_finishing_equals_100(jobs_with_one_job): + jobs = jobs_with_one_job + 
test_job = jobs.get_jobs()[0] + assert not jobs.is_busy() + assert test_job.progress != 100 + + jobs.update(job=test_job, status=JobStatus.FINISHED) + + assert test_job.progress == 100 + + +def test_finishing_equals_100_unless_stated_otherwise(jobs_with_one_job): + jobs = jobs_with_one_job + test_job = jobs.get_jobs()[0] + assert not jobs.is_busy() + assert test_job.progress != 100 + assert test_job.progress != 23 + + jobs.update(job=test_job, status=JobStatus.FINISHED, progress=23) + + assert test_job.progress == 23 + + @pytest.fixture def jobs(): j = Jobs() From 21c5f6814c34f5e95216536c2d9ed9e5183af3e0 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 29 Jun 2023 13:44:29 +0300 Subject: [PATCH 431/537] style: fix styling --- selfprivacy_api/backup/__init__.py | 20 +++++++------------- tests/test_graphql/test_backup.py | 2 +- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f4d7ab2..7a60ecb 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -37,7 +37,7 @@ DEFAULT_JSON_PROVIDER = { class Backups: """A stateless controller class for backups""" -### Providers + ### Providers @staticmethod def provider(): @@ -70,7 +70,6 @@ class Backups: # if there is no userdata file, we do not need to reset it pass - @staticmethod def _lookup_provider() -> AbstractBackupProvider: redis_provider = Backups._load_provider_redis() @@ -160,7 +159,7 @@ class Backups: user_data["backup"] = DEFAULT_JSON_PROVIDER -### Init + ### Init @staticmethod def init_repo(): @@ -179,7 +178,7 @@ class Backups: return False -### Backup + ### Backup @staticmethod def back_up(service: Service): @@ -207,8 +206,7 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) return snapshot -### Restoring - + ### Restoring @staticmethod def restore_snapshot(snapshot: Snapshot): @@ -271,7 +269,7 @@ class Backups: folders, ) -### Snapshots + ### Snapshots @staticmethod def get_snapshots(service: Service) -> List[Snapshot]: @@ -328,9 +326,8 @@ class Backups: # expiring cache entry Storage.cache_snapshot(snapshot) + ### Autobackup -### Autobackup - @staticmethod def is_autobackup_enabled(service: Service) -> bool: return Storage.is_autobackup_set(service.get_id()) @@ -424,7 +421,7 @@ class Backups: ) ] -### Helpers + ### Helpers @staticmethod def space_usable_for_service(service: Service) -> int: @@ -446,6 +443,3 @@ class Backups: repo_id="", ) Storage.store_provider(provider) - - - diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index cc5a1eb..a212ade 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -315,7 +315,7 @@ def test_backup_larger_file(backups, dummy_service): assert_job_has_run(job_type_id) updates = job_progress_updates(job_type_id) assert len(updates) > 3 - assert updates[int((len(updates)-1)/2.0)] > 10 + assert updates[int((len(updates) - 1) / 2.0)] > 10 def test_restore_snapshot_task(backups, dummy_service): From 2c21bd2a14ffadede4188f6b726aaaea7a01ec00 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 29 Jun 2023 13:45:00 +0300 Subject: [PATCH 432/537] feat(backups): expose if the service can be backed up --- selfprivacy_api/graphql/common_types/service.py | 2 ++ selfprivacy_api/services/ocserv/__init__.py | 4 ++++ selfprivacy_api/services/service.py | 4 ++++ 3 files changed, 10 insertions(+) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 
b3403e9..fd671d4 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -93,6 +93,7 @@ class Service: is_movable: bool is_required: bool is_enabled: bool + can_be_backed_up: bool status: ServiceStatusEnum url: typing.Optional[str] dns_records: typing.Optional[typing.List[DnsRecord]] @@ -124,6 +125,7 @@ def service_to_graphql_service(service: ServiceInterface) -> Service: is_movable=service.is_movable(), is_required=service.is_required(), is_enabled=service.is_enabled(), + can_be_backed_up=service.can_be_backed_up(), status=ServiceStatusEnum(service.get_status().value), url=service.get_url(), dns_records=[ diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index a15cb84..4f46692 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -44,6 +44,10 @@ class Ocserv(Service): def is_required() -> bool: return False + @staticmethod + def can_be_backed_up() -> bool: + return False + @staticmethod def is_enabled() -> bool: with ReadUserData() as user_data: diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index f804773..65337b4 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -81,6 +81,10 @@ class Service(ABC): def is_required() -> bool: pass + @staticmethod + def can_be_backed_up() -> bool: + return True + @staticmethod @abstractmethod def is_enabled() -> bool: From 2df930b9ba5c01d084f7a1f67d63c81acaba69f2 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 29 Jun 2023 14:27:08 +0300 Subject: [PATCH 433/537] feat(backups): Add backup descriptions for UI --- .../graphql/common_types/service.py | 2 + .../services/bitwarden/__init__.py | 4 ++ selfprivacy_api/services/gitea/__init__.py | 4 ++ selfprivacy_api/services/jitsi/__init__.py | 4 ++ .../services/mailserver/__init__.py | 4 ++ .../services/nextcloud/__init__.py | 4 ++ selfprivacy_api/services/ocserv/__init__.py | 4 ++ selfprivacy_api/services/pleroma/__init__.py | 4 ++ selfprivacy_api/services/service.py | 42 +++++++++++++++++++ .../services/test_service/__init__.py | 4 ++ 10 files changed, 76 insertions(+) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index fd671d4..836a3df 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -94,6 +94,7 @@ class Service: is_required: bool is_enabled: bool can_be_backed_up: bool + backup_description: str status: ServiceStatusEnum url: typing.Optional[str] dns_records: typing.Optional[typing.List[DnsRecord]] @@ -126,6 +127,7 @@ def service_to_graphql_service(service: ServiceInterface) -> Service: is_required=service.is_required(), is_enabled=service.is_enabled(), can_be_backed_up=service.can_be_backed_up(), + backup_description=service.get_backup_description(), status=ServiceStatusEnum(service.get_status().value), url=service.get_url(), dns_records=[ diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 6842af6..98455d8 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -55,6 +55,10 @@ class Bitwarden(Service): def is_required() -> bool: return False + @staticmethod + def get_backup_description() -> str: + return "Password database, encryption certificate and attachments." 
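Note: together with the can_be_backed_up flag from the previous commit, these descriptions reach clients through service_to_graphql_service(). A hypothetical client query, assuming strawberry's default snake_case-to-camelCase field conversion; the exact query shape depends on the schema's service queries:

    query = """
    {
        services {
            allServices {
                id
                canBeBackedUp
                backupDescription
            }
        }
    }
    """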
+ @staticmethod def is_enabled() -> bool: with ReadUserData() as user_data: diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index f9ff3d2..ce73dc6 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -51,6 +51,10 @@ class Gitea(Service): def is_required() -> bool: return False + @staticmethod + def get_backup_description() -> str: + return "Git repositories, database and user data." + @staticmethod def is_enabled() -> bool: with ReadUserData() as user_data: diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py index a969eb2..2b54ae1 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -54,6 +54,10 @@ class Jitsi(Service): def is_required() -> bool: return False + @staticmethod + def get_backup_description() -> str: + return "Secrets that are used to encrypt the communication." + @staticmethod def is_enabled() -> bool: with ReadUserData() as user_data: diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index b0a6e30..d3600e5 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -54,6 +54,10 @@ class MailServer(Service): def is_required() -> bool: return True + @staticmethod + def get_backup_description() -> str: + return "Mail boxes and filters." + @staticmethod def is_enabled() -> bool: return True diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index ae81403..632c5d3 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -49,6 +49,10 @@ class Nextcloud(Service): def is_required() -> bool: return False + @staticmethod + def get_backup_description() -> str: + return "All the files and other data stored in Nextcloud." + @staticmethod def is_enabled() -> bool: with ReadUserData() as user_data: diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index 4f46692..3860b19 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -48,6 +48,10 @@ class Ocserv(Service): def can_be_backed_up() -> bool: return False + @staticmethod + def get_backup_description() -> str: + return "Nothing to backup." + @staticmethod def is_enabled() -> bool: with ReadUserData() as user_data: diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index 0d5b338..bac1cda 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -46,6 +46,10 @@ class Pleroma(Service): def is_required() -> bool: return False + @staticmethod + def get_backup_description() -> str: + return "Your Pleroma accounts, posts and media." + @staticmethod def is_enabled() -> bool: with ReadUserData() as user_data: diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 65337b4..286fab7 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -41,83 +41,125 @@ class Service(ABC): @staticmethod @abstractmethod def get_id() -> str: + """ + The unique id of the service. + """ pass @staticmethod @abstractmethod def get_display_name() -> str: + """ + The name of the service that is shown to the user. 
+ """ pass @staticmethod @abstractmethod def get_description() -> str: + """ + The description of the service that is shown to the user. + """ pass @staticmethod @abstractmethod def get_svg_icon() -> str: + """ + The monochrome svg icon of the service. + """ pass @staticmethod @abstractmethod def get_url() -> typing.Optional[str]: + """ + The url of the service if it is accessible from the internet browser. + """ pass @classmethod def get_user(cls) -> typing.Optional[str]: + """ + The user that owns the service's files. + Defaults to the service's id. + """ return cls.get_id() @classmethod def get_group(cls) -> typing.Optional[str]: + """ + The group that owns the service's files. + Defaults to the service's user. + """ return cls.get_user() @staticmethod @abstractmethod def is_movable() -> bool: + """`True` if the service can be moved to the non-system volume.""" pass @staticmethod @abstractmethod def is_required() -> bool: + """`True` if the service is required for the server to function.""" pass @staticmethod def can_be_backed_up() -> bool: + """`True` if the service can be backed up.""" return True + @staticmethod + @abstractmethod + def get_backup_description() -> str: + """ + The text shown to the user that exlplains what data will be + backed up. + """ + pass + @staticmethod @abstractmethod def is_enabled() -> bool: + """`True` if the service is enabled.""" pass @staticmethod @abstractmethod def get_status() -> ServiceStatus: + """The status of the service, reported by systemd.""" pass @staticmethod @abstractmethod def enable(): + """Enable the service. Usually this means enabling systemd unit.""" pass @staticmethod @abstractmethod def disable(): + """Disable the service. Usually this means disabling systemd unit.""" pass @staticmethod @abstractmethod def stop(): + """Stop the service. Usually this means stopping systemd unit.""" pass @staticmethod @abstractmethod def start(): + """Start the service. Usually this means starting systemd unit.""" pass @staticmethod @abstractmethod def restart(): + """Restart the service. Usually this means restarting systemd unit.""" pass @staticmethod diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index b1c2924..af527a0 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -53,6 +53,10 @@ class DummyService(Service): def is_required() -> bool: return False + @staticmethod + def get_backup_description() -> str: + return "How did we get here?" 
+ @staticmethod def is_enabled() -> bool: return True From 1c28984475e1ca8290d35fdcf1456e7dac8a802c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Jul 2023 12:15:36 +0000 Subject: [PATCH 434/537] feature(backups): a wrapper for rclone sync --- .../backup/backuppers/restic_backupper.py | 15 ++++++++++ tests/test_graphql/test_backup.py | 28 ++++++++++++++++++- 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index ad163ea..826b336 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -5,6 +5,7 @@ import datetime from typing import List from collections.abc import Iterable from json.decoder import JSONDecodeError +from os.path import exists from selfprivacy_api.backup.backuppers import AbstractBackupper from selfprivacy_api.models.backup.snapshot import Snapshot @@ -95,6 +96,20 @@ class ResticBackupper(AbstractBackupper): if "NOTICE:" not in line: yield line + + @staticmethod + def sync (src_path: str, dest_path:str): + """a wrapper around rclone sync""" + + if not exists(src_path): + raise ValueError("source dir for rclone sync must exist") + + rclone_command = ["rclone", "sync", "-P", src_path, dest_path] + for raw_message in ResticBackupper.output_yielder(rclone_command): + if "ERROR" in raw_message: + raise ValueError(raw_message) + + def start_backup(self, folders: List[str], tag: str): """ Start backup with restic diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index a212ade..86310c7 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -7,6 +7,8 @@ from os import urandom from datetime import datetime, timedelta, timezone import selfprivacy_api.services as services +from selfprivacy_api.services import Service + from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.graphql.queries.providers import BackupProvider @@ -17,6 +19,8 @@ import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze +from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper + from selfprivacy_api.backup.tasks import start_backup, restore_snapshot from selfprivacy_api.backup.storage import Storage from selfprivacy_api.backup.jobs import get_backup_job @@ -68,7 +72,7 @@ def raw_dummy_service(tmpdir, backups): @pytest.fixture() -def dummy_service(tmpdir, backups, raw_dummy_service): +def dummy_service(tmpdir, backups, raw_dummy_service) -> Service: service = raw_dummy_service repo_path = path.join(tmpdir, "test_repo") assert not path.exists(repo_path) @@ -519,3 +523,25 @@ def test_services_to_back_up(backups, dummy_service): services = Backups.services_to_back_up(now) assert len(services) == 1 assert services[0].get_id() == dummy_service.get_id() + + +def test_sync(dummy_service): + src = dummy_service.get_folders()[0] + dst = dummy_service.get_folders()[1] + old_files_src = listdir(src) + old_files_dst = listdir(dst) + assert old_files_src != old_files_dst + + ResticBackupper.sync(src, dst) + new_files_src = listdir(src) + new_files_dst = listdir(dst) + assert new_files_src == old_files_src + assert new_files_dst == new_files_src + + +def test_sync_nonexistent_src(dummy_service): + src = 
"/var/lib/nonexistentFluffyBunniesOfUnix" + dst = dummy_service.get_folders()[1] + + with pytest.raises(ValueError): + ResticBackupper.sync(src, dst) From ae708e446b25c0522a5a347653475a5250b11f71 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Jul 2023 12:54:43 +0000 Subject: [PATCH 435/537] test(backups): actually list folders --- tests/test_graphql/test_backup.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 86310c7..0a150a6 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -200,13 +200,18 @@ def test_backup_returns_snapshot(backups, dummy_service): assert snapshot.created_at is not None +def folder_files(folder): + return [ + path.join(folder, filename) + for filename in listdir(folder) + if filename is not None + ] + + def service_files(service): result = [] for service_folder in service.get_folders(): - service_filename = listdir(service_folder)[0] - assert service_filename is not None - service_file = path.join(service_folder, service_filename) - result.append(service_file) + result.extend(folder_files(service_folder)) return result From 7ad5f91be1873a4056e33f6aa8a3ababa96f105b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Jul 2023 13:06:54 +0000 Subject: [PATCH 436/537] refactor(backups): move output yielding into backup utils --- .../backup/backuppers/restic_backupper.py | 22 ++++--------------- selfprivacy_api/backup/util.py | 14 ++++++++++++ 2 files changed, 18 insertions(+), 18 deletions(-) create mode 100644 selfprivacy_api/backup/util.py diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 826b336..bbaf0c0 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -7,6 +7,7 @@ from collections.abc import Iterable from json.decoder import JSONDecodeError from os.path import exists +from selfprivacy_api.backup.util import output_yielder from selfprivacy_api.backup.backuppers import AbstractBackupper from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.backup.jobs import get_backup_job @@ -84,32 +85,17 @@ class ResticBackupper(AbstractBackupper): return result @staticmethod - def output_yielder(command): - with subprocess.Popen( - command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True, - ) as handle: - for line in iter(handle.stdout.readline, ""): - if "NOTICE:" not in line: - yield line - - - @staticmethod - def sync (src_path: str, dest_path:str): + def sync(src_path: str, dest_path: str): """a wrapper around rclone sync""" if not exists(src_path): raise ValueError("source dir for rclone sync must exist") rclone_command = ["rclone", "sync", "-P", src_path, dest_path] - for raw_message in ResticBackupper.output_yielder(rclone_command): + for raw_message in output_yielder(rclone_command): if "ERROR" in raw_message: raise ValueError(raw_message) - def start_backup(self, folders: List[str], tag: str): """ Start backup with restic @@ -128,7 +114,7 @@ class ResticBackupper(AbstractBackupper): messages = [] job = get_backup_job(get_service_by_id(tag)) try: - for raw_message in ResticBackupper.output_yielder(backup_command): + for raw_message in output_yielder(backup_command): message = self.parse_message(raw_message, job) messages.append(message) return 
ResticBackupper._snapshot_from_backup_messages(messages, tag) diff --git a/selfprivacy_api/backup/util.py b/selfprivacy_api/backup/util.py new file mode 100644 index 0000000..8af74e0 --- /dev/null +++ b/selfprivacy_api/backup/util.py @@ -0,0 +1,14 @@ +import subprocess + + +def output_yielder(command): + with subprocess.Popen( + command, + shell=False, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + ) as handle: + for line in iter(handle.stdout.readline, ""): + if "NOTICE:" not in line: + yield line From 9a28c0ebcbe5c386a3daffcded4bdd44ce6eebd5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Jul 2023 13:28:23 +0000 Subject: [PATCH 437/537] refactor(backups): move syncing (non-restic) into backup utils --- .../backup/backuppers/restic_backupper.py | 12 ------------ selfprivacy_api/backup/util.py | 13 +++++++++++++ tests/test_graphql/test_backup.py | 6 +++--- 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index bbaf0c0..a94c993 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -84,18 +84,6 @@ class ResticBackupper(AbstractBackupper): result.append(item) return result - @staticmethod - def sync(src_path: str, dest_path: str): - """a wrapper around rclone sync""" - - if not exists(src_path): - raise ValueError("source dir for rclone sync must exist") - - rclone_command = ["rclone", "sync", "-P", src_path, dest_path] - for raw_message in output_yielder(rclone_command): - if "ERROR" in raw_message: - raise ValueError(raw_message) - def start_backup(self, folders: List[str], tag: str): """ Start backup with restic diff --git a/selfprivacy_api/backup/util.py b/selfprivacy_api/backup/util.py index 8af74e0..bda421e 100644 --- a/selfprivacy_api/backup/util.py +++ b/selfprivacy_api/backup/util.py @@ -1,4 +1,5 @@ import subprocess +from os.path import exists def output_yielder(command): @@ -12,3 +13,15 @@ def output_yielder(command): for line in iter(handle.stdout.readline, ""): if "NOTICE:" not in line: yield line + + +def sync(src_path: str, dest_path: str): + """a wrapper around rclone sync""" + + if not exists(src_path): + raise ValueError("source dir for rclone sync must exist") + + rclone_command = ["rclone", "sync", "-P", src_path, dest_path] + for raw_message in output_yielder(rclone_command): + if "ERROR" in raw_message: + raise ValueError(raw_message) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 0a150a6..e269cf1 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -18,8 +18,8 @@ from selfprivacy_api.backup import Backups import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze +from selfprivacy_api.backup.util import sync -from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper from selfprivacy_api.backup.tasks import start_backup, restore_snapshot from selfprivacy_api.backup.storage import Storage @@ -537,7 +537,7 @@ def test_sync(dummy_service): old_files_dst = listdir(dst) assert old_files_src != old_files_dst - ResticBackupper.sync(src, dst) + sync(src, dst) new_files_src = listdir(src) new_files_dst = listdir(dst) assert new_files_src == old_files_src @@ -549,4 +549,4 @@ def 
test_sync_nonexistent_src(dummy_service): dst = dummy_service.get_folders()[1] with pytest.raises(ValueError): - ResticBackupper.sync(src, dst) + sync(src, dst) From 5467a62906dae8aba12d18f2bf2a295e3f48890d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Jul 2023 13:29:31 +0000 Subject: [PATCH 438/537] test(backups): remove the 100mb file after test --- tests/test_graphql/test_backup.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index e269cf1..337ef86 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -325,6 +325,8 @@ def test_backup_larger_file(backups, dummy_service): updates = job_progress_updates(job_type_id) assert len(updates) > 3 assert updates[int((len(updates) - 1) / 2.0)] > 10 + #clean up a bit + remove(dir) def test_restore_snapshot_task(backups, dummy_service): From 91375362948a0c5fe93c6eb3346022de76fb8f1c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Jul 2023 15:28:12 +0000 Subject: [PATCH 439/537] feature(backups): mounting a repo --- .../backup/backuppers/restic_backupper.py | 26 ++++++++++++++++++- tests/test_graphql/test_backup.py | 23 +++++++++++++++- 2 files changed, 47 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index a94c993..d9f278c 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -6,6 +6,8 @@ from typing import List from collections.abc import Iterable from json.decoder import JSONDecodeError from os.path import exists +from os import listdir +from time import sleep from selfprivacy_api.backup.util import output_yielder from selfprivacy_api.backup.backuppers import AbstractBackupper @@ -52,7 +54,7 @@ class ResticBackupper(AbstractBackupper): def _password_command(self): return f"echo {LocalBackupSecret.get()}" - def restic_command(self, *args, tag: str = ""): + def restic_command(self, *args, tag: str = "") -> List[str]: command = [ "restic", "-o", @@ -73,6 +75,28 @@ class ResticBackupper(AbstractBackupper): command.extend(ResticBackupper.__flatten_list(args)) return command + def mount_repo(self, dir): + mount_command = self.restic_command("mount", dir) + mount_command.insert(0, "nohup") + handle = subprocess.Popen(mount_command, stdout=subprocess.DEVNULL, shell=False) + sleep(2) + if not "ids" in listdir(dir): + raise IOError("failed to mount dir ", dir) + return handle + + def unmount_repo(self, dir): + mount_command = ["umount", "-l", dir] + with subprocess.Popen( + mount_command, stdout=subprocess.PIPE, shell=False + ) as handle: + output = handle.communicate()[0].decode("utf-8") + # TODO: check for exit code? 
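Note: one possible way to resolve the TODO above is subprocess.run() with check=True, which raises CalledProcessError on any non-zero exit status instead of grepping the output; a hypothetical alternative, not what this patch does:

    import subprocess

    def unmount_repo(dir):
        # check=True raises subprocess.CalledProcessError if umount exits non-zero
        subprocess.run(["umount", "-l", dir], check=True, capture_output=True)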
+ if "error" in output.lower(): + return IOError("failed to unmount dir ", dir, ": ", output) + + if not listdir(dir) == []: + return IOError("failed to unmount dir ", dir) + @staticmethod def __flatten_list(list): """string-aware list flattener""" diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 337ef86..8fe3c99 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -5,6 +5,7 @@ from os import remove from os import listdir from os import urandom from datetime import datetime, timedelta, timezone +from subprocess import Popen import selfprivacy_api.services as services from selfprivacy_api.services import Service @@ -19,6 +20,7 @@ import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.backup.util import sync +from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper from selfprivacy_api.backup.tasks import start_backup, restore_snapshot @@ -325,7 +327,7 @@ def test_backup_larger_file(backups, dummy_service): updates = job_progress_updates(job_type_id) assert len(updates) > 3 assert updates[int((len(updates) - 1) / 2.0)] > 10 - #clean up a bit + # clean up a bit remove(dir) @@ -552,3 +554,22 @@ def test_sync_nonexistent_src(dummy_service): with pytest.raises(ValueError): sync(src, dst) + + +# Restic lowlevel +def test_mount_umount(backups, dummy_service, tmpdir): + Backups.back_up(dummy_service) + backupper = Backups.provider().backupper + assert isinstance(backupper, ResticBackupper) + + mountpoint = tmpdir / "mount" + makedirs(mountpoint) + assert path.exists(mountpoint) + assert len(listdir(mountpoint)) == 0 + + handle = backupper.mount_repo(mountpoint) + assert len(listdir(mountpoint)) != 0 + + backupper.unmount_repo(mountpoint) + # handle.terminate() + assert len(listdir(mountpoint)) == 0 From 4423db745843e2bbe216e3c486d93b865e9528e4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Jul 2023 20:41:52 +0000 Subject: [PATCH 440/537] refactor(backups): download a copy before replacing original --- .../backup/backuppers/restic_backupper.py | 30 ++++++++++++++----- tests/test_graphql/test_backup.py | 5 +++- 2 files changed, 27 insertions(+), 8 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index d9f278c..14a8be8 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -1,15 +1,16 @@ import subprocess import json import datetime +import tempfile from typing import List from collections.abc import Iterable from json.decoder import JSONDecodeError -from os.path import exists +from os.path import exists, join from os import listdir from time import sleep -from selfprivacy_api.backup.util import output_yielder +from selfprivacy_api.backup.util import output_yielder, sync from selfprivacy_api.backup.backuppers import AbstractBackupper from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.backup.jobs import get_backup_job @@ -210,19 +211,34 @@ class ResticBackupper(AbstractBackupper): except ValueError as e: raise ValueError("cannot restore a snapshot: " + output) from e - def restore_from_backup(self, snapshot_id, folders): + def restore_from_backup(self, snapshot_id, folders: List[str], verify=True): """ Restore from backup with restic """ - # snapshots save the 
path of the folder in the file system - # I do not alter the signature yet because maybe this can be - # changed with flags + if folders is None or folders == []: + raise ValueError("cannot restore without knowing where to!") + + with tempfile.TemporaryDirectory() as dir: + self.do_restore(snapshot_id, target=dir) + for folder in folders: + src = join(dir, folder.strip("/")) + if not exists(src): + raise ValueError( + f"there is no such path: {src}. We tried to find {folder}" + ) + dst = folder + sync(src, dst) + + def do_restore(self, snapshot_id, target="/", verify=False): + """barebones restic restore""" restore_command = self.restic_command( "restore", snapshot_id, "--target", - "/", + target, ) + if verify: + restore_command.append("--verify") with subprocess.Popen( restore_command, stdout=subprocess.PIPE, shell=False diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 8fe3c99..872b6ad 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -86,7 +86,10 @@ def dummy_service(tmpdir, backups, raw_dummy_service) -> Service: services.services.append(service) assert get_service_by_id(service.get_id()) is not None - return service + yield service + + # cleanup because apparently it matters wrt tasks + services.services.remove(service) @pytest.fixture() From f361f44dedb5420cb9010d99e0422d505421e66b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 3 Jul 2023 20:53:21 +0000 Subject: [PATCH 441/537] feature(backups): check restore exit code --- selfprivacy_api/backup/backuppers/restic_backupper.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 14a8be8..ae86efc 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -219,7 +219,7 @@ class ResticBackupper(AbstractBackupper): raise ValueError("cannot restore without knowing where to!") with tempfile.TemporaryDirectory() as dir: - self.do_restore(snapshot_id, target=dir) + self.do_restore(snapshot_id, target=dir, verify=verify) for folder in folders: src = join(dir, folder.strip("/")) if not exists(src): @@ -249,6 +249,14 @@ if "restoring" not in output: raise ValueError("cannot restore a snapshot: " + output) + assert ( + handle.returncode is not None + ) # none should be impossible after communicate + if handle.returncode != 0: + raise ValueError( + "restore exited with errorcode", handle.returncode, ":", output + ) + def _load_snapshots(self) -> object: """ Load list of snapshots from repository From 02e3c9bd5e5690f4dddfd9612181514567d54260 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 5 Jul 2023 13:13:30 +0000 Subject: [PATCH 442/537] feature(backups): forgetting snapshots --- selfprivacy_api/backup/__init__.py | 5 ++++ selfprivacy_api/backup/backuppers/__init__.py | 4 +++ .../backup/backuppers/none_backupper.py | 3 +++ .../backup/backuppers/restic_backupper.py | 26 +++++++++++++++++++ tests/test_graphql/test_backup.py | 26 +++++++++++++++++++ 5 files changed, 64 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7a60ecb..216cf65 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -305,6 +305,11 @@ class Backups: return snap + @staticmethod + def forget_snapshot(snapshot: Snapshot): + 
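# The exit-code check added in the hunk above recurs for several restic
# invocations in this series (restore here, forget just below). A minimal
# sketch of that pattern as a standalone helper; run_checked is a
# hypothetical name, not part of the actual ResticBackupper API:
import subprocess

def run_checked(command: list) -> str:
    with subprocess.Popen(
        command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False
    ) as handle:
        output, err = [string.decode("utf-8") for string in handle.communicate()]
    if handle.returncode != 0:
        raise ValueError("command failed with code", handle.returncode, output, err)
    return output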
Backups.provider().backupper.forget_snapshot(snapshot.id) + Storage.delete_cached_snapshot(snapshot) + @staticmethod def force_snapshot_cache_reload(): upstream_snapshots = Backups.provider().backupper.get_snapshots() diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 16cde07..335cdfd 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -37,3 +37,7 @@ class AbstractBackupper(ABC): @abstractmethod def restored_size(self, snapshot_id: str) -> int: raise NotImplementedError + + @abstractmethod + def forget_snapshot(self, snapshot_id): + raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index 014f755..2ac2035 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -27,3 +27,6 @@ class NoneBackupper(AbstractBackupper): def restored_size(self, snapshot_id: str) -> int: raise NotImplementedError + + def forget_snapshot(self, snapshot_id): + raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index ae86efc..7f16a91 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -257,6 +257,32 @@ "restore exited with errorcode", handle.returncode, ":", output ) + def forget_snapshot(self, snapshot_id): + """either removes snapshot or marks it for deletion later depending on server settings""" + forget_command = self.restic_command( + "forget", + snapshot_id, + ) + + with subprocess.Popen( + forget_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False + ) as handle: + # for some reason forget does not support nice reporting of progress via json + output, err = [string.decode("utf-8") for string in handle.communicate()] + + if "no matching ID found" in err: + raise ValueError( + "trying to delete, but no such snapshot: ", snapshot_id + ) + + assert ( + handle.returncode is not None + ) # none should be impossible after communicate + if handle.returncode != 0: + raise ValueError( + "forget exited with errorcode", handle.returncode, ":", output + ) + def _load_snapshots(self) -> object: """ Load list of snapshots from repository diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 872b6ad..928c1b7 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -15,6 +15,8 @@ from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.jobs import Jobs, JobStatus +from selfprivacy_api.models.backup.snapshot import Snapshot + from selfprivacy_api.backup import Backups import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider @@ -314,6 +316,30 @@ def test_backup_service_task(backups, dummy_service): assert_job_had_progress(job_type_id) +def test_forget_snapshot(backups, dummy_service): + snap1 = Backups.back_up(dummy_service) + snap2 = Backups.back_up(dummy_service) + assert len(Backups.get_snapshots(dummy_service)) == 2 + + Backups.forget_snapshot(snap2) + assert len(Backups.get_snapshots(dummy_service)) == 1 + Backups.force_snapshot_cache_reload() + assert len(Backups.get_snapshots(dummy_service)) == 1 + 
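# The cache reload just above re-reads the snapshot list from the repository
# itself, so its assertion only passes if "forget" reached the backend and
# not just the local cache. The same invariant as a standalone sketch,
# comparing ids because snapshot ordering is not guaranteed (an assumption):
from selfprivacy_api.backup import Backups

def assert_cache_matches_repo(service):
    cached_ids = {snap.id for snap in Backups.get_snapshots(service)}
    Backups.force_snapshot_cache_reload()
    fresh_ids = {snap.id for snap in Backups.get_snapshots(service)}
    assert cached_ids == fresh_ids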
+ assert Backups.get_snapshots(dummy_service)[0].id == snap1.id + + Backups.forget_snapshot(snap1) + assert len(Backups.get_snapshots(dummy_service)) == 0 + + +def test_forget_nonexistent_snapshot(backups, dummy_service): + bogus = Snapshot( + id="gibberjibber", service_name="nohoho", created_at=datetime.now(timezone.utc) + ) + with pytest.raises(ValueError): + Backups.forget_snapshot(bogus) + + def test_backup_larger_file(backups, dummy_service): dir = path.join(dummy_service.get_folders()[0], "LARGEFILE") mega = 2**20 From 59fe3864634965f9a4534000063bb94935a8bea6 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Jul 2023 10:50:59 +0000 Subject: [PATCH 443/537] feature(backups): restore strategies enum --- selfprivacy_api/backup/__init__.py | 39 ++++++++----------- .../graphql/common_types/backup.py | 10 +++++ 2 files changed, 27 insertions(+), 22 deletions(-) create mode 100644 selfprivacy_api/graphql/common_types/backup.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 216cf65..c3deee0 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -8,11 +8,12 @@ from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service -from selfprivacy_api.jobs import Jobs, JobStatus +from selfprivacy_api.jobs import Jobs, JobStatus, Job from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy from selfprivacy_api.models.backup.snapshot import Snapshot @@ -207,42 +208,36 @@ class Backups: return snapshot ### Restoring + @staticmethod + def _ensure_active_restore_job(service, snapshot) -> Job: + job = get_restore_job(service) + if job is None: + job = add_restore_job(snapshot) + + Jobs.update(job, status=JobStatus.RUNNING) + return job @staticmethod - def restore_snapshot(snapshot: Snapshot): + def restore_snapshot( + snapshot: Snapshot, strategy=RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE + ): service = get_service_by_id(snapshot.service_name) - if service is None: raise ValueError( f"snapshot has a nonexistent service: {snapshot.service_name}" ) - job = get_restore_job(service) - if job is None: - job = add_restore_job(snapshot) + job = Backups._ensure_active_restore_job(service, snapshot) - Jobs.update( - job, - status=JobStatus.RUNNING, - ) try: Backups._assert_restorable(snapshot) - Backups._restore_service_from_snapshot( - service, - snapshot.id, - ) + Backups._restore_service_from_snapshot(service, snapshot.id) service.post_restore() except Exception as e: - Jobs.update( - job, - status=JobStatus.ERROR, - ) + Jobs.update(job, status=JobStatus.ERROR) raise e - Jobs.update( - job, - status=JobStatus.FINISHED, - ) + Jobs.update(job, status=JobStatus.FINISHED) @staticmethod def _assert_restorable(snapshot: Snapshot): diff --git a/selfprivacy_api/graphql/common_types/backup.py b/selfprivacy_api/graphql/common_types/backup.py new file mode 100644 index 0000000..992363b --- /dev/null +++ b/selfprivacy_api/graphql/common_types/backup.py @@ -0,0 +1,10 @@ +"""Backup""" +# pylint: disable=too-few-public-methods +import strawberry +from enum import Enum + + +@strawberry.enum +class RestoreStrategy(Enum): + INPLACE = "INPLACE" + DOWNLOAD_VERIFY_OVERWRITE = "DOWNLOAD_VERIFY_OVERWRITE" From 95e4296d0b0266dd41726d574530e3d25b260972 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Jul 2023 11:54:48 +0000 Subject: 
[PATCH 444/537] feature(backups): implement inplace restore strategy --- selfprivacy_api/backup/__init__.py | 34 +++++++++++++++---- selfprivacy_api/backup/backuppers/__init__.py | 2 +- 2 files changed, 29 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index c3deee0..ddfd6be 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -209,14 +209,28 @@ class Backups: ### Restoring @staticmethod - def _ensure_active_restore_job(service, snapshot) -> Job: + def _ensure_queued_restore_job(service, snapshot) -> Job: job = get_restore_job(service) if job is None: job = add_restore_job(snapshot) - Jobs.update(job, status=JobStatus.RUNNING) + Jobs.update(job, status=JobStatus.CREATED) return job + @staticmethod + def _inplace_restore(service: Service, snapshot: Snapshot, job: Job): + failsafe_snapshot = Backups.back_up(service) + + Jobs.update(job, status=JobStatus.RUNNING) + try: + Backups._restore_service_from_snapshot(service, snapshot.id, verify=False) + except Exception as e: + Backups._restore_service_from_snapshot( + service, failsafe_snapshot.id, verify=False + ) + raise e + Backups.forget_snapshot(failsafe_snapshot) + @staticmethod def restore_snapshot( snapshot: Snapshot, strategy=RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE @@ -226,13 +240,21 @@ class Backups: raise ValueError( f"snapshot has a nonexistent service: {snapshot.service_name}" ) - - job = Backups._ensure_active_restore_job(service, snapshot) + job = Backups._ensure_queued_restore_job(service, snapshot) try: Backups._assert_restorable(snapshot) - Backups._restore_service_from_snapshot(service, snapshot.id) + + if strategy == RestoreStrategy.INPLACE: + Backups._inplace_restore(service, snapshot, job) + else: # verify_before_download is our default + Jobs.update(job, status=JobStatus.RUNNING) + Backups._restore_service_from_snapshot( + service, snapshot.id, verify=True + ) + service.post_restore() + except Exception as e: Jobs.update(job, status=JobStatus.ERROR) raise e @@ -256,7 +278,7 @@ class Backups: ) @staticmethod - def _restore_service_from_snapshot(service: Service, snapshot_id: str): + def _restore_service_from_snapshot(service: Service, snapshot_id: str, verify=True): folders = service.get_folders() Backups.provider().backupper.restore_from_backup( diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 335cdfd..24eb108 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -30,7 +30,7 @@ class AbstractBackupper(ABC): raise NotImplementedError @abstractmethod - def restore_from_backup(self, snapshot_id: str, folders: List[str]): + def restore_from_backup(self, snapshot_id: str, folders: List[str], verify=True): """Restore a target folder using a snapshot""" raise NotImplementedError From 65ce86f0f950029ac81dbc0e89d0b8176af646dd Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Jul 2023 12:49:52 +0000 Subject: [PATCH 445/537] test(backups): test out that pre-restore backup plays nice with jobs --- selfprivacy_api/backup/jobs.py | 15 +++++++++------ selfprivacy_api/backup/tasks.py | 9 +++++++-- tests/test_graphql/test_backup.py | 16 ++++++++++++++-- 3 files changed, 30 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py index 38e9ad1..ab4eaca 100644 --- a/selfprivacy_api/backup/jobs.py +++ b/selfprivacy_api/backup/jobs.py @@ -29,14 +29,17 @@ def 
get_jobs_by_service(service: Service) -> List[Job]: return result -def is_something_queued_for(service: Service) -> bool: - return len(get_jobs_by_service(service)) != 0 +def is_something_running_for(service: Service) -> bool: + running_jobs = [ + job for job in get_jobs_by_service(service) if job.status == JobStatus.RUNNING + ] + return len(running_jobs) != 0 def add_backup_job(service: Service) -> Job: - if is_something_queued_for(service): + if is_something_running_for(service): message = ( - f"Cannot start a backup of {service.get_id()}, another operation is queued: " + f"Cannot start a backup of {service.get_id()}, another operation is running: " + get_jobs_by_service(service)[0].type_id ) raise ValueError(message) @@ -53,9 +56,9 @@ def add_restore_job(snapshot: Snapshot) -> Job: service = get_service_by_id(snapshot.service_name) if service is None: raise ValueError(f"no such service: {snapshot.service_name}") - if is_something_queued_for(service): + if is_something_running_for(service): message = ( - f"Cannot start a restore of {service.get_id()}, another operation is queued: " + f"Cannot start a restore of {service.get_id()}, another operation is running: " + get_jobs_by_service(service)[0].type_id ) raise ValueError(message) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index bd3925d..ac8f2e2 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -1,5 +1,7 @@ from datetime import datetime +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy + from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey from selfprivacy_api.services import get_service_by_id @@ -28,8 +30,11 @@ def start_backup(service: Service) -> bool: @huey.task() -def restore_snapshot(snapshot: Snapshot) -> bool: - Backups.restore_snapshot(snapshot) +def restore_snapshot( + snapshot: Snapshot, + strategy: RestoreStrategy = RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE, +) -> bool: + Backups.restore_snapshot(snapshot, strategy) return True diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 928c1b7..e54be85 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -13,6 +13,7 @@ from selfprivacy_api.services import Service from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.graphql.queries.providers import BackupProvider +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.models.backup.snapshot import Snapshot @@ -360,7 +361,15 @@ def test_backup_larger_file(backups, dummy_service): remove(dir) -def test_restore_snapshot_task(backups, dummy_service): +@pytest.fixture(params=["verify", "inplace"]) +def restore_strategy(request) -> RestoreStrategy: + if request.param == "verify": + return RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE + else: + return RestoreStrategy.INPLACE + + +def test_restore_snapshot_task(backups, dummy_service, restore_strategy): Backups.back_up(dummy_service) snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 @@ -375,7 +384,7 @@ def test_restore_snapshot_task(backups, dummy_service): for p in paths_to_nuke: remove(p) - handle = restore_snapshot(snaps[0]) + handle = restore_snapshot(snaps[0], restore_strategy) handle(blocking=True) for p, content in zip(paths_to_nuke, contents): @@ -383,6 +392,9 @@ def 
test_restore_snapshot_task(backups, dummy_service): with open(p, "r") as file: assert file.read() == content + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + def test_autobackup_enable_service(backups, dummy_service): assert not Backups.is_autobackup_enabled(dummy_service) From cacbf8335d51a8635961f8be8a3358f5a98ab942 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Jul 2023 13:14:30 +0000 Subject: [PATCH 446/537] fix(backups): actually mount if asked for an inplace restore --- selfprivacy_api/backup/backuppers/restic_backupper.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 7f16a91..565a084 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -219,9 +219,16 @@ class ResticBackupper(AbstractBackupper): raise ValueError("cannot restore without knowing where to!") with tempfile.TemporaryDirectory() as dir: - self.do_restore(snapshot_id, target=dir, verify=verify) + if verify: + self.do_restore(snapshot_id, target=dir, verify=verify) + snapshot_root = dir + else: # attempting inplace restore via mount + sync + self.mount_repo(dir) + snapshot_root = join(dir, "ids", snapshot_id) + + assert snapshot_root is not None for folder in folders: - src = join(dir, folder.strip("/")) + src = join(snapshot_root, folder.strip("/")) if not exists(src): raise ValueError( f"there is no such path: {src}. We tried to find {folder}" From be95b84d525d0f3dcefead9d338190f92cc9f2b0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 7 Jul 2023 13:24:05 +0000 Subject: [PATCH 447/537] feature(backups): expose restore strategies to the API --- selfprivacy_api/graphql/mutations/backup_mutations.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index adc3873..5c8163c 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -13,6 +13,7 @@ from selfprivacy_api.graphql.queries.backup import BackupConfiguration from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.graphql.common_types.jobs import job_to_api_job +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_all_services, get_service_by_id @@ -115,7 +116,11 @@ class BackupMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def restore_backup(self, snapshot_id: str) -> GenericJobMutationReturn: + def restore_backup( + self, + snapshot_id: str, + strategy: RestoreStrategy = RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE, + ) -> GenericJobMutationReturn: """Restore backup""" snap = Backups.get_snapshot_by_id(snapshot_id) if snap is None: @@ -145,7 +150,7 @@ class BackupMutations: job=None, ) - restore_snapshot(snap) + restore_snapshot(snap, strategy) return GenericJobMutationReturn( success=True, From 8e29634d0295c3ceda27594828afee49c9ab8e9f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 10 Jul 2023 10:33:01 +0000 Subject: [PATCH 448/537] feature(utils): a hopefully reusable waitloop --- selfprivacy_api/utils/waitloop.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 
selfprivacy_api/utils/waitloop.py diff --git a/selfprivacy_api/utils/waitloop.py b/selfprivacy_api/utils/waitloop.py new file mode 100644 index 0000000..a375b8e --- /dev/null +++ b/selfprivacy_api/utils/waitloop.py @@ -0,0 +1,15 @@ +from time import sleep +from typing import Callable +from typing import Optional + +def wait_until_true(readiness_checker: Callable[[],bool],*,interval: float =0.1, timeout_sec: Optional[float] = None): + elapsed = 0.0 + if timeout_sec is None: + timeout_sec = 10e16 + while not readiness_checker or elapsed > timeout_sec: + sleep(interval) + elapsed += interval + if elapsed > timeout_sec: + raise TimeoutError() + + From d33e9d633580f59c58264557b11915259506c65f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 10 Jul 2023 17:03:10 +0000 Subject: [PATCH 449/537] test(backups): simulating async service start n stop --- .../services/test_service/__init__.py | 89 +++++++++++++------ 1 file changed, 64 insertions(+), 25 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index af527a0..da4960a 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -1,7 +1,12 @@ """Class representing Bitwarden service""" import base64 import typing +import subprocess + from typing import List +from os import path + +# from enum import Enum from selfprivacy_api.jobs import Job from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus @@ -11,13 +16,24 @@ import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.test_service.icon import BITWARDEN_ICON +DEFAULT_DELAY = 0 + class DummyService(Service): """A test service""" + folders: List[str] = [] + def __init_subclass__(cls, folders: List[str]): cls.folders = folders + def __init__(self): + super().__init__() + dir = self.folders[0] + status_file = path.join(dir, "service_status") + with open(status_file, "w") as file: + file.write(ServiceStatus.ACTIVE.value) + @staticmethod def get_id() -> str: """Return service id.""" @@ -61,38 +77,61 @@ class DummyService(Service): def is_enabled() -> bool: return True - @staticmethod - def get_status() -> ServiceStatus: - """ - Return Bitwarden status from systemd. - Use command return code to determine status. + @classmethod + def status_file(cls) -> str: + dir = cls.folders[0] + return path.join(dir, "service_status") - Return code 0 means service is running. - Return code 1 or 2 means service is in error stat. - Return code 3 means service is stopped. - Return code 4 means service is off. 
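# Usage sketch for the wait_until_true helper introduced in the patch above,
# in the form the tests later in this series use it (assumes a service object
# exposing get_status(), like the DummyService being reworked here):
from selfprivacy_api.services.service import ServiceStatus
from selfprivacy_api.utils.waitloop import wait_until_true

def wait_for_shutdown(service, timeout_sec: float = 60.0):
    wait_until_true(
        lambda: service.get_status() == ServiceStatus.INACTIVE,
        timeout_sec=timeout_sec,
    )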
- """ - return ServiceStatus.ACTIVE + @classmethod + def set_status(cls, status: ServiceStatus): + with open(cls.status_file(), "w") as file: + status_string = file.write(status.value) - @staticmethod - def enable(): + @classmethod + def get_status(cls) -> ServiceStatus: + with open(cls.status_file(), "r") as file: + status_string = file.read().strip() + return ServiceStatus[status_string] + + @classmethod + def change_status_with_async_delay( + cls, new_status: ServiceStatus, delay_sec: float + ): + """simulating a delay on systemd side""" + dir = cls.folders[0] + status_file = path.join(dir, "service_status") + + command = [ + "bash", + "-c", + f" sleep {delay_sec} && echo {new_status.value} > {status_file}", + ] + handle = subprocess.Popen(command) + if delay_sec == 0: + handle.communicate() + + @classmethod + def enable(cls): pass - @staticmethod - def disable(): + @classmethod + def disable(cls, delay): pass - @staticmethod - def stop(): - pass + @classmethod + def stop(cls, delay=DEFAULT_DELAY): + cls.set_status(ServiceStatus.DEACTIVATING) + cls.change_status_with_async_delay(ServiceStatus.INACTIVE, delay) - @staticmethod - def start(): - pass + @classmethod + def start(cls, delay=DEFAULT_DELAY): + cls.set_status(ServiceStatus.ACTIVATING) + cls.change_status_with_async_delay(ServiceStatus.ACTIVE, delay) - @staticmethod - def restart(): - pass + @classmethod + def restart(cls, delay=DEFAULT_DELAY): + cls.set_status(ServiceStatus.RELOADING) # is a correct one? + cls.change_status_with_async_delay(ServiceStatus.ACTIVE, delay) @staticmethod def get_configuration(): @@ -112,7 +151,7 @@ class DummyService(Service): return storage_usage @staticmethod - def get_drive(cls) -> str: + def get_drive() -> str: return "sda1" @classmethod From e2b906b219c0242a0475245fd91f29bc10c68bf4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 12:10:40 +0000 Subject: [PATCH 450/537] test(backups): test async service start n stop simulation --- selfprivacy_api/utils/waitloop.py | 13 +++++++++---- tests/test_graphql/test_backup.py | 10 +++++----- tests/test_services.py | 19 ++++++++++++++++++- 3 files changed, 32 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/utils/waitloop.py b/selfprivacy_api/utils/waitloop.py index a375b8e..9f71a37 100644 --- a/selfprivacy_api/utils/waitloop.py +++ b/selfprivacy_api/utils/waitloop.py @@ -2,14 +2,19 @@ from time import sleep from typing import Callable from typing import Optional -def wait_until_true(readiness_checker: Callable[[],bool],*,interval: float =0.1, timeout_sec: Optional[float] = None): + +def wait_until_true( + readiness_checker: Callable[[], bool], + *, + interval: float = 0.1, + timeout_sec: Optional[float] = None +): elapsed = 0.0 if timeout_sec is None: timeout_sec = 10e16 - while not readiness_checker or elapsed > timeout_sec: + + while (not readiness_checker()) and elapsed < timeout_sec: sleep(interval) elapsed += interval if elapsed > timeout_sec: raise TimeoutError() - - diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index e54be85..573480c 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -52,7 +52,7 @@ def backups_backblaze(generic_userdata): @pytest.fixture() -def raw_dummy_service(tmpdir, backups): +def raw_dummy_service(tmpdir): dirnames = ["test_service", "also_test_service"] service_dirs = [] for d in dirnames: @@ -578,13 +578,13 @@ def test_services_to_back_up(backups, dummy_service): def test_sync(dummy_service): src = 
dummy_service.get_folders()[0] dst = dummy_service.get_folders()[1] - old_files_src = listdir(src) - old_files_dst = listdir(dst) + old_files_src = set(listdir(src)) + old_files_dst = set(listdir(dst)) assert old_files_src != old_files_dst sync(src, dst) - new_files_src = listdir(src) - new_files_dst = listdir(dst) + new_files_src = set(listdir(src)) + new_files_dst = set(listdir(dst)) assert new_files_src == old_files_src assert new_files_dst == new_files_src diff --git a/tests/test_services.py b/tests/test_services.py index 5816140..4d4c8f4 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -9,7 +9,10 @@ from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api.services.generic_service_mover import FolderMoveNames from selfprivacy_api.services.test_service import DummyService -from selfprivacy_api.services.service import Service +from selfprivacy_api.services.service import Service, ServiceStatus +from selfprivacy_api.utils.waitloop import wait_until_true + +from tests.test_graphql.test_backup import raw_dummy_service def test_unimplemented_folders_raises(): @@ -25,6 +28,20 @@ def test_unimplemented_folders_raises(): assert owned_folders is not None +def test_delayed_start_stop(raw_dummy_service): + dummy = raw_dummy_service + + dummy.stop(delay=0.3) + assert dummy.get_status() == ServiceStatus.DEACTIVATING + wait_until_true(lambda: dummy.get_status() == ServiceStatus.INACTIVE) + assert dummy.get_status() == ServiceStatus.INACTIVE + + dummy.start(delay=0.3) + assert dummy.get_status() == ServiceStatus.ACTIVATING + wait_until_true(lambda: dummy.get_status() == ServiceStatus.ACTIVE) + assert dummy.get_status() == ServiceStatus.ACTIVE + + def test_owned_folders_from_not_owned(): assert Bitwarden.get_owned_folders() == [ OwnedPath( From ea4e53f826a94d5c1d271c735949f3e2033872cd Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 12:27:55 +0000 Subject: [PATCH 451/537] test(backups): make delay settable per dummyservice --- .../services/test_service/__init__.py | 17 +++++++++++------ tests/test_services.py | 5 +++-- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index da4960a..07b460b 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -23,6 +23,7 @@ class DummyService(Service): """A test service""" folders: List[str] = [] + startstop_delay = 0 def __init_subclass__(cls, folders: List[str]): cls.folders = folders @@ -119,19 +120,23 @@ class DummyService(Service): pass @classmethod - def stop(cls, delay=DEFAULT_DELAY): + def set_delay(cls, new_delay): + cls.startstop_delay = new_delay + + @classmethod + def stop(cls): cls.set_status(ServiceStatus.DEACTIVATING) - cls.change_status_with_async_delay(ServiceStatus.INACTIVE, delay) + cls.change_status_with_async_delay(ServiceStatus.INACTIVE, cls.startstop_delay) @classmethod - def start(cls, delay=DEFAULT_DELAY): + def start(cls): cls.set_status(ServiceStatus.ACTIVATING) - cls.change_status_with_async_delay(ServiceStatus.ACTIVE, delay) + cls.change_status_with_async_delay(ServiceStatus.ACTIVE, cls.startstop_delay) @classmethod - def restart(cls, delay=DEFAULT_DELAY): + def restart(cls): cls.set_status(ServiceStatus.RELOADING) # is a correct one? 
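# The bash-based change_status_with_async_delay above boils down to a
# detached writer that flips the status file after a pause, so get_status()
# changes asynchronously. An equivalent pure-Python sketch using
# threading.Timer (an alternative formulation, not the code the service runs):
from threading import Timer

def flip_status_later(status_file: str, status_value: str, delay_sec: float):
    def write_status():
        with open(status_file, "w") as file:
            file.write(status_value)
    Timer(delay_sec, write_status).start()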
- cls.change_status_with_async_delay(ServiceStatus.ACTIVE, delay) + cls.change_status_with_async_delay(ServiceStatus.ACTIVE, cls.startstop_delay) @staticmethod def get_configuration(): diff --git a/tests/test_services.py b/tests/test_services.py index 4d4c8f4..12889c3 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -30,13 +30,14 @@ def test_unimplemented_folders_raises(): def test_delayed_start_stop(raw_dummy_service): dummy = raw_dummy_service + dummy.set_delay(0.3) - dummy.stop(delay=0.3) + dummy.stop() assert dummy.get_status() == ServiceStatus.DEACTIVATING wait_until_true(lambda: dummy.get_status() == ServiceStatus.INACTIVE) assert dummy.get_status() == ServiceStatus.INACTIVE - dummy.start(delay=0.3) + dummy.start() assert dummy.get_status() == ServiceStatus.ACTIVATING wait_until_true(lambda: dummy.get_status() == ServiceStatus.ACTIVE) assert dummy.get_status() == ServiceStatus.ACTIVE From 86c2ae2c1fc6824dadb5d193f354bf0ebd8b0da2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 15:02:45 +0000 Subject: [PATCH 452/537] refactor(backups): make a StoppedService context manager --- selfprivacy_api/services/service.py | 30 +++++++++++++++++++++++++++++ tests/test_services.py | 15 ++++++++++++++- 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 286fab7..e2c7c01 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -10,6 +10,7 @@ from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.owned_path import OwnedPath +from selfprivacy_api.utils.waitloop import wait_until_true class ServiceStatus(Enum): @@ -245,3 +246,32 @@ class Service(ABC): def post_restore(self): pass + + +class StoppedService: + """ + A context manager that stops the service if needed and reactivates it + after you are done if it was active + + Example: + ``` + assert service.get_status() == ServiceStatus.ACTIVE + with StoppedService(service) [as stopped_service]: + assert service.get_status() == ServiceStatus.INACTIVE + ``` + """ + def __init__(self, service: Service): + self.service = service + self.original_status = service.get_status() + + def __enter__(self) -> Service: + self.original_status = self.service.get_status() + if self.original_status != ServiceStatus.INACTIVE: + self.service.stop() + wait_until_true(lambda: self.service.get_status() == ServiceStatus.INACTIVE) + return self.service + + def __exit__(self, type, value, traceback): + if self.original_status in [ServiceStatus.ACTIVATING, ServiceStatus.ACTIVE]: + self.service.start() + wait_until_true(lambda: self.service.get_status() == ServiceStatus.ACTIVE) diff --git a/tests/test_services.py b/tests/test_services.py index 12889c3..b83a7f2 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -9,7 +9,7 @@ from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api.services.generic_service_mover import FolderMoveNames from selfprivacy_api.services.test_service import DummyService -from selfprivacy_api.services.service import Service, ServiceStatus +from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService from selfprivacy_api.utils.waitloop import wait_until_true from tests.test_graphql.test_backup import raw_dummy_service @@ -28,6 +28,19 @@ def test_unimplemented_folders_raises(): assert owned_folders is not None +def 
test_service_stopper(raw_dummy_service): + dummy: Service = raw_dummy_service + dummy.set_delay(0.3) + + assert dummy.get_status() == ServiceStatus.ACTIVE + + with StoppedService(dummy) as stopped_dummy: + assert stopped_dummy.get_status() == ServiceStatus.INACTIVE + assert dummy.get_status() == ServiceStatus.INACTIVE + + assert dummy.get_status() == ServiceStatus.ACTIVE + + def test_delayed_start_stop(raw_dummy_service): dummy = raw_dummy_service dummy.set_delay(0.3) From a7427f3cb591b94fef7a736f9610550a73f96a59 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 16:00:36 +0000 Subject: [PATCH 453/537] test(backups): do not store the status file in backupped folders --- selfprivacy_api/services/test_service/__init__.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 07b460b..a0fb02a 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -30,8 +30,7 @@ class DummyService(Service): def __init__(self): super().__init__() - dir = self.folders[0] - status_file = path.join(dir, "service_status") + status_file = self.status_file() with open(status_file, "w") as file: file.write(ServiceStatus.ACTIVE.value) @@ -81,7 +80,8 @@ class DummyService(Service): @classmethod def status_file(cls) -> str: dir = cls.folders[0] - return path.join(dir, "service_status") + # we do not REALLY want to store our state in our declared folders + return path.join(dir, "..", "service_status") @classmethod def set_status(cls, status: ServiceStatus): @@ -99,8 +99,7 @@ class DummyService(Service): cls, new_status: ServiceStatus, delay_sec: float ): """simulating a delay on systemd side""" - dir = cls.folders[0] - status_file = path.join(dir, "service_status") + status_file = cls.status_file() command = [ "bash", From 40ad1b5ce44e74b5a2f7e7d1dd736dcba3f923a4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 16:43:26 +0000 Subject: [PATCH 454/537] feature(backups): stop services before backups --- selfprivacy_api/backup/__init__.py | 38 +++++++++++++++++++++++------- tests/test_graphql/test_backup.py | 12 +++++++++- 2 files changed, 41 insertions(+), 9 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index ddfd6be..3bbd721 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -6,7 +6,7 @@ from typing import List, Optional from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services import get_service_by_id -from selfprivacy_api.services.service import Service +from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService from selfprivacy_api.jobs import Jobs, JobStatus, Job @@ -35,6 +35,18 @@ DEFAULT_JSON_PROVIDER = { } +class NotDeadError(AssertionError): + def __init__(self, service: Service): + self.service_name = service.get_id() + + def __str__(self): + return f""" + Service {self.service_name} should be either stopped or dead from an error before we back up. + Normally, this error is unreachable because we do try ensure this. + Apparently, not this time. 
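# How this guard fires in practice (a sketch; assert_dead and NotDeadError
# are both introduced in this very patch, further down in backup/__init__.py,
# and running_service stands for any service in the ACTIVE state):
from selfprivacy_api.backup import Backups, NotDeadError

def refuse_to_back_up_live(running_service):
    try:
        Backups.assert_dead(running_service)
    except NotDeadError as error:
        print(error)  # the service must be stopped (or dead) first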
+ """ + + class Backups: """A stateless controller class for backups""" @@ -193,13 +205,15 @@ class Backups: Jobs.update(job, status=JobStatus.RUNNING) try: - service.pre_backup() - snapshot = Backups.provider().backupper.start_backup( - folders, - tag, - ) - Backups._store_last_snapshot(tag, snapshot) - service.post_restore() + with StoppedService(service): + Backups.assert_dead(service) # to be extra sure + service.pre_backup() + snapshot = Backups.provider().backupper.start_backup( + folders, + tag, + ) + Backups._store_last_snapshot(tag, snapshot) + service.post_restore() except Exception as e: Jobs.update(job, status=JobStatus.ERROR) raise e @@ -465,3 +479,11 @@ class Backups: repo_id="", ) Storage.store_provider(provider) + + @staticmethod + def assert_dead(service: Service): + # if we backup the service that is failing to restore it to the + # previous snapshot, its status can be FAILED + # And obviously restoring a failed service is the moun route + if service.get_status() not in [ServiceStatus.INACTIVE, ServiceStatus.FAILED]: + raise NotDeadError(service) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 573480c..319fb53 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -303,7 +303,17 @@ def test_snapshots_by_id(backups, dummy_service): assert Backups.get_snapshot_by_id(snap2.id).id == snap2.id -def test_backup_service_task(backups, dummy_service): +@pytest.fixture(params=["instant_server_stop", "delayed_server_stop"]) +def simulated_service_stopping_delay(request) -> float: + if request.param == "instant_server_stop": + return 0.0 + else: + return 0.3 + + +def test_backup_service_task(backups, dummy_service, simulated_service_stopping_delay): + dummy_service.set_delay(simulated_service_stopping_delay) + handle = start_backup(dummy_service) handle(blocking=True) From b001e198bf2492f4ed3f94b9ab55845cc59160d2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 16:53:49 +0000 Subject: [PATCH 455/537] feature(backups): stop services before restores --- selfprivacy_api/backup/__init__.py | 19 ++++++++++--------- tests/test_graphql/test_backup.py | 6 +++++- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 3bbd721..0f93667 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -258,16 +258,17 @@ class Backups: try: Backups._assert_restorable(snapshot) + with StoppedService(service): + Backups.assert_dead(service) + if strategy == RestoreStrategy.INPLACE: + Backups._inplace_restore(service, snapshot, job) + else: # verify_before_download is our default + Jobs.update(job, status=JobStatus.RUNNING) + Backups._restore_service_from_snapshot( + service, snapshot.id, verify=True + ) - if strategy == RestoreStrategy.INPLACE: - Backups._inplace_restore(service, snapshot, job) - else: # verify_before_download is our default - Jobs.update(job, status=JobStatus.RUNNING) - Backups._restore_service_from_snapshot( - service, snapshot.id, verify=True - ) - - service.post_restore() + service.post_restore() except Exception as e: Jobs.update(job, status=JobStatus.ERROR) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 319fb53..3709440 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -379,7 +379,11 @@ def restore_strategy(request) -> RestoreStrategy: return RestoreStrategy.INPLACE -def 
test_restore_snapshot_task(backups, dummy_service, restore_strategy): +def test_restore_snapshot_task( + backups, dummy_service, restore_strategy, simulated_service_stopping_delay +): + dummy_service.set_delay(simulated_service_stopping_delay) + Backups.back_up(dummy_service) snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 From c53f35c94799a38e10a6a3ef0373314babfc1945 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 12 Jul 2023 17:02:54 +0000 Subject: [PATCH 456/537] feature(servers): set default timeout of server operations to 10 min --- selfprivacy_api/services/service.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index e2c7c01..c1cc5be 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -12,6 +12,8 @@ from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api.utils.waitloop import wait_until_true +DEFAULT_START_STOP_TIMEOUT = 10 * 60 + class ServiceStatus(Enum): """Enum for service status""" @@ -250,16 +252,17 @@ class Service(ABC): class StoppedService: """ - A context manager that stops the service if needed and reactivates it - after you are done if it was active + A context manager that stops the service if needed and reactivates it + after you are done if it was active - Example: - ``` - assert service.get_status() == ServiceStatus.ACTIVE - with StoppedService(service) [as stopped_service]: - assert service.get_status() == ServiceStatus.INACTIVE - ``` + Example: + ``` + assert service.get_status() == ServiceStatus.ACTIVE + with StoppedService(service) [as stopped_service]: + assert service.get_status() == ServiceStatus.INACTIVE + ``` """ + def __init__(self, service: Service): self.service = service self.original_status = service.get_status() @@ -268,10 +271,16 @@ class StoppedService: self.original_status = self.service.get_status() if self.original_status != ServiceStatus.INACTIVE: self.service.stop() - wait_until_true(lambda: self.service.get_status() == ServiceStatus.INACTIVE) + wait_until_true( + lambda: self.service.get_status() == ServiceStatus.INACTIVE, + timeout_sec=DEFAULT_START_STOP_TIMEOUT, + ) return self.service def __exit__(self, type, value, traceback): if self.original_status in [ServiceStatus.ACTIVATING, ServiceStatus.ACTIVE]: self.service.start() - wait_until_true(lambda: self.service.get_status() == ServiceStatus.ACTIVE) + wait_until_true( + lambda: self.service.get_status() == ServiceStatus.ACTIVE, + timeout_sec=DEFAULT_START_STOP_TIMEOUT, + ) From 097cf50b37eed1c388084df6baed8688a8f56bc0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Jul 2023 10:58:31 +0000 Subject: [PATCH 457/537] fix(servers): hopefully fix moving --- selfprivacy_api/services/generic_service_mover.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index e2b26f4..d858b93 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -35,13 +35,11 @@ class FolderMoveNames(BaseModel): return path.split("/")[-1] @staticmethod - def default_foldermoves(service: Service): - return ( - [ - FolderMoveNames.from_owned_path(folder) - for folder in service.get_owned_folders() - ], - ) + def default_foldermoves(service: 
Service) -> list[FolderMoveNames]: + return [ + FolderMoveNames.from_owned_path(folder) + for folder in service.get_owned_folders() + ] @huey.task() From f711275a5e3957f5b19e1c74d4cea93205c69ff2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Jul 2023 11:41:03 +0000 Subject: [PATCH 458/537] test(backup): test moving preventing backups --- tests/test_graphql/test_backup.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 3709440..02d3487 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -24,6 +24,7 @@ from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.backup.util import sync from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper +from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job from selfprivacy_api.backup.tasks import start_backup, restore_snapshot @@ -628,3 +629,19 @@ def test_mount_umount(backups, dummy_service, tmpdir): backupper.unmount_repo(mountpoint) # handle.terminate() assert len(listdir(mountpoint)) == 0 + + +def test_move_blocks_backups(backups, dummy_service, restore_strategy): + snap = Backups.back_up(dummy_service) + job = Jobs.add( + type_id=f"services.{dummy_service.get_id()}.move", + name="Move Dummy", + description=f"Moving Dummy data to the Rainbow Land", + status=JobStatus.RUNNING, + ) + + with pytest.raises(ValueError): + Backups.back_up(dummy_service) + + with pytest.raises(ValueError): + Backups.restore_snapshot(snap, restore_strategy) From 45011450c5b96e3200648121b1b832fe3350f929 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 14 Jul 2023 12:34:45 +0000 Subject: [PATCH 459/537] feature(backup):calculate needed space for inplace restoration --- selfprivacy_api/backup/__init__.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 0f93667..56150db 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,6 +1,6 @@ from datetime import datetime, timedelta from operator import add -from os import statvfs +from os import statvfs, path, walk from typing import List, Optional from selfprivacy_api.utils import ReadUserData, WriteUserData @@ -277,14 +277,28 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) @staticmethod - def _assert_restorable(snapshot: Snapshot): + def _assert_restorable( + snapshot: Snapshot, strategy=RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE + ): service = get_service_by_id(snapshot.service_name) if service is None: raise ValueError( f"snapshot has a nonexistent service: {snapshot.service_name}" ) - needed_space = Backups.snapshot_restored_size(snapshot.id) + restored_snap_size = Backups.snapshot_restored_size(snapshot.id) + + if strategy == RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE: + needed_space = restored_snap_size + elif strategy == RestoreStrategy.INPLACE: + needed_space = restored_snap_size - service.get_storage_usage() + else: + raise NotImplementedError( + """ + We do not know if there is enough space for restoration because there is some novel restore strategy used! 
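# The branch above, spelled out with example figures: a verified restore
# downloads a complete copy first, while an inplace restore reuses the
# service's current folders, so only the difference has to fit. The numbers
# below are made up purely for illustration:
GIB = 2**30
restored_snap_size = 10 * GIB                        # snapshot unpacks to 10 GiB
current_usage = 8 * GIB                              # service already holds 8 GiB
needed_verify = restored_snap_size                   # 10 GiB of free space
needed_inplace = restored_snap_size - current_usage  # only 2 GiB of free space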
+ This is a developer's fault, open a issue please + """ + ) available_space = Backups.space_usable_for_service(service) if needed_space > available_space: raise ValueError( @@ -466,6 +480,7 @@ class Backups: if folders == []: raise ValueError("unallocated service", service.get_id()) + # We assume all folders of one service live at the same volume fs_info = statvfs(folders[0]) usable_bytes = fs_info.f_frsize * fs_info.f_bavail return usable_bytes From 8805f738123e77c522ece3b5266dd005d0c99f21 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Mon, 17 Jul 2023 17:39:34 +0300 Subject: [PATCH 460/537] CI: Disable redis errors on write failures --- .drone.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.drone.yml b/.drone.yml index a1bd384..5459ff3 100644 --- a/.drone.yml +++ b/.drone.yml @@ -7,6 +7,8 @@ steps: commands: - kill $(ps aux | grep 'redis-server 127.0.0.1:6389' | awk '{print $2}') || true - redis-server --bind 127.0.0.1 --port 6389 >/dev/null & + # We do not care about persistance on CI + - redis-cli config set stop-writes-on-bgsave-error no - coverage run -m pytest -q - coverage xml - sonar-scanner -Dsonar.projectKey=SelfPrivacy-REST-API -Dsonar.sources=. -Dsonar.host.url=http://analyzer.lan:9000 -Dsonar.login="$SONARQUBE_TOKEN" From 7fe802eb1d9ae0068aebc46dcd41c87ac4563253 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Mon, 17 Jul 2023 19:45:00 +0300 Subject: [PATCH 461/537] ci: Fix redis port of redis-cli command --- .drone.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.drone.yml b/.drone.yml index 5459ff3..9e5ef64 100644 --- a/.drone.yml +++ b/.drone.yml @@ -8,7 +8,7 @@ steps: - kill $(ps aux | grep 'redis-server 127.0.0.1:6389' | awk '{print $2}') || true - redis-server --bind 127.0.0.1 --port 6389 >/dev/null & # We do not care about persistance on CI - - redis-cli config set stop-writes-on-bgsave-error no + - redis-cli -h 127.0.0.1 -p 6389 config set stop-writes-on-bgsave-error no - coverage run -m pytest -q - coverage xml - sonar-scanner -Dsonar.projectKey=SelfPrivacy-REST-API -Dsonar.sources=. -Dsonar.host.url=http://analyzer.lan:9000 -Dsonar.login="$SONARQUBE_TOKEN" From 20f3e5c5645f871d4a34cc63bb6d6368b33c0bbc Mon Sep 17 00:00:00 2001 From: Inex Code Date: Mon, 17 Jul 2023 19:55:16 +0300 Subject: [PATCH 462/537] ci: Give redis some time to start? 
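The three CI patches here disable RDB-persistence write protection on the throwaway CI redis and give it time to come up. For context, the same setting applied through redis-py rather than redis-cli (a sketch; host and port mirror the pipeline above):

    import redis

    r = redis.Redis(host="127.0.0.1", port=6389)
    r.config_set("stop-writes-on-bgsave-error", "no")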
--- .drone.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.drone.yml b/.drone.yml index 9e5ef64..fff99ae 100644 --- a/.drone.yml +++ b/.drone.yml @@ -8,6 +8,7 @@ steps: - kill $(ps aux | grep 'redis-server 127.0.0.1:6389' | awk '{print $2}') || true - redis-server --bind 127.0.0.1 --port 6389 >/dev/null & # We do not care about persistance on CI + - sleep 10 - redis-cli -h 127.0.0.1 -p 6389 config set stop-writes-on-bgsave-error no - coverage run -m pytest -q - coverage xml From 8b504993d06a862355f7e0231d712f2f32feeb44 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 18 Jul 2023 20:15:22 +0300 Subject: [PATCH 463/537] fix(backups): backup strategies were unused --- selfprivacy_api/backup/__init__.py | 57 ++++++++++++------- selfprivacy_api/backup/backuppers/__init__.py | 7 ++- .../backup/backuppers/restic_backupper.py | 12 +++- 3 files changed, 54 insertions(+), 22 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 56150db..3db2547 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,12 +1,15 @@ from datetime import datetime, timedelta -from operator import add -from os import statvfs, path, walk +from os import statvfs from typing import List, Optional from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services import get_service_by_id -from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService +from selfprivacy_api.services.service import ( + Service, + ServiceStatus, + StoppedService, +) from selfprivacy_api.jobs import Jobs, JobStatus, Job @@ -41,16 +44,17 @@ class NotDeadError(AssertionError): def __str__(self): return f""" - Service {self.service_name} should be either stopped or dead from an error before we back up. - Normally, this error is unreachable because we do try ensure this. - Apparently, not this time. - """ + Service {self.service_name} should be either stopped or dead from + an error before we back up. + Normally, this error is unreachable because we do try ensure this. + Apparently, not this time. + """ class Backups: """A stateless controller class for backups""" - ### Providers + # Providers @staticmethod def provider(): @@ -172,7 +176,7 @@ class Backups: user_data["backup"] = DEFAULT_JSON_PROVIDER - ### Init + # Init @staticmethod def init_repo(): @@ -191,7 +195,7 @@ class Backups: return False - ### Backup + # Backup @staticmethod def back_up(service: Service): @@ -221,7 +225,8 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) return snapshot - ### Restoring + # Restoring + @staticmethod def _ensure_queued_restore_job(service, snapshot) -> Job: job = get_restore_job(service) @@ -237,12 +242,17 @@ class Backups: Jobs.update(job, status=JobStatus.RUNNING) try: - Backups._restore_service_from_snapshot(service, snapshot.id, verify=False) + Backups._restore_service_from_snapshot( + service, + snapshot.id, + verify=False, + ) except Exception as e: Backups._restore_service_from_snapshot( service, failsafe_snapshot.id, verify=False ) raise e + # TODO: Do we really have to forget this snapshot? — Inex Backups.forget_snapshot(failsafe_snapshot) @staticmethod @@ -295,8 +305,9 @@ class Backups: else: raise NotImplementedError( """ - We do not know if there is enough space for restoration because there is some novel restore strategy used! - This is a developer's fault, open a issue please + We do not know if there is enough space for restoration because + there is some novel restore strategy used! 
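# A sketch of the dispatch this message guards, collapsed into one exhaustive
# function over the enum so that any newly added strategy fails loudly in a
# single place (needed_space_for is a hypothetical helper, not part of the
# Backups class):
from selfprivacy_api.graphql.common_types.backup import RestoreStrategy

def needed_space_for(strategy, snap_size: int, current_usage: int) -> int:
    if strategy == RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE:
        return snap_size                     # a full verified copy lands first
    if strategy == RestoreStrategy.INPLACE:
        return snap_size - current_usage     # existing folders are reused
    raise NotImplementedError(f"unknown restore strategy: {strategy}")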
+ This is a developer's fault, open an issue please """ ) available_space = Backups.space_usable_for_service(service) @@ -307,15 +318,20 @@ class Backups: ) @staticmethod - def _restore_service_from_snapshot(service: Service, snapshot_id: str, verify=True): + def _restore_service_from_snapshot( + service: Service, + snapshot_id: str, + verify=True, + ): folders = service.get_folders() Backups.provider().backupper.restore_from_backup( snapshot_id, folders, + verify=verify, ) - ### Snapshots + # Snapshots @staticmethod def get_snapshots(service: Service) -> List[Snapshot]: @@ -377,7 +393,7 @@ class Backups: # expiring cache entry Storage.cache_snapshot(snapshot) - ### Autobackup + # Autobackup @staticmethod def is_autobackup_enabled(service: Service) -> bool: @@ -472,7 +488,7 @@ class Backups: ) ] - ### Helpers + # Helpers @staticmethod def space_usable_for_service(service: Service) -> int: @@ -501,5 +517,8 @@ class Backups: # if we backup the service that is failing to restore it to the # previous snapshot, its status can be FAILED # And obviously restoring a failed service is the moun route - if service.get_status() not in [ServiceStatus.INACTIVE, ServiceStatus.FAILED]: + if service.get_status() not in [ + ServiceStatus.INACTIVE, + ServiceStatus.FAILED, + ]: raise NotDeadError(service) diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 24eb108..05adede 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -30,7 +30,12 @@ class AbstractBackupper(ABC): raise NotImplementedError @abstractmethod - def restore_from_backup(self, snapshot_id: str, folders: List[str], verify=True): + def restore_from_backup( + self, + snapshot_id: str, + folders: List[str], + verify=True, + ): """Restore a target folder using a snapshot""" raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 565a084..e04eaaf 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -81,7 +81,7 @@ class ResticBackupper(AbstractBackupper): mount_command.insert(0, "nohup") handle = subprocess.Popen(mount_command, stdout=subprocess.DEVNULL, shell=False) sleep(2) - if not "ids" in listdir(dir): + if "ids" not in listdir(dir): raise IOError("failed to mount dir ", dir) return handle @@ -211,7 +211,12 @@ class ResticBackupper(AbstractBackupper): except ValueError as e: raise ValueError("cannot restore a snapshot: " + output) from e - def restore_from_backup(self, snapshot_id, folders: List[str], verify=True): + def restore_from_backup( + self, + snapshot_id, + folders: List[str], + verify=True, + ): """ Restore from backup with restic """ @@ -236,6 +241,9 @@ class ResticBackupper(AbstractBackupper): dst = folder sync(src, dst) + if not verify: + self.unmount_repo(dir) + def do_restore(self, snapshot_id, target="/", verify=False): """barebones restic restore""" restore_command = self.restic_command( From fa53264136b918f196cfa917227fe1476d472fe7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 19 Jul 2023 10:51:36 +0000 Subject: [PATCH 464/537] refactor(backup):remove unused verify arg from do_restore --- selfprivacy_api/backup/backuppers/restic_backupper.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py 
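# Context for the hunk below: restic's "restore --verify" re-reads the
# restored files and checks them against the repository, which is why the
# verified path restores into a temporary directory before syncing over the
# live folders. A sketch of the command _raw_verified_restore assembles (the
# real code builds it through self.restic_command with repo and key flags):
def verified_restore_command(snapshot_id: str, target: str) -> list:
    return ["restic", "restore", snapshot_id, "--target", target, "--verify"]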
index e04eaaf..fa7b2ee 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -225,7 +225,7 @@ class ResticBackupper(AbstractBackupper): with tempfile.TemporaryDirectory() as dir: if verify: - self.do_restore(snapshot_id, target=dir, verify=verify) + self._raw_verified_restore(snapshot_id, target=dir) snapshot_root = dir else: # attempting inplace restore via mount + sync self.mount_repo(dir) @@ -244,16 +244,11 @@ class ResticBackupper(AbstractBackupper): if not verify: self.unmount_repo(dir) - def do_restore(self, snapshot_id, target="/", verify=False): + def _raw_verified_restore(self, snapshot_id, target="/"): """barebones restic restore""" restore_command = self.restic_command( - "restore", - snapshot_id, - "--target", - target, + "restore", snapshot_id, "--target", target, "--verify" ) - if verify: - restore_command.append("--verify") with subprocess.Popen( restore_command, stdout=subprocess.PIPE, shell=False From 3067d353d883e8487e8922e5ba53dd955481982b Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 19 Jul 2023 15:59:29 +0300 Subject: [PATCH 465/537] fix(backups): Do not remove failsafe snapshot after recovery --- selfprivacy_api/backup/__init__.py | 43 +++++++++++++++--------------- tests/test_graphql/test_backup.py | 5 +++- 2 files changed, 26 insertions(+), 22 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index bbcebb7..83ed569 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,5 +1,4 @@ from datetime import datetime, timedelta -from operator import add from os import statvfs from typing import List, Optional @@ -58,7 +57,7 @@ class Backups: # Providers @staticmethod - def provider(): + def provider() -> AbstractBackupProvider: return Backups._lookup_provider() @staticmethod @@ -68,8 +67,8 @@ class Backups: key: str, location: str, repo_id: str = "", - ): - provider = Backups._construct_provider( + ) -> None: + provider: AbstractBackupProvider = Backups._construct_provider( kind, login, key, @@ -79,7 +78,7 @@ class Backups: Storage.store_provider(provider) @staticmethod - def reset(reset_json=True): + def reset(reset_json=True) -> None: Storage.reset() if reset_json: try: @@ -180,7 +179,7 @@ class Backups: # Init @staticmethod - def init_repo(): + def init_repo() -> None: Backups.provider().backupper.init() Storage.mark_as_init() @@ -199,7 +198,7 @@ class Backups: # Backup @staticmethod - def back_up(service: Service): + def back_up(service: Service) -> Snapshot: """The top-level function to back up a service""" folders = service.get_folders() tag = service.get_id() @@ -238,7 +237,11 @@ class Backups: return job @staticmethod - def _inplace_restore(service: Service, snapshot: Snapshot, job: Job): + def _inplace_restore( + service: Service, + snapshot: Snapshot, + job: Job, + ) -> None: failsafe_snapshot = Backups.back_up(service) Jobs.update(job, status=JobStatus.RUNNING) @@ -253,13 +256,11 @@ class Backups: service, failsafe_snapshot.id, verify=False ) raise e - # TODO: Do we really have to forget this snapshot? 
— Inex - Backups.forget_snapshot(failsafe_snapshot) @staticmethod def restore_snapshot( snapshot: Snapshot, strategy=RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE - ): + ) -> None: service = get_service_by_id(snapshot.service_name) if service is None: raise ValueError( @@ -290,7 +291,7 @@ class Backups: @staticmethod def _assert_restorable( snapshot: Snapshot, strategy=RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE - ): + ) -> None: service = get_service_by_id(snapshot.service_name) if service is None: raise ValueError( @@ -323,7 +324,7 @@ class Backups: service: Service, snapshot_id: str, verify=True, - ): + ) -> None: folders = service.get_folders() Backups.provider().backupper.restore_from_backup( @@ -369,12 +370,12 @@ class Backups: return snap @staticmethod - def forget_snapshot(snapshot: Snapshot): + def forget_snapshot(snapshot: Snapshot) -> None: Backups.provider().backupper.forget_snapshot(snapshot.id) Storage.delete_cached_snapshot(snapshot) @staticmethod - def force_snapshot_cache_reload(): + def force_snapshot_cache_reload() -> None: upstream_snapshots = Backups.provider().backupper.get_snapshots() Storage.invalidate_snapshot_storage() for snapshot in upstream_snapshots: @@ -387,7 +388,7 @@ class Backups: ) @staticmethod - def _store_last_snapshot(service_id: str, snapshot: Snapshot): + def _store_last_snapshot(service_id: str, snapshot: Snapshot) -> None: """What do we do with a snapshot that is just made?""" # non-expiring timestamp of the last Storage.store_last_timestamp(service_id, snapshot) @@ -401,16 +402,16 @@ class Backups: return Storage.is_autobackup_set(service.get_id()) @staticmethod - def enable_autobackup(service: Service): + def enable_autobackup(service: Service) -> None: Storage.set_autobackup(service) @staticmethod - def disable_autobackup(service: Service): + def disable_autobackup(service: Service) -> None: """also see disable_all_autobackup()""" Storage.unset_autobackup(service) @staticmethod - def disable_all_autobackup(): + def disable_all_autobackup() -> None: """ Disables all automatic backing up, but does not change per-service settings @@ -423,7 +424,7 @@ class Backups: return Storage.autobackup_period_minutes() @staticmethod - def set_autobackup_period_minutes(minutes: int): + def set_autobackup_period_minutes(minutes: int) -> None: """ 0 and negative numbers are equivalent to disable. 
Setting to a positive number may result in a backup very soon @@ -445,7 +446,7 @@ class Backups: @staticmethod def services_to_back_up(time: datetime) -> List[Service]: - result = [] + result: list[Service] = [] for id in Backups._service_ids_to_back_up(time): service = get_service_by_id(id) if service is None: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 02d3487..9f13e52 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -408,7 +408,10 @@ def test_restore_snapshot_task( assert file.read() == content snaps = Backups.get_snapshots(dummy_service) - assert len(snaps) == 1 + if restore_strategy == RestoreStrategy.INPLACE: + assert len(snaps) == 2 + else: + assert len(snaps) == 1 def test_autobackup_enable_service(backups, dummy_service): From 5253780cc8aa4aef778fe4deaa8bdefce4fe7470 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 19 Jul 2023 15:59:51 +0300 Subject: [PATCH 466/537] style: linting of backups module --- selfprivacy_api/backup/backuppers/__init__.py | 12 +-- .../backup/backuppers/restic_backupper.py | 92 +++++++++++++------ selfprivacy_api/backup/storage.py | 8 +- 3 files changed, 79 insertions(+), 33 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 05adede..7051a6a 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -5,7 +5,7 @@ from selfprivacy_api.models.backup.snapshot import Snapshot class AbstractBackupper(ABC): - def __init__(self): + def __init__(self) -> None: pass @abstractmethod @@ -13,11 +13,11 @@ class AbstractBackupper(ABC): raise NotImplementedError @abstractmethod - def set_creds(self, account: str, key: str, repo: str): + def set_creds(self, account: str, key: str, repo: str) -> None: raise NotImplementedError @abstractmethod - def start_backup(self, folders: List[str], repo_name: str): + def start_backup(self, folders: List[str], repo_name: str) -> Snapshot: raise NotImplementedError @abstractmethod @@ -26,7 +26,7 @@ class AbstractBackupper(ABC): raise NotImplementedError @abstractmethod - def init(self): + def init(self) -> None: raise NotImplementedError @abstractmethod @@ -35,7 +35,7 @@ class AbstractBackupper(ABC): snapshot_id: str, folders: List[str], verify=True, - ): + ) -> None: """Restore a target folder using a snapshot""" raise NotImplementedError @@ -44,5 +44,5 @@ class AbstractBackupper(ABC): raise NotImplementedError @abstractmethod - def forget_snapshot(self, snapshot_id): + def forget_snapshot(self, snapshot_id) -> None: raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index fa7b2ee..60e8285 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -21,7 +21,7 @@ from selfprivacy_api.backup.local_secret import LocalBackupSecret class ResticBackupper(AbstractBackupper): - def __init__(self, login_flag: str, key_flag: str, type: str): + def __init__(self, login_flag: str, key_flag: str, type: str) -> None: self.login_flag = login_flag self.key_flag = key_flag self.type = type @@ -29,7 +29,7 @@ class ResticBackupper(AbstractBackupper): self.key = "" self.repo = "" - def set_creds(self, account: str, key: str, repo: str): + def set_creds(self, account: str, key: str, repo: str) -> None: self.account = account self.key = key self.repo = repo @@ -79,7 +79,11 @@ 
class ResticBackupper(AbstractBackupper): def mount_repo(self, dir): mount_command = self.restic_command("mount", dir) mount_command.insert(0, "nohup") - handle = subprocess.Popen(mount_command, stdout=subprocess.DEVNULL, shell=False) + handle = subprocess.Popen( + mount_command, + stdout=subprocess.DEVNULL, + shell=False, + ) sleep(2) if "ids" not in listdir(dir): raise IOError("failed to mount dir ", dir) @@ -109,12 +113,13 @@ class ResticBackupper(AbstractBackupper): result.append(item) return result - def start_backup(self, folders: List[str], tag: str): + def start_backup(self, folders: List[str], tag: str) -> Snapshot: """ Start backup with restic """ - # but maybe it is ok to accept a union of a string and an array of strings + # but maybe it is ok to accept a union + # of a string and an array of strings assert not isinstance(folders, str) backup_command = self.restic_command( @@ -125,20 +130,34 @@ class ResticBackupper(AbstractBackupper): ) messages = [] - job = get_backup_job(get_service_by_id(tag)) + + service = get_service_by_id(tag) + if service is None: + raise ValueError("No service with id ", tag) + + job = get_backup_job(service) try: for raw_message in output_yielder(backup_command): - message = self.parse_message(raw_message, job) + message = self.parse_message( + raw_message, + job, + ) messages.append(message) - return ResticBackupper._snapshot_from_backup_messages(messages, tag) + return ResticBackupper._snapshot_from_backup_messages( + messages, + tag, + ) except ValueError as e: - raise ValueError("could not create a snapshot: ", messages) from e + raise ValueError("Could not create a snapshot: ", messages) from e @staticmethod def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: for message in messages: if message["message_type"] == "summary": - return ResticBackupper._snapshot_from_fresh_summary(message, repo_name) + return ResticBackupper._snapshot_from_fresh_summary( + message, + repo_name, + ) raise ValueError("no summary message in restic json output") def parse_message(self, raw_message_line: str, job=None) -> dict: @@ -162,7 +181,7 @@ class ResticBackupper(AbstractBackupper): service_name=repo_name, ) - def init(self): + def init(self) -> None: init_command = self.restic_command( "init", ) @@ -173,7 +192,7 @@ class ResticBackupper(AbstractBackupper): stderr=subprocess.STDOUT, ) as process_handle: output = process_handle.communicate()[0].decode("utf-8") - if not "created restic repository" in output: + if "created restic repository" not in output: raise ValueError("cannot init a repo: " + output) def is_initted(self) -> bool: @@ -182,7 +201,11 @@ class ResticBackupper(AbstractBackupper): "--json", ) - with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle: + with subprocess.Popen( + command, + stdout=subprocess.PIPE, + shell=False, + ) as handle: output = handle.communicate()[0].decode("utf-8") if not ResticBackupper.has_json(output): return False @@ -216,7 +239,7 @@ class ResticBackupper(AbstractBackupper): snapshot_id, folders: List[str], verify=True, - ): + ) -> None: """ Restore from backup with restic """ @@ -235,9 +258,7 @@ class ResticBackupper(AbstractBackupper): for folder in folders: src = join(snapshot_root, folder.strip("/")) if not exists(src): - raise ValueError( - f"there is no such path: {src}. We tried to find {folder}" - ) + raise ValueError(f"No such path: {src}. 
We tried to find {folder}") dst = folder sync(src, dst) @@ -254,7 +275,8 @@ class ResticBackupper(AbstractBackupper): restore_command, stdout=subprocess.PIPE, shell=False ) as handle: - # for some reason restore does not support nice reporting of progress via json + # for some reason restore does not support + # nice reporting of progress via json output = handle.communicate()[0].decode("utf-8") if "restoring" not in output: raise ValueError("cannot restore a snapshot: " + output) @@ -264,21 +286,36 @@ class ResticBackupper(AbstractBackupper): ) # none should be impossible after communicate if handle.returncode != 0: raise ValueError( - "restore exited with errorcode", returncode, ":", output + "restore exited with errorcode", + handle.returncode, + ":", + output, ) - def forget_snapshot(self, snapshot_id): - """either removes snapshot or marks it for deletion later depending on server settings""" + def forget_snapshot(self, snapshot_id) -> None: + """ + Either removes snapshot or marks it for deletion later, + depending on server settings + """ forget_command = self.restic_command( "forget", snapshot_id, ) with subprocess.Popen( - forget_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False + forget_command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=False, ) as handle: - # for some reason restore does not support nice reporting of progress via json - output, err = [string.decode("utf-8") for string in handle.communicate()] + # for some reason restore does not support + # nice reporting of progress via json + output, err = [ + string.decode( + "utf-8", + ) + for string in handle.communicate() + ] if "no matching ID found" in err: raise ValueError( @@ -290,7 +327,10 @@ class ResticBackupper(AbstractBackupper): ) # none should be impossible after communicate if handle.returncode != 0: raise ValueError( - "forget exited with errorcode", returncode, ":", output + "forget exited with errorcode", + handle.returncode, + ":", + output, ) def _load_snapshots(self) -> object: @@ -336,7 +376,7 @@ class ResticBackupper(AbstractBackupper): starting_index = ResticBackupper.json_start(output) if starting_index == -1: - raise ValueError("There is no json in the restic output : " + output) + raise ValueError("There is no json in the restic output: " + output) truncated_output = output[starting_index:] json_messages = truncated_output.splitlines() diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 29a5462..99a4950 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -74,11 +74,17 @@ class Storage: return None snapshot = hash_as_model(redis, key, Snapshot) + if not snapshot: + return None return snapshot.created_at @staticmethod def store_last_timestamp(service_id: str, snapshot: Snapshot): - store_model_as_hash(redis, Storage.__last_backup_key(service_id), snapshot) + store_model_as_hash( + redis, + Storage.__last_backup_key(service_id), + snapshot, + ) @staticmethod def cache_snapshot(snapshot: Snapshot): From a757dc7cc18a0d2bab18511ec1dd5491e7d27fce Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 19 Jul 2023 16:00:04 +0300 Subject: [PATCH 467/537] chore: add rclone as a nix shell dependency --- shell.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/shell.nix b/shell.nix index f1ce9b2..c227f68 100644 --- a/shell.nix +++ b/shell.nix @@ -31,6 +31,7 @@ pkgs.mkShell { pkgs.black pkgs.redis pkgs.restic + pkgs.rclone ]; shellHook = '' PYTHONPATH=${sp-python}/${sp-python.sitePackages} From 
5bee12450047617b1fab815cab1893bc618795b0 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 19 Jul 2023 16:49:35 +0300 Subject: [PATCH 468/537] style: set 88 as a line length limit --- .flake8 | 4 ++++ .pylintrc | 3 +++ shell.nix | 2 ++ 3 files changed, 9 insertions(+) create mode 100644 .flake8 diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..e4e4892 --- /dev/null +++ b/.flake8 @@ -0,0 +1,4 @@ +[flake8] +max-line-length = 80 +select = C,E,F,W,B,B950 +extend-ignore = E203, E501 diff --git a/.pylintrc b/.pylintrc index 9135ea9..5a02c70 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,3 +1,6 @@ [MASTER] init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))" extension-pkg-whitelist=pydantic + +[FORMAT] +max-line-length=88 diff --git a/shell.nix b/shell.nix index c227f68..bce16bd 100644 --- a/shell.nix +++ b/shell.nix @@ -23,6 +23,8 @@ let uvicorn redis strawberry-graphql + flake8-bugbear + flake8 ]); in pkgs.mkShell { From 466160dbf9b261fea6c9783b8aa691a5a2e1c021 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 19 Jul 2023 15:09:49 +0000 Subject: [PATCH 469/537] feature(backup):disable finegrain control over services to back up --- selfprivacy_api/backup/__init__.py | 13 ++-- selfprivacy_api/backup/storage.py | 6 +- .../services/test_service/__init__.py | 12 ++++ tests/test_graphql/test_backup.py | 67 ++++++++++--------- 4 files changed, 57 insertions(+), 41 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 83ed569..25522a5 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -5,6 +5,7 @@ from typing import List, Optional from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.services import get_service_by_id +from selfprivacy_api.services import get_all_services from selfprivacy_api.services.service import ( Service, ServiceStatus, @@ -480,15 +481,9 @@ class Backups: @staticmethod def _service_ids_to_back_up(time: datetime) -> List[str]: - services = Storage.services_with_autobackup() - return [ - id - for id in services - if Backups.is_time_to_backup_service( - id, - time, - ) - ] + # TODO: simplify in light that we do not use redis for this anymore + service_ids = [service.get_id() for service in get_all_services()] + return [id for id in service_ids if Backups.is_time_to_backup_service(id, time)] # Helpers diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 99a4950..f20bd4f 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -12,6 +12,7 @@ from selfprivacy_api.utils.redis_model_storage import ( from selfprivacy_api.services.service import Service +from selfprivacy_api.services import get_service_by_id from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_kind @@ -130,7 +131,10 @@ class Storage: @staticmethod def is_autobackup_set(service_name: str) -> bool: - return redis.exists(Storage.__autobackup_key(service_name)) + service = get_service_by_id(service_name) + if service is None: + raise ValueError("nonexistent service: ", service_name) + return service.can_be_backed_up() @staticmethod def autobackup_period_minutes() -> Optional[int]: diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index a0fb02a..d062700 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ 
b/selfprivacy_api/services/test_service/__init__.py @@ -24,6 +24,7 @@ class DummyService(Service): folders: List[str] = [] startstop_delay = 0 + backuppable = True def __init_subclass__(cls, folders: List[str]): cls.folders = folders @@ -110,6 +111,17 @@ class DummyService(Service): if delay_sec == 0: handle.communicate() + @classmethod + def set_backuppable(cls, new_value: bool) -> None: + """For tests: because can_be_backed_up is static, + we can only set it up dynamically for tests via a classmethod""" + cls.backuppable = new_value + + @classmethod + def can_be_backed_up(cls) -> bool: + """`True` if the service can be backed up.""" + return cls.backuppable + @classmethod def enable(cls): pass diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 9f13e52..88bbd53 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -8,7 +8,7 @@ from datetime import datetime, timedelta, timezone from subprocess import Popen import selfprivacy_api.services as services -from selfprivacy_api.services import Service +from selfprivacy_api.services import Service, get_all_services from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.test_service import DummyService @@ -414,16 +414,6 @@ def test_restore_snapshot_task( assert len(snaps) == 1 -def test_autobackup_enable_service(backups, dummy_service): - assert not Backups.is_autobackup_enabled(dummy_service) - - Backups.enable_autobackup(dummy_service) - assert Backups.is_autobackup_enabled(dummy_service) - - Backups.disable_autobackup(dummy_service) - assert not Backups.is_autobackup_enabled(dummy_service) - - def test_autobackup_enable_service_storage(backups, dummy_service): assert len(Storage.services_with_autobackup()) == 0 @@ -463,11 +453,36 @@ def test_no_default_autobackup(backups, dummy_service): assert not Backups.is_time_to_backup(now) +def backuppable_services() -> list[Service]: + return [service for service in get_all_services() if service.can_be_backed_up()] + + +def test_services_to_back_up(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + dummy_service.set_backuppable(False) + services = Backups.services_to_back_up(now) + assert len(services) == 0 + + dummy_service.set_backuppable(True) + + services = Backups.services_to_back_up(now) + assert len(services) == 0 + + Backups.set_autobackup_period_minutes(backup_period) + + services = Backups.services_to_back_up(now) + assert len(services) == len(backuppable_services()) + assert dummy_service.get_id() in [ + service.get_id() for service in backuppable_services() + ] + + def test_autobackup_timer_periods(backups, dummy_service): now = datetime.now(timezone.utc) backup_period = 13 # minutes - Backups.enable_autobackup(dummy_service) assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) assert not Backups.is_time_to_backup(now) @@ -483,16 +498,21 @@ def test_autobackup_timer_periods(backups, dummy_service): def test_autobackup_timer_enabling(backups, dummy_service): now = datetime.now(timezone.utc) backup_period = 13 # minutes + dummy_service.set_backuppable(False) Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup( + now + ) # there are other services too, not just our dummy + + # not backuppable service is not backuppable even if period is set assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) - assert not Backups.is_time_to_backup(now) - 
Backups.enable_autobackup(dummy_service) + dummy_service.set_backuppable(True) + assert dummy_service.can_be_backed_up() assert Backups.is_time_to_backup_service(dummy_service.get_id(), now) - assert Backups.is_time_to_backup(now) - Backups.disable_autobackup(dummy_service) + Backups.disable_all_autobackup() assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) assert not Backups.is_time_to_backup(now) @@ -510,15 +530,12 @@ def test_autobackup_timing(backups, dummy_service): now = datetime.now(timezone.utc) assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) - assert not Backups.is_time_to_backup(now) past = datetime.now(timezone.utc) - timedelta(minutes=1) assert not Backups.is_time_to_backup_service(dummy_service.get_id(), past) - assert not Backups.is_time_to_backup(past) future = datetime.now(timezone.utc) + timedelta(minutes=backup_period + 2) assert Backups.is_time_to_backup_service(dummy_service.get_id(), future) - assert Backups.is_time_to_backup(future) # Storage @@ -581,18 +598,6 @@ def test_provider_storage(backups_backblaze): assert restored_provider.key == "KEY" -def test_services_to_back_up(backups, dummy_service): - backup_period = 13 # minutes - now = datetime.now(timezone.utc) - - Backups.enable_autobackup(dummy_service) - Backups.set_autobackup_period_minutes(backup_period) - - services = Backups.services_to_back_up(now) - assert len(services) == 1 - assert services[0].get_id() == dummy_service.get_id() - - def test_sync(dummy_service): src = dummy_service.get_folders()[0] dst = dummy_service.get_folders()[1] From f4ac3d29a9c3217350c319cf654b51dc50c0d66c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 19 Jul 2023 15:35:24 +0000 Subject: [PATCH 470/537] feature(backup):remove code for finegrained autobackup control --- selfprivacy_api/backup/__init__.py | 58 ++++++++++-------------------- selfprivacy_api/backup/storage.py | 29 --------------- tests/test_graphql/test_backup.py | 34 ++++++------------ 3 files changed, 29 insertions(+), 92 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 25522a5..997dec4 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -398,27 +398,6 @@ class Backups: # Autobackup - @staticmethod - def is_autobackup_enabled(service: Service) -> bool: - return Storage.is_autobackup_set(service.get_id()) - - @staticmethod - def enable_autobackup(service: Service) -> None: - Storage.set_autobackup(service) - - @staticmethod - def disable_autobackup(service: Service) -> None: - """also see disable_all_autobackup()""" - Storage.unset_autobackup(service) - - @staticmethod - def disable_all_autobackup() -> None: - """ - Disables all automatic backing up, - but does not change per-service settings - """ - Storage.delete_backup_period() - @staticmethod def autobackup_period_minutes() -> Optional[int]: """None means autobackup is disabled""" @@ -436,6 +415,14 @@ class Backups: return Storage.store_autobackup_period_minutes(minutes) + @staticmethod + def disable_all_autobackup() -> None: + """ + Disables all automatic backing up, + but does not change per-service settings + """ + Storage.delete_backup_period() + @staticmethod def is_time_to_backup(time: datetime) -> bool: """ @@ -443,19 +430,15 @@ class Backups: of automatic backups """ - return Backups._service_ids_to_back_up(time) != [] + return Backups.services_to_back_up(time) != [] @staticmethod def services_to_back_up(time: datetime) -> List[Service]: - result: 
list[Service] = [] - for id in Backups._service_ids_to_back_up(time): - service = get_service_by_id(id) - if service is None: - raise ValueError( - "Cannot look up a service scheduled for backup!", - ) - result.append(service) - return result + return [ + service + for service in get_all_services() + if Backups.is_time_to_backup_service(service, time) + ] @staticmethod def get_last_backed_up(service: Service) -> Optional[datetime]: @@ -463,11 +446,12 @@ class Backups: return Storage.get_last_backup_time(service.get_id()) @staticmethod - def is_time_to_backup_service(service_id: str, time: datetime): + def is_time_to_backup_service(service: Service, time: datetime): period = Backups.autobackup_period_minutes() - if period is None: + service_id = service.get_id() + if not service.can_be_backed_up(): return False - if not Storage.is_autobackup_set(service_id): + if period is None: return False last_backup = Storage.get_last_backup_time(service_id) @@ -479,12 +463,6 @@ class Backups: return True return False - @staticmethod - def _service_ids_to_back_up(time: datetime) -> List[str]: - # TODO: simplify in light that we do not use redis for this anymore - service_ids = [service.get_id() for service in get_all_services()] - return [id for id in service_ids if Backups.is_time_to_backup_service(id, time)] - # Helpers @staticmethod diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index f20bd4f..87e0aa6 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -20,7 +20,6 @@ from selfprivacy_api.backup.providers import get_kind # a hack to store file path. REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day -REDIS_AUTOBACKUP_ENABLED_PREFIX = "backup:autobackup:services:" REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:" @@ -42,7 +41,6 @@ class Storage: REDIS_INITTED_CACHE_PREFIX, REDIS_SNAPSHOTS_PREFIX, REDIS_LAST_BACKUP_PREFIX, - REDIS_AUTOBACKUP_ENABLED_PREFIX, ] for prefix in prefixes_to_clean: @@ -54,12 +52,6 @@ class Storage: for key in redis.keys(REDIS_SNAPSHOTS_PREFIX + "*"): redis.delete(key) - @staticmethod - def services_with_autobackup() -> List[str]: - keys = redis.keys(REDIS_AUTOBACKUP_ENABLED_PREFIX + "*") - service_ids = [key.split(":")[-1] for key in keys] - return service_ids - @staticmethod def __last_backup_key(service_id): return REDIS_LAST_BACKUP_PREFIX + service_id @@ -115,27 +107,6 @@ class Storage: result.append(snapshot) return result - @staticmethod - def __autobackup_key(service_name: str) -> str: - return REDIS_AUTOBACKUP_ENABLED_PREFIX + service_name - - @staticmethod - def set_autobackup(service: Service): - # shortcut this - redis.set(Storage.__autobackup_key(service.get_id()), 1) - - @staticmethod - def unset_autobackup(service: Service): - """also see disable_all_autobackup()""" - redis.delete(Storage.__autobackup_key(service.get_id())) - - @staticmethod - def is_autobackup_set(service_name: str) -> bool: - service = get_service_by_id(service_name) - if service is None: - raise ValueError("nonexistent service: ", service_name) - return service.can_be_backed_up() - @staticmethod def autobackup_period_minutes() -> Optional[int]: """None means autobackup is disabled""" diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 88bbd53..2fa9531 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -414,17 +414,6 @@ def 
test_restore_snapshot_task( assert len(snaps) == 1 -def test_autobackup_enable_service_storage(backups, dummy_service): - assert len(Storage.services_with_autobackup()) == 0 - - Backups.enable_autobackup(dummy_service) - assert len(Storage.services_with_autobackup()) == 1 - assert Storage.services_with_autobackup()[0] == dummy_service.get_id() - - Backups.disable_autobackup(dummy_service) - assert len(Storage.services_with_autobackup()) == 0 - - def test_set_autobackup_period(backups): assert Backups.autobackup_period_minutes() is None @@ -449,7 +438,7 @@ def test_set_autobackup_period(backups): def test_no_default_autobackup(backups, dummy_service): now = datetime.now(timezone.utc) - assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup_service(dummy_service, now) assert not Backups.is_time_to_backup(now) @@ -483,15 +472,15 @@ def test_autobackup_timer_periods(backups, dummy_service): now = datetime.now(timezone.utc) backup_period = 13 # minutes - assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup_service(dummy_service, now) assert not Backups.is_time_to_backup(now) Backups.set_autobackup_period_minutes(backup_period) - assert Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert Backups.is_time_to_backup_service(dummy_service, now) assert Backups.is_time_to_backup(now) Backups.set_autobackup_period_minutes(0) - assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup_service(dummy_service, now) assert not Backups.is_time_to_backup(now) @@ -506,14 +495,14 @@ def test_autobackup_timer_enabling(backups, dummy_service): ) # there are other services too, not just our dummy # not backuppable service is not backuppable even if period is set - assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup_service(dummy_service, now) dummy_service.set_backuppable(True) assert dummy_service.can_be_backed_up() - assert Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert Backups.is_time_to_backup_service(dummy_service, now) Backups.disable_all_autobackup() - assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup_service(dummy_service, now) assert not Backups.is_time_to_backup(now) @@ -521,21 +510,20 @@ def test_autobackup_timing(backups, dummy_service): backup_period = 13 # minutes now = datetime.now(timezone.utc) - Backups.enable_autobackup(dummy_service) Backups.set_autobackup_period_minutes(backup_period) - assert Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert Backups.is_time_to_backup_service(dummy_service, now) assert Backups.is_time_to_backup(now) Backups.back_up(dummy_service) now = datetime.now(timezone.utc) - assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now) + assert not Backups.is_time_to_backup_service(dummy_service, now) past = datetime.now(timezone.utc) - timedelta(minutes=1) - assert not Backups.is_time_to_backup_service(dummy_service.get_id(), past) + assert not Backups.is_time_to_backup_service(dummy_service, past) future = datetime.now(timezone.utc) + timedelta(minutes=backup_period + 2) - assert Backups.is_time_to_backup_service(dummy_service.get_id(), future) + assert Backups.is_time_to_backup_service(dummy_service, future) # Storage From 2df448a4a9076ea05152fcb2cfbdd39c409e0e0f Mon Sep 17 00:00:00 2001 From: Inex 
Code
Date: Thu, 20 Jul 2023 17:11:02 +0300
Subject: [PATCH 471/537] fix(backups): Do not shut down the service during backup

We do not want the user to experience outages during automatic backups.
Generally, they are not even needed. We should use hooks for
service-specific tasks, such as creating the database dump, so we don't
have to shut down Postgres.

---
 selfprivacy_api/backup/__init__.py | 31 ++++++++++++++++--------------
 1 file changed, 17 insertions(+), 14 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 997dec4..23e706f 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -4,8 +4,10 @@ from typing import List, Optional

 from selfprivacy_api.utils import ReadUserData, WriteUserData

-from selfprivacy_api.services import get_service_by_id
-from selfprivacy_api.services import get_all_services
+from selfprivacy_api.services import (
+    get_service_by_id,
+    get_all_services,
+)
 from selfprivacy_api.services.service import (
     Service,
     ServiceStatus,
@@ -210,15 +212,13 @@ class Backups:
         Jobs.update(job, status=JobStatus.RUNNING)

         try:
-            with StoppedService(service):
-                Backups.assert_dead(service)  # to be extra sure
-                service.pre_backup()
-                snapshot = Backups.provider().backupper.start_backup(
-                    folders,
-                    tag,
-                )
-                Backups._store_last_snapshot(tag, snapshot)
-                service.post_restore()
+            service.pre_backup()
+            snapshot = Backups.provider().backupper.start_backup(
+                folders,
+                tag,
+            )
+            Backups._store_last_snapshot(tag, snapshot)
+            service.post_restore()
         except Exception as e:
             Jobs.update(job, status=JobStatus.ERROR)
             raise e
@@ -489,9 +489,12 @@ class Backups:

     @staticmethod
     def assert_dead(service: Service):
-        # if we backup the service that is failing to restore it to the
-        # previous snapshot, its status can be FAILED
-        # And obviously restoring a failed service is the main route
+        """
+
+        If we backup the service that is failing to restore it to the previous snapshot,
+        its status can be FAILED.
+        And obviously restoring a failed service is the main route
+        """
         if service.get_status() not in [
             ServiceStatus.INACTIVE,
             ServiceStatus.FAILED,
         ]:
             raise NotDeadError(service)

From 0245d629fd66bd20a230a08fcbd3a45eb2b42fc7 Mon Sep 17 00:00:00 2001
From: Inex Code
Date: Thu, 20 Jul 2023 18:24:26 +0300
Subject: [PATCH 472/537] refactor(backups): linting

---
 selfprivacy_api/backup/__init__.py            | 73 +++++++++++++++----
 selfprivacy_api/backup/backuppers/__init__.py | 11 ++-
 .../backup/backuppers/none_backupper.py       |  6 +-
 .../backup/backuppers/restic_backupper.py     | 59 +++++++--------
 selfprivacy_api/backup/providers/none.py      |  2 +-
 selfprivacy_api/backup/storage.py             | 51 ++++++++-----
 selfprivacy_api/backup/tasks.py               | 22 ++++--
 .../graphql/mutations/backup_mutations.py     |  8 +-
 selfprivacy_api/graphql/queries/backup.py     |  2 +
 selfprivacy_api/jobs/__init__.py              | 36 ++++-----
 .../tokens/redis_tokens_repository.py         | 34 ++++-----
 selfprivacy_api/utils/redis_pool.py           |  4 +-
 12 files changed, 196 insertions(+), 112 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 23e706f..bd16488 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -1,3 +1,6 @@
+"""
+This module contains the controller class for backups.
+""" from datetime import datetime, timedelta from os import statvfs from typing import List, Optional @@ -42,8 +45,12 @@ DEFAULT_JSON_PROVIDER = { class NotDeadError(AssertionError): + """ + This error is raised when we try to back up a service that is not dead yet. + """ def __init__(self, service: Service): self.service_name = service.get_id() + super().__init__() def __str__(self): return f""" @@ -61,6 +68,9 @@ class Backups: @staticmethod def provider() -> AbstractBackupProvider: + """ + Returns the current backup storage provider. + """ return Backups._lookup_provider() @staticmethod @@ -71,6 +81,13 @@ class Backups: location: str, repo_id: str = "", ) -> None: + """ + Sets the new configuration of the backup storage provider. + + In case of `BackupProviderEnum.BACKBLAZE`, the `login` is the key ID, + the `key` is the key itself, and the `location` is the bucket name and + the `repo_id` is the bucket ID. + """ provider: AbstractBackupProvider = Backups._construct_provider( kind, login, @@ -82,6 +99,9 @@ class Backups: @staticmethod def reset(reset_json=True) -> None: + """ + Deletes all the data about the backup storage provider. + """ Storage.reset() if reset_json: try: @@ -183,11 +203,19 @@ class Backups: @staticmethod def init_repo() -> None: + """ + Initializes the backup repository. This is required once per repo. + """ Backups.provider().backupper.init() Storage.mark_as_init() @staticmethod def is_initted() -> bool: + """ + Returns whether the backup repository is initialized or not. + If it is not initialized, we cannot back up and probably should + call `init_repo` first. + """ if Storage.has_init_mark(): return True @@ -219,9 +247,9 @@ class Backups: ) Backups._store_last_snapshot(tag, snapshot) service.post_restore() - except Exception as e: + except Exception as error: Jobs.update(job, status=JobStatus.ERROR) - raise e + raise error Jobs.update(job, status=JobStatus.FINISHED) return snapshot @@ -252,16 +280,17 @@ class Backups: snapshot.id, verify=False, ) - except Exception as e: + except Exception as error: Backups._restore_service_from_snapshot( service, failsafe_snapshot.id, verify=False ) - raise e + raise error @staticmethod def restore_snapshot( snapshot: Snapshot, strategy=RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE ) -> None: + """Restores a snapshot to its original service using the given strategy""" service = get_service_by_id(snapshot.service_name) if service is None: raise ValueError( @@ -283,9 +312,9 @@ class Backups: service.post_restore() - except Exception as e: + except Exception as error: Jobs.update(job, status=JobStatus.ERROR) - raise e + raise error Jobs.update(job, status=JobStatus.FINISHED) @@ -338,6 +367,7 @@ class Backups: @staticmethod def get_snapshots(service: Service) -> List[Snapshot]: + """Returns all snapshots for a given service""" snapshots = Backups.get_all_snapshots() service_id = service.get_id() return list( @@ -349,8 +379,9 @@ class Backups: @staticmethod def get_all_snapshots() -> List[Snapshot]: + """Returns all snapshots""" cached_snapshots = Storage.get_cached_snapshots() - if cached_snapshots != []: + if cached_snapshots: return cached_snapshots # TODO: the oldest snapshots will get expired faster than the new ones. # How to detect that the end is missing? 
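# ---------------------------------------------------------------------------
# [Editor's aside, not part of the patch series.] The hunks above cache
# provider snapshots in Redis with a TTL, so a stale cache simply shows up as
# an empty result and is repopulated from the backend. A minimal standalone
# sketch of that pattern follows; the key prefix, the TTL constant, and the
# `fetch_upstream` callable are illustrative assumptions, not the repo's API.
import json

import redis

CACHE_PREFIX = "backups:snapshots:"
CACHE_TTL_SECONDS = 24 * 60 * 60  # one day, mirroring the patches' constant

r = redis.Redis(decode_responses=True)


def cached_snapshots() -> list[dict]:
    """Return whatever survived the TTL; an empty list means 'reload'."""
    values = (r.get(key) for key in r.keys(CACHE_PREFIX + "*"))
    return [json.loads(value) for value in values if value]


def reload_cache(fetch_upstream) -> None:
    """Drop every cached entry, then store fresh ones with an expiry."""
    for key in r.keys(CACHE_PREFIX + "*"):
        r.delete(key)
    for snapshot in fetch_upstream():
        key = CACHE_PREFIX + snapshot["id"]
        r.set(key, json.dumps(snapshot), ex=CACHE_TTL_SECONDS)
# ---------------------------------------------------------------------------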
@@ -359,24 +390,32 @@ class Backups: return Storage.get_cached_snapshots() @staticmethod - def get_snapshot_by_id(id: str) -> Optional[Snapshot]: - snap = Storage.get_cached_snapshot_by_id(id) + def get_snapshot_by_id(snapshot_id: str) -> Optional[Snapshot]: + """Returns a backup snapshot by its id""" + snap = Storage.get_cached_snapshot_by_id(snapshot_id) if snap is not None: return snap # Possibly our cache entry got invalidated, let's try one more time Backups.force_snapshot_cache_reload() - snap = Storage.get_cached_snapshot_by_id(id) + snap = Storage.get_cached_snapshot_by_id(snapshot_id) return snap @staticmethod def forget_snapshot(snapshot: Snapshot) -> None: + """Deletes a snapshot from the storage""" Backups.provider().backupper.forget_snapshot(snapshot.id) Storage.delete_cached_snapshot(snapshot) @staticmethod def force_snapshot_cache_reload() -> None: + """ + Forces a reload of the snapshot cache. + + This may be an expensive operation, so use it wisely. + User pays for the API calls. + """ upstream_snapshots = Backups.provider().backupper.get_snapshots() Storage.invalidate_snapshot_storage() for snapshot in upstream_snapshots: @@ -384,6 +423,7 @@ class Backups: @staticmethod def snapshot_restored_size(snapshot_id: str) -> int: + """Returns the size of the snapshot""" return Backups.provider().backupper.restored_size( snapshot_id, ) @@ -434,6 +474,7 @@ class Backups: @staticmethod def services_to_back_up(time: datetime) -> List[Service]: + """Returns a list of services that should be backed up at a given time""" return [ service for service in get_all_services() @@ -447,6 +488,7 @@ class Backups: @staticmethod def is_time_to_backup_service(service: Service, time: datetime): + """Returns True if it is time to back up a service""" period = Backups.autobackup_period_minutes() service_id = service.get_id() if not service.can_be_backed_up(): @@ -467,6 +509,10 @@ class Backups: @staticmethod def space_usable_for_service(service: Service) -> int: + """ + Returns the amount of space available on the volume the given + service is located on. + """ folders = service.get_folders() if folders == []: raise ValueError("unallocated service", service.get_id()) @@ -478,6 +524,8 @@ class Backups: @staticmethod def set_localfile_repo(file_path: str): + """Used by tests to set a local folder as a backup repo""" + # pylint: disable-next=invalid-name ProviderClass = get_provider(BackupProviderEnum.FILE) provider = ProviderClass( login="", @@ -490,10 +538,7 @@ class Backups: @staticmethod def assert_dead(service: Service): """ - - If we backup the service that is failing to restore it to the previous snapshot, - its status can be FAILED. - And obviously restoring a failed service is the main route + Checks if a service is dead and can be safely restored from a snapshot. 
""" if service.get_status() not in [ ServiceStatus.INACTIVE, diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 7051a6a..ea2350b 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -5,19 +5,25 @@ from selfprivacy_api.models.backup.snapshot import Snapshot class AbstractBackupper(ABC): + """Abstract class for backuppers""" + + # flake8: noqa: B027 def __init__(self) -> None: pass @abstractmethod def is_initted(self) -> bool: + """Returns true if the repository is initted""" raise NotImplementedError @abstractmethod def set_creds(self, account: str, key: str, repo: str) -> None: + """Set the credentials for the backupper""" raise NotImplementedError @abstractmethod - def start_backup(self, folders: List[str], repo_name: str) -> Snapshot: + def start_backup(self, folders: List[str], tag: str) -> Snapshot: + """Start a backup of the given folders""" raise NotImplementedError @abstractmethod @@ -27,6 +33,7 @@ class AbstractBackupper(ABC): @abstractmethod def init(self) -> None: + """Initialize the repository""" raise NotImplementedError @abstractmethod @@ -41,8 +48,10 @@ class AbstractBackupper(ABC): @abstractmethod def restored_size(self, snapshot_id: str) -> int: + """Get the size of the restored snapshot""" raise NotImplementedError @abstractmethod def forget_snapshot(self, snapshot_id) -> None: + """Forget a snapshot""" raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index 2ac2035..d9edaeb 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -5,13 +5,15 @@ from selfprivacy_api.backup.backuppers import AbstractBackupper class NoneBackupper(AbstractBackupper): + """A backupper that does nothing""" + def is_initted(self, repo_name: str = "") -> bool: return False def set_creds(self, account: str, key: str, repo: str): pass - def start_backup(self, folders: List[str], repo_name: str): + def start_backup(self, folders: List[str], tag: str): raise NotImplementedError def get_snapshots(self) -> List[Snapshot]: @@ -21,7 +23,7 @@ class NoneBackupper(AbstractBackupper): def init(self): raise NotImplementedError - def restore_from_backup(self, snapshot_id: str, folders: List[str]): + def restore_from_backup(self, snapshot_id: str, folders: List[str], verify=True): """Restore a target folder using a snapshot""" raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 60e8285..b69c85d 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -21,13 +21,14 @@ from selfprivacy_api.backup.local_secret import LocalBackupSecret class ResticBackupper(AbstractBackupper): - def __init__(self, login_flag: str, key_flag: str, type: str) -> None: + def __init__(self, login_flag: str, key_flag: str, storage_type: str) -> None: self.login_flag = login_flag self.key_flag = key_flag - self.type = type + self.storage_type = storage_type self.account = "" self.key = "" self.repo = "" + super().__init__() def set_creds(self, account: str, key: str, repo: str) -> None: self.account = account @@ -37,7 +38,7 @@ class ResticBackupper(AbstractBackupper): def restic_repo(self) -> str: # 
https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5 - return f"rclone:{self.type}{self.repo}" + return f"rclone:{self.storage_type}{self.repo}" def rclone_args(self): return "rclone.args=serve restic --stdio " + self.backend_rclone_args() @@ -72,12 +73,12 @@ class ResticBackupper(AbstractBackupper): tag, ] ) - if args != []: + if args: command.extend(ResticBackupper.__flatten_list(args)) return command - def mount_repo(self, dir): - mount_command = self.restic_command("mount", dir) + def mount_repo(self, mount_directory): + mount_command = self.restic_command("mount", mount_directory) mount_command.insert(0, "nohup") handle = subprocess.Popen( mount_command, @@ -85,28 +86,28 @@ class ResticBackupper(AbstractBackupper): shell=False, ) sleep(2) - if "ids" not in listdir(dir): - raise IOError("failed to mount dir ", dir) + if "ids" not in listdir(mount_directory): + raise IOError("failed to mount dir ", mount_directory) return handle - def unmount_repo(self, dir): - mount_command = ["umount", "-l", dir] + def unmount_repo(self, mount_directory): + mount_command = ["umount", "-l", mount_directory] with subprocess.Popen( mount_command, stdout=subprocess.PIPE, shell=False ) as handle: output = handle.communicate()[0].decode("utf-8") # TODO: check for exit code? if "error" in output.lower(): - return IOError("failed to unmount dir ", dir, ": ", output) + return IOError("failed to unmount dir ", mount_directory, ": ", output) - if not listdir(dir) == []: - return IOError("failed to unmount dir ", dir) + if not listdir(mount_directory) == []: + return IOError("failed to unmount dir ", mount_directory) @staticmethod - def __flatten_list(list): + def __flatten_list(list_to_flatten): """string-aware list flattener""" result = [] - for item in list: + for item in list_to_flatten: if isinstance(item, Iterable) and not isinstance(item, str): result.extend(ResticBackupper.__flatten_list(item)) continue @@ -147,8 +148,8 @@ class ResticBackupper(AbstractBackupper): messages, tag, ) - except ValueError as e: - raise ValueError("Could not create a snapshot: ", messages) from e + except ValueError as error: + raise ValueError("Could not create a snapshot: ", messages) from error @staticmethod def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: @@ -231,8 +232,8 @@ class ResticBackupper(AbstractBackupper): try: parsed_output = ResticBackupper.parse_json_output(output) return parsed_output["total_size"] - except ValueError as e: - raise ValueError("cannot restore a snapshot: " + output) from e + except ValueError as error: + raise ValueError("cannot restore a snapshot: " + output) from error def restore_from_backup( self, @@ -246,13 +247,13 @@ class ResticBackupper(AbstractBackupper): if folders is None or folders == []: raise ValueError("cannot restore without knowing where to!") - with tempfile.TemporaryDirectory() as dir: + with tempfile.TemporaryDirectory() as temp_dir: if verify: - self._raw_verified_restore(snapshot_id, target=dir) - snapshot_root = dir + self._raw_verified_restore(snapshot_id, target=temp_dir) + snapshot_root = temp_dir else: # attempting inplace restore via mount + sync - self.mount_repo(dir) - snapshot_root = join(dir, "ids", snapshot_id) + self.mount_repo(temp_dir) + snapshot_root = join(temp_dir, "ids", snapshot_id) assert snapshot_root is not None for folder in folders: @@ -263,7 +264,7 @@ class 
ResticBackupper(AbstractBackupper): sync(src, dst) if not verify: - self.unmount_repo(dir) + self.unmount_repo(temp_dir) def _raw_verified_restore(self, snapshot_id, target="/"): """barebones restic restore""" @@ -355,8 +356,8 @@ class ResticBackupper(AbstractBackupper): raise ValueError("No repository! : " + output) try: return ResticBackupper.parse_json_output(output) - except ValueError as e: - raise ValueError("Cannot load snapshots: ") from e + except ValueError as error: + raise ValueError("Cannot load snapshots: ") from error def get_snapshots(self) -> List[Snapshot]: """Get all snapshots from the repo""" @@ -383,10 +384,10 @@ class ResticBackupper(AbstractBackupper): if len(json_messages) == 1: try: return json.loads(truncated_output) - except JSONDecodeError as e: + except JSONDecodeError as error: raise ValueError( "There is no json in the restic output : " + output - ) from e + ) from error result_array = [] for message in json_messages: diff --git a/selfprivacy_api/backup/providers/none.py b/selfprivacy_api/backup/providers/none.py index 474d0a2..6a37771 100644 --- a/selfprivacy_api/backup/providers/none.py +++ b/selfprivacy_api/backup/providers/none.py @@ -1,4 +1,4 @@ -from .provider import AbstractBackupProvider +from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.backuppers.none_backupper import NoneBackupper from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 87e0aa6..bda7f09 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -1,3 +1,6 @@ +""" +Module for storing backup related data in redis. +""" from typing import List, Optional from datetime import datetime @@ -10,10 +13,6 @@ from selfprivacy_api.utils.redis_model_storage import ( hash_as_model, ) - -from selfprivacy_api.services.service import Service -from selfprivacy_api.services import get_service_by_id - from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_kind @@ -32,8 +31,10 @@ redis = RedisPool().get_connection() class Storage: + """Static class for storing backup related data in redis""" @staticmethod - def reset(): + def reset() -> None: + """Deletes all backup related data from redis""" redis.delete(REDIS_PROVIDER_KEY) redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) @@ -48,20 +49,22 @@ class Storage: redis.delete(key) @staticmethod - def invalidate_snapshot_storage(): + def invalidate_snapshot_storage() -> None: + """Deletes all cached snapshots from redis""" for key in redis.keys(REDIS_SNAPSHOTS_PREFIX + "*"): redis.delete(key) @staticmethod - def __last_backup_key(service_id): + def __last_backup_key(service_id: str) -> str: return REDIS_LAST_BACKUP_PREFIX + service_id @staticmethod - def __snapshot_key(snapshot: Snapshot): + def __snapshot_key(snapshot: Snapshot) -> str: return REDIS_SNAPSHOTS_PREFIX + snapshot.id @staticmethod def get_last_backup_time(service_id: str) -> Optional[datetime]: + """Returns last backup time for a service or None if it was never backed up""" key = Storage.__last_backup_key(service_id) if not redis.exists(key): return None @@ -72,7 +75,8 @@ class Storage: return snapshot.created_at @staticmethod - def store_last_timestamp(service_id: str, snapshot: Snapshot): + def store_last_timestamp(service_id: str, snapshot: Snapshot) -> None: + """Stores last backup time for a service""" 
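# ---------------------------------------------------------------------------
# [Editor's aside, not part of the patch series.] `store_model_as_hash` and
# `hash_as_model` come from selfprivacy_api.utils.redis_model_storage, which
# these patches import but never show. A minimal sketch of the general
# pattern (pydantic model in, Redis hash out, and back), assuming every field
# can round-trip through `str()`; this is not the repo's actual code:
from typing import Optional, Type, TypeVar

import redis
from pydantic import BaseModel

T = TypeVar("T", bound=BaseModel)
r = redis.Redis(decode_responses=True)


def store_model_as_hash_sketch(key: str, model: BaseModel) -> None:
    # Redis hashes hold flat string fields, so stringify every attribute.
    r.hset(key, mapping={name: str(value) for name, value in model.dict().items()})


def hash_as_model_sketch(key: str, model_class: Type[T]) -> Optional[T]:
    data = r.hgetall(key)
    if not data:
        return None
    # pydantic (v1 API) parses the string fields back into typed attributes.
    return model_class.parse_obj(data)
# ---------------------------------------------------------------------------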
store_model_as_hash(
             redis,
             Storage.__last_backup_key(service_id),
@@ -80,18 +84,21 @@ class Storage:
         )

     @staticmethod
-    def cache_snapshot(snapshot: Snapshot):
+    def cache_snapshot(snapshot: Snapshot) -> None:
+        """Stores snapshot metadata in redis for caching purposes"""
         snapshot_key = Storage.__snapshot_key(snapshot)
         store_model_as_hash(redis, snapshot_key, snapshot)
         redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS)

     @staticmethod
-    def delete_cached_snapshot(snapshot: Snapshot):
+    def delete_cached_snapshot(snapshot: Snapshot) -> None:
+        """Deletes snapshot metadata from redis"""
         snapshot_key = Storage.__snapshot_key(snapshot)
         redis.delete(snapshot_key)

     @staticmethod
     def get_cached_snapshot_by_id(snapshot_id: str) -> Optional[Snapshot]:
+        """Returns cached snapshot by id or None if it doesn't exist"""
         key = REDIS_SNAPSHOTS_PREFIX + snapshot_id
         if not redis.exists(key):
             return None
@@ -99,12 +106,14 @@ class Storage:

     @staticmethod
     def get_cached_snapshots() -> List[Snapshot]:
-        keys = redis.keys(REDIS_SNAPSHOTS_PREFIX + "*")
-        result = []
+        """Returns all cached snapshots stored in redis"""
+        keys: list[str] = redis.keys(REDIS_SNAPSHOTS_PREFIX + "*")  # type: ignore
+        result: list[Snapshot] = []

         for key in keys:
             snapshot = hash_as_model(redis, key, Snapshot)
-            result.append(snapshot)
+            if snapshot:
+                result.append(snapshot)
         return result

     @staticmethod
@@ -112,18 +121,21 @@ class Storage:
         """None means autobackup is disabled"""
         if not redis.exists(REDIS_AUTOBACKUP_PERIOD_KEY):
             return None
-        return int(redis.get(REDIS_AUTOBACKUP_PERIOD_KEY))
+        return int(redis.get(REDIS_AUTOBACKUP_PERIOD_KEY))  # type: ignore

     @staticmethod
-    def store_autobackup_period_minutes(minutes: int):
+    def store_autobackup_period_minutes(minutes: int) -> None:
+        """Set the new autobackup period in minutes"""
         redis.set(REDIS_AUTOBACKUP_PERIOD_KEY, minutes)

     @staticmethod
-    def delete_backup_period():
+    def delete_backup_period() -> None:
+        """Set the autobackup period to none, effectively disabling autobackup"""
         redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY)

     @staticmethod
-    def store_provider(provider: AbstractBackupProvider):
+    def store_provider(provider: AbstractBackupProvider) -> None:
+        """Stores backup storage provider auth data in redis"""
         store_model_as_hash(
             redis,
             REDIS_PROVIDER_KEY,
@@ -138,6 +150,7 @@ class Storage:

     @staticmethod
     def load_provider() -> Optional[BackupProviderModel]:
+        """Loads backup storage provider auth data from redis"""
         provider_model = hash_as_model(
             redis,
             REDIS_PROVIDER_KEY,
@@ -147,10 +160,12 @@ class Storage:

     @staticmethod
     def has_init_mark() -> bool:
+        """Returns True if the repository was initialized"""
         if redis.exists(REDIS_INITTED_CACHE_PREFIX):
             return True
         return False

     @staticmethod
     def mark_as_init():
+        """Marks the repository as initialized"""
         redis.set(REDIS_INITTED_CACHE_PREFIX, 1)

diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py
index ac8f2e2..0f73178 100644
--- a/selfprivacy_api/backup/tasks.py
+++ b/selfprivacy_api/backup/tasks.py
@@ -1,21 +1,24 @@
+"""
+The tasks module contains the worker tasks that are used to back up and restore
+"""
 from datetime import datetime

 from selfprivacy_api.graphql.common_types.backup import RestoreStrategy
 from selfprivacy_api.models.backup.snapshot import Snapshot
 from selfprivacy_api.utils.huey import huey
-from selfprivacy_api.services import get_service_by_id
 from selfprivacy_api.services.service import Service
 from selfprivacy_api.backup import Backups
-from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job


-def validate_datetime(dt: datetime):
-    # dt = datetime.now(timezone.utc)
+def validate_datetime(dt: datetime) -> bool:
+    """
+    Validates that the datetime passed in is timezone-aware.
+    """
     if dt.timetz is None:
         raise ValueError(
             """
-            huey passed in the timezone-unaware time!
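# ---------------------------------------------------------------------------
# [Editor's aside, not part of the patch series.] `validate_datetime` above
# intends to reject naive datetimes, but `dt.timetz` without parentheses is a
# bound method object and is therefore never None, so the guard as written
# cannot fire. The documented way to test awareness goes through `tzinfo` and
# `utcoffset()`; a small self-contained check:
from datetime import datetime, timezone


def is_timezone_aware(dt: datetime) -> bool:
    # Aware datetimes have tzinfo set and a defined UTC offset.
    return dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None


assert is_timezone_aware(datetime.now(timezone.utc))
assert not is_timezone_aware(datetime.now())
# ---------------------------------------------------------------------------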
add_backup_job, add_restore_job -def validate_datetime(dt: datetime): - # dt = datetime.now(timezone.utc) +def validate_datetime(dt: datetime) -> bool: + """ + Validates that the datetime passed in is timezone-aware. + """ if dt.timetz is None: raise ValueError( """ - huey passed in the timezone-unaware time! + huey passed in the timezone-unaware time! Post it in support chat or maybe try uncommenting a line above """ ) @@ -25,6 +28,9 @@ def validate_datetime(dt: datetime): # huey tasks need to return something @huey.task() def start_backup(service: Service) -> bool: + """ + The worker task that starts the backup process. + """ Backups.back_up(service) return True @@ -34,12 +40,18 @@ def restore_snapshot( snapshot: Snapshot, strategy: RestoreStrategy = RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE, ) -> bool: + """ + The worker task that starts the restore process. + """ Backups.restore_snapshot(snapshot, strategy) return True @huey.periodic_task(validate_datetime=validate_datetime) def automatic_backup(): + """ + The worker periodic task that starts the automatic backup process. + """ time = datetime.now() for service in Backups.services_to_back_up(time): start_backup(service) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 5c8163c..b92af4a 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -1,7 +1,5 @@ -import datetime import typing import strawberry -from strawberry.types import Info from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( @@ -16,7 +14,7 @@ from selfprivacy_api.graphql.common_types.jobs import job_to_api_job from selfprivacy_api.graphql.common_types.backup import RestoreStrategy from selfprivacy_api.backup import Backups -from selfprivacy_api.services import get_all_services, get_service_by_id +from selfprivacy_api.services import get_service_by_id from selfprivacy_api.backup.tasks import start_backup, restore_snapshot from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job @@ -142,11 +140,11 @@ class BackupMutations: try: job = add_restore_job(snap) - except ValueError as e: + except ValueError as error: return GenericJobMutationReturn( success=False, code=400, - message=str(e), + message=str(error), job=None, ) diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 322dab0..6535a88 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -64,6 +64,8 @@ class Backup: status=ServiceStatusEnum.OFF, url=None, dns_records=None, + can_be_backed_up=False, + backup_description="", ) else: service = service_to_graphql_service(service) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 2551237..ea1e15e 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -125,57 +125,57 @@ class Jobs: return False @staticmethod - def reset_logs(): + def reset_logs() -> None: redis = RedisPool().get_connection() for key in redis.keys(STATUS_LOGS_PREFIX + "*"): redis.delete(key) @staticmethod - def log_status_update(job: Job, status: JobStatus): + def log_status_update(job: Job, status: JobStatus) -> None: redis = RedisPool().get_connection() key = _status_log_key_from_uuid(job.uid) redis.lpush(key, status.value) redis.expire(key, 10) @staticmethod - def log_progress_update(job: Job, progress: int): 
+ def log_progress_update(job: Job, progress: int) -> None: redis = RedisPool().get_connection() key = _progress_log_key_from_uuid(job.uid) redis.lpush(key, progress) redis.expire(key, 10) @staticmethod - def status_updates(job: Job) -> typing.List[JobStatus]: - result = [] + def status_updates(job: Job) -> list[JobStatus]: + result: list[JobStatus] = [] redis = RedisPool().get_connection() key = _status_log_key_from_uuid(job.uid) if not redis.exists(key): return [] - status_strings = redis.lrange(key, 0, -1) + status_strings: list[str] = redis.lrange(key, 0, -1) # type: ignore for status in status_strings: try: result.append(JobStatus[status]) - except KeyError as e: - raise ValueError("impossible job status: " + status) from e + except KeyError as error: + raise ValueError("impossible job status: " + status) from error return result @staticmethod - def progress_updates(job: Job) -> typing.List[int]: - result = [] + def progress_updates(job: Job) -> list[int]: + result: list[int] = [] redis = RedisPool().get_connection() key = _progress_log_key_from_uuid(job.uid) if not redis.exists(key): return [] - progress_strings = redis.lrange(key, 0, -1) + progress_strings: list[str] = redis.lrange(key, 0, -1) # type: ignore for progress in progress_strings: try: result.append(int(progress)) - except KeyError as e: - raise ValueError("impossible job progress: " + progress) from e + except KeyError as error: + raise ValueError("impossible job progress: " + progress) from error return result @staticmethod @@ -257,19 +257,19 @@ class Jobs: return False -def _redis_key_from_uuid(uuid_string): +def _redis_key_from_uuid(uuid_string) -> str: return "jobs:" + str(uuid_string) -def _status_log_key_from_uuid(uuid_string): +def _status_log_key_from_uuid(uuid_string) -> str: return STATUS_LOGS_PREFIX + str(uuid_string) -def _progress_log_key_from_uuid(uuid_string): +def _progress_log_key_from_uuid(uuid_string) -> str: return PROGRESS_LOGS_PREFIX + str(uuid_string) -def _store_job_as_hash(redis, redis_key, model): +def _store_job_as_hash(redis, redis_key, model) -> None: for key, value in model.dict().items(): if isinstance(value, uuid.UUID): value = str(value) @@ -280,7 +280,7 @@ def _store_job_as_hash(redis, redis_key, model): redis.hset(redis_key, key, str(value)) -def _job_from_hash(redis, redis_key): +def _job_from_hash(redis, redis_key) -> typing.Optional[Job]: if redis.exists(redis_key): job_dict = redis.hgetall(redis_key) for date in [ diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 8baa16d..80825bc 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -1,7 +1,7 @@ """ Token repository using Redis as backend. 
""" -from typing import Optional +from typing import Any, Optional from datetime import datetime from hashlib import md5 @@ -29,15 +29,15 @@ class RedisTokensRepository(AbstractTokensRepository): @staticmethod def token_key_for_device(device_name: str): - hash = md5() - hash.update(bytes(device_name, "utf-8")) - digest = hash.hexdigest() + md5_hash = md5() + md5_hash.update(bytes(device_name, "utf-8")) + digest = md5_hash.hexdigest() return TOKENS_PREFIX + digest def get_tokens(self) -> list[Token]: """Get the tokens""" redis = self.connection - token_keys = redis.keys(TOKENS_PREFIX + "*") + token_keys: list[str] = redis.keys(TOKENS_PREFIX + "*") # type: ignore tokens = [] for key in token_keys: token = self._token_from_hash(key) @@ -45,10 +45,10 @@ class RedisTokensRepository(AbstractTokensRepository): tokens.append(token) return tokens - def _discover_token_key(self, input_token: Token) -> str: + def _discover_token_key(self, input_token: Token) -> Optional[str]: """brute-force searching for tokens, for robust deletion""" redis = self.connection - token_keys = redis.keys(TOKENS_PREFIX + "*") + token_keys: list[str] = redis.keys(TOKENS_PREFIX + "*") # type: ignore for key in token_keys: token = self._token_from_hash(key) if token == input_token: @@ -120,26 +120,26 @@ class RedisTokensRepository(AbstractTokensRepository): return self._new_device_key_from_hash(NEW_DEVICE_KEY_REDIS_KEY) @staticmethod - def _is_date_key(key: str): + def _is_date_key(key: str) -> bool: return key in [ "created_at", "expires_at", ] @staticmethod - def _prepare_model_dict(d: dict): - date_keys = [key for key in d.keys() if RedisTokensRepository._is_date_key(key)] + def _prepare_model_dict(model_dict: dict[str, Any]) -> None: + date_keys = [key for key in model_dict.keys() if RedisTokensRepository._is_date_key(key)] for date in date_keys: - if d[date] != "None": - d[date] = datetime.fromisoformat(d[date]) - for key in d.keys(): - if d[key] == "None": - d[key] = None + if model_dict[date] != "None": + model_dict[date] = datetime.fromisoformat(model_dict[date]) + for key in model_dict.keys(): + if model_dict[key] == "None": + model_dict[key] = None - def _model_dict_from_hash(self, redis_key: str) -> Optional[dict]: + def _model_dict_from_hash(self, redis_key: str) -> Optional[dict[str, Any]]: redis = self.connection if redis.exists(redis_key): - token_dict = redis.hgetall(redis_key) + token_dict: dict[str, Any] = redis.hgetall(redis_key) # type: ignore RedisTokensRepository._prepare_model_dict(token_dict) return token_dict return None diff --git a/selfprivacy_api/utils/redis_pool.py b/selfprivacy_api/utils/redis_pool.py index 2f2cf21..4bd6eda 100644 --- a/selfprivacy_api/utils/redis_pool.py +++ b/selfprivacy_api/utils/redis_pool.py @@ -1,9 +1,9 @@ """ Redis pool module for selfprivacy_api """ +from os import environ import redis from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass -from os import environ REDIS_SOCKET = "/run/redis-sp-api/redis.sock" @@ -14,7 +14,7 @@ class RedisPool(metaclass=SingletonMetaclass): """ def __init__(self): - if "USE_REDIS_PORT" in environ.keys(): + if "USE_REDIS_PORT" in environ: self._pool = redis.ConnectionPool( host="127.0.0.1", port=int(environ["USE_REDIS_PORT"]), From 4b575b6138c51623c54f1561e965dda2f17b3faf Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 20 Jul 2023 18:25:32 +0300 Subject: [PATCH 473/537] fix(graphql): duplicate imports --- selfprivacy_api/graphql/schema.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git 
a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 9a6c82c..e4e7264 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -5,8 +5,6 @@ import asyncio from typing import AsyncGenerator import strawberry from selfprivacy_api.graphql import IsAuthenticated -from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations -from selfprivacy_api.graphql.mutations.backup_mutations import BackupMutations from selfprivacy_api.graphql.mutations.deprecated_mutations import ( DeprecatedApiMutations, DeprecatedJobMutations, @@ -15,6 +13,7 @@ from selfprivacy_api.graphql.mutations.deprecated_mutations import ( DeprecatedSystemMutations, DeprecatedUsersMutations, ) +from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations from selfprivacy_api.graphql.mutations.job_mutations import JobMutations from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations @@ -28,7 +27,6 @@ from selfprivacy_api.graphql.queries.jobs import Job from selfprivacy_api.graphql.queries.services import Services from selfprivacy_api.graphql.queries.storage import Storage from selfprivacy_api.graphql.queries.system import System -from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.mutations.users_mutations import UsersMutations from selfprivacy_api.graphql.queries.users import Users From ac236569b0dd29e1dffbed1b7f06ba9843f10b8a Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 20 Jul 2023 18:28:00 +0300 Subject: [PATCH 474/537] chore: bump version to 2.2.0 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d20bf9a..7e964dc 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.1.3", + version="2.2.0", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From 95dbd4fac26867178bfe80b0ee3d5dab64c6c002 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 20 Jul 2023 19:35:35 +0300 Subject: [PATCH 475/537] chore: bump version --- selfprivacy_api/dependencies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index d7b12fe..95c49e3 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.1.3" + return "2.2.0" From e0a5ecdd9d3498252b3b436e33ac74f7b080dcd3 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 20 Jul 2023 19:37:01 +0300 Subject: [PATCH 476/537] style: reformat --- selfprivacy_api/backup/__init__.py | 1 + selfprivacy_api/backup/storage.py | 1 + .../repositories/tokens/redis_tokens_repository.py | 4 +++- 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index bd16488..9722b71 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -48,6 +48,7 @@ class NotDeadError(AssertionError): """ This error is raised when we try to back up a service that is not dead yet. 
""" + def __init__(self, service: Service): self.service_name = service.get_id() super().__init__() diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index bda7f09..f7384a0 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -32,6 +32,7 @@ redis = RedisPool().get_connection() class Storage: """Static class for storing backup related data in redis""" + @staticmethod def reset() -> None: """Deletes all backup related data from redis""" diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 80825bc..8e683d2 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -128,7 +128,9 @@ class RedisTokensRepository(AbstractTokensRepository): @staticmethod def _prepare_model_dict(model_dict: dict[str, Any]) -> None: - date_keys = [key for key in model_dict.keys() if RedisTokensRepository._is_date_key(key)] + date_keys = [ + key for key in model_dict.keys() if RedisTokensRepository._is_date_key(key) + ] for date in date_keys: if model_dict[date] != "None": model_dict[date] = datetime.fromisoformat(model_dict[date]) From c9d20e8efdc963dba5662a2fa8abd2704417309e Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 20 Jul 2023 19:39:10 +0300 Subject: [PATCH 477/537] fix(backups): make datetimes from huey timezone aware --- selfprivacy_api/backup/tasks.py | 11 +++-------- selfprivacy_api/utils/huey.py | 1 + 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index 0f73178..47063b3 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -1,7 +1,7 @@ """ The tasks module contains the worker tasks that are used to back up and restore """ -from datetime import datetime +from datetime import datetime, timezone from selfprivacy_api.graphql.common_types.backup import RestoreStrategy @@ -15,13 +15,8 @@ def validate_datetime(dt: datetime) -> bool: """ Validates that the datetime passed in is timezone-aware. """ - if dt.timetz is None: - raise ValueError( - """ - huey passed in the timezone-unaware time! - Post it in support chat or maybe try uncommenting a line above - """ - ) + if dt.tzinfo is None: + return Backups.is_time_to_backup(dt.replace(tzinfo=timezone.utc)) return Backups.is_time_to_backup(dt) diff --git a/selfprivacy_api/utils/huey.py b/selfprivacy_api/utils/huey.py index 034f7ba..a7ff492 100644 --- a/selfprivacy_api/utils/huey.py +++ b/selfprivacy_api/utils/huey.py @@ -11,4 +11,5 @@ test_mode = os.environ.get("TEST_MODE") huey = SqliteHuey( HUEY_DATABASE, immediate=test_mode == "true", + utc=True, ) From eafc7a412c53974990a36ecce10ad9e87cc90629 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 20 Jul 2023 19:42:44 +0300 Subject: [PATCH 478/537] fix: timezone awareness of automatic backup task --- selfprivacy_api/backup/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index 47063b3..db350d4 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -47,6 +47,6 @@ def automatic_backup(): """ The worker periodic task that starts the automatic backup process. 
""" - time = datetime.now() + time = datetime.utcnow().replace(tzinfo=timezone.utc) for service in Backups.services_to_back_up(time): start_backup(service) From 413911849d00cf858beb81a14132973a9a3d82d4 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 20 Jul 2023 20:06:39 +0300 Subject: [PATCH 479/537] refactor(backups): remove calls of legacy backups API --- selfprivacy_api/app.py | 2 - selfprivacy_api/rest/services.py | 88 +++++++++----------------------- 2 files changed, 25 insertions(+), 65 deletions(-) diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index a1460e7..a58301a 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -9,7 +9,6 @@ import uvicorn from selfprivacy_api.dependencies import get_api_version from selfprivacy_api.graphql.schema import schema from selfprivacy_api.migrations import run_migrations -from selfprivacy_api.restic_controller.tasks import init_restic from selfprivacy_api.rest import ( system, @@ -49,7 +48,6 @@ async def get_version(): @app.on_event("startup") async def startup(): run_migrations() - init_restic() if __name__ == "__main__": diff --git a/selfprivacy_api/rest/services.py b/selfprivacy_api/rest/services.py index 317cba0..c6dc12e 100644 --- a/selfprivacy_api/rest/services.py +++ b/selfprivacy_api/rest/services.py @@ -16,8 +16,6 @@ from selfprivacy_api.actions.ssh import ( from selfprivacy_api.actions.users import UserNotFound, get_user_by_username from selfprivacy_api.dependencies import get_token_header -from selfprivacy_api.restic_controller import ResticController, ResticStates -from selfprivacy_api.restic_controller import tasks as restic_tasks from selfprivacy_api.services.bitwarden import Bitwarden from selfprivacy_api.services.gitea import Gitea from selfprivacy_api.services.mailserver import MailServer @@ -25,7 +23,7 @@ from selfprivacy_api.services.nextcloud import Nextcloud from selfprivacy_api.services.ocserv import Ocserv from selfprivacy_api.services.pleroma import Pleroma from selfprivacy_api.services.service import ServiceStatus -from selfprivacy_api.utils import WriteUserData, get_dkim_key, get_domain +from selfprivacy_api.utils import get_dkim_key, get_domain router = APIRouter( prefix="/services", @@ -186,44 +184,34 @@ async def disable_pleroma(): @router.get("/restic/backup/list") async def get_restic_backup_list(): - restic = ResticController() - return restic.snapshot_list + raise HTTPException( + status_code=410, + detail="This endpoint is deprecated, please use GraphQL API", + ) @router.put("/restic/backup/create") async def create_restic_backup(): - restic = ResticController() - if restic.state is ResticStates.NO_KEY: - raise HTTPException(status_code=400, detail="Backup key not provided") - if restic.state is ResticStates.INITIALIZING: - raise HTTPException(status_code=400, detail="Backup is initializing") - if restic.state is ResticStates.BACKING_UP: - raise HTTPException(status_code=409, detail="Backup is already running") - restic_tasks.start_backup() - return { - "status": 0, - "message": "Backup creation has started", - } + raise HTTPException( + status_code=410, + detail="This endpoint is deprecated, please use GraphQL API", + ) @router.get("/restic/backup/status") async def get_restic_backup_status(): - restic = ResticController() - - return { - "status": restic.state.name, - "progress": restic.progress, - "error_message": restic.error_message, - } + raise HTTPException( + status_code=410, + detail="This endpoint is deprecated, please use GraphQL API", + ) 
@router.get("/restic/backup/reload") async def reload_restic_backup(): - restic_tasks.load_snapshots() - return { - "status": 0, - "message": "Snapshots reload started", - } + raise HTTPException( + status_code=410, + detail="This endpoint is deprecated, please use GraphQL API", + ) class BackupRestoreInput(BaseModel): @@ -232,29 +220,10 @@ class BackupRestoreInput(BaseModel): @router.put("/restic/backup/restore") async def restore_restic_backup(backup: BackupRestoreInput): - restic = ResticController() - if restic.state is ResticStates.NO_KEY: - raise HTTPException(status_code=400, detail="Backup key not provided") - if restic.state is ResticStates.NOT_INITIALIZED: - raise HTTPException( - status_code=400, detail="Backups repository is not initialized" - ) - if restic.state is ResticStates.BACKING_UP: - raise HTTPException(status_code=409, detail="Backup is already running") - if restic.state is ResticStates.INITIALIZING: - raise HTTPException(status_code=400, detail="Repository is initializing") - if restic.state is ResticStates.RESTORING: - raise HTTPException(status_code=409, detail="Restore is already running") - - for backup_item in restic.snapshot_list: - if backup_item["short_id"] == backup.backupId: - restic_tasks.restore_from_backup(backup.backupId) - return { - "status": 0, - "message": "Backup restoration procedure started", - } - - raise HTTPException(status_code=404, detail="Backup not found") + raise HTTPException( + status_code=410, + detail="This endpoint is deprecated, please use GraphQL API", + ) class BackupConfigInput(BaseModel): @@ -265,17 +234,10 @@ class BackupConfigInput(BaseModel): @router.put("/restic/backblaze/config") async def set_backblaze_config(backup_config: BackupConfigInput): - with WriteUserData() as data: - if "backup" not in data: - data["backup"] = {} - data["backup"]["provider"] = "BACKBLAZE" - data["backup"]["accountId"] = backup_config.accountId - data["backup"]["accountKey"] = backup_config.accountKey - data["backup"]["bucket"] = backup_config.bucket - - restic_tasks.update_keys_from_userdata() - - return "New backup settings saved" + raise HTTPException( + status_code=410, + detail="This endpoint is deprecated, please use GraphQL API", + ) @router.post("/ssh/enable") From b01247bc5598eae7206f1cb0db7daa987be52a85 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 20 Jul 2023 20:11:42 +0300 Subject: [PATCH 480/537] refactor: remove legacy backups implementations --- selfprivacy_api/restic_controller/__init__.py | 233 -------- selfprivacy_api/restic_controller/tasks.py | 70 --- .../services/test_restic.py | 506 ------------------ 3 files changed, 809 deletions(-) delete mode 100644 selfprivacy_api/restic_controller/__init__.py delete mode 100644 selfprivacy_api/restic_controller/tasks.py delete mode 100644 tests/test_rest_endpoints/services/test_restic.py diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py deleted file mode 100644 index 4ac84e8..0000000 --- a/selfprivacy_api/restic_controller/__init__.py +++ /dev/null @@ -1,233 +0,0 @@ -"""Restic singleton controller.""" -from datetime import datetime -import json -import subprocess -import os -from enum import Enum -from selfprivacy_api.utils import ReadUserData -from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass - - -class ResticStates(Enum): - """Restic states enum.""" - - NO_KEY = 0 - NOT_INITIALIZED = 1 - INITIALIZED = 2 - BACKING_UP = 3 - RESTORING = 4 - ERROR = 5 - INITIALIZING = 6 - - -class 
ResticController(metaclass=SingletonMetaclass): - """ - States in wich the restic_controller may be - - no backblaze key - - backblaze key is provided, but repository is not initialized - - backblaze key is provided, repository is initialized - - fetching list of snapshots - - creating snapshot, current progress can be retrieved - - recovering from snapshot - - Any ongoing operation acquires the lock - Current state can be fetched with get_state() - """ - - _initialized = False - - def __init__(self): - if self._initialized: - return - self.state = ResticStates.NO_KEY - self.lock = False - self.progress = 0 - self._backblaze_account = None - self._backblaze_key = None - self._repository_name = None - self.snapshot_list = [] - self.error_message = None - self._initialized = True - self.load_configuration() - self.load_snapshots() - - def load_configuration(self): - """Load current configuration from user data to singleton.""" - with ReadUserData() as user_data: - self._backblaze_account = user_data["backblaze"]["accountId"] - self._backblaze_key = user_data["backblaze"]["accountKey"] - self._repository_name = user_data["backblaze"]["bucket"] - if self._backblaze_account and self._backblaze_key and self._repository_name: - self.state = ResticStates.INITIALIZING - else: - self.state = ResticStates.NO_KEY - - def load_snapshots(self): - """ - Load list of snapshots from repository - """ - backup_listing_command = [ - "restic", - "-o", - self.rclone_args(), - "-r", - self.restic_repo(), - "snapshots", - "--json", - ] - - if self.state in (ResticStates.BACKING_UP, ResticStates.RESTORING): - return - with subprocess.Popen( - backup_listing_command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) as backup_listing_process_descriptor: - snapshots_list = backup_listing_process_descriptor.communicate()[0].decode( - "utf-8" - ) - try: - starting_index = snapshots_list.find("[") - json.loads(snapshots_list[starting_index:]) - self.snapshot_list = json.loads(snapshots_list[starting_index:]) - self.state = ResticStates.INITIALIZED - print(snapshots_list) - except ValueError: - if "Is there a repository at the following location?" 
in snapshots_list: - self.state = ResticStates.NOT_INITIALIZED - return - self.state = ResticStates.ERROR - self.error_message = snapshots_list - return - - def restic_repo(self): - # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone - # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5 - return f"rclone::b2:{self._repository_name}/sfbackup" - - def rclone_args(self): - return "rclone.args=serve restic --stdio" + self.backend_rclone_args() - - def backend_rclone_args(self): - return f"--b2-account {self._backblaze_account} --b2-key {self._backblaze_key}" - - def initialize_repository(self): - """ - Initialize repository with restic - """ - initialize_repository_command = [ - "restic", - "-o", - self.rclone_args(), - "-r", - self.restic_repo(), - "init", - ] - with subprocess.Popen( - initialize_repository_command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) as initialize_repository_process_descriptor: - msg = initialize_repository_process_descriptor.communicate()[0].decode( - "utf-8" - ) - if initialize_repository_process_descriptor.returncode == 0: - self.state = ResticStates.INITIALIZED - else: - self.state = ResticStates.ERROR - self.error_message = msg - - self.state = ResticStates.INITIALIZED - - def start_backup(self): - """ - Start backup with restic - """ - backup_command = [ - "restic", - "-o", - self.rclone_args(), - "-r", - self.restic_repo(), - "--verbose", - "--json", - "backup", - "/var", - ] - with open("/var/backup.log", "w", encoding="utf-8") as log_file: - subprocess.Popen( - backup_command, - shell=False, - stdout=log_file, - stderr=subprocess.STDOUT, - ) - - self.state = ResticStates.BACKING_UP - self.progress = 0 - - def check_progress(self): - """ - Check progress of ongoing backup operation - """ - backup_status_check_command = ["tail", "-1", "/var/backup.log"] - - if self.state in (ResticStates.NO_KEY, ResticStates.NOT_INITIALIZED): - return - - # If the log file does not exists - if os.path.exists("/var/backup.log") is False: - self.state = ResticStates.INITIALIZED - - with subprocess.Popen( - backup_status_check_command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) as backup_status_check_process_descriptor: - backup_process_status = ( - backup_status_check_process_descriptor.communicate()[0].decode("utf-8") - ) - - try: - status = json.loads(backup_process_status) - except ValueError: - print(backup_process_status) - self.error_message = backup_process_status - return - if status["message_type"] == "status": - self.progress = status["percent_done"] - self.state = ResticStates.BACKING_UP - elif status["message_type"] == "summary": - self.state = ResticStates.INITIALIZED - self.progress = 0 - self.snapshot_list.append( - { - "short_id": status["snapshot_id"], - # Current time in format 2021-12-02T00:02:51.086452543+03:00 - "time": datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f%z"), - } - ) - - def restore_from_backup(self, snapshot_id): - """ - Restore from backup with restic - """ - backup_restoration_command = [ - "restic", - "-o", - self.rclone_args(), - "-r", - self.restic_repo(), - "restore", - snapshot_id, - "--target", - "/", - ] - - self.state = ResticStates.RESTORING - - subprocess.run(backup_restoration_command, shell=False) - - self.state = ResticStates.INITIALIZED diff --git a/selfprivacy_api/restic_controller/tasks.py b/selfprivacy_api/restic_controller/tasks.py deleted file mode 100644 index 
f583d8b..0000000 --- a/selfprivacy_api/restic_controller/tasks.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Tasks for the restic controller.""" -from huey import crontab -from selfprivacy_api.utils.huey import huey -from . import ResticController, ResticStates - - -@huey.task() -def init_restic(): - controller = ResticController() - if controller.state == ResticStates.NOT_INITIALIZED: - initialize_repository() - - -@huey.task() -def update_keys_from_userdata(): - controller = ResticController() - controller.load_configuration() - controller.write_rclone_config() - initialize_repository() - - -# Check every morning at 5:00 AM -@huey.task(crontab(hour=5, minute=0)) -def cron_load_snapshots(): - controller = ResticController() - controller.load_snapshots() - - -# Check every morning at 5:00 AM -@huey.task() -def load_snapshots(): - controller = ResticController() - controller.load_snapshots() - if controller.state == ResticStates.NOT_INITIALIZED: - load_snapshots.schedule(delay=120) - - -@huey.task() -def initialize_repository(): - controller = ResticController() - if controller.state is not ResticStates.NO_KEY: - controller.initialize_repository() - load_snapshots() - - -@huey.task() -def fetch_backup_status(): - controller = ResticController() - if controller.state is ResticStates.BACKING_UP: - controller.check_progress() - if controller.state is ResticStates.BACKING_UP: - fetch_backup_status.schedule(delay=2) - else: - load_snapshots.schedule(delay=240) - - -@huey.task() -def start_backup(): - controller = ResticController() - if controller.state is ResticStates.NOT_INITIALIZED: - resp = initialize_repository() - resp.get() - controller.start_backup() - fetch_backup_status.schedule(delay=3) - - -@huey.task() -def restore_from_backup(snapshot): - controller = ResticController() - controller.restore_from_backup(snapshot) diff --git a/tests/test_rest_endpoints/services/test_restic.py b/tests/test_rest_endpoints/services/test_restic.py deleted file mode 100644 index 844ff34..0000000 --- a/tests/test_rest_endpoints/services/test_restic.py +++ /dev/null @@ -1,506 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -import json -import pytest -from selfprivacy_api.restic_controller import ResticStates - - -def read_json(file_path): - with open(file_path, "r") as f: - return json.load(f) - - -MOCKED_SNAPSHOTS = [ - { - "time": "2021-12-06T09:05:04.224685677+03:00", - "tree": "b76152d1e716d86d420407ead05d9911f2b6d971fe1589c12b63e4de65b14d4e", - "paths": ["/var"], - "hostname": "test-host", - "username": "root", - "id": "f96b428f1ca1252089ea3e25cd8ee33e63fb24615f1cc07559ba907d990d81c5", - "short_id": "f96b428f", - }, - { - "time": "2021-12-08T07:42:06.998894055+03:00", - "parent": "f96b428f1ca1252089ea3e25cd8ee33e63fb24615f1cc07559ba907d990d81c5", - "tree": "8379b4fdc9ee3e9bb7c322f632a7bed9fc334b0258abbf4e7134f8fe5b3d61b0", - "paths": ["/var"], - "hostname": "test-host", - "username": "root", - "id": "db96b36efec97e5ba385099b43f9062d214c7312c20138aee7b8bd2c6cd8995a", - "short_id": "db96b36e", - }, -] - - -class ResticControllerMock: - snapshot_list = MOCKED_SNAPSHOTS - state = ResticStates.INITIALIZED - progress = 0 - error_message = None - - -@pytest.fixture -def mock_restic_controller(mocker): - mock = mocker.patch( - "selfprivacy_api.rest.services.ResticController", - autospec=True, - return_value=ResticControllerMock, - ) - return mock - - -class ResticControllerMockNoKey: - snapshot_list = [] - state = ResticStates.NO_KEY - progress = 0 - error_message = None - - 
-@pytest.fixture -def mock_restic_controller_no_key(mocker): - mock = mocker.patch( - "selfprivacy_api.rest.services.ResticController", - autospec=True, - return_value=ResticControllerMockNoKey, - ) - return mock - - -class ResticControllerNotInitialized: - snapshot_list = [] - state = ResticStates.NOT_INITIALIZED - progress = 0 - error_message = None - - -@pytest.fixture -def mock_restic_controller_not_initialized(mocker): - mock = mocker.patch( - "selfprivacy_api.rest.services.ResticController", - autospec=True, - return_value=ResticControllerNotInitialized, - ) - return mock - - -class ResticControllerInitializing: - snapshot_list = [] - state = ResticStates.INITIALIZING - progress = 0 - error_message = None - - -@pytest.fixture -def mock_restic_controller_initializing(mocker): - mock = mocker.patch( - "selfprivacy_api.rest.services.ResticController", - autospec=True, - return_value=ResticControllerInitializing, - ) - return mock - - -class ResticControllerBackingUp: - snapshot_list = MOCKED_SNAPSHOTS - state = ResticStates.BACKING_UP - progress = 0.42 - error_message = None - - -@pytest.fixture -def mock_restic_controller_backing_up(mocker): - mock = mocker.patch( - "selfprivacy_api.rest.services.ResticController", - autospec=True, - return_value=ResticControllerBackingUp, - ) - return mock - - -class ResticControllerError: - snapshot_list = MOCKED_SNAPSHOTS - state = ResticStates.ERROR - progress = 0 - error_message = "Error message" - - -@pytest.fixture -def mock_restic_controller_error(mocker): - mock = mocker.patch( - "selfprivacy_api.rest.services.ResticController", - autospec=True, - return_value=ResticControllerError, - ) - return mock - - -class ResticControllerRestoring: - snapshot_list = MOCKED_SNAPSHOTS - state = ResticStates.RESTORING - progress = 0 - error_message = None - - -@pytest.fixture -def mock_restic_controller_restoring(mocker): - mock = mocker.patch( - "selfprivacy_api.rest.services.ResticController", - autospec=True, - return_value=ResticControllerRestoring, - ) - return mock - - -@pytest.fixture -def mock_restic_tasks(mocker): - mock = mocker.patch("selfprivacy_api.rest.services.restic_tasks", autospec=True) - return mock - - -@pytest.fixture -def undefined_settings(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "backup" not in read_json(datadir / "undefined.json") - return datadir - - -@pytest.fixture -def some_settings(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_values.json" - ) - assert "backup" in read_json(datadir / "some_values.json") - assert read_json(datadir / "some_values.json")["backup"]["provider"] == "BACKBLAZE" - assert read_json(datadir / "some_values.json")["backup"]["accountId"] == "ID" - assert read_json(datadir / "some_values.json")["backup"]["accountKey"] == "KEY" - assert read_json(datadir / "some_values.json")["backup"]["bucket"] == "BUCKET" - return datadir - - -@pytest.fixture -def no_values(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_values.json") - assert "backup" in read_json(datadir / "no_values.json") - assert "provider" not in read_json(datadir / "no_values.json")["backup"] - assert "accountId" not in read_json(datadir / "no_values.json")["backup"] - assert "accountKey" not in read_json(datadir / "no_values.json")["backup"] - assert "bucket" not in read_json(datadir / "no_values.json")["backup"] - return datadir - - -def test_get_snapshots_unauthorized(client, 
mock_restic_controller, mock_restic_tasks): - response = client.get("/services/restic/backup/list") - assert response.status_code == 401 - - -def test_get_snapshots(authorized_client, mock_restic_controller, mock_restic_tasks): - response = authorized_client.get("/services/restic/backup/list") - assert response.status_code == 200 - assert response.json() == MOCKED_SNAPSHOTS - - -def test_create_backup_unauthorized(client, mock_restic_controller, mock_restic_tasks): - response = client.put("/services/restic/backup/create") - assert response.status_code == 401 - - -def test_create_backup(authorized_client, mock_restic_controller, mock_restic_tasks): - response = authorized_client.put("/services/restic/backup/create") - assert response.status_code == 200 - assert mock_restic_tasks.start_backup.call_count == 1 - - -def test_create_backup_without_key( - authorized_client, mock_restic_controller_no_key, mock_restic_tasks -): - response = authorized_client.put("/services/restic/backup/create") - assert response.status_code == 400 - assert mock_restic_tasks.start_backup.call_count == 0 - - -def test_create_backup_initializing( - authorized_client, mock_restic_controller_initializing, mock_restic_tasks -): - response = authorized_client.put("/services/restic/backup/create") - assert response.status_code == 400 - assert mock_restic_tasks.start_backup.call_count == 0 - - -def test_create_backup_backing_up( - authorized_client, mock_restic_controller_backing_up, mock_restic_tasks -): - response = authorized_client.put("/services/restic/backup/create") - assert response.status_code == 409 - assert mock_restic_tasks.start_backup.call_count == 0 - - -def test_check_backup_status_unauthorized( - client, mock_restic_controller, mock_restic_tasks -): - response = client.get("/services/restic/backup/status") - assert response.status_code == 401 - - -def test_check_backup_status( - authorized_client, mock_restic_controller, mock_restic_tasks -): - response = authorized_client.get("/services/restic/backup/status") - assert response.status_code == 200 - assert response.json() == { - "status": "INITIALIZED", - "progress": 0, - "error_message": None, - } - - -def test_check_backup_status_no_key( - authorized_client, mock_restic_controller_no_key, mock_restic_tasks -): - response = authorized_client.get("/services/restic/backup/status") - assert response.status_code == 200 - assert response.json() == { - "status": "NO_KEY", - "progress": 0, - "error_message": None, - } - - -def test_check_backup_status_not_initialized( - authorized_client, mock_restic_controller_not_initialized, mock_restic_tasks -): - response = authorized_client.get("/services/restic/backup/status") - assert response.status_code == 200 - assert response.json() == { - "status": "NOT_INITIALIZED", - "progress": 0, - "error_message": None, - } - - -def test_check_backup_status_initializing( - authorized_client, mock_restic_controller_initializing, mock_restic_tasks -): - response = authorized_client.get("/services/restic/backup/status") - assert response.status_code == 200 - assert response.json() == { - "status": "INITIALIZING", - "progress": 0, - "error_message": None, - } - - -def test_check_backup_status_backing_up( - authorized_client, mock_restic_controller_backing_up -): - response = authorized_client.get("/services/restic/backup/status") - assert response.status_code == 200 - assert response.json() == { - "status": "BACKING_UP", - "progress": 0.42, - "error_message": None, - } - - -def test_check_backup_status_error( - authorized_client, 
mock_restic_controller_error, mock_restic_tasks -): - response = authorized_client.get("/services/restic/backup/status") - assert response.status_code == 200 - assert response.json() == { - "status": "ERROR", - "progress": 0, - "error_message": "Error message", - } - - -def test_check_backup_status_restoring( - authorized_client, mock_restic_controller_restoring, mock_restic_tasks -): - response = authorized_client.get("/services/restic/backup/status") - assert response.status_code == 200 - assert response.json() == { - "status": "RESTORING", - "progress": 0, - "error_message": None, - } - - -def test_reload_unauthenticated(client, mock_restic_controller, mock_restic_tasks): - response = client.get("/services/restic/backup/reload") - assert response.status_code == 401 - - -def test_backup_reload(authorized_client, mock_restic_controller, mock_restic_tasks): - response = authorized_client.get("/services/restic/backup/reload") - assert response.status_code == 200 - assert mock_restic_tasks.load_snapshots.call_count == 1 - - -def test_backup_restore_unauthorized(client, mock_restic_controller, mock_restic_tasks): - response = client.put("/services/restic/backup/restore") - assert response.status_code == 401 - - -def test_backup_restore_without_backup_id( - authorized_client, mock_restic_controller, mock_restic_tasks -): - response = authorized_client.put("/services/restic/backup/restore", json={}) - assert response.status_code == 422 - assert mock_restic_tasks.restore_from_backup.call_count == 0 - - -def test_backup_restore_with_nonexistent_backup_id( - authorized_client, mock_restic_controller, mock_restic_tasks -): - response = authorized_client.put( - "/services/restic/backup/restore", json={"backupId": "nonexistent"} - ) - assert response.status_code == 404 - assert mock_restic_tasks.restore_from_backup.call_count == 0 - - -def test_backup_restore_when_no_key( - authorized_client, mock_restic_controller_no_key, mock_restic_tasks -): - response = authorized_client.put( - "/services/restic/backup/restore", json={"backupId": "f96b428f"} - ) - assert response.status_code == 400 - assert mock_restic_tasks.restore_from_backup.call_count == 0 - - -def test_backup_restore_when_not_initialized( - authorized_client, mock_restic_controller_not_initialized, mock_restic_tasks -): - response = authorized_client.put( - "/services/restic/backup/restore", json={"backupId": "f96b428f"} - ) - assert response.status_code == 400 - assert mock_restic_tasks.restore_from_backup.call_count == 0 - - -def test_backup_restore_when_initializing( - authorized_client, mock_restic_controller_initializing, mock_restic_tasks -): - response = authorized_client.put( - "/services/restic/backup/restore", json={"backupId": "f96b428f"} - ) - assert response.status_code == 400 - assert mock_restic_tasks.restore_from_backup.call_count == 0 - - -def test_backup_restore_when_backing_up( - authorized_client, mock_restic_controller_backing_up, mock_restic_tasks -): - response = authorized_client.put( - "/services/restic/backup/restore", json={"backupId": "f96b428f"} - ) - assert response.status_code == 409 - assert mock_restic_tasks.restore_from_backup.call_count == 0 - - -def test_backup_restore_when_restoring( - authorized_client, mock_restic_controller_restoring, mock_restic_tasks -): - response = authorized_client.put( - "/services/restic/backup/restore", json={"backupId": "f96b428f"} - ) - assert response.status_code == 409 - assert mock_restic_tasks.restore_from_backup.call_count == 0 - - -def test_backup_restore_when_error( - 
authorized_client, mock_restic_controller_error, mock_restic_tasks -): - response = authorized_client.put( - "/services/restic/backup/restore", json={"backupId": "f96b428f"} - ) - assert response.status_code == 200 - assert mock_restic_tasks.restore_from_backup.call_count == 1 - - -def test_backup_restore(authorized_client, mock_restic_controller, mock_restic_tasks): - response = authorized_client.put( - "/services/restic/backup/restore", json={"backupId": "f96b428f"} - ) - assert response.status_code == 200 - assert mock_restic_tasks.restore_from_backup.call_count == 1 - - -def test_set_backblaze_config_unauthorized( - client, mock_restic_controller, mock_restic_tasks, some_settings -): - response = client.put("/services/restic/backblaze/config") - assert response.status_code == 401 - assert mock_restic_tasks.update_keys_from_userdata.call_count == 0 - - -def test_set_backblaze_config_without_arguments( - authorized_client, mock_restic_controller, mock_restic_tasks, some_settings -): - response = authorized_client.put("/services/restic/backblaze/config") - assert response.status_code == 422 - assert mock_restic_tasks.update_keys_from_userdata.call_count == 0 - - -def test_set_backblaze_config_without_all_values( - authorized_client, mock_restic_controller, mock_restic_tasks, some_settings -): - response = authorized_client.put( - "/services/restic/backblaze/config", - json={"accountId": "123", "applicationKey": "456"}, - ) - assert response.status_code == 422 - assert mock_restic_tasks.update_keys_from_userdata.call_count == 0 - - -def test_set_backblaze_config( - authorized_client, mock_restic_controller, mock_restic_tasks, some_settings -): - response = authorized_client.put( - "/services/restic/backblaze/config", - json={"accountId": "123", "accountKey": "456", "bucket": "789"}, - ) - assert response.status_code == 200 - assert mock_restic_tasks.update_keys_from_userdata.call_count == 1 - assert read_json(some_settings / "some_values.json")["backup"] == { - "provider": "BACKBLAZE", - "accountId": "123", - "accountKey": "456", - "bucket": "789", - } - - -def test_set_backblaze_config_on_undefined( - authorized_client, mock_restic_controller, mock_restic_tasks, undefined_settings -): - response = authorized_client.put( - "/services/restic/backblaze/config", - json={"accountId": "123", "accountKey": "456", "bucket": "789"}, - ) - assert response.status_code == 200 - assert mock_restic_tasks.update_keys_from_userdata.call_count == 1 - assert read_json(undefined_settings / "undefined.json")["backup"] == { - "provider": "BACKBLAZE", - "accountId": "123", - "accountKey": "456", - "bucket": "789", - } - - -def test_set_backblaze_config_on_no_values( - authorized_client, mock_restic_controller, mock_restic_tasks, no_values -): - response = authorized_client.put( - "/services/restic/backblaze/config", - json={"accountId": "123", "accountKey": "456", "bucket": "789"}, - ) - assert response.status_code == 200 - assert mock_restic_tasks.update_keys_from_userdata.call_count == 1 - assert read_json(no_values / "no_values.json")["backup"] == { - "provider": "BACKBLAZE", - "accountId": "123", - "accountKey": "456", - "bucket": "789", - } From 26488aa12fea0df459407e0834afbcd30a508542 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 21 Jul 2023 11:11:24 +0000 Subject: [PATCH 481/537] fix(backup):force backup function to return same length ids as when getting snapshots --- selfprivacy_api/backup/backuppers/restic_backupper.py | 6 +++++- tests/test_graphql/test_backup.py | 2 ++ 2 files changed, 7 
insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index b69c85d..e98c4c3 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -19,6 +19,8 @@ from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.backup.local_secret import LocalBackupSecret +SHORT_ID_LEN = 8 + class ResticBackupper(AbstractBackupper): def __init__(self, login_flag: str, key_flag: str, storage_type: str) -> None: @@ -177,7 +179,9 @@ class ResticBackupper(AbstractBackupper): @staticmethod def _snapshot_from_fresh_summary(message: dict, repo_name) -> Snapshot: return Snapshot( - id=message["snapshot_id"], + # There is a discrepancy between versions of restic/rclone + # Some report short_id in this field and some full + id=message["snapshot_id"][0:SHORT_ID_LEN], created_at=datetime.datetime.now(datetime.timezone.utc), service_name=repo_name, ) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 2fa9531..6d12a5e 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -205,6 +205,8 @@ def test_backup_returns_snapshot(backups, dummy_service): snapshot = provider.backupper.start_backup(service_folders, name) assert snapshot.id is not None + assert len(snapshot.id) == len(Backups.get_all_snapshots()[0].id) + assert Backups.get_snapshot_by_id(snapshot.id) is not None assert snapshot.service_name == name assert snapshot.created_at is not None From d664fcbdc45fc62169f2b96ef40c5aeac887d2e4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 21 Jul 2023 13:06:18 +0000 Subject: [PATCH 482/537] refactor(jobs):track 100-on-finished as a progress update --- selfprivacy_api/jobs/__init__.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index ea1e15e..3fe452b 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -198,12 +198,15 @@ class Jobs: job.description = description if status_text is not None: job.status_text = status_text - if status == JobStatus.FINISHED: - job.progress = 100 - if progress is not None: - # explicitly provided progress has priority + + # if it is finished it is 100 + # unless user says otherwise + if status == JobStatus.FINISHED and progress is None: + progress = 100 + if progress is not None and job.progress != progress: job.progress = progress Jobs.log_progress_update(job, progress) + job.status = status Jobs.log_status_update(job, status) job.updated_at = datetime.datetime.now() From e9bb6d9973a8c1b948619e62dc49d658845c8b64 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 26 Jul 2023 10:09:27 +0000 Subject: [PATCH 483/537] test(backups):check that snapshot cache invalidation invalidates both ways. 
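The cache can drift from the repository in both directions: a snapshot can
exist in the repo while missing from the redis cache, and a stale cache
entry can outlive a snapshot the repo has already forgotten. A minimal
sketch of the sequence the new test walks through (lowlevel_forget is the
helper added below; the other calls are existing APIs from this series):

    Backups.back_up(dummy_service)          # repo and cache both gain a snapshot
    Storage.invalidate_snapshot_storage()   # cache emptied, repo untouched
    Backups.force_snapshot_cache_reload()   # cache rebuilt from the repo
    lowlevel_forget(snap.id)                # repo forgets it, stale cache keeps it
    Backups.force_snapshot_cache_reload()   # reload drops it from the cache too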
--- tests/test_graphql/test_backup.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 6d12a5e..dcb4739 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -552,6 +552,34 @@ def test_snapshots_caching(backups, dummy_service): assert len(cached_snapshots) == 1 +def lowlevel_forget(snapshot_id): + Backups.provider().backupper.forget_snapshot(snapshot_id) + + +# Storage +def test_snapshots_cache_invalidation(backups, dummy_service): + Backups.back_up(dummy_service) + cached_snapshots = Storage.get_cached_snapshots() + assert len(cached_snapshots) == 1 + + Storage.invalidate_snapshot_storage() + cached_snapshots = Storage.get_cached_snapshots() + assert len(cached_snapshots) == 0 + + Backups.force_snapshot_cache_reload() + cached_snapshots = Storage.get_cached_snapshots() + assert len(cached_snapshots) == 1 + snap = cached_snapshots[0] + + lowlevel_forget(snap.id) + cached_snapshots = Storage.get_cached_snapshots() + assert len(cached_snapshots) == 1 + + Backups.force_snapshot_cache_reload() + cached_snapshots = Storage.get_cached_snapshots() + assert len(cached_snapshots) == 0 + + # Storage def test_init_tracking_caching(backups, raw_dummy_service): assert Storage.has_init_mark() is False From aa7cc7155756791a2353635fd0f8dd9ae37d107f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 26 Jul 2023 11:54:17 +0000 Subject: [PATCH 484/537] feature(backups):add a function to set provider from env --- selfprivacy_api/backup/__init__.py | 26 ++++++++++++++++++++ tests/test_graphql/test_backup.py | 38 +++++++++++++++++++++++++++++- 2 files changed, 63 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 9722b71..94d1abb 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -2,6 +2,7 @@ This module contains the controller class for backups. 
""" from datetime import datetime, timedelta +import os from os import statvfs from typing import List, Optional @@ -43,6 +44,13 @@ DEFAULT_JSON_PROVIDER = { "bucket": "", } +BACKUP_PROVIDER_ENVS = { + "kind": "BACKUP_KIND", + "login": "BACKUP_LOGIN", + "key": "BACKUP_KEY", + "location": "BACKUP_LOCATION", +} + class NotDeadError(AssertionError): """ @@ -132,6 +140,24 @@ class Backups: Storage.store_provider(none_provider) return none_provider + @staticmethod + def set_provider_from_envs(): + for env in BACKUP_PROVIDER_ENVS.values(): + if env not in os.environ.keys(): + raise ValueError( + f"Cannot set backup provider from envs, there is no {env} set" + ) + + kind_str = os.environ[BACKUP_PROVIDER_ENVS["kind"]] + kind_enum = BackupProviderEnum[kind_str] + provider = Backups._construct_provider( + kind=kind_enum, + login=os.environ[BACKUP_PROVIDER_ENVS["login"]], + key=os.environ[BACKUP_PROVIDER_ENVS["key"]], + location=os.environ[BACKUP_PROVIDER_ENVS["location"]], + ) + Storage.store_provider(provider) + @staticmethod def _construct_provider( kind: BackupProviderEnum, diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index dcb4739..fcb437e 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -1,4 +1,5 @@ import pytest +import os import os.path as path from os import makedirs from os import remove @@ -18,10 +19,11 @@ from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.models.backup.snapshot import Snapshot -from selfprivacy_api.backup import Backups +from selfprivacy_api.backup import Backups, BACKUP_PROVIDER_ENVS import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze +from selfprivacy_api.backup.providers.none import NoBackups from selfprivacy_api.backup.util import sync from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job @@ -129,6 +131,40 @@ def test_config_load(generic_userdata): assert provider.backupper.key == "KEY" +def test_reset_sets_to_none1(): + Backups.reset() + provider = Backups.provider() + assert provider is not None + assert isinstance(provider, NoBackups) + + +def test_reset_sets_to_none2(backups): + # now with something set up first^^^ + Backups.reset() + provider = Backups.provider() + assert provider is not None + assert isinstance(provider, NoBackups) + + +def test_setting_from_envs(tmpdir): + Backups.reset() + os.environ[BACKUP_PROVIDER_ENVS["kind"]] = "BACKBLAZE" + os.environ[BACKUP_PROVIDER_ENVS["login"]] = "ID" + os.environ[BACKUP_PROVIDER_ENVS["key"]] = "KEY" + os.environ[BACKUP_PROVIDER_ENVS["location"]] = "selfprivacy" + Backups.set_provider_from_envs() + provider = Backups.provider() + + assert provider is not None + assert isinstance(provider, Backblaze) + assert provider.login == "ID" + assert provider.key == "KEY" + assert provider.location == "selfprivacy" + + assert provider.backupper.account == "ID" + assert provider.backupper.key == "KEY" + + def test_json_reset(generic_userdata): Backups.reset(reset_json=False) provider = Backups.provider() From ffec344ba81b9b7355930c8f65e8d7bae5aa0ab8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 26 Jul 2023 14:26:04 +0000 Subject: [PATCH 485/537] test(backups): make the test repo overridable by envs --- selfprivacy_api/backup/__init__.py | 10 ++++- tests/test_graphql/test_backup.py | 64 
++++++++++++++++++++++++------ 2 files changed, 60 insertions(+), 14 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 94d1abb..725904e 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -431,10 +431,18 @@ class Backups: @staticmethod def forget_snapshot(snapshot: Snapshot) -> None: - """Deletes a snapshot from the storage""" + """Deletes a snapshot from the repo and from cache""" Backups.provider().backupper.forget_snapshot(snapshot.id) Storage.delete_cached_snapshot(snapshot) + @staticmethod + def forget_all_snapshots(): + """deliberately erase all snapshots we made""" + # there is no dedicated optimized command for this, + # but maybe we can have a multi-erase + for snapshot in Backups.get_all_snapshots(): + Backups.forget_snapshot(snapshot) + @staticmethod def force_snapshot_cache_reload() -> None: """ diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index fcb437e..fc42ca2 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -39,14 +39,34 @@ TESTFILE_2_BODY = "testissimo!" REPO_NAME = "test_backup" -@pytest.fixture(scope="function") -def backups(tmpdir): - Backups.reset() - - test_repo_path = path.join(tmpdir, "totallyunrelated") +def prepare_localfile_backups(temp_dir): + test_repo_path = path.join(temp_dir, "totallyunrelated") + assert not path.exists(test_repo_path) Backups.set_localfile_repo(test_repo_path) + +@pytest.fixture(scope="function") +def backups_local(tmpdir): + Backups.reset() + prepare_localfile_backups(tmpdir) Jobs.reset() + Backups.init_repo() + + +@pytest.fixture(scope="function") +def backups(tmpdir): + # for those tests that are supposed to pass with any repo + Backups.reset() + if BACKUP_PROVIDER_ENVS["kind"] in os.environ.keys(): + Backups.set_provider_from_envs() + else: + prepare_localfile_backups(tmpdir) + Jobs.reset() + # assert not repo_path + + Backups.init_repo() + yield + Backups.forget_all_snapshots() @pytest.fixture() @@ -82,11 +102,6 @@ def raw_dummy_service(tmpdir): @pytest.fixture() def dummy_service(tmpdir, backups, raw_dummy_service) -> Service: service = raw_dummy_service - repo_path = path.join(tmpdir, "test_repo") - assert not path.exists(repo_path) - # assert not repo_path - - Backups.init_repo() # register our service services.services.append(service) @@ -148,6 +163,12 @@ def test_reset_sets_to_none2(backups): def test_setting_from_envs(tmpdir): Backups.reset() + environment_stash = {} + if BACKUP_PROVIDER_ENVS["kind"] in os.environ.keys(): + # we are running under special envs, stash them before rewriting them + for key in BACKUP_PROVIDER_ENVS.values(): + environment_stash[key] = os.environ[key] + os.environ[BACKUP_PROVIDER_ENVS["kind"]] = "BACKBLAZE" os.environ[BACKUP_PROVIDER_ENVS["login"]] = "ID" os.environ[BACKUP_PROVIDER_ENVS["key"]] = "KEY" @@ -164,6 +185,13 @@ def test_setting_from_envs(tmpdir): assert provider.backupper.account == "ID" assert provider.backupper.key == "KEY" + if environment_stash != {}: + for key in BACKUP_PROVIDER_ENVS.values(): + os.environ[key] = environment_stash[key] + else: + for key in BACKUP_PROVIDER_ENVS.values(): + del os.environ[key] + def test_json_reset(generic_userdata): Backups.reset(reset_json=False) @@ -294,9 +322,12 @@ def test_sizing(backups, dummy_service): assert size > 0 -def test_init_tracking(backups, raw_dummy_service): +def test_init_tracking(backups, tmpdir): + assert Backups.is_initted() is True + Backups.reset() assert 
-
+    separate_dir = tmpdir / "out_of_the_way"
+    prepare_localfile_backups(separate_dir)
     Backups.init_repo()
 
     assert Backups.is_initted() is True
@@ -618,6 +649,8 @@ def test_snapshots_cache_invalidation(backups, dummy_service):
 
 # Storage
 def test_init_tracking_caching(backups, raw_dummy_service):
+    assert Storage.has_init_mark() is True
+    Backups.reset()
     assert Storage.has_init_mark() is False
 
     Storage.mark_as_init()
@@ -627,7 +660,12 @@
 
 # Storage
-def test_init_tracking_caching2(backups, raw_dummy_service):
+def test_init_tracking_caching2(backups, tmpdir):
+    assert Storage.has_init_mark() is True
+    Backups.reset()
+    assert Storage.has_init_mark() is False
+    separate_dir = tmpdir / "out_of_the_way"
+    prepare_localfile_backups(separate_dir)
     assert Storage.has_init_mark() is False
 
     Backups.init_repo()
From cfa7f4ae59b23a279efa597d05453fcf11ac67bf Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 26 Jul 2023 16:45:08 +0000
Subject: [PATCH 486/537] feature(backups): add full repo erasure capability

---
 selfprivacy_api/backup/__init__.py            |  8 ++++
 selfprivacy_api/backup/backuppers/__init__.py |  5 +++
 .../backup/backuppers/none_backupper.py       |  4 ++
 .../backup/backuppers/restic_backupper.py     | 44 +++++++++++++++----
 selfprivacy_api/backup/storage.py             | 13 ++++--
 tests/test_graphql/test_backup.py             | 13 ++++++
 6 files changed, 74 insertions(+), 13 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 725904e..c28c01f 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -236,6 +236,14 @@ class Backups:
         Backups.provider().backupper.init()
         Storage.mark_as_init()
 
+    @staticmethod
+    def erase_repo() -> None:
+        """
+        Completely empties the remote
+        """
+        Backups.provider().backupper.erase_repo()
+        Storage.mark_as_uninitted()
+
     @staticmethod
     def is_initted() -> bool:
         """
diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py
index ea2350b..ccf78b9 100644
--- a/selfprivacy_api/backup/backuppers/__init__.py
+++ b/selfprivacy_api/backup/backuppers/__init__.py
@@ -36,6 +36,11 @@ class AbstractBackupper(ABC):
         """Initialize the repository"""
         raise NotImplementedError
 
+    @abstractmethod
+    def erase_repo(self) -> None:
+        """Completely empties the remote"""
+        raise NotImplementedError
+
     @abstractmethod
     def restore_from_backup(
         self,
diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py
index d9edaeb..87e43c5 100644
--- a/selfprivacy_api/backup/backuppers/none_backupper.py
+++ b/selfprivacy_api/backup/backuppers/none_backupper.py
@@ -23,6 +23,10 @@ class NoneBackupper(AbstractBackupper):
     def init(self):
         raise NotImplementedError
 
+    def erase_repo(self) -> None:
+        """Completely empties the remote"""
+        raise NotImplementedError
+
     def restore_from_backup(self, snapshot_id: str, folders: List[str], verify=True):
         """Restore a target folder using a snapshot"""
         raise NotImplementedError
diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index e98c4c3..816bebf 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -40,20 +40,25 @@ class ResticBackupper(AbstractBackupper):
     def restic_repo(self) -> str:
         # https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone
         # https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5
-        return f"rclone:{self.storage_type}{self.repo}"
+        return f"rclone:{self.rclone_repo()}"
+
+    def rclone_repo(self) -> str:
+        return f"{self.storage_type}{self.repo}"
 
     def rclone_args(self):
-        return "rclone.args=serve restic --stdio " + self.backend_rclone_args()
+        return "rclone.args=serve restic --stdio " + " ".join(
+            self.backend_rclone_args()
+        )
 
-    def backend_rclone_args(self) -> str:
-        acc_arg = ""
-        key_arg = ""
+    def backend_rclone_args(self) -> list[str]:
+        args = []
         if self.account != "":
-            acc_arg = f"{self.login_flag} {self.account}"
+            acc_args = [self.login_flag, self.account]
+            args.extend(acc_args)
         if self.key != "":
-            key_arg = f"{self.key_flag} {self.key}"
-
-        return f"{acc_arg} {key_arg}"
+            key_args = [self.key_flag, self.key]
+            args.extend(key_args)
+        return args
 
     def _password_command(self):
         return f"echo {LocalBackupSecret.get()}"
@@ -79,6 +84,27 @@ class ResticBackupper(AbstractBackupper):
         command.extend(ResticBackupper.__flatten_list(args))
         return command
 
+    def erase_repo(self) -> None:
+        """Fully erases repo on remote, can be reinitted again"""
+        command = [
+            "rclone",
+            "purge",
+            self.rclone_repo(),
+        ]
+        backend_args = self.backend_rclone_args()
+        if backend_args:
+            command.extend(backend_args)
+
+        with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle:
+            output = handle.communicate()[0].decode("utf-8")
+            if handle.returncode != 0:
+                raise ValueError(
+                    "purge exited with errorcode",
+                    handle.returncode,
+                    ":",
+                    output,
+                )
+
     def mount_repo(self, mount_directory):
         mount_command = self.restic_command("mount", mount_directory)
         mount_command.insert(0, "nohup")
diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py
index f7384a0..d46f584 100644
--- a/selfprivacy_api/backup/storage.py
+++ b/selfprivacy_api/backup/storage.py
@@ -21,7 +21,7 @@
 REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60  # one day
 REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:"
 REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:"
-REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:"
+REDIS_INITTED_CACHE = "backups:repo_initted"
 
 REDIS_PROVIDER_KEY = "backups:provider"
 REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period"
@@ -38,9 +38,9 @@ class Storage:
         """Deletes all backup related data from redis"""
         redis.delete(REDIS_PROVIDER_KEY)
         redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY)
+        redis.delete(REDIS_INITTED_CACHE)
 
         prefixes_to_clean = [
-            REDIS_INITTED_CACHE_PREFIX,
            REDIS_SNAPSHOTS_PREFIX,
             REDIS_LAST_BACKUP_PREFIX,
         ]
@@ -162,11 +162,16 @@ class Storage:
     @staticmethod
     def has_init_mark() -> bool:
        """Returns True if the repository was initialized"""
-        if redis.exists(REDIS_INITTED_CACHE_PREFIX):
+        if redis.exists(REDIS_INITTED_CACHE):
             return True
         return False
 
     @staticmethod
     def mark_as_init():
         """Marks the repository as initialized"""
-        redis.set(REDIS_INITTED_CACHE_PREFIX, 1)
+        redis.set(REDIS_INITTED_CACHE, 1)
+
+    @staticmethod
+    def mark_as_uninitted():
+        """Marks the repository as not initialized"""
+        redis.delete(REDIS_INITTED_CACHE)
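Taken together, `erase_repo()` purges the rclone remote and drops the single init mark, so the repository can be initialized again from scratch. A minimal sketch of the resulting lifecycle, using only methods shown in this patch (it mirrors the test added below):

    from selfprivacy_api.backup import Backups

    Backups.erase_repo()   # rclone purge on the remote, then Storage.mark_as_uninitted()
    assert Backups.is_initted() is False

    Backups.init_repo()    # restic init, then Storage.mark_as_init()
    assert Backups.is_initted() is True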
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index fc42ca2..e85d1de 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -222,6 +222,19 @@ def test_file_backend_init(file_backup):
     file_backup.backupper.init()
 
 
+def test_reinit_after_purge(backups):
+    assert Backups.is_initted() is True
+
+    Backups.erase_repo()
+    assert Backups.is_initted() is False
+    with pytest.raises(ValueError):
+        Backups.get_all_snapshots()
+
+    Backups.init_repo()
+    assert Backups.is_initted() is True
+    assert len(Backups.get_all_snapshots()) == 0
+
+
 def test_backup_simple_file(raw_dummy_service, file_backup):
     # temporarily incomplete
     service = raw_dummy_service
From 00317cc7e4f0fcab6f385ad1be8bf3830211f30b Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Wed, 26 Jul 2023 16:52:58 +0000
Subject: [PATCH 487/537] test(backups): erase repos between tests

---
 selfprivacy_api/backup/backuppers/none_backupper.py | 3 ++-
 tests/test_graphql/test_backup.py                   | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py
index 87e43c5..3f9f7fd 100644
--- a/selfprivacy_api/backup/backuppers/none_backupper.py
+++ b/selfprivacy_api/backup/backuppers/none_backupper.py
@@ -25,7 +25,8 @@ class NoneBackupper(AbstractBackupper):
 
     def erase_repo(self) -> None:
         """Completely empties the remote"""
-        raise NotImplementedError
+        # this one is already empty
+        pass
 
     def restore_from_backup(self, snapshot_id: str, folders: List[str], verify=True):
         """Restore a target folder using a snapshot"""
         raise NotImplementedError
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index e85d1de..da81c60 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -66,7 +66,7 @@ def backups(tmpdir):
 
     Backups.init_repo()
     yield
-    Backups.forget_all_snapshots()
+    Backups.erase_repo()
 
 
 @pytest.fixture()
From 3bff43a6bf5bcbd83b9aefa753785f26d519583d Mon Sep 17 00:00:00 2001
From: Inex Code
Date: Fri, 28 Jul 2023 02:18:05 +0300
Subject: [PATCH 488/537] fix(mailserver): Location of the "location" attribute was inconsistent

---
 .../services/mailserver/__init__.py | 24 +++++++++----------
 1 file changed, 11 insertions(+), 13 deletions(-)

diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py
index d3600e5..d947864 100644
--- a/selfprivacy_api/services/mailserver/__init__.py
+++ b/selfprivacy_api/services/mailserver/__init__.py
@@ -4,16 +4,14 @@
 import base64
 import subprocess
 import typing
 
-from selfprivacy_api.jobs import Job, JobStatus, Jobs
+from selfprivacy_api.jobs import Job, Jobs
 from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
 from selfprivacy_api.services.generic_status_getter import (
-    get_service_status,
     get_service_status_from_several_units,
 )
 from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
-import selfprivacy_api.utils as utils
+from selfprivacy_api import utils
 from selfprivacy_api.utils.block_devices import BlockDevice
-from selfprivacy_api.utils.huey import huey
 import selfprivacy_api.utils.network as network_utils
 from selfprivacy_api.services.mailserver.icon import MAILSERVER_ICON
 
@@ -78,18 +76,18 @@ class MailServer(Service):
 
     @staticmethod
     def stop():
-        subprocess.run(["systemctl", "stop", "dovecot2.service"])
-        subprocess.run(["systemctl", "stop", "postfix.service"])
+        subprocess.run(["systemctl", "stop", "dovecot2.service"], check=False)
+        subprocess.run(["systemctl", "stop", "postfix.service"], check=False)
 
     @staticmethod
     def start():
-        subprocess.run(["systemctl", "start", "dovecot2.service"])
-        subprocess.run(["systemctl", "start", "postfix.service"])
+        subprocess.run(["systemctl",
"start", "dovecot2.service"], check=False) + subprocess.run(["systemctl", "start", "postfix.service"], check=False) @staticmethod def restart(): - subprocess.run(["systemctl", "restart", "dovecot2.service"]) - subprocess.run(["systemctl", "restart", "postfix.service"]) + subprocess.run(["systemctl", "restart", "dovecot2.service"], check=False) + subprocess.run(["systemctl", "restart", "postfix.service"], check=False) @staticmethod def get_configuration(): @@ -111,7 +109,7 @@ class MailServer(Service): def get_drive() -> str: with utils.ReadUserData() as user_data: if user_data.get("useBinds", False): - return user_data.get("mailserver", {}).get("location", "sda1") + return user_data.get("email", {}).get("location", "sda1") else: return "sda1" @@ -142,7 +140,7 @@ class MailServer(Service): type="MX", name=domain, content=domain, ttl=3600, priority=10 ), ServiceDnsRecord( - type="TXT", name="_dmarc", content=f"v=DMARC1; p=none", ttl=18000 + type="TXT", name="_dmarc", content="v=DMARC1; p=none", ttl=18000 ), ServiceDnsRecord( type="TXT", @@ -167,7 +165,7 @@ class MailServer(Service): volume, job, FolderMoveNames.default_foldermoves(self), - "mailserver", + "email", ) return job From 6660e1d9d5d06edb6905e090200f3d65cf914540 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 28 Jul 2023 02:31:28 +0300 Subject: [PATCH 489/537] fix(services): ocserv and jitsi were returning wrong volumes too --- selfprivacy_api/services/jitsi/__init__.py | 30 ++++++++++++--------- selfprivacy_api/services/ocserv/__init__.py | 13 +++++---- selfprivacy_api/utils/block_devices.py | 9 +++++++ 3 files changed, 33 insertions(+), 19 deletions(-) diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py index 2b54ae1..96bb224 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -3,16 +3,13 @@ import base64 import subprocess import typing -from selfprivacy_api.jobs import Job, Jobs -from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.jobs import Job from selfprivacy_api.services.generic_status_getter import ( - get_service_status, get_service_status_from_several_units, ) from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain -from selfprivacy_api.utils.block_devices import BlockDevice -from selfprivacy_api.utils.huey import huey +from selfprivacy_api.utils.block_devices import BlockDevice, BlockDevices import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.jitsi.icon import JITSI_ICON @@ -87,18 +84,27 @@ class Jitsi(Service): @staticmethod def stop(): - subprocess.run(["systemctl", "stop", "jitsi-videobridge.service"]) - subprocess.run(["systemctl", "stop", "jicofo.service"]) + subprocess.run( + ["systemctl", "stop", "jitsi-videobridge.service"], + check=False, + ) + subprocess.run(["systemctl", "stop", "jicofo.service"], check=False) @staticmethod def start(): - subprocess.run(["systemctl", "start", "jitsi-videobridge.service"]) - subprocess.run(["systemctl", "start", "jicofo.service"]) + subprocess.run( + ["systemctl", "start", "jitsi-videobridge.service"], + check=False, + ) + subprocess.run(["systemctl", "start", "jicofo.service"], check=False) @staticmethod def restart(): - subprocess.run(["systemctl", "restart", "jitsi-videobridge.service"]) - subprocess.run(["systemctl", "restart", "jicofo.service"]) + subprocess.run( + 
["systemctl", "restart", "jitsi-videobridge.service"], + check=False, + ) + subprocess.run(["systemctl", "restart", "jicofo.service"], check=False) @staticmethod def get_configuration(): @@ -118,7 +124,7 @@ class Jitsi(Service): @staticmethod def get_drive() -> str: - return "sda1" + return BlockDevices().get_root_block_device().name @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index 3860b19..ec93021 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -2,12 +2,11 @@ import base64 import subprocess import typing -from selfprivacy_api.jobs import Job, Jobs -from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.jobs import Job from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData -from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.block_devices import BlockDevice, BlockDevices from selfprivacy_api.services.ocserv.icon import OCSERV_ICON import selfprivacy_api.utils.network as network_utils @@ -77,15 +76,15 @@ class Ocserv(Service): @staticmethod def stop(): - subprocess.run(["systemctl", "stop", "ocserv.service"]) + subprocess.run(["systemctl", "stop", "ocserv.service"], check=False) @staticmethod def start(): - subprocess.run(["systemctl", "start", "ocserv.service"]) + subprocess.run(["systemctl", "start", "ocserv.service"], check=False) @staticmethod def restart(): - subprocess.run(["systemctl", "restart", "ocserv.service"]) + subprocess.run(["systemctl", "restart", "ocserv.service"], check=False) @staticmethod def get_configuration(): @@ -101,7 +100,7 @@ class Ocserv(Service): @staticmethod def get_drive() -> str: - return "sda1" + return BlockDevices().get_root_block_device().name @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index 0de3d90..15b6979 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -218,3 +218,12 @@ class BlockDevices(metaclass=SingletonMetaclass): if mountpoint in block_device.mountpoints: block_devices.append(block_device) return block_devices + + def get_root_block_device(self) -> BlockDevice: + """ + Return the root block device. 
+ """ + for block_device in self.block_devices: + if "/" in block_device.mountpoints: + return block_device + raise RuntimeError("No root block device found") From 829915029d12b4cd2548e029b2f16730b80ef7c3 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 28 Jul 2023 02:36:10 +0300 Subject: [PATCH 490/537] test: Add test for getting root block device --- tests/test_block_device_utils.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/test_block_device_utils.py b/tests/test_block_device_utils.py index 2676e6c..f821e96 100644 --- a/tests/test_block_device_utils.py +++ b/tests/test_block_device_utils.py @@ -488,3 +488,21 @@ def test_get_block_devices_by_mountpoint(lsblk_full_mock, authorized_client): def test_get_block_devices_by_mountpoint_no_match(lsblk_full_mock, authorized_client): block_devices = BlockDevices().get_block_devices_by_mountpoint("/foo") assert len(block_devices) == 0 + + +def test_get_root_block_device(lsblk_full_mock, authorized_client): + block_device = BlockDevices().get_root_block_device() + assert block_device is not None + assert block_device.name == "sda1" + assert block_device.path == "/dev/sda1" + assert block_device.fsavail == "4605702144" + assert block_device.fssize == "19814920192" + assert block_device.fstype == "ext4" + assert block_device.fsused == "14353719296" + assert block_device.mountpoints == ["/nix/store", "/"] + assert block_device.label is None + assert block_device.uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_device.size == "20210236928" + assert block_device.model is None + assert block_device.serial is None + assert block_device.type == "part" From 641ab260697d160cdff3de54dd7430279d30acb0 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 28 Jul 2023 03:14:50 +0300 Subject: [PATCH 491/537] fix(storage): fix root device detection and ignore iso9660 --- selfprivacy_api/graphql/queries/storage.py | 2 +- selfprivacy_api/services/generic_service_mover.py | 2 +- selfprivacy_api/utils/block_devices.py | 9 +++++++++ 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py index 6800518..4b9a291 100644 --- a/selfprivacy_api/graphql/queries/storage.py +++ b/selfprivacy_api/graphql/queries/storage.py @@ -23,7 +23,7 @@ class Storage: else str(volume.size), free_space=str(volume.fsavail), used_space=str(volume.fsused), - root=volume.name == "sda1", + root=volume.is_root(), name=volume.name, model=volume.model, serial=volume.serial, diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index d858b93..cfb0385 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -83,7 +83,7 @@ def move_service( ) return # Make sure the volume is mounted - if volume.name != "sda1" and f"/volumes/{volume.name}" not in volume.mountpoints: + if not volume.is_root() and f"/volumes/{volume.name}" not in volume.mountpoints: Jobs.update( job=job, status=JobStatus.ERROR, diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index 15b6979..83fc28f 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -71,6 +71,12 @@ class BlockDevice: def __hash__(self): return hash(self.name) + def is_root(self) -> bool: + """ + Return True if the block device is the root device. 
+ """ + return "/" in self.mountpoints + def stats(self) -> typing.Dict[str, typing.Any]: """ Update current data and return a dictionary of stats. @@ -175,6 +181,9 @@ class BlockDevices(metaclass=SingletonMetaclass): # Ignore devices with type "rom" if device["type"] == "rom": continue + # Ignore iso9660 devices + if device["fstype"] == "iso9660": + continue if device["fstype"] is None: if "children" in device: for child in device["children"]: From 88af27a8ba230e37243dedbb0853186363d04c21 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 28 Jul 2023 03:20:33 +0300 Subject: [PATCH 492/537] chore: bump version --- selfprivacy_api/dependencies.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 95c49e3..6f6f5a5 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.2.0" + return "2.2.1" diff --git a/setup.py b/setup.py index 7e964dc..dea4568 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.2.0", + version="2.2.1", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From bba837530afe2d776f4620b84473ea1d67c9b2ce Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 28 Jul 2023 10:40:40 +0000 Subject: [PATCH 493/537] feature(backups): expose forget to API --- .../graphql/mutations/backup_mutations.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index b92af4a..f6dc282 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -157,6 +157,35 @@ class BackupMutations: job=job_to_api_job(job), ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def forget_snapshot(self, snapshot_id: str) -> GenericMutationReturn: + """Forget a snapshot. + Makes it inaccessible from the server. + After some time, the data (encrypted) will not be recoverable + from the backup server too, but not immediately""" + + snap = Backups.get_snapshot_by_id(snapshot_id) + if snap is None: + return GenericMutationReturn( + success=False, + code=404, + message=f"snapshot {snapshot_id} not found", + ) + + try: + Backups.forget_snapshot(snap) + return GenericMutationReturn( + success=True, + code=200, + message="", + ) + except Exception as error: + return GenericMutationReturn( + success=False, + code=400, + message=str(error), + ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) def force_snapshots_reload(self) -> GenericMutationReturn: """Force snapshots reload""" From 2934e2becac0d7f86ad79736e6d002ecc7f62941 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 28 Jul 2023 11:32:48 +0000 Subject: [PATCH 494/537] test(backups): test forgetting via API --- tests/test_graphql/test_api_backup.py | 50 +++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index bfa315b..e53ce2a 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -94,6 +94,18 @@ mutation TestRestoreService($snapshot_id: String!) { } """ +API_FORGET_MUTATION = """ +mutation TestForgetSnapshot($snapshot_id: String!) 
{ + backup { + forgetSnapshot(snapshotId: $snapshot_id) { + success + message + code + } + } +} +""" + API_SNAPSHOTS_QUERY = """ allSnapshots { id @@ -143,6 +155,17 @@ def api_backup(authorized_client, service): return response +def api_forget(authorized_client, snapshot_id): + response = authorized_client.post( + "/graphql", + json={ + "query": API_FORGET_MUTATION, + "variables": {"snapshot_id": snapshot_id}, + }, + ) + return response + + def api_set_period(authorized_client, period): response = authorized_client.post( "/graphql", @@ -370,3 +393,30 @@ def test_reload_snapshots(authorized_client, dummy_service): snaps = api_snapshots(authorized_client) assert len(snaps) == 1 + + +def test_forget_snapshot(authorized_client, dummy_service): + response = api_backup(authorized_client, dummy_service) + data = get_data(response)["backup"]["startBackup"] + + snaps = api_snapshots(authorized_client) + assert len(snaps) == 1 + + response = api_forget(authorized_client, snaps[0]["id"]) + data = get_data(response)["backup"]["forgetSnapshot"] + assert_ok(data) + + snaps = api_snapshots(authorized_client) + assert len(snaps) == 0 + + +def test_forget_nonexistent_snapshot(authorized_client, dummy_service): + snaps = api_snapshots(authorized_client) + assert len(snaps) == 0 + response = api_forget(authorized_client, "898798uekiodpjoiweoiwuoeirueor") + data = get_data(response)["backup"]["forgetSnapshot"] + assert data["code"] == 404 + assert data["success"] is False + + snaps = api_snapshots(authorized_client) + assert len(snaps) == 0 From a87889b2521c9dc1ef8a337dfd77153b82ffcde1 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 2 Aug 2023 07:41:55 +0300 Subject: [PATCH 495/537] refactor: Change Mail Server id from "mailserver" to "email" Nix config uses "email" so we should too. 
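The service id doubles as the key under which per-service settings live in userdata, which is why the earlier mail server fix already reads `user_data.get("email", {}).get("location", ...)`. A rough sketch of the userdata shape both sides now agree on (the volume name is a placeholder):

    # Hypothetical userdata excerpt; "sdb" stands in for a real volume name.
    user_data = {
        "useBinds": True,
        "email": {
            "location": "sdb",
        },
    }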
--- selfprivacy_api/services/mailserver/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index d947864..c70dfa8 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -21,7 +21,7 @@ class MailServer(Service): @staticmethod def get_id() -> str: - return "mailserver" + return "email" @staticmethod def get_display_name() -> str: @@ -155,7 +155,7 @@ class MailServer(Service): def move_to_volume(self, volume: BlockDevice) -> Job: job = Jobs.add( - type_id="services.mailserver.move", + type_id="services.email.move", name="Move Mail Server", description=f"Moving mailserver data to {volume.name}", ) From f08eafc3d84f6f96f82eb8641b424965ddb06aa2 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 2 Aug 2023 08:51:42 +0300 Subject: [PATCH 496/537] refactor(services): extract get_drive to the base Service class --- .../services/bitwarden/__init__.py | 8 ------- selfprivacy_api/services/gitea/__init__.py | 8 ------- selfprivacy_api/services/jitsi/__init__.py | 4 ---- .../services/mailserver/__init__.py | 8 ------- .../services/nextcloud/__init__.py | 9 ------- selfprivacy_api/services/ocserv/__init__.py | 4 ---- selfprivacy_api/services/pleroma/__init__.py | 8 ------- selfprivacy_api/services/service.py | 24 +++++++++++++++---- 8 files changed, 19 insertions(+), 54 deletions(-) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 98455d8..6e9f932 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -121,14 +121,6 @@ class Bitwarden(Service): def get_folders() -> typing.List[str]: return ["/var/lib/bitwarden", "/var/lib/bitwarden_rs"] - @staticmethod - def get_drive() -> str: - with ReadUserData() as user_data: - if user_data.get("useBinds", False): - return user_data.get("bitwarden", {}).get("location", "sda1") - else: - return "sda1" - @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: """Return list of DNS records for Bitwarden service.""" diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index ce73dc6..81eae37 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -116,14 +116,6 @@ class Gitea(Service): def get_folders() -> typing.List[str]: return ["/var/lib/gitea"] - @staticmethod - def get_drive() -> str: - with ReadUserData() as user_data: - if user_data.get("useBinds", False): - return user_data.get("gitea", {}).get("location", "sda1") - else: - return "sda1" - @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: return [ diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py index 96bb224..f2917d5 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -122,10 +122,6 @@ class Jitsi(Service): def get_folders() -> typing.List[str]: return ["/var/lib/jitsi-meet"] - @staticmethod - def get_drive() -> str: - return BlockDevices().get_root_block_device().name - @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: ip4 = network_utils.get_ip4() diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index c70dfa8..d0f70eb 100644 --- a/selfprivacy_api/services/mailserver/__init__.py 
+++ b/selfprivacy_api/services/mailserver/__init__.py @@ -105,14 +105,6 @@ class MailServer(Service): def get_folders() -> typing.List[str]: return ["/var/vmail", "/var/sieve"] - @staticmethod - def get_drive() -> str: - with utils.ReadUserData() as user_data: - if user_data.get("useBinds", False): - return user_data.get("email", {}).get("location", "sda1") - else: - return "sda1" - @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: domain = utils.get_domain() diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 632c5d3..4ac01af 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -120,15 +120,6 @@ class Nextcloud(Service): def get_folders() -> typing.List[str]: return ["/var/lib/nextcloud"] - @staticmethod - def get_drive() -> str: - """Get the name of disk where Nextcloud is installed.""" - with ReadUserData() as user_data: - if user_data.get("useBinds", False): - return user_data.get("nextcloud", {}).get("location", "sda1") - else: - return "sda1" - @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: return [ diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index ec93021..5e28045 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -98,10 +98,6 @@ class Ocserv(Service): def get_logs(): return "" - @staticmethod - def get_drive() -> str: - return BlockDevices().get_root_block_device().name - @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: return [ diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index bac1cda..d98b13f 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -119,14 +119,6 @@ class Pleroma(Service): ), ] - @staticmethod - def get_drive() -> str: - with ReadUserData() as user_data: - if user_data.get("useBinds", False): - return user_data.get("pleroma", {}).get("location", "sda1") - else: - return "sda1" - @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: return [ diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index c1cc5be..30e810f 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -6,10 +6,11 @@ import typing from pydantic import BaseModel from selfprivacy_api.jobs import Job -from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.block_devices import BlockDevice, BlockDevices from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.owned_path import OwnedPath +from selfprivacy_api import utils from selfprivacy_api.utils.waitloop import wait_until_true DEFAULT_START_STOP_TIMEOUT = 10 * 60 @@ -197,10 +198,23 @@ class Service(ABC): def get_dns_records() -> typing.List[ServiceDnsRecord]: pass - @staticmethod - @abstractmethod - def get_drive() -> str: - pass + @classmethod + def get_drive(cls) -> str: + """ + Get the name of the drive/volume where the service is located. + Example values are `sda1`, `vda`, `sdb`. 
+ """ + root_device: str = BlockDevices().get_root_block_device().name + if not cls.is_movable(): + return root_device + with utils.ReadUserData() as userdata: + if userdata.get("useBinds", False): + return userdata.get(cls.get_id(), {}).get( + "location", + root_device, + ) + else: + return root_device @classmethod def get_folders(cls) -> typing.List[str]: From f4263b0288965071f8c093267269cab4ff1eb3a3 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 2 Aug 2023 08:54:18 +0300 Subject: [PATCH 497/537] style: remove unused imports --- selfprivacy_api/services/bitwarden/__init__.py | 3 +-- selfprivacy_api/services/gitea/__init__.py | 1 - selfprivacy_api/services/jitsi/__init__.py | 2 +- selfprivacy_api/services/ocserv/__init__.py | 2 +- selfprivacy_api/services/test_service/__init__.py | 1 - 5 files changed, 3 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 6e9f932..2f695fd 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -3,13 +3,12 @@ import base64 import subprocess import typing -from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice -from selfprivacy_api.utils.huey import huey import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.bitwarden.icon import BITWARDEN_ICON diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index 81eae37..fcb9ca7 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -9,7 +9,6 @@ from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice -from selfprivacy_api.utils.huey import huey import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.gitea.icon import GITEA_ICON diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py index f2917d5..2684fc3 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -9,7 +9,7 @@ from selfprivacy_api.services.generic_status_getter import ( ) from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain -from selfprivacy_api.utils.block_devices import BlockDevice, BlockDevices +from selfprivacy_api.utils.block_devices import BlockDevice import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.jitsi.icon import JITSI_ICON diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index 5e28045..98c6e97 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -6,7 +6,7 @@ from selfprivacy_api.jobs import Job from 
selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData -from selfprivacy_api.utils.block_devices import BlockDevice, BlockDevices +from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.services.ocserv.icon import OCSERV_ICON import selfprivacy_api.utils.network as network_utils diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index d062700..967b32e 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -10,7 +10,6 @@ from os import path from selfprivacy_api.jobs import Job from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus -from selfprivacy_api.utils import ReadUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice import selfprivacy_api.utils.network as network_utils From ff70a3588e6a8b447d87341eb576851e1f213043 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 4 Aug 2023 12:57:31 +0300 Subject: [PATCH 498/537] chore: bump version --- selfprivacy_api/dependencies.py | 2 +- selfprivacy_api/graphql/mutations/backup_mutations.py | 6 +++--- setup.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 6f6f5a5..fb974e8 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.2.1" + return "2.3.0" diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index f6dc282..c022d57 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -159,8 +159,8 @@ class BackupMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def forget_snapshot(self, snapshot_id: str) -> GenericMutationReturn: - """Forget a snapshot. - Makes it inaccessible from the server. + """Forget a snapshot. + Makes it inaccessible from the server. 
After some time, the data (encrypted) will not be recoverable from the backup server too, but not immediately""" @@ -171,7 +171,7 @@ class BackupMutations: code=404, message=f"snapshot {snapshot_id} not found", ) - + try: Backups.forget_snapshot(snap) return GenericMutationReturn( diff --git a/setup.py b/setup.py index dea4568..684f54f 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.2.1", + version="2.3.0", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From 52336b885dfc804e93d4df4e3ad4fa0498d045d7 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 4 Aug 2023 14:08:23 +0300 Subject: [PATCH 499/537] fix: check if repo is initted by checking retcode --- selfprivacy_api/backup/backuppers/restic_backupper.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 816bebf..37ae06b 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -228,8 +228,7 @@ class ResticBackupper(AbstractBackupper): def is_initted(self) -> bool: command = self.restic_command( - "check", - "--json", + "unlock", ) with subprocess.Popen( @@ -237,10 +236,8 @@ class ResticBackupper(AbstractBackupper): stdout=subprocess.PIPE, shell=False, ) as handle: - output = handle.communicate()[0].decode("utf-8") - if not ResticBackupper.has_json(output): + if handle.returncode != 0: return False - # raise NotImplementedError("error(big): " + output) return True def restored_size(self, snapshot_id: str) -> int: From 752a0b807e7cc0133f4818f6c11ff4df7761a855 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 7 Aug 2023 13:33:10 +0000 Subject: [PATCH 500/537] feature(backups): lock and unlock at will --- .../backup/backuppers/restic_backupper.py | 53 ++++++++++++++++++- selfprivacy_api/backup/util.py | 16 ++++-- tests/test_graphql/test_backup.py | 15 ++++++ 3 files changed, 78 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 37ae06b..a359f98 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -227,6 +227,24 @@ class ResticBackupper(AbstractBackupper): raise ValueError("cannot init a repo: " + output) def is_initted(self) -> bool: + command = self.restic_command( + "check", + ) + + with subprocess.Popen( + command, + stdout=subprocess.PIPE, + shell=False, + stderr=subprocess.STDOUT, + ) as handle: + # communication forces to complete and for returncode to get defined + output = handle.communicate()[0].decode("utf-8") + if handle.returncode != 0: + return False + return True + + def unlock(self) -> None: + """Remove stale locks.""" command = self.restic_command( "unlock", ) @@ -235,10 +253,41 @@ class ResticBackupper(AbstractBackupper): command, stdout=subprocess.PIPE, shell=False, + stderr=subprocess.STDOUT, ) as handle: + # communication forces to complete and for returncode to get defined + output = handle.communicate()[0].decode("utf-8") if handle.returncode != 0: - return False - return True + raise ValueError("cannot unlock the backup repository: ", output) + + def lock(self) -> None: + """ + Introduce a stale lock. + Mainly for testing purposes. 
+ Double lock is supposed to fail + """ + command = self.restic_command( + "check", + ) + + # using temporary cache in /run/user/1000/restic-check-cache-817079729 + # repository 9639c714 opened (repository version 2) successfully, password is correct + # created new cache in /run/user/1000/restic-check-cache-817079729 + # create exclusive lock for repository + # load indexes + # check all packs + # check snapshots, trees and blobs + # [0:00] 100.00% 1 / 1 snapshots + # no errors were found + + try: + for line in output_yielder(command): + if "indexes" in line: + break + if "unable" in line: + raise ValueError(line) + except Exception as e: + raise ValueError("could not lock repository") from e def restored_size(self, snapshot_id: str) -> int: """ diff --git a/selfprivacy_api/backup/util.py b/selfprivacy_api/backup/util.py index bda421e..41d926c 100644 --- a/selfprivacy_api/backup/util.py +++ b/selfprivacy_api/backup/util.py @@ -1,8 +1,10 @@ import subprocess from os.path import exists +from typing import Generator -def output_yielder(command): +def output_yielder(command) -> Generator[str, None, None]: + """Note: If you break during iteration, it kills the process""" with subprocess.Popen( command, shell=False, @@ -10,9 +12,15 @@ def output_yielder(command): stderr=subprocess.STDOUT, universal_newlines=True, ) as handle: - for line in iter(handle.stdout.readline, ""): - if "NOTICE:" not in line: - yield line + if handle is None or handle.stdout is None: + raise ValueError("could not run command: ", command) + + try: + for line in iter(handle.stdout.readline, ""): + if "NOTICE:" not in line: + yield line + except GeneratorExit: + handle.kill() def sync(src_path: str, dest_path: str): diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index da81c60..9743567 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -758,3 +758,18 @@ def test_move_blocks_backups(backups, dummy_service, restore_strategy): with pytest.raises(ValueError): Backups.restore_snapshot(snap, restore_strategy) + + +def test_double_lock_unlock(backups, dummy_service): + # notice that introducing stale locks is only safe for other tests if we erase repo in between + # which we do at the time of writing this test + + Backups.provider().backupper.lock() + with pytest.raises(ValueError): + Backups.provider().backupper.lock() + + Backups.provider().backupper.unlock() + Backups.provider().backupper.lock() + + Backups.provider().backupper.unlock() + Backups.provider().backupper.unlock() From eca4b26a3171c018cb5a658f6eb996772726b5be Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 9 Aug 2023 13:47:18 +0000 Subject: [PATCH 501/537] fix(backups): robustness against stale locks: backing up --- .../backup/backuppers/restic_backupper.py | 34 +++++++++++++++++-- tests/test_graphql/test_backup.py | 6 ++++ 2 files changed, 38 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index a359f98..6c3dbcc 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -1,9 +1,11 @@ +from __future__ import annotations + import subprocess import json import datetime import tempfile -from typing import List +from typing import List, TypeVar, Callable from collections.abc import Iterable from json.decoder import JSONDecodeError from os.path import exists, join @@ -21,6 +23,25 @@ from 
selfprivacy_api.backup.local_secret import LocalBackupSecret SHORT_ID_LEN = 8 +T = TypeVar("T", bound=Callable) + + +def unlocked_repo(func: T) -> T: + """unlock repo and retry if it appears to be locked""" + + def inner(self: ResticBackupper, *args, **kwargs): + try: + return func(self, *args, **kwargs) + except Exception as e: + if "unable to create lock" in str(e): + self.unlock() + return func(self, *args, **kwargs) + else: + raise e + + # Above, we manually guarantee that the type returned is compatible. + return inner # type: ignore + class ResticBackupper(AbstractBackupper): def __init__(self, login_flag: str, key_flag: str, storage_type: str) -> None: @@ -142,6 +163,7 @@ class ResticBackupper(AbstractBackupper): result.append(item) return result + @unlocked_repo def start_backup(self, folders: List[str], tag: str) -> Snapshot: """ Start backup with restic @@ -165,8 +187,10 @@ class ResticBackupper(AbstractBackupper): raise ValueError("No service with id ", tag) job = get_backup_job(service) + output = [] try: for raw_message in output_yielder(backup_command): + output.append(raw_message) message = self.parse_message( raw_message, job, @@ -177,7 +201,13 @@ class ResticBackupper(AbstractBackupper): tag, ) except ValueError as error: - raise ValueError("Could not create a snapshot: ", messages) from error + raise ValueError( + "Could not create a snapshot: ", + str(error), + output, + "parsed messages:", + messages, + ) from error @staticmethod def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 9743567..b575b5b 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -773,3 +773,9 @@ def test_double_lock_unlock(backups, dummy_service): Backups.provider().backupper.unlock() Backups.provider().backupper.unlock() + + +def test_operations_while_locked(backups, dummy_service): + Backups.provider().backupper.lock() + snap = Backups.back_up(dummy_service) + assert snap is not None From 26ab7b4d7b9b9f7670a5bb9d085044e664160200 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 9 Aug 2023 13:58:53 +0000 Subject: [PATCH 502/537] fix(backups): robustness against stale locks: is_initted --- .../backup/backuppers/restic_backupper.py | 5 ++++- tests/test_graphql/test_backup.py | 15 +++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 6c3dbcc..022bda7 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -256,6 +256,7 @@ class ResticBackupper(AbstractBackupper): if "created restic repository" not in output: raise ValueError("cannot init a repo: " + output) + @unlocked_repo def is_initted(self) -> bool: command = self.restic_command( "check", @@ -267,9 +268,10 @@ class ResticBackupper(AbstractBackupper): shell=False, stderr=subprocess.STDOUT, ) as handle: - # communication forces to complete and for returncode to get defined output = handle.communicate()[0].decode("utf-8") if handle.returncode != 0: + if "unable to create lock" in output: + raise ValueError("Stale lock detected: ", output) return False return True @@ -319,6 +321,7 @@ class ResticBackupper(AbstractBackupper): except Exception as e: raise ValueError("could not lock repository") from e + @unlocked_repo def restored_size(self, snapshot_id: str) -> int: """ Size of a snapshot diff --git 
a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index b575b5b..da4da7a 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -776,6 +776,21 @@ def test_double_lock_unlock(backups, dummy_service): def test_operations_while_locked(backups, dummy_service): + # Stale lock prevention test + + # consider making it fully at the level of backupper? + # because this is where prevention lives? + # Backups singleton is here only so that we can run this against B2, S3 and whatever + # But maybe it is not necessary (if restic treats them uniformly enough) + Backups.provider().backupper.lock() snap = Backups.back_up(dummy_service) assert snap is not None + + Backups.provider().backupper.lock() + # using lowlevel to make sure no caching interferes + assert Backups.provider().backupper.is_initted() is True + + # check that no locks were left + Backups.provider().backupper.lock() + Backups.provider().backupper.unlock() From 0eb70e1551f530853ea9a99b41a9d37efa033768 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 9 Aug 2023 14:46:27 +0000 Subject: [PATCH 503/537] fix(backups): robustness against stale locks: snapshot sizing --- selfprivacy_api/backup/backuppers/restic_backupper.py | 6 +++++- tests/test_graphql/test_backup.py | 3 +++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 022bda7..5db9f11 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -335,6 +335,7 @@ class ResticBackupper(AbstractBackupper): with subprocess.Popen( command, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, shell=False, ) as handle: output = handle.communicate()[0].decode("utf-8") @@ -382,7 +383,10 @@ class ResticBackupper(AbstractBackupper): ) with subprocess.Popen( - restore_command, stdout=subprocess.PIPE, shell=False + restore_command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + shell=False, ) as handle: # for some reason restore does not support diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index da4da7a..556b72b 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -791,6 +791,9 @@ def test_operations_while_locked(backups, dummy_service): # using lowlevel to make sure no caching interferes assert Backups.provider().backupper.is_initted() is True + Backups.provider().backupper.lock() + assert Backups.snapshot_restored_size(snap.id) > 0 + # check that no locks were left Backups.provider().backupper.lock() Backups.provider().backupper.unlock() From 2c9011cc87f5fb3c337627133d9cb6ac00bce56e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 9 Aug 2023 15:18:20 +0000 Subject: [PATCH 504/537] fix(backups): robustness against stale locks: everything else --- selfprivacy_api/backup/backuppers/restic_backupper.py | 10 +++++----- tests/test_graphql/test_backup.py | 9 +++++++++ 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 5db9f11..3a5fc49 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -345,6 +345,7 @@ class ResticBackupper(AbstractBackupper): except ValueError as error: raise ValueError("cannot restore a snapshot: " + output) from error + @unlocked_repo def 
restore_from_backup( self, snapshot_id, @@ -406,6 +407,7 @@ class ResticBackupper(AbstractBackupper): output, ) + @unlocked_repo def forget_snapshot(self, snapshot_id) -> None: """ Either removes snapshot or marks it for deletion later, @@ -441,10 +443,7 @@ class ResticBackupper(AbstractBackupper): ) # none should be impossible after communicate if handle.returncode != 0: raise ValueError( - "forget exited with errorcode", - handle.returncode, - ":", - output, + "forget exited with errorcode", handle.returncode, ":", output, err ) def _load_snapshots(self) -> object: @@ -470,8 +469,9 @@ class ResticBackupper(AbstractBackupper): try: return ResticBackupper.parse_json_output(output) except ValueError as error: - raise ValueError("Cannot load snapshots: ") from error + raise ValueError("Cannot load snapshots: ", output) from error + @unlocked_repo def get_snapshots(self) -> List[Snapshot]: """Get all snapshots from the repo""" snapshots = [] diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 556b72b..1990ef7 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -794,6 +794,15 @@ def test_operations_while_locked(backups, dummy_service): Backups.provider().backupper.lock() assert Backups.snapshot_restored_size(snap.id) > 0 + Backups.provider().backupper.lock() + Backups.restore_snapshot(snap) + + Backups.provider().backupper.lock() + Backups.forget_snapshot(snap) + + Backups.provider().backupper.lock() + assert Backups.provider().backupper.get_snapshots() == [] + # check that no locks were left Backups.provider().backupper.lock() Backups.provider().backupper.unlock() From 69f6e62877dcaafed3dec109767cbcd52c1d1ca7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 14 Aug 2023 11:50:59 +0000 Subject: [PATCH 505/537] test(backups): more checks regarding tmpdirs and mounting --- .../backup/backuppers/restic_backupper.py | 13 +++++++++---- tests/test_graphql/test_backup.py | 9 +++++++++ 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 3a5fc49..418aa5b 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -126,7 +126,9 @@ class ResticBackupper(AbstractBackupper): output, ) - def mount_repo(self, mount_directory): + def mount_repo(self, mount_directory: str) -> subprocess.Popen: + if not exists(mount_directory): + raise FileNotFoundError("no such directory to mount at: ", mount_directory) mount_command = self.restic_command("mount", mount_directory) mount_command.insert(0, "nohup") handle = subprocess.Popen( @@ -139,7 +141,7 @@ class ResticBackupper(AbstractBackupper): raise IOError("failed to mount dir ", mount_directory) return handle - def unmount_repo(self, mount_directory): + def unmount_repo(self, mount_directory: str) -> None: mount_command = ["umount", "-l", mount_directory] with subprocess.Popen( mount_command, stdout=subprocess.PIPE, shell=False @@ -147,10 +149,10 @@ class ResticBackupper(AbstractBackupper): output = handle.communicate()[0].decode("utf-8") # TODO: check for exit code? 
if "error" in output.lower(): - return IOError("failed to unmount dir ", mount_directory, ": ", output) + raise IOError("failed to unmount dir ", mount_directory, ": ", output) if not listdir(mount_directory) == []: - return IOError("failed to unmount dir ", mount_directory) + raise IOError("failed to unmount dir ", mount_directory) @staticmethod def __flatten_list(list_to_flatten): @@ -363,6 +365,9 @@ class ResticBackupper(AbstractBackupper): self._raw_verified_restore(snapshot_id, target=temp_dir) snapshot_root = temp_dir else: # attempting inplace restore via mount + sync + assert exists( + temp_dir + ) # paranoid check, TemporaryDirectory is supposedly created self.mount_repo(temp_dir) snapshot_root = join(temp_dir, "ids", snapshot_id) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 1990ef7..9736f91 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -8,6 +8,8 @@ from os import urandom from datetime import datetime, timedelta, timezone from subprocess import Popen +import tempfile + import selfprivacy_api.services as services from selfprivacy_api.services import Service, get_all_services @@ -806,3 +808,10 @@ def test_operations_while_locked(backups, dummy_service): # check that no locks were left Backups.provider().backupper.lock() Backups.provider().backupper.unlock() + + +# a paranoid check to weed out problems with tempdirs that are not dependent on us +def test_tempfile(): + with tempfile.TemporaryDirectory() as temp: + assert path.exists(temp) + assert not path.exists(temp) From c89f9cf89d06aeee6997e80b0376dd50a86e373b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 14 Aug 2023 12:43:44 +0000 Subject: [PATCH 506/537] feature(backups): do not rely on mounting --- .../backup/backuppers/restic_backupper.py | 33 +++++++++---------- tests/test_graphql/test_backup.py | 11 +++++++ 2 files changed, 27 insertions(+), 17 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 418aa5b..a935520 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -9,8 +9,9 @@ from typing import List, TypeVar, Callable from collections.abc import Iterable from json.decoder import JSONDecodeError from os.path import exists, join -from os import listdir +from os import listdir, mkdir from time import sleep +from shutil import rmtree from selfprivacy_api.backup.util import output_yielder, sync from selfprivacy_api.backup.backuppers import AbstractBackupper @@ -364,23 +365,21 @@ class ResticBackupper(AbstractBackupper): if verify: self._raw_verified_restore(snapshot_id, target=temp_dir) snapshot_root = temp_dir - else: # attempting inplace restore via mount + sync - assert exists( - temp_dir - ) # paranoid check, TemporaryDirectory is supposedly created - self.mount_repo(temp_dir) - snapshot_root = join(temp_dir, "ids", snapshot_id) + for folder in folders: + src = join(snapshot_root, folder.strip("/")) + if not exists(src): + raise ValueError( + f"No such path: {src}. We tried to find {folder}" + ) + dst = folder + sync(src, dst) - assert snapshot_root is not None - for folder in folders: - src = join(snapshot_root, folder.strip("/")) - if not exists(src): - raise ValueError(f"No such path: {src}. 
We tried to find {folder}") - dst = folder - sync(src, dst) - - if not verify: - self.unmount_repo(temp_dir) + else: # attempting inplace restore + for folder in folders: + rmtree(folder) + mkdir(folder) + self._raw_verified_restore(snapshot_id, target="/") + return def _raw_verified_restore(self, snapshot_id, target="/"): """barebones restic restore""" diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 9736f91..25eaaf4 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -746,6 +746,17 @@ def test_mount_umount(backups, dummy_service, tmpdir): assert len(listdir(mountpoint)) == 0 +def test_mount_nonexistent(backups, dummy_service, tmpdir): + backupper = Backups.provider().backupper + assert isinstance(backupper, ResticBackupper) + + mountpoint = tmpdir / "nonexistent" + assert not path.exists(mountpoint) + + with pytest.raises(FileNotFoundError): + handle = backupper.mount_repo(mountpoint) + + def test_move_blocks_backups(backups, dummy_service, restore_strategy): snap = Backups.back_up(dummy_service) job = Jobs.add( From d621ca64497a67b72b194a1d5002b0a764e56655 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 14 Aug 2023 12:50:45 +0000 Subject: [PATCH 507/537] refactor(backups): clean up unused mounting tools --- .../backup/backuppers/restic_backupper.py | 28 ----------------- tests/test_graphql/test_backup.py | 30 ------------------- 2 files changed, 58 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index a935520..e954b65 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -127,34 +127,6 @@ class ResticBackupper(AbstractBackupper): output, ) - def mount_repo(self, mount_directory: str) -> subprocess.Popen: - if not exists(mount_directory): - raise FileNotFoundError("no such directory to mount at: ", mount_directory) - mount_command = self.restic_command("mount", mount_directory) - mount_command.insert(0, "nohup") - handle = subprocess.Popen( - mount_command, - stdout=subprocess.DEVNULL, - shell=False, - ) - sleep(2) - if "ids" not in listdir(mount_directory): - raise IOError("failed to mount dir ", mount_directory) - return handle - - def unmount_repo(self, mount_directory: str) -> None: - mount_command = ["umount", "-l", mount_directory] - with subprocess.Popen( - mount_command, stdout=subprocess.PIPE, shell=False - ) as handle: - output = handle.communicate()[0].decode("utf-8") - # TODO: check for exit code? 
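# (unmount_repo continues below; both mount helpers became dead code once
# the previous patch switched inplace restore from mount + sync over to
# rmtree + a plain restic restore, so they are deleted along with their tests)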
- if "error" in output.lower(): - raise IOError("failed to unmount dir ", mount_directory, ": ", output) - - if not listdir(mount_directory) == []: - raise IOError("failed to unmount dir ", mount_directory) - @staticmethod def __flatten_list(list_to_flatten): """string-aware list flattener""" diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 25eaaf4..6878de1 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -727,36 +727,6 @@ def test_sync_nonexistent_src(dummy_service): sync(src, dst) -# Restic lowlevel -def test_mount_umount(backups, dummy_service, tmpdir): - Backups.back_up(dummy_service) - backupper = Backups.provider().backupper - assert isinstance(backupper, ResticBackupper) - - mountpoint = tmpdir / "mount" - makedirs(mountpoint) - assert path.exists(mountpoint) - assert len(listdir(mountpoint)) == 0 - - handle = backupper.mount_repo(mountpoint) - assert len(listdir(mountpoint)) != 0 - - backupper.unmount_repo(mountpoint) - # handle.terminate() - assert len(listdir(mountpoint)) == 0 - - -def test_mount_nonexistent(backups, dummy_service, tmpdir): - backupper = Backups.provider().backupper - assert isinstance(backupper, ResticBackupper) - - mountpoint = tmpdir / "nonexistent" - assert not path.exists(mountpoint) - - with pytest.raises(FileNotFoundError): - handle = backupper.mount_repo(mountpoint) - - def test_move_blocks_backups(backups, dummy_service, restore_strategy): snap = Backups.back_up(dummy_service) job = Jobs.add( From 027a37bb47040266edd09d2cab9469dad6ea3a6f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 21 Aug 2023 11:11:56 +0000 Subject: [PATCH 508/537] feature(backup): remember the reason for making a snapshot --- selfprivacy_api/backup/__init__.py | 15 ++-- selfprivacy_api/backup/backuppers/__init__.py | 8 +- .../backup/backuppers/none_backupper.py | 5 +- .../backup/backuppers/restic_backupper.py | 81 ++++++++++--------- selfprivacy_api/backup/tasks.py | 10 ++- .../graphql/common_types/backup.py | 7 ++ selfprivacy_api/models/backup/snapshot.py | 3 + tests/test_graphql/test_backup.py | 7 +- 8 files changed, 85 insertions(+), 51 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index c28c01f..3b141fa 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -23,7 +23,7 @@ from selfprivacy_api.jobs import Jobs, JobStatus, Job from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason from selfprivacy_api.models.backup.snapshot import Snapshot @@ -264,10 +264,12 @@ class Backups: # Backup @staticmethod - def back_up(service: Service) -> Snapshot: + def back_up( + service: Service, reason: BackupReason = BackupReason.EXPLICIT + ) -> Snapshot: """The top-level function to back up a service""" folders = service.get_folders() - tag = service.get_id() + service_name = service.get_id() job = get_backup_job(service) if job is None: @@ -278,9 +280,10 @@ class Backups: service.pre_backup() snapshot = Backups.provider().backupper.start_backup( folders, - tag, + service_name, + reason=reason, ) - Backups._store_last_snapshot(tag, snapshot) + Backups._store_last_snapshot(service_name, snapshot) service.post_restore() except Exception as error: Jobs.update(job, status=JobStatus.ERROR) @@ -306,7 +309,7 @@ class Backups: snapshot: 
Snapshot, job: Job, ) -> None: - failsafe_snapshot = Backups.back_up(service) + failsafe_snapshot = Backups.back_up(service, BackupReason.PRE_RESTORE) Jobs.update(job, status=JobStatus.RUNNING) try: diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index ccf78b9..0067a41 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -2,6 +2,7 @@ from abc import ABC, abstractmethod from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.graphql.common_types.backup import BackupReason class AbstractBackupper(ABC): @@ -22,7 +23,12 @@ class AbstractBackupper(ABC): raise NotImplementedError @abstractmethod - def start_backup(self, folders: List[str], tag: str) -> Snapshot: + def start_backup( + self, + folders: List[str], + service_name: str, + reason: BackupReason = BackupReason.EXPLICIT, + ) -> Snapshot: """Start a backup of the given folders""" raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index 3f9f7fd..429d9ab 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -2,6 +2,7 @@ from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.backup.backuppers import AbstractBackupper +from selfprivacy_api.graphql.common_types.backup import BackupReason class NoneBackupper(AbstractBackupper): @@ -13,7 +14,9 @@ class NoneBackupper(AbstractBackupper): def set_creds(self, account: str, key: str, repo: str): pass - def start_backup(self, folders: List[str], tag: str): + def start_backup( + self, folders: List[str], tag: str, reason: BackupReason = BackupReason.EXPLICIT + ): raise NotImplementedError def get_snapshots(self) -> List[Snapshot]: diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 3a5fc49..f5467ff 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -12,6 +12,7 @@ from os.path import exists, join from os import listdir from time import sleep +from selfprivacy_api.graphql.common_types.backup import BackupReason from selfprivacy_api.backup.util import output_yielder, sync from selfprivacy_api.backup.backuppers import AbstractBackupper from selfprivacy_api.models.backup.snapshot import Snapshot @@ -84,7 +85,7 @@ class ResticBackupper(AbstractBackupper): def _password_command(self): return f"echo {LocalBackupSecret.get()}" - def restic_command(self, *args, tag: str = "") -> List[str]: + def restic_command(self, *args, tags: List[str] = []) -> List[str]: command = [ "restic", "-o", @@ -94,13 +95,14 @@ class ResticBackupper(AbstractBackupper): "--password-command", self._password_command(), ] - if tag != "": - command.extend( - [ - "--tag", - tag, - ] - ) + if tags != []: + for tag in tags: + command.extend( + [ + "--tag", + tag, + ] + ) if args: command.extend(ResticBackupper.__flatten_list(args)) return command @@ -164,7 +166,12 @@ class ResticBackupper(AbstractBackupper): return result @unlocked_repo - def start_backup(self, folders: List[str], tag: str) -> Snapshot: + def start_backup( + self, + folders: List[str], + service_name: str, + reason: BackupReason = BackupReason.EXPLICIT, + ) -> Snapshot: """ Start backup with restic """ @@ -173,33 +180,35 @@ class 
ResticBackupper(AbstractBackupper): # of a string and an array of strings assert not isinstance(folders, str) + tags = [service_name, reason.value] + backup_command = self.restic_command( "backup", "--json", folders, - tag=tag, + tags=tags, ) - messages = [] - - service = get_service_by_id(tag) + service = get_service_by_id(service_name) if service is None: - raise ValueError("No service with id ", tag) - + raise ValueError("No service with id ", service_name) job = get_backup_job(service) + + messages = [] output = [] try: for raw_message in output_yielder(backup_command): output.append(raw_message) - message = self.parse_message( - raw_message, - job, - ) + message = self.parse_message(raw_message, job) messages.append(message) - return ResticBackupper._snapshot_from_backup_messages( - messages, - tag, + id = ResticBackupper._snapshot_id_from_backup_messages(messages) + return Snapshot( + created_at=datetime.datetime.now(datetime.timezone.utc), + id=id, + service_name=service_name, + reason=reason, ) + except ValueError as error: raise ValueError( "Could not create a snapshot: ", @@ -210,13 +219,13 @@ class ResticBackupper(AbstractBackupper): ) from error @staticmethod - def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: + def _snapshot_id_from_backup_messages(messages) -> Snapshot: for message in messages: if message["message_type"] == "summary": - return ResticBackupper._snapshot_from_fresh_summary( - message, - repo_name, - ) + # There is a discrepancy between versions of restic/rclone + # Some report short_id in this field and some full + return message["snapshot_id"][0:SHORT_ID_LEN] + raise ValueError("no summary message in restic json output") def parse_message(self, raw_message_line: str, job=None) -> dict: @@ -232,16 +241,6 @@ class ResticBackupper(AbstractBackupper): ) return message - @staticmethod - def _snapshot_from_fresh_summary(message: dict, repo_name) -> Snapshot: - return Snapshot( - # There is a discrepancy between versions of restic/rclone - # Some report short_id in this field and some full - id=message["snapshot_id"][0:SHORT_ID_LEN], - created_at=datetime.datetime.now(datetime.timezone.utc), - service_name=repo_name, - ) - def init(self) -> None: init_command = self.restic_command( "init", @@ -475,11 +474,19 @@ class ResticBackupper(AbstractBackupper): def get_snapshots(self) -> List[Snapshot]: """Get all snapshots from the repo""" snapshots = [] + for restic_snapshot in self._load_snapshots(): + # Compatibility with previous snaps: + if len(restic_snapshot["tags"]) == 1: + reason = BackupReason.EXPLICIT + else: + reason = restic_snapshot["tags"][1] + snapshot = Snapshot( id=restic_snapshot["short_id"], created_at=restic_snapshot["time"], service_name=restic_snapshot["tags"][0], + reason=reason, ) snapshots.append(snapshot) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index db350d4..546b27c 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -3,7 +3,7 @@ The tasks module contains the worker tasks that are used to back up and restore """ from datetime import datetime, timezone -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey @@ -22,11 +22,13 @@ def validate_datetime(dt: datetime) -> bool: # huey tasks need to return something @huey.task() -def start_backup(service: 
Service) -> bool: +def start_backup( + service: Service, reason: BackupReason = BackupReason.EXPLICIT +) -> bool: """ The worker task that starts the backup process. """ - Backups.back_up(service) + Backups.back_up(service, reason) return True @@ -49,4 +51,4 @@ def automatic_backup(): """ time = datetime.utcnow().replace(tzinfo=timezone.utc) for service in Backups.services_to_back_up(time): - start_backup(service) + start_backup(service, BackupReason.AUTO) diff --git a/selfprivacy_api/graphql/common_types/backup.py b/selfprivacy_api/graphql/common_types/backup.py index 992363b..9eaef12 100644 --- a/selfprivacy_api/graphql/common_types/backup.py +++ b/selfprivacy_api/graphql/common_types/backup.py @@ -8,3 +8,10 @@ from enum import Enum class RestoreStrategy(Enum): INPLACE = "INPLACE" DOWNLOAD_VERIFY_OVERWRITE = "DOWNLOAD_VERIFY_OVERWRITE" + + +@strawberry.enum +class BackupReason(Enum): + EXPLICIT = "EXPLICIT" + AUTO = "AUTO" + PRE_RESTORE = "PRE_RESTORE" diff --git a/selfprivacy_api/models/backup/snapshot.py b/selfprivacy_api/models/backup/snapshot.py index 9893f03..28ad661 100644 --- a/selfprivacy_api/models/backup/snapshot.py +++ b/selfprivacy_api/models/backup/snapshot.py @@ -1,8 +1,11 @@ import datetime from pydantic import BaseModel +from selfprivacy_api.graphql.common_types.backup import BackupReason + class Snapshot(BaseModel): id: str service_name: str created_at: datetime.datetime + reason: BackupReason diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 1990ef7..fdb8497 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -14,7 +14,7 @@ from selfprivacy_api.services import Service, get_all_services from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.graphql.queries.providers import BackupProvider -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.models.backup.snapshot import Snapshot @@ -428,7 +428,10 @@ def test_forget_snapshot(backups, dummy_service): def test_forget_nonexistent_snapshot(backups, dummy_service): bogus = Snapshot( - id="gibberjibber", service_name="nohoho", created_at=datetime.now(timezone.utc) + id="gibberjibber", + service_name="nohoho", + created_at=datetime.now(timezone.utc), + reason=BackupReason.EXPLICIT, ) with pytest.raises(ValueError): Backups.forget_snapshot(bogus) From 1b9761293cea920a3e39ce606173412eba30758f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 21 Aug 2023 11:30:35 +0000 Subject: [PATCH 509/537] test(backup): test reasons --- tests/test_graphql/test_backup.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index fdb8497..16933b8 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -286,6 +286,16 @@ def test_backup_returns_snapshot(backups, dummy_service): assert Backups.get_snapshot_by_id(snapshot.id) is not None assert snapshot.service_name == name assert snapshot.created_at is not None + assert snapshot.reason == BackupReason.EXPLICIT + + +def test_backup_reasons(backups, dummy_service): + snap = Backups.back_up(dummy_service, BackupReason.AUTO) + assert snap.reason == BackupReason.AUTO + + Backups.force_snapshot_cache_reload() + snaps = Backups.get_snapshots(dummy_service) + 
assert snaps[0].reason == BackupReason.AUTO def folder_files(folder): @@ -495,6 +505,8 @@ def test_restore_snapshot_task( snaps = Backups.get_snapshots(dummy_service) if restore_strategy == RestoreStrategy.INPLACE: assert len(snaps) == 2 + reasons = [snap.reason for snap in snaps] + assert BackupReason.PRE_RESTORE in reasons else: assert len(snaps) == 1 From 30b62c351aba37c60edb81aa8f219c1dad2fd6a4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 21 Aug 2023 11:31:29 +0000 Subject: [PATCH 510/537] feature(redis): compatibility with str enums --- selfprivacy_api/utils/redis_model_storage.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/selfprivacy_api/utils/redis_model_storage.py b/selfprivacy_api/utils/redis_model_storage.py index 51faff7..06dfe8c 100644 --- a/selfprivacy_api/utils/redis_model_storage.py +++ b/selfprivacy_api/utils/redis_model_storage.py @@ -1,11 +1,14 @@ from datetime import datetime from typing import Optional +from enum import Enum def store_model_as_hash(redis, redis_key, model): for key, value in model.dict().items(): if isinstance(value, datetime): value = value.isoformat() + if isinstance(value, Enum): + value = value.value redis.hset(redis_key, key, str(value)) From b2c7e8b73a3cf59ddd67206da6314e120f3de7ee Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 21 Aug 2023 12:45:31 +0000 Subject: [PATCH 511/537] feature(backups): caps for autobackups --- selfprivacy_api/backup/__init__.py | 37 ++++++++++++++++++++++++++++++ selfprivacy_api/backup/storage.py | 13 +++++++++++ tests/test_graphql/test_backup.py | 25 ++++++++++++++++++++ 3 files changed, 75 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 3b141fa..b16f089 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -283,7 +283,10 @@ class Backups: service_name, reason=reason, ) + Backups._store_last_snapshot(service_name, snapshot) + if reason == BackupReason.AUTO: + Backups._prune_auto_snaps(service) service.post_restore() except Exception as error: Jobs.update(job, status=JobStatus.ERROR) @@ -292,6 +295,40 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) return snapshot + @staticmethod + def _auto_snaps(service): + return [ + snap + for snap in Backups.get_snapshots(service) + if snap.reason == BackupReason.AUTO + ] + + @staticmethod + def _prune_auto_snaps(service) -> None: + max = Backups.max_auto_snapshots() + if max == -1: + return + + auto_snaps = Backups._auto_snaps(service) + if len(auto_snaps) > max: + n_to_kill = len(auto_snaps) - max + sorted_snaps = sorted(auto_snaps, key=lambda s: s.created_at) + snaps_to_kill = sorted_snaps[:n_to_kill] + for snap in snaps_to_kill: + Backups.forget_snapshot(snap) + + @staticmethod + def set_max_auto_snapshots(value: int) -> None: + """everything <=0 means unlimited""" + if value <= 0: + value = -1 + Storage.set_max_auto_snapshots(value) + + @staticmethod + def max_auto_snapshots() -> int: + """-1 means unlimited""" + return Storage.max_auto_snapshots() + # Restoring @staticmethod diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index d46f584..1a0091f 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -26,6 +26,7 @@ REDIS_INITTED_CACHE = "backups:repo_initted" REDIS_PROVIDER_KEY = "backups:provider" REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" +REDIS_AUTOBACKUP_MAX_KEY = "backups:autobackup_cap" redis = RedisPool().get_connection() @@ -39,6 +40,7 @@ class Storage: 
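# (Pattern note: every new backups setting key added to storage.py also
         # gets a delete in this reset helper, so wiping the stored
         # configuration clears it as well; the autobackup quotas key receives
         # the same treatment in a later patch.)
         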
redis.delete(REDIS_PROVIDER_KEY)
         redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY)
         redis.delete(REDIS_INITTED_CACHE)
+        redis.delete(REDIS_AUTOBACKUP_MAX_KEY)
 
         prefixes_to_clean = [
             REDIS_SNAPSHOTS_PREFIX,
@@ -175,3 +177,14 @@
     def mark_as_uninitted():
         """Marks the repository as not initialized"""
         redis.delete(REDIS_INITTED_CACHE)
+
+    @staticmethod
+    def set_max_auto_snapshots(value: int):
+        redis.set(REDIS_AUTOBACKUP_MAX_KEY, value)
+
+    @staticmethod
+    def max_auto_snapshots():
+        if redis.exists(REDIS_AUTOBACKUP_MAX_KEY):
+            return int(redis.get(REDIS_AUTOBACKUP_MAX_KEY))
+        else:
+            return -1
diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py
index 16933b8..781468a 100644
--- a/tests/test_graphql/test_backup.py
+++ b/tests/test_graphql/test_backup.py
@@ -298,6 +298,31 @@ def test_backup_reasons(backups, dummy_service):
     assert snaps[0].reason == BackupReason.AUTO
 
 
+def test_too_many_auto(backups, dummy_service):
+    assert Backups.max_auto_snapshots() == -1
+    Backups.set_max_auto_snapshots(2)
+    assert Backups.max_auto_snapshots() == 2
+
+    snap = Backups.back_up(dummy_service, BackupReason.AUTO)
+    assert len(Backups.get_snapshots(dummy_service)) == 1
+    snap2 = Backups.back_up(dummy_service, BackupReason.AUTO)
+    assert len(Backups.get_snapshots(dummy_service)) == 2
+    snap3 = Backups.back_up(dummy_service, BackupReason.AUTO)
+    assert len(Backups.get_snapshots(dummy_service)) == 2
+
+    snaps = Backups.get_snapshots(dummy_service)
+
+    assert snap2 in snaps
+    assert snap3 in snaps
+    assert snap not in snaps
+
+    Backups.set_max_auto_snapshots(-1)
+    snap4 = Backups.back_up(dummy_service, BackupReason.AUTO)
+    snaps = Backups.get_snapshots(dummy_service)
+    assert len(snaps) == 3
+    assert snap4 in snaps
+
+
 def folder_files(folder):
     return [
         path.join(folder, filename)
From d6cf2abdc23444e737bde58dd8b47f331c5f739b Mon Sep 17 00:00:00 2001
From: Inex Code
Date: Wed, 23 Aug 2023 14:51:01 +0300
Subject: [PATCH 512/537] style: remove unused imports

---
 .../backup/backuppers/restic_backupper.py | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py
index e954b65..f508368 100644
--- a/selfprivacy_api/backup/backuppers/restic_backupper.py
+++ b/selfprivacy_api/backup/backuppers/restic_backupper.py
@@ -9,8 +9,7 @@ from typing import List, TypeVar, Callable
 from collections.abc import Iterable
 from json.decoder import JSONDecodeError
 from os.path import exists, join
-from os import listdir, mkdir
-from time import sleep
+from os import mkdir
 from shutil import rmtree
 
 from selfprivacy_api.backup.util import output_yielder, sync
@@ -33,12 +32,12 @@ def unlocked_repo(func: T) -> T:
     def inner(self: ResticBackupper, *args, **kwargs):
         try:
             return func(self, *args, **kwargs)
-        except Exception as e:
-            if "unable to create lock" in str(e):
+        except Exception as error:
+            if "unable to create lock" in str(error):
                 self.unlock()
                 return func(self, *args, **kwargs)
             else:
-                raise e
+                raise error
 
     # Above, we manually guarantee that the type returned is compatible.
     
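# For context: repo operations such as forget_snapshot() and
     # get_snapshots() are wrapped with this decorator, so a restic call that
     # fails on a stale lock is retried exactly once after unlock() clears it.
     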
return inner # type: ignore @@ -293,8 +292,8 @@ class ResticBackupper(AbstractBackupper): break if "unable" in line: raise ValueError(line) - except Exception as e: - raise ValueError("could not lock repository") from e + except Exception as error: + raise ValueError("could not lock repository") from error @unlocked_repo def restored_size(self, snapshot_id: str) -> int: From f2c972ed5f0790d9a3eb4271c3114bdbf9c3acd3 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 23 Aug 2023 14:51:15 +0300 Subject: [PATCH 513/537] chore: bump version --- selfprivacy_api/dependencies.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index fb974e8..095d087 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.3.0" + return "2.3.1" diff --git a/setup.py b/setup.py index 684f54f..99f0679 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.3.0", + version="2.3.1", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From 0a852d8b509cd95edaad498a85efa4f7191795db Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 23 Aug 2023 13:39:12 +0000 Subject: [PATCH 514/537] fix(backups): consider failing services MORE and not try to stop them --- selfprivacy_api/services/service.py | 2 +- selfprivacy_api/services/test_service/__init__.py | 8 ++++++-- tests/test_graphql/test_backup.py | 12 +++++++++++- 3 files changed, 18 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 30e810f..e6f51d3 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -283,7 +283,7 @@ class StoppedService: def __enter__(self) -> Service: self.original_status = self.service.get_status() - if self.original_status != ServiceStatus.INACTIVE: + if self.original_status not in [ServiceStatus.INACTIVE, ServiceStatus.FAILED]: self.service.stop() wait_until_true( lambda: self.service.get_status() == ServiceStatus.INACTIVE, diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 967b32e..6ae33ef 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -135,8 +135,12 @@ class DummyService(Service): @classmethod def stop(cls): - cls.set_status(ServiceStatus.DEACTIVATING) - cls.change_status_with_async_delay(ServiceStatus.INACTIVE, cls.startstop_delay) + # simulate a failing service unable to stop + if not cls.get_status() == ServiceStatus.FAILED: + cls.set_status(ServiceStatus.DEACTIVATING) + cls.change_status_with_async_delay( + ServiceStatus.INACTIVE, cls.startstop_delay + ) @classmethod def start(cls): diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 1990ef7..f66398d 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -10,6 +10,7 @@ from subprocess import Popen import selfprivacy_api.services as services from selfprivacy_api.services import Service, get_all_services +from selfprivacy_api.services.service import ServiceStatus from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.test_service import DummyService @@ -462,10 +463,19 @@ def restore_strategy(request) -> 
RestoreStrategy: return RestoreStrategy.INPLACE +@pytest.fixture(params=["failed", "healthy"]) +def failed(request) -> bool: + if request.param == "failed": + return True + return False + + def test_restore_snapshot_task( - backups, dummy_service, restore_strategy, simulated_service_stopping_delay + backups, dummy_service, restore_strategy, simulated_service_stopping_delay, failed ): dummy_service.set_delay(simulated_service_stopping_delay) + if failed: + dummy_service.set_status(ServiceStatus.FAILED) Backups.back_up(dummy_service) snaps = Backups.get_snapshots(dummy_service) From 72535f86558ff90e7ad9ea964a282ec9a0044099 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 23 Aug 2023 13:40:04 +0000 Subject: [PATCH 515/537] fix(backups): default timeout to 5 min for service starting and stopping in backup operations --- selfprivacy_api/services/service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index e6f51d3..e01501b 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -13,7 +13,7 @@ from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api import utils from selfprivacy_api.utils.waitloop import wait_until_true -DEFAULT_START_STOP_TIMEOUT = 10 * 60 +DEFAULT_START_STOP_TIMEOUT = 5 * 60 class ServiceStatus(Enum): From de52dffddadab70dc09cf5fcd853324f215c9ad7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 23 Aug 2023 13:55:23 +0000 Subject: [PATCH 516/537] refactor(backups): a better backup-related service timeout error --- selfprivacy_api/services/service.py | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index e01501b..0f018b6 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -284,17 +284,23 @@ class StoppedService: def __enter__(self) -> Service: self.original_status = self.service.get_status() if self.original_status not in [ServiceStatus.INACTIVE, ServiceStatus.FAILED]: - self.service.stop() - wait_until_true( - lambda: self.service.get_status() == ServiceStatus.INACTIVE, - timeout_sec=DEFAULT_START_STOP_TIMEOUT, - ) + try: + self.service.stop() + wait_until_true( + lambda: self.service.get_status() == ServiceStatus.INACTIVE, + timeout_sec=DEFAULT_START_STOP_TIMEOUT, + ) + except TimeoutError as e: + raise TimeoutError(f"timed out waiting for {self.service.get_display_name()} to stop") return self.service def __exit__(self, type, value, traceback): if self.original_status in [ServiceStatus.ACTIVATING, ServiceStatus.ACTIVE]: - self.service.start() - wait_until_true( - lambda: self.service.get_status() == ServiceStatus.ACTIVE, - timeout_sec=DEFAULT_START_STOP_TIMEOUT, - ) + try: + self.service.start() + wait_until_true( + lambda: self.service.get_status() == ServiceStatus.ACTIVE, + timeout_sec=DEFAULT_START_STOP_TIMEOUT, + ) + except TimeoutError as e: + raise TimeoutError(f"timed out waiting for {self.service.get_display_name()} to start") From 02b03cf401c88e348cb6c2c9582b4d1f9556bf76 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 23 Aug 2023 14:02:07 +0000 Subject: [PATCH 517/537] feature(backups): report the error text in a job --- selfprivacy_api/backup/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index c28c01f..167ff33 100644 --- 
a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -283,7 +283,7 @@ class Backups: Backups._store_last_snapshot(tag, snapshot) service.post_restore() except Exception as error: - Jobs.update(job, status=JobStatus.ERROR) + Jobs.update(job, status=JobStatus.ERROR, status_text=str(error)) raise error Jobs.update(job, status=JobStatus.FINISHED) @@ -348,7 +348,7 @@ class Backups: service.post_restore() except Exception as error: - Jobs.update(job, status=JobStatus.ERROR) + Jobs.update(job, status=JobStatus.ERROR, status_text=str(error)) raise error Jobs.update(job, status=JobStatus.FINISHED) From c68239044f8c5983a68ae0606ebb9f5ccd07d56c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 23 Aug 2023 14:18:33 +0000 Subject: [PATCH 518/537] feature(backups): report status text for restore jobs --- selfprivacy_api/backup/__init__.py | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 167ff33..0f445a8 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -308,7 +308,9 @@ class Backups: ) -> None: failsafe_snapshot = Backups.back_up(service) - Jobs.update(job, status=JobStatus.RUNNING) + Jobs.update( + job, status=JobStatus.RUNNING, status_text=f"Restoring from {snapshot.id}" + ) try: Backups._restore_service_from_snapshot( service, @@ -316,9 +318,19 @@ class Backups: verify=False, ) except Exception as error: + Jobs.update( + job, + status=JobStatus.ERROR, + status_text=f" restore failed with {str(error)}, reverting to {failsafe_snapshot.id}", + ) Backups._restore_service_from_snapshot( service, failsafe_snapshot.id, verify=False ) + Jobs.update( + job, + status=JobStatus.ERROR, + status_text=f" restore failed with {str(error)}, reverted to {failsafe_snapshot.id}", + ) raise error @staticmethod @@ -335,17 +347,30 @@ class Backups: try: Backups._assert_restorable(snapshot) + Jobs.update( + job, status=JobStatus.CREATED, status_text="stopping the service" + ) with StoppedService(service): Backups.assert_dead(service) if strategy == RestoreStrategy.INPLACE: Backups._inplace_restore(service, snapshot, job) else: # verify_before_download is our default - Jobs.update(job, status=JobStatus.RUNNING) + Jobs.update( + job, + status=JobStatus.RUNNING, + status_text=f"Restoring from {snapshot.id}", + ) Backups._restore_service_from_snapshot( service, snapshot.id, verify=True ) service.post_restore() + Jobs.update( + job, + status=JobStatus.RUNNING, + progress=90, + status_text="restarting the service", + ) except Exception as error: Jobs.update(job, status=JobStatus.ERROR, status_text=str(error)) From 1333aad57dac56dea8f0d2b990bc0a66467c9a4b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 23 Aug 2023 14:35:03 +0000 Subject: [PATCH 519/537] feature(backups): temporarily revert restore job status to created for inplace restore to run backup --- selfprivacy_api/backup/__init__.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 0f445a8..7b013f4 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -306,6 +306,9 @@ class Backups: snapshot: Snapshot, job: Job, ) -> None: + Jobs.update( + job, status=JobStatus.CREATED, status_text=f"Waiting for pre-restore backup" + ) failsafe_snapshot = Backups.back_up(service) Jobs.update( @@ -321,7 +324,7 @@ class Backups: Jobs.update( job, 
status=JobStatus.ERROR, - status_text=f" restore failed with {str(error)}, reverting to {failsafe_snapshot.id}", + status_text=f"Restore failed with {str(error)}, reverting to {failsafe_snapshot.id}", ) Backups._restore_service_from_snapshot( service, failsafe_snapshot.id, verify=False @@ -329,7 +332,7 @@ class Backups: Jobs.update( job, status=JobStatus.ERROR, - status_text=f" restore failed with {str(error)}, reverted to {failsafe_snapshot.id}", + status_text=f"Restore failed with {str(error)}, reverted to {failsafe_snapshot.id}", ) raise error @@ -348,7 +351,7 @@ class Backups: try: Backups._assert_restorable(snapshot) Jobs.update( - job, status=JobStatus.CREATED, status_text="stopping the service" + job, status=JobStatus.RUNNING, status_text="Stopping the service" ) with StoppedService(service): Backups.assert_dead(service) @@ -369,7 +372,7 @@ class Backups: job, status=JobStatus.RUNNING, progress=90, - status_text="restarting the service", + status_text="Restarting the service", ) except Exception as error: From 9db717c774ff2ddb22d81aad6930caebf53c4c83 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 25 Aug 2023 19:28:37 +0300 Subject: [PATCH 520/537] style: linting --- selfprivacy_api/services/service.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 0f018b6..b66bd19 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -290,8 +290,10 @@ class StoppedService: lambda: self.service.get_status() == ServiceStatus.INACTIVE, timeout_sec=DEFAULT_START_STOP_TIMEOUT, ) - except TimeoutError as e: - raise TimeoutError(f"timed out waiting for {self.service.get_display_name()} to stop") + except TimeoutError as error: + raise TimeoutError( + f"timed out waiting for {self.service.get_display_name()} to stop" + ) from error return self.service def __exit__(self, type, value, traceback): @@ -302,5 +304,7 @@ class StoppedService: lambda: self.service.get_status() == ServiceStatus.ACTIVE, timeout_sec=DEFAULT_START_STOP_TIMEOUT, ) - except TimeoutError as e: - raise TimeoutError(f"timed out waiting for {self.service.get_display_name()} to start") + except TimeoutError as error: + raise TimeoutError( + f"timed out waiting for {self.service.get_display_name()} to start" + ) from error From 9207f5385ca2f23f4ef240c80246499930d0afd9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 28 Aug 2023 17:02:45 +0000 Subject: [PATCH 521/537] feature(backups): actual finegrained quotas --- selfprivacy_api/backup/__init__.py | 97 +++++++- selfprivacy_api/backup/storage.py | 26 ++ .../graphql/common_types/backup.py | 14 ++ tests/test_graphql/test_backup.py | 232 ++++++++++++++++-- 4 files changed, 340 insertions(+), 29 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b16f089..73f74a9 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -23,7 +23,18 @@ from selfprivacy_api.jobs import Jobs, JobStatus, Job from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason +from selfprivacy_api.graphql.common_types.backup import ( + RestoreStrategy, + BackupReason, + AutobackupQuotas, +) +from selfprivacy_api.backup.time import ( + same_day, + same_month, + same_week, + same_year, + same_lifetime_of_the_universe, +) from selfprivacy_api.models.backup.snapshot 
import Snapshot @@ -303,20 +314,88 @@ class Backups: if snap.reason == BackupReason.AUTO ] + @staticmethod + def add_snap_but_with_quotas( + new_snap: Snapshot, snaps: List[Snapshot], quotas: AutobackupQuotas + ) -> None: + quotas_map = { + same_day: quotas.daily, + same_week: quotas.weekly, + same_month: quotas.monthly, + same_year: quotas.yearly, + same_lifetime_of_the_universe: quotas.total, + } + + snaps.append(new_snap) + + for is_same_period, quota in quotas_map.items(): + if quota <= 0: + continue + + cohort = [ + snap + for snap in snaps + if is_same_period(snap.created_at, new_snap.created_at) + ] + sorted_cohort = sorted(cohort, key=lambda s: s.created_at) + n_to_kill = len(cohort) - quota + if n_to_kill > 0: + snaps_to_kill = sorted_cohort[:n_to_kill] + for snap in snaps_to_kill: + snaps.remove(snap) + + @staticmethod + def _prune_snaps_with_quotas(snapshots: List[Snapshot]) -> List[Snapshot]: + # Function broken out for testability + sorted_snaps = sorted(snapshots, key=lambda s: s.created_at) + quotas = Backups.autobackup_quotas() + + new_snaplist: List[Snapshot] = [] + for snap in sorted_snaps: + Backups.add_snap_but_with_quotas(snap, new_snaplist, quotas) + + return new_snaplist + @staticmethod def _prune_auto_snaps(service) -> None: - max = Backups.max_auto_snapshots() - if max == -1: - return + # Not very testable by itself, so most testing is going on Backups._prune_snaps_with_quotas + # We can still test total limits and, say, daily limits auto_snaps = Backups._auto_snaps(service) - if len(auto_snaps) > max: - n_to_kill = len(auto_snaps) - max - sorted_snaps = sorted(auto_snaps, key=lambda s: s.created_at) - snaps_to_kill = sorted_snaps[:n_to_kill] - for snap in snaps_to_kill: + new_snaplist = Backups._prune_snaps_with_quotas(auto_snaps) + + # TODO: Can be optimized since there is forgetting of an array in one restic op + # but most of the time this will be only one snap to forget. 
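+        # A batched variant of the loop below could look roughly like this
+        # (a sketch; forget_snapshots() is a hypothetical helper that would
+        # pass all ids to a single restic forget call):
+        #
+        #     to_forget = [s for s in auto_snaps if s not in new_snaplist]
+        #     if to_forget:
+        #         Backups.forget_snapshots(to_forget)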
+ for snap in auto_snaps: + if snap not in new_snaplist: Backups.forget_snapshot(snap) + @staticmethod + def _standardize_quotas(i: int) -> int: + if i <= 0: + i = -1 + return i + + @staticmethod + def autobackup_quotas() -> AutobackupQuotas: + """everything <=0 means unlimited""" + + return Storage.autobackup_quotas() + + @staticmethod + def set_autobackup_quotas(quotas: AutobackupQuotas) -> None: + """everything <=0 means unlimited""" + + Storage.set_autobackup_quotas( + AutobackupQuotas( + daily=Backups._standardize_quotas(quotas.daily), + weekly=Backups._standardize_quotas(quotas.weekly), + monthly=Backups._standardize_quotas(quotas.monthly), + yearly=Backups._standardize_quotas(quotas.yearly), + total=Backups._standardize_quotas(quotas.total), + ) + ) + @staticmethod def set_max_auto_snapshots(value: int) -> None: """everything <=0 means unlimited""" diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 1a0091f..38fc3a2 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -6,6 +6,10 @@ from datetime import datetime from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.models.backup.provider import BackupProviderModel +from selfprivacy_api.graphql.common_types.backup import ( + AutobackupQuotas, + _AutobackupQuotas, +) from selfprivacy_api.utils.redis_pool import RedisPool from selfprivacy_api.utils.redis_model_storage import ( @@ -27,6 +31,7 @@ REDIS_PROVIDER_KEY = "backups:provider" REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" REDIS_AUTOBACKUP_MAX_KEY = "backups:autobackup_cap" +REDIS_AUTOBACKUP_QUOTAS_KEY = "backups:autobackup_quotas_key" redis = RedisPool().get_connection() @@ -41,6 +46,7 @@ class Storage: redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) redis.delete(REDIS_INITTED_CACHE) redis.delete(REDIS_AUTOBACKUP_MAX_KEY) + redis.delete(REDIS_AUTOBACKUP_QUOTAS_KEY) prefixes_to_clean = [ REDIS_SNAPSHOTS_PREFIX, @@ -178,6 +184,26 @@ class Storage: """Marks the repository as initialized""" redis.delete(REDIS_INITTED_CACHE) + @staticmethod + def set_autobackup_quotas(quotas: AutobackupQuotas) -> None: + store_model_as_hash(redis, REDIS_AUTOBACKUP_QUOTAS_KEY, quotas.to_pydantic()) + + @staticmethod + def autobackup_quotas() -> AutobackupQuotas: + quotas_model = hash_as_model( + redis, REDIS_AUTOBACKUP_QUOTAS_KEY, _AutobackupQuotas + ) + if quotas_model is None: + unlimited_quotas = AutobackupQuotas( + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, + total=-1, + ) + return unlimited_quotas + return AutobackupQuotas.from_pydantic(quotas_model) + @staticmethod def set_max_auto_snapshots(value: int): redis.set(REDIS_AUTOBACKUP_MAX_KEY, value) diff --git a/selfprivacy_api/graphql/common_types/backup.py b/selfprivacy_api/graphql/common_types/backup.py index 9eaef12..3d5b5aa 100644 --- a/selfprivacy_api/graphql/common_types/backup.py +++ b/selfprivacy_api/graphql/common_types/backup.py @@ -2,6 +2,7 @@ # pylint: disable=too-few-public-methods import strawberry from enum import Enum +from pydantic import BaseModel @strawberry.enum @@ -15,3 +16,16 @@ class BackupReason(Enum): EXPLICIT = "EXPLICIT" AUTO = "AUTO" PRE_RESTORE = "PRE_RESTORE" + + +class _AutobackupQuotas(BaseModel): + daily: int + weekly: int + monthly: int + yearly: int + total: int + + +@strawberry.experimental.pydantic.type(model=_AutobackupQuotas, all_fields=True) +class AutobackupQuotas: + pass diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 781468a..3314597 100644 --- 
a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -5,8 +5,12 @@ from os import makedirs from os import remove from os import listdir from os import urandom -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, timezone, date, time from subprocess import Popen +from copy import copy + +import secrets + import selfprivacy_api.services as services from selfprivacy_api.services import Service, get_all_services @@ -19,6 +23,8 @@ from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas + from selfprivacy_api.backup import Backups, BACKUP_PROVIDER_ENVS import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider @@ -298,29 +304,215 @@ def test_backup_reasons(backups, dummy_service): assert snaps[0].reason == BackupReason.AUTO -def test_too_many_auto(backups, dummy_service): - assert Backups.max_auto_snapshots() == -1 - Backups.set_max_auto_snapshots(2) - assert Backups.max_auto_snapshots() == 2 +unlimited_quotas = AutobackupQuotas( + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, + total=-1, +) - snap = Backups.back_up(dummy_service, BackupReason.AUTO) - assert len(Backups.get_snapshots(dummy_service)) == 1 - snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) - assert len(Backups.get_snapshots(dummy_service)) == 2 - snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) - assert len(Backups.get_snapshots(dummy_service)) == 2 - snaps = Backups.get_snapshots(dummy_service) +def test_get_empty_quotas(backups): + quotas = Backups.autobackup_quotas() + assert quotas is not None + assert quotas == unlimited_quotas - assert snap2 in snaps - assert snap3 in snaps - assert snap not in snaps - Backups.set_max_auto_snapshots(-1) - snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) - snaps = Backups.get_snapshots(dummy_service) - assert len(snaps) == 3 - assert snap4 in snaps +def test_set_quotas(backups): + quotas = AutobackupQuotas( + daily=2343, + weekly=343, + monthly=0, + yearly=-34556, + total=563, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == AutobackupQuotas( + daily=2343, + weekly=343, + monthly=-1, + yearly=-1, + total=563, + ) + + +def dummy_snapshot(date: datetime): + return Snapshot( + id=str(hash(date)), + service_name="someservice", + created_at=date, + reason=BackupReason.EXPLICIT, + ) + + +def test_autobackup_snapshots_pruning(backups): + # Wednesday, fourth week + now = datetime(year=2023, month=1, day=25, hour=10) + + snaps = [ + dummy_snapshot(now - timedelta(days=365 * 2)), + dummy_snapshot(now - timedelta(days=20)), + dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=1, hours=3)), + dummy_snapshot(now - timedelta(days=1, hours=2)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now), + ] + old_len = len(snaps) + + quotas = copy(unlimited_quotas) + Backups.set_autobackup_quotas(quotas) + assert Backups._prune_snaps_with_quotas(snaps) == snaps + + quotas = copy(unlimited_quotas) + quotas.daily = 2 + Backups.set_autobackup_quotas(quotas) + + pruned_snaps = Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(now - timedelta(days=365 * 2)), + dummy_snapshot(now - timedelta(days=20)), + dummy_snapshot(now - 
timedelta(days=2)), + dummy_snapshot(now - timedelta(days=1, hours=2)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now), + ] + + # checking that this function does not mutate the argument + assert snaps != pruned_snaps + assert len(snaps) == old_len + + quotas = copy(unlimited_quotas) + quotas.weekly = 4 + Backups.set_autobackup_quotas(quotas) + + pruned_snaps = Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(now - timedelta(days=365 * 2)), + dummy_snapshot(now - timedelta(days=20)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now), + ] + + quotas = copy(unlimited_quotas) + quotas.monthly = 7 + Backups.set_autobackup_quotas(quotas) + + pruned_snaps = Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(now - timedelta(days=365 * 2)), + dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=1, hours=3)), + dummy_snapshot(now - timedelta(days=1, hours=2)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now), + ] + + +def test_autobackup_snapshots_pruning_yearly(backups): + snaps = [ + dummy_snapshot(datetime(year=2023, month=2, day=1)), + dummy_snapshot(datetime(year=2023, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), + dummy_snapshot(datetime(year=2055, month=3, day=1)), + ] + quotas = copy(unlimited_quotas) + quotas.yearly = 2 + Backups.set_autobackup_quotas(quotas) + + pruned_snaps = Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(datetime(year=2023, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), + dummy_snapshot(datetime(year=2055, month=3, day=1)), + ] + + +def test_autobackup_snapshots_pruning_bottleneck(backups): + now = datetime(year=2023, month=1, day=25, hour=10) + snaps = [ + dummy_snapshot(now - timedelta(hours=4)), + dummy_snapshot(now - timedelta(hours=3)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now), + ] + + yearly_quota = copy(unlimited_quotas) + yearly_quota.yearly = 2 + + monthly_quota = copy(unlimited_quotas) + monthly_quota.monthly = 2 + + weekly_quota = copy(unlimited_quotas) + weekly_quota.weekly = 2 + + daily_quota = copy(unlimited_quotas) + daily_quota.daily = 2 + + total_quota = copy(unlimited_quotas) + total_quota.total = 2 + + for quota in [total_quota, yearly_quota, monthly_quota, weekly_quota, daily_quota]: + Backups.set_autobackup_quotas(quota) + pruned_snaps = Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now), + ] + + +def test_autobackup_snapshots_pruning_edgeweek(backups): + # jan 1 2023 is Sunday + snaps = [ + dummy_snapshot(datetime(year=2022, month=12, day=30)), + dummy_snapshot(datetime(year=2022, month=12, day=31)), + dummy_snapshot(datetime(year=2023, month=1, day=1)), + dummy_snapshot(datetime(year=2023, month=1, day=6)), + ] + quotas = copy(unlimited_quotas) + quotas.weekly = 2 + Backups.set_autobackup_quotas(quotas) + + pruned_snaps = Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(datetime(year=2022, month=12, day=31)), + dummy_snapshot(datetime(year=2023, month=1, day=1)), + dummy_snapshot(datetime(year=2023, month=1, 
day=6)), + ] + + +# def test_too_many_auto(backups, dummy_service): +# assert Backups.autobackup_quotas() +# Backups.set_max_auto_snapshots(2) +# assert Backups.max_auto_snapshots() == 2 + +# snap = Backups.back_up(dummy_service, BackupReason.AUTO) +# assert len(Backups.get_snapshots(dummy_service)) == 1 +# snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) +# assert len(Backups.get_snapshots(dummy_service)) == 2 +# snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) +# assert len(Backups.get_snapshots(dummy_service)) == 2 + +# snaps = Backups.get_snapshots(dummy_service) + +# assert snap2 in snaps +# assert snap3 in snaps +# assert snap not in snaps + +# Backups.set_max_auto_snapshots(-1) +# snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) +# snaps = Backups.get_snapshots(dummy_service) +# assert len(snaps) == 3 +# assert snap4 in snaps def folder_files(folder): From a75a102df6cb2846bde8ad9e013dab8f06648c28 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 28 Aug 2023 17:15:27 +0000 Subject: [PATCH 522/537] test(backups): test quotas with actual backups --- tests/test_graphql/test_backup.py | 58 +++++++++++++++++++++---------- 1 file changed, 39 insertions(+), 19 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 3314597..550c56b 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -490,29 +490,49 @@ def test_autobackup_snapshots_pruning_edgeweek(backups): ] -# def test_too_many_auto(backups, dummy_service): -# assert Backups.autobackup_quotas() -# Backups.set_max_auto_snapshots(2) -# assert Backups.max_auto_snapshots() == 2 +def test_too_many_auto(backups, dummy_service): + assert Backups.autobackup_quotas() + quota = copy(unlimited_quotas) + quota.total = 2 + Backups.set_autobackup_quotas(quota) + assert Backups.autobackup_quotas().total == 2 -# snap = Backups.back_up(dummy_service, BackupReason.AUTO) -# assert len(Backups.get_snapshots(dummy_service)) == 1 -# snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) -# assert len(Backups.get_snapshots(dummy_service)) == 2 -# snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) -# assert len(Backups.get_snapshots(dummy_service)) == 2 + snap = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 1 + snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 2 + snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 2 -# snaps = Backups.get_snapshots(dummy_service) + snaps = Backups.get_snapshots(dummy_service) + assert snap2 in snaps + assert snap3 in snaps + assert snap not in snaps -# assert snap2 in snaps -# assert snap3 in snaps -# assert snap not in snaps + quota.total = -1 + Backups.set_autobackup_quotas(quota) + snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) -# Backups.set_max_auto_snapshots(-1) -# snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) -# snaps = Backups.get_snapshots(dummy_service) -# assert len(snaps) == 3 -# assert snap4 in snaps + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 3 + assert snap4 in snaps + + # Retroactivity + quota.total = 1 + Backups.set_autobackup_quotas(quota) + snap5 = Backups.back_up(dummy_service, BackupReason.AUTO) + + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + assert snap5 in snaps + + # Explicit snaps are not affected + snap6 = 
Backups.back_up(dummy_service, BackupReason.EXPLICIT)
+
+    snaps = Backups.get_snapshots(dummy_service)
+    assert len(snaps) == 2
+    assert snap5 in snaps
+    assert snap6 in snaps
 
 
 def folder_files(folder):
From 1fc47b049daa262637c244e785de4e792fa2dd06 Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 28 Aug 2023 17:23:21 +0000
Subject: [PATCH 523/537] refactor(backups): clean up caps code

---
 selfprivacy_api/backup/__init__.py | 12 ------------
 selfprivacy_api/backup/storage.py  | 13 -------------
 2 files changed, 25 deletions(-)

diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py
index 73f74a9..336b705 100644
--- a/selfprivacy_api/backup/__init__.py
+++ b/selfprivacy_api/backup/__init__.py
@@ -396,18 +396,6 @@ class Backups:
             )
         )
 
-    @staticmethod
-    def set_max_auto_snapshots(value: int) -> None:
-        """everything <=0 means unlimited"""
-        if value <= 0:
-            value = -1
-        Storage.set_max_auto_snapshots(value)
-
-    @staticmethod
-    def max_auto_snapshots() -> int:
-        """-1 means unlimited"""
-        return Storage.max_auto_snapshots()
-
     # Restoring
 
     @staticmethod
diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py
index 38fc3a2..86b92f3 100644
--- a/selfprivacy_api/backup/storage.py
+++ b/selfprivacy_api/backup/storage.py
@@ -30,7 +30,6 @@ REDIS_INITTED_CACHE = "backups:repo_initted"
 REDIS_PROVIDER_KEY = "backups:provider"
 REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period"
 
-REDIS_AUTOBACKUP_MAX_KEY = "backups:autobackup_cap"
 REDIS_AUTOBACKUP_QUOTAS_KEY = "backups:autobackup_quotas_key"
 
 redis = RedisPool().get_connection()
@@ -45,7 +44,6 @@ class Storage:
         redis.delete(REDIS_PROVIDER_KEY)
         redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY)
         redis.delete(REDIS_INITTED_CACHE)
-        redis.delete(REDIS_AUTOBACKUP_MAX_KEY)
         redis.delete(REDIS_AUTOBACKUP_QUOTAS_KEY)
 
         prefixes_to_clean = [
@@ -203,14 +201,3 @@
             )
         return AutobackupQuotas.from_pydantic(quotas_model)
-
-    @staticmethod
-    def set_max_auto_snapshots(value: int):
-        redis.set(REDIS_AUTOBACKUP_MAX_KEY, value)
-
-    @staticmethod
-    def max_auto_snapshots():
-        if redis.exists(REDIS_AUTOBACKUP_MAX_KEY):
-            return int(redis.get(REDIS_AUTOBACKUP_MAX_KEY))
-        else:
-            return -1
From 0c04975ea4013d39ba60f98eee8894e2cc1f2bbb Mon Sep 17 00:00:00 2001
From: Houkime <>
Date: Mon, 28 Aug 2023 17:24:20 +0000
Subject: [PATCH 524/537] fix(backups): commit forgotten time.py

---
 selfprivacy_api/backup/time.py | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)
 create mode 100644 selfprivacy_api/backup/time.py

diff --git a/selfprivacy_api/backup/time.py b/selfprivacy_api/backup/time.py
new file mode 100644
index 0000000..aba12bd
--- /dev/null
+++ b/selfprivacy_api/backup/time.py
@@ -0,0 +1,28 @@
+from datetime import datetime, timedelta, time
+
+
+def same_day(a: datetime, b: datetime) -> bool:
+    return a.date() == b.date()
+
+
+def same_week(a: datetime, b: datetime) -> bool:
+    # doing the hard way because weeks traverse the edges of years
+    zerobased_weekday = a.isoweekday() - 1
+    start_of_day = datetime.combine(a.date(), time.min)
+    start_of_week = start_of_day - timedelta(days=zerobased_weekday)
+    end_of_week = start_of_week + timedelta(days=7)
+
+    if b >= start_of_week and b <= end_of_week:
+        return True
+    return False
+
+
+def same_month(a: datetime, b: datetime) -> bool:
+    return a.month == b.month and a.year == b.year
+
+
+def same_year(a: datetime, b: datetime) -> bool:
+    return a.year == b.year
+
+def same_lifetime_of_the_universe(a: datetime, b: datetime) -> bool:
+    
return True From 9fdc536f9fa5fce702b1fea0133ce0f73c497dcf Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 28 Aug 2023 18:24:29 +0000 Subject: [PATCH 525/537] BROKEN(backups): hooking up quotas to API fails. AutobackupQuotas needs to be an input type, but if input type, it fails because it needs to be an Output type, which is not documented --- .../graphql/mutations/backup_mutations.py | 39 ++++++++++++++- selfprivacy_api/graphql/queries/backup.py | 4 ++ tests/test_graphql/test_api_backup.py | 50 +++++++++++++++++++ 3 files changed, 92 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index c022d57..babbcf8 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -11,7 +11,10 @@ from selfprivacy_api.graphql.queries.backup import BackupConfiguration from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.graphql.common_types.jobs import job_to_api_job -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import ( + RestoreStrategy, + AutobackupQuotas, +) from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_service_by_id @@ -33,6 +36,13 @@ class InitializeRepositoryInput: password: str +@strawberry.input +class SetAutobackupQuotasInput: + """A single field input to reuse AutobackupQuotas""" + + quotas: AutobackupQuotas + + @strawberry.type class GenericBackupConfigReturn(MutationReturnInterface): """Generic backup config return""" @@ -90,6 +100,33 @@ class BackupMutations: configuration=Backup().configuration(), ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def set_autobackup_quotas( + self, quotas: SetAutobackupQuotasInput + ) -> GenericBackupConfigReturn: + """ + Set autobackup quotas. + Values <=0 for any timeframe mean no limits for that timeframe. + To disable autobackup use autobackup period setting, not this mutation. + """ + + try: + Backups.set_autobackup_quotas(quotas) + return GenericBackupConfigReturn( + success=True, + message="", + code=200, + configuration=Backup().configuration(), + ) + + except Exception as e: + return GenericBackupConfigReturn( + success=False, + message=str(e), + code=400, + configuration=Backup().configuration(), + ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) def start_backup(self, service_id: str) -> GenericJobMutationReturn: """Start backup""" diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 6535a88..e03215d 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -13,6 +13,7 @@ from selfprivacy_api.graphql.common_types.service import ( SnapshotInfo, service_to_graphql_service, ) +from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas from selfprivacy_api.services import get_service_by_id @@ -26,6 +27,8 @@ class BackupConfiguration: is_initialized: bool # If none, autobackups are disabled autobackup_period: typing.Optional[int] + # None is equal to all quotas being unlimited (-1). Optional for compatibility reasons. 
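+    # (if nothing has been stored yet, Storage.autobackup_quotas() falls back to all -1, i.e. unlimited)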
+ autobackup_quotas: typing.Optional[AutobackupQuotas] # Bucket name for Backblaze, path for some other providers location_name: typing.Optional[str] location_id: typing.Optional[str] @@ -42,6 +45,7 @@ class Backup: autobackup_period=Backups.autobackup_period_minutes(), location_name=Backups.provider().location, location_id=Backups.provider().repo_id, + autobackup_quotas=Backups.autobackup_quotas(), ) @strawberry.field diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index e53ce2a..9681e7b 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -4,6 +4,7 @@ from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service +from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas from selfprivacy_api.jobs import Jobs, JobStatus API_RELOAD_SNAPSHOTS = """ @@ -38,6 +39,28 @@ mutation TestAutobackupPeriod($period: Int) { } """ + +API_SET_AUTOBACKUP_QUOTAS_MUTATION = """ +mutation TestAutobackupQuotas($input: SetAutobackupQuotasInput!) { + backup { + setAutobackupQuotas(quotas: $input) { + success + message + code + configuration { + provider + encryptionKey + isInitialized + autobackupPeriod + locationName + locationId + autobackupQuotas + } + } + } +} +""" + API_REMOVE_REPOSITORY_MUTATION = """ mutation TestRemoveRepo { backup { @@ -177,6 +200,17 @@ def api_set_period(authorized_client, period): return response +def api_set_quotas(authorized_client, quotas): + response = authorized_client.post( + "/graphql", + json={ + "query": API_SET_AUTOBACKUP_QUOTAS_MUTATION, + "variables": {"input": {"quotas": quotas}}, + }, + ) + return response + + def api_remove(authorized_client): response = authorized_client.post( "/graphql", @@ -323,6 +357,22 @@ def test_remove(authorized_client, generic_userdata): assert configuration["isInitialized"] is False +def test_autobackup_quotas_nonzero(authorized_client): + quotas = AutobackupQuotas( + daily=2, + weekly=4, + monthly=13, + yearly=14, + total=3, + ) + response = api_set_quotas(authorized_client, quotas) + data = get_data(response)["backup"]["setAutobackupQuotas"] + assert_ok(data) + + configuration = data["configuration"] + assert configuration["autobackupQuotas"] == quotas + + def test_autobackup_period_nonzero(authorized_client): new_period = 11 response = api_set_period(authorized_client, new_period) From ad9384c850d0249db4f30bb58f472d8b3013a2ba Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 30 Aug 2023 12:03:19 +0300 Subject: [PATCH 526/537] fix(graphql): backup quotas field typing --- selfprivacy_api/backup/time.py | 1 + .../graphql/common_types/backup.py | 5 +++++ .../graphql/mutations/backup_mutations.py | 11 ++-------- selfprivacy_api/graphql/queries/backup.py | 2 +- tests/test_graphql/test_api_backup.py | 21 +++++++++++++------ 5 files changed, 24 insertions(+), 16 deletions(-) diff --git a/selfprivacy_api/backup/time.py b/selfprivacy_api/backup/time.py index aba12bd..9e34211 100644 --- a/selfprivacy_api/backup/time.py +++ b/selfprivacy_api/backup/time.py @@ -24,5 +24,6 @@ def same_month(a: datetime, b: datetime) -> bool: def same_year(a: datetime, b: datetime) -> bool: return a.year == b.year + def same_lifetime_of_the_universe(a: datetime, b: datetime) -> bool: return True diff --git a/selfprivacy_api/graphql/common_types/backup.py b/selfprivacy_api/graphql/common_types/backup.py index 3d5b5aa..cc03936 100644 --- a/selfprivacy_api/graphql/common_types/backup.py +++ 
b/selfprivacy_api/graphql/common_types/backup.py @@ -29,3 +29,8 @@ class _AutobackupQuotas(BaseModel): @strawberry.experimental.pydantic.type(model=_AutobackupQuotas, all_fields=True) class AutobackupQuotas: pass + + +@strawberry.experimental.pydantic.input(model=_AutobackupQuotas, all_fields=True) +class AutobackupQuotasInput: + pass diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index babbcf8..dcfebff 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -12,8 +12,8 @@ from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.graphql.common_types.jobs import job_to_api_job from selfprivacy_api.graphql.common_types.backup import ( + AutobackupQuotasInput, RestoreStrategy, - AutobackupQuotas, ) from selfprivacy_api.backup import Backups @@ -36,13 +36,6 @@ class InitializeRepositoryInput: password: str -@strawberry.input -class SetAutobackupQuotasInput: - """A single field input to reuse AutobackupQuotas""" - - quotas: AutobackupQuotas - - @strawberry.type class GenericBackupConfigReturn(MutationReturnInterface): """Generic backup config return""" @@ -102,7 +95,7 @@ class BackupMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def set_autobackup_quotas( - self, quotas: SetAutobackupQuotasInput + self, quotas: AutobackupQuotasInput ) -> GenericBackupConfigReturn: """ Set autobackup quotas. diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index e03215d..6d47a8c 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -28,7 +28,7 @@ class BackupConfiguration: # If none, autobackups are disabled autobackup_period: typing.Optional[int] # None is equal to all quotas being unlimited (-1). Optional for compatibility reasons. - autobackup_quotas: typing.Optional[AutobackupQuotas] + autobackup_quotas: AutobackupQuotas # Bucket name for Backblaze, path for some other providers location_name: typing.Optional[str] location_id: typing.Optional[str] diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 9681e7b..e8de4a1 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -4,7 +4,10 @@ from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service -from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas +from selfprivacy_api.graphql.common_types.backup import ( + _AutobackupQuotas, + AutobackupQuotas, +) from selfprivacy_api.jobs import Jobs, JobStatus API_RELOAD_SNAPSHOTS = """ @@ -41,7 +44,7 @@ mutation TestAutobackupPeriod($period: Int) { API_SET_AUTOBACKUP_QUOTAS_MUTATION = """ -mutation TestAutobackupQuotas($input: SetAutobackupQuotasInput!) { +mutation TestAutobackupQuotas($input: AutobackupQuotasInput!) { backup { setAutobackupQuotas(quotas: $input) { success @@ -54,7 +57,13 @@ mutation TestAutobackupQuotas($input: SetAutobackupQuotasInput!) 
{ autobackupPeriod locationName locationId - autobackupQuotas + autobackupQuotas { + daily + weekly + monthly + yearly + total + } } } } @@ -200,12 +209,12 @@ def api_set_period(authorized_client, period): return response -def api_set_quotas(authorized_client, quotas): +def api_set_quotas(authorized_client, quotas: _AutobackupQuotas): response = authorized_client.post( "/graphql", json={ "query": API_SET_AUTOBACKUP_QUOTAS_MUTATION, - "variables": {"input": {"quotas": quotas}}, + "variables": {"input": quotas.dict()}, }, ) return response @@ -358,7 +367,7 @@ def test_remove(authorized_client, generic_userdata): def test_autobackup_quotas_nonzero(authorized_client): - quotas = AutobackupQuotas( + quotas = _AutobackupQuotas( daily=2, weekly=4, monthly=13, From 0dfb41a689249ff28d1316cd77de8e96f31295b4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 1 Sep 2023 10:41:27 +0000 Subject: [PATCH 527/537] feature(backups): a task to autorefresh cache. Redis expiry abolished --- selfprivacy_api/backup/storage.py | 5 ----- selfprivacy_api/backup/tasks.py | 11 ++++++++++- tests/test_graphql/test_backup.py | 19 ++++++++++++++++++- 3 files changed, 28 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index d46f584..4d1d415 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -16,9 +16,6 @@ from selfprivacy_api.utils.redis_model_storage import ( from selfprivacy_api.backup.providers.provider import AbstractBackupProvider from selfprivacy_api.backup.providers import get_kind -# a hack to store file path. -REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day - REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:" REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:" REDIS_INITTED_CACHE = "backups:repo_initted" @@ -26,7 +23,6 @@ REDIS_INITTED_CACHE = "backups:repo_initted" REDIS_PROVIDER_KEY = "backups:provider" REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" - redis = RedisPool().get_connection() @@ -89,7 +85,6 @@ class Storage: """Stores snapshot metadata in redis for caching purposes""" snapshot_key = Storage.__snapshot_key(snapshot) store_model_as_hash(redis, snapshot_key, snapshot) - redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS) @staticmethod def delete_cached_snapshot(snapshot: Snapshot) -> None: diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index db350d4..2b6b79c 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -7,13 +7,17 @@ from selfprivacy_api.graphql.common_types.backup import RestoreStrategy from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey +from huey import crontab from selfprivacy_api.services.service import Service from selfprivacy_api.backup import Backups +SNAPSHOT_CACHE_TTL_HOURS = 6 + def validate_datetime(dt: datetime) -> bool: """ - Validates that the datetime passed in is timezone-aware. + Validates that it is time to back up. + Also ensures that the timezone-aware time is used. 
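+    Naive datetimes are interpreted as UTC before the check.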
""" if dt.tzinfo is None: return Backups.is_time_to_backup(dt.replace(tzinfo=timezone.utc)) @@ -50,3 +54,8 @@ def automatic_backup(): time = datetime.utcnow().replace(tzinfo=timezone.utc) for service in Backups.services_to_back_up(time): start_backup(service) + + +@huey.periodic_task(crontab(hour=SNAPSHOT_CACHE_TTL_HOURS)) +def reload_snapshot_cache(): + Backups.force_snapshot_cache_reload() diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index dc491c4..d54af7b 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -32,7 +32,11 @@ from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job -from selfprivacy_api.backup.tasks import start_backup, restore_snapshot +from selfprivacy_api.backup.tasks import ( + start_backup, + restore_snapshot, + reload_snapshot_cache, +) from selfprivacy_api.backup.storage import Storage from selfprivacy_api.backup.jobs import get_backup_job @@ -806,3 +810,16 @@ def test_tempfile(): with tempfile.TemporaryDirectory() as temp: assert path.exists(temp) assert not path.exists(temp) + + +# Storage +def test_cache_invalidaton_task(backups, dummy_service): + Backups.back_up(dummy_service) + assert len(Storage.get_cached_snapshots()) == 1 + + # Does not trigger resync + Storage.invalidate_snapshot_storage() + assert Storage.get_cached_snapshots() == [] + + reload_snapshot_cache() + assert len(Storage.get_cached_snapshots()) == 1 From 56be3d9c31b972c1bb6eb03e8fb2a624b84852d9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 8 Sep 2023 16:22:53 +0000 Subject: [PATCH 528/537] fix(backup): trim auto-snapshots on setting the quotas --- selfprivacy_api/backup/__init__.py | 3 +++ tests/test_graphql/test_backup.py | 4 +++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 336b705..7056071 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -396,6 +396,9 @@ class Backups: ) ) + for service in get_all_services(): + Backups._prune_auto_snaps(service) + # Restoring @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 550c56b..5daae0c 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -520,8 +520,10 @@ def test_too_many_auto(backups, dummy_service): # Retroactivity quota.total = 1 Backups.set_autobackup_quotas(quota) - snap5 = Backups.back_up(dummy_service, BackupReason.AUTO) + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + snap5 = Backups.back_up(dummy_service, BackupReason.AUTO) snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 assert snap5 in snaps From dedd6a9cc949fcd176ce941b5182611e4bc7d33c Mon Sep 17 00:00:00 2001 From: Inex Code Date: Sat, 9 Sep 2023 03:26:41 +0300 Subject: [PATCH 529/537] refactor(backups): use restic-like rotation policy --- selfprivacy_api/backup/__init__.py | 113 +++++--- .../backup/backuppers/restic_backupper.py | 21 +- selfprivacy_api/backup/storage.py | 4 +- selfprivacy_api/backup/time.py | 29 -- .../graphql/common_types/backup.py | 4 +- tests/test_graphql/test_api_backup.py | 4 +- tests/test_graphql/test_backup.py | 266 +++++++++++++----- 7 files changed, 278 insertions(+), 163 deletions(-) delete mode 100644 selfprivacy_api/backup/time.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 
7056071..dff4b3b 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -4,7 +4,7 @@ This module contains the controller class for backups. from datetime import datetime, timedelta import os from os import statvfs -from typing import List, Optional +from typing import Callable, List, Optional from selfprivacy_api.utils import ReadUserData, WriteUserData @@ -28,13 +28,7 @@ from selfprivacy_api.graphql.common_types.backup import ( BackupReason, AutobackupQuotas, ) -from selfprivacy_api.backup.time import ( - same_day, - same_month, - same_week, - same_year, - same_lifetime_of_the_universe, -) + from selfprivacy_api.models.backup.snapshot import Snapshot @@ -81,6 +75,24 @@ class NotDeadError(AssertionError): """ +class RotationBucket: + """ + Bucket object used for rotation. + Has the following mutable fields: + - the counter, int + - the lambda function which takes datetime and the int and returns the int + - the last, int + """ + + def __init__(self, counter: int, last: int, rotation_lambda): + self.counter: int = counter + self.last: int = last + self.rotation_lambda: Callable[[datetime, int], int] = rotation_lambda + + def __str__(self) -> str: + return f"Bucket(counter={self.counter}, last={self.last})" + + class Backups: """A stateless controller class for backups""" @@ -314,45 +326,54 @@ class Backups: if snap.reason == BackupReason.AUTO ] - @staticmethod - def add_snap_but_with_quotas( - new_snap: Snapshot, snaps: List[Snapshot], quotas: AutobackupQuotas - ) -> None: - quotas_map = { - same_day: quotas.daily, - same_week: quotas.weekly, - same_month: quotas.monthly, - same_year: quotas.yearly, - same_lifetime_of_the_universe: quotas.total, - } - - snaps.append(new_snap) - - for is_same_period, quota in quotas_map.items(): - if quota <= 0: - continue - - cohort = [ - snap - for snap in snaps - if is_same_period(snap.created_at, new_snap.created_at) - ] - sorted_cohort = sorted(cohort, key=lambda s: s.created_at) - n_to_kill = len(cohort) - quota - if n_to_kill > 0: - snaps_to_kill = sorted_cohort[:n_to_kill] - for snap in snaps_to_kill: - snaps.remove(snap) - @staticmethod def _prune_snaps_with_quotas(snapshots: List[Snapshot]) -> List[Snapshot]: # Function broken out for testability - sorted_snaps = sorted(snapshots, key=lambda s: s.created_at) - quotas = Backups.autobackup_quotas() + # Sorting newest first + sorted_snaps = sorted(snapshots, key=lambda s: s.created_at, reverse=True) + quotas: AutobackupQuotas = Backups.autobackup_quotas() + + buckets: list[RotationBucket] = [ + RotationBucket( + quotas.last, + -1, + lambda _, index: index, + ), + RotationBucket( + quotas.daily, + -1, + lambda date, _: date.year * 10000 + date.month * 100 + date.day, + ), + RotationBucket( + quotas.weekly, + -1, + lambda date, _: date.year * 100 + date.isocalendar()[1], + ), + RotationBucket( + quotas.monthly, + -1, + lambda date, _: date.year * 100 + date.month, + ), + RotationBucket( + quotas.yearly, + -1, + lambda date, _: date.year, + ), + ] new_snaplist: List[Snapshot] = [] - for snap in sorted_snaps: - Backups.add_snap_but_with_quotas(snap, new_snaplist, quotas) + for i, snap in enumerate(sorted_snaps): + keep_snap = False + for bucket in buckets: + if (bucket.counter > 0) or (bucket.counter == -1): + val = bucket.rotation_lambda(snap.created_at, i) + if (val != bucket.last) or (i == len(sorted_snaps) - 1): + bucket.last = val + if bucket.counter > 0: + bucket.counter -= 1 + if not keep_snap: + new_snaplist.append(snap) + keep_snap = True return new_snaplist 
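For reference, the bucket walk above can be exercised in isolation. Below is a minimal sketch of the same policy; Bucket and keep are illustrative names, not part of the codebase, and only the last/daily rules are wired up:

from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Callable, List


@dataclass
class Bucket:
    counter: int  # how many snapshots this rule may still keep; -1 means unlimited
    key: Callable[[datetime, int], int]  # maps (created_at, index) to a period key
    last: int = -1  # period key of the previous snapshot seen by this rule


def keep(snaps: List[datetime], last: int, daily: int) -> List[datetime]:
    snaps = sorted(snaps, reverse=True)  # newest first, as in _prune_snaps_with_quotas
    buckets = [
        Bucket(last, lambda _, i: i),  # "last N": every index opens a new period
        Bucket(daily, lambda d, _: d.year * 10000 + d.month * 100 + d.day),
    ]
    kept: List[datetime] = []
    for i, snap in enumerate(snaps):
        for bucket in buckets:
            if bucket.counter > 0 or bucket.counter == -1:
                val = bucket.key(snap, i)
                if val != bucket.last or i == len(snaps) - 1:
                    bucket.last = val
                    if bucket.counter > 0:
                        bucket.counter -= 1
                    if snap not in kept:
                        kept.append(snap)
    return kept


now = datetime(2023, 1, 25, 10)
snaps = [now - timedelta(hours=h) for h in (0, 2, 26, 50)]
# daily=2 keeps the newest snapshot of each of the two most recent days
assert keep(snaps, last=0, daily=2) == [now, now - timedelta(hours=26)]

The i == len(snaps) - 1 clause is what lets a rule with spare quota hold on to the very oldest snapshot, the behaviour pinned down by test_autobackup_snapshots_pruning_bottleneck below.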
@@ -372,27 +393,27 @@ class Backups: @staticmethod def _standardize_quotas(i: int) -> int: - if i <= 0: + if i <= -1: i = -1 return i @staticmethod def autobackup_quotas() -> AutobackupQuotas: - """everything <=0 means unlimited""" + """0 means do not keep, -1 means unlimited""" return Storage.autobackup_quotas() @staticmethod def set_autobackup_quotas(quotas: AutobackupQuotas) -> None: - """everything <=0 means unlimited""" + """0 means do not keep, -1 means unlimited""" Storage.set_autobackup_quotas( AutobackupQuotas( + last=Backups._standardize_quotas(quotas.last), daily=Backups._standardize_quotas(quotas.daily), weekly=Backups._standardize_quotas(quotas.weekly), monthly=Backups._standardize_quotas(quotas.monthly), yearly=Backups._standardize_quotas(quotas.yearly), - total=Backups._standardize_quotas(quotas.total), ) ) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index f5467ff..b6c643b 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -5,7 +5,7 @@ import json import datetime import tempfile -from typing import List, TypeVar, Callable +from typing import List, Optional, TypeVar, Callable from collections.abc import Iterable from json.decoder import JSONDecodeError from os.path import exists, join @@ -33,12 +33,12 @@ def unlocked_repo(func: T) -> T: def inner(self: ResticBackupper, *args, **kwargs): try: return func(self, *args, **kwargs) - except Exception as e: - if "unable to create lock" in str(e): + except Exception as error: + if "unable to create lock" in str(error): self.unlock() return func(self, *args, **kwargs) else: - raise e + raise error # Above, we manually guarantee that the type returned is compatible. 
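    # (on a lock error the wrapped call is retried exactly once after unlock();
    #  a second failure inside the retry is not caught and propagates to the caller)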
return inner # type: ignore @@ -85,7 +85,10 @@ class ResticBackupper(AbstractBackupper): def _password_command(self): return f"echo {LocalBackupSecret.get()}" - def restic_command(self, *args, tags: List[str] = []) -> List[str]: + def restic_command(self, *args, tags: Optional[List[str]] = None) -> List[str]: + if tags is None: + tags = [] + command = [ "restic", "-o", @@ -219,7 +222,7 @@ class ResticBackupper(AbstractBackupper): ) from error @staticmethod - def _snapshot_id_from_backup_messages(messages) -> Snapshot: + def _snapshot_id_from_backup_messages(messages) -> str: for message in messages: if message["message_type"] == "summary": # There is a discrepancy between versions of restic/rclone @@ -317,8 +320,8 @@ class ResticBackupper(AbstractBackupper): break if "unable" in line: raise ValueError(line) - except Exception as e: - raise ValueError("could not lock repository") from e + except Exception as error: + raise ValueError("could not lock repository") from error @unlocked_repo def restored_size(self, snapshot_id: str) -> int: @@ -415,6 +418,8 @@ class ResticBackupper(AbstractBackupper): forget_command = self.restic_command( "forget", snapshot_id, + # TODO: prune should be done in a separate process + "--prune", ) with subprocess.Popen( diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 86b92f3..ddfd176 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -193,11 +193,11 @@ class Storage: ) if quotas_model is None: unlimited_quotas = AutobackupQuotas( + last=-1, daily=-1, weekly=-1, monthly=-1, yearly=-1, - total=-1, ) return unlimited_quotas - return AutobackupQuotas.from_pydantic(quotas_model) + return AutobackupQuotas.from_pydantic(quotas_model) # pylint: disable=no-member diff --git a/selfprivacy_api/backup/time.py b/selfprivacy_api/backup/time.py deleted file mode 100644 index 9e34211..0000000 --- a/selfprivacy_api/backup/time.py +++ /dev/null @@ -1,29 +0,0 @@ -from datetime import datetime, timedelta, time - - -def same_day(a: datetime, b: datetime) -> bool: - return a.date() == b.date() - - -def same_week(a: datetime, b: datetime) -> bool: - # doing the hard way because weeks traverse the edges of years - zerobased_weekday = a.isoweekday() - 1 - start_of_day = datetime.combine(a.date(), time.min) - start_of_week = start_of_day - timedelta(days=zerobased_weekday) - end_of_week = start_of_week + timedelta(days=7) - - if b >= start_of_week and b <= end_of_week: - return True - return False - - -def same_month(a: datetime, b: datetime) -> bool: - return a.month == b.month and a.year == b.year - - -def same_year(a: datetime, b: datetime) -> bool: - return a.year == b.year - - -def same_lifetime_of_the_universe(a: datetime, b: datetime) -> bool: - return True diff --git a/selfprivacy_api/graphql/common_types/backup.py b/selfprivacy_api/graphql/common_types/backup.py index cc03936..953009d 100644 --- a/selfprivacy_api/graphql/common_types/backup.py +++ b/selfprivacy_api/graphql/common_types/backup.py @@ -1,7 +1,7 @@ """Backup""" # pylint: disable=too-few-public-methods -import strawberry from enum import Enum +import strawberry from pydantic import BaseModel @@ -19,11 +19,11 @@ class BackupReason(Enum): class _AutobackupQuotas(BaseModel): + last: int daily: int weekly: int monthly: int yearly: int - total: int @strawberry.experimental.pydantic.type(model=_AutobackupQuotas, all_fields=True) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index e8de4a1..14410e3 
100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -58,11 +58,11 @@ mutation TestAutobackupQuotas($input: AutobackupQuotasInput!) { locationName locationId autobackupQuotas { + last daily weekly monthly yearly - total } } } @@ -368,11 +368,11 @@ def test_remove(authorized_client, generic_userdata): def test_autobackup_quotas_nonzero(authorized_client): quotas = _AutobackupQuotas( + last=3, daily=2, weekly=4, monthly=13, yearly=14, - total=3, ) response = api_set_quotas(authorized_client, quotas) data = get_data(response)["backup"]["setAutobackupQuotas"] diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 5daae0c..edef6d0 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -305,11 +305,19 @@ def test_backup_reasons(backups, dummy_service): unlimited_quotas = AutobackupQuotas( + last=-1, daily=-1, weekly=-1, monthly=-1, yearly=-1, - total=-1, +) + +zero_quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, ) @@ -321,20 +329,66 @@ def test_get_empty_quotas(backups): def test_set_quotas(backups): quotas = AutobackupQuotas( + last=3, daily=2343, weekly=343, monthly=0, yearly=-34556, - total=563, ) Backups.set_autobackup_quotas(quotas) assert Backups.autobackup_quotas() == AutobackupQuotas( + last=3, daily=2343, weekly=343, + monthly=0, + yearly=-1, + ) + + +def test_set_zero_quotas(backups): + quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == zero_quotas + + +def test_set_unlimited_quotas(backups): + quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, monthly=-1, yearly=-1, - total=563, ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == unlimited_quotas + + +def test_set_zero_quotas_after_unlimited(backups): + quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == unlimited_quotas + + quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == zero_quotas def dummy_snapshot(date: datetime): @@ -351,15 +405,24 @@ def test_autobackup_snapshots_pruning(backups): now = datetime(year=2023, month=1, day=25, hour=10) snaps = [ - dummy_snapshot(now - timedelta(days=365 * 2)), - dummy_snapshot(now - timedelta(days=20)), - dummy_snapshot(now - timedelta(days=2)), - dummy_snapshot(now - timedelta(days=1, hours=3)), - dummy_snapshot(now - timedelta(days=1, hours=2)), - dummy_snapshot(now - timedelta(days=1)), - dummy_snapshot(now - timedelta(hours=2)), - dummy_snapshot(now - timedelta(minutes=5)), dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(hours=5)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(days=1, hours=2)), + dummy_snapshot(now - timedelta(days=1, hours=3)), + dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=7)), + dummy_snapshot(now - timedelta(days=12)), + dummy_snapshot(now - timedelta(days=23)), + dummy_snapshot(now - timedelta(days=28)), + dummy_snapshot(now - timedelta(days=32)), + dummy_snapshot(now - timedelta(days=47)), + dummy_snapshot(now - timedelta(days=64)), + dummy_snapshot(now - 
timedelta(days=84)), + dummy_snapshot(now - timedelta(days=104)), + dummy_snapshot(now - timedelta(days=365 * 2)), ] old_len = len(snaps) @@ -367,135 +430,190 @@ def test_autobackup_snapshots_pruning(backups): Backups.set_autobackup_quotas(quotas) assert Backups._prune_snaps_with_quotas(snaps) == snaps - quotas = copy(unlimited_quotas) + quotas = copy(zero_quotas) + quotas.last = 2 quotas.daily = 2 Backups.set_autobackup_quotas(quotas) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(now - timedelta(days=365 * 2)), - dummy_snapshot(now - timedelta(days=20)), - dummy_snapshot(now - timedelta(days=2)), - dummy_snapshot(now - timedelta(days=1, hours=2)), - dummy_snapshot(now - timedelta(days=1)), - dummy_snapshot(now - timedelta(minutes=5)), + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, hours=2)), + # dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + # dummy_snapshot(now - timedelta(days=7)), + # dummy_snapshot(now - timedelta(days=12)), + # dummy_snapshot(now - timedelta(days=23)), + # dummy_snapshot(now - timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + # dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + # dummy_snapshot(now - timedelta(days=104)), + # dummy_snapshot(now - timedelta(days=365 * 2)), ] # checking that this function does not mutate the argument - assert snaps != pruned_snaps + assert snaps != snaps_to_keep assert len(snaps) == old_len - quotas = copy(unlimited_quotas) + quotas = copy(zero_quotas) quotas.weekly = 4 Backups.set_autobackup_quotas(quotas) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(now - timedelta(days=365 * 2)), - dummy_snapshot(now - timedelta(days=20)), - dummy_snapshot(now - timedelta(days=1)), - dummy_snapshot(now - timedelta(hours=2)), - dummy_snapshot(now - timedelta(minutes=5)), + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ dummy_snapshot(now), + # dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + # dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, hours=2)), + # dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=7)), + dummy_snapshot(now - timedelta(days=12)), + dummy_snapshot(now - timedelta(days=23)), + # dummy_snapshot(now - timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + # dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + # dummy_snapshot(now - timedelta(days=104)), + # dummy_snapshot(now - timedelta(days=365 * 2)), ] - quotas = copy(unlimited_quotas) + quotas = copy(zero_quotas) quotas.monthly = 7 Backups.set_autobackup_quotas(quotas) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(now - timedelta(days=365 * 2)), - dummy_snapshot(now - timedelta(days=2)), - dummy_snapshot(now - timedelta(days=1, hours=3)), - 
dummy_snapshot(now - timedelta(days=1, hours=2)), - dummy_snapshot(now - timedelta(days=1)), - dummy_snapshot(now - timedelta(hours=2)), - dummy_snapshot(now - timedelta(minutes=5)), + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ dummy_snapshot(now), + # dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + # dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, hours=2)), + # dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + # dummy_snapshot(now - timedelta(days=7)), + # dummy_snapshot(now - timedelta(days=12)), + # dummy_snapshot(now - timedelta(days=23)), + dummy_snapshot(now - timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + dummy_snapshot(now - timedelta(days=104)), + dummy_snapshot(now - timedelta(days=365 * 2)), ] def test_autobackup_snapshots_pruning_yearly(backups): snaps = [ - dummy_snapshot(datetime(year=2023, month=2, day=1)), - dummy_snapshot(datetime(year=2023, month=3, day=1)), - dummy_snapshot(datetime(year=2023, month=4, day=1)), dummy_snapshot(datetime(year=2055, month=3, day=1)), + dummy_snapshot(datetime(year=2055, month=2, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), + dummy_snapshot(datetime(year=2023, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=2, day=1)), + dummy_snapshot(datetime(year=2021, month=2, day=1)), ] - quotas = copy(unlimited_quotas) + quotas = copy(zero_quotas) quotas.yearly = 2 Backups.set_autobackup_quotas(quotas) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(datetime(year=2023, month=3, day=1)), - dummy_snapshot(datetime(year=2023, month=4, day=1)), + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ dummy_snapshot(datetime(year=2055, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), ] def test_autobackup_snapshots_pruning_bottleneck(backups): now = datetime(year=2023, month=1, day=25, hour=10) snaps = [ - dummy_snapshot(now - timedelta(hours=4)), - dummy_snapshot(now - timedelta(hours=3)), - dummy_snapshot(now - timedelta(hours=2)), - dummy_snapshot(now - timedelta(minutes=5)), dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(hours=3)), + dummy_snapshot(now - timedelta(hours=4)), ] - yearly_quota = copy(unlimited_quotas) + yearly_quota = copy(zero_quotas) yearly_quota.yearly = 2 - monthly_quota = copy(unlimited_quotas) + monthly_quota = copy(zero_quotas) monthly_quota.monthly = 2 - weekly_quota = copy(unlimited_quotas) + weekly_quota = copy(zero_quotas) weekly_quota.weekly = 2 - daily_quota = copy(unlimited_quotas) + daily_quota = copy(zero_quotas) daily_quota.daily = 2 - total_quota = copy(unlimited_quotas) - total_quota.total = 2 + last_quota = copy(zero_quotas) + last_quota.last = 1 + last_quota.yearly = 2 - for quota in [total_quota, yearly_quota, monthly_quota, weekly_quota, daily_quota]: + for quota in [last_quota, yearly_quota, monthly_quota, weekly_quota, daily_quota]: + print(quota) Backups.set_autobackup_quotas(quota) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(now - timedelta(minutes=5)), + 
snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ dummy_snapshot(now), + # If there is a vacant quota, we should keep the last snapshot even if it doesn't fit + dummy_snapshot(now - timedelta(hours=4)), ] def test_autobackup_snapshots_pruning_edgeweek(backups): # jan 1 2023 is Sunday snaps = [ - dummy_snapshot(datetime(year=2022, month=12, day=30)), - dummy_snapshot(datetime(year=2022, month=12, day=31)), - dummy_snapshot(datetime(year=2023, month=1, day=1)), dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, day=1)), + dummy_snapshot(datetime(year=2022, month=12, day=31)), + dummy_snapshot(datetime(year=2022, month=12, day=30)), ] - quotas = copy(unlimited_quotas) + quotas = copy(zero_quotas) quotas.weekly = 2 Backups.set_autobackup_quotas(quotas) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(datetime(year=2022, month=12, day=31)), - dummy_snapshot(datetime(year=2023, month=1, day=1)), + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, day=1)), + ] + + +def test_autobackup_snapshots_pruning_big_gap(backups): + snaps = [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, day=2)), + dummy_snapshot(datetime(year=2022, month=10, day=31)), + dummy_snapshot(datetime(year=2022, month=10, day=30)), + ] + quotas = copy(zero_quotas) + quotas.weekly = 2 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2022, month=10, day=31)), ] def test_too_many_auto(backups, dummy_service): assert Backups.autobackup_quotas() - quota = copy(unlimited_quotas) - quota.total = 2 + quota = copy(zero_quotas) + quota.last = 2 Backups.set_autobackup_quotas(quota) - assert Backups.autobackup_quotas().total == 2 + assert Backups.autobackup_quotas().last == 2 snap = Backups.back_up(dummy_service, BackupReason.AUTO) assert len(Backups.get_snapshots(dummy_service)) == 1 @@ -509,7 +627,7 @@ def test_too_many_auto(backups, dummy_service): assert snap3 in snaps assert snap not in snaps - quota.total = -1 + quota.last = -1 Backups.set_autobackup_quotas(quota) snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) @@ -518,7 +636,7 @@ def test_too_many_auto(backups, dummy_service): assert snap4 in snaps # Retroactivity - quota.total = 1 + quota.last = 1 Backups.set_autobackup_quotas(quota) snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 From 450a998ea638fc572027fc8433326ceaa496fc62 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Sat, 9 Sep 2023 03:32:57 +0300 Subject: [PATCH 530/537] chore:bump version --- selfprivacy_api/dependencies.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index fb974e8..9e144fd 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.3.0" + return "2.4.0" diff --git a/setup.py b/setup.py index 684f54f..5ce3947 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.3.0", + 
version="2.4.0", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From 8b840d4c2cff45845b01a9b654654eb620f9333f Mon Sep 17 00:00:00 2001 From: Inex Code Date: Sat, 9 Sep 2023 09:52:20 +0300 Subject: [PATCH 531/537] fix(backups): expose snapshot reason and set the default value --- selfprivacy_api/backup/__init__.py | 22 +++++++++---------- selfprivacy_api/dependencies.py | 2 +- .../graphql/common_types/service.py | 2 ++ selfprivacy_api/graphql/queries/backup.py | 1 + selfprivacy_api/models/backup/snapshot.py | 2 +- setup.py | 2 +- tests/test_graphql/test_api_backup.py | 1 + 7 files changed, 18 insertions(+), 14 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 265ee0f..72d1567 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -335,27 +335,27 @@ class Backups: buckets: list[RotationBucket] = [ RotationBucket( - quotas.last, + quotas.last, # type: ignore -1, lambda _, index: index, ), RotationBucket( - quotas.daily, + quotas.daily, # type: ignore -1, lambda date, _: date.year * 10000 + date.month * 100 + date.day, ), RotationBucket( - quotas.weekly, + quotas.weekly, # type: ignore -1, lambda date, _: date.year * 100 + date.isocalendar()[1], ), RotationBucket( - quotas.monthly, + quotas.monthly, # type: ignore -1, lambda date, _: date.year * 100 + date.month, ), RotationBucket( - quotas.yearly, + quotas.yearly, # type: ignore -1, lambda date, _: date.year, ), @@ -409,11 +409,11 @@ class Backups: Storage.set_autobackup_quotas( AutobackupQuotas( - last=Backups._standardize_quotas(quotas.last), - daily=Backups._standardize_quotas(quotas.daily), - weekly=Backups._standardize_quotas(quotas.weekly), - monthly=Backups._standardize_quotas(quotas.monthly), - yearly=Backups._standardize_quotas(quotas.yearly), + last=Backups._standardize_quotas(quotas.last), # type: ignore + daily=Backups._standardize_quotas(quotas.daily), # type: ignore + weekly=Backups._standardize_quotas(quotas.weekly), # type: ignore + monthly=Backups._standardize_quotas(quotas.monthly), # type: ignore + yearly=Backups._standardize_quotas(quotas.yearly), # type: ignore ) ) @@ -438,7 +438,7 @@ class Backups: job: Job, ) -> None: Jobs.update( - job, status=JobStatus.CREATED, status_text=f"Waiting for pre-restore backup" + job, status=JobStatus.CREATED, status_text="Waiting for pre-restore backup" ) failsafe_snapshot = Backups.back_up(service, BackupReason.PRE_RESTORE) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 9e144fd..35cf9e1 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.4.0" + return "2.4.1" diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 836a3df..319ce3e 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -2,6 +2,7 @@ from enum import Enum import typing import strawberry import datetime +from selfprivacy_api.graphql.common_types.backup import BackupReason from selfprivacy_api.graphql.common_types.dns import DnsRecord from selfprivacy_api.services import get_service_by_id, get_services_by_location @@ -114,6 +115,7 @@ class SnapshotInfo: id: str service: Service created_at: datetime.datetime + reason: BackupReason def service_to_graphql_service(service: ServiceInterface) -> Service: diff --git 
a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 6d47a8c..fc5f78a 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -77,6 +77,7 @@ class Backup: id=snap.id, service=service, created_at=snap.created_at, + reason=snap.reason, ) result.append(graphql_snap) return result diff --git a/selfprivacy_api/models/backup/snapshot.py b/selfprivacy_api/models/backup/snapshot.py index 28ad661..b2831e7 100644 --- a/selfprivacy_api/models/backup/snapshot.py +++ b/selfprivacy_api/models/backup/snapshot.py @@ -8,4 +8,4 @@ class Snapshot(BaseModel): id: str service_name: str created_at: datetime.datetime - reason: BackupReason + reason: BackupReason = BackupReason.EXPLICIT diff --git a/setup.py b/setup.py index 5ce3947..44d2336 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.4.0", + version="2.4.1", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 14410e3..225abf7 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -145,6 +145,7 @@ allSnapshots { id } createdAt + reason } """ From 62d5de0dd63ed1afa3488276f50e5010ff51787e Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 3 Oct 2023 16:41:09 +0300 Subject: [PATCH 532/537] refactor(ssh): Remove unused add_root_ssh_key function --- selfprivacy_api/actions/ssh.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/selfprivacy_api/actions/ssh.py b/selfprivacy_api/actions/ssh.py index 3f79ff8..8a92735 100644 --- a/selfprivacy_api/actions/ssh.py +++ b/selfprivacy_api/actions/ssh.py @@ -49,19 +49,6 @@ def set_ssh_settings( data["ssh"]["passwordAuthentication"] = password_authentication -def add_root_ssh_key(public_key: str): - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - # Return 409 if key already in array - for key in data["ssh"]["rootKeys"]: - if key == public_key: - raise KeyAlreadyExists() - data["ssh"]["rootKeys"].append(public_key) - - class KeyAlreadyExists(Exception): """Key already exists""" From cebb71ff4a5851864f66699a679d5875a3142154 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 3 Oct 2023 16:51:06 +0300 Subject: [PATCH 533/537] feat(ssh): Add support for ecdsa keys --- selfprivacy_api/graphql/mutations/users_mutations.py | 2 +- selfprivacy_api/utils/__init__.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/users_mutations.py b/selfprivacy_api/graphql/mutations/users_mutations.py index f7317fb..57825bc 100644 --- a/selfprivacy_api/graphql/mutations/users_mutations.py +++ b/selfprivacy_api/graphql/mutations/users_mutations.py @@ -147,7 +147,7 @@ class UsersMutations: except InvalidPublicKey: return UserMutationReturn( success=False, - message="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", + message="Invalid key type. Only ssh-ed25519, ssh-rsa and ecdsa are supported", code=400, ) except UserNotFound: diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 96bf9d8..40ed5b6 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -88,10 +88,12 @@ class ReadUserData(object): def validate_ssh_public_key(key): - """Validate SSH public key. 
It may be ssh-ed25519 or ssh-rsa.""" + """Validate SSH public key. + It may be ssh-ed25519, ssh-rsa or ecdsa-sha2-nistp256.""" if not key.startswith("ssh-ed25519"): if not key.startswith("ssh-rsa"): - return False + if not key.startswith("ecdsa-sha2-nistp256"): + return False return True From 07aaa21602e9afb0088b06ba17f973b30d49cb5d Mon Sep 17 00:00:00 2001 From: Inex Code Date: Mon, 9 Oct 2023 12:45:22 +0300 Subject: [PATCH 534/537] chore: bump version --- selfprivacy_api/dependencies.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 35cf9e1..1955601 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.4.1" + return "2.4.2" diff --git a/setup.py b/setup.py index 44d2336..399b157 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.4.1", + version="2.4.2", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From 0912ac183198f7882d60dafd1d3d1333043752d9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Sep 2023 17:56:04 +0000 Subject: [PATCH 535/537] feature(jobs): set ttl via method --- selfprivacy_api/jobs/__init__.py | 8 ++++++++ tests/test_jobs.py | 6 ++++++ 2 files changed, 14 insertions(+) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 3fe452b..05b5ab8 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -224,6 +224,14 @@ class Jobs: return job + @staticmethod + def set_expiration(job: Job, expiration_seconds: int) -> Job: + redis = RedisPool().get_connection() + key = _redis_key_from_uuid(job.uid) + if redis.exists(key): + redis.expire(key, expiration_seconds) + return job + @staticmethod def get_job(uid: str) -> typing.Optional[Job]: """ diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 0a4271e..c0e2125 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -49,6 +49,12 @@ def test_remove_get_nonexistent(jobs_with_one_job): assert jobs_with_one_job.get_job(uid_str) is None +def test_set_zeroing_ttl(jobs_with_one_job): + test_job = jobs_with_one_job.get_jobs()[0] + jobs_with_one_job.set_expiration(test_job, 0) + assert jobs_with_one_job.get_jobs() == [] + + def test_jobs(jobs_with_one_job): jobs = jobs_with_one_job test_job = jobs_with_one_job.get_jobs()[0] From 26c0a8fafe3a76fddffedcbf2c5b8f6c21a78020 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Sep 2023 18:09:39 +0000 Subject: [PATCH 536/537] feature(jobs): set 1 hour ttl for successful autobackup jobs --- selfprivacy_api/backup/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 72d1567..aa11f7f 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -56,6 +56,8 @@ BACKUP_PROVIDER_ENVS = { "location": "BACKUP_LOCATION", } +AUTOBACKUP_JOB_EXPIRATION_SECONDS = 60 * 60 # one hour + class NotDeadError(AssertionError): """ @@ -316,6 +318,8 @@ class Backups: raise error Jobs.update(job, status=JobStatus.FINISHED) + if reason in [BackupReason.AUTO, BackupReason.PRE_RESTORE]: + Jobs.set_expiration(job, AUTOBACKUP_JOB_EXPIRATION_SECONDS) return snapshot @staticmethod From ece3258c7858ebf42c08d339dbd2320152acab26 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 
Sep 2023 18:13:22 +0000 Subject: [PATCH 537/537] test(jobs): test out setting ttl --- tests/test_jobs.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index c0e2125..64cf457 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -1,6 +1,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument import pytest +from time import sleep from selfprivacy_api.jobs import Jobs, JobStatus import selfprivacy_api.jobs as jobsmodule @@ -55,6 +56,14 @@ def test_set_zeroing_ttl(jobs_with_one_job): assert jobs_with_one_job.get_jobs() == [] +def test_not_zeroing_ttl(jobs_with_one_job): + test_job = jobs_with_one_job.get_jobs()[0] + jobs_with_one_job.set_expiration(test_job, 1) + assert len(jobs_with_one_job.get_jobs()) == 1 + sleep(1.2) + assert len(jobs_with_one_job.get_jobs()) == 0 + + def test_jobs(jobs_with_one_job): jobs = jobs_with_one_job test_job = jobs_with_one_job.get_jobs()[0]
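A closing note on the TTL semantics that test_set_zeroing_ttl and test_not_zeroing_ttl pin down: Jobs.set_expiration is a thin wrapper over Redis EXPIRE, and Redis deletes a key outright when given a non-positive timeout, while a positive timeout lets the key linger until the clock runs out. A minimal sketch against a local Redis; the key name is made up for illustration and the redis-py client plus a server on localhost are assumed:

from time import sleep

import redis  # assumption: redis-py is installed and a server listens on localhost:6379

r = redis.Redis()
r.hset("jobs:demo", mapping={"status": "FINISHED"})  # hypothetical key; jobs live in redis hashes

r.expire("jobs:demo", 0)  # non-positive TTL: Redis removes the key immediately
assert not r.exists("jobs:demo")

r.hset("jobs:demo", mapping={"status": "FINISHED"})
r.expire("jobs:demo", 1)  # positive TTL: the key survives until it expires
assert r.exists("jobs:demo")
sleep(1.2)
assert not r.exists("jobs:demo")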