refactor(backups): fix typing errors

Inex Code 2023-06-23 12:40:10 +03:00
parent c9cfb7d7bc
commit e7e0fdc4a1
14 changed files with 265 additions and 119 deletions

View File

@ -1,3 +1,4 @@
from operator import add
from typing import List, Optional
from datetime import datetime, timedelta
from os import statvfs
@ -9,7 +10,9 @@ from selfprivacy_api.utils import ReadUserData, WriteUserData
from selfprivacy_api.services import get_service_by_id
from selfprivacy_api.services.service import Service
from selfprivacy_api.graphql.queries.providers import BackupProvider
from selfprivacy_api.graphql.queries.providers import (
BackupProvider as BackupProviderEnum,
)
from selfprivacy_api.backup.providers.provider import AbstractBackupProvider
from selfprivacy_api.backup.providers import get_provider
@ -33,12 +36,15 @@ DEFAULT_JSON_PROVIDER = {
class Backups:
"""A singleton controller for backups"""
provider: AbstractBackupProvider
@staticmethod
def set_localfile_repo(file_path: str):
ProviderClass = get_provider(BackupProvider.FILE)
provider = ProviderClass(login="", key="", location=file_path, repo_id="")
ProviderClass = get_provider(BackupProviderEnum.FILE)
provider = ProviderClass(
login="",
key="",
location=file_path,
repo_id="",
)
Storage.store_provider(provider)
@staticmethod
@ -67,7 +73,14 @@ class Backups:
@staticmethod
def _service_ids_to_back_up(time: datetime) -> List[str]:
services = Storage.services_with_autobackup()
return [id for id in services if Backups.is_time_to_backup_service(id, time)]
return [
id
for id in services
if Backups.is_time_to_backup_service(
id,
time,
)
]
@staticmethod
def services_to_back_up(time: datetime) -> List[Service]:
@ -75,14 +88,17 @@ class Backups:
for id in Backups._service_ids_to_back_up(time):
service = get_service_by_id(id)
if service is None:
raise ValueError("Cannot look up a service scheduled for backup!")
raise ValueError(
"Cannot look up a service scheduled for backup!",
)
result.append(service)
return result
@staticmethod
def is_time_to_backup(time: datetime) -> bool:
"""
Intended as a time validator for huey cron scheduler of automatic backups
Intended as a time validator for huey cron scheduler
of automatic backups
"""
return Backups._service_ids_to_back_up(time) != []
@ -97,7 +113,8 @@ class Backups:
last_backup = Storage.get_last_backup_time(service_id)
if last_backup is None:
return True # queue a backup immediately if there are no previous backups
# queue a backup immediately if there are no previous backups
return True
if time > last_backup + timedelta(minutes=period):
return True
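
The scheduling check above is plain datetime arithmetic: back up immediately when no previous backup is recorded, otherwise only once the configured period has elapsed. A minimal standalone sketch of the same decision (function and argument names here are illustrative, not the module's API):

from datetime import datetime, timedelta
from typing import Optional

def is_due(now: datetime, last_backup: Optional[datetime], period_minutes: int) -> bool:
    # No previous backup recorded: queue one immediately.
    if last_backup is None:
        return True
    # Otherwise a backup is due once the period has fully elapsed.
    return now > last_backup + timedelta(minutes=period_minutes)

# With a 24h period, a service last backed up 25 hours ago is due again.
assert is_due(datetime(2023, 6, 23, 12, 0), datetime(2023, 6, 22, 11, 0), 24 * 60)
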
@ -121,7 +138,8 @@ class Backups:
def set_autobackup_period_minutes(minutes: int):
"""
0 and negative numbers are equivalent to disable.
Setting to a positive number may result in a backup very soon if some services are not backed up.
Setting to a positive number may result in a backup very soon
if some services are not backed up.
"""
if minutes <= 0:
Backups.disable_all_autobackup()
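
A short usage sketch of the semantics spelled out in the docstring (the period value is illustrative):

# Enable automatic backups with a period of one day per service:
Backups.set_autobackup_period_minutes(24 * 60)

# Zero or a negative value disables automatic backups entirely:
Backups.set_autobackup_period_minutes(0)
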
@ -130,7 +148,10 @@ class Backups:
@staticmethod
def disable_all_autobackup():
"""disables all automatic backing up, but does not change per-service settings"""
"""
Disables all automatic backing up,
but does not change per-service settings
"""
Storage.delete_backup_period()
@staticmethod
@ -138,17 +159,38 @@ class Backups:
return Backups.lookup_provider()
@staticmethod
def set_provider(kind: str, login: str, key: str, location: str, repo_id: str = ""):
provider = Backups.construct_provider(kind, login, key, location, repo_id)
def set_provider(
kind: BackupProviderEnum,
login: str,
key: str,
location: str,
repo_id: str = "",
):
provider = Backups.construct_provider(
kind,
login,
key,
location,
repo_id,
)
Storage.store_provider(provider)
@staticmethod
def construct_provider(
kind: str, login: str, key: str, location: str, repo_id: str = ""
):
provider_class = get_provider(BackupProvider[kind])
kind: BackupProviderEnum,
login: str,
key: str,
location: str,
repo_id: str = "",
) -> AbstractBackupProvider:
provider_class = get_provider(kind)
return provider_class(login=login, key=key, location=location, repo_id=repo_id)
return provider_class(
login=login,
key=key,
location=location,
repo_id=repo_id,
)
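
With set_provider() now taking the enum instead of a raw string, a caller configures a provider roughly as below. This is a sketch: the import path for Backups and the credential values are placeholders, and real credentials arrive via the GraphQL mutation shown later in this commit.

from selfprivacy_api.backup import Backups
from selfprivacy_api.graphql.queries.providers import (
    BackupProvider as BackupProviderEnum,
)

Backups.set_provider(
    kind=BackupProviderEnum.BACKBLAZE,
    login="application-key-id",   # placeholder credential
    key="application-key",        # placeholder credential
    location="my-backup-bucket",  # placeholder bucket name
)
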
@staticmethod
def reset(reset_json=True):
@ -156,7 +198,8 @@ class Backups:
if reset_json:
try:
Backups.reset_provider_json()
except FileNotFoundError: # if there is no userdata file, we do not need to reset it
except FileNotFoundError:
# if there is no userdata file, we do not need to reset it
pass
@staticmethod
@ -175,7 +218,7 @@ class Backups:
return json_provider
none_provider = Backups.construct_provider(
"NONE", login="", key="", location=""
BackupProviderEnum.NONE, login="", key="", location=""
)
Storage.store_provider(none_provider)
return none_provider
@ -200,15 +243,18 @@ class Backups:
if provider_dict == DEFAULT_JSON_PROVIDER:
return None
try:
return Backups.construct_provider(
kind=BackupProviderEnum[provider_dict["provider"]],
login=provider_dict["accountId"],
key=provider_dict["accountKey"],
location=provider_dict["bucket"],
)
except KeyError:
return None
return Backups.construct_provider(
kind=provider_dict["provider"],
login=provider_dict["accountId"],
key=provider_dict["accountKey"],
location=provider_dict["bucket"],
)
def reset_provider_json() -> AbstractBackupProvider:
@staticmethod
def reset_provider_json() -> None:
with WriteUserData() as user_data:
if "backblaze" in user_data.keys():
del user_data["backblaze"]
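
For orientation, the "backup" block in user data that this code reads ("provider", "accountId", "accountKey", "bucket") and that reset_provider_json() resets to DEFAULT_JSON_PROVIDER has roughly the following shape; the keys come from the lookups in this file, the values are placeholders:

user_data["backup"] = {
    "provider": "BACKBLAZE",
    "accountId": "<application key id>",
    "accountKey": "<application key>",
    "bucket": "<bucket name>",
}
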
@ -216,12 +262,12 @@ class Backups:
user_data["backup"] = DEFAULT_JSON_PROVIDER
@staticmethod
def load_provider_redis() -> AbstractBackupProvider:
def load_provider_redis() -> Optional[AbstractBackupProvider]:
provider_model = Storage.load_provider()
if provider_model is None:
return None
return Backups.construct_provider(
provider_model.kind,
BackupProviderEnum[provider_model.kind],
provider_model.login,
provider_model.key,
provider_model.location,
@ -232,7 +278,7 @@ class Backups:
def back_up(service: Service):
"""The top-level function to back up a service"""
folders = service.get_folders()
repo_name = service.get_id()
tag = service.get_id()
job = get_backup_job(service)
if job is None:
@ -241,8 +287,11 @@ class Backups:
try:
service.pre_backup()
snapshot = Backups.provider().backuper.start_backup(folders, repo_name)
Backups._store_last_snapshot(repo_name, snapshot)
snapshot = Backups.provider().backuper.start_backup(
folders,
tag,
)
Backups._store_last_snapshot(tag, snapshot)
service.post_restore()
except Exception as e:
Jobs.update(job, status=JobStatus.ERROR)
@ -252,10 +301,7 @@ class Backups:
return snapshot
@staticmethod
def init_repo(service: Optional[Service] = None):
if service is not None:
repo_name = service.get_id()
def init_repo():
Backups.provider().backuper.init()
Storage.mark_as_init()
@ -274,7 +320,13 @@ class Backups:
@staticmethod
def get_snapshots(service: Service) -> List[Snapshot]:
snapshots = Backups.get_all_snapshots()
return [snap for snap in snapshots if snap.service_name == service.get_id()]
service_id = service.get_id()
return list(
filter(
lambda snap: snap.service_name == service_id,
snapshots,
)
)
@staticmethod
def get_all_snapshots() -> List[Snapshot]:
@ -314,10 +366,12 @@ class Backups:
# to be deprecated/internalized in favor of restore_snapshot()
@staticmethod
def restore_service_from_snapshot(service: Service, snapshot_id: str):
repo_name = service.get_id()
folders = service.get_folders()
Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folders)
Backups.provider().backuper.restore_from_backup(
snapshot_id,
folders,
)
@staticmethod
def assert_restorable(snapshot: Snapshot):
@ -327,45 +381,58 @@ class Backups:
f"snapshot has a nonexistent service: {snapshot.service_name}"
)
needed_space = Backups.snapshot_restored_size(snapshot)
needed_space = Backups.service_snapshot_size(snapshot.id)
available_space = Backups.space_usable_for_service(service)
if needed_space > available_space:
raise ValueError(
f"we only have {available_space} bytes but snapshot needs{ needed_space}"
f"we only have {available_space} bytes "
f"but snapshot needs {needed_space}"
)
@staticmethod
def restore_snapshot(snapshot: Snapshot):
service = get_service_by_id(snapshot.service_name)
if service is None:
raise ValueError(
f"snapshot has a nonexistent service: {snapshot.service_name}"
)
job = get_restore_job(service)
if job is None:
job = add_restore_job(snapshot)
Jobs.update(job, status=JobStatus.RUNNING)
Jobs.update(
job,
status=JobStatus.RUNNING,
)
try:
Backups.assert_restorable(snapshot)
Backups.restore_service_from_snapshot(service, snapshot.id)
Backups.restore_service_from_snapshot(
service,
snapshot.id,
)
service.post_restore()
except Exception as e:
Jobs.update(job, status=JobStatus.ERROR)
Jobs.update(
job,
status=JobStatus.ERROR,
)
raise e
Jobs.update(job, status=JobStatus.FINISHED)
@staticmethod
def service_snapshot_size(service: Service, snapshot_id: str) -> float:
repo_name = service.get_id()
return Backups.provider().backuper.restored_size(repo_name, snapshot_id)
@staticmethod
def snapshot_restored_size(snapshot: Snapshot) -> float:
return Backups.service_snapshot_size(
get_service_by_id(snapshot.service_name), snapshot.id
Jobs.update(
job,
status=JobStatus.FINISHED,
)
@staticmethod
def space_usable_for_service(service: Service) -> bool:
def service_snapshot_size(snapshot_id: str) -> int:
return Backups.provider().backuper.restored_size(
snapshot_id,
)
@staticmethod
def space_usable_for_service(service: Service) -> int:
folders = service.get_folders()
if folders == []:
raise ValueError("unallocated service", service.get_id())

View File

@ -26,14 +26,14 @@ class AbstractBackuper(ABC):
raise NotImplementedError
@abstractmethod
def init(self, repo_name):
def init(self):
raise NotImplementedError
@abstractmethod
def restore_from_backup(self, repo_name: str, snapshot_id: str, folders: List[str]):
def restore_from_backup(self, snapshot_id: str, folders: List[str]):
"""Restore a target folder using a snapshot"""
raise NotImplementedError
@abstractmethod
def restored_size(self, repo_name, snapshot_id) -> float:
def restored_size(self, snapshot_id: str) -> int:
raise NotImplementedError

View File

@ -18,12 +18,12 @@ class NoneBackupper(AbstractBackuper):
"""Get all snapshots from the repo"""
return []
def init(self, repo_name):
def init(self):
raise NotImplementedError
def restore_from_backup(self, repo_name: str, snapshot_id: str, folders: List[str]):
def restore_from_backup(self, snapshot_id: str, folders: List[str]):
"""Restore a target folder using a snapshot"""
raise NotImplementedError
def restored_size(self, repo_name, snapshot_id) -> float:
def restored_size(self, snapshot_id: str) -> int:
raise NotImplementedError

View File

@ -50,7 +50,7 @@ class ResticBackuper(AbstractBackuper):
def _password_command(self):
return f"echo {LocalBackupSecret.get()}"
def restic_command(self, *args, branch_name: str = ""):
def restic_command(self, *args, tag: str = ""):
command = [
"restic",
"-o",
@ -60,11 +60,11 @@ class ResticBackuper(AbstractBackuper):
"--password-command",
self._password_command(),
]
if branch_name != "":
if tag != "":
command.extend(
[
"--tag",
branch_name,
tag,
]
)
if args != []:
@ -92,10 +92,10 @@ class ResticBackuper(AbstractBackuper):
universal_newlines=True,
) as handle:
for line in iter(handle.stdout.readline, ""):
if not "NOTICE:" in line:
if "NOTICE:" not in line:
yield line
def start_backup(self, folders: List[str], repo_name: str):
def start_backup(self, folders: List[str], tag: str):
"""
Start backup with restic
"""
@ -107,16 +107,16 @@ class ResticBackuper(AbstractBackuper):
"backup",
"--json",
folders,
branch_name=repo_name,
tag=tag,
)
messages = []
job = get_backup_job(get_service_by_id(repo_name))
job = get_backup_job(get_service_by_id(tag))
try:
for raw_message in ResticBackuper.output_yielder(backup_command):
message = self.parse_message(raw_message, job)
messages.append(message)
return ResticBackuper._snapshot_from_backup_messages(messages, repo_name)
return ResticBackuper._snapshot_from_backup_messages(messages, tag)
except ValueError as e:
raise ValueError("could not create a snapshot: ", messages) from e
@ -128,7 +128,7 @@ class ResticBackuper(AbstractBackuper):
raise ValueError("no summary message in restic json output")
def parse_message(self, raw_message, job=None) -> object:
message = self.parse_json_output(raw_message)
message = ResticBackuper.parse_json_output(raw_message)
if message["message_type"] == "status":
if job is not None: # only update status if we run under some job
Jobs.update(
@ -168,12 +168,12 @@ class ResticBackuper(AbstractBackuper):
with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle:
output = handle.communicate()[0].decode("utf-8")
if not self.has_json(output):
if not ResticBackuper.has_json(output):
return False
# raise NotImplementedError("error(big): " + output)
return True
def restored_size(self, repo_name, snapshot_id) -> float:
def restored_size(self, snapshot_id: str) -> int:
"""
Size of a snapshot
"""
@ -183,15 +183,19 @@ class ResticBackuper(AbstractBackuper):
"--json",
)
with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle:
with subprocess.Popen(
command,
stdout=subprocess.PIPE,
shell=False,
) as handle:
output = handle.communicate()[0].decode("utf-8")
try:
parsed_output = self.parse_json_output(output)
parsed_output = ResticBackuper.parse_json_output(output)
return parsed_output["total_size"]
except ValueError as e:
raise ValueError("cannot restore a snapshot: " + output) from e
def restore_from_backup(self, repo_name, snapshot_id, folders):
def restore_from_backup(self, snapshot_id, folders):
"""
Restore from backup with restic
"""
@ -235,7 +239,7 @@ class ResticBackuper(AbstractBackuper):
if "Is there a repository at the following location?" in output:
raise ValueError("No repository! : " + output)
try:
return self.parse_json_output(output)
return ResticBackuper.parse_json_output(output)
except ValueError as e:
raise ValueError("Cannot load snapshots: ") from e
@ -252,8 +256,9 @@ class ResticBackuper(AbstractBackuper):
snapshots.append(snapshot)
return snapshots
def parse_json_output(self, output: str) -> object:
starting_index = self.json_start(output)
@staticmethod
def parse_json_output(output: str) -> object:
starting_index = ResticBackuper.json_start(output)
if starting_index == -1:
raise ValueError("There is no json in the restic output : " + output)
@ -273,7 +278,8 @@ class ResticBackuper(AbstractBackuper):
result_array.append(json.loads(message))
return result_array
def json_start(self, output: str) -> int:
@staticmethod
def json_start(output: str) -> int:
indices = [
output.find("["),
output.find("{"),
@ -284,7 +290,8 @@ class ResticBackuper(AbstractBackuper):
return -1
return min(indices)
def has_json(self, output: str) -> bool:
if self.json_start(output) == -1:
@staticmethod
def has_json(output: str) -> bool:
if ResticBackuper.json_start(output) == -1:
return False
return True
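
Since json_start(), has_json() and parse_json_output() are now static, they can be exercised without a configured backupper instance. A small illustration of the intended behaviour on mixed restic output (the sample strings are made up):

noisy = 'unable to open cache, continuing anyway\n[{"message_type": "summary"}]'

# json_start() returns the index of the first "[" or "{", or -1 if neither exists,
# and has_json() builds on it.
assert ResticBackuper.json_start(noisy) == noisy.find("[")
assert ResticBackuper.has_json(noisy)
assert not ResticBackuper.has_json("Fatal: unable to open repository")
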

View File

@ -51,6 +51,8 @@ def add_backup_job(service: Service) -> Job:
def add_restore_job(snapshot: Snapshot) -> Job:
service = get_service_by_id(snapshot.service_name)
if service is None:
raise ValueError(f"no such service: {snapshot.service_name}")
if is_something_queued_for(service):
message = (
f"Cannot start a restore of {service.get_id()}, another operation is queued: "

View File

@ -1,23 +1,29 @@
from selfprivacy_api.graphql.queries.providers import BackupProvider
from typing import Type
from selfprivacy_api.graphql.queries.providers import (
BackupProvider as BackupProviderEnum,
)
from selfprivacy_api.backup.providers.provider import AbstractBackupProvider
from selfprivacy_api.backup.providers.backblaze import Backblaze
from selfprivacy_api.backup.providers.memory import InMemoryBackup
from selfprivacy_api.backup.providers.local_file import LocalFileBackup
from selfprivacy_api.backup.providers.none import NoBackups
PROVIDER_MAPPING = {
BackupProvider.BACKBLAZE: Backblaze,
BackupProvider.MEMORY: InMemoryBackup,
BackupProvider.FILE: LocalFileBackup,
BackupProvider.NONE: AbstractBackupProvider,
PROVIDER_MAPPING: dict[BackupProviderEnum, Type[AbstractBackupProvider]] = {
BackupProviderEnum.BACKBLAZE: Backblaze,
BackupProviderEnum.MEMORY: InMemoryBackup,
BackupProviderEnum.FILE: LocalFileBackup,
BackupProviderEnum.NONE: NoBackups,
}
def get_provider(provider_type: BackupProvider) -> AbstractBackupProvider:
def get_provider(
provider_type: BackupProviderEnum,
) -> Type[AbstractBackupProvider]:
return PROVIDER_MAPPING[provider_type]
def get_kind(provider: AbstractBackupProvider) -> str:
for key, value in PROVIDER_MAPPING.items():
if isinstance(provider, value):
return key.value
"""Get the kind of the provider in the form of a string"""
return provider.name.value
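
A short usage sketch of the typed mapping: get_provider() returns a class for the caller to instantiate, and get_kind() now just reads the provider's enum-valued name. The empty-credentials instantiation mirrors set_localfile_repo() earlier in this commit, and the final comparison assumes the enum's value mirrors its member name:

ProviderClass = get_provider(BackupProviderEnum.MEMORY)
provider = ProviderClass(login="", key="", location="", repo_id="")

assert isinstance(provider, AbstractBackupProvider)
assert get_kind(provider) == "MEMORY"  # BackupProviderEnum.MEMORY.value
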

View File

@ -1,8 +1,13 @@
from .provider import AbstractBackupProvider
from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper
from selfprivacy_api.graphql.queries.providers import (
BackupProvider as BackupProviderEnum,
)
class Backblaze(AbstractBackupProvider):
backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:")
@property
def backuper(self):
return ResticBackuper("--b2-account", "--b2-key", ":b2:")
name = "BACKBLAZE"
name = BackupProviderEnum.BACKBLAZE

View File

@ -1,7 +1,13 @@
from .provider import AbstractBackupProvider
from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper
from selfprivacy_api.graphql.queries.providers import (
BackupProvider as BackupProviderEnum,
)
class LocalFileBackup(AbstractBackupProvider):
backuper = ResticBackuper("", "", ":local:")
name = "FILE"
@property
def backuper(self):
return ResticBackuper("", "", ":local:")
name = BackupProviderEnum.FILE

View File

@ -1,8 +1,13 @@
from .provider import AbstractBackupProvider
from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackuper
from selfprivacy_api.graphql.queries.providers import (
BackupProvider as BackupProviderEnum,
)
class InMemoryBackup(AbstractBackupProvider):
backuper = ResticBackuper("", "", ":memory:")
@property
def backuper(self):
return ResticBackuper("", "", ":memory:")
name = "MEMORY"
name = BackupProviderEnum.MEMORY

View File

@ -0,0 +1,13 @@
from .provider import AbstractBackupProvider
from selfprivacy_api.backup.backuppers.none_backupper import NoneBackupper
from selfprivacy_api.graphql.queries.providers import (
BackupProvider as BackupProviderEnum,
)
class NoBackups(AbstractBackupProvider):
@property
def backuper(self):
return NoneBackupper()
name = BackupProviderEnum.NONE
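
The new NoBackups provider pairs with NoneBackupper: it can be constructed and stored like any other provider (the NONE fallback in the provider lookup relies on that), but the actual backup operations raise. A small illustration, assuming empty credentials are acceptable as they are for the other providers in this diff:

provider = NoBackups(login="", key="", location="")
# provider.backuper.init()  # would raise NotImplementedError
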

View File

@ -1,19 +1,22 @@
"""
An abstract class for BackBlaze, S3 etc.
It assumes that while some providers are supported via restic/rclone, others may
require different backends
It assumes that while some providers are supported via restic/rclone, others
may require different backends
"""
from abc import ABC
from abc import ABC, abstractmethod
from selfprivacy_api.backup.backuppers import AbstractBackuper
from selfprivacy_api.backup.backuppers.none_backupper import NoneBackupper
from selfprivacy_api.graphql.queries.providers import (
BackupProvider as BackupProviderEnum,
)
class AbstractBackupProvider(ABC):
@property
@abstractmethod
def backuper(self) -> AbstractBackuper:
return NoneBackupper()
raise NotImplementedError
name = "NONE"
name: BackupProviderEnum
def __init__(self, login="", key="", location="", repo_id=""):
self.backuper.set_creds(login, key, location)
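
Turning backuper into an abstract property means the base class no longer doubles as the "none" provider (that role moves to NoBackups), and any subclass that forgets to supply a backuper now fails at instantiation time rather than silently falling back. A minimal illustration; the import path is the one used elsewhere in this commit:

from selfprivacy_api.backup.providers.provider import AbstractBackupProvider

class Incomplete(AbstractBackupProvider):
    pass  # no backuper property implemented

try:
    Incomplete()
except TypeError as error:
    # e.g. "Can't instantiate abstract class Incomplete with abstract method backuper"
    print(error)
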

View File

@ -5,7 +5,10 @@ from selfprivacy_api.models.backup.snapshot import Snapshot
from selfprivacy_api.models.backup.provider import BackupProviderModel
from selfprivacy_api.utils.redis_pool import RedisPool
from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model
from selfprivacy_api.utils.redis_model_storage import (
store_model_as_hash,
hash_as_model,
)
from selfprivacy_api.services.service import Service
@ -153,8 +156,12 @@ class Storage:
)
@staticmethod
def load_provider() -> BackupProviderModel:
provider_model = hash_as_model(redis, REDIS_PROVIDER_KEY, BackupProviderModel)
def load_provider() -> Optional[BackupProviderModel]:
provider_model = hash_as_model(
redis,
REDIS_PROVIDER_KEY,
BackupProviderModel,
)
return provider_model
@staticmethod

View File

@ -49,7 +49,7 @@ class BackupMutations:
) -> GenericBackupConfigReturn:
"""Initialize a new repository"""
Backups.set_provider(
kind=repository.provider.value,
kind=repository.provider,
login=repository.login,
key=repository.password,
location=repository.location_name,
@ -57,7 +57,10 @@ class BackupMutations:
)
Backups.init_repo()
return GenericBackupConfigReturn(
success=True, message="", code="200", configuration=Backup().configuration()
success=True,
message="",
code="200",
configuration=Backup().configuration(),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
@ -65,7 +68,10 @@ class BackupMutations:
"""Remove repository"""
Backups.reset()
return GenericBackupConfigReturn(
success=True, message="", code="200", configuration=Backup().configuration()
success=True,
message="",
code="200",
configuration=Backup().configuration(),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
@ -79,7 +85,10 @@ class BackupMutations:
Backups.set_autobackup_period_minutes(0)
return GenericBackupConfigReturn(
success=True, message="", code="200", configuration=Backup().configuration()
success=True,
message="",
code="200",
configuration=Backup().configuration(),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
@ -97,36 +106,52 @@ class BackupMutations:
job = add_backup_job(service)
start_backup(service)
job = job_to_api_job(job)
return GenericJobMutationReturn(
success=True,
code=200,
message="Backup job queued",
job=job,
job=job_to_api_job(job),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def restore_backup(self, snapshot_id: str) -> GenericJobMutationReturn:
"""Restore backup"""
snap = Backups.get_snapshot_by_id(snapshot_id)
service = get_service_by_id(snap.service_name)
if snap is None:
return GenericJobMutationReturn(
success=False,
code=400,
code=404,
message=f"No such snapshot: {snapshot_id}",
job=None,
)
job = add_restore_job(snap)
service = get_service_by_id(snap.service_name)
if service is None:
return GenericJobMutationReturn(
success=False,
code=404,
message=f"nonexistent service: {snap.service_name}",
job=None,
)
try:
job = add_restore_job(snap)
except ValueError as e:
return GenericJobMutationReturn(
success=False,
code=400,
message=str(e),
job=None,
)
restore_snapshot(snap)
return GenericJobMutationReturn(
success=True,
code=200,
message="restore job created",
job=job,
job=job_to_api_job(job),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])

View File

@ -73,7 +73,7 @@ def dummy_service(tmpdir, backups, raw_dummy_service):
assert not path.exists(repo_path)
# assert not repo_path
Backups.init_repo(service)
Backups.init_repo()
# register our service
services.services.append(service)
@ -232,7 +232,7 @@ def test_restore(backups, dummy_service):
def test_sizing(backups, dummy_service):
Backups.back_up(dummy_service)
snap = Backups.get_snapshots(dummy_service)[0]
size = Backups.service_snapshot_size(dummy_service, snap.id)
size = Backups.service_snapshot_size(snap.id)
assert size is not None
assert size > 0