Merge pull request 'feature(backups): remove all json logic' (#86) from do-not-load-backup-provider-from-json into master
continuous-integration/drone/push: Build is failing

Reviewed-on: #86
Reviewed-by: Inex Code <inex.code@selfprivacy.org>
houkime 2024-01-26 12:35:48 +02:00
commit 9e8326bbcf
4 changed files with 46 additions and 126 deletions
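
In short, this PR removes the JSON (userdata) fallback for backup provider configuration: the provider is now resolved from Redis only, defaulting to a NONE provider when nothing is stored. A minimal sketch of the resulting lookup flow, paraphrased from the diff below; the final store-and-return of the NONE provider is an assumption, since it sits outside the shown hunks:

    # Sketch only; names are taken from the diff below.
    @staticmethod
    def _lookup_provider() -> AbstractBackupProvider:
        redis_provider = Backups._load_provider_redis()
        if redis_provider is not None:
            return redis_provider
        # No JSON fallback anymore: fall back to a NONE provider.
        none_provider = Backups._construct_provider(
            BackupProviderEnum.NONE, login="", key="", location=""
        )
        Storage.store_provider(none_provider)  # assumed: persist the fallback
        return none_provider                   # assumed: not visible in the hunk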

View File

@@ -7,8 +7,6 @@ import os
 from os import statvfs
 from typing import Callable, List, Optional
-from selfprivacy_api.utils import ReadUserData, WriteUserData
 from selfprivacy_api.services import (
     get_service_by_id,
     get_all_services,
@@ -44,12 +42,6 @@ from selfprivacy_api.backup.jobs import (
     add_restore_job,
 )
-DEFAULT_JSON_PROVIDER = {
-    "provider": "BACKBLAZE",
-    "accountId": "",
-    "accountKey": "",
-    "bucket": "",
-}
 BACKUP_PROVIDER_ENVS = {
     "kind": "BACKUP_KIND",
@@ -134,17 +126,11 @@ class Backups:
         Storage.store_provider(provider)
     @staticmethod
-    def reset(reset_json=True) -> None:
+    def reset() -> None:
         """
         Deletes all the data about the backup storage provider.
         """
         Storage.reset()
-        if reset_json:
-            try:
-                Backups._reset_provider_json()
-            except FileNotFoundError:
-                # if there is no userdata file, we do not need to reset it
-                pass
     @staticmethod
     def _lookup_provider() -> AbstractBackupProvider:
@@ -152,15 +138,6 @@ class Backups:
         if redis_provider is not None:
             return redis_provider
-        try:
-            json_provider = Backups._load_provider_json()
-        except FileNotFoundError:
-            json_provider = None
-        if json_provider is not None:
-            Storage.store_provider(json_provider)
-            return json_provider
         none_provider = Backups._construct_provider(
             BackupProviderEnum.NONE, login="", key="", location=""
         )
@@ -215,44 +192,6 @@ class Backups:
             provider_model.repo_id,
         )
-    @staticmethod
-    def _load_provider_json() -> Optional[AbstractBackupProvider]:
-        with ReadUserData() as user_data:
-            provider_dict = {
-                "provider": "",
-                "accountId": "",
-                "accountKey": "",
-                "bucket": "",
-            }
-            if "backup" not in user_data.keys():
-                if "backblaze" in user_data.keys():
-                    provider_dict.update(user_data["backblaze"])
-                    provider_dict["provider"] = "BACKBLAZE"
-                return None
-            else:
-                provider_dict.update(user_data["backup"])
-            if provider_dict == DEFAULT_JSON_PROVIDER:
-                return None
-            try:
-                return Backups._construct_provider(
-                    kind=BackupProviderEnum[provider_dict["provider"]],
-                    login=provider_dict["accountId"],
-                    key=provider_dict["accountKey"],
-                    location=provider_dict["bucket"],
-                )
-            except KeyError:
-                return None
-    @staticmethod
-    def _reset_provider_json() -> None:
-        with WriteUserData() as user_data:
-            if "backblaze" in user_data.keys():
-                del user_data["backblaze"]
-            user_data["backup"] = DEFAULT_JSON_PROVIDER
     # Init
     @staticmethod

View File

@@ -138,18 +138,17 @@ class Storage:
     @staticmethod
     def store_provider(provider: AbstractBackupProvider) -> None:
-        """Stores backup stroage provider auth data in redis"""
-        store_model_as_hash(
-            redis,
-            REDIS_PROVIDER_KEY,
-            BackupProviderModel(
-                kind=get_kind(provider),
-                login=provider.login,
-                key=provider.key,
-                location=provider.location,
-                repo_id=provider.repo_id,
-            ),
-        )
+        """Stores backup provider auth data in redis"""
+        model = BackupProviderModel(
+            kind=get_kind(provider),
+            login=provider.login,
+            key=provider.key,
+            location=provider.location,
+            repo_id=provider.repo_id,
+        )
+        store_model_as_hash(redis, REDIS_PROVIDER_KEY, model)
+        if Storage.load_provider() != model:
+            raise IOError("could not store the provider model: ", model.dict)
     @staticmethod
     def load_provider() -> Optional[BackupProviderModel]:
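
The reworked store_provider also verifies the write: it reads the model back from Redis and raises IOError if the readback differs from what was stored. A short, illustrative usage sketch of that round trip; the literal values here are placeholders, not values from this repo:

    # Illustrative only: exercising the round-trip check added in this diff.
    provider = Backups._construct_provider(
        BackupProviderEnum.BACKBLAZE, login="ID", key="KEY", location="bucket-name"
    )
    Storage.store_provider(provider)   # raises IOError if load_provider() != stored model
    model = Storage.load_provider()    # BackupProviderModel read back from the Redis hash
    assert model is not None and model.kind == "BACKBLAZE"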

View File

@@ -77,11 +77,5 @@
         "rootKeys": [
             "ssh-ed25519 KEY test@pc"
         ]
-    },
-    "backup": {
-        "provider": "BACKBLAZE",
-        "accountId": "ID",
-        "accountKey": "KEY",
-        "bucket": "selfprivacy"
-    }
+    }
 }

View File

@@ -29,6 +29,7 @@ import selfprivacy_api.backup.providers as providers
 from selfprivacy_api.backup.providers import AbstractBackupProvider
 from selfprivacy_api.backup.providers.backblaze import Backblaze
 from selfprivacy_api.backup.providers.none import NoBackups
+from selfprivacy_api.backup.providers import get_kind
 from selfprivacy_api.backup.util import sync
 from selfprivacy_api.backup.tasks import (
@@ -82,11 +83,6 @@ def backups(tmpdir):
     Backups.erase_repo()
-@pytest.fixture()
-def backups_backblaze(generic_userdata):
-    Backups.reset(reset_json=False)
 @pytest.fixture()
 def memory_backup() -> AbstractBackupProvider:
     ProviderClass = providers.get_provider(BackupProvider.MEMORY)
@@ -106,20 +102,6 @@ def file_backup(tmpdir) -> AbstractBackupProvider:
     return provider
-def test_config_load(generic_userdata):
-    Backups.reset(reset_json=False)
-    provider = Backups.provider()
-    assert provider is not None
-    assert isinstance(provider, Backblaze)
-    assert provider.login == "ID"
-    assert provider.key == "KEY"
-    assert provider.location == "selfprivacy"
-    assert provider.backupper.account == "ID"
-    assert provider.backupper.key == "KEY"
 def test_reset_sets_to_none1():
     Backups.reset()
     provider = Backups.provider()
@@ -167,25 +149,6 @@ def test_setting_from_envs(tmpdir):
         del os.environ[key]
-def test_json_reset(generic_userdata):
-    Backups.reset(reset_json=False)
-    provider = Backups.provider()
-    assert provider is not None
-    assert isinstance(provider, Backblaze)
-    assert provider.login == "ID"
-    assert provider.key == "KEY"
-    assert provider.location == "selfprivacy"
-    Backups.reset()
-    provider = Backups.provider()
-    assert provider is not None
-    assert isinstance(provider, AbstractBackupProvider)
-    assert provider.login == ""
-    assert provider.key == ""
-    assert provider.location == ""
-    assert provider.repo_id == ""
 def test_select_backend():
     provider = providers.get_provider(BackupProvider.BACKBLAZE)
     assert provider is not None
@@ -570,20 +533,45 @@ def test_init_tracking_caching2(backups, tmpdir):
 # Storage
-def test_provider_storage(backups_backblaze):
-    provider = Backups.provider()
+def test_provider_storage(backups):
+    test_login = "ID"
+    test_key = "KEY"
+    test_location = "selprivacy_bin"
-    assert provider is not None
+    old_provider = Backups.provider()
+    assert old_provider is not None
-    assert isinstance(provider, Backblaze)
-    assert provider.login == "ID"
-    assert provider.key == "KEY"
+    assert not isinstance(old_provider, Backblaze)
+    assert old_provider.login != test_login
+    assert old_provider.key != test_key
+    assert old_provider.location != test_location
+    test_provider = Backups._construct_provider(
+        kind=BackupProvider.BACKBLAZE, login="ID", key=test_key, location=test_location
+    )
+    assert isinstance(test_provider, Backblaze)
+    assert get_kind(test_provider) == "BACKBLAZE"
+    assert test_provider.login == test_login
+    assert test_provider.key == test_key
+    assert test_provider.location == test_location
+    Storage.store_provider(test_provider)
+    restored_provider_model = Storage.load_provider()
+    assert restored_provider_model.kind == "BACKBLAZE"
+    assert restored_provider_model.login == test_login
+    assert restored_provider_model.key == test_key
+    assert restored_provider_model.location == test_location
-    Storage.store_provider(provider)
     restored_provider = Backups._load_provider_redis()
     assert isinstance(restored_provider, Backblaze)
-    assert restored_provider.login == "ID"
-    assert restored_provider.key == "KEY"
+    assert restored_provider.login == test_login
+    assert restored_provider.key == test_key
+    assert restored_provider.location == test_location
+    # Revert our mess so we can teardown ok
+    Storage.store_provider(old_provider)
 def test_sync(dummy_service):