Compare commits


82 Commits
master...l10n

Author SHA1 Message Date
Inex Code 47d1a0f4a4 refactor(i10l): Move services string localization to API layer 2023-06-01 22:42:27 +03:00
Houkime d9dab29fe8 test(backups): test 2-folder restoration 2023-04-29 15:28:21 +03:00
Houkime 9815281735 test(backups): actually back up 2 folders 2023-04-29 15:28:21 +03:00
Houkime d38872072d refactor(backups): set a list of folders for our dummy service 2023-04-29 15:28:21 +03:00
Houkime 47f2f857f6 refactor(backups): actually accept a list of folders 2023-04-29 15:28:21 +03:00
Houkime 406d255b2c refactor(backups): make api accept a list of folders 2023-04-29 15:28:21 +03:00
Houkime 69f63f04eb refactor(backups): make a dedicated get_folders() function 2023-04-29 15:28:21 +03:00
Houkime 170cf1923e refactor(services): rename get_location() to get_drive() 2023-04-29 15:28:21 +03:00
Houkime 5101f41437 test(backups): register dummy service 2023-04-29 15:28:21 +03:00
Inex Code a7feda02ec fix: Include the translation files in the project 2023-04-12 17:55:41 +03:00
Inex Code c7a65febe7 feat: Locale extension to parse the Accept-Language header 2023-04-12 16:59:23 +03:00
Inex Code e0ea004e80 feat: Test if getting headers works 2023-04-12 16:13:30 +03:00
Inex Code 9376fe151f feat(l10n): Add option for localizing the output of strings in Service classes 2023-04-12 14:55:34 +03:00
Houkime 3d4d05ff11 feature(backups): automatic backup 2023-04-10 16:35:35 +00:00
Houkime d7316f8e79 test(backups): test autobackup timing 2023-04-10 15:51:54 +00:00
Houkime a11627da7d refactor(backups): split out storage 2023-04-10 13:23:17 +00:00
Houkime 9d772ea2e2 test(backups): test that we do use cache 2023-04-07 18:12:05 +00:00
Houkime d68e9a4141 feature(backups): enable snapshot cache usage 2023-04-07 17:24:53 +00:00
Houkime 942c35b7e6 feature(backups): add snapshot cache sync functions 2023-04-07 15:41:02 +00:00
Houkime 644a0b96b8 test(backups): test last backup date retrieval 2023-04-07 15:18:54 +00:00
Houkime f6402f2394 feature(backups): add a datetime validator function for huey autobackups 2023-04-03 23:29:02 +00:00
Houkime aeec3ad0a2 test(backups): test setting autobackup period 2023-04-03 23:29:02 +00:00
Houkime 9edfe10128 test(backups): test setting services as enabled for autobackups 2023-04-03 23:29:02 +00:00
Houkime 7f99fd044e feature(backups): methods for autobackup period setting and getting 2023-04-03 23:29:02 +00:00
Houkime 3e93572648 fix(backups): remove self from static method 2023-04-03 23:29:02 +00:00
Houkime 58086909a4 feature(backups): check, set and unset service autobackup status 2023-04-03 23:29:02 +00:00
Houkime d9102eba37 feature(backups): cache snapshots and last backup timestamps 2023-04-03 23:29:02 +00:00
Houkime 3a65f0845a test(backups): test that we do return snapshot on backup 2023-04-03 23:29:02 +00:00
Houkime a82a986997 feature(backups): return snapshot info from backup function 2023-04-03 23:29:02 +00:00
Houkime daa40d1142 feature(backups): huey task to back up 2023-04-03 23:29:02 +00:00
Houkime baf3afb25b refactor(backups): make backups stateless 2023-04-03 23:29:02 +00:00
Inex Code 09598033e7 chore: Bump python to 3.10 and nixpkgs to 22.11 2023-04-03 23:29:02 +00:00
Houkime b4a3658c78 feature(backups): repo init tracking 2023-04-03 23:29:02 +00:00
Houkime e02c1a878b feature(backups): provider storage and retrieval 2023-04-03 23:29:02 +00:00
Houkime 36907aa9c2 refactor(backups): add a provider model for redis storage 2023-04-03 23:29:02 +00:00
Houkime f785e6724a refactor(backups): redis model storage utils 2023-04-03 23:29:02 +00:00
Houkime d8a0e05602 feature(backups): load from json 2023-04-03 23:29:02 +00:00
Houkime 719c81d2f4 feat(backups): local secret generation and storage 2023-04-03 23:29:02 +00:00
Houkime 5d5ceee1cf feat(backups): sizing up snapshots 2023-04-03 23:29:02 +00:00
Houkime 48f8f95d83 test(backups): test restoring a file 2023-04-03 23:29:02 +00:00
Houkime f2aab38085 feat(backups): add restore_snapshot and restore_service_from_snapshot 2023-04-03 23:29:02 +00:00
Houkime 39a97cf6d8 feat(backups): a better error on failed snapshot retrieval 2023-04-03 23:29:02 +00:00
Houkime cc09e933ed feat(backups): return proper snapshot structs when listing 2023-04-03 23:29:02 +00:00
Houkime de12685d3d test(backups): reenable snapshot testing 2023-04-03 23:29:02 +00:00
Houkime 86467788d3 feat(backups): throw an error on a failed backup 2023-04-03 23:29:02 +00:00
Houkime 8f019c99e3 fix(backups): singleton metaclass was screwing with tests 2023-04-03 23:29:02 +00:00
Houkime 090198c300 test(backups): localfile repo by default in tests 2023-04-03 23:29:02 +00:00
Houkime 7cb6ca9641 feature(backups): throw an error if repo init fails 2023-04-03 23:29:02 +00:00
Houkime d7f96a9adf test(backups): basic file backend init test 2023-04-03 23:29:02 +00:00
Houkime 0ce6624d5a feature(backups): register localfile backend 2023-04-03 23:29:02 +00:00
Houkime aeb66b9c72 feature(backups): localfile repo 2023-04-03 23:29:02 +00:00
Houkime 6821b245d2 test(backups): test repo init 2023-04-03 23:29:02 +00:00
Houkime ad1b1c4972 refactor(backups): repo init service method 2023-04-03 23:29:02 +00:00
Houkime d1dbcbae5e refactor(backups): add repo init 2023-04-03 23:29:02 +00:00
Houkime cbf917ad8a refactor(backups): snapshotlist and local secret groundwork 2023-04-03 23:29:02 +00:00
Houkime 37b747f87f test(backup): no snapshots 2023-04-03 23:29:02 +00:00
Houkime 6989dd0f7c refactor(backup): snapshot model 2023-04-03 23:29:02 +00:00
Houkime 6376503793 feature(backup): loading snapshots 2023-04-03 23:29:02 +00:00
Houkime e7062d72c6 feature(backup): add a restore function to restic backuper 2023-04-03 23:29:02 +00:00
Houkime 0381a9c671 feat(backup): hooks 2023-04-03 23:29:02 +00:00
Houkime de6b96f446 test(backup): use a backup service function 2023-04-03 23:29:02 +00:00
Houkime bf3b698b34 refactor(backup): add a backup function to Backups singleton class 2023-04-03 23:29:02 +00:00
Houkime 33df16f743 refactor(backup): add a placeholder Backups singleton class 2023-04-03 23:29:02 +00:00
Houkime 88b715464d test(backup): try to back up! 2023-04-03 23:29:02 +00:00
Houkime 4547f02e1b fix(backup): add memory backup class,forgot to add to git 2023-04-03 23:29:02 +00:00
Houkime 6a11cd67ac feat(backup): add backuping to restic backuper 2023-04-03 23:29:02 +00:00
Houkime d5e0a3894b test(backup): make a testfile to backup 2023-04-03 23:29:02 +00:00
Houkime b6650b52c3 test(backup): init an in-memory backup class 2023-04-03 23:29:02 +00:00
Houkime 83ed93b271 feat(backup): add in-memory backup 2023-04-03 23:29:02 +00:00
Houkime 5fd7b6c4ed feat(backup): allow no auth 2023-04-03 23:29:02 +00:00
Houkime 696cb406a8 test(backup): dummy service 2023-04-03 23:29:02 +00:00
Houkime 327ad8171f test(backup): provider class selection 2023-04-03 23:29:02 +00:00
Houkime 19ad9a5113 feature(backups): copy cli logic to new restic backuper 2023-04-03 23:29:02 +00:00
Houkime 96b6dfabbe feature(backups): placeholders for the backupers and backup providers 2023-04-03 23:29:02 +00:00
Houkime 61ff2724f3 feature(backups): placeholders for the modules of the new backup system 2023-04-03 23:29:02 +00:00
Houkime fbed185475 feature(backups): add backup structures and queries 2023-04-03 23:29:02 +00:00
Houkime cdcb4ec4c0 refactor(backup): do not use config file 2023-04-03 23:29:02 +00:00
Houkime 4e8050727a refactor(backup): pass key and account to exec 2023-04-03 23:29:02 +00:00
Houkime 7956829981 refactor(backup): extract restic repo 2023-04-03 23:29:02 +00:00
Houkime c7f304744c refactor(backup): extract rclone args 2023-04-03 23:29:02 +00:00
Houkime 2d761f7311 refactor(backup): delete unused import 2023-04-03 23:29:02 +00:00
Houkime 5450b92454 test(backup): add restic to the dev shell 2023-04-03 23:29:02 +00:00
46 changed files with 1917 additions and 165 deletions

MANIFEST.in Normal file
View File

@@ -0,0 +1 @@
recursive-include selfprivacy_api/locales *.json
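
Note that MANIFEST.in on its own only affects source distributions; for the locale JSON files to also land in built and installed packages, setuptools typically needs include_package_data (or an equivalent package_data entry) as well. A hedged sketch, since the project's setup.py is not part of this diff:

# Hypothetical setup.py fragment (not in this diff); include_package_data
# tells setuptools to honor MANIFEST.in entries such as locales/*.json.
from setuptools import setup, find_packages

setup(
    name="selfprivacy-api",
    packages=find_packages(),
    include_package_data=True,
)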

View File

@@ -0,0 +1,268 @@
from typing import List, Optional
from datetime import datetime, timedelta
from selfprivacy_api.models.backup.snapshot import Snapshot
from selfprivacy_api.utils import ReadUserData
from selfprivacy_api.utils.redis_pool import RedisPool
from selfprivacy_api.services import get_service_by_id
from selfprivacy_api.services.service import Service
from selfprivacy_api.graphql.queries.providers import BackupProvider
from selfprivacy_api.backup.providers.provider import AbstractBackupProvider
from selfprivacy_api.backup.providers import get_provider
from selfprivacy_api.backup.storage import Storage
class Backups:
"""A singleton controller for backups"""
provider: AbstractBackupProvider
@staticmethod
def set_localfile_repo(file_path: str):
ProviderClass = get_provider(BackupProvider.FILE)
provider = ProviderClass(file_path)
Storage.store_testrepo_path(file_path)
Storage.store_provider(provider)
@staticmethod
def get_last_backed_up(service: Service) -> Optional[datetime]:
"""Get a timezone-aware time of the last backup of a service"""
return Storage.get_last_backup_time(service.get_id())
@staticmethod
def get_cached_snapshots_service(service_id: str) -> List[Snapshot]:
snapshots = Storage.get_cached_snapshots()
return [snap for snap in snapshots if snap.service_name == service_id]
@staticmethod
def sync_service_snapshots(service_id: str, snapshots: List[Snapshot]):
for snapshot in snapshots:
if snapshot.service_name == service_id:
Storage.cache_snapshot(snapshot)
for snapshot in Backups.get_cached_snapshots_service(service_id):
if snapshot.id not in [snap.id for snap in snapshots]:
Storage.delete_cached_snapshot(snapshot)
@staticmethod
def enable_autobackup(service: Service):
Storage.set_autobackup(service)
@staticmethod
def _service_ids_to_back_up(time: datetime) -> List[str]:
services = Storage.services_with_autobackup()
return [id for id in services if Backups.is_time_to_backup_service(id, time)]
@staticmethod
def services_to_back_up(time: datetime) -> List[Service]:
result = []
for id in Backups._service_ids_to_back_up(time):
service = get_service_by_id(id)
if service is None:
raise ValueError("Cannot look up a service scheduled for backup!")
result.append(service)
return result
@staticmethod
def is_time_to_backup(time: datetime) -> bool:
"""
Intended as a time validator for huey cron scheduler of automatic backups
"""
return Backups._service_ids_to_back_up(time) != []
@staticmethod
def is_time_to_backup_service(service_id: str, time: datetime):
period = Backups.autobackup_period_minutes()
if period is None:
return False
if not Storage.is_autobackup_set(service_id):
return False
last_backup = Storage.get_last_backup_time(service_id)
if last_backup is None:
return True # queue a backup immediately if there are no previous backups
if time > last_backup + timedelta(minutes=period):
return True
return False
@staticmethod
def disable_autobackup(service: Service):
"""also see disable_all_autobackup()"""
Storage.unset_autobackup(service)
@staticmethod
def is_autobackup_enabled(service: Service) -> bool:
return Storage.is_autobackup_set(service.get_id())
@staticmethod
def autobackup_period_minutes() -> Optional[int]:
"""None means autobackup is disabled"""
return Storage.autobackup_period_minutes()
@staticmethod
def set_autobackup_period_minutes(minutes: int):
"""
0 and negative numbers are equivalent to disable.
Setting to a positive number may result in a backup very soon if some services are not backed up.
"""
if minutes <= 0:
Backups.disable_all_autobackup()
return
Storage.store_autobackup_period_minutes(minutes)
@staticmethod
def disable_all_autobackup():
"""disables all automatic backing up, but does not change per-service settings"""
Storage.delete_backup_period()
@staticmethod
def provider():
return Backups.lookup_provider()
@staticmethod
def set_provider(kind: str, login: str, key: str):
provider = Backups.construct_provider(kind, login, key)
Storage.store_provider(provider)
@staticmethod
def construct_provider(kind: str, login: str, key: str):
provider_class = get_provider(BackupProvider[kind])
if kind == "FILE":
path = Storage.get_testrepo_path()
return provider_class(path)
return provider_class(login=login, key=key)
@staticmethod
def reset():
Storage.reset()
@staticmethod
def lookup_provider() -> AbstractBackupProvider:
redis_provider = Backups.load_provider_redis()
if redis_provider is not None:
return redis_provider
json_provider = Backups.load_provider_json()
if json_provider is not None:
Storage.store_provider(json_provider)
return json_provider
memory_provider = Backups.construct_provider("MEMORY", login="", key="")
Storage.store_provider(memory_provider)
return memory_provider
@staticmethod
def load_provider_json() -> AbstractBackupProvider:
with ReadUserData() as user_data:
account = ""
key = ""
if "backup" not in user_data.keys():
if "backblaze" in user_data.keys():
account = user_data["backblaze"]["accountId"]
key = user_data["backblaze"]["accountKey"]
provider_string = "BACKBLAZE"
return Backups.construct_provider(
kind=provider_string, login=account, key=key
)
return None
account = user_data["backup"]["accountId"]
key = user_data["backup"]["accountKey"]
provider_string = user_data["backup"]["provider"]
return Backups.construct_provider(
kind=provider_string, login=account, key=key
)
@staticmethod
def load_provider_redis() -> AbstractBackupProvider:
provider_model = Storage.load_provider()
if provider_model is None:
return None
return Backups.construct_provider(
provider_model.kind, provider_model.login, provider_model.key
)
@staticmethod
def back_up(service: Service):
"""The top-level function to back up a service"""
folders = service.get_folders()
repo_name = service.get_id()
service.pre_backup()
snapshot = Backups.provider().backuper.start_backup(folders, repo_name)
Backups._store_last_snapshot(repo_name, snapshot)
service.post_restore()
@staticmethod
def init_repo(service: Service):
repo_name = service.get_id()
Backups.provider().backuper.init(repo_name)
Storage.mark_as_init(service)
@staticmethod
def is_initted(service: Service) -> bool:
repo_name = service.get_id()
if Storage.has_init_mark(service):
return True
initted = Backups.provider().backuper.is_initted(repo_name)
if initted:
Storage.mark_as_init(service)
return True
return False
@staticmethod
def get_snapshots(service: Service) -> List[Snapshot]:
service_id = service.get_id()
cached_snapshots = Backups.get_cached_snapshots_service(service_id)
if cached_snapshots != []:
return cached_snapshots
# TODO: the oldest snapshots will get expired faster than the new ones.
# How to detect that the end is missing?
upstream_snapshots = Backups.provider().backuper.get_snapshots(service_id)
Backups.sync_service_snapshots(service_id, upstream_snapshots)
return upstream_snapshots
@staticmethod
def restore_service_from_snapshot(service: Service, snapshot_id: str):
repo_name = service.get_id()
folders = service.get_folders()
Backups.provider().backuper.restore_from_backup(repo_name, snapshot_id, folders)
@staticmethod
def restore_snapshot(snapshot: Snapshot):
Backups.restore_service_from_snapshot(
get_service_by_id(snapshot.service_name), snapshot.id
)
@staticmethod
def service_snapshot_size(service: Service, snapshot_id: str) -> float:
repo_name = service.get_id()
return Backups.provider().backuper.restored_size(repo_name, snapshot_id)
@staticmethod
def snapshot_restored_size(snapshot: Snapshot) -> float:
return Backups.service_snapshot_size(
get_service_by_id(snapshot.service_name), snapshot.id
)
@staticmethod
def _store_last_snapshot(service_id: str, snapshot: Snapshot):
"""What do we do with a snapshot that is just made?"""
        # non-expiring timestamp of the last backup
Storage.store_last_timestamp(service_id, snapshot)
# expiring cache entry
Storage.cache_snapshot(snapshot)
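
Taken together, the controller above already supports a full local-file round trip. A minimal usage sketch, assuming a running redis instance and a registered service with id "testservice" (both are assumptions, not part of this diff):

# Minimal sketch of driving the Backups controller end to end.
from selfprivacy_api.backup import Backups
from selfprivacy_api.services import get_service_by_id

Backups.set_localfile_repo("/tmp/test_backup_repo")  # hypothetical path
service = get_service_by_id("testservice")           # assumed to exist
if not Backups.is_initted(service):
    Backups.init_repo(service)
Backups.back_up(service)
print(Backups.get_snapshots(service))  # served from the redis cache next time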

View File

@@ -0,0 +1,35 @@
from abc import ABC, abstractmethod
from typing import List
from selfprivacy_api.models.backup.snapshot import Snapshot
class AbstractBackuper(ABC):
def __init__(self):
pass
@abstractmethod
def is_initted(self, repo_name: str) -> bool:
raise NotImplementedError
@abstractmethod
def start_backup(self, folders: List[str], repo_name: str):
raise NotImplementedError
@abstractmethod
def get_snapshots(self, repo_name) -> List[Snapshot]:
"""Get all snapshots from the repo"""
raise NotImplementedError
@abstractmethod
def init(self, repo_name):
raise NotImplementedError
@abstractmethod
def restore_from_backup(self, repo_name: str, snapshot_id: str, folders: List[str]):
"""Restore a target folder using a snapshot"""
raise NotImplementedError
@abstractmethod
def restored_size(self, repo_name, snapshot_id) -> float:
raise NotImplementedError

View File

@@ -0,0 +1,41 @@
"""Handling of local secret used for encrypted backups.
Separated out for circular dependency reasons
"""
from __future__ import annotations
import secrets
from selfprivacy_api.utils.redis_pool import RedisPool
REDIS_KEY = "backup:local_secret"
redis = RedisPool().get_connection()
class LocalBackupSecret:
@staticmethod
def get():
"""A secret string which backblaze/other clouds do not know.
Serves as encryption key.
"""
if not LocalBackupSecret.exists():
LocalBackupSecret.reset()
return redis.get(REDIS_KEY)
@staticmethod
def reset():
new_secret = LocalBackupSecret._generate()
LocalBackupSecret._store(new_secret)
@staticmethod
def exists() -> bool:
return redis.exists(REDIS_KEY)
@staticmethod
def _generate() -> str:
return secrets.token_urlsafe(256)
@staticmethod
def _store(secret: str):
redis.set(REDIS_KEY, secret)
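
This secret is consumed further down by ResticBackuper as the repository password; a short sketch of the flow:

# Sketch: the local secret doubles as the restic repository password.
from selfprivacy_api.backup.local_secret import LocalBackupSecret

secret = LocalBackupSecret.get()      # generated and stored in redis on first use
password_command = f"echo {secret}"   # what ResticBackuper hands to restic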

View File

@@ -0,0 +1,22 @@
from selfprivacy_api.graphql.queries.providers import BackupProvider
from selfprivacy_api.backup.providers.provider import AbstractBackupProvider
from selfprivacy_api.backup.providers.backblaze import Backblaze
from selfprivacy_api.backup.providers.memory import InMemoryBackup
from selfprivacy_api.backup.providers.local_file import LocalFileBackup
PROVIDER_MAPPING = {
BackupProvider.BACKBLAZE: Backblaze,
BackupProvider.MEMORY: InMemoryBackup,
BackupProvider.FILE: LocalFileBackup,
}
def get_provider(provider_type: BackupProvider) -> AbstractBackupProvider:
return PROVIDER_MAPPING[provider_type]
def get_kind(provider: AbstractBackupProvider) -> str:
for key, value in PROVIDER_MAPPING.items():
if isinstance(provider, value):
return key.value
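
get_provider() and get_kind() are inverses over PROVIDER_MAPPING; a quick sketch:

# Sketch: round-tripping between provider kinds and classes.
from selfprivacy_api.graphql.queries.providers import BackupProvider
from selfprivacy_api.backup.providers import get_provider, get_kind

ProviderClass = get_provider(BackupProvider.MEMORY)  # -> InMemoryBackup
provider = ProviderClass(login="", key="")
assert get_kind(provider) == "MEMORY"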

View File

@@ -0,0 +1,6 @@
from .provider import AbstractBackupProvider
from selfprivacy_api.backup.restic_backuper import ResticBackuper
class Backblaze(AbstractBackupProvider):
backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:")

View File

@@ -0,0 +1,11 @@
from .provider import AbstractBackupProvider
from selfprivacy_api.backup.restic_backuper import ResticBackuper
class LocalFileBackup(AbstractBackupProvider):
backuper = ResticBackuper("", "", "memory")
# login and key args are for compatibility with generic provider methods. They are ignored.
def __init__(self, filename: str, login: str = "", key: str = ""):
super().__init__()
self.backuper = ResticBackuper("", "", f":local:{filename}/")

View File

@@ -0,0 +1,6 @@
from .provider import AbstractBackupProvider
from selfprivacy_api.backup.restic_backuper import ResticBackuper
class InMemoryBackup(AbstractBackupProvider):
backuper = ResticBackuper("", "", ":memory:")

View File

@@ -0,0 +1,17 @@
"""
An abstract class for BackBlaze, S3 etc.
It assumes that while some providers are supported via restic/rclone, others may
require different backends
"""
from abc import ABC
from selfprivacy_api.backup.backuper import AbstractBackuper
class AbstractBackupProvider(ABC):
@property
def backuper(self) -> AbstractBackuper:
raise NotImplementedError
def __init__(self, login="", key=""):
self.login = login
self.key = key

View File

@@ -0,0 +1,256 @@
import subprocess
import json
import datetime
from typing import List
from collections.abc import Iterable
from selfprivacy_api.backup.backuper import AbstractBackuper
from selfprivacy_api.models.backup.snapshot import Snapshot
from selfprivacy_api.backup.local_secret import LocalBackupSecret
class ResticBackuper(AbstractBackuper):
def __init__(self, login_flag: str, key_flag: str, type: str):
self.login_flag = login_flag
self.key_flag = key_flag
self.type = type
self.account = ""
self.key = ""
def set_creds(self, account: str, key: str):
self.account = account
self.key = key
def restic_repo(self, repository_name: str) -> str:
# https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone
# https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5
return f"rclone:{self.type}{repository_name}/sfbackup"
def rclone_args(self):
return "rclone.args=serve restic --stdio" + self.backend_rclone_args()
def backend_rclone_args(self) -> str:
acc_arg = ""
key_arg = ""
if self.account != "":
acc_arg = f"{self.login_flag} {self.account}"
if self.key != "":
key_arg = f"{self.key_flag} {self.key}"
return f"{acc_arg} {key_arg}"
def _password_command(self):
return f"echo {LocalBackupSecret.get()}"
def restic_command(self, repo_name: str, *args):
command = [
"restic",
"-o",
self.rclone_args(),
"-r",
self.restic_repo(repo_name),
"--password-command",
self._password_command(),
]
if args != []:
command.extend(ResticBackuper.__flatten_list(args))
return command
@staticmethod
def __flatten_list(list):
"""string-aware list flattener"""
result = []
for item in list:
if isinstance(item, Iterable) and not isinstance(item, str):
result.extend(ResticBackuper.__flatten_list(item))
continue
result.append(item)
return result
def start_backup(self, folders: List[str], repo_name: str):
"""
Start backup with restic
"""
        # for now only a list is accepted; maybe it is ok to accept a union of a string and a list of strings
assert not isinstance(folders, str)
backup_command = self.restic_command(
repo_name,
"backup",
"--json",
folders,
)
with subprocess.Popen(
backup_command,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
) as handle:
output = handle.communicate()[0].decode("utf-8")
try:
messages = self.parse_json_output(output)
return ResticBackuper._snapshot_from_backup_messages(
messages, repo_name
)
except ValueError as e:
raise ValueError("could not create a snapshot: ") from e
@staticmethod
def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot:
for message in messages:
if message["message_type"] == "summary":
return ResticBackuper._snapshot_from_fresh_summary(message, repo_name)
raise ValueError("no summary message in restic json output")
@staticmethod
def _snapshot_from_fresh_summary(message: object, repo_name) -> Snapshot:
return Snapshot(
id=message["snapshot_id"],
created_at=datetime.datetime.now(datetime.timezone.utc),
service_name=repo_name,
)
def init(self, repo_name):
init_command = self.restic_command(
repo_name,
"init",
)
with subprocess.Popen(
init_command,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
) as process_handle:
output = process_handle.communicate()[0].decode("utf-8")
if not "created restic repository" in output:
raise ValueError("cannot init a repo: " + output)
def is_initted(self, repo_name: str) -> bool:
command = self.restic_command(
repo_name,
"check",
"--json",
)
with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle:
output = handle.communicate()[0].decode("utf-8")
if not self.has_json(output):
return False
# raise NotImplementedError("error(big): " + output)
return True
def restored_size(self, repo_name, snapshot_id) -> float:
"""
Size of a snapshot
"""
command = self.restic_command(
repo_name,
"stats",
snapshot_id,
"--json",
)
with subprocess.Popen(command, stdout=subprocess.PIPE, shell=False) as handle:
output = handle.communicate()[0].decode("utf-8")
try:
parsed_output = self.parse_json_output(output)
return parsed_output["total_size"]
except ValueError as e:
raise ValueError("cannot restore a snapshot: " + output) from e
def restore_from_backup(self, repo_name, snapshot_id, folders):
"""
Restore from backup with restic
"""
# snapshots save the path of the folder in the file system
# I do not alter the signature yet because maybe this can be
# changed with flags
restore_command = self.restic_command(
repo_name,
"restore",
snapshot_id,
"--target",
"/",
)
with subprocess.Popen(
restore_command, stdout=subprocess.PIPE, shell=False
) as handle:
output = handle.communicate()[0].decode("utf-8")
if "restoring" not in output:
raise ValueError("cannot restore a snapshot: " + output)
def _load_snapshots(self, repo_name) -> object:
"""
Load list of snapshots from repository
        raises ValueError if the repo does not exist
"""
listing_command = self.restic_command(
repo_name,
"snapshots",
"--json",
)
with subprocess.Popen(
listing_command,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
) as backup_listing_process_descriptor:
output = backup_listing_process_descriptor.communicate()[0].decode("utf-8")
if "Is there a repository at the following location?" in output:
raise ValueError("No repository! : " + output)
try:
return self.parse_json_output(output)
except ValueError as e:
raise ValueError("Cannot load snapshots: ") from e
def get_snapshots(self, repo_name) -> List[Snapshot]:
"""Get all snapshots from the repo"""
snapshots = []
for restic_snapshot in self._load_snapshots(repo_name):
snapshot = Snapshot(
id=restic_snapshot["short_id"],
created_at=restic_snapshot["time"],
service_name=repo_name,
)
snapshots.append(snapshot)
return snapshots
def parse_json_output(self, output: str) -> object:
starting_index = self.json_start(output)
if starting_index == -1:
raise ValueError("There is no json in the restic output : " + output)
truncated_output = output[starting_index:]
json_messages = truncated_output.splitlines()
if len(json_messages) == 1:
return json.loads(truncated_output)
result_array = []
for message in json_messages:
result_array.append(json.loads(message))
return result_array
def json_start(self, output: str) -> int:
indices = [
output.find("["),
output.find("{"),
]
indices = [x for x in indices if x != -1]
if indices == []:
return -1
return min(indices)
def has_json(self, output: str) -> bool:
if self.json_start(output) == -1:
return False
return True
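
For concreteness, a hedged sketch of the command this class assembles for a Backblaze-backed repository (credentials are placeholders):

# Sketch: what restic_command() builds for a Backblaze repo.
backuper = ResticBackuper("--b2-account", "--b2-key", ":b2:")
backuper.set_creds("ACCOUNT_ID", "ACCOUNT_KEY")  # placeholder credentials
command = backuper.restic_command("bitwarden", "snapshots", "--json")
# roughly: ["restic",
#           "-o", "rclone.args=serve restic --stdio --b2-account ... --b2-key ...",
#           "-r", "rclone::b2:bitwarden/sfbackup",
#           "--password-command", "echo <local secret>",
#           "snapshots", "--json"]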

View File

@@ -0,0 +1,168 @@
from typing import List, Optional
from datetime import datetime
from selfprivacy_api.models.backup.snapshot import Snapshot
from selfprivacy_api.models.backup.provider import BackupProviderModel
from selfprivacy_api.utils.redis_pool import RedisPool
from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model
from selfprivacy_api.services.service import Service
from selfprivacy_api.backup.providers.provider import AbstractBackupProvider
from selfprivacy_api.backup.providers import get_kind
# a hack to store file path.
REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS = 24 * 60 * 60 # one day
REDIS_AUTOBACKUP_ENABLED_PREFIX = "backup:autobackup:services:"
REDIS_SNAPSHOTS_PREFIX = "backups:snapshots:"
REDIS_LAST_BACKUP_PREFIX = "backups:last-backed-up:"
REDIS_INITTED_CACHE_PREFIX = "backups:initted_services:"
REDIS_REPO_PATH_KEY = "backups:test_repo_path"
REDIS_PROVIDER_KEY = "backups:provider"
REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period"
redis = RedisPool().get_connection()
class Storage:
@staticmethod
def reset():
redis.delete(REDIS_PROVIDER_KEY)
redis.delete(REDIS_REPO_PATH_KEY)
redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY)
prefixes_to_clean = [
REDIS_INITTED_CACHE_PREFIX,
REDIS_SNAPSHOTS_PREFIX,
REDIS_LAST_BACKUP_PREFIX,
REDIS_AUTOBACKUP_ENABLED_PREFIX,
]
for prefix in prefixes_to_clean:
for key in redis.keys(prefix + "*"):
redis.delete(key)
@staticmethod
def store_testrepo_path(path: str):
redis.set(REDIS_REPO_PATH_KEY, path)
@staticmethod
def get_testrepo_path() -> str:
if not redis.exists(REDIS_REPO_PATH_KEY):
raise ValueError(
"No test repository filepath is set, but we tried to access it"
)
return redis.get(REDIS_REPO_PATH_KEY)
@staticmethod
def services_with_autobackup() -> List[str]:
keys = redis.keys(REDIS_AUTOBACKUP_ENABLED_PREFIX + "*")
service_ids = [key.split(":")[-1] for key in keys]
return service_ids
@staticmethod
def __last_backup_key(service_id):
return REDIS_LAST_BACKUP_PREFIX + service_id
@staticmethod
def __snapshot_key(snapshot: Snapshot):
return REDIS_SNAPSHOTS_PREFIX + snapshot.id
@staticmethod
def get_last_backup_time(service_id: str) -> Optional[datetime]:
key = Storage.__last_backup_key(service_id)
if not redis.exists(key):
return None
snapshot = hash_as_model(redis, key, Snapshot)
return snapshot.created_at
@staticmethod
def store_last_timestamp(service_id: str, snapshot: Snapshot):
store_model_as_hash(redis, Storage.__last_backup_key(service_id), snapshot)
@staticmethod
def cache_snapshot(snapshot: Snapshot):
snapshot_key = Storage.__snapshot_key(snapshot)
store_model_as_hash(redis, snapshot_key, snapshot)
redis.expire(snapshot_key, REDIS_SNAPSHOT_CACHE_EXPIRE_SECONDS)
@staticmethod
def delete_cached_snapshot(snapshot: Snapshot):
snapshot_key = Storage.__snapshot_key(snapshot)
redis.delete(snapshot_key)
@staticmethod
def get_cached_snapshots() -> List[Snapshot]:
keys = redis.keys(REDIS_SNAPSHOTS_PREFIX + "*")
result = []
for key in keys:
snapshot = hash_as_model(redis, key, Snapshot)
result.append(snapshot)
return result
@staticmethod
def __autobackup_key(service_name: str) -> str:
return REDIS_AUTOBACKUP_ENABLED_PREFIX + service_name
@staticmethod
def set_autobackup(service: Service):
# shortcut this
redis.set(Storage.__autobackup_key(service.get_id()), 1)
@staticmethod
def unset_autobackup(service: Service):
"""also see disable_all_autobackup()"""
redis.delete(Storage.__autobackup_key(service.get_id()))
@staticmethod
def is_autobackup_set(service_name: str) -> bool:
return redis.exists(Storage.__autobackup_key(service_name))
@staticmethod
def autobackup_period_minutes() -> Optional[int]:
"""None means autobackup is disabled"""
if not redis.exists(REDIS_AUTOBACKUP_PERIOD_KEY):
return None
return int(redis.get(REDIS_AUTOBACKUP_PERIOD_KEY))
@staticmethod
def store_autobackup_period_minutes(minutes: int):
redis.set(REDIS_AUTOBACKUP_PERIOD_KEY, minutes)
@staticmethod
def delete_backup_period():
redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY)
@staticmethod
def store_provider(provider: AbstractBackupProvider):
store_model_as_hash(
redis,
REDIS_PROVIDER_KEY,
BackupProviderModel(
kind=get_kind(provider), login=provider.login, key=provider.key
),
)
@staticmethod
def load_provider() -> BackupProviderModel:
provider_model = hash_as_model(redis, REDIS_PROVIDER_KEY, BackupProviderModel)
return provider_model
@staticmethod
def has_init_mark(service: Service) -> bool:
repo_name = service.get_id()
if redis.exists(REDIS_INITTED_CACHE_PREFIX + repo_name):
return True
return False
@staticmethod
def mark_as_init(service: Service):
repo_name = service.get_id()
redis.set(REDIS_INITTED_CACHE_PREFIX + repo_name, 1)
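
The constants above give the following redis layout; a sketch (values illustrative, assumes a running redis instance):

#   backups:provider                          hash of BackupProviderModel
#   backups:snapshots:<snapshot_id>           hash of Snapshot, 24h TTL
#   backups:last-backed-up:<service_id>       hash of Snapshot, non-expiring
#   backups:initted_services:<service_id>     "1"
#   backup:autobackup:services:<service_id>   "1"
#   backups:autobackup_period                 minutes, stored as a string
Storage.store_autobackup_period_minutes(1440)   # daily
assert Storage.autobackup_period_minutes() == 1440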

View File

@@ -0,0 +1,31 @@
from datetime import datetime
from selfprivacy_api.utils.huey import huey
from selfprivacy_api.services.service import Service
from selfprivacy_api.backup import Backups
def validate_datetime(dt: datetime):
# dt = datetime.now(timezone.utc)
    if dt.tzinfo is None:
raise ValueError(
"""
huey passed in the timezone-unaware time!
Post it in support chat or maybe try uncommenting a line above
"""
)
return Backups.is_time_to_backup(dt)
# huey tasks need to return something
@huey.task()
def start_backup(service: Service) -> bool:
Backups.back_up(service)
return True
@huey.periodic_task(validate_datetime=validate_datetime)
def automatic_backup():
time = datetime.now()
for service in Backups.services_to_back_up(time):
start_backup(service)
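
huey calls validate_datetime with the current time roughly once a minute and fires automatic_backup only when it returns True; a sketch of that contract:

# Sketch: what the huey scheduler effectively does with the validator.
from datetime import datetime, timezone

now = datetime.now(timezone.utc)   # huey is expected to pass aware times
if validate_datetime(now):         # True once some service's period has elapsed
    pass  # the scheduler then enqueues automatic_backup()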

View File

@@ -1,10 +1,13 @@
"""GraphQL API for SelfPrivacy."""
# pylint: disable=too-few-public-methods
import typing
from strawberry.permission import BasePermission
from strawberry.types import Info
from strawberry.extensions import Extension
from selfprivacy_api.actions.api_tokens import is_token_valid
from selfprivacy_api.utils.localization import Localization
class IsAuthenticated(BasePermission):
@@ -19,3 +22,14 @@ class IsAuthenticated(BasePermission):
if token is None:
return False
return is_token_valid(token.replace("Bearer ", ""))
class LocaleExtension(Extension):
"""Parse the Accept-Language header and set the locale in the context as one of the supported locales."""
def resolve(self, _next, root, info: Info, *args, **kwargs):
locale = Localization().get_locale(
info.context["request"].headers.get("Accept-Language")
)
info.context["locale"] = locale
return _next(root, info, *args, **kwargs)
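
Once the extension is registered on the schema (see the schema diff further down), the locale is negotiated per request. A hedged sketch of exercising it over HTTP against the test query added below (URL is a placeholder):

# Hedged sketch: exercising LocaleExtension via the new test query.
import requests

response = requests.post(
    "http://localhost/graphql",        # placeholder URL
    json={"query": "{ test }"},
    headers={"Accept-Language": "ru"},
)
print(response.json())  # {"data": {"test": "ru"}} if "ru" is a supported locale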

View File

@@ -0,0 +1,9 @@
import datetime
import strawberry
@strawberry.type
class SnapshotInfo:
id: str
service_name: str
created_at: datetime.datetime

View File

@@ -1,21 +1,24 @@
from enum import Enum
import typing
import strawberry
from strawberry.types import Info
from selfprivacy_api.graphql.common_types.dns import DnsRecord
from selfprivacy_api.graphql.common_types.backup_snapshot import SnapshotInfo
from selfprivacy_api.services import get_service_by_id, get_services_by_location
from selfprivacy_api.services import Service as ServiceInterface
from selfprivacy_api.utils.block_devices import BlockDevices
from selfprivacy_api.utils.localization import Localization as L10n
def get_usages(root: "StorageVolume") -> list["StorageUsageInterface"]:
def get_usages(root: "StorageVolume", locale: str) -> list["StorageUsageInterface"]:
"""Get usages of a volume"""
return [
ServiceStorageUsage(
service=service_to_graphql_service(service),
service=service_to_graphql_service(service, locale),
title=service.get_display_name(),
used_space=str(service.get_storage_usage()),
volume=get_volume_by_id(service.get_location()),
volume=get_volume_by_id(service.get_drive()),
)
for service in get_services_by_location(root.name)
]
@@ -35,9 +38,10 @@ class StorageVolume:
type: str
@strawberry.field
def usages(self) -> list["StorageUsageInterface"]:
def usages(self, info: Info) -> list["StorageUsageInterface"]:
"""Get usages of a volume"""
return get_usages(self)
locale = info.context["locale"]
return get_usages(self, locale)
@strawberry.interface
@@ -65,7 +69,7 @@ class ServiceStatusEnum(Enum):
OFF = "OFF"
def get_storage_usage(root: "Service") -> ServiceStorageUsage:
def get_storage_usage(root: "Service", locale: str) -> ServiceStorageUsage:
"""Get storage usage for a service"""
service = get_service_by_id(root.id)
if service is None:
@@ -76,10 +80,10 @@ def get_storage_usage(root: "Service") -> ServiceStorageUsage:
volume=get_volume_by_id("sda1"),
)
return ServiceStorageUsage(
service=service_to_graphql_service(service),
service=service_to_graphql_service(service, locale),
title=service.get_display_name(),
used_space=str(service.get_storage_usage()),
volume=get_volume_by_id(service.get_location()),
volume=get_volume_by_id(service.get_drive()),
)
@@ -97,17 +101,23 @@ class Service:
dns_records: typing.Optional[typing.List[DnsRecord]]
@strawberry.field
def storage_usage(self) -> ServiceStorageUsage:
def storage_usage(self, info: Info) -> ServiceStorageUsage:
"""Get storage usage for a service"""
return get_storage_usage(self)
locale = info.context["locale"]
return get_storage_usage(self, locale)
@strawberry.field
def backup_snapshots(self) -> typing.Optional[typing.List[SnapshotInfo]]:
return None
def service_to_graphql_service(service: ServiceInterface) -> Service:
def service_to_graphql_service(service: ServiceInterface, locale: str) -> Service:
"""Convert service to graphql service"""
l10n = L10n()
return Service(
id=service.get_id(),
display_name=service.get_display_name(),
description=service.get_description(),
display_name=l10n.get(service.get_display_name(), locale),
description=l10n.get(service.get_description(), locale),
svg_icon=service.get_svg_icon(),
is_movable=service.is_movable(),
is_required=service.is_required(),

View File

@@ -1,7 +1,9 @@
"""Services mutations"""
# pylint: disable=too-few-public-methods
from threading import local
import typing
import strawberry
from strawberry.types import Info
from selfprivacy_api.graphql import IsAuthenticated
from selfprivacy_api.graphql.common_types.jobs import job_to_api_job
@@ -45,8 +47,9 @@
"""Services mutations."""
@strawberry.mutation(permission_classes=[IsAuthenticated])
def enable_service(self, service_id: str) -> ServiceMutationReturn:
def enable_service(self, service_id: str, info: Info) -> ServiceMutationReturn:
"""Enable service."""
locale = info.context["locale"]
service = get_service_by_id(service_id)
if service is None:
return ServiceMutationReturn(
@@ -59,12 +62,13 @@
success=True,
message="Service enabled.",
code=200,
service=service_to_graphql_service(service),
service=service_to_graphql_service(service, locale),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def disable_service(self, service_id: str) -> ServiceMutationReturn:
def disable_service(self, service_id: str, info: Info) -> ServiceMutationReturn:
"""Disable service."""
locale = info.context["locale"]
service = get_service_by_id(service_id)
if service is None:
return ServiceMutationReturn(
@@ -77,12 +81,13 @@
success=True,
message="Service disabled.",
code=200,
service=service_to_graphql_service(service),
service=service_to_graphql_service(service, locale),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def stop_service(self, service_id: str) -> ServiceMutationReturn:
def stop_service(self, service_id: str, info: Info) -> ServiceMutationReturn:
"""Stop service."""
locale = info.context["locale"]
service = get_service_by_id(service_id)
if service is None:
return ServiceMutationReturn(
@@ -95,12 +100,13 @@
success=True,
message="Service stopped.",
code=200,
service=service_to_graphql_service(service),
service=service_to_graphql_service(service, locale),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def start_service(self, service_id: str) -> ServiceMutationReturn:
def start_service(self, service_id: str, info: Info) -> ServiceMutationReturn:
"""Start service."""
locale = info.context["locale"]
service = get_service_by_id(service_id)
if service is None:
return ServiceMutationReturn(
@@ -113,12 +119,13 @@
success=True,
message="Service started.",
code=200,
service=service_to_graphql_service(service),
service=service_to_graphql_service(service, locale),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def restart_service(self, service_id: str) -> ServiceMutationReturn:
def restart_service(self, service_id: str, info: Info) -> ServiceMutationReturn:
"""Restart service."""
locale = info.context["locale"]
service = get_service_by_id(service_id)
if service is None:
return ServiceMutationReturn(
@@ -131,12 +138,15 @@
success=True,
message="Service restarted.",
code=200,
service=service_to_graphql_service(service),
service=service_to_graphql_service(service, locale),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def move_service(self, input: MoveServiceInput) -> ServiceJobMutationReturn:
def move_service(
self, input: MoveServiceInput, info: Info
) -> ServiceJobMutationReturn:
"""Move service."""
locale = info.context["locale"]
service = get_service_by_id(input.service_id)
if service is None:
return ServiceJobMutationReturn(
@@ -149,7 +159,7 @@ class ServicesMutations:
success=False,
message="Service is not movable.",
code=400,
service=service_to_graphql_service(service),
service=service_to_graphql_service(service, locale),
)
volume = BlockDevices().get_block_device(input.location)
if volume is None:
@@ -157,13 +167,13 @@
success=False,
message="Volume not found.",
code=404,
service=service_to_graphql_service(service),
service=service_to_graphql_service(service, locale),
)
job = service.move_to_volume(volume)
return ServiceJobMutationReturn(
success=True,
message="Service moved.",
code=200,
service=service_to_graphql_service(service),
service=service_to_graphql_service(service, locale),
job=job_to_api_job(job),
)

View File

@@ -0,0 +1,14 @@
"""Backup"""
# pylint: disable=too-few-public-methods
import typing
import strawberry
from selfprivacy_api.graphql.common_types.backup_snapshot import SnapshotInfo
@strawberry.type
class Backup:
backend: str
@strawberry.field
def get_backups(self) -> typing.List[SnapshotInfo]:
return []

View File

@@ -18,3 +18,6 @@ class ServerProvider(Enum):
@strawberry.enum
class BackupProvider(Enum):
BACKBLAZE = "BACKBLAZE"
# for testing purposes; make sure these are not selectable in prod.
MEMORY = "MEMORY"
FILE = "FILE"

View File

@@ -2,6 +2,7 @@
# pylint: disable=too-few-public-methods
import typing
import strawberry
from strawberry.types import Info
from selfprivacy_api.graphql.common_types.service import (
Service,
@@ -13,6 +14,7 @@ from selfprivacy_api.services import get_all_services
@strawberry.type
class Services:
@strawberry.field
def all_services(self) -> typing.List[Service]:
def all_services(self, info: Info) -> typing.List[Service]:
locale = info.context["locale"]
services = get_all_services()
return [service_to_graphql_service(service) for service in services]
return [service_to_graphql_service(service, locale) for service in services]

View File

@@ -4,7 +4,9 @@
import asyncio
from typing import AsyncGenerator
import strawberry
from selfprivacy_api.graphql import IsAuthenticated
from strawberry.types import Info
from selfprivacy_api.graphql import IsAuthenticated, LocaleExtension
from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations
from selfprivacy_api.graphql.mutations.job_mutations import JobMutations
from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn
@@ -58,6 +60,11 @@ class Query:
"""Services queries"""
return Services()
@strawberry.field()
def test(self, info: Info) -> str:
"""Test query"""
return info.context["locale"]
@strawberry.type
class Mutation(
@@ -95,4 +102,9 @@ class Subscription:
await asyncio.sleep(0.5)
schema = strawberry.Schema(query=Query, mutation=Mutation, subscription=Subscription)
schema = strawberry.Schema(
query=Query,
mutation=Mutation,
subscription=Subscription,
extensions=[LocaleExtension],
)

View File

@@ -0,0 +1,52 @@
{
"services": {
"bitwarden": {
"display_name": "Bitwarden",
"description": "Bitwarden is an open source password management solution you can run on your own server.",
"move_job": {
"name": "Move Bitwarden",
"description": "Moving Bitwarden data to {volume}"
}
},
"gitea": {
"display_name": "Gitea",
"description": "Gitea is a lightweight code hosting solution written in Go.",
"move_job": {
"name": "Move Gitea",
"description": "Moving Gitea data to {volume}"
}
},
"jitsi": {
"display_name": "Jitsi",
"description": "Jitsi is a free and open source video conferencing solution."
},
"mailserver": {
"display_name": "Email",
"description": "E-Mail for company and family.",
"move_job": {
"name": "Move Mail Server",
"description": "Moving mailserver data to {volume}"
}
},
"nextcloud": {
"display_name": "Nextcloud",
"description": "Nextcloud is a cloud storage service that offers a web interface and a desktop client.",
"move_job": {
"name": "Move Nextcloud",
"description": "Moving Nextcloud data to {volume}"
}
},
"ocserv": {
"display_name": "OpenConnect VPN",
"description": "OpenConnect VPN to connect your devices and access the internet."
},
"pleroma": {
"display_name": "Pleroma",
"description": "Pleroma is a free and open source microblogging server.",
"move_job": {
"name": "Move Pleroma",
"description": "Moving Pleroma data to {volume}"
}
}
}
}

View File

@@ -0,0 +1,12 @@
{
"services": {
"bitwarden": {
"display_name": "Bitwarden",
"description": "Bitwarden это менеджер паролей с открытым исходным кодом, который может работать на вашем сервере.",
"move_job": {
"name": "Переместить Bitwarden",
"description": "Перемещение данных Bitwarden на {volume}"
}
}
}
}
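
The dotted keys in these files are resolved by the Localization helper used in the service classes below. Localization itself is not part of this diff, so the sketch below is inferred from those call sites:

# Sketch (inferred from call sites; Localization is not shown in this diff).
from selfprivacy_api.utils.localization import Localization as L10n

name = L10n().get("services.bitwarden.move_job.name", "ru")
# -> "Переместить Bitwarden"
description = L10n().get(
    "services.bitwarden.move_job.description", "ru"
).format(volume="sdb")
# -> "Перемещение данных Bitwarden на sdb"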

View File

@@ -0,0 +1,9 @@
"""for storage in Redis"""
from pydantic import BaseModel
class BackupProviderModel(BaseModel):
kind: str
login: str
key: str

View File

@@ -0,0 +1,8 @@
import datetime
from pydantic import BaseModel
class Snapshot(BaseModel):
id: str
service_name: str
created_at: datetime.datetime

View File

@@ -3,9 +3,7 @@ from datetime import datetime
import json
import subprocess
import os
from threading import Lock
from enum import Enum
import portalocker
from selfprivacy_api.utils import ReadUserData
from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass
@@ -51,7 +49,6 @@ class ResticController(metaclass=SingletonMetaclass):
self.error_message = None
self._initialized = True
self.load_configuration()
self.write_rclone_config()
self.load_snapshots()
def load_configuration(self):
@@ -65,25 +62,6 @@ class ResticController(metaclass=SingletonMetaclass):
else:
self.state = ResticStates.NO_KEY
def write_rclone_config(self):
"""
Open /root/.config/rclone/rclone.conf with portalocker
and write configuration in the following format:
[backblaze]
type = b2
account = {self.backblaze_account}
key = {self.backblaze_key}
"""
with portalocker.Lock(
"/root/.config/rclone/rclone.conf", "w", timeout=None
) as rclone_config:
rclone_config.write(
f"[backblaze]\n"
f"type = b2\n"
f"account = {self._backblaze_account}\n"
f"key = {self._backblaze_key}\n"
)
def load_snapshots(self):
"""
Load list of snapshots from repository
@@ -91,9 +69,9 @@ class ResticController(metaclass=SingletonMetaclass):
backup_listing_command = [
"restic",
"-o",
"rclone.args=serve restic --stdio",
self.rclone_args(),
"-r",
f"rclone:backblaze:{self._repository_name}/sfbackup",
self.restic_repo(),
"snapshots",
"--json",
]
@@ -123,6 +101,17 @@ class ResticController(metaclass=SingletonMetaclass):
self.error_message = snapshots_list
return
def restic_repo(self):
# https://restic.readthedocs.io/en/latest/030_preparing_a_new_repo.html#other-services-via-rclone
# https://forum.rclone.org/t/can-rclone-be-run-solely-with-command-line-options-no-config-no-env-vars/6314/5
return f"rclone::b2:{self._repository_name}/sfbackup"
def rclone_args(self):
return "rclone.args=serve restic --stdio" + self.backend_rclone_args()
def backend_rclone_args(self):
return f"--b2-account {self._backblaze_account} --b2-key {self._backblaze_key}"
def initialize_repository(self):
"""
Initialize repository with restic
@@ -130,9 +119,9 @@ class ResticController(metaclass=SingletonMetaclass):
initialize_repository_command = [
"restic",
"-o",
"rclone.args=serve restic --stdio",
self.rclone_args(),
"-r",
f"rclone:backblaze:{self._repository_name}/sfbackup",
self.restic_repo(),
"init",
]
with subprocess.Popen(
@@ -159,9 +148,9 @@ class ResticController(metaclass=SingletonMetaclass):
backup_command = [
"restic",
"-o",
"rclone.args=serve restic --stdio",
self.rclone_args(),
"-r",
f"rclone:backblaze:{self._repository_name}/sfbackup",
self.restic_repo(),
"--verbose",
"--json",
"backup",
@@ -228,9 +217,9 @@ class ResticController(metaclass=SingletonMetaclass):
backup_restoration_command = [
"restic",
"-o",
"rclone.args=serve restic --stdio",
self.rclone_args(),
"-r",
f"rclone:backblaze:{self._repository_name}/sfbackup",
self.restic_repo(),
"restore",
snapshot_id,
"--target",

View File

@@ -11,6 +11,7 @@ from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceS
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.huey import huey
from selfprivacy_api.utils.localization import Localization as L10n
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.bitwarden.icon import BITWARDEN_ICON
@@ -24,14 +25,14 @@ class Bitwarden(Service):
return "bitwarden"
@staticmethod
def get_display_name() -> str:
def get_display_name(locale: str = "en") -> str:
"""Return service display name."""
return "Bitwarden"
return "services.bitwarden.display_name"
@staticmethod
def get_description() -> str:
def get_description(locale: str = "en") -> str:
"""Return service description."""
return "Bitwarden is a password manager."
return "services.bitwarden.description"
@staticmethod
def get_svg_icon() -> str:
@@ -118,7 +119,7 @@ class Bitwarden(Service):
return storage_usage
@staticmethod
def get_location() -> str:
def get_drive() -> str:
with ReadUserData() as user_data:
if user_data.get("useBinds", False):
return user_data.get("bitwarden", {}).get("location", "sda1")
@@ -143,11 +144,13 @@ class Bitwarden(Service):
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
def move_to_volume(self, volume: BlockDevice, locale: str = "en") -> Job:
job = Jobs.add(
type_id="services.bitwarden.move",
name="Move Bitwarden",
description=f"Moving Bitwarden data to {volume.name}",
name=L10n().get("services.bitwarden.move_job.name", locale),
description=L10n()
.get("services.bitwarden.move_job.description")
.format(volume=volume.name),
)
move_service(

View File

@@ -44,7 +44,7 @@ def move_service(
)
return
# Check if we are on the same volume
old_volume = service.get_location()
old_volume = service.get_drive()
if old_volume == volume.name:
Jobs.update(
job=job,

View File

@@ -11,6 +11,7 @@ from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceS
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.huey import huey
from selfprivacy_api.utils.localization import Localization as L10n
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.gitea.icon import GITEA_ICON
@@ -24,14 +25,14 @@ class Gitea(Service):
return "gitea"
@staticmethod
def get_display_name() -> str:
def get_display_name(locale: str = "en") -> str:
"""Return service display name."""
return "Gitea"
return "services.gitea.display_name"
@staticmethod
def get_description() -> str:
def get_description(locale: str = "en") -> str:
"""Return service description."""
return "Gitea is a Git forge."
return "services.gitea.description"
@staticmethod
def get_svg_icon() -> str:
@@ -116,7 +117,7 @@ class Gitea(Service):
return storage_usage
@staticmethod
def get_location() -> str:
def get_drive() -> str:
with ReadUserData() as user_data:
if user_data.get("useBinds", False):
return user_data.get("gitea", {}).get("location", "sda1")
@@ -140,11 +141,13 @@ class Gitea(Service):
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
def move_to_volume(self, volume: BlockDevice, locale: str = "en") -> Job:
job = Jobs.add(
type_id="services.gitea.move",
name="Move Gitea",
description=f"Moving Gitea data to {volume.name}",
name=L10n().get("services.gitea.move_job.name", locale),
description=L10n()
.get("services.gitea.move_job.description", locale)
.format(volume=volume.name),
)
move_service(

View File

@@ -3,19 +3,17 @@ import base64
import subprocess
import typing
from selfprivacy_api.jobs import Job, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.jobs import Job
from selfprivacy_api.services.generic_size_counter import get_storage_usage
from selfprivacy_api.services.generic_status_getter import (
get_service_status,
get_service_status_from_several_units,
)
from selfprivacy_api.services.jitsi.icon import JITSI_ICON
from selfprivacy_api.utils.localization import Localization as L10n
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.huey import huey
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.jitsi.icon import JITSI_ICON
class Jitsi(Service):
@@ -27,14 +25,14 @@ class Jitsi(Service):
return "jitsi"
@staticmethod
def get_display_name() -> str:
def get_display_name(locale: str = "en") -> str:
"""Return service display name."""
return "Jitsi"
return "services.jitsi.display_name"
@staticmethod
def get_description() -> str:
def get_description(locale: str = "en") -> str:
"""Return service description."""
return "Jitsi is a free and open-source video conferencing solution."
return "services.jitsi.description"
@staticmethod
def get_svg_icon() -> str:
@@ -116,7 +114,7 @@ class Jitsi(Service):
return storage_usage
@staticmethod
def get_location() -> str:
def get_drive() -> str:
return "sda1"
@staticmethod
@@ -138,5 +136,5 @@ class Jitsi(Service):
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
def move_to_volume(self, volume: BlockDevice, locale: str = "en") -> Job:
raise NotImplementedError("jitsi service is not movable")

View File

@@ -13,6 +13,7 @@ from selfprivacy_api.services.generic_status_getter import (
)
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
import selfprivacy_api.utils as utils
from selfprivacy_api.utils.localization import Localization as L10n
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.huey import huey
import selfprivacy_api.utils.network as network_utils
@@ -27,12 +28,12 @@ class MailServer(Service):
return "mailserver"
@staticmethod
def get_display_name() -> str:
return "Mail Server"
def get_display_name(locale: str = "en") -> str:
return "services.mailserver.display_name"
@staticmethod
def get_description() -> str:
return "E-Mail for company and family."
def get_description(locale: str = "en") -> str:
return "services.mailserver.description"
@staticmethod
def get_svg_icon() -> str:
@@ -101,7 +102,7 @@ class MailServer(Service):
return get_storage_usage("/var/vmail")
@staticmethod
def get_location() -> str:
def get_drive() -> str:
with utils.ReadUserData() as user_data:
if user_data.get("useBinds", False):
return user_data.get("mailserver", {}).get("location", "sda1")
@@ -148,11 +149,13 @@ class MailServer(Service):
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
def move_to_volume(self, volume: BlockDevice, locale: str = "en") -> Job:
job = Jobs.add(
type_id="services.mailserver.move",
name="Move Mail Server",
description=f"Moving mailserver data to {volume.name}",
name=L10n().get("services.mailserver.move_job.name", locale),
description=L10n()
.get("services.mailserver.move_job.description", locale)
.format(volume=volume.name),
)
move_service(

View File

@@ -9,6 +9,7 @@ from selfprivacy_api.services.generic_status_getter import get_service_status
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.localization import Localization as L10n
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.nextcloud.icon import NEXTCLOUD_ICON
@@ -24,12 +25,12 @@ class Nextcloud(Service):
@staticmethod
def get_display_name() -> str:
"""Return service display name."""
return "Nextcloud"
return "services.nextcloud.display_name"
@staticmethod
def get_description() -> str:
"""Return service description."""
return "Nextcloud is a cloud storage service that offers a web interface and a desktop client."
return "services.nextcloud.description"
@staticmethod
def get_svg_icon() -> str:
@@ -123,7 +124,7 @@ class Nextcloud(Service):
return get_storage_usage("/var/lib/nextcloud")
@staticmethod
def get_location() -> str:
def get_drive() -> str:
"""Get the name of disk where Nextcloud is installed."""
with ReadUserData() as user_data:
if user_data.get("useBinds", False):
@@ -148,11 +149,13 @@
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
def move_to_volume(self, volume: BlockDevice, locale: str = "en") -> Job:
job = Jobs.add(
type_id="services.nextcloud.move",
name="Move Nextcloud",
description=f"Moving Nextcloud to volume {volume.name}",
name=L10n().get("services.nextcloud.move_job.name", locale),
description=L10n()
.get("services.nextcloud.move_job.description", locale)
.format(volume=volume.name),
)
move_service(
self,

View File

@@ -2,15 +2,15 @@
import base64
import subprocess
import typing
from selfprivacy_api.jobs import Job, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
from selfprivacy_api.services.generic_size_counter import get_storage_usage
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.jobs import Job
from selfprivacy_api.services.generic_status_getter import get_service_status
from selfprivacy_api.services.ocserv.icon import OCSERV_ICON
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.services.ocserv.icon import OCSERV_ICON
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.utils.localization import Localization as L10n
class Ocserv(Service):
@@ -21,12 +21,12 @@
return "ocserv"
@staticmethod
def get_display_name() -> str:
return "OpenConnect VPN"
def get_display_name(locale: str = "en") -> str:
return "services.ocserv.display_name"
@staticmethod
def get_description() -> str:
return "OpenConnect VPN to connect your devices and access the internet."
def get_description(locale: str = "en") -> str:
return "services.ocserv.description"
@staticmethod
def get_svg_icon() -> str:
@@ -93,7 +93,7 @@ class Ocserv(Service):
return ""
@staticmethod
def get_location() -> str:
def get_drive() -> str:
return "sda1"
@staticmethod
@@ -117,5 +117,5 @@ class Ocserv(Service):
def get_storage_usage() -> int:
return 0
def move_to_volume(self, volume: BlockDevice) -> Job:
def move_to_volume(self, volume: BlockDevice, locale: str = "en") -> Job:
raise NotImplementedError("ocserv service is not movable")
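Since ocserv pins its data to the root drive, move_to_volume() stays unimplemented and callers are expected to consult is_movable() first. A hypothetical guard — only is_movable(), get_id() and move_to_volume() come from the Service interface; the helper itself is not part of this diff:

def move_if_possible(service, volume, locale="en"):
    # Refuse early instead of catching NotImplementedError from ocserv
    if not service.is_movable():
        raise ValueError(f"{service.get_id()} cannot be moved")
    return service.move_to_volume(volume, locale)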

View File

@@ -9,6 +9,7 @@ from selfprivacy_api.services.generic_status_getter import get_service_status
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.localization import Localization as L10n
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.pleroma.icon import PLEROMA_ICON
@@ -21,12 +22,12 @@ class Pleroma(Service):
return "pleroma"
@staticmethod
def get_display_name() -> str:
return "Pleroma"
def get_display_name(locale: str = "en") -> str:
return "services.pleroma.display_name"
@staticmethod
def get_description() -> str:
return "Pleroma is a microblogging service that offers a web interface and a desktop client."
def get_description(locale: str = "en") -> str:
return "services.pleroma.description"
@staticmethod
def get_svg_icon() -> str:
@@ -104,7 +105,7 @@ class Pleroma(Service):
return storage_usage
@staticmethod
def get_location() -> str:
def get_drive() -> str:
with ReadUserData() as user_data:
if user_data.get("useBinds", False):
return user_data.get("pleroma", {}).get("location", "sda1")
@@ -128,11 +129,13 @@ class Pleroma(Service):
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
def move_to_volume(self, volume: BlockDevice, locale: str = "en") -> Job:
job = Jobs.add(
type_id="services.pleroma.move",
name="Move Pleroma",
description=f"Moving Pleroma to volume {volume.name}",
name=L10n().get("services.pleroma.move_job.name", locale),
description=L10n()
.get("services.pleroma.move_job.description", locale)
.format(volume=volume.name),
)
move_service(
self,

View File

@@ -132,9 +132,20 @@ class Service(ABC):
@staticmethod
@abstractmethod
def get_location() -> str:
def get_drive() -> str:
pass
# @staticmethod
# @abstractmethod
# def get_folders() -> str:
# pass
@abstractmethod
def move_to_volume(self, volume: BlockDevice) -> Job:
pass
def pre_backup(self):
pass
def post_restore(self):
pass
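The new pre_backup()/post_restore() hooks default to no-ops, so the backup layer can call them unconditionally. A minimal sketch of that calling pattern — the two wrappers are hypothetical, while backuper.start_backup(folders, name) and Backups.restore_service_from_snapshot() are the calls the tests below actually make:

from selfprivacy_api.backup import Backups

def back_up_with_hooks(backuper, service):
    service.pre_backup()  # no-op unless a service overrides it to quiesce data
    return backuper.start_backup(service.get_folders(), service.get_id())

def restore_with_hooks(service, snapshot_id):
    Backups.restore_service_from_snapshot(service, snapshot_id)
    service.post_restore()  # no-op hook, e.g. to fix ownership after a restore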

View File

@@ -0,0 +1,137 @@
"""Class representing Bitwarden service"""
import base64
import typing
from typing import List
from selfprivacy_api.jobs import Job
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.test_service.icon import BITWARDEN_ICON
class DummyService(Service):
"""A test service"""
def __init_subclass__(cls, folders: List[str]):
cls.folders = folders
@staticmethod
def get_id() -> str:
"""Return service id."""
return "testservice"
@staticmethod
def get_display_name() -> str:
"""Return service display name."""
return "Test Service"
@staticmethod
def get_description() -> str:
"""Return service description."""
return "A small service used for test purposes. Does nothing."
@staticmethod
def get_svg_icon() -> str:
"""Read SVG icon from file and return it as base64 encoded string."""
# return ""
return base64.b64encode(BITWARDEN_ICON.encode("utf-8")).decode("utf-8")
@staticmethod
def get_url() -> typing.Optional[str]:
"""Return service url."""
domain = get_domain()
return f"https://password.{domain}"
@staticmethod
def is_movable() -> bool:
return True
@staticmethod
def is_required() -> bool:
return False
@staticmethod
def is_enabled() -> bool:
return True
@staticmethod
def get_status() -> ServiceStatus:
"""
Return the test service status as if from systemd.
Use command return code to determine status.
Return code 0 means service is running.
Return code 1 or 2 means service is in error state.
Return code 3 means service is stopped.
Return code 4 means service is off.
"""
return ServiceStatus.ACTIVE  # matches the declared return type; the dummy always "runs"
@staticmethod
def enable():
pass
@staticmethod
def disable():
pass
@staticmethod
def stop():
pass
@staticmethod
def start():
pass
@staticmethod
def restart():
pass
@staticmethod
def get_configuration():
return {}
@staticmethod
def set_configuration(config_items):
return super().set_configuration(config_items)
@staticmethod
def get_logs():
return ""
@staticmethod
def get_storage_usage() -> int:
storage_usage = 0
return storage_usage
@staticmethod
def get_drive() -> str:
return "sda1"
@classmethod
def get_folders(cls) -> List[str]:
return cls.folders
@staticmethod
def get_dns_records() -> typing.List[ServiceDnsRecord]:
"""Return list of DNS records for Bitwarden service."""
return [
ServiceDnsRecord(
type="A",
name="password",
content=network_utils.get_ip4(),
ttl=3600,
),
ServiceDnsRecord(
type="AAAA",
name="password",
content=network_utils.get_ip6(),
ttl=3600,
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
pass
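Because folders are bound at subclass time via __init_subclass__, each test can declare its own folder layout without overriding get_folders(). The backup tests at the end of this diff use exactly this shape; the class name and paths here are illustrative:

from selfprivacy_api.services.test_service import DummyService

class TwoFolderService(DummyService, folders=["/tmp/a", "/tmp/b"]):
    pass

assert TwoFolderService().get_folders() == ["/tmp/a", "/tmp/b"]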

View File

@@ -0,0 +1,3 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.125 2C4.2962 2 3.50134 2.32924 2.91529 2.91529C2.32924 3.50134 2 4.2962 2 5.125L2 18.875C2 19.7038 2.32924 20.4987 2.91529 21.0847C3.50134 21.6708 4.2962 22 5.125 22H18.875C19.7038 22 20.4987 21.6708 21.0847 21.0847C21.6708 20.4987 22 19.7038 22 18.875V5.125C22 4.2962 21.6708 3.50134 21.0847 2.91529C20.4987 2.32924 19.7038 2 18.875 2H5.125ZM6.25833 4.43333H17.7583C17.9317 4.43333 18.0817 4.49667 18.2083 4.62333C18.2688 4.68133 18.3168 4.7511 18.3494 4.82835C18.3819 4.9056 18.3983 4.98869 18.3975 5.0725V12.7392C18.3975 13.3117 18.2858 13.8783 18.0633 14.4408C17.8558 14.9751 17.5769 15.4789 17.2342 15.9383C16.8824 16.3987 16.4882 16.825 16.0567 17.2117C15.6008 17.6242 15.18 17.9667 14.7942 18.24C14.4075 18.5125 14.005 18.77 13.5858 19.0133C13.1667 19.2558 12.8692 19.4208 12.6925 19.5075C12.5158 19.5942 12.375 19.6608 12.2675 19.7075C12.1872 19.7472 12.0987 19.7674 12.0092 19.7667C11.919 19.7674 11.8299 19.7468 11.7492 19.7067C11.6062 19.6429 11.4645 19.5762 11.3242 19.5067C11.0218 19.3511 10.7242 19.1866 10.4317 19.0133C10.0175 18.7738 9.6143 18.5158 9.22333 18.24C8.7825 17.9225 8.36093 17.5791 7.96083 17.2117C7.52907 16.825 7.13456 16.3987 6.7825 15.9383C6.44006 15.4788 6.16141 14.9751 5.95417 14.4408C5.73555 13.9 5.62213 13.3225 5.62 12.7392V5.0725C5.62 4.89917 5.68333 4.75 5.80917 4.6225C5.86726 4.56188 5.93717 4.51382 6.01457 4.48129C6.09196 4.44875 6.17521 4.43243 6.25917 4.43333H6.25833ZM12.0083 6.35V17.7C12.8 17.2817 13.5092 16.825 14.135 16.3333C15.6992 15.1083 16.4808 13.9108 16.4808 12.7392V6.35H12.0083Z" fill="black"/>
</svg>


View File

@@ -0,0 +1,5 @@
BITWARDEN_ICON = """
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.125 2C4.2962 2 3.50134 2.32924 2.91529 2.91529C2.32924 3.50134 2 4.2962 2 5.125L2 18.875C2 19.7038 2.32924 20.4987 2.91529 21.0847C3.50134 21.6708 4.2962 22 5.125 22H18.875C19.7038 22 20.4987 21.6708 21.0847 21.0847C21.6708 20.4987 22 19.7038 22 18.875V5.125C22 4.2962 21.6708 3.50134 21.0847 2.91529C20.4987 2.32924 19.7038 2 18.875 2H5.125ZM6.25833 4.43333H17.7583C17.9317 4.43333 18.0817 4.49667 18.2083 4.62333C18.2688 4.68133 18.3168 4.7511 18.3494 4.82835C18.3819 4.9056 18.3983 4.98869 18.3975 5.0725V12.7392C18.3975 13.3117 18.2858 13.8783 18.0633 14.4408C17.8558 14.9751 17.5769 15.4789 17.2342 15.9383C16.8824 16.3987 16.4882 16.825 16.0567 17.2117C15.6008 17.6242 15.18 17.9667 14.7942 18.24C14.4075 18.5125 14.005 18.77 13.5858 19.0133C13.1667 19.2558 12.8692 19.4208 12.6925 19.5075C12.5158 19.5942 12.375 19.6608 12.2675 19.7075C12.1872 19.7472 12.0987 19.7674 12.0092 19.7667C11.919 19.7674 11.8299 19.7468 11.7492 19.7067C11.6062 19.6429 11.4645 19.5762 11.3242 19.5067C11.0218 19.3511 10.7242 19.1866 10.4317 19.0133C10.0175 18.7738 9.6143 18.5158 9.22333 18.24C8.7825 17.9225 8.36093 17.5791 7.96083 17.2117C7.52907 16.825 7.13456 16.3987 6.7825 15.9383C6.44006 15.4788 6.16141 14.9751 5.95417 14.4408C5.73555 13.9 5.62213 13.3225 5.62 12.7392V5.0725C5.62 4.89917 5.68333 4.75 5.80917 4.6225C5.86726 4.56188 5.93717 4.51382 6.01457 4.48129C6.09196 4.44875 6.17521 4.43243 6.25917 4.43333H6.25833ZM12.0083 6.35V17.7C12.8 17.2817 13.5092 16.825 14.135 16.3333C15.6992 15.1083 16.4808 13.9108 16.4808 12.7392V6.35H12.0083Z" fill="black"/>
</svg>
"""

View File

@@ -0,0 +1,73 @@
"""
A localization module that loads strings from JSONs in the locale directory.
It provides a function to get a localized string by its ID.
If the string is not found in the current locale, it will try to find it in the default locale.
If the string is not found in the default locale, it will return the ID.
The locales are loaded into memory at API startup and kept in a singleton.
"""
import json
import os
import typing
from pathlib import Path
from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass
DEFAULT_LOCALE = "en"
LOCALE_DIR: Path = Path(__file__).parent.parent / "locales"
class Localization(metaclass=SingletonMetaclass):
"""Localization class."""
def __init__(self):
self.locales: typing.Dict[str, typing.Dict[str, str]] = {}
self.load_locales()
def load_locales(self):
"""Load locales from locale directory."""
for locale in os.listdir(str(LOCALE_DIR)):
locale_path = LOCALE_DIR / locale
if not locale_path.is_dir():
continue
self.locales[locale] = {}
for file in os.listdir(str(locale_path)):
if file.endswith(".json"):
with open(locale_path / file, "r") as locale_file:
locale_data = self.flatten_dict(json.load(locale_file))
self.locales[locale].update(locale_data)
def get(self, string_id: str, locale: str = DEFAULT_LOCALE) -> str:
"""Get localized string by its ID."""
if locale in self.locales and string_id in self.locales[locale]:
return self.locales[locale][string_id]
if DEFAULT_LOCALE in self.locales and string_id in self.locales[DEFAULT_LOCALE]:
return self.locales[DEFAULT_LOCALE][string_id]
return string_id
def supported_locales(self) -> typing.List[str]:
"""Return a list of supported languages."""
return list(self.locales.keys())
def get_locale(self, locale: typing.Optional[str]) -> str:
"""Parse the value of Accept-Language header and return the most preferred supported locale."""
if locale is None:
return DEFAULT_LOCALE
for lang in locale.split(","):
lang = lang.split(";")[0]
if lang in self.locales:
return lang
return DEFAULT_LOCALE
def flatten_dict(
self, d: typing.Dict[str, typing.Any], parent_key: str = "", sep: str = "."
) -> typing.Dict[str, str]:
"""Flatten a dict."""
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, dict):
items.extend(self.flatten_dict(v, new_key, sep=sep).items())
else:
items.append((new_key, v))
return dict(items)
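A short usage sketch of the singleton, with string IDs taken from the service diffs above (the fallback behavior is exactly what get() implements; the ru locale directory is an assumption):

from selfprivacy_api.utils.localization import Localization as L10n

l10n = L10n()  # singleton: repeated instantiations share the loaded locales

# Requested locale first, then the default locale, then the raw ID:
l10n.get("services.nextcloud.move_job.name", "ru")
l10n.get("no.such.key")  # -> "no.such.key"

# Most preferred supported language from an Accept-Language header:
l10n.get_locale("ru;q=0.9,en;q=0.8")  # -> "ru" if a ru locale is loaded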

View File

@@ -0,0 +1,30 @@
from datetime import datetime
from typing import Optional
def store_model_as_hash(redis, redis_key, model):
for key, value in model.dict().items():
if isinstance(value, datetime):
value = value.isoformat()
redis.hset(redis_key, key, str(value))
def hash_as_model(redis, redis_key: str, model_class):
token_dict = _model_dict_from_hash(redis, redis_key)
if token_dict is not None:
return model_class(**token_dict)
return None
def _prepare_model_dict(d: dict):
for key in d.keys():
if d[key] == "None":
d[key] = None
def _model_dict_from_hash(redis, redis_key: str) -> Optional[dict]:
if redis.exists(redis_key):
token_dict = redis.hgetall(redis_key)
_prepare_model_dict(token_dict)
return token_dict
return None
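A round-trip sketch mirroring the test file at the end of this diff. It assumes the Redis connection decodes responses to str — _prepare_model_dict() compares fields against the literal string "None", so raw bytes would never match:

from datetime import datetime
from typing import Optional
from pydantic import BaseModel

class Example(BaseModel):
    # Illustrative model: datetimes are stored as ISO strings, None as "None"
    name: str
    date: Optional[datetime]

# With a live connection (see the tests below for the RedisPool variant):
# store_model_as_hash(redis, "example_key", Example(name="x", date=None))
# restored = hash_as_model(redis, "example_key", Example)
# assert restored == Example(name="x", date=None)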

View File

@@ -7,4 +7,5 @@ setup(
scripts=[
"selfprivacy_api/app.py",
],
include_package_data=True,
)

View File

@@ -1,6 +1,6 @@
{ pkgs ? import <nixpkgs> { } }:
{ pkgs ? import <nixos-22.11> { } }:
let
sp-python = pkgs.python39.withPackages (p: with p; [
sp-python = pkgs.python310.withPackages (p: with p; [
setuptools
portalocker
pytz
@@ -19,45 +19,7 @@ let
fastapi
uvicorn
redis
(buildPythonPackage rec {
pname = "strawberry-graphql";
version = "0.123.0";
format = "pyproject";
patches = [
./strawberry-graphql.patch
];
propagatedBuildInputs = [
typing-extensions
python-multipart
python-dateutil
# flask
pydantic
pygments
poetry
# flask-cors
(buildPythonPackage rec {
pname = "graphql-core";
version = "3.2.0";
format = "setuptools";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-huKgvgCL/eGe94OI3opyWh2UKpGQykMcJKYIN5c4A84=";
};
checkInputs = [
pytest-asyncio
pytest-benchmark
pytestCheckHook
];
pythonImportsCheck = [
"graphql"
];
})
];
src = fetchPypi {
inherit pname version;
sha256 = "KsmZ5Xv8tUg6yBxieAEtvoKoRG60VS+iVGV0X6oCExo=";
};
})
strawberry-graphql
]);
in
pkgs.mkShell {
@@ -65,6 +27,7 @@ pkgs.mkShell {
sp-python
pkgs.black
pkgs.redis
pkgs.restic
];
shellHook = ''
PYTHONPATH=${sp-python}/${sp-python.sitePackages}

View File

@@ -3,6 +3,8 @@
# pylint: disable=unused-argument
import os
import pytest
from os import path
from fastapi.testclient import TestClient
@@ -10,6 +12,10 @@ def pytest_generate_tests(metafunc):
os.environ["TEST_MODE"] = "true"
def global_data_dir():
return path.join(path.dirname(__file__), "data")
@pytest.fixture
def tokens_file(mocker, shared_datadir):
"""Mock tokens file."""
@@ -26,6 +32,20 @@ def jobs_file(mocker, shared_datadir):
return mock
@pytest.fixture
def generic_userdata(mocker, tmpdir):
filename = "turned_on.json"
source_path = path.join(global_data_dir(), filename)
userdata_path = path.join(tmpdir, filename)
with open(userdata_path, "w") as file:
with open(source_path, "r") as source:
file.write(source.read())
mock = mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=userdata_path)
return mock
@pytest.fixture
def huey_database(mocker, shared_datadir):
"""Mock huey database."""

tests/data/turned_on.json (new file)
View File

@@ -0,0 +1,60 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": true
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": true
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"jitsi": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View File

@@ -0,0 +1,394 @@
import pytest
import os.path as path
from os import makedirs
from os import remove
from os import listdir
from datetime import datetime, timedelta, timezone
import selfprivacy_api.services as services
from selfprivacy_api.services import get_service_by_id
from selfprivacy_api.services.test_service import DummyService
from selfprivacy_api.graphql.queries.providers import BackupProvider
from selfprivacy_api.backup import Backups
import selfprivacy_api.backup.providers as providers
from selfprivacy_api.backup.providers import AbstractBackupProvider
from selfprivacy_api.backup.providers.backblaze import Backblaze
from selfprivacy_api.backup.tasks import start_backup
from selfprivacy_api.backup.storage import Storage
TESTFILE_BODY = "testytest!"
TESTFILE_2_BODY = "testissimo!"
REPO_NAME = "test_backup"
@pytest.fixture(scope="function")
def backups(tmpdir):
Backups.reset()
test_repo_path = path.join(tmpdir, "totallyunrelated")
Backups.set_localfile_repo(test_repo_path)
@pytest.fixture()
def backups_backblaze(generic_userdata):
Backups.reset()
@pytest.fixture()
def raw_dummy_service(tmpdir, backups):
dirnames = ["test_service", "also_test_service"]
service_dirs = []
for d in dirnames:
service_dir = path.join(tmpdir, d)
makedirs(service_dir)
service_dirs.append(service_dir)
testfile_path_1 = path.join(service_dirs[0], "testfile.txt")
with open(testfile_path_1, "w") as file:
file.write(TESTFILE_BODY)
testfile_path_2 = path.join(service_dirs[1], "testfile2.txt")
with open(testfile_path_2, "w") as file:
file.write(TESTFILE_2_BODY)
# we need this to not change get_folders() much
class TestDummyService(DummyService, folders=service_dirs):
pass
service = TestDummyService()
return service
@pytest.fixture()
def dummy_service(tmpdir, backups, raw_dummy_service):
service = raw_dummy_service
repo_path = path.join(tmpdir, "test_repo")
assert not path.exists(repo_path)
# assert not repo_path
Backups.init_repo(service)
# register our service
services.services.append(service)
assert get_service_by_id(service.get_id()) is not None
return service
@pytest.fixture()
def memory_backup() -> AbstractBackupProvider:
ProviderClass = providers.get_provider(BackupProvider.MEMORY)
assert ProviderClass is not None
memory_provider = ProviderClass(login="", key="")
assert memory_provider is not None
return memory_provider
@pytest.fixture()
def file_backup(tmpdir) -> AbstractBackupProvider:
test_repo_path = path.join(tmpdir, "test_repo")
ProviderClass = providers.get_provider(BackupProvider.FILE)
assert ProviderClass is not None
provider = ProviderClass(test_repo_path)
assert provider is not None
return provider
def test_config_load(generic_userdata):
Backups.reset()
provider = Backups.provider()
assert provider is not None
assert isinstance(provider, Backblaze)
assert provider.login == "ID"
assert provider.key == "KEY"
def test_select_backend():
provider = providers.get_provider(BackupProvider.BACKBLAZE)
assert provider is not None
assert provider == Backblaze
def test_file_backend_init(file_backup):
file_backup.backuper.init("somerepo")
def test_backup_simple_file(raw_dummy_service, file_backup):
# temporarily incomplete
service = raw_dummy_service
assert service is not None
assert file_backup is not None
name = service.get_id()
file_backup.backuper.init(name)
def test_backup_service(dummy_service, backups):
assert Backups.get_last_backed_up(dummy_service) is None
Backups.back_up(dummy_service)
now = datetime.now(timezone.utc)
date = Backups.get_last_backed_up(dummy_service)
assert date is not None
assert now > date
assert now - date < timedelta(minutes=1)
def test_no_repo(memory_backup):
with pytest.raises(ValueError):
assert memory_backup.backuper.get_snapshots("") == []
def test_one_snapshot(backups, dummy_service):
Backups.back_up(dummy_service)
snaps = Backups.get_snapshots(dummy_service)
assert len(snaps) == 1
snap = snaps[0]
assert snap.service_name == dummy_service.get_id()
def test_backup_returns_snapshot(backups, dummy_service):
service_folders = dummy_service.get_folders()
provider = Backups.provider()
name = dummy_service.get_id()
snapshot = provider.backuper.start_backup(service_folders, name)
assert snapshot.id is not None
assert snapshot.service_name == name
assert snapshot.created_at is not None
def test_restore(backups, dummy_service):
paths_to_nuke = []
contents = []
for service_folder in dummy_service.get_folders():
file_to_nuke = listdir(service_folder)[0]
assert file_to_nuke is not None
path_to_nuke = path.join(service_folder, file_to_nuke)
paths_to_nuke.append(path_to_nuke)
with open(path_to_nuke, "r") as file:
contents.append(file.read())
Backups.back_up(dummy_service)
snap = Backups.get_snapshots(dummy_service)[0]
assert snap is not None
for p in paths_to_nuke:
assert path.exists(p)
remove(p)
assert not path.exists(p)
Backups.restore_service_from_snapshot(dummy_service, snap.id)
for p, content in zip(paths_to_nuke, contents):
assert path.exists(p)
with open(p, "r") as file:
assert file.read() == content
def test_sizing(backups, dummy_service):
Backups.back_up(dummy_service)
snap = Backups.get_snapshots(dummy_service)[0]
size = Backups.service_snapshot_size(dummy_service, snap.id)
assert size is not None
assert size > 0
def test_init_tracking(backups, raw_dummy_service):
assert Backups.is_initted(raw_dummy_service) is False
Backups.init_repo(raw_dummy_service)
assert Backups.is_initted(raw_dummy_service) is True
def test_backup_service_task(backups, dummy_service):
handle = start_backup(dummy_service)
handle(blocking=True)
snaps = Backups.get_snapshots(dummy_service)
assert len(snaps) == 1
def test_autobackup_enable_service(backups, dummy_service):
assert not Backups.is_autobackup_enabled(dummy_service)
Backups.enable_autobackup(dummy_service)
assert Backups.is_autobackup_enabled(dummy_service)
Backups.disable_autobackup(dummy_service)
assert not Backups.is_autobackup_enabled(dummy_service)
def test_autobackup_enable_service_storage(backups, dummy_service):
assert len(Storage.services_with_autobackup()) == 0
Backups.enable_autobackup(dummy_service)
assert len(Storage.services_with_autobackup()) == 1
assert Storage.services_with_autobackup()[0] == dummy_service.get_id()
Backups.disable_autobackup(dummy_service)
assert len(Storage.services_with_autobackup()) == 0
def test_set_autobackup_period(backups):
assert Backups.autobackup_period_minutes() is None
Backups.set_autobackup_period_minutes(2)
assert Backups.autobackup_period_minutes() == 2
Backups.disable_all_autobackup()
assert Backups.autobackup_period_minutes() is None
Backups.set_autobackup_period_minutes(3)
assert Backups.autobackup_period_minutes() == 3
Backups.set_autobackup_period_minutes(0)
assert Backups.autobackup_period_minutes() is None
Backups.set_autobackup_period_minutes(3)
assert Backups.autobackup_period_minutes() == 3
Backups.set_autobackup_period_minutes(-1)
assert Backups.autobackup_period_minutes() is None
def test_no_default_autobackup(backups, dummy_service):
now = datetime.now(timezone.utc)
assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now)
assert not Backups.is_time_to_backup(now)
def test_autobackup_timer_periods(backups, dummy_service):
now = datetime.now(timezone.utc)
backup_period = 13 # minutes
Backups.enable_autobackup(dummy_service)
assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now)
assert not Backups.is_time_to_backup(now)
Backups.set_autobackup_period_minutes(backup_period)
assert Backups.is_time_to_backup_service(dummy_service.get_id(), now)
assert Backups.is_time_to_backup(now)
Backups.set_autobackup_period_minutes(0)
assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now)
assert not Backups.is_time_to_backup(now)
def test_autobackup_timer_enabling(backups, dummy_service):
now = datetime.now(timezone.utc)
backup_period = 13 # minutes
Backups.set_autobackup_period_minutes(backup_period)
assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now)
assert not Backups.is_time_to_backup(now)
Backups.enable_autobackup(dummy_service)
assert Backups.is_time_to_backup_service(dummy_service.get_id(), now)
assert Backups.is_time_to_backup(now)
Backups.disable_autobackup(dummy_service)
assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now)
assert not Backups.is_time_to_backup(now)
def test_autobackup_timing(backups, dummy_service):
backup_period = 13 # minutes
now = datetime.now(timezone.utc)
Backups.enable_autobackup(dummy_service)
Backups.set_autobackup_period_minutes(backup_period)
assert Backups.is_time_to_backup_service(dummy_service.get_id(), now)
assert Backups.is_time_to_backup(now)
Backups.back_up(dummy_service)
now = datetime.now(timezone.utc)
assert not Backups.is_time_to_backup_service(dummy_service.get_id(), now)
assert not Backups.is_time_to_backup(now)
past = datetime.now(timezone.utc) - timedelta(minutes=1)
assert not Backups.is_time_to_backup_service(dummy_service.get_id(), past)
assert not Backups.is_time_to_backup(past)
future = datetime.now(timezone.utc) + timedelta(minutes=backup_period + 2)
assert Backups.is_time_to_backup_service(dummy_service.get_id(), future)
assert Backups.is_time_to_backup(future)
# Storage
def test_snapshots_caching(backups, dummy_service):
Backups.back_up(dummy_service)
# we test indirectly that we do redis calls instead of shell calls
start = datetime.now()
for i in range(10):
snapshots = Backups.get_snapshots(dummy_service)
assert len(snapshots) == 1
assert datetime.now() - start < timedelta(seconds=0.5)
cached_snapshots = Storage.get_cached_snapshots()
assert len(cached_snapshots) == 1
Storage.delete_cached_snapshot(cached_snapshots[0])
cached_snapshots = Storage.get_cached_snapshots()
assert len(cached_snapshots) == 0
snapshots = Backups.get_snapshots(dummy_service)
assert len(snapshots) == 1
cached_snapshots = Storage.get_cached_snapshots()
assert len(cached_snapshots) == 1
# Storage
def test_init_tracking_caching(backups, raw_dummy_service):
assert Storage.has_init_mark(raw_dummy_service) is False
Storage.mark_as_init(raw_dummy_service)
assert Storage.has_init_mark(raw_dummy_service) is True
assert Backups.is_initted(raw_dummy_service) is True
# Storage
def test_init_tracking_caching2(backups, raw_dummy_service):
assert Storage.has_init_mark(raw_dummy_service) is False
Backups.init_repo(raw_dummy_service)
assert Storage.has_init_mark(raw_dummy_service) is True
# Storage
def test_provider_storage(backups_backblaze):
Backups.reset()
provider = Backups.provider()
assert provider is not None
assert isinstance(provider, Backblaze)
assert provider.login == "ID"
assert provider.key == "KEY"
Storage.store_provider(provider)
restored_provider = Backups.load_provider_redis()
assert isinstance(restored_provider, Backblaze)
assert restored_provider.login == "ID"
assert restored_provider.key == "KEY"
def test_services_to_back_up(backups, dummy_service):
backup_period = 13 # minutes
now = datetime.now(timezone.utc)
Backups.enable_autobackup(dummy_service)
Backups.set_autobackup_period_minutes(backup_period)
services = Backups.services_to_back_up(now)
assert len(services) == 1
assert services[0].get_id() == dummy_service.get_id()

View File

@@ -0,0 +1,36 @@
import pytest
from pydantic import BaseModel
from datetime import datetime
from typing import Optional
from selfprivacy_api.utils.redis_model_storage import store_model_as_hash, hash_as_model
from selfprivacy_api.utils.redis_pool import RedisPool
TEST_KEY = "model_storage"
redis = RedisPool().get_connection()
@pytest.fixture()
def clean_redis():
redis.delete(TEST_KEY)
class DummyModel(BaseModel):
name: str
date: Optional[datetime]
def test_store_retrieve(clean_redis):
model = DummyModel(
name="test",
date=datetime.now()
)
store_model_as_hash(redis, TEST_KEY, model)
assert hash_as_model(redis, TEST_KEY, DummyModel) == model
def test_store_retrieve_none(clean_redis):
model = DummyModel(
name="test",
date=None
)
store_model_as_hash(redis, TEST_KEY, model)
assert hash_as_model(redis, TEST_KEY, DummyModel) == model