Compare commits
6 Commits
64e7afe53e
...
b6eb27dc5e
Author | SHA1 | Date |
---|---|---|
![]() |
b6eb27dc5e | |
![]() |
c5088e0e2c | |
![]() |
953860a02c | |
![]() |
6a00d3cff9 | |
![]() |
08cc7740b3 | |
![]() |
9793201ca1 |
|
@ -5,7 +5,11 @@ import datetime
|
|||
from typing import List
|
||||
from collections.abc import Iterable
|
||||
from json.decoder import JSONDecodeError
|
||||
from os.path import exists
|
||||
from os import listdir
|
||||
from time import sleep
|
||||
|
||||
from selfprivacy_api.backup.util import output_yielder
|
||||
from selfprivacy_api.backup.backuppers import AbstractBackupper
|
||||
from selfprivacy_api.models.backup.snapshot import Snapshot
|
||||
from selfprivacy_api.backup.jobs import get_backup_job
|
||||
|
@ -50,7 +54,7 @@ class ResticBackupper(AbstractBackupper):
|
|||
def _password_command(self):
    """Return the shell command restic invokes to obtain the repository password."""
    secret = LocalBackupSecret.get()
    return "echo {}".format(secret)
|
||||
|
||||
def restic_command(self, *args, tag: str = ""):
|
||||
def restic_command(self, *args, tag: str = "") -> List[str]:
|
||||
command = [
|
||||
"restic",
|
||||
"-o",
|
||||
|
@ -71,6 +75,28 @@ class ResticBackupper(AbstractBackupper):
|
|||
command.extend(ResticBackupper.__flatten_list(args))
|
||||
return command
|
||||
|
||||
def mount_repo(self, dir):
    """Mount the restic repository onto *dir* with ``restic mount``.

    The mount runs as a detached background process (prefixed with
    ``nohup``); the returned ``subprocess.Popen`` handle lets the caller
    terminate it later.

    Raises:
        IOError: if the expected "ids" entry has not appeared inside
            *dir* shortly after mounting.
    """
    mount_command = self.restic_command("mount", dir)
    mount_command.insert(0, "nohup")
    handle = subprocess.Popen(
        mount_command, stdout=subprocess.DEVNULL, shell=False
    )
    # FUSE mounting is asynchronous; give restic a moment to populate
    # the mountpoint. TODO: poll for readiness instead of a fixed sleep.
    sleep(2)
    if "ids" not in listdir(dir):
        raise IOError("failed to mount dir ", dir)
    return handle
|
||||
|
||||
def unmount_repo(self, dir):
    """Lazily unmount the restic FUSE mount at *dir*.

    Uses ``umount -l`` so the detach succeeds even while the mountpoint
    is still busy.

    Raises:
        IOError: if umount reports an error in its output, or if *dir*
            is not empty afterwards.
    """
    mount_command = ["umount", "-l", dir]
    with subprocess.Popen(
        mount_command, stdout=subprocess.PIPE, shell=False
    ) as handle:
        output = handle.communicate()[0].decode("utf-8")
        # TODO: check for exit code?
        if "error" in output.lower():
            # Previously the IOError was *returned* rather than raised,
            # so unmount failures were silently discarded by callers.
            raise IOError("failed to unmount dir ", dir, ": ", output)

    if listdir(dir) != []:
        raise IOError("failed to unmount dir ", dir)
|
||||
|
||||
@staticmethod
|
||||
def __flatten_list(list):
|
||||
"""string-aware list flattener"""
|
||||
|
@ -82,19 +108,6 @@ class ResticBackupper(AbstractBackupper):
|
|||
result.append(item)
|
||||
return result
|
||||
|
||||
@staticmethod
def output_yielder(command):
    """Run *command* as a subprocess and yield its output line by line.

    stderr is merged into stdout so progress and error text arrive on a
    single stream; lines containing "NOTICE:" are dropped (restic
    chatter, not payload).
    """
    with subprocess.Popen(
        command,
        shell=False,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,  # text mode: yields str lines
    ) as handle:
        # readline returns "" only at EOF in text mode
        for line in iter(handle.stdout.readline, ""):
            if "NOTICE:" not in line:
                yield line
|
||||
|
||||
def start_backup(self, folders: List[str], tag: str):
|
||||
"""
|
||||
Start backup with restic
|
||||
|
@ -113,7 +126,7 @@ class ResticBackupper(AbstractBackupper):
|
|||
messages = []
|
||||
job = get_backup_job(get_service_by_id(tag))
|
||||
try:
|
||||
for raw_message in ResticBackupper.output_yielder(backup_command):
|
||||
for raw_message in output_yielder(backup_command):
|
||||
message = self.parse_message(raw_message, job)
|
||||
messages.append(message)
|
||||
return ResticBackupper._snapshot_from_backup_messages(messages, tag)
|
||||
|
|
|
@ -0,0 +1,27 @@
|
|||
import subprocess
|
||||
from os.path import exists
|
||||
|
||||
|
||||
def output_yielder(command):
    """Execute *command* and yield its combined stdout/stderr line by line.

    Lines containing "NOTICE:" are suppressed (informational chatter,
    not payload).
    """
    process = subprocess.Popen(
        command,
        shell=False,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
    )
    with process:
        while True:
            line = process.stdout.readline()
            if line == "":
                # text-mode readline returns "" only at EOF
                break
            if "NOTICE:" in line:
                continue
            yield line
|
||||
|
||||
|
||||
def sync(src_path: str, dest_path: str):
    """a wrapper around rclone sync

    Mirrors *src_path* into *dest_path*; raises ValueError if the
    source is missing or rclone reports an error line.
    """
    if not exists(src_path):
        raise ValueError("source dir for rclone sync must exist")

    command = ["rclone", "sync", "-P", src_path, dest_path]
    for line in output_yielder(command):
        if "ERROR" in line:
            raise ValueError(line)
|
|
@ -5,8 +5,11 @@ from os import remove
|
|||
from os import listdir
|
||||
from os import urandom
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from subprocess import Popen
|
||||
|
||||
import selfprivacy_api.services as services
|
||||
from selfprivacy_api.services import Service
|
||||
|
||||
from selfprivacy_api.services import get_service_by_id
|
||||
from selfprivacy_api.services.test_service import DummyService
|
||||
from selfprivacy_api.graphql.queries.providers import BackupProvider
|
||||
|
@ -16,6 +19,9 @@ from selfprivacy_api.backup import Backups
|
|||
import selfprivacy_api.backup.providers as providers
|
||||
from selfprivacy_api.backup.providers import AbstractBackupProvider
|
||||
from selfprivacy_api.backup.providers.backblaze import Backblaze
|
||||
from selfprivacy_api.backup.util import sync
|
||||
from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper
|
||||
|
||||
|
||||
from selfprivacy_api.backup.tasks import start_backup, restore_snapshot
|
||||
from selfprivacy_api.backup.storage import Storage
|
||||
|
@ -68,7 +74,7 @@ def raw_dummy_service(tmpdir, backups):
|
|||
|
||||
|
||||
@pytest.fixture()
|
||||
def dummy_service(tmpdir, backups, raw_dummy_service):
|
||||
def dummy_service(tmpdir, backups, raw_dummy_service) -> Service:
|
||||
service = raw_dummy_service
|
||||
repo_path = path.join(tmpdir, "test_repo")
|
||||
assert not path.exists(repo_path)
|
||||
|
@ -196,13 +202,18 @@ def test_backup_returns_snapshot(backups, dummy_service):
|
|||
assert snapshot.created_at is not None
|
||||
|
||||
|
||||
def folder_files(folder):
    """Return the full paths of every entry directly inside *folder*."""
    # os.listdir never yields None, so the previous
    # `if filename is not None` filter was vacuous and is dropped.
    return [path.join(folder, filename) for filename in listdir(folder)]
|
||||
|
||||
|
||||
def service_files(service):
    """Return the full paths of every file in each of *service*'s folders.

    The previous implementation appended the folder's first file and
    then extended with *all* files of the same folder, so the first
    file was listed twice; each file is now reported exactly once.
    """
    result = []
    for service_folder in service.get_folders():
        contents = listdir(service_folder)
        # every service folder is expected to hold at least one file
        assert contents != []
        result.extend(path.join(service_folder, name) for name in contents)
    return result
|
||||
|
||||
|
||||
|
@ -316,6 +327,8 @@ def test_backup_larger_file(backups, dummy_service):
|
|||
updates = job_progress_updates(job_type_id)
|
||||
assert len(updates) > 3
|
||||
assert updates[int((len(updates) - 1) / 2.0)] > 10
|
||||
# clean up a bit
|
||||
remove(dir)
|
||||
|
||||
|
||||
def test_restore_snapshot_task(backups, dummy_service):
|
||||
|
@ -519,3 +532,44 @@ def test_services_to_back_up(backups, dummy_service):
|
|||
services = Backups.services_to_back_up(now)
|
||||
assert len(services) == 1
|
||||
assert services[0].get_id() == dummy_service.get_id()
|
||||
|
||||
|
||||
def test_sync(dummy_service):
    """sync() makes the destination folder mirror the source folder."""
    src = dummy_service.get_folders()[0]
    dst = dummy_service.get_folders()[1]
    # sort listings: os.listdir order is filesystem-dependent, so raw
    # list comparison made this test flaky
    old_files_src = sorted(listdir(src))
    old_files_dst = sorted(listdir(dst))
    assert old_files_src != old_files_dst

    sync(src, dst)
    new_files_src = sorted(listdir(src))
    new_files_dst = sorted(listdir(dst))
    # source untouched, destination now identical to source
    assert new_files_src == old_files_src
    assert new_files_dst == new_files_src
|
||||
|
||||
|
||||
def test_sync_nonexistent_src(dummy_service):
    """sync() must refuse to run when the source directory does not exist."""
    missing_src = "/var/lib/nonexistentFluffyBunniesOfUnix"
    dst = dummy_service.get_folders()[1]

    with pytest.raises(ValueError):
        sync(missing_src, dst)
|
||||
|
||||
|
||||
# Restic lowlevel
|
||||
def test_mount_umount(backups, dummy_service, tmpdir):
    """Mounting a backed-up repo exposes its contents; unmounting empties the dir."""
    Backups.back_up(dummy_service)
    backupper = Backups.provider().backupper
    assert isinstance(backupper, ResticBackupper)

    # fresh, empty mountpoint under the test tmpdir
    mountpoint = tmpdir / "mount"
    makedirs(mountpoint)
    assert path.exists(mountpoint)
    assert len(listdir(mountpoint)) == 0

    # mount_repo waits briefly for the FUSE mount, then the repo tree
    # must be visible inside the mountpoint
    handle = backupper.mount_repo(mountpoint)
    assert len(listdir(mountpoint)) != 0

    backupper.unmount_repo(mountpoint)
    # NOTE(review): the background mount process (handle) is never
    # terminated here — presumably unmount_repo ends it; confirm there
    # is no process leak.
    # handle.terminate()
    assert len(listdir(mountpoint)) == 0
|
||||
|
|
Loading…
Reference in New Issue