From 52a58d94e7706c2a356e80c9f004ff71a4d57279 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 2 Aug 2022 22:50:16 +0300 Subject: [PATCH 01/50] Test subscription --- selfprivacy_api/app.py | 3 +- selfprivacy_api/graphql/schema.py | 12 + .../graphql/subscriptions/__init__.py | 0 selfprivacy_api/graphql/subscriptions/jobs.py | 26 +++ selfprivacy_api/jobs/__init__.py | 83 ++++--- selfprivacy_api/jobs/test.py | 56 +++++ selfprivacy_api/restic_controller/tasks.py | 4 +- .../services/nextcloud/__init__.py | 215 +++++++++++++++++- selfprivacy_api/services/service.py | 12 +- selfprivacy_api/utils/block_devices.py | 12 +- selfprivacy_api/utils/huey.py | 4 + selfprivacy_api/utils/migrate_to_binds.py | 103 +++++++++ 12 files changed, 485 insertions(+), 45 deletions(-) create mode 100644 selfprivacy_api/graphql/subscriptions/__init__.py create mode 100644 selfprivacy_api/graphql/subscriptions/jobs.py create mode 100644 selfprivacy_api/jobs/test.py create mode 100644 selfprivacy_api/utils/huey.py create mode 100644 selfprivacy_api/utils/migrate_to_binds.py diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 15142f0..b22d034 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -18,7 +18,8 @@ from selfprivacy_api.resources.system import api_system from selfprivacy_api.resources.services import services as api_services from selfprivacy_api.resources.api_auth import auth as api_auth -from selfprivacy_api.restic_controller.tasks import huey, init_restic +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.restic_controller.tasks import init_restic from selfprivacy_api.migrations import run_migrations diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index c2d6a10..4a7aad5 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -4,6 +4,7 @@ import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import 
ApiMutations +from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations from selfprivacy_api.graphql.mutations.storage_mutation import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations @@ -14,6 +15,7 @@ from selfprivacy_api.graphql.queries.system import System from selfprivacy_api.graphql.mutations.users_mutations import UserMutations from selfprivacy_api.graphql.queries.users import Users +from selfprivacy_api.jobs.test import test_job @strawberry.type @@ -51,6 +53,16 @@ class Mutation( ): """Root schema for mutations""" + @strawberry.mutation + def test_mutation(self) -> GenericMutationReturn: + """Test mutation""" + test_job() + return GenericMutationReturn( + success=True, + message="Test mutation", + code=200, + ) + pass diff --git a/selfprivacy_api/graphql/subscriptions/__init__.py b/selfprivacy_api/graphql/subscriptions/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/graphql/subscriptions/jobs.py b/selfprivacy_api/graphql/subscriptions/jobs.py new file mode 100644 index 0000000..55d4c10 --- /dev/null +++ b/selfprivacy_api/graphql/subscriptions/jobs.py @@ -0,0 +1,26 @@ +import asyncio +from typing import AsyncGenerator +import typing + +import strawberry +from selfprivacy_api.graphql import IsAuthenticated + +from selfprivacy_api.jobs import Job, Jobs + +@strawberry.type +class JobSubscription: + @strawberry.subscription(permission_classes=[IsAuthenticated]) + async def job_subscription(self) -> AsyncGenerator[typing.List[Job], None]: + is_updated = True + def callback(jobs: typing.List[Job]): + nonlocal is_updated + is_updated = True + Jobs().add_observer(callback) + try: + while True: + if is_updated: + is_updated = False + yield Jobs().jobs + except GeneratorExit: + Jobs().remove_observer(callback) + return diff --git a/selfprivacy_api/jobs/__init__.py 
b/selfprivacy_api/jobs/__init__.py index a467583..d1ab948 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -16,6 +16,7 @@ A job is a dictionary with the following keys: """ import typing import datetime +import asyncio import json import os import time @@ -44,6 +45,8 @@ class Job: name: str, description: str, status: JobStatus, + status_text: typing.Optional[str], + progress: typing.Optional[int], created_at: datetime.datetime, updated_at: datetime.datetime, finished_at: typing.Optional[datetime.datetime], @@ -54,45 +57,25 @@ class Job: self.name = name self.description = description self.status = status + self.status_text = status_text or "" + self.progress = progress or 0 self.created_at = created_at self.updated_at = updated_at self.finished_at = finished_at self.error = error self.result = result - def to_dict(self) -> dict: - """ - Convert the job to a dictionary. - """ - return { - "id": self.id, - "name": self.name, - "description": self.description, - "status": self.status, - "created_at": self.created_at, - "updated_at": self.updated_at, - "finished_at": self.finished_at, - "error": self.error, - "result": self.result, - } - - def to_json(self) -> str: - """ - Convert the job to a JSON string. - """ - return json.dumps(self.to_dict()) - def __str__(self) -> str: """ Convert the job to a string. """ - return self.to_json() + return f"{self.name} - {self.status}" def __repr__(self) -> str: """ Convert the job to a string. """ - return self.to_json() + return f"{self.name} - {self.status}" class Jobs: @@ -120,9 +103,30 @@ class Jobs: else: Jobs.__instance = self self.jobs = [] + # Observers of the jobs list. + self.observers = [] + + def add_observer(self, observer: typing.Callable[[typing.List[Job]], None]) -> None: + """ + Add an observer to the jobs list. 
+ """ + self.observers.append(observer) + + def remove_observer(self, observer: typing.Callable[[typing.List[Job]], None]) -> None: + """ + Remove an observer from the jobs list. + """ + self.observers.remove(observer) + + def _notify_observers(self) -> None: + """ + Notify the observers of the jobs list. + """ + for observer in self.observers: + observer(self.jobs) def add( - self, name: str, description: str, status: JobStatus = JobStatus.CREATED + self, name: str, description: str, status: JobStatus = JobStatus.CREATED, status_text: str = "", progress: int = 0 ) -> Job: """ Add a job to the jobs list. @@ -131,6 +135,8 @@ class Jobs: name=name, description=description, status=status, + status_text=status_text, + progress=progress, created_at=datetime.datetime.now(), updated_at=datetime.datetime.now(), finished_at=None, @@ -138,6 +144,9 @@ class Jobs: result=None, ) self.jobs.append(job) + # Notify the observers. + self._notify_observers() + return job def remove(self, job: Job) -> None: @@ -145,15 +154,19 @@ class Jobs: Remove a job from the jobs list. """ self.jobs.remove(job) + # Notify the observers. + self._notify_observers() def update( self, job: Job, - name: typing.Optional[str], - description: typing.Optional[str], status: JobStatus, - error: typing.Optional[str], - result: typing.Optional[str], + status_text: typing.Optional[str] = None, + progress: typing.Optional[int] = None, + name: typing.Optional[str] = None, + description: typing.Optional[str] = None, + error: typing.Optional[str] = None, + result: typing.Optional[str] = None, ) -> Job: """ Update a job in the jobs list. 
@@ -162,10 +175,20 @@ class Jobs: job.name = name if description is not None: job.description = description + if status_text is not None: + job.status_text = status_text + if progress is not None: + job.progress = progress job.status = status job.updated_at = datetime.datetime.now() job.error = error job.result = result + if status == JobStatus.FINISHED or status == JobStatus.ERROR: + job.finished_at = datetime.datetime.now() + + # Notify the observers. + self._notify_observers() + return job def get_job(self, id: str) -> typing.Optional[Job]: @@ -177,7 +200,7 @@ class Jobs: return job return None - def get_jobs(self) -> list: + def get_jobs(self) -> typing.List[Job]: """ Get the jobs list. """ diff --git a/selfprivacy_api/jobs/test.py b/selfprivacy_api/jobs/test.py new file mode 100644 index 0000000..13856a1 --- /dev/null +++ b/selfprivacy_api/jobs/test.py @@ -0,0 +1,56 @@ +import time +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.jobs import JobStatus, Jobs + + +@huey.task() +def test_job(): + job = Jobs().add( + name="Test job", + description="This is a test job.", + status=JobStatus.CREATED, + status_text="", + progress=0, + ) + time.sleep(5) + Jobs().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=5, + ) + time.sleep(5) + Jobs().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=10, + ) + time.sleep(5) + Jobs().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=15, + ) + time.sleep(5) + Jobs().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=20, + ) + time.sleep(5) + Jobs().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=25, + ) + time.sleep(5) + Jobs().update( + job=job, + status=JobStatus.FINISHED, + status_text="Job finished.", + progress=100, + ) diff --git 
a/selfprivacy_api/restic_controller/tasks.py b/selfprivacy_api/restic_controller/tasks.py index 4c610c4..f583d8b 100644 --- a/selfprivacy_api/restic_controller/tasks.py +++ b/selfprivacy_api/restic_controller/tasks.py @@ -1,10 +1,8 @@ """Tasks for the restic controller.""" from huey import crontab -from huey.contrib.mini import MiniHuey +from selfprivacy_api.utils.huey import huey from . import ResticController, ResticStates -huey = MiniHuey() - @huey.task() def init_restic(): diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 525f657..a0604b2 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -1,10 +1,16 @@ """Class representing Nextcloud service.""" import base64 import subprocess +import time +import typing import psutil -from selfprivacy_api.services.service import Service, ServiceStatus +import pathlib +import shutil +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData - +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import huey class Nextcloud(Service): """Class representing Nextcloud service.""" @@ -92,5 +98,206 @@ class Nextcloud(Service): """Return Nextcloud logs.""" return "" - def get_storage_usage(self): - return psutil.disk_usage("/var/lib/nextcloud").used + def get_storage_usage(self) -> int: + """ + Calculate the real storage usage of /var/lib/nextcloud and all subdirectories. + Calculate using pathlib. + Do not follow symlinks. 
+ """ + storage_usage = 0 + for path in pathlib.Path("/var/lib/nextcloud").rglob("**/*"): + if path.is_dir(): + continue + storage_usage += path.stat().st_size + return storage_usage + + def get_location(self) -> str: + """Get the name of disk where Nextcloud is installed.""" + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("nextcloud", {}).get("location", "sda1") + else: + return "sda1" + + def get_dns_records(self) -> typing.List[ServiceDnsRecord]: + return super().get_dns_records() + + def move_to_volume(self, volume: BlockDevice): + job = Jobs().add( + name="services.nextcloud.move", + description=f"Moving Nextcloud to volume {volume.name}", + ) + move_nextcloud(self, volume, job) + return job + + +@huey.task() +def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): + """Move Nextcloud to another volume.""" + job = Jobs().update( + job=job, + status_text="Performing pre-move checks...", + status=JobStatus.RUNNING, + ) + with ReadUserData() as user_data: + if not user_data.get("useBinds", False): + Jobs().update( + job=job, + status=JobStatus.ERROR, + error="Server is not using binds.", + ) + return + # Check if we are on the same volume + old_location = nextcloud.get_location() + if old_location == volume.name: + Jobs().update( + job=job, + status=JobStatus.ERROR, + error="Nextcloud is already on this volume.", + ) + return + # Check if there is enough space on the new volume + if volume.fsavail < nextcloud.get_storage_usage(): + Jobs().update( + job=job, + status=JobStatus.ERROR, + error="Not enough space on the new volume.", + ) + return + # Make sure the volume is mounted + if f"/volumes/{volume.name}" not in volume.mountpoints: + Jobs().update( + job=job, + status=JobStatus.ERROR, + error="Volume is not mounted.", + ) + return + # Make sure current actual directory exists + if not pathlib.Path(f"/volumes/{old_location}/nextcloud").exists(): + Jobs().update( + job=job, + 
status=JobStatus.ERROR, + error="Nextcloud is not found.", + ) + return + + # Stop Nextcloud + Jobs().update( + job=job, + status=JobStatus.RUNNING, + status_text="Stopping Nextcloud...", + progress=5, + ) + nextcloud.stop() + # Wait for Nextcloud to stop, check every second + # If it does not stop in 30 seconds, abort + for _ in range(30): + if nextcloud.get_status() != ServiceStatus.RUNNING: + break + time.sleep(1) + else: + Jobs().update( + job=job, + status=JobStatus.ERROR, + error="Nextcloud did not stop in 30 seconds.", + ) + return + + # Unmount old volume + Jobs().update( + job=job, + status_text="Unmounting old folder...", + status=JobStatus.RUNNING, + progress=10, + ) + try: + subprocess.run(["umount", "/var/lib/nextcloud"], check=True) + except subprocess.CalledProcessError: + Jobs().update( + job=job, + status=JobStatus.ERROR, + error="Unable to unmount old volume.", + ) + return + # Move data to new volume and set correct permissions + Jobs().update( + job=job, + status_text="Moving data to new volume...", + status=JobStatus.RUNNING, + progress=20, + ) + shutil.move( + f"/volumes/{old_location}/nextcloud", f"/volumes/{volume.name}/nextcloud" + ) + + Jobs().update( + job=job, + status_text="Making sure Nextcloud owns its files...", + status=JobStatus.RUNNING, + progress=70, + ) + try: + subprocess.run( + [ + "chown", + "-R", + "nextcloud:nextcloud", + f"/volumes/{volume.name}/nextcloud", + ], + check=True, + ) + except subprocess.CalledProcessError as error: + print(error.output) + Jobs().update( + job=job, + status=JobStatus.RUNNING, + error="Unable to set ownership of new volume. Nextcloud may not be able to access its files. 
Continuing anyway.", + ) + return + + # Mount new volume + Jobs().update( + job=job, + status_text="Mounting Nextcloud data...", + status=JobStatus.RUNNING, + progress=90, + ) + try: + subprocess.run( + [ + "mount", + "--bind", + f"/volumes/{volume.name}/nextcloud", + "/var/lib/nextcloud", + ], + check=True, + ) + except subprocess.CalledProcessError as error: + print(error.output) + Jobs().update( + job=job, + status=JobStatus.ERROR, + error="Unable to mount new volume.", + ) + return + + # Update userdata + Jobs().update( + job=job, + status_text="Finishing move...", + status=JobStatus.RUNNING, + progress=95, + ) + with WriteUserData() as user_data: + if "nextcloud" not in user_data: + user_data["nextcloud"] = {} + user_data["nextcloud"]["location"] = volume.name + # Start Nextcloud + nextcloud.start() + Jobs().update( + job=job, + status=JobStatus.FINISHED, + result="Nextcloud moved successfully.", + status_text="Starting Nextcloud...", + progress=100, + ) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index a0e6ae6..7c0b09e 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -3,6 +3,8 @@ from abc import ABC, abstractmethod from enum import Enum import typing +from selfprivacy_api.utils.block_devices import BlockDevice + class ServiceStatus(Enum): """Enum for service status""" @@ -85,9 +87,17 @@ class Service(ABC): pass @abstractmethod - def get_storage_usage(self): + def get_storage_usage(self) -> int: pass @abstractmethod def get_dns_records(self) -> typing.List[ServiceDnsRecord]: pass + + @abstractmethod + def get_location(self) -> str: + pass + + @abstractmethod + def move_to_volume(self, volume: BlockDevice): + pass diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index e6adddc..b33c7aa 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -16,7 +16,7 @@ def get_block_device(device_name): 
"-J", "-b", "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE, MODEL,SERIAL,TYPE", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE, MODEL,SERIAL,TYPE", device_name, ] ) @@ -47,7 +47,7 @@ class BlockDevice: self.fssize = block_device["fssize"] self.fstype = block_device["fstype"] self.fsused = block_device["fsused"] - self.mountpoint = block_device["mountpoint"] + self.mountpoints = block_device["mountpoints"] self.label = block_device["label"] self.uuid = block_device["uuid"] self.size = block_device["size"] @@ -60,7 +60,7 @@ class BlockDevice: return self.name def __repr__(self): - return f"" + return f"" def __eq__(self, other): return self.name == other.name @@ -77,7 +77,7 @@ class BlockDevice: self.fssize = device["fssize"] self.fstype = device["fstype"] self.fsused = device["fsused"] - self.mountpoint = device["mountpoint"] + self.mountpoints = device["mountpoints"] self.label = device["label"] self.uuid = device["uuid"] self.size = device["size"] @@ -92,7 +92,7 @@ class BlockDevice: "fssize": self.fssize, "fstype": self.fstype, "fsused": self.fsused, - "mountpoint": self.mountpoint, + "mountpoints": self.mountpoints, "label": self.label, "uuid": self.uuid, "size": self.size, @@ -219,6 +219,6 @@ class BlockDevices: """ block_devices = [] for block_device in self.block_devices: - if block_device.mountpoint == mountpoint: + if mountpoint in block_device.mountpoints: block_devices.append(block_device) return block_devices diff --git a/selfprivacy_api/utils/huey.py b/selfprivacy_api/utils/huey.py new file mode 100644 index 0000000..9803e7b --- /dev/null +++ b/selfprivacy_api/utils/huey.py @@ -0,0 +1,4 @@ +"""MiniHuey singleton.""" +from huey.contrib.mini import MiniHuey + +huey = MiniHuey() diff --git a/selfprivacy_api/utils/migrate_to_binds.py b/selfprivacy_api/utils/migrate_to_binds.py new file mode 100644 index 0000000..faac03b --- /dev/null +++ b/selfprivacy_api/utils/migrate_to_binds.py @@ -0,0 +1,103 @@ 
+"""Function to perform migration of app data to binds.""" +import subprocess +import psutil +import pathlib +import shutil +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.utils import WriteUserData +from selfprivacy_api.utils.block_devices import BlockDevices + +class BindMigrationConfig: + """Config for bind migration. + For each service provide block device name. + """ + email_block_device: str + bitwarden_block_device: str + gitea_block_device: str + nextcloud_block_device: str + pleroma_block_device: str + + +def migrate_to_binds(config: BindMigrationConfig): + """Migrate app data to binds.""" + + # Get block devices. + block_devices = BlockDevices().get_block_devices() + block_device_names = [ device.name for device in block_devices ] + + # Get all unique required block devices + required_block_devices = [] + for block_device_name in config.__dict__.values(): + if block_device_name not in required_block_devices: + required_block_devices.append(block_device_name) + + # Check if all block devices from config are present. + for block_device_name in required_block_devices: + if block_device_name not in block_device_names: + raise Exception(f"Block device {block_device_name} is not present.") + + # Make sure all required block devices are mounted. + # sda1 is the root partition and is always mounted. 
+ for block_device_name in required_block_devices: + if block_device_name == "sda1": + continue + block_device = BlockDevices().get_block_device(block_device_name) + if block_device is None: + raise Exception(f"Block device {block_device_name} is not present.") + if f"/volumes/{block_device_name}" not in block_device.mountpoints: + raise Exception(f"Block device {block_device_name} is not mounted.") + + # Activate binds in userdata + with WriteUserData() as user_data: + if "email" not in user_data: + user_data["email"] = {} + user_data["email"]["block_device"] = config.email_block_device + if "bitwarden" not in user_data: + user_data["bitwarden"] = {} + user_data["bitwarden"]["block_device"] = config.bitwarden_block_device + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["block_device"] = config.gitea_block_device + if "nextcloud" not in user_data: + user_data["nextcloud"] = {} + user_data["nextcloud"]["block_device"] = config.nextcloud_block_device + if "pleroma" not in user_data: + user_data["pleroma"] = {} + user_data["pleroma"]["block_device"] = config.pleroma_block_device + + user_data["useBinds"] = True + + # Make sure /volumes/sda1 exists. + pathlib.Path("/volumes/sda1").mkdir(parents=True, exist_ok=True) + + # Perform migration of Nextcloud. + # Data is moved from /var/lib/nextcloud to /volumes//nextcloud. + # /var/lib/nextcloud is removed and /volumes//nextcloud is mounted as bind mount. + + # Turn off Nextcloud + Nextcloud().stop() + + # Move data from /var/lib/nextcloud to /volumes//nextcloud. + # /var/lib/nextcloud is removed and /volumes//nextcloud is mounted as bind mount. 
+ nextcloud_data_path = pathlib.Path("/var/lib/nextcloud") + nextcloud_bind_path = pathlib.Path(f"/volumes/{config.nextcloud_block_device}/nextcloud") + if nextcloud_data_path.exists(): + shutil.move(str(nextcloud_data_path), str(nextcloud_bind_path)) + else: + raise Exception("Nextcloud data path does not exist.") + + # Make sure folder /var/lib/nextcloud exists. + nextcloud_data_path.mkdir(mode=0o750, parents=True, exist_ok=True) + + # Make sure this folder is owned by user nextcloud and group nextcloud. + shutil.chown(nextcloud_bind_path, user="nextcloud", group="nextcloud") + shutil.chown(nextcloud_data_path, user="nextcloud", group="nextcloud") + + # Mount nextcloud bind mount. + subprocess.run(["mount","--bind", str(nextcloud_bind_path), str(nextcloud_data_path)], check=True) + + # Recursively chown all files in nextcloud bind mount. + subprocess.run(["chown", "-R", "nextcloud:nextcloud", str(nextcloud_data_path)], check=True) + + # Start Nextcloud + Nextcloud().start() -- 2.42.0 From d8d3cd20684be4238081e73fc124278508c3e27b Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 2 Aug 2022 22:53:35 +0300 Subject: [PATCH 02/50] Register subscription --- selfprivacy_api/graphql/schema.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 4a7aad5..03d8021 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -15,6 +15,7 @@ from selfprivacy_api.graphql.queries.system import System from selfprivacy_api.graphql.mutations.users_mutations import UserMutations from selfprivacy_api.graphql.queries.users import Users +from selfprivacy_api.graphql.subscriptions.jobs import JobSubscription from selfprivacy_api.jobs.test import test_job @@ -66,4 +67,4 @@ class Mutation( pass -schema = strawberry.Schema(query=Query, mutation=Mutation) +schema = strawberry.Schema(query=Query, mutation=Mutation, subscription=JobSubscription) -- 2.42.0 From 
8ea0d89d71de8dbe0c42a755f3e7d3892b8fb240 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 2 Aug 2022 22:58:39 +0300 Subject: [PATCH 03/50] Fix --- selfprivacy_api/graphql/subscriptions/jobs.py | 29 +++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/graphql/subscriptions/jobs.py b/selfprivacy_api/graphql/subscriptions/jobs.py index 55d4c10..26dd2aa 100644 --- a/selfprivacy_api/graphql/subscriptions/jobs.py +++ b/selfprivacy_api/graphql/subscriptions/jobs.py @@ -1,4 +1,5 @@ import asyncio +import datetime from typing import AsyncGenerator import typing @@ -7,10 +8,23 @@ from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.jobs import Job, Jobs +@strawberry.type +class ApiJob: + name: str + description: str + status: str + status_text: typing.Optional[str] + progress: typing.Optional[int] + created_at: datetime.datetime + updated_at: datetime.datetime + finished_at: typing.Optional[datetime.datetime] + error: typing.Optional[str] + result: typing.Optional[str] + @strawberry.type class JobSubscription: @strawberry.subscription(permission_classes=[IsAuthenticated]) - async def job_subscription(self) -> AsyncGenerator[typing.List[Job], None]: + async def job_subscription(self) -> AsyncGenerator[typing.List[ApiJob], None]: is_updated = True def callback(jobs: typing.List[Job]): nonlocal is_updated @@ -20,7 +34,18 @@ class JobSubscription: while True: if is_updated: is_updated = False - yield Jobs().jobs + yield [ ApiJob( + name=job.name, + description=job.description, + status=job.status.name, + status_text=job.status_text, + progress=job.progress, + created_at=job.created_at, + updated_at=job.updated_at, + finished_at=job.finished_at, + error=job.error, + result=job.result, + ) for job in Jobs().get_jobs() ] except GeneratorExit: Jobs().remove_observer(callback) return -- 2.42.0 From 8f940e64fd40e5c5acfd3b3f5896cfe0d4bc97df Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 2 Aug 2022 23:08:32 +0300 
Subject: [PATCH 04/50] uh --- selfprivacy_api/graphql/subscriptions/jobs.py | 36 +++++++++---------- 1 file changed, 16 insertions(+), 20 deletions(-) diff --git a/selfprivacy_api/graphql/subscriptions/jobs.py b/selfprivacy_api/graphql/subscriptions/jobs.py index 26dd2aa..82f5f29 100644 --- a/selfprivacy_api/graphql/subscriptions/jobs.py +++ b/selfprivacy_api/graphql/subscriptions/jobs.py @@ -23,29 +23,25 @@ class ApiJob: @strawberry.type class JobSubscription: - @strawberry.subscription(permission_classes=[IsAuthenticated]) + @strawberry.subscription() async def job_subscription(self) -> AsyncGenerator[typing.List[ApiJob], None]: is_updated = True def callback(jobs: typing.List[Job]): nonlocal is_updated is_updated = True Jobs().add_observer(callback) - try: - while True: - if is_updated: - is_updated = False - yield [ ApiJob( - name=job.name, - description=job.description, - status=job.status.name, - status_text=job.status_text, - progress=job.progress, - created_at=job.created_at, - updated_at=job.updated_at, - finished_at=job.finished_at, - error=job.error, - result=job.result, - ) for job in Jobs().get_jobs() ] - except GeneratorExit: - Jobs().remove_observer(callback) - return + while True: + if is_updated: + is_updated = False + yield [ ApiJob( + name=job.name, + description=job.description, + status=job.status.name, + status_text=job.status_text, + progress=job.progress, + created_at=job.created_at, + updated_at=job.updated_at, + finished_at=job.finished_at, + error=job.error, + result=job.result, + ) for job in Jobs().get_jobs() ] -- 2.42.0 From b965ffd96ae30dfd866d3fe07bccc8a476a76232 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 2 Aug 2022 23:12:48 +0300 Subject: [PATCH 05/50] Trying out --- selfprivacy_api/graphql/subscriptions/jobs.py | 47 ++++++++++++++----- 1 file changed, 35 insertions(+), 12 deletions(-) diff --git a/selfprivacy_api/graphql/subscriptions/jobs.py b/selfprivacy_api/graphql/subscriptions/jobs.py index 82f5f29..597ef5b 100644 --- 
a/selfprivacy_api/graphql/subscriptions/jobs.py +++ b/selfprivacy_api/graphql/subscriptions/jobs.py @@ -8,6 +8,7 @@ from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.jobs import Job, Jobs + @strawberry.type class ApiJob: name: str @@ -21,27 +22,49 @@ class ApiJob: error: typing.Optional[str] result: typing.Optional[str] + @strawberry.type class JobSubscription: @strawberry.subscription() async def job_subscription(self) -> AsyncGenerator[typing.List[ApiJob], None]: is_updated = True + def callback(jobs: typing.List[Job]): nonlocal is_updated is_updated = True + + print("Subscribing to job updates...") Jobs().add_observer(callback) + yield [ + ApiJob( + name=job.name, + description=job.description, + status=job.status.name, + status_text=job.status_text, + progress=job.progress, + created_at=job.created_at, + updated_at=job.updated_at, + finished_at=job.finished_at, + error=job.error, + result=job.result, + ) + for job in Jobs().get_jobs() + ] while True: if is_updated: is_updated = False - yield [ ApiJob( - name=job.name, - description=job.description, - status=job.status.name, - status_text=job.status_text, - progress=job.progress, - created_at=job.created_at, - updated_at=job.updated_at, - finished_at=job.finished_at, - error=job.error, - result=job.result, - ) for job in Jobs().get_jobs() ] + yield [ + ApiJob( + name=job.name, + description=job.description, + status=job.status.name, + status_text=job.status_text, + progress=job.progress, + created_at=job.created_at, + updated_at=job.updated_at, + finished_at=job.finished_at, + error=job.error, + result=job.result, + ) + for job in Jobs().get_jobs() + ] -- 2.42.0 From 5e62798fde49776cb09cc1d1305305126f996fb3 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 2 Aug 2022 23:30:03 +0300 Subject: [PATCH 06/50] Test --- selfprivacy_api/graphql/subscriptions/jobs.py | 12 +++++-- selfprivacy_api/jobs/__init__.py | 21 +++++++++-- selfprivacy_api/jobs/test.py | 14 ++++---- 
.../services/nextcloud/__init__.py | 36 +++++++++---------- 4 files changed, 53 insertions(+), 30 deletions(-) diff --git a/selfprivacy_api/graphql/subscriptions/jobs.py b/selfprivacy_api/graphql/subscriptions/jobs.py index 597ef5b..2dfca07 100644 --- a/selfprivacy_api/graphql/subscriptions/jobs.py +++ b/selfprivacy_api/graphql/subscriptions/jobs.py @@ -25,6 +25,12 @@ class ApiJob: @strawberry.type class JobSubscription: + @strawberry.subscription + async def count(self, target: int = 100) -> AsyncGenerator[int, None]: + for i in range(target): + yield i + await asyncio.sleep(0.5) + @strawberry.subscription() async def job_subscription(self) -> AsyncGenerator[typing.List[ApiJob], None]: is_updated = True @@ -34,7 +40,7 @@ class JobSubscription: is_updated = True print("Subscribing to job updates...") - Jobs().add_observer(callback) + Jobs.get_instance().add_observer(callback) yield [ ApiJob( name=job.name, @@ -48,7 +54,7 @@ class JobSubscription: error=job.error, result=job.result, ) - for job in Jobs().get_jobs() + for job in Jobs.get_instance().get_jobs() ] while True: if is_updated: @@ -66,5 +72,5 @@ class JobSubscription: error=job.error, result=job.result, ) - for job in Jobs().get_jobs() + for job in Jobs.get_instance().get_jobs() ] diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index d1ab948..c9dd025 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -92,7 +92,11 @@ class Jobs: """ if Jobs.__instance is None: Jobs() - return Jobs.__instance + if Jobs.__instance is None: + raise Exception("Couldn't init Jobs singleton!") + return Jobs.__instance + else: + return Jobs.__instance def __init__(self): """ @@ -102,7 +106,20 @@ class Jobs: raise Exception("This class is a singleton!") else: Jobs.__instance = self - self.jobs = [] + self.jobs = [ + Job( + name="Init job", + description="Initial job", + status=JobStatus.FINISHED, + status_text="", + progress=100, + 
created_at=datetime.datetime.now(), + updated_at=datetime.datetime.now(), + finished_at=datetime.datetime.now(), + error=None, + result=None, + ) + ] # Observers of the jobs list. self.observers = [] diff --git a/selfprivacy_api/jobs/test.py b/selfprivacy_api/jobs/test.py index 13856a1..f3c03af 100644 --- a/selfprivacy_api/jobs/test.py +++ b/selfprivacy_api/jobs/test.py @@ -5,7 +5,7 @@ from selfprivacy_api.jobs import JobStatus, Jobs @huey.task() def test_job(): - job = Jobs().add( + job = Jobs.get_instance().add( name="Test job", description="This is a test job.", status=JobStatus.CREATED, @@ -13,42 +13,42 @@ def test_job(): progress=0, ) time.sleep(5) - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.RUNNING, status_text="Performing pre-move checks...", progress=5, ) time.sleep(5) - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.RUNNING, status_text="Performing pre-move checks...", progress=10, ) time.sleep(5) - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.RUNNING, status_text="Performing pre-move checks...", progress=15, ) time.sleep(5) - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.RUNNING, status_text="Performing pre-move checks...", progress=20, ) time.sleep(5) - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.RUNNING, status_text="Performing pre-move checks...", progress=25, ) time.sleep(5) - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.FINISHED, status_text="Job finished.", diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index a0604b2..e1de92a 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -123,7 +123,7 @@ class Nextcloud(Service): return super().get_dns_records() def move_to_volume(self, volume: BlockDevice): - job = Jobs().add( + job = Jobs.get_instance().add( 
name="services.nextcloud.move", description=f"Moving Nextcloud to volume {volume.name}", ) @@ -134,14 +134,14 @@ class Nextcloud(Service): @huey.task() def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): """Move Nextcloud to another volume.""" - job = Jobs().update( + job = Jobs.get_instance().update( job=job, status_text="Performing pre-move checks...", status=JobStatus.RUNNING, ) with ReadUserData() as user_data: if not user_data.get("useBinds", False): - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.ERROR, error="Server is not using binds.", @@ -150,7 +150,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): # Check if we are on the same volume old_location = nextcloud.get_location() if old_location == volume.name: - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.ERROR, error="Nextcloud is already on this volume.", @@ -158,7 +158,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): return # Check if there is enough space on the new volume if volume.fsavail < nextcloud.get_storage_usage(): - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.ERROR, error="Not enough space on the new volume.", @@ -166,7 +166,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): return # Make sure the volume is mounted if f"/volumes/{volume.name}" not in volume.mountpoints: - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.ERROR, error="Volume is not mounted.", @@ -174,7 +174,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): return # Make sure current actual directory exists if not pathlib.Path(f"/volumes/{old_location}/nextcloud").exists(): - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.ERROR, error="Nextcloud is not found.", @@ -182,7 +182,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): return # Stop Nextcloud - 
Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.RUNNING, status_text="Stopping Nextcloud...", @@ -196,7 +196,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): break time.sleep(1) else: - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.ERROR, error="Nextcloud did not stop in 30 seconds.", @@ -204,7 +204,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): return # Unmount old volume - Jobs().update( + Jobs.get_instance().update( job=job, status_text="Unmounting old folder...", status=JobStatus.RUNNING, @@ -213,14 +213,14 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): try: subprocess.run(["umount", "/var/lib/nextcloud"], check=True) except subprocess.CalledProcessError: - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.ERROR, error="Unable to unmount old volume.", ) return # Move data to new volume and set correct permissions - Jobs().update( + Jobs.get_instance().update( job=job, status_text="Moving data to new volume...", status=JobStatus.RUNNING, @@ -230,7 +230,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): f"/volumes/{old_location}/nextcloud", f"/volumes/{volume.name}/nextcloud" ) - Jobs().update( + Jobs.get_instance().update( job=job, status_text="Making sure Nextcloud owns its files...", status=JobStatus.RUNNING, @@ -248,7 +248,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): ) except subprocess.CalledProcessError as error: print(error.output) - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.RUNNING, error="Unable to set ownership of new volume. Nextcloud may not be able to access its files. 
Continuing anyway.", @@ -256,7 +256,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): return # Mount new volume - Jobs().update( + Jobs.get_instance().update( job=job, status_text="Mounting Nextcloud data...", status=JobStatus.RUNNING, @@ -274,7 +274,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): ) except subprocess.CalledProcessError as error: print(error.output) - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.ERROR, error="Unable to mount new volume.", @@ -282,7 +282,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): return # Update userdata - Jobs().update( + Jobs.get_instance().update( job=job, status_text="Finishing move...", status=JobStatus.RUNNING, @@ -294,7 +294,7 @@ def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): user_data["nextcloud"]["location"] = volume.name # Start Nextcloud nextcloud.start() - Jobs().update( + Jobs.get_instance().update( job=job, status=JobStatus.FINISHED, result="Nextcloud moved successfully.", -- 2.42.0 From 9132b70e709b19ddc393859ce23f8a5c342fa4e8 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 3 Aug 2022 14:06:18 +0300 Subject: [PATCH 07/50] Fix mountpoints --- selfprivacy_api/utils/block_devices.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index b33c7aa..f3b0911 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -170,7 +170,7 @@ class BlockDevices: "-J", "-b", "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", ] ) lsblk_output = lsblk_output.decode("utf-8") -- 2.42.0 From dfd28ad0cd419366480df2e89227737cb9e137ff Mon Sep 17 00:00:00 2001 From: inexcode Date: Thu, 11 Aug 2022 03:36:36 +0400 Subject: [PATCH 
08/50] Move to fastapi --- .gitignore | 2 + requirements.txt | 17 - .../{resources => actions}/__init__.py | 0 selfprivacy_api/actions/api_tokens.py | 123 ++++++ selfprivacy_api/actions/ssh.py | 149 +++++++ selfprivacy_api/actions/system.py | 139 ++++++ selfprivacy_api/actions/users.py | 219 ++++++++++ selfprivacy_api/app.py | 132 ++---- selfprivacy_api/dependencies.py | 55 +++ selfprivacy_api/graphql/__init__.py | 10 +- selfprivacy_api/graphql/common_types/user.py | 67 +-- .../graphql/mutations/api_mutations.py | 92 ++-- .../graphql/mutations/ssh_mutations.py | 66 ++- .../graphql/mutations/ssh_utils.py | 74 ---- .../graphql/mutations/storage_mutation.py | 1 - .../graphql/mutations/system_mutations.py | 52 +-- .../graphql/mutations/users_mutations.py | 90 +++- .../graphql/mutations/users_utils.py | 111 ----- .../graphql/queries/api_queries.py | 41 +- selfprivacy_api/graphql/queries/providers.py | 2 - selfprivacy_api/graphql/queries/system.py | 49 +-- selfprivacy_api/graphql/queries/users.py | 17 +- selfprivacy_api/jobs/__init__.py | 11 +- selfprivacy_api/jobs/test.py | 4 +- .../resources/api_auth/__init__.py | 14 - .../resources/api_auth/app_tokens.py | 118 ----- .../resources/api_auth/new_device.py | 103 ----- .../resources/api_auth/recovery_token.py | 205 --------- selfprivacy_api/resources/common.py | 27 -- .../resources/services/__init__.py | 19 - .../resources/services/bitwarden.py | 66 --- selfprivacy_api/resources/services/gitea.py | 66 --- .../resources/services/mailserver.py | 41 -- selfprivacy_api/resources/services/main.py | 84 ---- .../resources/services/nextcloud.py | 66 --- selfprivacy_api/resources/services/ocserv.py | 66 --- selfprivacy_api/resources/services/pleroma.py | 66 --- selfprivacy_api/resources/services/restic.py | 241 ----------- selfprivacy_api/resources/services/ssh.py | 407 ------------------ selfprivacy_api/resources/system.py | 346 --------------- selfprivacy_api/resources/users.py | 162 ------- selfprivacy_api/rest/__init__.py | 0 
selfprivacy_api/rest/api_auth.py | 127 ++++++ selfprivacy_api/rest/services.py | 371 ++++++++++++++++ selfprivacy_api/rest/system.py | 105 +++++ selfprivacy_api/rest/users.py | 62 +++ selfprivacy_api/restic_controller/tasks.py | 4 +- .../services/bitwarden/__init__.py | 163 +++++++ .../services/bitwarden/bitwarden.svg | 3 + .../services/generic_service_mover.py | 238 ++++++++++ .../services/generic_size_counter.py | 16 + .../services/generic_status_getter.py | 29 ++ selfprivacy_api/services/gitea/__init__.py | 154 +++++++ selfprivacy_api/services/gitea/gitea.svg | 3 + .../services/mailserver/__init__.py | 172 ++++++++ .../services/mailserver/mailserver.svg | 3 + .../services/nextcloud/__init__.py | 287 +++--------- selfprivacy_api/services/ocserv/__init__.py | 99 +++++ selfprivacy_api/services/ocserv/ocserv.svg | 3 + selfprivacy_api/services/pleroma/__init__.py | 144 +++++++ selfprivacy_api/services/pleroma/pleroma.svg | 10 + selfprivacy_api/services/service.py | 8 + selfprivacy_api/utils/auth.py | 14 +- selfprivacy_api/utils/block_devices.py | 2 +- selfprivacy_api/utils/huey.py | 16 +- selfprivacy_api/utils/migrate_to_binds.py | 17 +- selfprivacy_api/utils/network.py | 5 +- shell.nix | 11 +- tests/conftest.py | 71 +-- tests/services/test_mailserver.py | 6 +- tests/services/test_restic.py | 40 +- tests/services/test_services.py | 21 +- tests/services/test_ssh.py | 57 +-- tests/test_auth.py | 100 ++--- tests/test_common.py | 10 +- tests/test_graphql/_test_system.py | 221 +++++----- tests/test_graphql/test_api.py | 34 +- tests/test_graphql/test_api_devices.py | 192 +++++---- tests/test_graphql/test_api_recovery.py | 278 ++++++------ tests/test_graphql/test_api_version.py | 8 +- tests/test_graphql/test_ssh.py | 108 ++--- tests/test_graphql/test_system_nixos_tasks.py | 40 +- tests/test_graphql/test_users.py | 242 +++++------ tests/test_system.py | 18 +- tests/test_users.py | 26 +- 85 files changed, 3546 insertions(+), 3612 deletions(-) delete mode 100755 
requirements.txt rename selfprivacy_api/{resources => actions}/__init__.py (100%) create mode 100644 selfprivacy_api/actions/api_tokens.py create mode 100644 selfprivacy_api/actions/ssh.py create mode 100644 selfprivacy_api/actions/system.py create mode 100644 selfprivacy_api/actions/users.py create mode 100644 selfprivacy_api/dependencies.py delete mode 100644 selfprivacy_api/graphql/mutations/ssh_utils.py delete mode 100644 selfprivacy_api/graphql/mutations/users_utils.py delete mode 100644 selfprivacy_api/resources/api_auth/__init__.py delete mode 100644 selfprivacy_api/resources/api_auth/app_tokens.py delete mode 100644 selfprivacy_api/resources/api_auth/new_device.py delete mode 100644 selfprivacy_api/resources/api_auth/recovery_token.py delete mode 100644 selfprivacy_api/resources/common.py delete mode 100644 selfprivacy_api/resources/services/__init__.py delete mode 100644 selfprivacy_api/resources/services/bitwarden.py delete mode 100644 selfprivacy_api/resources/services/gitea.py delete mode 100644 selfprivacy_api/resources/services/mailserver.py delete mode 100644 selfprivacy_api/resources/services/main.py delete mode 100644 selfprivacy_api/resources/services/nextcloud.py delete mode 100644 selfprivacy_api/resources/services/ocserv.py delete mode 100644 selfprivacy_api/resources/services/pleroma.py delete mode 100644 selfprivacy_api/resources/services/restic.py delete mode 100644 selfprivacy_api/resources/services/ssh.py delete mode 100644 selfprivacy_api/resources/system.py delete mode 100644 selfprivacy_api/resources/users.py create mode 100644 selfprivacy_api/rest/__init__.py create mode 100644 selfprivacy_api/rest/api_auth.py create mode 100644 selfprivacy_api/rest/services.py create mode 100644 selfprivacy_api/rest/system.py create mode 100644 selfprivacy_api/rest/users.py create mode 100644 selfprivacy_api/services/bitwarden/__init__.py create mode 100644 selfprivacy_api/services/bitwarden/bitwarden.svg create mode 100644 
selfprivacy_api/services/generic_service_mover.py create mode 100644 selfprivacy_api/services/generic_size_counter.py create mode 100644 selfprivacy_api/services/generic_status_getter.py create mode 100644 selfprivacy_api/services/gitea/__init__.py create mode 100644 selfprivacy_api/services/gitea/gitea.svg create mode 100644 selfprivacy_api/services/mailserver/__init__.py create mode 100644 selfprivacy_api/services/mailserver/mailserver.svg create mode 100644 selfprivacy_api/services/ocserv/__init__.py create mode 100644 selfprivacy_api/services/ocserv/ocserv.svg create mode 100644 selfprivacy_api/services/pleroma/__init__.py create mode 100644 selfprivacy_api/services/pleroma/pleroma.svg diff --git a/.gitignore b/.gitignore index 1264e45..7941396 100755 --- a/.gitignore +++ b/.gitignore @@ -145,3 +145,5 @@ dmypy.json cython_debug/ # End of https://www.toptal.com/developers/gitignore/api/flask + +*.db diff --git a/requirements.txt b/requirements.txt deleted file mode 100755 index 4e0e02e..0000000 --- a/requirements.txt +++ /dev/null @@ -1,17 +0,0 @@ -wheel -flask -flask_restful -flask_socketio -setuptools -portalocker -flask-swagger -flask-swagger-ui -pytz -huey -gevent -mnemonic - -pytest -coverage -pytest-mock -pytest-datadir diff --git a/selfprivacy_api/resources/__init__.py b/selfprivacy_api/actions/__init__.py similarity index 100% rename from selfprivacy_api/resources/__init__.py rename to selfprivacy_api/actions/__init__.py diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py new file mode 100644 index 0000000..3a57ef7 --- /dev/null +++ b/selfprivacy_api/actions/api_tokens.py @@ -0,0 +1,123 @@ +"""App tokens actions""" +from datetime import datetime +from typing import Optional +from pydantic import BaseModel + +from selfprivacy_api.utils.auth import ( + delete_token, + generate_recovery_token, + get_recovery_token_status, + get_tokens_info, + is_recovery_token_exists, + is_recovery_token_valid, + is_token_name_exists, 
+ is_token_name_pair_valid, + refresh_token, + get_token_name, +) + + +class TokenInfoWithIsCaller(BaseModel): + """Token info""" + + name: str + date: datetime + is_caller: bool + + +def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCaller]: + """Get the tokens info""" + caller_name = get_token_name(caller_token) + tokens = get_tokens_info() + return [ + TokenInfoWithIsCaller( + name=token.name, + date=token.date, + is_caller=token.name == caller_name, + ) + for token in tokens + ] + + +class NotFoundException(Exception): + """Not found exception""" + + pass + + +class CannotDeleteCallerException(Exception): + """Cannot delete caller exception""" + + pass + + +def delete_api_token(caller_token: str, token_name: str) -> None: + """Delete the token""" + if is_token_name_pair_valid(token_name, caller_token): + raise CannotDeleteCallerException("Cannot delete caller's token") + if not is_token_name_exists(token_name): + raise NotFoundException("Token not found") + delete_token(token_name) + + +def refresh_api_token(caller_token: str) -> str: + """Refresh the token""" + new_token = refresh_token(caller_token) + if new_token is None: + raise NotFoundException("Token not found") + return new_token + + +class RecoveryTokenStatus(BaseModel): + """Recovery token status""" + + exists: bool + valid: bool + date: Optional[datetime] = None + expiration: Optional[datetime] = None + uses_left: Optional[int] = None + + +def get_api_recovery_token_status() -> RecoveryTokenStatus: + """Get the recovery token status""" + if not is_recovery_token_exists(): + return RecoveryTokenStatus(exists=False, valid=False) + status = get_recovery_token_status() + if status is None: + return RecoveryTokenStatus(exists=False, valid=False) + is_valid = is_recovery_token_valid() + return RecoveryTokenStatus( + exists=True, + valid=is_valid, + date=status["date"], + expiration=status["expiration"], + uses_left=status["uses_left"], + ) + + +class 
InvalidExpirationDate(Exception): + """Invalid expiration date exception""" + + pass + + +class InvalidUsesLeft(Exception): + """Invalid uses left exception""" + + pass + + +def get_new_api_recovery_key( + expiration_date: Optional[datetime] = None, uses_left: Optional[int] = None +) -> str: + """Get new recovery key""" + if expiration_date is not None: + current_time = datetime.now().timestamp() + if expiration_date.timestamp() < current_time: + raise InvalidExpirationDate("Expiration date is in the past") + if uses_left is not None: + if uses_left <= 0: + raise InvalidUsesLeft("Uses must be greater than 0") + + key = generate_recovery_token(expiration_date, uses_left) + return key diff --git a/selfprivacy_api/actions/ssh.py b/selfprivacy_api/actions/ssh.py new file mode 100644 index 0000000..3f79ff8 --- /dev/null +++ b/selfprivacy_api/actions/ssh.py @@ -0,0 +1,149 @@ +"""Actions to manage the SSH.""" +from typing import Optional +from pydantic import BaseModel +from selfprivacy_api.actions.users import ( + UserNotFound, + ensure_ssh_and_users_fields_exist, +) + +from selfprivacy_api.utils import WriteUserData, ReadUserData, validate_ssh_public_key + + +def enable_ssh(): + with WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} + data["ssh"]["enable"] = True + + +class UserdataSshSettings(BaseModel): + """Settings for the SSH.""" + + enable: bool = True + passwordAuthentication: bool = True + rootKeys: list[str] = [] + + +def get_ssh_settings() -> UserdataSshSettings: + with ReadUserData() as data: + if "ssh" not in data: + return UserdataSshSettings() + if "enable" not in data["ssh"]: + data["ssh"]["enable"] = True + if "passwordAuthentication" not in data["ssh"]: + data["ssh"]["passwordAuthentication"] = True + if "rootKeys" not in data["ssh"]: + data["ssh"]["rootKeys"] = [] + return UserdataSshSettings(**data["ssh"]) + + +def set_ssh_settings( + enable: Optional[bool] = None, password_authentication: Optional[bool] = None +) -> None: + with 
WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} + if enable is not None: + data["ssh"]["enable"] = enable + if password_authentication is not None: + data["ssh"]["passwordAuthentication"] = password_authentication + + +def add_root_ssh_key(public_key: str): + with WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} + if "rootKeys" not in data["ssh"]: + data["ssh"]["rootKeys"] = [] + # Return 409 if key already in array + for key in data["ssh"]["rootKeys"]: + if key == public_key: + raise KeyAlreadyExists() + data["ssh"]["rootKeys"].append(public_key) + + +class KeyAlreadyExists(Exception): + """Key already exists""" + + pass + + +class InvalidPublicKey(Exception): + """Invalid public key""" + + pass + + +def create_ssh_key(username: str, ssh_key: str): + """Create a new ssh key""" + + if not validate_ssh_public_key(ssh_key): + raise InvalidPublicKey() + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == data["username"]: + if ssh_key in data["sshKeys"]: + raise KeyAlreadyExists() + + data["sshKeys"].append(ssh_key) + return + + if username == "root": + if ssh_key in data["ssh"]["rootKeys"]: + raise KeyAlreadyExists() + + data["ssh"]["rootKeys"].append(ssh_key) + return + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + if ssh_key in user["sshKeys"]: + raise KeyAlreadyExists() + + user["sshKeys"].append(ssh_key) + return + + raise UserNotFound() + + +class KeyNotFound(Exception): + """Key not found""" + + pass + + +def remove_ssh_key(username: str, ssh_key: str): + """Delete a ssh key""" + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == "root": + if ssh_key in data["ssh"]["rootKeys"]: + data["ssh"]["rootKeys"].remove(ssh_key) + return + + raise KeyNotFound() + + if username == data["username"]: + if ssh_key in data["sshKeys"]: + data["sshKeys"].remove(ssh_key) + return + + raise 
KeyNotFound() + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + if ssh_key in user["sshKeys"]: + user["sshKeys"].remove(ssh_key) + return + + raise KeyNotFound() + + raise UserNotFound() diff --git a/selfprivacy_api/actions/system.py b/selfprivacy_api/actions/system.py new file mode 100644 index 0000000..853662f --- /dev/null +++ b/selfprivacy_api/actions/system.py @@ -0,0 +1,139 @@ +"""Actions to manage the system.""" +import os +import subprocess +import pytz +from typing import Optional +from pydantic import BaseModel + +from selfprivacy_api.utils import WriteUserData, ReadUserData + + +def get_timezone() -> str: + """Get the timezone of the server""" + with ReadUserData() as user_data: + if "timezone" in user_data: + return user_data["timezone"] + return "Europe/Uzhgorod" + + +class InvalidTimezone(Exception): + """Invalid timezone""" + + pass + + +def change_timezone(timezone: str) -> None: + """Change the timezone of the server""" + if timezone not in pytz.all_timezones: + raise InvalidTimezone(f"Invalid timezone: {timezone}") + with WriteUserData() as user_data: + user_data["timezone"] = timezone + + +class UserDataAutoUpgradeSettings(BaseModel): + """Settings for auto-upgrading user data""" + + enable: bool = True + allowReboot: bool = False + + +def get_auto_upgrade_settings() -> UserDataAutoUpgradeSettings: + """Get the auto-upgrade settings""" + with ReadUserData() as user_data: + if "autoUpgrade" in user_data: + return UserDataAutoUpgradeSettings(**user_data["autoUpgrade"]) + return UserDataAutoUpgradeSettings() + + +def set_auto_upgrade_settings( + enalbe: Optional[bool] = None, allowReboot: Optional[bool] = None +) -> None: + """Set the auto-upgrade settings""" + with WriteUserData() as user_data: + if "autoUpgrade" not in user_data: + user_data["autoUpgrade"] = {} + if enalbe is not None: + user_data["autoUpgrade"]["enable"] = enalbe + if allowReboot is not None: + 
user_data["autoUpgrade"]["allowReboot"] = allowReboot + + +def rebuild_system() -> int: + """Rebuild the system""" + rebuild_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True + ) + rebuild_result.communicate()[0] + return rebuild_result.returncode + + +def rollback_system() -> int: + """Rollback the system""" + rollback_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True + ) + rollback_result.communicate()[0] + return rollback_result.returncode + + +def upgrade_system() -> int: + """Upgrade the system""" + upgrade_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True + ) + upgrade_result.communicate()[0] + return upgrade_result.returncode + + +def reboot_system() -> None: + """Reboot the system""" + subprocess.Popen(["reboot"], start_new_session=True) + + +def get_system_version() -> str: + """Get system version""" + return subprocess.check_output(["uname", "-a"]).decode("utf-8").strip() + + +def get_python_version() -> str: + """Get Python version""" + return subprocess.check_output(["python", "-V"]).decode("utf-8").strip() + + +class SystemActionResult(BaseModel): + """System action result""" + + status: int + message: str + data: str + + +def pull_repository_changes() -> SystemActionResult: + """Pull repository changes""" + git_pull_command = ["git", "pull"] + + current_working_directory = os.getcwd() + os.chdir("/etc/nixos") + + git_pull_process_descriptor = subprocess.Popen( + git_pull_command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + shell=False, + ) + + data = git_pull_process_descriptor.communicate()[0].decode("utf-8") + + os.chdir(current_working_directory) + + if git_pull_process_descriptor.returncode == 0: + return SystemActionResult( + status=0, + message="Pulled repository changes", + data=data, + ) + return SystemActionResult( + status=git_pull_process_descriptor.returncode, + 
message="Failed to pull repository changes", + data=data, + ) diff --git a/selfprivacy_api/actions/users.py b/selfprivacy_api/actions/users.py new file mode 100644 index 0000000..bfc1756 --- /dev/null +++ b/selfprivacy_api/actions/users.py @@ -0,0 +1,219 @@ +"""Actions to manage the users.""" +import re +from typing import Optional +from pydantic import BaseModel +from enum import Enum +from selfprivacy_api.utils import ( + ReadUserData, + WriteUserData, + hash_password, + is_username_forbidden, +) + + +class UserDataUserOrigin(Enum): + """Origin of the user in the user data""" + + NORMAL = "NORMAL" + PRIMARY = "PRIMARY" + ROOT = "ROOT" + + +class UserDataUser(BaseModel): + """The user model from the userdata file""" + + username: str + ssh_keys: list[str] + origin: UserDataUserOrigin + + +def ensure_ssh_and_users_fields_exist(data): + if "ssh" not in data: + data["ssh"] = {} + data["ssh"]["rootKeys"] = [] + + elif data["ssh"].get("rootKeys") is None: + data["ssh"]["rootKeys"] = [] + + if "sshKeys" not in data: + data["sshKeys"] = [] + + if "users" not in data: + data["users"] = [] + + +def get_users( + exclude_primary: bool = False, + exclude_root: bool = False, +) -> list[UserDataUser]: + """Get the list of users""" + users = [] + with ReadUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + users = [ + UserDataUser( + username=user["username"], + ssh_keys=user.get("sshKeys", []), + origin=UserDataUserOrigin.NORMAL, + ) + for user in user_data["users"] + ] + if not exclude_primary: + users.append( + UserDataUser( + username=user_data["username"], + ssh_keys=user_data["sshKeys"], + origin=UserDataUserOrigin.PRIMARY, + ) + ) + if not exclude_root: + users.append( + UserDataUser( + username="root", + ssh_keys=user_data["ssh"]["rootKeys"], + origin=UserDataUserOrigin.ROOT, + ) + ) + return users + + +class UsernameForbidden(Exception): + """Attemted to create a user with a forbidden username""" + + pass + + +class UserAlreadyExists(Exception): + 
"""Attemted to create a user that already exists""" + + pass + + +class UsernameNotAlphanumeric(Exception): + """Attemted to create a user with a non-alphanumeric username""" + + pass + + +class UsernameTooLong(Exception): + """Attemted to create a user with a too long username. Username must be less than 32 characters""" + + pass + + +class PasswordIsEmpty(Exception): + """Attemted to create a user with an empty password""" + + pass + + +def create_user(username: str, password: str): + if password == "": + raise PasswordIsEmpty("Password is empty") + + if is_username_forbidden(username): + raise UsernameForbidden("Username is forbidden") + + if not re.match(r"^[a-z_][a-z0-9_]+$", username): + raise UsernameNotAlphanumeric( + "Username must be alphanumeric and start with a letter" + ) + + if len(username) >= 32: + raise UsernameTooLong("Username must be less than 32 characters") + + with ReadUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + if username == user_data["username"]: + raise UserAlreadyExists("User already exists") + if username in [user["username"] for user in user_data["users"]]: + raise UserAlreadyExists("User already exists") + + hashed_password = hash_password(password) + + with WriteUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + + user_data["users"].append( + {"username": username, "sshKeys": [], "hashedPassword": hashed_password} + ) + + +class UserNotFound(Exception): + """Attemted to get a user that does not exist""" + + pass + + +class UserIsProtected(Exception): + """Attemted to delete a user that is protected""" + + pass + + +def delete_user(username: str): + with WriteUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + if username == user_data["username"] or username == "root": + raise UserIsProtected("Cannot delete main or root user") + + for data_user in user_data["users"]: + if data_user["username"] == username: + user_data["users"].remove(data_user) + break + else: 
+ raise UserNotFound("User did not exist") + + +def update_user(username: str, password: str): + if password == "": + raise PasswordIsEmpty("Password is empty") + + hashed_password = hash_password(password) + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == data["username"]: + data["hashedMasterPassword"] = hashed_password + + # Return 404 if user does not exist + else: + for data_user in data["users"]: + if data_user["username"] == username: + data_user["hashedPassword"] = hashed_password + break + else: + raise UserNotFound("User does not exist") + + +def get_user_by_username(username: str) -> Optional[UserDataUser]: + with ReadUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == "root": + return UserDataUser( + origin=UserDataUserOrigin.ROOT, + username="root", + ssh_keys=data["ssh"]["rootKeys"], + ) + + if username == data["username"]: + return UserDataUser( + origin=UserDataUserOrigin.PRIMARY, + username=username, + ssh_keys=data["sshKeys"], + ) + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + + return UserDataUser( + origin=UserDataUserOrigin.NORMAL, + username=username, + ssh_keys=user["sshKeys"], + ) + + return None diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index b22d034..a65b47b 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -1,111 +1,51 @@ #!/usr/bin/env python3 """SelfPrivacy server management API""" import os -from gevent import monkey +from fastapi import FastAPI, Depends, Request, WebSocket, BackgroundTasks +from fastapi.middleware.cors import CORSMiddleware +from strawberry.fastapi import BaseContext, GraphQLRouter - -from flask import Flask, request, jsonify -from flask_restful import Api -from flask_swagger import swagger -from flask_swagger_ui import get_swaggerui_blueprint -from flask_cors import CORS - -from strawberry.flask.views import AsyncGraphQLView - -from 
selfprivacy_api.resources.users import User, Users -from selfprivacy_api.resources.common import ApiVersion -from selfprivacy_api.resources.system import api_system -from selfprivacy_api.resources.services import services as api_services -from selfprivacy_api.resources.api_auth import auth as api_auth - -from selfprivacy_api.utils.huey import huey +from selfprivacy_api.dependencies import get_api_version, get_graphql_context +from selfprivacy_api.graphql.schema import schema +from selfprivacy_api.migrations import run_migrations from selfprivacy_api.restic_controller.tasks import init_restic -from selfprivacy_api.migrations import run_migrations +from selfprivacy_api.rest import ( + system, + users, + api_auth, + services, +) -from selfprivacy_api.utils.auth import is_token_valid +app = FastAPI() +graphql_app = GraphQLRouter( + schema, + context_getter=get_graphql_context, +) -from selfprivacy_api.graphql.schema import schema - -swagger_blueprint = get_swaggerui_blueprint( - "/api/docs", "/api/swagger.json", config={"app_name": "SelfPrivacy API"} +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], ) -def create_app(test_config=None): - """Initiate Flask app and bind routes""" - app = Flask(__name__) - api = Api(app) - CORS(app) - - if test_config is None: - app.config["ENABLE_SWAGGER"] = os.environ.get("ENABLE_SWAGGER", "0") - app.config["B2_BUCKET"] = os.environ.get("B2_BUCKET") - else: - app.config.update(test_config) - - # Check bearer token - @app.before_request - def check_auth(): - # Exclude swagger-ui, /auth/new_device/authorize, /auth/recovery_token/use - if request.path.startswith("/api"): - pass - elif request.path.startswith("/auth/new_device/authorize"): - pass - elif request.path.startswith("/auth/recovery_token/use"): - pass - elif request.path.startswith("/graphql"): - pass - else: - auth = request.headers.get("Authorization") - if auth is None: - return 
jsonify({"error": "Missing Authorization header"}), 401 - # Strip Bearer from auth header - auth = auth.replace("Bearer ", "") - if not is_token_valid(auth): - return jsonify({"error": "Invalid token"}), 401 - - api.add_resource(ApiVersion, "/api/version") - api.add_resource(Users, "/users") - api.add_resource(User, "/users/") - - app.register_blueprint(api_system) - app.register_blueprint(api_services) - app.register_blueprint(api_auth) - - @app.route("/api/swagger.json") - def spec(): - if app.config["ENABLE_SWAGGER"] == "1": - swag = swagger(app) - swag["info"]["version"] = "1.2.7" - swag["info"]["title"] = "SelfPrivacy API" - swag["info"]["description"] = "SelfPrivacy API" - swag["securityDefinitions"] = { - "bearerAuth": { - "type": "apiKey", - "name": "Authorization", - "in": "header", - } - } - swag["security"] = [{"bearerAuth": []}] - - return jsonify(swag) - return jsonify({}), 404 - - app.add_url_rule( - "/graphql", view_func=AsyncGraphQLView.as_view("graphql", schema=schema) - ) - - if app.config["ENABLE_SWAGGER"] == "1": - app.register_blueprint(swagger_blueprint, url_prefix="/api/docs") - - return app +app.include_router(system.router) +app.include_router(users.router) +app.include_router(api_auth.router) +app.include_router(services.router) +app.include_router(graphql_app, prefix="/graphql") -if __name__ == "__main__": - monkey.patch_all() - created_app = create_app() +@app.get("/api/version") +async def get_version(): + """Get the version of the server""" + return {"version": get_api_version()} + + +@app.on_event("startup") +async def startup(): run_migrations() - huey.start() init_restic() - created_app.run(port=5050, debug=False) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py new file mode 100644 index 0000000..c50d772 --- /dev/null +++ b/selfprivacy_api/dependencies.py @@ -0,0 +1,55 @@ +from fastapi import Depends, FastAPI, HTTPException, status +from typing import Optional +from strawberry.fastapi import 
BaseContext +from fastapi.security import APIKeyHeader +from pydantic import BaseModel + +from selfprivacy_api.utils.auth import is_token_valid + + +class TokenHeader(BaseModel): + token: str + + +async def get_token_header( + token: str = Depends(APIKeyHeader(name="Authorization", auto_error=False)) +) -> TokenHeader: + if token is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Token not provided" + ) + else: + token = token.replace("Bearer ", "") + if not is_token_valid(token): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token" + ) + return TokenHeader(token=token) + + +class GraphQlContext(BaseContext): + def __init__(self, auth_token: Optional[str] = None): + self.auth_token = auth_token + self.is_authenticated = auth_token is not None + + +async def get_graphql_context( + token: str = Depends( + APIKeyHeader( + name="Authorization", + auto_error=False, + ) + ) +) -> GraphQlContext: + if token is None: + return GraphQlContext() + else: + token = token.replace("Bearer ", "") + if not is_token_valid(token): + return GraphQlContext() + return GraphQlContext(auth_token=token) + + +def get_api_version() -> str: + """Get API version""" + return "2.0.0" diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index 5e332f3..e01b158 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -3,7 +3,6 @@ import typing from strawberry.permission import BasePermission from strawberry.types import Info -from flask import request from selfprivacy_api.utils.auth import is_token_valid @@ -14,11 +13,4 @@ class IsAuthenticated(BasePermission): message = "You must be authenticated to access this resource." 
def has_permission(self, source: typing.Any, info: Info, **kwargs) -> bool: - auth = request.headers.get("Authorization") - if auth is None: - return False - # Strip Bearer from auth header - auth = auth.replace("Bearer ", "") - if not is_token_valid(auth): - return False - return True + return info.context.is_authenticated diff --git a/selfprivacy_api/graphql/common_types/user.py b/selfprivacy_api/graphql/common_types/user.py index 8cc5f2c..26ad6f2 100644 --- a/selfprivacy_api/graphql/common_types/user.py +++ b/selfprivacy_api/graphql/common_types/user.py @@ -1,8 +1,8 @@ import typing from enum import Enum import strawberry +import selfprivacy_api.actions.users as users_actions -from selfprivacy_api.utils import ReadUserData from selfprivacy_api.graphql.mutations.mutation_interface import ( MutationReturnInterface, ) @@ -28,51 +28,30 @@ class User: class UserMutationReturn(MutationReturnInterface): """Return type for user mutation""" - user: typing.Optional[User] - - -def ensure_ssh_and_users_fields_exist(data): - if "ssh" not in data: - data["ssh"] = [] - data["ssh"]["rootKeys"] = [] - - elif data["ssh"].get("rootKeys") is None: - data["ssh"]["rootKeys"] = [] - - if "sshKeys" not in data: - data["sshKeys"] = [] - - if "users" not in data: - data["users"] = [] + user: typing.Optional[User] = None def get_user_by_username(username: str) -> typing.Optional[User]: - with ReadUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == "root": - return User( - user_type=UserType.ROOT, - username="root", - ssh_keys=data["ssh"]["rootKeys"], - ) - - if username == data["username"]: - return User( - user_type=UserType.PRIMARY, - username=username, - ssh_keys=data["sshKeys"], - ) - - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - - return User( - user_type=UserType.NORMAL, - username=username, - ssh_keys=user["sshKeys"], - ) + user = users_actions.get_user_by_username(username) + if 
user is None: return None + + return User( + user_type=UserType(user.origin.value), + username=user.username, + ssh_keys=user.ssh_keys, + ) + + +def get_users() -> typing.List[User]: + """Get users""" + users = users_actions.get_users(exclude_root=True) + return [ + User( + user_type=UserType(user.origin.value), + username=user.username, + ssh_keys=user.ssh_keys, + ) + for user in users + ] diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index e0d1057..15fa6dd 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -2,8 +2,16 @@ # pylint: disable=too-few-public-methods import datetime import typing -from flask import request import strawberry +from strawberry.types import Info +from selfprivacy_api.actions.api_tokens import ( + CannotDeleteCallerException, + InvalidExpirationDate, + InvalidUsesLeft, + NotFoundException, + delete_api_token, + get_new_api_recovery_key, +) from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, @@ -12,11 +20,7 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( from selfprivacy_api.utils.auth import ( delete_new_device_auth_token, - delete_token, - generate_recovery_token, get_new_device_auth_token, - is_token_name_exists, - is_token_name_pair_valid, refresh_token, use_mnemonic_recoverery_token, use_new_device_auth_token, @@ -64,27 +68,24 @@ class ApiMutations: self, limits: typing.Optional[RecoveryKeyLimitsInput] = None ) -> ApiKeyMutationReturn: """Generate recovery key""" - if limits is not None: - if limits.expiration_date is not None: - if limits.expiration_date < datetime.datetime.now(): - return ApiKeyMutationReturn( - success=False, - message="Expiration date must be in the future", - code=400, - key=None, - ) - if limits.uses is not None: - if limits.uses < 1: - return 
ApiKeyMutationReturn( - success=False, - message="Uses must be greater than 0", - code=400, - key=None, - ) - if limits is not None: - key = generate_recovery_token(limits.expiration_date, limits.uses) - else: - key = generate_recovery_token(None, None) + if limits is None: + limits = RecoveryKeyLimitsInput() + try: + key = get_new_api_recovery_key(limits.expiration_date, limits.uses) + except InvalidExpirationDate: + return ApiKeyMutationReturn( + success=False, + message="Expiration date must be in the future", + code=400, + key=None, + ) + except InvalidUsesLeft: + return ApiKeyMutationReturn( + success=False, + message="Uses must be greater than 0", + code=400, + key=None, + ) return ApiKeyMutationReturn( success=True, message="Recovery key generated", @@ -113,13 +114,9 @@ class ApiMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def refresh_device_api_token(self) -> DeviceApiTokenMutationReturn: + def refresh_device_api_token(self, info: Info) -> DeviceApiTokenMutationReturn: """Refresh device api token""" - token = ( - request.headers.get("Authorization").split(" ")[1] - if request.headers.get("Authorization") is not None - else None - ) + token = info.context.auth_token if token is None: return DeviceApiTokenMutationReturn( success=False, @@ -143,26 +140,29 @@ class ApiMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def delete_device_api_token(self, device: str) -> GenericMutationReturn: + def delete_device_api_token(self, device: str, info: Info) -> GenericMutationReturn: """Delete device api token""" - self_token = ( - request.headers.get("Authorization").split(" ")[1] - if request.headers.get("Authorization") is not None - else None - ) - if self_token is not None and is_token_name_pair_valid(device, self_token): - return GenericMutationReturn( - success=False, - message="Cannot delete caller's token", - code=400, - ) - if not is_token_name_exists(device): + self_token = info.context.auth_token + try: + 
delete_api_token(self_token, device) + except NotFoundException: return GenericMutationReturn( success=False, message="Token not found", code=404, ) - delete_token(device) + except CannotDeleteCallerException: + return GenericMutationReturn( + success=False, + message="Cannot delete caller token", + code=400, + ) + except Exception as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) return GenericMutationReturn( success=True, message="Token deleted", diff --git a/selfprivacy_api/graphql/mutations/ssh_mutations.py b/selfprivacy_api/graphql/mutations/ssh_mutations.py index b30f474..60f81a8 100644 --- a/selfprivacy_api/graphql/mutations/ssh_mutations.py +++ b/selfprivacy_api/graphql/mutations/ssh_mutations.py @@ -3,9 +3,13 @@ # pylint: disable=too-few-public-methods import strawberry +from selfprivacy_api.actions.users import UserNotFound from selfprivacy_api.graphql import IsAuthenticated -from selfprivacy_api.graphql.mutations.ssh_utils import ( +from selfprivacy_api.actions.ssh import ( + InvalidPublicKey, + KeyAlreadyExists, + KeyNotFound, create_ssh_key, remove_ssh_key, ) @@ -31,12 +35,37 @@ class SshMutations: def add_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: """Add a new ssh key""" - success, message, code = create_ssh_key(ssh_input.username, ssh_input.ssh_key) + try: + create_ssh_key(ssh_input.username, ssh_input.ssh_key) + except KeyAlreadyExists: + return UserMutationReturn( + success=False, + message="Key already exists", + code=409, + ) + except InvalidPublicKey: + return UserMutationReturn( + success=False, + message="Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", + code=400, + ) + except UserNotFound: + return UserMutationReturn( + success=False, + message="User not found", + code=404, + ) + except Exception as e: + return UserMutationReturn( + success=False, + message=str(e), + code=500, + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="New SSH key successfully written", + code=201, user=get_user_by_username(ssh_input.username), ) @@ -44,11 +73,30 @@ class SshMutations: def remove_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: """Remove ssh key from user""" - success, message, code = remove_ssh_key(ssh_input.username, ssh_input.ssh_key) + try: + remove_ssh_key(ssh_input.username, ssh_input.ssh_key) + except KeyNotFound: + return UserMutationReturn( + success=False, + message="Key not found", + code=404, + ) + except UserNotFound: + return UserMutationReturn( + success=False, + message="User not found", + code=404, + ) + except Exception as e: + return UserMutationReturn( + success=False, + message=str(e), + code=500, + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="SSH key successfully removed", + code=200, user=get_user_by_username(ssh_input.username), ) diff --git a/selfprivacy_api/graphql/mutations/ssh_utils.py b/selfprivacy_api/graphql/mutations/ssh_utils.py deleted file mode 100644 index 3dbc152..0000000 --- a/selfprivacy_api/graphql/mutations/ssh_utils.py +++ /dev/null @@ -1,74 +0,0 @@ -from selfprivacy_api.graphql.common_types.user import ensure_ssh_and_users_fields_exist -from selfprivacy_api.utils import ( - WriteUserData, - validate_ssh_public_key, -) - - -def create_ssh_key(username: str, ssh_key: str) -> tuple[bool, str, int]: - """Create a new ssh key""" - - if not validate_ssh_public_key(ssh_key): - return ( - False, - "Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", - 400, - ) - - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == data["username"]: - if ssh_key in data["sshKeys"]: - return False, "Key already exists", 409 - - data["sshKeys"].append(ssh_key) - return True, "New SSH key successfully written", 201 - - if username == "root": - if ssh_key in data["ssh"]["rootKeys"]: - return False, "Key already exists", 409 - - data["ssh"]["rootKeys"].append(ssh_key) - return True, "New SSH key successfully written", 201 - - for user in data["users"]: - if user["username"] == username: - if ssh_key in user["sshKeys"]: - return False, "Key already exists", 409 - - user["sshKeys"].append(ssh_key) - return True, "New SSH key successfully written", 201 - - return False, "User not found", 404 - - -def remove_ssh_key(username: str, ssh_key: str) -> tuple[bool, str, int]: - """Delete a ssh key""" - - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == "root": - if ssh_key in data["ssh"]["rootKeys"]: - data["ssh"]["rootKeys"].remove(ssh_key) - return True, "SSH key deleted", 200 - - return False, "Key not found", 404 - - if username == data["username"]: - if ssh_key in data["sshKeys"]: - data["sshKeys"].remove(ssh_key) - return True, "SSH key deleted", 200 - - return False, "Key not found", 404 - - for user in data["users"]: - if user["username"] == username: - if ssh_key in user["sshKeys"]: - user["sshKeys"].remove(ssh_key) - return True, "SSH key deleted", 200 - - return False, "Key not found", 404 - - return False, "User not found", 404 diff --git a/selfprivacy_api/graphql/mutations/storage_mutation.py b/selfprivacy_api/graphql/mutations/storage_mutation.py index ff69aea..1275945 100644 --- a/selfprivacy_api/graphql/mutations/storage_mutation.py +++ b/selfprivacy_api/graphql/mutations/storage_mutation.py @@ -1,5 +1,4 @@ """Storage devices mutations""" -import typing import strawberry from selfprivacy_api.graphql 
import IsAuthenticated from selfprivacy_api.utils.block_devices import BlockDevices diff --git a/selfprivacy_api/graphql/mutations/system_mutations.py b/selfprivacy_api/graphql/mutations/system_mutations.py index 057c26f..c19460f 100644 --- a/selfprivacy_api/graphql/mutations/system_mutations.py +++ b/selfprivacy_api/graphql/mutations/system_mutations.py @@ -1,15 +1,14 @@ """System management mutations""" # pylint: disable=too-few-public-methods -import subprocess import typing -import pytz import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, MutationReturnInterface, ) -from selfprivacy_api.utils import WriteUserData + +import selfprivacy_api.actions.system as system_actions @strawberry.type @@ -42,15 +41,15 @@ class SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def change_timezone(self, timezone: str) -> TimezoneMutationReturn: """Change the timezone of the server. 
Timezone is a tzdatabase name.""" - if timezone not in pytz.all_timezones: + try: + system_actions.change_timezone(timezone) + except system_actions.InvalidTimezone as e: return TimezoneMutationReturn( success=False, - message="Invalid timezone", + message=str(e), code=400, timezone=None, ) - with WriteUserData() as data: - data["timezone"] = timezone return TimezoneMutationReturn( success=True, message="Timezone changed", @@ -63,36 +62,23 @@ class SystemMutations: self, settings: AutoUpgradeSettingsInput ) -> AutoUpgradeSettingsMutationReturn: """Change auto upgrade settings of the server.""" - with WriteUserData() as data: - if "autoUpgrade" not in data: - data["autoUpgrade"] = {} - if "enable" not in data["autoUpgrade"]: - data["autoUpgrade"]["enable"] = True - if "allowReboot" not in data["autoUpgrade"]: - data["autoUpgrade"]["allowReboot"] = False + system_actions.set_auto_upgrade_settings( + settings.enableAutoUpgrade, settings.allowReboot + ) - if settings.enableAutoUpgrade is not None: - data["autoUpgrade"]["enable"] = settings.enableAutoUpgrade - if settings.allowReboot is not None: - data["autoUpgrade"]["allowReboot"] = settings.allowReboot - - auto_upgrade = data["autoUpgrade"]["enable"] - allow_reboot = data["autoUpgrade"]["allowReboot"] + new_settings = system_actions.get_auto_upgrade_settings() return AutoUpgradeSettingsMutationReturn( success=True, message="Auto-upgrade settings changed", code=200, - enableAutoUpgrade=auto_upgrade, - allowReboot=allow_reboot, + enableAutoUpgrade=new_settings.enable, + allowReboot=new_settings.allowReboot, ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_rebuild(self) -> GenericMutationReturn: - rebuild_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True - ) - rebuild_result.communicate()[0] + system_actions.rebuild_system() return GenericMutationReturn( success=True, message="Starting rebuild system", @@ -101,10 +87,7 @@ class 
SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_rollback(self) -> GenericMutationReturn: - rollback_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True - ) - rollback_result.communicate()[0] + system_actions.rollback_system() return GenericMutationReturn( success=True, message="Starting rebuild system", @@ -113,10 +96,7 @@ class SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_upgrade(self) -> GenericMutationReturn: - upgrade_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True - ) - upgrade_result.communicate()[0] + system_actions.upgrade_system() return GenericMutationReturn( success=True, message="Starting rebuild system", @@ -125,7 +105,7 @@ class SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def reboot_system(self) -> GenericMutationReturn: - subprocess.Popen(["reboot"], start_new_session=True) + system_actions.reboot_system() return GenericMutationReturn( success=True, message="System reboot has started", diff --git a/selfprivacy_api/graphql/mutations/users_mutations.py b/selfprivacy_api/graphql/mutations/users_mutations.py index a284ff2..27be1d7 100644 --- a/selfprivacy_api/graphql/mutations/users_mutations.py +++ b/selfprivacy_api/graphql/mutations/users_mutations.py @@ -10,11 +10,7 @@ from selfprivacy_api.graphql.common_types.user import ( from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, ) -from selfprivacy_api.graphql.mutations.users_utils import ( - create_user, - delete_user, - update_user, -) +import selfprivacy_api.actions.users as users_actions @strawberry.input @@ -31,35 +27,91 @@ class UserMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def create_user(self, user: UserMutationInput) -> UserMutationReturn: - - success, message, code = create_user(user.username, 
user.password) + try: + users_actions.create_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UsernameForbidden as e: + return UserMutationReturn( + success=False, + message=str(e), + code=409, + ) + except users_actions.UsernameNotAlphanumeric as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UsernameTooLong as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UserAlreadyExists as e: + return UserMutationReturn( + success=False, + message=str(e), + code=409, + user=get_user_by_username(user.username), + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="User created", + code=201, user=get_user_by_username(user.username), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def delete_user(self, username: str) -> GenericMutationReturn: - success, message, code = delete_user(username) + try: + users_actions.delete_user(username) + except users_actions.UserNotFound as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=404, + ) + except users_actions.UserIsProtected as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=400, + ) return GenericMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="User deleted", + code=200, ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def update_user(self, user: UserMutationInput) -> UserMutationReturn: """Update user mutation""" - - success, message, code = update_user(user.username, user.password) + try: + users_actions.update_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UserNotFound as 
e: + return UserMutationReturn( + success=False, + message=str(e), + code=404, + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="User updated", + code=200, user=get_user_by_username(user.username), ) diff --git a/selfprivacy_api/graphql/mutations/users_utils.py b/selfprivacy_api/graphql/mutations/users_utils.py deleted file mode 100644 index f649b45..0000000 --- a/selfprivacy_api/graphql/mutations/users_utils.py +++ /dev/null @@ -1,111 +0,0 @@ -import re -from selfprivacy_api.utils import ( - WriteUserData, - ReadUserData, - is_username_forbidden, -) -from selfprivacy_api.utils import hash_password - - -def ensure_ssh_and_users_fields_exist(data): - if "ssh" not in data: - data["ssh"] = [] - data["ssh"]["rootKeys"] = [] - - elif data["ssh"].get("rootKeys") is None: - data["ssh"]["rootKeys"] = [] - - if "sshKeys" not in data: - data["sshKeys"] = [] - - if "users" not in data: - data["users"] = [] - - -def create_user(username: str, password: str) -> tuple[bool, str, int]: - """Create a new user""" - - # Check if password is null or none - if password == "": - return False, "Password is null", 400 - - # Check if username is forbidden - if is_username_forbidden(username): - return False, "Username is forbidden", 409 - - # Check is username passes regex - if not re.match(r"^[a-z_][a-z0-9_]+$", username): - return False, "Username must be alphanumeric", 400 - - # Check if username less than 32 characters - if len(username) >= 32: - return False, "Username must be less than 32 characters", 400 - - with ReadUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - # Return 409 if user already exists - if data["username"] == username: - return False, "User already exists", 409 - - for data_user in data["users"]: - if data_user["username"] == username: - return False, "User already exists", 409 - - hashed_password = hash_password(password) - - with WriteUserData() as data: - 
ensure_ssh_and_users_fields_exist(data) - - data["users"].append( - { - "username": username, - "hashedPassword": hashed_password, - "sshKeys": [], - } - ) - - return True, "User was successfully created!", 201 - - -def delete_user(username: str) -> tuple[bool, str, int]: - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == data["username"] or username == "root": - return False, "Cannot delete main or root user", 400 - - # Return 404 if user does not exist - for data_user in data["users"]: - if data_user["username"] == username: - data["users"].remove(data_user) - break - else: - return False, "User does not exist", 404 - - return True, "User was deleted", 200 - - -def update_user(username: str, password: str) -> tuple[bool, str, int]: - # Check if password is null or none - if password == "": - return False, "Password is null", 400 - - hashed_password = hash_password(password) - - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == data["username"]: - data["hashedMasterPassword"] = hashed_password - - # Return 404 if user does not exist - else: - for data_user in data["users"]: - if data_user["username"] == username: - data_user["hashedPassword"] = hashed_password - break - else: - return False, "User does not exist", 404 - - return True, "User was successfully updated", 200 diff --git a/selfprivacy_api/graphql/queries/api_queries.py b/selfprivacy_api/graphql/queries/api_queries.py index b2a81d2..ccdf89f 100644 --- a/selfprivacy_api/graphql/queries/api_queries.py +++ b/selfprivacy_api/graphql/queries/api_queries.py @@ -2,20 +2,16 @@ # pylint: disable=too-few-public-methods import datetime import typing -from flask import request import strawberry +from strawberry.types import Info +from selfprivacy_api.actions.api_tokens import get_api_tokens_with_caller_flag from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.utils import parse_date from selfprivacy_api.utils.auth 
import ( get_recovery_token_status, - get_tokens_info, is_recovery_token_exists, is_recovery_token_valid, - is_token_name_exists, - is_token_name_pair_valid, - refresh_token, - get_token_name, ) @@ -33,24 +29,6 @@ class ApiDevice: is_caller: bool -def get_devices() -> typing.List[ApiDevice]: - """Get list of devices""" - caller_name = get_token_name( - request.headers.get("Authorization").split(" ")[1] - if request.headers.get("Authorization") is not None - else None - ) - tokens = get_tokens_info() - return [ - ApiDevice( - name=token["name"], - creation_date=parse_date(token["date"]), - is_caller=token["name"] == caller_name, - ) - for token in tokens - ] - - @strawberry.type class ApiRecoveryKeyStatus: """Recovery key status""" @@ -97,9 +75,18 @@ class Api: """API access status""" version: str = strawberry.field(resolver=get_api_version) - devices: typing.List[ApiDevice] = strawberry.field( - resolver=get_devices, permission_classes=[IsAuthenticated] - ) + + @strawberry.field(permission_classes=[IsAuthenticated]) + def devices(self, info: Info) -> typing.List[ApiDevice]: + return [ + ApiDevice( + name=device.name, + creation_date=device.date, + is_caller=device.is_caller, + ) + for device in get_api_tokens_with_caller_flag(info.context.auth_token) + ] + recovery_key: ApiRecoveryKeyStatus = strawberry.field( resolver=get_recovery_key_status, permission_classes=[IsAuthenticated] ) diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py index 774d465..6d0381e 100644 --- a/selfprivacy_api/graphql/queries/providers.py +++ b/selfprivacy_api/graphql/queries/providers.py @@ -1,7 +1,5 @@ """Enums representing different service providers.""" from enum import Enum -import datetime -import typing import strawberry diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index a235e4d..b5fb6a6 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ 
b/selfprivacy_api/graphql/queries/system.py @@ -1,12 +1,13 @@ """Common system information and settings""" # pylint: disable=too-few-public-methods -import subprocess import typing import strawberry from selfprivacy_api.graphql.queries.common import Alert, Severity from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvider from selfprivacy_api.utils import ReadUserData +import selfprivacy_api.actions.system as system_actions +import selfprivacy_api.actions.ssh as ssh_actions @strawberry.type @@ -52,17 +53,11 @@ class AutoUpgradeOptions: def get_auto_upgrade_options() -> AutoUpgradeOptions: """Get automatic upgrade options""" - with ReadUserData() as user_data: - if "autoUpgrade" not in user_data: - return AutoUpgradeOptions(enable=True, allow_reboot=False) - if "enable" not in user_data["autoUpgrade"]: - user_data["autoUpgrade"]["enable"] = True - if "allowReboot" not in user_data["autoUpgrade"]: - user_data["autoUpgrade"]["allowReboot"] = False - return AutoUpgradeOptions( - enable=user_data["autoUpgrade"]["enable"], - allow_reboot=user_data["autoUpgrade"]["allowReboot"], - ) + settings = system_actions.get_auto_upgrade_settings() + return AutoUpgradeOptions( + enable=settings.enable, + allow_reboot=settings.allowReboot, + ) @strawberry.type @@ -76,30 +71,18 @@ class SshSettings: def get_ssh_settings() -> SshSettings: """Get SSH settings""" - with ReadUserData() as user_data: - if "ssh" not in user_data: - return SshSettings( - enable=False, password_authentication=False, root_ssh_keys=[] - ) - if "enable" not in user_data["ssh"]: - user_data["ssh"]["enable"] = False - if "passwordAuthentication" not in user_data["ssh"]: - user_data["ssh"]["passwordAuthentication"] = False - if "rootKeys" not in user_data["ssh"]: - user_data["ssh"]["rootKeys"] = [] - return SshSettings( - enable=user_data["ssh"]["enable"], - password_authentication=user_data["ssh"]["passwordAuthentication"], - root_ssh_keys=user_data["ssh"]["rootKeys"], - ) + settings = 
ssh_actions.get_ssh_settings() + return SshSettings( + enable=settings.enable, + password_authentication=settings.passwordAuthentication, + root_ssh_keys=settings.rootSshKeys, + ) def get_system_timezone() -> str: """Get system timezone""" with ReadUserData() as user_data: - if "timezone" not in user_data: - return "Europe/Uzhgorod" - return user_data["timezone"] + return system_actions.get_timezone() @strawberry.type @@ -115,12 +98,12 @@ class SystemSettings: def get_system_version() -> str: """Get system version""" - return subprocess.check_output(["uname", "-a"]).decode("utf-8").strip() + return system_actions.get_system_version() def get_python_version() -> str: """Get Python version""" - return subprocess.check_output(["python", "-V"]).decode("utf-8").strip() + return system_actions.get_python_version() @strawberry.type diff --git a/selfprivacy_api/graphql/queries/users.py b/selfprivacy_api/graphql/queries/users.py index fc18a84..d2c0555 100644 --- a/selfprivacy_api/graphql/queries/users.py +++ b/selfprivacy_api/graphql/queries/users.py @@ -5,27 +5,12 @@ import strawberry from selfprivacy_api.graphql.common_types.user import ( User, - ensure_ssh_and_users_fields_exist, get_user_by_username, + get_users, ) -from selfprivacy_api.utils import ReadUserData from selfprivacy_api.graphql import IsAuthenticated -def get_users() -> typing.List[User]: - """Get users""" - user_list = [] - with ReadUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - for user in data["users"]: - user_list.append(get_user_by_username(user["username"])) - - user_list.append(get_user_by_username(data["username"])) - - return user_list - - @strawberry.type class Users: @strawberry.field(permission_classes=[IsAuthenticated]) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index c9dd025..c00fe79 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -129,7 +129,9 @@ class Jobs: """ self.observers.append(observer) - 
def remove_observer(self, observer: typing.Callable[[typing.List[Job]], None]) -> None: + def remove_observer( + self, observer: typing.Callable[[typing.List[Job]], None] + ) -> None: """ Remove an observer from the jobs list. """ @@ -143,7 +145,12 @@ class Jobs: observer(self.jobs) def add( - self, name: str, description: str, status: JobStatus = JobStatus.CREATED, status_text: str = "", progress: int = 0 + self, + name: str, + description: str, + status: JobStatus = JobStatus.CREATED, + status_text: str = "", + progress: int = 0, ) -> Job: """ Add a job to the jobs list. diff --git a/selfprivacy_api/jobs/test.py b/selfprivacy_api/jobs/test.py index f3c03af..d07ea6c 100644 --- a/selfprivacy_api/jobs/test.py +++ b/selfprivacy_api/jobs/test.py @@ -1,7 +1,9 @@ import time -from selfprivacy_api.utils.huey import huey +from selfprivacy_api.utils.huey import Huey from selfprivacy_api.jobs import JobStatus, Jobs +huey = Huey() + @huey.task() def test_job(): diff --git a/selfprivacy_api/resources/api_auth/__init__.py b/selfprivacy_api/resources/api_auth/__init__.py deleted file mode 100644 index 9bd1703..0000000 --- a/selfprivacy_api/resources/api_auth/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python3 -"""API authentication module""" - -from flask import Blueprint -from flask_restful import Api - -auth = Blueprint("auth", __name__, url_prefix="/auth") -api = Api(auth) - -from . 
import ( - new_device, - recovery_token, - app_tokens, -) diff --git a/selfprivacy_api/resources/api_auth/app_tokens.py b/selfprivacy_api/resources/api_auth/app_tokens.py deleted file mode 100644 index 940c60a..0000000 --- a/selfprivacy_api/resources/api_auth/app_tokens.py +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env python3 -"""App tokens management module""" -from flask import request -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils.auth import ( - delete_token, - get_tokens_info, - is_token_name_exists, - is_token_name_pair_valid, - refresh_token, - get_token_name, -) - - -class Tokens(Resource): - """Token management class - GET returns the list of active devices. - DELETE invalidates token unless it is the last one or the caller uses this token. - POST refreshes the token of the caller. - """ - - def get(self): - """ - Get current device tokens - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: List of tokens - 400: - description: Bad request - """ - caller_name = get_token_name(request.headers.get("Authorization").split(" ")[1]) - tokens = get_tokens_info() - # Retrun a list of tokens and if it is the caller's token - # it will be marked with a flag - return [ - { - "name": token["name"], - "date": token["date"], - "is_caller": token["name"] == caller_name, - } - for token in tokens - ] - - def delete(self): - """ - Delete token - --- - tags: - - Tokens - security: - - bearerAuth: [] - parameters: - - in: body - name: token - required: true - description: Token's name to delete - schema: - type: object - properties: - token_name: - type: string - description: Token name to delete - required: true - responses: - 200: - description: Token deleted - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token_name", type=str, required=True, help="Token to 
delete" - ) - args = parser.parse_args() - token_name = args["token_name"] - if is_token_name_pair_valid( - token_name, request.headers.get("Authorization").split(" ")[1] - ): - return {"message": "Cannot delete caller's token"}, 400 - if not is_token_name_exists(token_name): - return {"message": "Token not found"}, 404 - delete_token(token_name) - return {"message": "Token deleted"}, 200 - - def post(self): - """ - Refresh token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: Token refreshed - 400: - description: Bad request - 404: - description: Token not found - """ - # Get token from header - token = request.headers.get("Authorization").split(" ")[1] - new_token = refresh_token(token) - if new_token is None: - return {"message": "Token not found"}, 404 - return {"token": new_token}, 200 - - -api.add_resource(Tokens, "/tokens") diff --git a/selfprivacy_api/resources/api_auth/new_device.py b/selfprivacy_api/resources/api_auth/new_device.py deleted file mode 100644 index 2c0bde1..0000000 --- a/selfprivacy_api/resources/api_auth/new_device.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python3 -"""New device auth module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils.auth import ( - get_new_device_auth_token, - use_new_device_auth_token, - delete_new_device_auth_token, -) - - -class NewDevice(Resource): - """New device auth class - POST returns a new token for the caller. 
- """ - - def post(self): - """ - Get new device token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: New device token - 400: - description: Bad request - """ - token = get_new_device_auth_token() - return {"token": token} - - def delete(self): - """ - Delete new device token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: New device token deleted - 400: - description: Bad request - """ - delete_new_device_auth_token() - return {"token": None} - - -class AuthorizeDevice(Resource): - """Authorize device class - POST authorizes the caller. - """ - - def post(self): - """ - Authorize device - --- - tags: - - Tokens - parameters: - - in: body - name: data - required: true - description: Who is authorizing - schema: - type: object - properties: - token: - type: string - description: Mnemonic token to authorize - device: - type: string - description: Device to authorize - responses: - 200: - description: Device authorized - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token", type=str, required=True, help="Mnemonic token to authorize" - ) - parser.add_argument( - "device", type=str, required=True, help="Device to authorize" - ) - args = parser.parse_args() - auth_token = args["token"] - device = args["device"] - token = use_new_device_auth_token(auth_token, device) - if token is None: - return {"message": "Token not found"}, 404 - return {"message": "Device authorized", "token": token}, 200 - - -api.add_resource(NewDevice, "/new_device") -api.add_resource(AuthorizeDevice, "/new_device/authorize") diff --git a/selfprivacy_api/resources/api_auth/recovery_token.py b/selfprivacy_api/resources/api_auth/recovery_token.py deleted file mode 100644 index 912a50b..0000000 --- a/selfprivacy_api/resources/api_auth/recovery_token.py +++ /dev/null @@ -1,205 +0,0 @@ -#!/usr/bin/env python3 -"""Recovery token 
module""" -from datetime import datetime -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils import parse_date -from selfprivacy_api.utils.auth import ( - is_recovery_token_exists, - is_recovery_token_valid, - get_recovery_token_status, - generate_recovery_token, - use_mnemonic_recoverery_token, -) - - -class RecoveryToken(Resource): - """Recovery token class - GET returns the status of the recovery token. - POST generates a new recovery token. - """ - - def get(self): - """ - Get recovery token status - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: Recovery token status - schema: - type: object - properties: - exists: - type: boolean - description: Recovery token exists - valid: - type: boolean - description: Recovery token is valid - date: - type: string - description: Recovery token date - expiration: - type: string - description: Recovery token expiration date - uses_left: - type: integer - description: Recovery token uses left - 400: - description: Bad request - """ - if not is_recovery_token_exists(): - return { - "exists": False, - "valid": False, - "date": None, - "expiration": None, - "uses_left": None, - } - status = get_recovery_token_status() - # check if status is None - if status is None: - return { - "exists": False, - "valid": False, - "date": None, - "expiration": None, - "uses_left": None, - } - - if not is_recovery_token_valid(): - return { - "exists": True, - "valid": False, - "date": status["date"], - "expiration": status["expiration"], - "uses_left": status["uses_left"], - } - return { - "exists": True, - "valid": True, - "date": status["date"], - "expiration": status["expiration"], - "uses_left": status["uses_left"], - } - - def post(self): - """ - Generate recovery token - --- - tags: - - Tokens - security: - - bearerAuth: [] - parameters: - - in: body - name: data - required: true - description: Token data - schema: - type: 
object - properties: - expiration: - type: string - description: Token expiration date - uses: - type: integer - description: Token uses - responses: - 200: - description: Recovery token generated - schema: - type: object - properties: - token: - type: string - description: Mnemonic recovery token - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument( - "expiration", type=str, required=False, help="Token expiration date" - ) - parser.add_argument("uses", type=int, required=False, help="Token uses") - args = parser.parse_args() - # Convert expiration date to datetime and return 400 if it is not valid - if args["expiration"]: - try: - expiration = parse_date(args["expiration"]) - # Retrun 400 if expiration date is in the past - if expiration < datetime.now(): - return {"message": "Expiration date cannot be in the past"}, 400 - except ValueError: - return { - "error": "Invalid expiration date. Use YYYY-MM-DDTHH:MM:SS.SSS" - }, 400 - else: - expiration = None - if args["uses"] is not None and args["uses"] < 1: - return {"message": "Uses must be greater than 0"}, 400 - # Generate recovery token - token = generate_recovery_token(expiration, args["uses"]) - return {"token": token} - - -class UseRecoveryToken(Resource): - """Use recovery token class - POST uses the recovery token. 
- """ - - def post(self): - """ - Use recovery token - --- - tags: - - Tokens - parameters: - - in: body - name: data - required: true - description: Token data - schema: - type: object - properties: - token: - type: string - description: Mnemonic recovery token - device: - type: string - description: Device to authorize - responses: - 200: - description: Recovery token used - schema: - type: object - properties: - token: - type: string - description: Device authorization token - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token", type=str, required=True, help="Mnemonic recovery token" - ) - parser.add_argument( - "device", type=str, required=True, help="Device to authorize" - ) - args = parser.parse_args() - # Use recovery token - token = use_mnemonic_recoverery_token(args["token"], args["device"]) - if token is None: - return {"error": "Token not found"}, 404 - return {"token": token} - - -api.add_resource(RecoveryToken, "/recovery_token") -api.add_resource(UseRecoveryToken, "/recovery_token/use") diff --git a/selfprivacy_api/resources/common.py b/selfprivacy_api/resources/common.py deleted file mode 100644 index f78aad6..0000000 --- a/selfprivacy_api/resources/common.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python3 -"""Unassigned views""" -from flask_restful import Resource -from selfprivacy_api.graphql.queries.api_queries import get_api_version - - -class ApiVersion(Resource): - """SelfPrivacy API version""" - - def get(self): - """Get API version - --- - tags: - - System - responses: - 200: - description: API version - schema: - type: object - properties: - version: - type: string - description: API version - 401: - description: Unauthorized - """ - return {"version": get_api_version()} diff --git a/selfprivacy_api/resources/services/__init__.py b/selfprivacy_api/resources/services/__init__.py deleted file mode 100644 index a7f1dbe..0000000 --- 
a/selfprivacy_api/resources/services/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -"""Services management module""" -from flask import Blueprint -from flask_restful import Api - -services = Blueprint("services", __name__, url_prefix="/services") -api = Api(services) - -from . import ( - bitwarden, - gitea, - mailserver, - main, - nextcloud, - ocserv, - pleroma, - restic, - ssh, -) diff --git a/selfprivacy_api/resources/services/bitwarden.py b/selfprivacy_api/resources/services/bitwarden.py deleted file mode 100644 index 412ba8a..0000000 --- a/selfprivacy_api/resources/services/bitwarden.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Bitwarden management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableBitwarden(Resource): - """Enable Bitwarden""" - - def post(self): - """ - Enable Bitwarden - --- - tags: - - Bitwarden - security: - - bearerAuth: [] - responses: - 200: - description: Bitwarden enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "bitwarden" not in data: - data["bitwarden"] = {} - data["bitwarden"]["enable"] = True - - return { - "status": 0, - "message": "Bitwarden enabled", - } - - -class DisableBitwarden(Resource): - """Disable Bitwarden""" - - def post(self): - """ - Disable Bitwarden - --- - tags: - - Bitwarden - security: - - bearerAuth: [] - responses: - 200: - description: Bitwarden disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "bitwarden" not in data: - data["bitwarden"] = {} - data["bitwarden"]["enable"] = False - - return { - "status": 0, - "message": "Bitwarden disabled", - } - - -api.add_resource(EnableBitwarden, "/bitwarden/enable") -api.add_resource(DisableBitwarden, "/bitwarden/disable") diff --git a/selfprivacy_api/resources/services/gitea.py b/selfprivacy_api/resources/services/gitea.py deleted file mode 
100644 index bd4b8de..0000000 --- a/selfprivacy_api/resources/services/gitea.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Gitea management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableGitea(Resource): - """Enable Gitea""" - - def post(self): - """ - Enable Gitea - --- - tags: - - Gitea - security: - - bearerAuth: [] - responses: - 200: - description: Gitea enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "gitea" not in data: - data["gitea"] = {} - data["gitea"]["enable"] = True - - return { - "status": 0, - "message": "Gitea enabled", - } - - -class DisableGitea(Resource): - """Disable Gitea""" - - def post(self): - """ - Disable Gitea - --- - tags: - - Gitea - security: - - bearerAuth: [] - responses: - 200: - description: Gitea disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "gitea" not in data: - data["gitea"] = {} - data["gitea"]["enable"] = False - - return { - "status": 0, - "message": "Gitea disabled", - } - - -api.add_resource(EnableGitea, "/gitea/enable") -api.add_resource(DisableGitea, "/gitea/disable") diff --git a/selfprivacy_api/resources/services/mailserver.py b/selfprivacy_api/resources/services/mailserver.py deleted file mode 100644 index 01fa574..0000000 --- a/selfprivacy_api/resources/services/mailserver.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python3 -"""Mail server management module""" -import base64 -import subprocess -import os -from flask_restful import Resource - -from selfprivacy_api.resources.services import api - -from selfprivacy_api.utils import get_dkim_key, get_domain - - -class DKIMKey(Resource): - """Get DKIM key from file""" - - def get(self): - """ - Get DKIM key from file - --- - tags: - - Email - security: - - bearerAuth: [] - responses: - 200: - description: DKIM key encoded in base64 - 401: - description: 
Unauthorized - 404: - description: DKIM key not found - """ - domain = get_domain() - - dkim = get_dkim_key(domain) - if dkim is None: - return "DKIM file not found", 404 - dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") - return dkim - - -api.add_resource(DKIMKey, "/mailserver/dkim") diff --git a/selfprivacy_api/resources/services/main.py b/selfprivacy_api/resources/services/main.py deleted file mode 100644 index 8b6743c..0000000 --- a/selfprivacy_api/resources/services/main.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python3 -"""Services status module""" -import subprocess -from flask_restful import Resource - -from . import api - - -class ServiceStatus(Resource): - """Get service status""" - - def get(self): - """ - Get service status - --- - tags: - - Services - responses: - 200: - description: Service status - schema: - type: object - properties: - imap: - type: integer - description: Dovecot service status - smtp: - type: integer - description: Postfix service status - http: - type: integer - description: Nginx service status - bitwarden: - type: integer - description: Bitwarden service status - gitea: - type: integer - description: Gitea service status - nextcloud: - type: integer - description: Nextcloud service status - ocserv: - type: integer - description: OpenConnect VPN service status - pleroma: - type: integer - description: Pleroma service status - 401: - description: Unauthorized - """ - imap_service = subprocess.Popen(["systemctl", "status", "dovecot2.service"]) - imap_service.communicate()[0] - smtp_service = subprocess.Popen(["systemctl", "status", "postfix.service"]) - smtp_service.communicate()[0] - http_service = subprocess.Popen(["systemctl", "status", "nginx.service"]) - http_service.communicate()[0] - bitwarden_service = subprocess.Popen( - ["systemctl", "status", "vaultwarden.service"] - ) - bitwarden_service.communicate()[0] - gitea_service = subprocess.Popen(["systemctl", "status", "gitea.service"]) - 
gitea_service.communicate()[0] - nextcloud_service = subprocess.Popen( - ["systemctl", "status", "phpfpm-nextcloud.service"] - ) - nextcloud_service.communicate()[0] - ocserv_service = subprocess.Popen(["systemctl", "status", "ocserv.service"]) - ocserv_service.communicate()[0] - pleroma_service = subprocess.Popen(["systemctl", "status", "pleroma.service"]) - pleroma_service.communicate()[0] - - return { - "imap": imap_service.returncode, - "smtp": smtp_service.returncode, - "http": http_service.returncode, - "bitwarden": bitwarden_service.returncode, - "gitea": gitea_service.returncode, - "nextcloud": nextcloud_service.returncode, - "ocserv": ocserv_service.returncode, - "pleroma": pleroma_service.returncode, - } - - -api.add_resource(ServiceStatus, "/status") diff --git a/selfprivacy_api/resources/services/nextcloud.py b/selfprivacy_api/resources/services/nextcloud.py deleted file mode 100644 index 3aa9d06..0000000 --- a/selfprivacy_api/resources/services/nextcloud.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Nextcloud management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableNextcloud(Resource): - """Enable Nextcloud""" - - def post(self): - """ - Enable Nextcloud - --- - tags: - - Nextcloud - security: - - bearerAuth: [] - responses: - 200: - description: Nextcloud enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "nextcloud" not in data: - data["nextcloud"] = {} - data["nextcloud"]["enable"] = True - - return { - "status": 0, - "message": "Nextcloud enabled", - } - - -class DisableNextcloud(Resource): - """Disable Nextcloud""" - - def post(self): - """ - Disable Nextcloud - --- - tags: - - Nextcloud - security: - - bearerAuth: [] - responses: - 200: - description: Nextcloud disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "nextcloud" not in data: - 
data["nextcloud"] = {} - data["nextcloud"]["enable"] = False - - return { - "status": 0, - "message": "Nextcloud disabled", - } - - -api.add_resource(EnableNextcloud, "/nextcloud/enable") -api.add_resource(DisableNextcloud, "/nextcloud/disable") diff --git a/selfprivacy_api/resources/services/ocserv.py b/selfprivacy_api/resources/services/ocserv.py deleted file mode 100644 index 4dc83da..0000000 --- a/selfprivacy_api/resources/services/ocserv.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""OpenConnect VPN server management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableOcserv(Resource): - """Enable OpenConnect VPN server""" - - def post(self): - """ - Enable OCserv - --- - tags: - - OCserv - security: - - bearerAuth: [] - responses: - 200: - description: OCserv enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ocserv" not in data: - data["ocserv"] = {} - data["ocserv"]["enable"] = True - - return { - "status": 0, - "message": "OpenConnect VPN server enabled", - } - - -class DisableOcserv(Resource): - """Disable OpenConnect VPN server""" - - def post(self): - """ - Disable OCserv - --- - tags: - - OCserv - security: - - bearerAuth: [] - responses: - 200: - description: OCserv disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ocserv" not in data: - data["ocserv"] = {} - data["ocserv"]["enable"] = False - - return { - "status": 0, - "message": "OpenConnect VPN server disabled", - } - - -api.add_resource(EnableOcserv, "/ocserv/enable") -api.add_resource(DisableOcserv, "/ocserv/disable") diff --git a/selfprivacy_api/resources/services/pleroma.py b/selfprivacy_api/resources/services/pleroma.py deleted file mode 100644 index aaf08f0..0000000 --- a/selfprivacy_api/resources/services/pleroma.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Pleroma 
management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnablePleroma(Resource): - """Enable Pleroma""" - - def post(self): - """ - Enable Pleroma - --- - tags: - - Pleroma - security: - - bearerAuth: [] - responses: - 200: - description: Pleroma enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "pleroma" not in data: - data["pleroma"] = {} - data["pleroma"]["enable"] = True - - return { - "status": 0, - "message": "Pleroma enabled", - } - - -class DisablePleroma(Resource): - """Disable Pleroma""" - - def post(self): - """ - Disable Pleroma - --- - tags: - - Pleroma - security: - - bearerAuth: [] - responses: - 200: - description: Pleroma disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "pleroma" not in data: - data["pleroma"] = {} - data["pleroma"]["enable"] = False - - return { - "status": 0, - "message": "Pleroma disabled", - } - - -api.add_resource(EnablePleroma, "/pleroma/enable") -api.add_resource(DisablePleroma, "/pleroma/disable") diff --git a/selfprivacy_api/resources/services/restic.py b/selfprivacy_api/resources/services/restic.py deleted file mode 100644 index dd22c9a..0000000 --- a/selfprivacy_api/resources/services/restic.py +++ /dev/null @@ -1,241 +0,0 @@ -#!/usr/bin/env python3 -"""Backups management module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData -from selfprivacy_api.restic_controller import tasks as restic_tasks -from selfprivacy_api.restic_controller import ResticController, ResticStates - - -class ListAllBackups(Resource): - """List all restic backups""" - - def get(self): - """ - Get all restic backups - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: A list of snapshots - 400: - description: Bad request - 
401: - description: Unauthorized - """ - - restic = ResticController() - return restic.snapshot_list - - -class AsyncCreateBackup(Resource): - """Create a new restic backup""" - - def put(self): - """ - Initiate a new restic backup - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: Backup creation has started - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: Backup already in progress - """ - restic = ResticController() - if restic.state is ResticStates.NO_KEY: - return {"error": "No key provided"}, 400 - if restic.state is ResticStates.INITIALIZING: - return {"error": "Backup is initializing"}, 400 - if restic.state is ResticStates.BACKING_UP: - return {"error": "Backup is already running"}, 409 - restic_tasks.start_backup() - return { - "status": 0, - "message": "Backup creation has started", - } - - -class CheckBackupStatus(Resource): - """Check current backup status""" - - def get(self): - """ - Get backup status - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: Backup status - 400: - description: Bad request - 401: - description: Unauthorized - """ - restic = ResticController() - - return { - "status": restic.state.name, - "progress": restic.progress, - "error_message": restic.error_message, - } - - -class ForceReloadSnapshots(Resource): - """Force reload snapshots""" - - def get(self): - """ - Force reload snapshots - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: Snapshots reloaded - 400: - description: Bad request - 401: - description: Unauthorized - """ - restic_tasks.load_snapshots() - return { - "status": 0, - "message": "Snapshots reload started", - } - - -class AsyncRestoreBackup(Resource): - """Trigger backup restoration process""" - - def put(self): - """ - Start backup restoration - --- - tags: - - Backups - security: - - bearerAuth: [] - parameters: - - in: body - required: true - 
name: backup - description: Backup to restore - schema: - type: object - required: - - backupId - properties: - backupId: - type: string - responses: - 200: - description: Backup restoration process started - 400: - description: Bad request - 401: - description: Unauthorized - """ - parser = reqparse.RequestParser() - parser.add_argument("backupId", type=str, required=True) - args = parser.parse_args() - - restic = ResticController() - if restic.state is ResticStates.NO_KEY: - return {"error": "No key provided"}, 400 - if restic.state is ResticStates.NOT_INITIALIZED: - return {"error": "Repository is not initialized"}, 400 - if restic.state is ResticStates.BACKING_UP: - return {"error": "Backup is already running"}, 409 - if restic.state is ResticStates.INITIALIZING: - return {"error": "Repository is initializing"}, 400 - if restic.state is ResticStates.RESTORING: - return {"error": "Restore is already running"}, 409 - for backup in restic.snapshot_list: - if backup["short_id"] == args["backupId"]: - restic_tasks.restore_from_backup(args["backupId"]) - return { - "status": 0, - "message": "Backup restoration procedure started", - } - - return {"error": "Backup not found"}, 404 - - -class BackblazeConfig(Resource): - """Backblaze config""" - - def put(self): - """ - Set the new key for backblaze - --- - tags: - - Backups - security: - - bearerAuth: [] - parameters: - - in: body - required: true - name: backblazeSettings - description: New Backblaze settings - schema: - type: object - required: - - accountId - - accountKey - - bucket - properties: - accountId: - type: string - accountKey: - type: string - bucket: - type: string - responses: - 200: - description: New Backblaze settings - 400: - description: Bad request - 401: - description: Unauthorized - """ - parser = reqparse.RequestParser() - parser.add_argument("accountId", type=str, required=True) - parser.add_argument("accountKey", type=str, required=True) - parser.add_argument("bucket", type=str, 
required=True) - args = parser.parse_args() - - with WriteUserData() as data: - if "backblaze" not in data: - data["backblaze"] = {} - data["backblaze"]["accountId"] = args["accountId"] - data["backblaze"]["accountKey"] = args["accountKey"] - data["backblaze"]["bucket"] = args["bucket"] - - restic_tasks.update_keys_from_userdata() - - return "New Backblaze settings saved" - - -api.add_resource(ListAllBackups, "/restic/backup/list") -api.add_resource(AsyncCreateBackup, "/restic/backup/create") -api.add_resource(CheckBackupStatus, "/restic/backup/status") -api.add_resource(AsyncRestoreBackup, "/restic/backup/restore") -api.add_resource(BackblazeConfig, "/restic/backblaze/config") -api.add_resource(ForceReloadSnapshots, "/restic/backup/reload") diff --git a/selfprivacy_api/resources/services/ssh.py b/selfprivacy_api/resources/services/ssh.py deleted file mode 100644 index 3ea5a1d..0000000 --- a/selfprivacy_api/resources/services/ssh.py +++ /dev/null @@ -1,407 +0,0 @@ -#!/usr/bin/env python3 -"""SSH management module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData, ReadUserData, validate_ssh_public_key - - -class EnableSSH(Resource): - """Enable SSH""" - - def post(self): - """ - Enable SSH - --- - tags: - - SSH - security: - - bearerAuth: [] - responses: - 200: - description: SSH enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - data["ssh"]["enable"] = True - - return { - "status": 0, - "message": "SSH enabled", - } - - -class SSHSettings(Resource): - """Enable/disable SSH""" - - def get(self): - """ - Get current SSH settings - --- - tags: - - SSH - security: - - bearerAuth: [] - responses: - 200: - description: SSH settings - 400: - description: Bad request - """ - with ReadUserData() as data: - if "ssh" not in data: - return {"enable": True, "passwordAuthentication": True} - if "enable" 
not in data["ssh"]: - data["ssh"]["enable"] = True - if "passwordAuthentication" not in data["ssh"]: - data["ssh"]["passwordAuthentication"] = True - return { - "enable": data["ssh"]["enable"], - "passwordAuthentication": data["ssh"]["passwordAuthentication"], - } - - def put(self): - """ - Change SSH settings - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - name: sshSettings - in: body - required: true - description: SSH settings - schema: - type: object - required: - - enable - - passwordAuthentication - properties: - enable: - type: boolean - passwordAuthentication: - type: boolean - responses: - 200: - description: New settings saved - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("enable", type=bool, required=False) - parser.add_argument("passwordAuthentication", type=bool, required=False) - args = parser.parse_args() - enable = args["enable"] - password_authentication = args["passwordAuthentication"] - - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - if enable is not None: - data["ssh"]["enable"] = enable - if password_authentication is not None: - data["ssh"]["passwordAuthentication"] = password_authentication - - return "SSH settings changed" - - -class WriteSSHKey(Resource): - """Write new SSH key""" - - def put(self): - """ - Add a SSH root key - --- - consumes: - - application/json - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - name: body - required: true - description: Public key to add - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - description: ssh-ed25519 public key. - responses: - 201: - description: Key added - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: Key already exists - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", type=str, required=True, help="Key cannot be blank!" 
- ) - args = parser.parse_args() - - public_key = args["public_key"] - - if not validate_ssh_public_key(public_key): - return { - "error": "Invalid key type. Only ssh-ed25519 and ssh-rsa are supported.", - }, 400 - - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - # Return 409 if key already in array - for key in data["ssh"]["rootKeys"]: - if key == public_key: - return { - "error": "Key already exists", - }, 409 - data["ssh"]["rootKeys"].append(public_key) - - return { - "status": 0, - "message": "New SSH key successfully written", - }, 201 - - -class SSHKeys(Resource): - """List SSH keys""" - - def get(self, username): - """ - List SSH keys - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: path - name: username - type: string - required: true - description: User to list keys for - responses: - 200: - description: SSH keys - 401: - description: Unauthorized - """ - with ReadUserData() as data: - if username == "root": - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - return data["ssh"]["rootKeys"] - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - return data["sshKeys"] - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - return user["sshKeys"] - return { - "error": "User not found", - }, 404 - - def post(self, username): - """ - Add SSH key to the user - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - required: true - name: public_key - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - - in: path - name: username - type: string - required: true - description: User to add keys for - responses: - 201: - description: SSH key added - 401: - description: Unauthorized - 
404: - description: User not found - 409: - description: Key already exists - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", type=str, required=True, help="Key cannot be blank!" - ) - args = parser.parse_args() - - if username == "root": - return { - "error": "Use /ssh/key/send to add root keys", - }, 400 - - if not validate_ssh_public_key(args["public_key"]): - return { - "error": "Invalid key type. Only ssh-ed25519 and ssh-rsa are supported.", - }, 400 - - with WriteUserData() as data: - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - # Return 409 if key already in array - for key in data["sshKeys"]: - if key == args["public_key"]: - return { - "error": "Key already exists", - }, 409 - data["sshKeys"].append(args["public_key"]) - return { - "message": "New SSH key successfully written", - }, 201 - - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - # Return 409 if key already in array - for key in user["sshKeys"]: - if key == args["public_key"]: - return { - "error": "Key already exists", - }, 409 - user["sshKeys"].append(args["public_key"]) - return { - "message": "New SSH key successfully written", - }, 201 - return { - "error": "User not found", - }, 404 - - def delete(self, username): - """ - Delete SSH key - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - name: public_key - required: true - description: Key to delete - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - - in: path - name: username - type: string - required: true - description: User to delete keys for - responses: - 200: - description: SSH key deleted - 401: - description: Unauthorized - 404: - description: Key not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", type=str, required=True, help="Key 
cannot be blank!" - ) - args = parser.parse_args() - - with WriteUserData() as data: - if username == "root": - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - # Return 404 if key not in array - for key in data["ssh"]["rootKeys"]: - if key == args["public_key"]: - data["ssh"]["rootKeys"].remove(key) - # If rootKeys became zero length, delete it - if len(data["ssh"]["rootKeys"]) == 0: - del data["ssh"]["rootKeys"] - return { - "message": "SSH key deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - # Return 404 if key not in array - for key in data["sshKeys"]: - if key == args["public_key"]: - data["sshKeys"].remove(key) - return { - "message": "SSH key deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - # Return 404 if key not in array - for key in user["sshKeys"]: - if key == args["public_key"]: - user["sshKeys"].remove(key) - return { - "message": "SSH key successfully deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - return { - "error": "User not found", - }, 404 - - -api.add_resource(EnableSSH, "/ssh/enable") -api.add_resource(SSHSettings, "/ssh") - -api.add_resource(WriteSSHKey, "/ssh/key/send") -api.add_resource(SSHKeys, "/ssh/keys/") diff --git a/selfprivacy_api/resources/system.py b/selfprivacy_api/resources/system.py deleted file mode 100644 index 958616e..0000000 --- a/selfprivacy_api/resources/system.py +++ /dev/null @@ -1,346 +0,0 @@ -#!/usr/bin/env python3 -"""System management module""" -import os -import subprocess -import pytz -from flask import Blueprint -from flask_restful import Resource, Api, reqparse -from selfprivacy_api.graphql.queries.system import ( - get_python_version, - 
get_system_version, -) - -from selfprivacy_api.utils import WriteUserData, ReadUserData - -api_system = Blueprint("system", __name__, url_prefix="/system") -api = Api(api_system) - - -class Timezone(Resource): - """Change timezone of NixOS""" - - def get(self): - """ - Get current system timezone - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Timezone - 400: - description: Bad request - """ - with ReadUserData() as data: - if "timezone" not in data: - return "Europe/Uzhgorod" - return data["timezone"] - - def put(self): - """ - Change system timezone - --- - tags: - - System - security: - - bearerAuth: [] - parameters: - - name: timezone - in: body - required: true - description: Timezone to set - schema: - type: object - required: - - timezone - properties: - timezone: - type: string - responses: - 200: - description: Timezone changed - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("timezone", type=str, required=True) - timezone = parser.parse_args()["timezone"] - - # Check if timezone is a valid tzdata string - if timezone not in pytz.all_timezones: - return {"error": "Invalid timezone"}, 400 - - with WriteUserData() as data: - data["timezone"] = timezone - return "Timezone changed" - - -class AutoUpgrade(Resource): - """Enable/disable automatic upgrades and reboots""" - - def get(self): - """ - Get current system autoupgrade settings - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Auto-upgrade settings - 400: - description: Bad request - """ - with ReadUserData() as data: - if "autoUpgrade" not in data: - return {"enable": True, "allowReboot": False} - if "enable" not in data["autoUpgrade"]: - data["autoUpgrade"]["enable"] = True - if "allowReboot" not in data["autoUpgrade"]: - data["autoUpgrade"]["allowReboot"] = False - return data["autoUpgrade"] - - def put(self): - """ - Change system auto upgrade settings - --- - tags: - - 
System - security: - - bearerAuth: [] - parameters: - - name: autoUpgrade - in: body - required: true - description: Auto upgrade settings - schema: - type: object - required: - - enable - - allowReboot - properties: - enable: - type: boolean - allowReboot: - type: boolean - responses: - 200: - description: New settings saved - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("enable", type=bool, required=False) - parser.add_argument("allowReboot", type=bool, required=False) - args = parser.parse_args() - enable = args["enable"] - allow_reboot = args["allowReboot"] - - with WriteUserData() as data: - if "autoUpgrade" not in data: - data["autoUpgrade"] = {} - if enable is not None: - data["autoUpgrade"]["enable"] = enable - if allow_reboot is not None: - data["autoUpgrade"]["allowReboot"] = allow_reboot - return "Auto-upgrade settings changed" - - -class RebuildSystem(Resource): - """Rebuild NixOS""" - - def get(self): - """ - Rebuild NixOS with nixos-rebuild switch - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System rebuild has started - 401: - description: Unauthorized - """ - rebuild_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True - ) - rebuild_result.communicate()[0] - return rebuild_result.returncode - - -class RollbackSystem(Resource): - """Rollback NixOS""" - - def get(self): - """ - Rollback NixOS with nixos-rebuild switch --rollback - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System rollback has started - 401: - description: Unauthorized - """ - rollback_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True - ) - rollback_result.communicate()[0] - return rollback_result.returncode - - -class UpgradeSystem(Resource): - """Upgrade NixOS""" - - def get(self): - """ - Upgrade NixOS with nixos-rebuild switch --upgrade - 
--- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System upgrade has started - 401: - description: Unauthorized - """ - upgrade_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True - ) - upgrade_result.communicate()[0] - return upgrade_result.returncode - - -class RebootSystem(Resource): - """Reboot the system""" - - def get(self): - """ - Reboot the system - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System reboot has started - 401: - description: Unauthorized - """ - subprocess.Popen(["reboot"], start_new_session=True) - return "System reboot has started" - - -class SystemVersion(Resource): - """Get system version from uname""" - - def get(self): - """ - Get system version from uname -a - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: OK - 401: - description: Unauthorized - """ - return { - "system_version": get_system_version(), - } - - -class PythonVersion(Resource): - """Get python version""" - - def get(self): - """ - Get python version used by this API - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: OK - 401: - description: Unauthorized - """ - return get_python_version() - - -class PullRepositoryChanges(Resource): - """Pull NixOS config repository changes""" - - def get(self): - """ - Pull Repository Changes - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Got update - 201: - description: Nothing to update - 401: - description: Unauthorized - 500: - description: Something went wrong - """ - - git_pull_command = ["git", "pull"] - - current_working_directory = os.getcwd() - os.chdir("/etc/nixos") - - git_pull_process_descriptor = subprocess.Popen( - git_pull_command, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - shell=False, - ) - - data = 
git_pull_process_descriptor.communicate()[0].decode("utf-8") - - os.chdir(current_working_directory) - - if git_pull_process_descriptor.returncode == 0: - return { - "status": 0, - "message": "Update completed successfully", - "data": data, - } - return { - "status": git_pull_process_descriptor.returncode, - "message": "Something went wrong", - "data": data, - }, 500 - - -api.add_resource(Timezone, "/configuration/timezone") -api.add_resource(AutoUpgrade, "/configuration/autoUpgrade") -api.add_resource(RebuildSystem, "/configuration/apply") -api.add_resource(RollbackSystem, "/configuration/rollback") -api.add_resource(UpgradeSystem, "/configuration/upgrade") -api.add_resource(RebootSystem, "/reboot") -api.add_resource(SystemVersion, "/version") -api.add_resource(PythonVersion, "/pythonVersion") -api.add_resource(PullRepositoryChanges, "/configuration/pull") diff --git a/selfprivacy_api/resources/users.py b/selfprivacy_api/resources/users.py deleted file mode 100644 index e114324..0000000 --- a/selfprivacy_api/resources/users.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/env python3 -"""Users management module""" -import subprocess -import re -from flask_restful import Resource, reqparse - -from selfprivacy_api.utils import WriteUserData, ReadUserData, is_username_forbidden - - -class Users(Resource): - """Users management""" - - def get(self): - """ - Get a list of users - --- - tags: - - Users - security: - - bearerAuth: [] - responses: - 200: - description: A list of users - 401: - description: Unauthorized - """ - parser = reqparse.RequestParser(bundle_errors=True) - parser.add_argument("withMainUser", type=bool, required=False) - args = parser.parse_args() - with_main_user = False if args["withMainUser"] is None else args["withMainUser"] - - with ReadUserData() as data: - users = [] - if with_main_user: - users.append(data["username"]) - if "users" in data: - for user in data["users"]: - users.append(user["username"]) - return users - - def post(self): - """ - 
Create a new user - --- - consumes: - - application/json - tags: - - Users - security: - - bearerAuth: [] - parameters: - - in: body - name: user - required: true - description: User to create - schema: - type: object - required: - - username - - password - properties: - username: - type: string - description: Unix username. Must be alphanumeric and less than 32 characters - password: - type: string - description: Unix password. - responses: - 201: - description: Created user - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: User already exists - """ - parser = reqparse.RequestParser(bundle_errors=True) - parser.add_argument("username", type=str, required=True) - parser.add_argument("password", type=str, required=True) - args = parser.parse_args() - hashing_command = ["mkpasswd", "-m", "sha-512", args["password"]] - password_hash_process_descriptor = subprocess.Popen( - hashing_command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - hashed_password = password_hash_process_descriptor.communicate()[0] - hashed_password = hashed_password.decode("ascii") - hashed_password = hashed_password.rstrip() - # Check if username is forbidden - if is_username_forbidden(args["username"]): - return {"message": "Username is forbidden"}, 409 - # Check is username passes regex - if not re.match(r"^[a-z_][a-z0-9_]+$", args["username"]): - return {"error": "username must be alphanumeric"}, 400 - # Check if username less than 32 characters - if len(args["username"]) >= 32: - return {"error": "username must be less than 32 characters"}, 400 - - with WriteUserData() as data: - if "users" not in data: - data["users"] = [] - - # Return 409 if user already exists - if data["username"] == args["username"]: - return {"error": "User already exists"}, 409 - - for user in data["users"]: - if user["username"] == args["username"]: - return {"error": "User already exists"}, 409 - - data["users"].append( - { - "username": 
args["username"], - "hashedPassword": hashed_password, - } - ) - - return {"result": 0, "username": args["username"]}, 201 - - -class User(Resource): - """Single user managment""" - - def delete(self, username): - """ - Delete a user - --- - tags: - - Users - security: - - bearerAuth: [] - parameters: - - in: path - name: username - required: true - description: User to delete - type: string - responses: - 200: - description: Deleted user - 400: - description: Bad request - 401: - description: Unauthorized - 404: - description: User not found - """ - with WriteUserData() as data: - if username == data["username"]: - return {"error": "Cannot delete root user"}, 400 - # Return 400 if user does not exist - for user in data["users"]: - if user["username"] == username: - data["users"].remove(user) - break - else: - return {"error": "User does not exist"}, 404 - - return {"result": 0, "username": username} diff --git a/selfprivacy_api/rest/__init__.py b/selfprivacy_api/rest/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/rest/api_auth.py b/selfprivacy_api/rest/api_auth.py new file mode 100644 index 0000000..f73056c --- /dev/null +++ b/selfprivacy_api/rest/api_auth.py @@ -0,0 +1,127 @@ +from datetime import datetime +from typing import Optional +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel +from selfprivacy_api.actions.api_tokens import ( + CannotDeleteCallerException, + InvalidExpirationDate, + InvalidUsesLeft, + NotFoundException, + delete_api_token, + get_api_recovery_token_status, + get_api_tokens_with_caller_flag, + get_new_api_recovery_key, + refresh_api_token, +) + +from selfprivacy_api.dependencies import TokenHeader, get_token_header + +from selfprivacy_api.utils.auth import ( + delete_new_device_auth_token, + get_new_device_auth_token, + use_mnemonic_recoverery_token, + use_new_device_auth_token, +) + +router = APIRouter( + prefix="/auth", + tags=["auth"], + responses={404: 
{"description": "Not found"}}, +) + + +@router.get("/tokens") +async def rest_get_tokens(auth_token: TokenHeader = Depends(get_token_header)): + """Get the tokens info""" + return get_api_tokens_with_caller_flag(auth_token.token) + + +class DeleteTokenInput(BaseModel): + """Delete token input""" + + token_name: str + + +@router.delete("/tokens") +async def rest_delete_tokens( + token: DeleteTokenInput, auth_token: TokenHeader = Depends(get_token_header) +): + """Delete the tokens""" + try: + delete_api_token(auth_token.token, token.token_name) + except NotFoundException: + raise HTTPException(status_code=404, detail="Token not found") + except CannotDeleteCallerException: + raise HTTPException(status_code=400, detail="Cannot delete caller's token") + return {"message": "Token deleted"} + + +@router.post("/tokens") +async def rest_refresh_token(auth_token: TokenHeader = Depends(get_token_header)): + """Refresh the token""" + try: + new_token = refresh_api_token(auth_token.token) + except NotFoundException: + raise HTTPException(status_code=404, detail="Token not found") + return {"token": new_token} + + +@router.get("/recovery_token") +async def rest_get_recovery_token_status( + auth_token: TokenHeader = Depends(get_token_header), +): + return get_api_recovery_token_status() + + +class CreateRecoveryTokenInput(BaseModel): + expiration: Optional[datetime] = None + uses: Optional[int] = None + + +@router.post("/recovery_token") +async def rest_create_recovery_token( + limits: CreateRecoveryTokenInput = CreateRecoveryTokenInput(), + auth_token: TokenHeader = Depends(get_token_header), +): + try: + token = get_new_api_recovery_key(limits.expiration, limits.uses) + except InvalidExpirationDate as e: + raise HTTPException(status_code=400, detail=str(e)) + except InvalidUsesLeft as e: + raise HTTPException(status_code=400, detail=str(e)) + return {"token": token} + + +class UseTokenInput(BaseModel): + token: str + device: str + + +@router.post("/recovery_token/use") +async 
def rest_use_recovery_token(input: UseTokenInput): + token = use_mnemonic_recoverery_token(input.token, input.device) + if token is None: + raise HTTPException(status_code=404, detail="Token not found") + return {"token": token} + + +@router.post("/new_device") +async def rest_new_device(auth_token: TokenHeader = Depends(get_token_header)): + token = get_new_device_auth_token() + return {"token": token} + + +@router.delete("/new_device") +async def rest_delete_new_device_token( + auth_token: TokenHeader = Depends(get_token_header), +): + delete_new_device_auth_token() + return {"token": None} + + +@router.post("/new_device/authorize") +async def rest_new_device_authorize(input: UseTokenInput): + token = use_new_device_auth_token(input.token, input.device) + if token is None: + raise HTTPException(status_code=404, detail="Token not found") + return {"message": "Device authorized", "token": token} diff --git a/selfprivacy_api/rest/services.py b/selfprivacy_api/rest/services.py new file mode 100644 index 0000000..d374e7b --- /dev/null +++ b/selfprivacy_api/rest/services.py @@ -0,0 +1,371 @@ +"""Basic services legacy api""" +import base64 +from typing import Optional +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel +from selfprivacy_api.actions.ssh import ( + InvalidPublicKey, + KeyAlreadyExists, + KeyNotFound, + create_ssh_key, + enable_ssh, + get_ssh_settings, + remove_ssh_key, + set_ssh_settings, +) +from selfprivacy_api.actions.users import UserNotFound, get_user_by_username + +from selfprivacy_api.dependencies import get_token_header +from selfprivacy_api.restic_controller import ResticController, ResticStates +from selfprivacy_api.restic_controller import tasks as restic_tasks +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from 
selfprivacy_api.services.ocserv import Ocserv +from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.services.service import ServiceStatus +from selfprivacy_api.utils import WriteUserData, get_dkim_key, get_domain + +router = APIRouter( + prefix="/services", + tags=["services"], + dependencies=[Depends(get_token_header)], + responses={404: {"description": "Not found"}}, +) + + +def service_status_to_return_code(status: ServiceStatus): + if status == ServiceStatus.RUNNING: + return 0 + elif status == ServiceStatus.ERROR: + return 1 + elif status == ServiceStatus.STOPPED: + return 3 + elif status == ServiceStatus.OFF: + return 4 + else: + return 2 + + +@router.get("/status") +async def get_status(): + """Get the status of the services""" + mail_status = MailServer.get_status() + bitwarden_status = Bitwarden.get_status() + gitea_status = Gitea.get_status() + nextcloud_status = Nextcloud.get_status() + ocserv_stauts = Ocserv.get_status() + pleroma_status = Pleroma.get_status() + + return { + "imap": service_status_to_return_code(mail_status), + "smtp": service_status_to_return_code(mail_status), + "http": 0, + "bitwarden": service_status_to_return_code(bitwarden_status), + "gitea": service_status_to_return_code(gitea_status), + "nextcloud": service_status_to_return_code(nextcloud_status), + "ocserv": service_status_to_return_code(ocserv_stauts), + "pleroma": service_status_to_return_code(pleroma_status), + } + + +@router.post("/bitwarden/enable") +async def enable_bitwarden(): + """Enable Bitwarden""" + Bitwarden.enable() + return { + "status": 0, + "message": "Bitwarden enabled", + } + + +@router.post("/bitwarden/disable") +async def disable_bitwarden(): + """Disable Bitwarden""" + Bitwarden.disable() + return { + "status": 0, + "message": "Bitwarden disabled", + } + + +@router.post("/gitea/enable") +async def enable_gitea(): + """Enable Gitea""" + Gitea.enable() + return { + "status": 0, + "message": "Gitea enabled", + } + + 
+@router.post("/gitea/disable") +async def disable_gitea(): + """Disable Gitea""" + Gitea.disable() + return { + "status": 0, + "message": "Gitea disabled", + } + + +@router.get("/mailserver/dkim") +async def get_mailserver_dkim(): + """Get the DKIM record for the mailserver""" + domain = get_domain() + + dkim = get_dkim_key(domain) + if dkim is None: + raise HTTPException(status_code=404, detail="DKIM record not found") + dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") + return dkim + + +@router.post("/nextcloud/enable") +async def enable_nextcloud(): + """Enable Nextcloud""" + Nextcloud.enable() + return { + "status": 0, + "message": "Nextcloud enabled", + } + + +@router.post("/nextcloud/disable") +async def disable_nextcloud(): + """Disable Nextcloud""" + Nextcloud.disable() + return { + "status": 0, + "message": "Nextcloud disabled", + } + + +@router.post("/ocserv/enable") +async def enable_ocserv(): + """Enable Ocserv""" + Ocserv.enable() + return { + "status": 0, + "message": "Ocserv enabled", + } + + +@router.post("/ocserv/disable") +async def disable_ocserv(): + """Disable Ocserv""" + Ocserv.disable() + return { + "status": 0, + "message": "Ocserv disabled", + } + + +@router.post("/pleroma/enable") +async def enable_pleroma(): + """Enable Pleroma""" + Pleroma.enable() + return { + "status": 0, + "message": "Pleroma enabled", + } + + +@router.post("/pleroma/disable") +async def disable_pleroma(): + """Disable Pleroma""" + Pleroma.disable() + return { + "status": 0, + "message": "Pleroma disabled", + } + + +@router.get("/restic/backup/list") +async def get_restic_backup_list(): + restic = ResticController() + return restic.snapshot_list + + +@router.put("/restic/backup/create") +async def create_restic_backup(): + restic = ResticController() + if restic.state is ResticStates.NO_KEY: + raise HTTPException(status_code=400, detail="Backup key not provided") + if restic.state is ResticStates.INITIALIZING: + raise HTTPException(status_code=400, 
detail="Backup is initializing") + if restic.state is ResticStates.BACKING_UP: + raise HTTPException(status_code=409, detail="Backup is already running") + restic_tasks.start_backup() + return { + "status": 0, + "message": "Backup creation has started", + } + + +@router.get("/restic/backup/status") +async def get_restic_backup_status(): + restic = ResticController() + + return { + "status": restic.state.name, + "progress": restic.progress, + "error_message": restic.error_message, + } + + +@router.get("/restic/backup/reload") +async def reload_restic_backup(): + restic_tasks.load_snapshots() + return { + "status": 0, + "message": "Snapshots reload started", + } + + +class BackupRestoreInput(BaseModel): + backupId: str + + +@router.put("/restic/backup/restore") +async def restore_restic_backup(backup: BackupRestoreInput): + restic = ResticController() + if restic.state is ResticStates.NO_KEY: + raise HTTPException(status_code=400, detail="Backup key not provided") + if restic.state is ResticStates.NOT_INITIALIZED: + raise HTTPException( + status_code=400, detail="Backups repository is not initialized" + ) + if restic.state is ResticStates.BACKING_UP: + raise HTTPException(status_code=409, detail="Backup is already running") + if restic.state is ResticStates.INITIALIZING: + raise HTTPException(status_code=400, detail="Repository is initializing") + if restic.state is ResticStates.RESTORING: + raise HTTPException(status_code=409, detail="Restore is already running") + + for backup_item in restic.snapshot_list: + if backup_item["short_id"] == backup.backupId: + restic_tasks.restore_from_backup(backup.backupId) + return { + "status": 0, + "message": "Backup restoration procedure started", + } + + raise HTTPException(status_code=404, detail="Backup not found") + + +class BackblazeConfigInput(BaseModel): + accountId: str + accountKey: str + bucket: str + + +@router.put("/restic/backblaze/config") +async def set_backblaze_config(backblaze_config: BackblazeConfigInput): + 
with WriteUserData() as data: + if "backblaze" not in data: + data["backblaze"] = {} + data["backblaze"]["accountId"] = backblaze_config.accountId + data["backblaze"]["accountKey"] = backblaze_config.accountKey + data["backblaze"]["bucket"] = backblaze_config.bucket + + restic_tasks.update_keys_from_userdata() + + return "New Backblaze settings saved" + + +@router.post("/ssh/enable") +async def rest_enable_ssh(): + """Enable SSH""" + enable_ssh() + return { + "status": 0, + "message": "SSH enabled", + } + + +@router.get("/ssh") +async def rest_get_ssh(): + """Get the SSH configuration""" + settings = get_ssh_settings() + return { + "enable": settings.enable, + "passwordAuthentication": settings.passwordAuthentication, + } + + +class SshConfigInput(BaseModel): + enable: Optional[bool] = None + passwordAuthentication: Optional[bool] = None + + +@router.put("/ssh") +async def rest_set_ssh(ssh_config: SshConfigInput): + """Set the SSH configuration""" + set_ssh_settings(ssh_config.enable, ssh_config.passwordAuthentication) + + return "SSH settings changed" + + +class SshKeyInput(BaseModel): + public_key: str + + +@router.put("/ssh/key/send", status_code=201) +async def rest_send_ssh_key(input: SshKeyInput): + """Send the SSH key""" + try: + create_ssh_key("root", input.public_key) + except KeyAlreadyExists: + raise HTTPException(status_code=409, detail="Key already exists") + except InvalidPublicKey: + raise HTTPException( + status_code=400, + detail="Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", + ) + + return { + "status": 0, + "message": "SSH key sent", + } + + +@router.get("/ssh/keys/{username}") +async def rest_get_ssh_keys(username: str): + """Get the SSH keys for a user""" + user = get_user_by_username(username) + if user is None: + raise HTTPException(status_code=404, detail="User not found") + + return user.ssh_keys + + +@router.post("/ssh/keys/{username}", status_code=201) +async def rest_add_ssh_key(username: str, input: SshKeyInput): + try: + create_ssh_key(username, input.public_key) + except KeyAlreadyExists: + raise HTTPException(status_code=409, detail="Key already exists") + except InvalidPublicKey: + raise HTTPException( + status_code=400, + detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", + ) + except UserNotFound: + raise HTTPException(status_code=404, detail="User not found") + + return { + "message": "New SSH key successfully written", + } + + +@router.delete("/ssh/keys/{username}") +async def rest_delete_ssh_key(username: str, input: SshKeyInput): + try: + remove_ssh_key(username, input.public_key) + except KeyNotFound: + raise HTTPException(status_code=404, detail="Key not found") + except UserNotFound: + raise HTTPException(status_code=404, detail="User not found") + return {"message": "SSH key deleted"} diff --git a/selfprivacy_api/rest/system.py b/selfprivacy_api/rest/system.py new file mode 100644 index 0000000..9933fb3 --- /dev/null +++ b/selfprivacy_api/rest/system.py @@ -0,0 +1,105 @@ +from typing import Optional +from fastapi import APIRouter, Body, Depends, HTTPException +from pydantic import BaseModel + +from selfprivacy_api.dependencies import get_token_header + +import selfprivacy_api.actions.system as system_actions + +router = APIRouter( + prefix="/system", + tags=["system"], + dependencies=[Depends(get_token_header)], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/configuration/timezone") +async def get_timezone(): + """Get the 
timezone of the server""" + return system_actions.get_timezone() + + +class ChangeTimezoneRequestBody(BaseModel): + """Change the timezone of the server""" + + timezone: str + + +@router.put("/configuration/timezone") +async def change_timezone(timezone: ChangeTimezoneRequestBody): + """Change the timezone of the server""" + try: + system_actions.change_timezone(timezone.timezone) + except system_actions.InvalidTimezone as e: + raise HTTPException(status_code=400, detail=str(e)) + return {"timezone": timezone.timezone} + + +@router.get("/configuration/autoUpgrade") +async def get_auto_upgrade_settings(): + """Get the auto-upgrade settings""" + return system_actions.get_auto_upgrade_settings().dict() + + +class AutoUpgradeSettings(BaseModel): + """Settings for auto-upgrading user data""" + + enable: Optional[bool] = None + allowReboot: Optional[bool] = None + + +@router.put("/configuration/autoUpgrade") +async def set_auto_upgrade_settings(settings: AutoUpgradeSettings): + """Set the auto-upgrade settings""" + system_actions.set_auto_upgrade_settings(settings.enable, settings.allowReboot) + return "Auto-upgrade settings changed" + + +@router.get("/configuration/apply") +async def apply_configuration(): + """Apply the configuration""" + return_code = system_actions.rebuild_system() + return return_code + + +@router.get("/configuration/rollback") +async def rollback_configuration(): + """Rollback the configuration""" + return_code = system_actions.rollback_system() + return return_code + + +@router.get("/configuration/upgrade") +async def upgrade_configuration(): + """Upgrade the configuration""" + return_code = system_actions.upgrade_system() + return return_code + + +@router.get("/reboot") +async def reboot_system(): + """Reboot the system""" + system_actions.reboot_system() + return "System reboot has started" + + +@router.get("/version") +async def get_system_version(): + """Get the system version""" + return {"system_version": system_actions.get_system_version()} 
+ + +@router.get("/pythonVersion") +async def get_python_version(): + """Get the Python version""" + return system_actions.get_python_version() + + +@router.get("/configuration/pull") +async def pull_configuration(): + """Pull the configuration""" + action_result = system_actions.pull_repository_changes() + if action_result.status == 0: + return action_result.dict() + raise HTTPException(status_code=500, detail=action_result.dict()) diff --git a/selfprivacy_api/rest/users.py b/selfprivacy_api/rest/users.py new file mode 100644 index 0000000..ab4c6c9 --- /dev/null +++ b/selfprivacy_api/rest/users.py @@ -0,0 +1,62 @@ +"""Users management module""" +from typing import Optional +from fastapi import APIRouter, Body, Depends, HTTPException +from pydantic import BaseModel + +import selfprivacy_api.actions.users as users_actions + +from selfprivacy_api.dependencies import get_token_header + +router = APIRouter( + prefix="/users", + tags=["users"], + dependencies=[Depends(get_token_header)], + responses={404: {"description": "Not found"}}, +) + + +@router.get("") +async def get_users(withMainUser: bool = False): + """Get the list of users""" + users: list[users_actions.UserDataUser] = users_actions.get_users( + exclude_primary=not withMainUser, exclude_root=True + ) + + return [user.username for user in users] + + +class UserInput(BaseModel): + """User input""" + + username: str + password: str + + +@router.post("", status_code=201) +async def create_user(user: UserInput): + try: + users_actions.create_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + raise HTTPException(status_code=400, detail=str(e)) + except users_actions.UsernameForbidden as e: + raise HTTPException(status_code=409, detail=str(e)) + except users_actions.UsernameNotAlphanumeric as e: + raise HTTPException(status_code=400, detail=str(e)) + except users_actions.UsernameTooLong as e: + raise HTTPException(status_code=400, detail=str(e)) + except 
users_actions.UserAlreadyExists as e: + raise HTTPException(status_code=409, detail=str(e)) + + return {"result": 0, "username": user.username} + + +@router.delete("/{username}") +async def delete_user(username: str): + try: + users_actions.delete_user(username) + except users_actions.UserNotFound as e: + raise HTTPException(status_code=404, detail=str(e)) + except users_actions.UserIsProtected as e: + raise HTTPException(status_code=400, detail=str(e)) + + return {"result": 0, "username": username} diff --git a/selfprivacy_api/restic_controller/tasks.py b/selfprivacy_api/restic_controller/tasks.py index f583d8b..32eb87d 100644 --- a/selfprivacy_api/restic_controller/tasks.py +++ b/selfprivacy_api/restic_controller/tasks.py @@ -1,8 +1,10 @@ """Tasks for the restic controller.""" from huey import crontab -from selfprivacy_api.utils.huey import huey +from selfprivacy_api.utils.huey import Huey from . import ResticController, ResticStates +huey = Huey() + @huey.task() def init_restic(): diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py new file mode 100644 index 0000000..21ba40a --- /dev/null +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -0,0 +1,163 @@ +"""Class representing Bitwarden service""" +import base64 +import subprocess +import typing + +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import Huey +from selfprivacy_api.utils.network import get_ip4 + +huey = Huey() + + +class Bitwarden(Service): + """Class 
representing Bitwarden service.""" + + @staticmethod + def get_id() -> str: + """Return service id.""" + return "bitwarden" + + @staticmethod + def get_display_name() -> str: + """Return service display name.""" + return "Bitwarden" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "Bitwarden is a password manager." + + @staticmethod + def get_svg_icon(self) -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + with open("selfprivacy_api/services/bitwarden/bitwarden.svg", "rb") as f: + return base64.b64encode(f.read()).decode("utf-8") + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("bitwarden", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + """ + Return Bitwarden status from systemd. + Use command return code to determine status. + + Return code 0 means service is running. + Return code 1 or 2 means service is in error stat. + Return code 3 means service is stopped. + Return code 4 means service is off. 
+ """ + return get_service_status("vaultwarden.service") + + @staticmethod + def enable(): + """Enable Bitwarden service.""" + with WriteUserData() as user_data: + if "bitwarden" not in user_data: + user_data["bitwarden"] = {} + user_data["bitwarden"]["enable"] = True + + @staticmethod + def disable(): + """Disable Bitwarden service.""" + with WriteUserData() as user_data: + if "bitwarden" not in user_data: + user_data["bitwarden"] = {} + user_data["bitwarden"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "vaultwarden.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "vaultwarden.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "vaultwarden.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + storage_usage += get_storage_usage("/var/lib/bitwarden") + storage_usage += get_storage_usage("/var/lib/bitwarden_rs") + return storage_usage + + @staticmethod + def get_location() -> str: + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("bitwarden", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + """Return list of DNS records for Bitwarden service.""" + return [ + ServiceDnsRecord( + type="A", + name="password", + content=get_ip4(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice): + job = Jobs.get_instance().add( + name="services.bitwarden.move", + description=f"Moving Bitwarden data to {volume.name}", + ) + + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="bitwarden", + bind_location="/var/lib/bitwarden", + group="vaultwarden", + 
owner="vaultwarden", + ), + FolderMoveNames( + name="bitwarden", + bind_location="/var/lib/bitwarden_rs", + group="vaultwarden", + owner="vaultwarden", + ), + ], + "bitwarden", + ) + + return job diff --git a/selfprivacy_api/services/bitwarden/bitwarden.svg b/selfprivacy_api/services/bitwarden/bitwarden.svg new file mode 100644 index 0000000..ced270c --- /dev/null +++ b/selfprivacy_api/services/bitwarden/bitwarden.svg @@ -0,0 +1,3 @@ + + + diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py new file mode 100644 index 0000000..f625f99 --- /dev/null +++ b/selfprivacy_api/services/generic_service_mover.py @@ -0,0 +1,238 @@ +"""Generic handler for moving services""" + +import base64 +import subprocess +import time +import typing +import pathlib +import shutil + +from pydantic import BaseModel +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.utils.huey import Huey +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus + +huey = Huey() + + +class FolderMoveNames(BaseModel): + name: str + bind_location: str + owner: str + group: str + + +@huey.task() +def move_service( + service: Service, + volume: BlockDevice, + job: Job, + folder_names: list[FolderMoveNames], + userdata_location: str, +): + """Move a service to another volume.""" + job = Jobs.get_instance().update( + job=job, + status_text="Performing pre-move checks...", + status=JobStatus.RUNNING, + ) + service_name = service.get_display_name() + with ReadUserData() as user_data: + if not user_data.get("useBinds", False): + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Server is not using binds.", + ) + return + # Check if we are on the same volume + old_volume = service.get_location() + if old_volume == volume.name: + Jobs.get_instance().update( + 
job=job, + status=JobStatus.ERROR, + error=f"{service_name} is already on this volume.", + ) + return + # Check if there is enough space on the new volume + if volume.fsavail < service.get_storage_usage(): + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Not enough space on the new volume.", + ) + return + # Make sure the volume is mounted + if f"/volumes/{volume.name}" not in volume.mountpoints: + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Volume is not mounted.", + ) + return + # Make sure current actual directory exists and if its user and group are correct + for folder in folder_names: + if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").exists(): + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error=f"{service_name} is not found.", + ) + return + if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").is_dir(): + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error=f"{service_name} is not a directory.", + ) + return + if ( + not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").owner() + == folder.owner + ): + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error=f"{service_name} owner is not {folder.owner}.", + ) + return + + # Stop service + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text=f"Stopping {service_name}...", + progress=5, + ) + service.stop() + # Wait for Nextcloud to stop, check every second + # If it does not stop in 30 seconds, abort + for _ in range(30): + if service.get_status() != ServiceStatus.RUNNING: + break + time.sleep(1) + else: + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error=f"{service_name} did not stop in 30 seconds.", + ) + return + + # Unmount old volume + Jobs.get_instance().update( + job=job, + status_text="Unmounting old folder...", + status=JobStatus.RUNNING, + progress=10, + ) + for folder in folder_names: + try: + subprocess.run( + 
["umount", folder.bind_location], + check=True, + ) + except subprocess.CalledProcessError: + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Unable to unmount old volume.", + ) + return + # Move data to new volume and set correct permissions + Jobs.get_instance().update( + job=job, + status_text="Moving data to new volume...", + status=JobStatus.RUNNING, + progress=20, + ) + current_progress = 20 + folder_percentage = 50 / len(folder_names) + for folder in folder_names: + shutil.move( + f"/volumes/{old_volume}/{folder.name}", + f"/volumes/{volume.name}/{folder.name}", + ) + Jobs.get_instance().update( + job=job, + status_text="Moving data to new volume...", + status=JobStatus.RUNNING, + progress=current_progress + folder_percentage, + ) + + Jobs.get_instance().update( + job=job, + status_text=f"Making sure {service_name} owns its files...", + status=JobStatus.RUNNING, + progress=70, + ) + for folder in folder_names: + try: + subprocess.run( + [ + "chown", + "-R", + f"{folder.owner}:f{folder.group}", + f"/volumes/{volume.name}/{folder.name}", + ], + check=True, + ) + except subprocess.CalledProcessError as error: + print(error.output) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + error=f"Unable to set ownership of new volume. {service_name} may not be able to access its files. 
Continuing anyway.", + ) + return + + # Mount new volume + Jobs.get_instance().update( + job=job, + status_text=f"Mounting {service_name} data...", + status=JobStatus.RUNNING, + progress=90, + ) + + for folder in folder_names: + try: + subprocess.run( + [ + "mount", + "--bind", + f"/volumes/{volume.name}/{folder.name}", + folder.bind_location, + ], + check=True, + ) + except subprocess.CalledProcessError as error: + print(error.output) + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Unable to mount new volume.", + ) + return + + # Update userdata + Jobs.get_instance().update( + job=job, + status_text="Finishing move...", + status=JobStatus.RUNNING, + progress=95, + ) + with WriteUserData() as user_data: + if userdata_location not in user_data: + user_data[userdata_location] = {} + user_data[userdata_location]["location"] = volume.name + # Start service + service.start() + Jobs.get_instance().update( + job=job, + status=JobStatus.FINISHED, + result=f"{service_name} moved successfully.", + status_text=f"Starting {service}...", + progress=100, + ) diff --git a/selfprivacy_api/services/generic_size_counter.py b/selfprivacy_api/services/generic_size_counter.py new file mode 100644 index 0000000..4a706fb --- /dev/null +++ b/selfprivacy_api/services/generic_size_counter.py @@ -0,0 +1,16 @@ +"""Generic size counter using pathlib""" +import pathlib + + +def get_storage_usage(path: str) -> int: + """ + Calculate the real storage usage of path and all subdirectories. + Calculate using pathlib. + Do not follow symlinks. 
+ """ + storage_usage = 0 + for iter_path in pathlib.Path(path).rglob("**/*"): + if iter_path.is_dir(): + continue + storage_usage += iter_path.stat().st_size + return storage_usage diff --git a/selfprivacy_api/services/generic_status_getter.py b/selfprivacy_api/services/generic_status_getter.py new file mode 100644 index 0000000..d53011e --- /dev/null +++ b/selfprivacy_api/services/generic_status_getter.py @@ -0,0 +1,29 @@ +"""Generic service status fetcher using systemctl""" +import subprocess +import typing + +from selfprivacy_api.services.service import ServiceStatus + + +def get_service_status(service: str) -> ServiceStatus: + """ + Return service status from systemd. + Use command return code to determine status. + + Return code 0 means service is running. + Return code 1 or 2 means service is in error stat. + Return code 3 means service is stopped. + Return code 4 means service is off. + """ + service_status = subprocess.Popen(["systemctl", "status", service]) + service_status.communicate()[0] + if service_status.returncode == 0: + return ServiceStatus.RUNNING + elif service_status.returncode == 1 or service_status.returncode == 2: + return ServiceStatus.ERROR + elif service_status.returncode == 3: + return ServiceStatus.STOPPED + elif service_status.returncode == 4: + return ServiceStatus.OFF + else: + return ServiceStatus.DEGRADED diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py new file mode 100644 index 0000000..9893bc8 --- /dev/null +++ b/selfprivacy_api/services/gitea/__init__.py @@ -0,0 +1,154 @@ +"""Class representing Bitwarden service""" +import base64 +import subprocess +import typing + +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status +from 
selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import Huey +from selfprivacy_api.utils.network import get_ip4 + +huey = Huey() + + +class Gitea(Service): + """Class representing Gitea service""" + + @staticmethod + def get_id() -> str: + """Return service id.""" + return "gitea" + + @staticmethod + def get_display_name() -> str: + """Return service display name.""" + return "Gitea" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "Gitea is a Git forge." + + @staticmethod + def get_svg_icon() -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + with open("selfprivacy_api/services/gitea/gitea.svg", "rb") as f: + return base64.b64encode(f.read()).decode("utf-8") + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("gitea", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + """ + Return Gitea status from systemd. + Use command return code to determine status. + Return code 0 means service is running. + Return code 1 or 2 means service is in error stat. + Return code 3 means service is stopped. + Return code 4 means service is off. 
+ """ + return get_service_status("gitea.service") + + @staticmethod + def enable(): + """Enable Gitea service.""" + with WriteUserData() as user_data: + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["enable"] = True + + @staticmethod + def disable(): + """Disable Gitea service.""" + with WriteUserData() as user_data: + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "gitea.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "gitea.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "gitea.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + storage_usage += get_storage_usage("/var/lib/gitea") + return storage_usage + + @staticmethod + def get_location() -> str: + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("gitea", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + return [ + ServiceDnsRecord( + type="A", + name="git", + content=get_ip4(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice): + job = Jobs.get_instance().add( + name="services.gitea.move", + description=f"Moving Gitea data to {volume.name}", + ) + + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="gitea", + bind_location="/var/lib/gitea", + group="gitea", + owner="gitea", + ), + ], + "bitwarden", + ) + + return job diff --git a/selfprivacy_api/services/gitea/gitea.svg b/selfprivacy_api/services/gitea/gitea.svg new file mode 100644 index 0000000..9ba8a76 --- /dev/null +++ 
"""Class representing Dovecot and Postfix services"""

import base64
import subprocess
import typing

from selfprivacy_api.jobs import Job, JobStatus, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
from selfprivacy_api.services.generic_size_counter import get_storage_usage
from selfprivacy_api.services.generic_status_getter import get_service_status
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_dkim_key, get_domain

# FIXME(review): kept from the original, but this name is immediately shadowed
# by `huey = Huey()` below — confirm and drop one of the two.
from selfprivacy_api.utils import huey
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.huey import Huey
from selfprivacy_api.utils.network import get_ip4

huey = Huey()


class MailServer(Service):
    """Class representing mail service"""

    @staticmethod
    def get_id() -> str:
        """Return the unique service id (also the userdata key)."""
        return "mailserver"

    @staticmethod
    def get_display_name() -> str:
        return "Mail Server"

    @staticmethod
    def get_description() -> str:
        return "E-Mail for company and family."

    @staticmethod
    def get_svg_icon() -> str:
        """Read SVG icon from file and return it as base64 encoded string."""
        with open("selfprivacy_api/services/mailserver/mailserver.svg", "rb") as f:
            return base64.b64encode(f.read()).decode("utf-8")

    @staticmethod
    def is_movable() -> bool:
        return True

    @staticmethod
    def is_required() -> bool:
        # Mail is a core service; it cannot be switched off.
        return True

    @staticmethod
    def is_enabled() -> bool:
        return True

    @staticmethod
    def get_status() -> ServiceStatus:
        """Combine Dovecot (IMAP) and Postfix (SMTP) states: RUNNING only when
        both run; any ERROR wins over STOPPED, STOPPED over OFF."""
        imap_status = get_service_status("dovecot2.service")
        smtp_status = get_service_status("postfix.service")

        if (
            imap_status == ServiceStatus.RUNNING
            and smtp_status == ServiceStatus.RUNNING
        ):
            return ServiceStatus.RUNNING
        elif imap_status == ServiceStatus.ERROR or smtp_status == ServiceStatus.ERROR:
            return ServiceStatus.ERROR
        elif (
            imap_status == ServiceStatus.STOPPED or smtp_status == ServiceStatus.STOPPED
        ):
            return ServiceStatus.STOPPED
        elif imap_status == ServiceStatus.OFF or smtp_status == ServiceStatus.OFF:
            return ServiceStatus.OFF
        else:
            return ServiceStatus.DEGRADED

    @staticmethod
    def enable():
        raise NotImplementedError("enable is not implemented for MailServer")

    @staticmethod
    def disable():
        raise NotImplementedError("disable is not implemented for MailServer")

    @staticmethod
    def stop():
        subprocess.run(["systemctl", "stop", "dovecot2.service"])
        subprocess.run(["systemctl", "stop", "postfix.service"])

    @staticmethod
    def start():
        subprocess.run(["systemctl", "start", "dovecot2.service"])
        subprocess.run(["systemctl", "start", "postfix.service"])

    @staticmethod
    def restart():
        subprocess.run(["systemctl", "restart", "dovecot2.service"])
        subprocess.run(["systemctl", "restart", "postfix.service"])

    @staticmethod
    def get_configuration():
        return {}

    @staticmethod
    def set_configuration(config_items):
        # FIXME(review): zero-argument super() inside a @staticmethod fails at
        # call time — same issue as the other services in this changeset.
        return super().set_configuration(config_items)

    @staticmethod
    def get_logs():
        return ""

    @staticmethod
    def get_storage_usage() -> int:
        """Return disk usage of the mail store, in bytes."""
        return get_storage_usage("/var/vmail")

    @staticmethod
    def get_location() -> str:
        """Return the volume name holding the data ("sda1" when not using binds)."""
        with ReadUserData() as user_data:
            if user_data.get("useBinds", False):
                return user_data.get("mailserver", {}).get("location", "sda1")
            else:
                return "sda1"

    @staticmethod
    def get_dns_records() -> typing.List[ServiceDnsRecord]:
        """DNS records needed for mail: MX, DMARC, SPF and DKIM.

        Returns [] until the DKIM key has been generated.
        """
        dkim_record = get_dkim_key()
        domain = get_domain()
        ip4 = get_ip4()

        if dkim_record is None:
            return []

        # BUG FIX: these records used `data=` while ServiceDnsRecord is
        # constructed with `content=` everywhere else in this changeset
        # (Bitwarden, Gitea, Nextcloud), so the record payload was lost.
        return [
            ServiceDnsRecord(
                type="MX", name=domain, content=domain, ttl=3600, priority=10
            ),
            ServiceDnsRecord(
                type="TXT", name="_dmarc", content="v=DMARC1; p=none", ttl=3600
            ),
            ServiceDnsRecord(
                type="TXT", name=domain, content=f"v=spf1 a mx ip4:{ip4} -all", ttl=3600
            ),
            ServiceDnsRecord(
                type="TXT", name="selector._domainkey", content=dkim_record, ttl=3600
            ),
        ]

    def move_to_volume(self, volume: BlockDevice):
        """Schedule a background move of the mail folders to `volume`."""
        job = Jobs.get_instance().add(
            name="services.mailserver.move",
            description=f"Moving mailserver data to {volume.name}",
        )

        move_service(
            self,
            volume,
            job,
            [
                FolderMoveNames(
                    name="vmail",
                    bind_location="/var/vmail",
                    group="virtualMail",
                    owner="virtualMail",
                ),
                FolderMoveNames(
                    name="sieve",
                    bind_location="/var/sieve",
                    group="virtualMail",
                    owner="virtualMail",
                ),
            ],
            "mailserver",
        )

        return job
selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.jobs import Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.utils.block_devices import BlockDevice -from selfprivacy_api.utils.huey import huey +from selfprivacy_api.utils.network import get_ip4 + class Nextcloud(Service): """Class representing Nextcloud service.""" + @staticmethod def get_id(self) -> str: """Return service id.""" return "nextcloud" - def get_display_name(self) -> str: + @staticmethod + def get_display_name() -> str: """Return service display name.""" return "Nextcloud" - def get_description(self) -> str: + @staticmethod + def get_description() -> str: """Return service description.""" return "Nextcloud is a cloud storage service that offers a web interface and a desktop client." - def get_svg_icon(self) -> str: + @staticmethod + def get_svg_icon() -> str: """Read SVG icon from file and return it as base64 encoded string.""" with open("selfprivacy_api/services/nextcloud/nextcloud.svg", "rb") as f: return base64.b64encode(f.read()).decode("utf-8") - def is_enabled(self) -> bool: + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: with ReadUserData() as user_data: return user_data.get("nextcloud", {}).get("enable", False) - def get_status(self) -> ServiceStatus: + @staticmethod + def get_status() -> ServiceStatus: """ Return Nextcloud status from systemd. Use command return code to determine status. @@ -46,72 +60,64 @@ class Nextcloud(Service): Return code 3 means service is stopped. 
Return code 4 means service is off. """ - service_status = subprocess.Popen( - ["systemctl", "status", "phpfpm-nextcloud.service"] - ) - service_status.communicate()[0] - if service_status.returncode == 0: - return ServiceStatus.RUNNING - elif service_status.returncode == 1 or service_status.returncode == 2: - return ServiceStatus.ERROR - elif service_status.returncode == 3: - return ServiceStatus.STOPPED - elif service_status.returncode == 4: - return ServiceStatus.OFF - else: - return ServiceStatus.DEGRADED + return get_service_status("phpfpm-nextcloud.service") - def enable(self): + @staticmethod + def enable(): """Enable Nextcloud service.""" with WriteUserData() as user_data: if "nextcloud" not in user_data: user_data["nextcloud"] = {} user_data["nextcloud"]["enable"] = True - def disable(self): + @staticmethod + def disable(): """Disable Nextcloud service.""" with WriteUserData() as user_data: if "nextcloud" not in user_data: user_data["nextcloud"] = {} user_data["nextcloud"]["enable"] = False - def stop(self): + @staticmethod + def stop(): """Stop Nextcloud service.""" subprocess.Popen(["systemctl", "stop", "phpfpm-nextcloud.service"]) - def start(self): + @staticmethod + def start(): """Start Nextcloud service.""" subprocess.Popen(["systemctl", "start", "phpfpm-nextcloud.service"]) - def restart(self): + @staticmethod + def restart(): """Restart Nextcloud service.""" subprocess.Popen(["systemctl", "restart", "phpfpm-nextcloud.service"]) - def get_configuration(self) -> dict: + @staticmethod + def get_configuration() -> dict: """Return Nextcloud configuration.""" return {} - def set_configuration(self, config_items): + @staticmethod + def set_configuration(config_items): return super().set_configuration(config_items) - def get_logs(self): + @staticmethod + def get_logs(): """Return Nextcloud logs.""" return "" - def get_storage_usage(self) -> int: + @staticmethod + def get_storage_usage() -> int: """ Calculate the real storage usage of /var/lib/nextcloud and 
all subdirectories. Calculate using pathlib. Do not follow symlinks. """ - storage_usage = 0 - for path in pathlib.Path("/var/lib/nextcloud").rglob("**/*"): - if path.is_dir(): - continue - storage_usage += path.stat().st_size - return storage_usage + return get_storage_usage("/var/lib/nextcloud") - def get_location(self) -> str: + @staticmethod + def get_location() -> str: """Get the name of disk where Nextcloud is installed.""" with ReadUserData() as user_data: if user_data.get("useBinds", False): @@ -119,185 +125,34 @@ class Nextcloud(Service): else: return "sda1" - def get_dns_records(self) -> typing.List[ServiceDnsRecord]: - return super().get_dns_records() + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + return [ + ServiceDnsRecord( + type="A", + name="cloud", + content=get_ip4(), + ttl=3600, + ), + ] def move_to_volume(self, volume: BlockDevice): job = Jobs.get_instance().add( name="services.nextcloud.move", description=f"Moving Nextcloud to volume {volume.name}", ) - move_nextcloud(self, volume, job) + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="nextcloud", + bind_location="/var/lib/nextcloud", + owner="nextcloud", + group="nextcloud", + ), + ], + "nextcloud", + ) return job - - -@huey.task() -def move_nextcloud(nextcloud: Nextcloud, volume: BlockDevice, job: Job): - """Move Nextcloud to another volume.""" - job = Jobs.get_instance().update( - job=job, - status_text="Performing pre-move checks...", - status=JobStatus.RUNNING, - ) - with ReadUserData() as user_data: - if not user_data.get("useBinds", False): - Jobs.get_instance().update( - job=job, - status=JobStatus.ERROR, - error="Server is not using binds.", - ) - return - # Check if we are on the same volume - old_location = nextcloud.get_location() - if old_location == volume.name: - Jobs.get_instance().update( - job=job, - status=JobStatus.ERROR, - error="Nextcloud is already on this volume.", - ) - return - # Check if there is enough space on the 
new volume - if volume.fsavail < nextcloud.get_storage_usage(): - Jobs.get_instance().update( - job=job, - status=JobStatus.ERROR, - error="Not enough space on the new volume.", - ) - return - # Make sure the volume is mounted - if f"/volumes/{volume.name}" not in volume.mountpoints: - Jobs.get_instance().update( - job=job, - status=JobStatus.ERROR, - error="Volume is not mounted.", - ) - return - # Make sure current actual directory exists - if not pathlib.Path(f"/volumes/{old_location}/nextcloud").exists(): - Jobs.get_instance().update( - job=job, - status=JobStatus.ERROR, - error="Nextcloud is not found.", - ) - return - - # Stop Nextcloud - Jobs.get_instance().update( - job=job, - status=JobStatus.RUNNING, - status_text="Stopping Nextcloud...", - progress=5, - ) - nextcloud.stop() - # Wait for Nextcloud to stop, check every second - # If it does not stop in 30 seconds, abort - for _ in range(30): - if nextcloud.get_status() != ServiceStatus.RUNNING: - break - time.sleep(1) - else: - Jobs.get_instance().update( - job=job, - status=JobStatus.ERROR, - error="Nextcloud did not stop in 30 seconds.", - ) - return - - # Unmount old volume - Jobs.get_instance().update( - job=job, - status_text="Unmounting old folder...", - status=JobStatus.RUNNING, - progress=10, - ) - try: - subprocess.run(["umount", "/var/lib/nextcloud"], check=True) - except subprocess.CalledProcessError: - Jobs.get_instance().update( - job=job, - status=JobStatus.ERROR, - error="Unable to unmount old volume.", - ) - return - # Move data to new volume and set correct permissions - Jobs.get_instance().update( - job=job, - status_text="Moving data to new volume...", - status=JobStatus.RUNNING, - progress=20, - ) - shutil.move( - f"/volumes/{old_location}/nextcloud", f"/volumes/{volume.name}/nextcloud" - ) - - Jobs.get_instance().update( - job=job, - status_text="Making sure Nextcloud owns its files...", - status=JobStatus.RUNNING, - progress=70, - ) - try: - subprocess.run( - [ - "chown", - "-R", - 
"nextcloud:nextcloud", - f"/volumes/{volume.name}/nextcloud", - ], - check=True, - ) - except subprocess.CalledProcessError as error: - print(error.output) - Jobs.get_instance().update( - job=job, - status=JobStatus.RUNNING, - error="Unable to set ownership of new volume. Nextcloud may not be able to access its files. Continuing anyway.", - ) - return - - # Mount new volume - Jobs.get_instance().update( - job=job, - status_text="Mounting Nextcloud data...", - status=JobStatus.RUNNING, - progress=90, - ) - try: - subprocess.run( - [ - "mount", - "--bind", - f"/volumes/{volume.name}/nextcloud", - "/var/lib/nextcloud", - ], - check=True, - ) - except subprocess.CalledProcessError as error: - print(error.output) - Jobs.get_instance().update( - job=job, - status=JobStatus.ERROR, - error="Unable to mount new volume.", - ) - return - - # Update userdata - Jobs.get_instance().update( - job=job, - status_text="Finishing move...", - status=JobStatus.RUNNING, - progress=95, - ) - with WriteUserData() as user_data: - if "nextcloud" not in user_data: - user_data["nextcloud"] = {} - user_data["nextcloud"]["location"] = volume.name - # Start Nextcloud - nextcloud.start() - Jobs.get_instance().update( - job=job, - status=JobStatus.FINISHED, - result="Nextcloud moved successfully.", - status_text="Starting Nextcloud...", - progress=100, - ) diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py new file mode 100644 index 0000000..9f1a9f6 --- /dev/null +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -0,0 +1,99 @@ +"""Class representing ocserv service.""" +import base64 +import subprocess +import typing +from selfprivacy_api.jobs import Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status +from selfprivacy_api.services.service import 
Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.network import get_ip4 + + +class Ocserv(Service): + """Class representing ocserv service.""" + + @staticmethod + def get_id() -> str: + return "ocserv" + + @staticmethod + def get_display_name() -> str: + return "OpenConnect VPN" + + @staticmethod + def get_description() -> str: + return "OpenConnect VPN to connect your devices and access the internet." + + @staticmethod + def get_svg_icon() -> str: + with open("selfprivacy_api/services/ocserv/ocserv.svg", "rb") as f: + return base64.b64encode(f.read()).decode("utf-8") + + @staticmethod + def is_movable() -> bool: + return False + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("ocserv", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + return get_service_status("ocserv.service") + + @staticmethod + def enable(): + with WriteUserData() as user_data: + if "ocserv" not in user_data: + user_data["ocserv"] = {} + user_data["ocserv"]["enable"] = True + + @staticmethod + def disable(): + with WriteUserData() as user_data: + if "ocserv" not in user_data: + user_data["ocserv"] = {} + user_data["ocserv"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "ocserv.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "ocserv.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "ocserv.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_location() -> str: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + return [] + + @staticmethod + def 
get_storage_usage() -> int: + return 0 + + def move_to_volume(self, volume: BlockDevice): + raise NotImplementedError("ocserv service is not movable") diff --git a/selfprivacy_api/services/ocserv/ocserv.svg b/selfprivacy_api/services/ocserv/ocserv.svg new file mode 100644 index 0000000..288f743 --- /dev/null +++ b/selfprivacy_api/services/ocserv/ocserv.svg @@ -0,0 +1,3 @@ + + + diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py new file mode 100644 index 0000000..c8b1bd5 --- /dev/null +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -0,0 +1,144 @@ +"""Class representing Pleroma service.""" +import base64 +import subprocess +import typing +from selfprivacy_api.jobs import Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.network import get_ip4 + + +class Pleroma(Service): + """Class representing Pleroma service.""" + + @staticmethod + def get_id() -> str: + return "pleroma" + + @staticmethod + def get_display_name() -> str: + return "Pleroma" + + @staticmethod + def get_description() -> str: + return "Pleroma is a microblogging service that offers a web interface and a desktop client."
+ + @staticmethod + def get_svg_icon() -> str: + with open("selfprivacy_api/services/pleroma/pleroma.svg", "rb") as f: + return base64.b64encode(f.read()).decode("utf-8") + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("pleroma", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + return get_service_status("pleroma.service") + + @staticmethod + def enable(): + with WriteUserData() as user_data: + if "pleroma" not in user_data: + user_data["pleroma"] = {} + user_data["pleroma"]["enable"] = True + + @staticmethod + def disable(): + with WriteUserData() as user_data: + if "pleroma" not in user_data: + user_data["pleroma"] = {} + user_data["pleroma"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "pleroma.service"]) + subprocess.run(["systemctl", "stop", "postgresql.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "pleroma.service"]) + subprocess.run(["systemctl", "start", "postgresql.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "pleroma.service"]) + subprocess.run(["systemctl", "restart", "postgresql.service"]) + + @staticmethod + def get_configuration(config_items): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + storage_usage += get_storage_usage("/var/lib/pleroma") + storage_usage += get_storage_usage("/var/lib/postgresql") + return storage_usage + + @staticmethod + def get_location() -> str: + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("pleroma", {}).get("location", "sda1") + else: + return "sda1" + + 
@staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + return [ + ServiceDnsRecord( + type="A", + name="social", + content=get_ip4(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice): + job = Jobs.get_instance().add( + name="services.pleroma.move", + description=f"Moving Pleroma to volume {volume.name}", + ) + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="pleroma", + bind_location="/var/lib/pleroma", + owner="pleroma", + group="pleroma", + ), + FolderMoveNames( + name="postgresql", + bind_location="/var/lib/postgresql", + owner="postgres", + group="postgres", + ), + ], + "pleroma", + ) + return job diff --git a/selfprivacy_api/services/pleroma/pleroma.svg b/selfprivacy_api/services/pleroma/pleroma.svg new file mode 100644 index 0000000..f87c438 --- /dev/null +++ b/selfprivacy_api/services/pleroma/pleroma.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 7c0b09e..43c4fbb 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -46,6 +46,14 @@ class Service(ABC): def get_svg_icon(self) -> str: pass + @abstractmethod + def is_movable() -> bool: + pass + + @abstractmethod + def is_required() -> bool: + pass + @abstractmethod def is_enabled(self) -> bool: pass diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index f512948..d059d38 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -5,6 +5,7 @@ from datetime import datetime, timedelta import re import typing +from pydantic import BaseModel from mnemonic import Mnemonic from . 
import ReadUserData, UserDataFiles, WriteUserData, parse_date @@ -96,11 +97,22 @@ def get_token_name(token): return None +class BasicTokenInfo(BaseModel): + """Token info""" + + name: str + date: datetime + + def get_tokens_info(): """Get all tokens info without tokens themselves""" with ReadUserData(UserDataFiles.TOKENS) as tokens: return [ - {"name": token["name"], "date": token["date"]} for token in tokens["tokens"] + BasicTokenInfo( + name=t["name"], + date=parse_date(t["date"]), + ) + for t in tokens["tokens"] ] diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index f3b0911..b33c7aa 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -170,7 +170,7 @@ class BlockDevices: "-J", "-b", "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", ] ) lsblk_output = lsblk_output.decode("utf-8") diff --git a/selfprivacy_api/utils/huey.py b/selfprivacy_api/utils/huey.py index 9803e7b..f03435e 100644 --- a/selfprivacy_api/utils/huey.py +++ b/selfprivacy_api/utils/huey.py @@ -1,4 +1,16 @@ """MiniHuey singleton.""" -from huey.contrib.mini import MiniHuey +from huey import SqliteHuey -huey = MiniHuey() +HUEY_DATABASE = "/etc/nixos/userdata/tasks.db" + +# Singleton instance containing the huey database. 
+class Huey: + """Huey singleton.""" + + __instance = None + + def __new__(cls): + """Create a new instance of the huey singleton.""" + if Huey.__instance is None: + Huey.__instance = SqliteHuey(HUEY_DATABASE) + return Huey.__instance diff --git a/selfprivacy_api/utils/migrate_to_binds.py b/selfprivacy_api/utils/migrate_to_binds.py index faac03b..0159cf1 100644 --- a/selfprivacy_api/utils/migrate_to_binds.py +++ b/selfprivacy_api/utils/migrate_to_binds.py @@ -7,10 +7,12 @@ from selfprivacy_api.services.nextcloud import Nextcloud from selfprivacy_api.utils import WriteUserData from selfprivacy_api.utils.block_devices import BlockDevices + class BindMigrationConfig: """Config for bind migration. For each service provide block device name. """ + email_block_device: str bitwarden_block_device: str gitea_block_device: str @@ -23,7 +25,7 @@ def migrate_to_binds(config: BindMigrationConfig): # Get block devices. block_devices = BlockDevices().get_block_devices() - block_device_names = [ device.name for device in block_devices ] + block_device_names = [device.name for device in block_devices] # Get all unique required block devices required_block_devices = [] @@ -80,7 +82,9 @@ def migrate_to_binds(config: BindMigrationConfig): # Move data from /var/lib/nextcloud to /volumes//nextcloud. # /var/lib/nextcloud is removed and /volumes//nextcloud is mounted as bind mount. nextcloud_data_path = pathlib.Path("/var/lib/nextcloud") - nextcloud_bind_path = pathlib.Path(f"/volumes/{config.nextcloud_block_device}/nextcloud") + nextcloud_bind_path = pathlib.Path( + f"/volumes/{config.nextcloud_block_device}/nextcloud" + ) if nextcloud_data_path.exists(): shutil.move(str(nextcloud_data_path), str(nextcloud_bind_path)) else: @@ -94,10 +98,15 @@ def migrate_to_binds(config: BindMigrationConfig): shutil.chown(nextcloud_data_path, user="nextcloud", group="nextcloud") # Mount nextcloud bind mount. 
- subprocess.run(["mount","--bind", str(nextcloud_bind_path), str(nextcloud_data_path)], check=True) + subprocess.run( + ["mount", "--bind", str(nextcloud_bind_path), str(nextcloud_data_path)], + check=True, + ) # Recursively chown all files in nextcloud bind mount. - subprocess.run(["chown", "-R", "nextcloud:nextcloud", str(nextcloud_data_path)], check=True) + subprocess.run( + ["chown", "-R", "nextcloud:nextcloud", str(nextcloud_data_path)], check=True + ) # Start Nextcloud Nextcloud().start() diff --git a/selfprivacy_api/utils/network.py b/selfprivacy_api/utils/network.py index 5081f0e..9b1ba99 100644 --- a/selfprivacy_api/utils/network.py +++ b/selfprivacy_api/utils/network.py @@ -2,9 +2,10 @@ """Network utils""" import subprocess import re +from typing import Optional -def get_ip4(): +def get_ip4() -> Optional[str]: """Get IPv4 address""" try: ip4 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( @@ -16,7 +17,7 @@ def get_ip4(): return ip4.group(1) if ip4 else None -def get_ip6(): +def get_ip6() -> Optional[str]: """Get IPv6 address""" try: ip6 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( diff --git a/shell.nix b/shell.nix index e754a11..0ccb99d 100644 --- a/shell.nix +++ b/shell.nix @@ -1,12 +1,8 @@ { pkgs ? 
import { } }: let sp-python = pkgs.python39.withPackages (p: with p; [ - flask - flask-restful setuptools portalocker - flask-swagger - flask-swagger-ui pytz pytest pytest-mock @@ -18,9 +14,10 @@ let pylint pydantic typing-extensions - flask-cors psutil black + fastapi + uvicorn (buildPythonPackage rec { pname = "strawberry-graphql"; version = "0.123.0"; @@ -32,11 +29,11 @@ let typing-extensions python-multipart python-dateutil - flask + # flask pydantic pygments poetry - flask-cors + # flask-cors (buildPythonPackage rec { pname = "graphql-core"; version = "3.2.0"; diff --git a/tests/conftest.py b/tests/conftest.py index fb31456..2eca0f6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,8 +2,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument import pytest -from flask import testing -from selfprivacy_api.app import create_app +from fastapi.testclient import TestClient @pytest.fixture @@ -16,66 +15,36 @@ def tokens_file(mocker, shared_datadir): @pytest.fixture -def app(): - """Flask application.""" - app = create_app( - { - "ENABLE_SWAGGER": "1", - } +def huey_database(mocker, shared_datadir): + """Mock huey database.""" + mock = mocker.patch( + "selfprivacy_api.utils.huey.HUEY_DATABASE", shared_datadir / "huey.db" ) - - yield app + return mock @pytest.fixture -def client(app, tokens_file): - """Flask unauthorized test client.""" - return app.test_client() +def client(tokens_file, huey_database): + from selfprivacy_api.app import app - -class AuthorizedClient(testing.FlaskClient): - """Flask authorized test client.""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.token = "TEST_TOKEN" - - def open(self, *args, **kwargs): - if "headers" not in kwargs: - kwargs["headers"] = {} - kwargs["headers"]["Authorization"] = f"Bearer {self.token}" - return super().open(*args, **kwargs) - - -class WrongAuthClient(testing.FlaskClient): - """Flask client with wrong token""" - - def __init__(self, *args, 
**kwargs): - super().__init__(*args, **kwargs) - self.token = "WRONG_TOKEN" - - def open(self, *args, **kwargs): - if "headers" not in kwargs: - kwargs["headers"] = {} - kwargs["headers"]["Authorization"] = f"Bearer {self.token}" - return super().open(*args, **kwargs) + return TestClient(app) @pytest.fixture -def authorized_client(app, tokens_file): +def authorized_client(tokens_file, huey_database): """Authorized test client fixture.""" - app.test_client_class = AuthorizedClient - return app.test_client() + from selfprivacy_api.app import app + + client = TestClient(app) + client.headers.update({"Authorization": "Bearer TEST_TOKEN"}) + return client @pytest.fixture -def wrong_auth_client(app, tokens_file): +def wrong_auth_client(tokens_file, huey_database): """Wrong token test client fixture.""" - app.test_client_class = WrongAuthClient - return app.test_client() + from selfprivacy_api.app import app - -@pytest.fixture -def runner(app, tokens_file): - """Flask test runner.""" - return app.test_cli_runner() + client = TestClient(app) + client.headers.update({"Authorization": "Bearer WRONG_TOKEN"}) + return client diff --git a/tests/services/test_mailserver.py b/tests/services/test_mailserver.py index a9e5f12..36cf615 100644 --- a/tests/services/test_mailserver.py +++ b/tests/services/test_mailserver.py @@ -25,7 +25,7 @@ class NoFileMock(ProcessMock): def mock_subproccess_popen(mocker): mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) mocker.patch( - "selfprivacy_api.resources.services.mailserver.get_domain", + "selfprivacy_api.rest.services.get_domain", autospec=True, return_value="example.com", ) @@ -37,7 +37,7 @@ def mock_subproccess_popen(mocker): def mock_no_file(mocker): mock = mocker.patch("subprocess.Popen", autospec=True, return_value=NoFileMock) mocker.patch( - "selfprivacy_api.resources.services.mailserver.get_domain", + "selfprivacy_api.rest.services.get_domain", autospec=True, return_value="example.com", ) @@ -67,7 +67,7 
@@ def test_dkim_key(authorized_client, mock_subproccess_popen): """Test DKIM key""" response = authorized_client.get("/services/mailserver/dkim") assert response.status_code == 200 - assert base64.b64decode(response.data) == b"I am a DKIM key" + assert base64.b64decode(response.text) == b"I am a DKIM key" assert mock_subproccess_popen.call_args[0][0] == [ "cat", "/var/dkim/example.com.selector.txt", diff --git a/tests/services/test_restic.py b/tests/services/test_restic.py index 913362f..9502be5 100644 --- a/tests/services/test_restic.py +++ b/tests/services/test_restic.py @@ -43,7 +43,7 @@ class ResticControllerMock: @pytest.fixture def mock_restic_controller(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerMock, ) @@ -60,7 +60,7 @@ class ResticControllerMockNoKey: @pytest.fixture def mock_restic_controller_no_key(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerMockNoKey, ) @@ -77,7 +77,7 @@ class ResticControllerNotInitialized: @pytest.fixture def mock_restic_controller_not_initialized(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerNotInitialized, ) @@ -94,7 +94,7 @@ class ResticControllerInitializing: @pytest.fixture def mock_restic_controller_initializing(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerInitializing, ) @@ -111,7 +111,7 @@ class ResticControllerBackingUp: @pytest.fixture def mock_restic_controller_backing_up(mocker): mock = mocker.patch( - 
"selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerBackingUp, ) @@ -128,7 +128,7 @@ class ResticControllerError: @pytest.fixture def mock_restic_controller_error(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerError, ) @@ -145,7 +145,7 @@ class ResticControllerRestoring: @pytest.fixture def mock_restic_controller_restoring(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerRestoring, ) @@ -154,9 +154,7 @@ def mock_restic_controller_restoring(mocker): @pytest.fixture def mock_restic_tasks(mocker): - mock = mocker.patch( - "selfprivacy_api.resources.services.restic.restic_tasks", autospec=True - ) + mock = mocker.patch("selfprivacy_api.rest.services.restic_tasks", autospec=True) return mock @@ -197,7 +195,7 @@ def test_get_snapshots_unauthorized(client, mock_restic_controller, mock_restic_ def test_get_snapshots(authorized_client, mock_restic_controller, mock_restic_tasks): response = authorized_client.get("/services/restic/backup/list") assert response.status_code == 200 - assert response.get_json() == MOCKED_SNAPSHOTS + assert response.json() == MOCKED_SNAPSHOTS def test_create_backup_unauthorized(client, mock_restic_controller, mock_restic_tasks): @@ -247,7 +245,7 @@ def test_check_backup_status( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "INITIALIZED", "progress": 0, "error_message": None, @@ -259,7 +257,7 @@ def test_check_backup_status_no_key( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 
- assert response.get_json() == { + assert response.json() == { "status": "NO_KEY", "progress": 0, "error_message": None, @@ -271,7 +269,7 @@ def test_check_backup_status_not_initialized( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "NOT_INITIALIZED", "progress": 0, "error_message": None, @@ -283,7 +281,7 @@ def test_check_backup_status_initializing( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "INITIALIZING", "progress": 0, "error_message": None, @@ -295,7 +293,7 @@ def test_check_backup_status_backing_up( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "BACKING_UP", "progress": 0.42, "error_message": None, @@ -307,7 +305,7 @@ def test_check_backup_status_error( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "ERROR", "progress": 0, "error_message": "Error message", @@ -319,7 +317,7 @@ def test_check_backup_status_restoring( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "RESTORING", "progress": 0, "error_message": None, @@ -346,7 +344,7 @@ def test_backup_restore_without_backup_id( authorized_client, mock_restic_controller, mock_restic_tasks ): response = authorized_client.put("/services/restic/backup/restore", json={}) - assert response.status_code == 400 + assert response.status_code == 422 assert mock_restic_tasks.restore_from_backup.call_count == 0 @@ -440,7 +438,7 @@ def 
test_set_backblaze_config_without_arguments( authorized_client, mock_restic_controller, mock_restic_tasks, some_settings ): response = authorized_client.put("/services/restic/backblaze/config") - assert response.status_code == 400 + assert response.status_code == 422 assert mock_restic_tasks.update_keys_from_userdata.call_count == 0 @@ -451,7 +449,7 @@ def test_set_backblaze_config_without_all_values( "/services/restic/backblaze/config", json={"accountId": "123", "applicationKey": "456"}, ) - assert response.status_code == 400 + assert response.status_code == 422 assert mock_restic_tasks.update_keys_from_userdata.call_count == 0 diff --git a/tests/services/test_services.py b/tests/services/test_services.py index aed48fb..03ae104 100644 --- a/tests/services/test_services.py +++ b/tests/services/test_services.py @@ -9,7 +9,7 @@ def read_json(file_path): def call_args_asserts(mocked_object): - assert mocked_object.call_count == 8 + assert mocked_object.call_count == 7 assert mocked_object.call_args_list[0][0][0] == [ "systemctl", "status", @@ -23,29 +23,24 @@ def call_args_asserts(mocked_object): assert mocked_object.call_args_list[2][0][0] == [ "systemctl", "status", - "nginx.service", + "vaultwarden.service", ] assert mocked_object.call_args_list[3][0][0] == [ "systemctl", "status", - "vaultwarden.service", + "gitea.service", ] assert mocked_object.call_args_list[4][0][0] == [ "systemctl", "status", - "gitea.service", + "phpfpm-nextcloud.service", ] assert mocked_object.call_args_list[5][0][0] == [ - "systemctl", - "status", - "phpfpm-nextcloud.service", - ] - assert mocked_object.call_args_list[6][0][0] == [ "systemctl", "status", "ocserv.service", ] - assert mocked_object.call_args_list[7][0][0] == [ + assert mocked_object.call_args_list[6][0][0] == [ "systemctl", "status", "pleroma.service", @@ -104,7 +99,7 @@ def test_illegal_methods(authorized_client, mock_subproccess_popen): def test_dkim_key(authorized_client, mock_subproccess_popen): response = 
authorized_client.get("/services/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "imap": 0, "smtp": 0, "http": 0, @@ -120,10 +115,10 @@ def test_dkim_key(authorized_client, mock_subproccess_popen): def test_no_dkim_key(authorized_client, mock_broken_service): response = authorized_client.get("/services/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "imap": 3, "smtp": 3, - "http": 3, + "http": 0, "bitwarden": 3, "gitea": 3, "nextcloud": 3, diff --git a/tests/services/test_ssh.py b/tests/services/test_ssh.py index 5975811..a17bdab 100644 --- a/tests/services/test_ssh.py +++ b/tests/services/test_ssh.py @@ -95,14 +95,18 @@ def some_users(mocker, datadir): ## TEST 401 ###################################################### -@pytest.mark.parametrize( - "endpoint", ["ssh", "ssh/enable", "ssh/key/send", "ssh/keys/user"] -) +@pytest.mark.parametrize("endpoint", ["ssh/enable", "ssh/keys/user"]) def test_unauthorized(client, ssh_off, endpoint): response = client.post(f"/services/{endpoint}") assert response.status_code == 401 +@pytest.mark.parametrize("endpoint", ["ssh", "ssh/key/send"]) +def test_unauthorized_put(client, ssh_off, endpoint): + response = client.put(f"/services/{endpoint}") + assert response.status_code == 401 + + ## TEST ENABLE ###################################################### @@ -133,31 +137,31 @@ def test_legacy_enable_when_enabled(authorized_client, ssh_on): def test_get_current_settings_ssh_off(authorized_client, ssh_off): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": False, "passwordAuthentication": True} + assert response.json() == {"enable": False, "passwordAuthentication": True} def test_get_current_settings_ssh_on(authorized_client, ssh_on): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == 
{"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} def test_get_current_settings_all_off(authorized_client, all_off): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": False, "passwordAuthentication": False} + assert response.json() == {"enable": False, "passwordAuthentication": False} def test_get_current_settings_undefined(authorized_client, undefined_settings): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} def test_get_current_settings_mostly_undefined(authorized_client, undefined_values): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} ## PUT ON /ssh ###################################################### @@ -275,29 +279,22 @@ def test_add_invalid_root_key(authorized_client, ssh_on): ## /ssh/keys/{user} ###################################################### -def test_add_root_key_via_wrong_endpoint(authorized_client, ssh_on): - response = authorized_client.post( - "/services/ssh/keys/root", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 400 - - def test_get_root_key(authorized_client, root_and_admin_have_keys): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == ["ssh-ed25519 KEY test@pc"] + assert response.json() == ["ssh-ed25519 KEY test@pc"] def test_get_root_key_when_none(authorized_client, ssh_on): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == [] + assert 
response.json() == [] def test_get_root_key_on_undefined(authorized_client, undefined_settings): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_delete_root_key(authorized_client, root_and_admin_have_keys): @@ -310,6 +307,10 @@ def test_delete_root_key(authorized_client, root_and_admin_have_keys): not in read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ "ssh" ] + or read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ + "rootKeys" + ] + == [] ) @@ -330,19 +331,19 @@ def test_delete_root_key_on_undefined(authorized_client, undefined_settings): "/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"} ) assert response.status_code == 404 - assert read_json(undefined_settings / "undefined.json")["ssh"]["rootKeys"] == [] + assert "ssh" not in read_json(undefined_settings / "undefined.json") def test_get_admin_key(authorized_client, root_and_admin_have_keys): response = authorized_client.get("/services/ssh/keys/tester") assert response.status_code == 200 - assert response.json == ["ssh-rsa KEY test@pc"] + assert response.json() == ["ssh-rsa KEY test@pc"] def test_get_admin_key_when_none(authorized_client, ssh_on): response = authorized_client.get("/services/ssh/keys/tester") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_delete_admin_key(authorized_client, root_and_admin_have_keys): @@ -371,7 +372,7 @@ def test_delete_admin_key_on_undefined(authorized_client, undefined_settings): "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} ) assert response.status_code == 404 - assert read_json(undefined_settings / "undefined.json")["sshKeys"] == [] + assert "sshKeys" not in read_json(undefined_settings / "undefined.json") def test_add_admin_key(authorized_client, ssh_on): @@ -418,9 +419,9 @@ def 
test_get_user_key(authorized_client, some_users, user): response = authorized_client.get(f"/services/ssh/keys/user{user}") assert response.status_code == 200 if user == 1: - assert response.json == ["ssh-rsa KEY user1@pc"] + assert response.json() == ["ssh-rsa KEY user1@pc"] else: - assert response.json == [] + assert response.json() == [] def test_get_keys_of_nonexistent_user(authorized_client, some_users): @@ -483,7 +484,13 @@ def test_delete_nonexistent_user_key(authorized_client, some_users, user): f"/services/ssh/keys/user{user}", json={"public_key": "ssh-rsa KEY user1@pc"} ) assert response.status_code == 404 - assert read_json(some_users / "some_users.json")["users"][user - 1]["sshKeys"] == [] + if user == 2: + assert ( + read_json(some_users / "some_users.json")["users"][user - 1]["sshKeys"] + == [] + ) + if user == 3: + "sshKeys" not in read_json(some_users / "some_users.json")["users"][user - 1] def test_add_keys_of_nonexistent_user(authorized_client, some_users): diff --git a/tests/test_auth.py b/tests/test_auth.py index d209c9c..5430e3a 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -36,11 +36,11 @@ DATE_FORMATS = [ def test_get_tokens_info(authorized_client, tokens_file): response = authorized_client.get("/auth/tokens") assert response.status_code == 200 - assert response.json == [ - {"name": "test_token", "date": "2022-01-14 08:31:10.789314", "is_caller": True}, + assert response.json() == [ + {"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True}, { "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", + "date": "2022-01-14T08:31:10.789314", "is_caller": False, }, ] @@ -98,7 +98,7 @@ def test_refresh_token_unauthorized(client, tokens_file): def test_refresh_token(authorized_client, tokens_file): response = authorized_client.post("/auth/tokens") assert response.status_code == 200 - new_token = response.json["token"] + new_token = response.json()["token"] assert read_json(tokens_file)["tokens"][0]["token"] 
== new_token @@ -106,7 +106,7 @@ def test_refresh_token(authorized_client, tokens_file): def test_get_new_device_auth_token_unauthorized(client, tokens_file): - response = client.get("/auth/new_device") + response = client.post("/auth/new_device") assert response.status_code == 401 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -114,19 +114,19 @@ def test_get_new_device_auth_token_unauthorized(client, tokens_file): def test_get_new_device_auth_token(authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token def test_get_and_delete_new_device_token(authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = authorized_client.delete( - "/auth/new_device", json={"token": response.json["token"]} + "/auth/new_device", json={"token": response.json()["token"]} ) assert response.status_code == 200 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -141,15 +141,15 @@ def test_delete_token_unauthenticated(client, tokens_file): def test_get_and_authorize_new_device(client, authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = 
Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json["token"] + assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" @@ -165,19 +165,19 @@ def test_authorize_new_device_with_invalid_token(client, tokens_file): def test_get_and_authorize_used_token(client, authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json["token"] + assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 404 @@ -187,8 +187,8 @@ def test_get_and_authorize_token_after_12_minutes( ): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert 
"token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token file_data = read_json(tokens_file) @@ -199,7 +199,7 @@ def test_get_and_authorize_token_after_12_minutes( response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 404 @@ -209,7 +209,7 @@ def test_authorize_without_token(client, tokens_file): "/auth/new_device/authorize", json={"device": "new_device"}, ) - assert response.status_code == 400 + assert response.status_code == 422 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -245,7 +245,7 @@ def test_get_recovery_token_status_unauthorized(client, tokens_file): def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": False, "valid": False, "date": None, @@ -259,8 +259,8 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): # Generate token without expiration and uses_left response = authorized_client.post("/auth/recovery_token") assert response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert read_json(tokens_file)["recovery_token"]["token"] == token @@ -274,9 +274,9 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): ) # Try to get token status - response = client.get("/auth/recovery_token") + response = 
authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -290,7 +290,7 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): json={"token": mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" @@ -300,7 +300,7 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): json={"token": mnemonic_token, "device": "recovery_device2"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" @@ -318,8 +318,8 @@ def test_generate_recovery_token_with_expiration_date( json={"expiration": expiration_date_str}, ) assert response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert read_json(tokens_file)["recovery_token"]["token"] == token assert datetime.datetime.strptime( @@ -336,9 +336,9 @@ def test_generate_recovery_token_with_expiration_date( ) # Try to get token status - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -352,7 +352,7 @@ def test_generate_recovery_token_with_expiration_date( json={"token": 
mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" @@ -362,7 +362,7 @@ def test_generate_recovery_token_with_expiration_date( json={"token": mnemonic_token, "device": "recovery_device2"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" @@ -381,9 +381,9 @@ def test_generate_recovery_token_with_expiration_date( assert read_json(tokens_file)["tokens"] == new_data["tokens"] # Get the status of the token - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": False, "date": time_generated, @@ -397,7 +397,7 @@ def test_generate_recovery_token_with_expiration_in_the_past( authorized_client, tokens_file, timeformat ): # Server must return 400 if expiration date is in the past - expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) + expiration_date = datetime.datetime.utcnow() - datetime.timedelta(minutes=5) expiration_date_str = expiration_date.strftime(timeformat) response = authorized_client.post( "/auth/recovery_token", @@ -416,7 +416,7 @@ def test_generate_recovery_token_with_invalid_time_format( "/auth/recovery_token", json={"expiration": expiration_date}, ) - assert response.status_code == 400 + assert response.status_code == 422 assert "recovery_token" not in read_json(tokens_file) @@ -429,8 +429,8 @@ def test_generate_recovery_token_with_limited_uses( json={"uses": 2}, ) assert 
response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert read_json(tokens_file)["recovery_token"]["token"] == token assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 @@ -445,9 +445,9 @@ def test_generate_recovery_token_with_limited_uses( ) # Try to get token status - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -461,16 +461,16 @@ def test_generate_recovery_token_with_limited_uses( json={"token": mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" assert read_json(tokens_file)["recovery_token"]["uses_left"] == 1 # Get the status of the token - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -484,14 +484,14 @@ def test_generate_recovery_token_with_limited_uses( json={"token": mnemonic_token, "device": "recovery_device2"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" # Get the status of the token - response = client.get("/auth/recovery_token") + 
response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": False, "date": time_generated, diff --git a/tests/test_common.py b/tests/test_common.py index db60d84..e581bd4 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -9,19 +9,13 @@ from selfprivacy_api.utils import WriteUserData, ReadUserData def test_get_api_version(authorized_client): response = authorized_client.get("/api/version") assert response.status_code == 200 - assert "version" in response.get_json() + assert "version" in response.json() def test_get_api_version_unauthorized(client): response = client.get("/api/version") assert response.status_code == 200 - assert "version" in response.get_json() - - -def test_get_swagger_json(authorized_client): - response = authorized_client.get("/api/swagger.json") - assert response.status_code == 200 - assert "swagger" in response.get_json() + assert "version" in response.json() def test_read_invalid_user_data(): diff --git a/tests/test_graphql/_test_system.py b/tests/test_graphql/_test_system.py index 476846a..738f1f4 100644 --- a/tests/test_graphql/_test_system.py +++ b/tests/test_graphql/_test_system.py @@ -144,7 +144,7 @@ def test_graphql_get_python_version_wrong_auth( }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_python_version(authorized_client, mock_subprocess_check_output): @@ -156,8 +156,8 @@ def test_graphql_get_python_version(authorized_client, mock_subprocess_check_out }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["system"]["info"]["pythonVersion"] == "Testing Linux" + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["info"]["pythonVersion"] == "Testing Linux" assert mock_subprocess_check_output.call_count == 1 
assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"] @@ -181,7 +181,7 @@ def test_graphql_get_system_version_unauthorized( ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_check_output.call_count == 0 @@ -196,9 +196,9 @@ def test_graphql_get_system_version(authorized_client, mock_subprocess_check_out ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["system"]["info"]["systemVersion"] == "Testing Linux" + assert response.json()["data"]["system"]["info"]["systemVersion"] == "Testing Linux" assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] @@ -258,11 +258,13 @@ def test_graphql_get_domain( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["system"]["domainInfo"]["domain"] == "test.tld" - assert response.json["data"]["system"]["domainInfo"]["hostname"] == "test-instance" - assert response.json["data"]["system"]["domainInfo"]["provider"] == "HETZNER" - dns_records = response.json["data"]["system"]["domainInfo"]["requiredDnsRecords"] + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["domainInfo"]["domain"] == "test.tld" + assert ( + response.json()["data"]["system"]["domainInfo"]["hostname"] == "test-instance" + ) + assert response.json()["data"]["system"]["domainInfo"]["provider"] == "HETZNER" + dns_records = response.json()["data"]["system"]["domainInfo"]["requiredDnsRecords"] assert is_dns_record_in_array(dns_records, dns_record()) assert is_dns_record_in_array(dns_records, dns_record(type="AAAA")) assert is_dns_record_in_array(dns_records, dns_record(name="api.test.tld")) @@ -339,7 +341,7 @@ def test_graphql_get_timezone_unauthorized(client, turned_on): }, ) 
assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_timezone(authorized_client, turned_on): @@ -351,8 +353,8 @@ def test_graphql_get_timezone(authorized_client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): @@ -364,8 +366,10 @@ def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["system"]["settings"]["timezone"] == "Europe/Uzhgorod" + assert response.json().get("data") is not None + assert ( + response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Uzhgorod" + ) API_CHANGE_TIMEZONE_MUTATION = """ @@ -392,7 +396,7 @@ def test_graphql_change_timezone_unauthorized(client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_change_timezone(authorized_client, turned_on): @@ -407,11 +411,11 @@ def test_graphql_change_timezone(authorized_client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is True - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 200 - assert response.json["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is True + assert 
response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 200 + assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Helsinki" @@ -427,11 +431,11 @@ def test_graphql_change_timezone_on_undefined(authorized_client, undefined_confi }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is True - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 200 - assert response.json["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is True + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 200 + assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" assert ( read_json(undefined_config / "undefined.json")["timezone"] == "Europe/Helsinki" ) @@ -449,11 +453,11 @@ def test_graphql_change_timezone_without_timezone(authorized_client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is False - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 400 - assert response.json["data"]["changeTimezone"]["timezone"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is False + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["changeTimezone"]["timezone"] is None 
assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -469,11 +473,11 @@ def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is False - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 400 - assert response.json["data"]["changeTimezone"]["timezone"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is False + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["changeTimezone"]["timezone"] is None assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -496,7 +500,7 @@ def test_graphql_get_auto_upgrade_unauthorized(client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_auto_upgrade(authorized_client, turned_on): @@ -508,9 +512,11 @@ def test_graphql_get_auto_upgrade(authorized_client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is True + assert response.json().get("data") is not None + assert ( + response.json()["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True + ) + assert response.json()["data"]["settings"]["autoUpgrade"]["allowReboot"] is True def test_graphql_get_auto_upgrade_on_undefined(authorized_client, undefined_config): @@ -522,9 +528,11 @@ def test_graphql_get_auto_upgrade_on_undefined(authorized_client, undefined_conf }, ) assert 
response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert response.json().get("data") is not None + assert ( + response.json()["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True + ) + assert response.json()["data"]["settings"]["autoUpgrade"]["allowReboot"] is False def test_graphql_get_auto_upgrade_without_vlaues(authorized_client, no_values): @@ -536,9 +544,11 @@ def test_graphql_get_auto_upgrade_without_vlaues(authorized_client, no_values): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert response.json().get("data") is not None + assert ( + response.json()["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True + ) + assert response.json()["data"]["settings"]["autoUpgrade"]["allowReboot"] is False def test_graphql_get_auto_upgrade_turned_off(authorized_client, turned_off): @@ -550,11 +560,11 @@ def test_graphql_get_auto_upgrade_turned_off(authorized_client, turned_off): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None assert ( - response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is False + response.json()["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is False ) - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert response.json()["data"]["settings"]["autoUpgrade"]["allowReboot"] is False API_CHANGE_AUTO_UPGRADE_SETTINGS = """ @@ -585,7 +595,7 @@ def test_graphql_change_auto_upgrade_unauthorized(client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") 
is None + assert response.json().get("data") is None def test_graphql_change_auto_upgrade(authorized_client, turned_on): @@ -603,14 +613,15 @@ def test_graphql_change_auto_upgrade(authorized_client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["enable"] is False assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["allowReboot"] is True @@ -630,14 +641,15 @@ def test_graphql_change_auto_upgrade_on_undefined(authorized_client, undefined_c }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert 
response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert ( read_json(undefined_config / "undefined.json")["autoUpgrade"]["enable"] is False ) @@ -662,14 +674,15 @@ def test_graphql_change_auto_upgrade_without_vlaues(authorized_client, no_values }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is True ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(no_values / "no_values.json")["autoUpgrade"]["enable"] is True assert read_json(no_values / "no_values.json")["autoUpgrade"]["allowReboot"] is True @@ -689,14 +702,15 @@ def test_graphql_change_auto_upgrade_turned_off(authorized_client, turned_off): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] 
is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is True ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True @@ -717,14 +731,15 @@ def test_grphql_change_auto_upgrade_without_enable(authorized_client, turned_off }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert 
response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True @@ -747,14 +762,15 @@ def test_graphql_change_auto_upgrade_without_allow_reboot( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is True ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False @@ -773,14 +789,15 @@ def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert 
response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False @@ -807,7 +824,7 @@ def test_graphql_pull_system_configuration_unauthorized(client, mock_subprocess_ ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -823,10 +840,10 @@ def test_graphql_pull_system_configuration( ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["pullRepositoryChanges"]["success"] is True - assert response.json["data"]["pullRepositoryChanges"]["message"] is not None - assert response.json["data"]["pullRepositoryChanges"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["pullRepositoryChanges"]["success"] is True + assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None + assert response.json()["data"]["pullRepositoryChanges"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == ["git", "pull"] @@ -848,10 +865,10 @@ def test_graphql_pull_system_broken_repo( ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert 
response.json["data"]["pullRepositoryChanges"]["success"] is False - assert response.json["data"]["pullRepositoryChanges"]["message"] is not None - assert response.json["data"]["pullRepositoryChanges"]["code"] == 500 + assert response.json().get("data") is not None + assert response.json()["data"]["pullRepositoryChanges"]["success"] is False + assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None + assert response.json()["data"]["pullRepositoryChanges"]["code"] == 500 assert mock_broken_service.call_count == 1 assert mock_os_chdir.call_count == 2 diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py index 6343d8f..16c7c4d 100644 --- a/tests/test_graphql/test_api.py +++ b/tests/test_graphql/test_api.py @@ -24,7 +24,7 @@ TOKENS_FILE_CONTETS = { def test_graphql_get_entire_api_data(authorized_client, tokens_file): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_api_query( @@ -33,25 +33,25 @@ def test_graphql_get_entire_api_data(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert "version" in response.get_json()["data"]["api"] - assert response.json["data"]["api"]["devices"] is not None - assert len(response.json["data"]["api"]["devices"]) == 2 + assert response.json().get("data") is not None + assert "version" in response.json()["data"]["api"] + assert response.json()["data"]["api"]["devices"] is not None + assert len(response.json()["data"]["api"]["devices"]) == 2 assert ( - response.json["data"]["api"]["devices"][0]["creationDate"] + response.json()["data"]["api"]["devices"][0]["creationDate"] == "2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][0]["isCaller"] is True - assert response.json["data"]["api"]["devices"][0]["name"] == "test_token" + assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True + assert 
response.json()["data"]["api"]["devices"][0]["name"] == "test_token" assert ( - response.json["data"]["api"]["devices"][1]["creationDate"] + response.json()["data"]["api"]["devices"][1]["creationDate"] == "2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][1]["isCaller"] is False - assert response.json["data"]["api"]["devices"][1]["name"] == "test_token2" - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is False - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False + assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 627d06a..d8dc974 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -31,35 +31,35 @@ devices { def test_graphql_tokens_info(authorized_client, tokens_file): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_DEVICES_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert 
response.json["data"]["api"]["devices"] is not None - assert len(response.json["data"]["api"]["devices"]) == 2 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["devices"] is not None + assert len(response.json()["data"]["api"]["devices"]) == 2 assert ( - response.json["data"]["api"]["devices"][0]["creationDate"] + response.json()["data"]["api"]["devices"][0]["creationDate"] == "2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][0]["isCaller"] is True - assert response.json["data"]["api"]["devices"][0]["name"] == "test_token" + assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True + assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token" assert ( - response.json["data"]["api"]["devices"][1]["creationDate"] + response.json()["data"]["api"]["devices"][1]["creationDate"] == "2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][1]["isCaller"] is False - assert response.json["data"]["api"]["devices"][1]["name"] == "test_token2" + assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False + assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" def test_graphql_tokens_info_unauthorized(client, tokens_file): - response = client.get( + response = client.post( "/graphql", json={"query": generate_api_query([API_DEVICES_QUERY])}, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None DELETE_TOKEN_MUTATION = """ @@ -84,7 +84,7 @@ def test_graphql_delete_token_unauthorized(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_delete_token(authorized_client, tokens_file): @@ -98,10 +98,10 @@ def test_graphql_delete_token(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert 
response.json["data"]["deleteDeviceApiToken"]["success"] is True - assert response.json["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json["data"]["deleteDeviceApiToken"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is True + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 200 assert read_json(tokens_file) == { "tokens": [ { @@ -124,10 +124,10 @@ def test_graphql_delete_self_token(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["deleteDeviceApiToken"]["success"] is False - assert response.json["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json["data"]["deleteDeviceApiToken"]["code"] == 400 + assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 400 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -142,10 +142,10 @@ def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["deleteDeviceApiToken"]["success"] is False - assert response.json["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json["data"]["deleteDeviceApiToken"]["code"] == 404 + assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 404 assert read_json(tokens_file) == 
TOKENS_FILE_CONTETS @@ -167,7 +167,7 @@ def test_graphql_refresh_token_unauthorized(client, tokens_file): json={"query": REFRESH_TOKEN_MUTATION}, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_refresh_token(authorized_client, tokens_file): @@ -176,12 +176,12 @@ def test_graphql_refresh_token(authorized_client, tokens_file): json={"query": REFRESH_TOKEN_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["refreshDeviceApiToken"]["success"] is True - assert response.json["data"]["refreshDeviceApiToken"]["message"] is not None - assert response.json["data"]["refreshDeviceApiToken"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["refreshDeviceApiToken"]["success"] is True + assert response.json()["data"]["refreshDeviceApiToken"]["message"] is not None + assert response.json()["data"]["refreshDeviceApiToken"]["code"] == 200 assert read_json(tokens_file)["tokens"][0] == { - "token": response.json["data"]["refreshDeviceApiToken"]["token"], + "token": response.json()["data"]["refreshDeviceApiToken"]["token"], "name": "test_token", "date": "2022-01-14 08:31:10.789314", } @@ -205,7 +205,7 @@ def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): @@ -214,14 +214,16 @@ def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert 
response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + ) token = ( Mnemonic(language="english") - .to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == token @@ -249,7 +251,7 @@ def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): @@ -258,14 +260,16 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + ) token = ( Mnemonic(language="english") - 
.to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == token @@ -274,10 +278,10 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): json={"query": INVALIDATE_NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["invalidateNewDeviceApiKey"]["success"] is True - assert response.json["data"]["invalidateNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["invalidateNewDeviceApiKey"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["invalidateNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["invalidateNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["invalidateNewDeviceApiKey"]["code"] == 200 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -299,11 +303,11 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - mnemonic_key = response.json["data"]["getNewDeviceApiKey"]["key"] + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert 
read_json(tokens_file)["new_device"]["token"] == key @@ -320,11 +324,13 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is True - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 - token = response.json["data"]["authorizeWithNewDeviceApiKey"]["token"] + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] assert read_json(tokens_file)["tokens"][2]["token"] == token assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" @@ -343,10 +349,12 @@ def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -356,11 +364,11 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi json={"query": 
NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - mnemonic_key = response.json["data"]["getNewDeviceApiKey"]["key"] + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key @@ -377,13 +385,15 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is True - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 assert ( read_json(tokens_file)["tokens"][2]["token"] - == response.json["data"]["authorizeWithNewDeviceApiKey"]["token"] + == response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] ) assert read_json(tokens_file)["tokens"][2]["name"] == "new_token" @@ -400,10 +410,12 @@ def test_graphql_get_and_authorize_used_key(client, 
authorized_client, tokens_fi }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file)["tokens"].__len__() == 3 @@ -415,14 +427,16 @@ def test_graphql_get_and_authorize_key_after_12_minutes( json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + ) key = ( Mnemonic(language="english") - .to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == key @@ -446,10 +460,12 @@ def test_graphql_get_and_authorize_key_after_12_minutes( }, ) assert response.status_code == 200 - assert response.json.get("data") is 
not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 def test_graphql_authorize_without_token(client, tokens_file): @@ -465,4 +481,4 @@ def test_graphql_authorize_without_token(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index be0fdff..c5e229e 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -37,22 +37,22 @@ def test_graphql_recovery_key_status_unauthorized(client, tokens_file): json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_file): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is False - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is None - assert 
response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None API_RECOVERY_KEY_GENERATE_MUTATION = """ @@ -86,18 +86,19 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None assert ( - response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + == 18 ) assert read_json(tokens_file)["recovery_token"] is not None time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None - key = response.json["data"]["getNewRecoveryApiKey"]["key"] + key = response.json()["data"]["getNewRecoveryApiKey"]["key"] assert ( 
datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) @@ -105,20 +106,20 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): ) # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"][ + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"][ "creationDate" ] == time_generated.replace("Z", "") - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None # Try to use token response = client.post( @@ -134,13 +135,13 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert 
response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] ) assert read_json(tokens_file)["tokens"][2]["name"] == "new_test_token" @@ -159,13 +160,13 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] ) assert read_json(tokens_file)["tokens"][3]["name"] == "new_test_token2" @@ -188,17 +189,18 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert 
response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None assert ( - response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + == 18 ) assert read_json(tokens_file)["recovery_token"] is not None - key = response.json["data"]["getNewRecoveryApiKey"]["key"] + key = response.json()["data"]["getNewRecoveryApiKey"]["key"] assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str assert read_json(tokens_file)["recovery_token"]["token"] == mnemonic_to_hex(key) @@ -211,23 +213,23 @@ def test_graphql_generate_recovery_key_with_expiration_date( ) # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"][ + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"][ "creationDate" ] == time_generated.replace("Z", "") assert ( - response.json["data"]["api"]["recoveryKey"]["expirationDate"] + 
response.json()["data"]["api"]["recoveryKey"]["expirationDate"] == expiration_date_str ) - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None # Try to use token response = authorized_client.post( @@ -243,13 +245,13 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] ) @@ -267,13 +269,13 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert 
response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] ) @@ -296,30 +298,32 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is False - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 404 - assert response.json["data"]["useRecoveryApiKey"]["token"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is False + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is None assert read_json(tokens_file)["tokens"] == new_data["tokens"] # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False assert ( - response.json["data"]["api"]["recoveryKey"]["expirationDate"] + 
response.json()["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + ) + assert ( + response.json()["data"]["api"]["recoveryKey"]["expirationDate"] == new_data["recovery_token"]["expiration"] ) - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None def test_graphql_generate_recovery_key_with_expiration_in_the_past( @@ -340,11 +344,11 @@ def test_graphql_generate_recovery_key_with_expiration_in_the_past( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None assert "recovery_token" not in read_json(tokens_file) @@ -366,7 +370,7 @@ def test_graphql_generate_recovery_key_with_invalid_time_format( }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert "recovery_token" not in read_json(tokens_file) @@ -388,31 +392,31 @@ def test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert 
response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None - mnemonic_key = response.json["data"]["getNewRecoveryApiKey"]["key"] + mnemonic_key = response.json()["data"]["getNewRecoveryApiKey"]["key"] key = mnemonic_to_hex(mnemonic_key) assert read_json(tokens_file)["recovery_token"]["token"] == key assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 2 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 2 # Try to use token response = authorized_client.post( @@ -428,25 +432,25 @@ def 
test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 1 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 1 # Try to use token response = 
authorized_client.post( @@ -462,25 +466,25 @@ def test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 0 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert 
response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 0 # Try to use token response = authorized_client.post( @@ -496,11 +500,11 @@ def test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is False - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 404 - assert response.json["data"]["useRecoveryApiKey"]["token"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is False + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is None def test_graphql_generate_recovery_key_with_negative_uses( @@ -519,11 +523,11 @@ def test_graphql_generate_recovery_key_with_negative_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): @@ -540,8 +544,8 @@ def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_ }, ) assert response.status_code 
== 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None diff --git a/tests/test_graphql/test_api_version.py b/tests/test_graphql/test_api_version.py index 8f76035..64bcc36 100644 --- a/tests/test_graphql/test_api_version.py +++ b/tests/test_graphql/test_api_version.py @@ -8,18 +8,18 @@ API_VERSION_QUERY = "version" def test_graphql_get_api_version(authorized_client): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_VERSION_QUERY])}, ) assert response.status_code == 200 - assert "version" in response.get_json()["data"]["api"] + assert "version" in response.json()["data"]["api"] def test_graphql_api_version_unauthorized(client): - response = client.get( + response = client.post( "/graphql", json={"query": generate_api_query([API_VERSION_QUERY])}, ) assert response.status_code == 200 - assert "version" in response.get_json()["data"]["api"] + assert "version" in response.json()["data"]["api"] diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 7b48c83..4831692 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -71,7 +71,7 @@ def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert 
response.json().get("data") is None def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_popen): @@ -88,14 +88,14 @@ def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 201 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is True - assert response.json["data"]["addSshKey"]["user"]["username"] == "user1" - assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["addSshKey"]["user"]["username"] == "user1" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-rsa KEY user1@pc", "ssh-rsa KEY test_key@pc", ] @@ -115,14 +115,14 @@ def test_graphql_add_root_ssh_key(authorized_client, some_users, mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 201 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is True - assert response.json["data"]["addSshKey"]["user"]["username"] == "root" - assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["addSshKey"]["user"]["username"] == "root" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ 
"ssh-ed25519 KEY test@pc", "ssh-rsa KEY test_key@pc", ] @@ -142,14 +142,14 @@ def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 201 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is True - assert response.json["data"]["addSshKey"]["user"]["username"] == "tester" - assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["addSshKey"]["user"]["username"] == "tester" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-rsa KEY test@pc", "ssh-rsa KEY test_key@pc", ] @@ -169,11 +169,11 @@ def test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 400 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is False + assert response.json()["data"]["addSshKey"]["code"] == 400 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is False def test_graphql_add_ssh_key_nonexistent_user( @@ -192,11 +192,11 @@ def test_graphql_add_ssh_key_nonexistent_user( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 404 - assert 
response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is False + assert response.json()["data"]["addSshKey"]["code"] == 404 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is False API_REMOVE_SSH_KEY_MUTATION = """ @@ -228,7 +228,7 @@ def test_graphql_remove_ssh_key_unauthorized(client, some_users, mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_popen): @@ -245,14 +245,14 @@ def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_p }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 200 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True - assert response.json["data"]["removeSshKey"]["user"]["username"] == "user1" - assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert response.json()["data"]["removeSshKey"]["user"]["username"] == "user1" + assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_root_ssh_key( @@ -271,14 +271,14 @@ def test_graphql_remove_root_ssh_key( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 200 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert 
response.json["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True - assert response.json["data"]["removeSshKey"]["user"]["username"] == "root" - assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert response.json()["data"]["removeSshKey"]["user"]["username"] == "root" + assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_main_ssh_key( @@ -297,14 +297,14 @@ def test_graphql_remove_main_ssh_key( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 200 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True - assert response.json["data"]["removeSshKey"]["user"]["username"] == "tester" - assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert response.json()["data"]["removeSshKey"]["user"]["username"] == "tester" + assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_nonexistent_ssh_key( @@ -323,11 +323,11 @@ def test_graphql_remove_nonexistent_ssh_key( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 404 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is False + assert 
response.json()["data"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is False def test_graphql_remove_ssh_key_nonexistent_user( @@ -346,8 +346,8 @@ def test_graphql_remove_ssh_key_nonexistent_user( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 404 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is False + assert response.json()["data"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is False diff --git a/tests/test_graphql/test_system_nixos_tasks.py b/tests/test_graphql/test_system_nixos_tasks.py index 601c353..3e823b6 100644 --- a/tests/test_graphql/test_system_nixos_tasks.py +++ b/tests/test_graphql/test_system_nixos_tasks.py @@ -72,7 +72,7 @@ def test_graphql_system_rebuild_unauthorized(client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -85,10 +85,10 @@ def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["runSystemRebuild"]["success"] is True - assert response.json["data"]["runSystemRebuild"]["message"] is not None - assert response.json["data"]["runSystemRebuild"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["runSystemRebuild"]["success"] is True + assert response.json()["data"]["runSystemRebuild"]["message"] is not None + assert 
response.json()["data"]["runSystemRebuild"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -117,7 +117,7 @@ def test_graphql_system_upgrade_unauthorized(client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -130,10 +130,10 @@ def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["runSystemUpgrade"]["success"] is True - assert response.json["data"]["runSystemUpgrade"]["message"] is not None - assert response.json["data"]["runSystemUpgrade"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["runSystemUpgrade"]["success"] is True + assert response.json()["data"]["runSystemUpgrade"]["message"] is not None + assert response.json()["data"]["runSystemUpgrade"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -162,7 +162,7 @@ def test_graphql_system_rollback_unauthorized(client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -175,10 +175,10 @@ def test_graphql_system_rollback(authorized_client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["runSystemRollback"]["success"] is True - assert response.json["data"]["runSystemRollback"]["message"] is not None - assert response.json["data"]["runSystemRollback"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["runSystemRollback"]["success"] is True + 
assert response.json()["data"]["runSystemRollback"]["message"] is not None + assert response.json()["data"]["runSystemRollback"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -207,7 +207,7 @@ def test_graphql_reboot_system_unauthorized(client, mock_subprocess_popen): ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -221,11 +221,11 @@ def test_graphql_reboot_system(authorized_client, mock_subprocess_popen): ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["rebootSystem"]["success"] is True - assert response.json["data"]["rebootSystem"]["message"] is not None - assert response.json["data"]["rebootSystem"]["code"] == 200 + assert response.json()["data"]["rebootSystem"]["success"] is True + assert response.json()["data"]["rebootSystem"]["message"] is not None + assert response.json()["data"]["rebootSystem"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == ["reboot"] diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index afae1da..c36dcb2 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -119,53 +119,53 @@ allUsers { def test_graphql_get_users_unauthorized(client, some_users, mock_subprocess_popen): """Test wrong auth""" - response = client.get( + response = client.post( "/graphql", json={ "query": generate_users_query([API_USERS_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_some_users(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", 
json={ "query": generate_users_query([API_USERS_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert len(response.json["data"]["users"]["allUsers"]) == 4 - assert response.json["data"]["users"]["allUsers"][0]["username"] == "user1" - assert response.json["data"]["users"]["allUsers"][0]["sshKeys"] == [ + assert response.json().get("data") is not None + assert len(response.json()["data"]["users"]["allUsers"]) == 4 + assert response.json()["data"]["users"]["allUsers"][0]["username"] == "user1" + assert response.json()["data"]["users"]["allUsers"][0]["sshKeys"] == [ "ssh-rsa KEY user1@pc" ] - assert response.json["data"]["users"]["allUsers"][1]["username"] == "user2" - assert response.json["data"]["users"]["allUsers"][1]["sshKeys"] == [] + assert response.json()["data"]["users"]["allUsers"][1]["username"] == "user2" + assert response.json()["data"]["users"]["allUsers"][1]["sshKeys"] == [] - assert response.json["data"]["users"]["allUsers"][3]["username"] == "tester" - assert response.json["data"]["users"]["allUsers"][3]["sshKeys"] == [ + assert response.json()["data"]["users"]["allUsers"][3]["username"] == "tester" + assert response.json()["data"]["users"]["allUsers"][3]["sshKeys"] == [ "ssh-rsa KEY test@pc" ] def test_graphql_get_no_users(authorized_client, no_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_users_query([API_USERS_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["allUsers"]) == 1 - assert response.json["data"]["users"]["allUsers"][0]["username"] == "tester" - assert response.json["data"]["users"]["allUsers"][0]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["allUsers"]) == 1 + assert response.json()["data"]["users"]["allUsers"][0]["username"] == "tester" + 
assert response.json()["data"]["users"]["allUsers"][0]["sshKeys"] == [ "ssh-rsa KEY test@pc" ] @@ -183,7 +183,7 @@ query TestUsers($username: String!) { def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_popen): - response = client.get( + response = client.post( "/graphql", json={ "query": API_GET_USERS, @@ -193,12 +193,12 @@ def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_pop }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -208,17 +208,17 @@ def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert response.json["data"]["users"]["getUser"]["username"] == "user1" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "user1" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ "ssh-rsa KEY user1@pc" ] def test_graphql_get_some_user(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -228,15 +228,15 @@ def test_graphql_get_some_user(authorized_client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert 
response.json["data"]["users"]["getUser"]["username"] == "user2" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [] + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "user2" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [] def test_graphql_get_root_user(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -246,17 +246,17 @@ def test_graphql_get_root_user(authorized_client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert response.json["data"]["users"]["getUser"]["username"] == "root" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "root" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ "ssh-ed25519 KEY test@pc" ] def test_graphql_get_main_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -266,11 +266,11 @@ def test_graphql_get_main_user(authorized_client, one_user, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert response.json["data"]["users"]["getUser"]["username"] == "tester" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == 
"tester" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ "ssh-rsa KEY test@pc" ] @@ -278,7 +278,7 @@ def test_graphql_get_main_user(authorized_client, one_user, mock_subprocess_pope def test_graphql_get_nonexistent_user( authorized_client, one_user, mock_subprocess_popen ): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -288,9 +288,9 @@ def test_graphql_get_nonexistent_user( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["users"]["getUser"] is None + assert response.json()["data"]["users"]["getUser"] is None API_CREATE_USERS_MUTATION = """ @@ -322,7 +322,7 @@ def test_graphql_add_user_unauthorize(client, one_user, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): @@ -339,14 +339,14 @@ def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 201 - assert response.json["data"]["createUser"]["success"] is True + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 201 + assert response.json()["data"]["createUser"]["success"] is True - assert response.json["data"]["createUser"]["user"]["username"] == "user2" - assert response.json["data"]["createUser"]["user"]["sshKeys"] == [] + assert response.json()["data"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] def 
test_graphql_add_undefined_settings( @@ -365,14 +365,14 @@ def test_graphql_add_undefined_settings( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 201 - assert response.json["data"]["createUser"]["success"] is True + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 201 + assert response.json()["data"]["createUser"]["success"] is True - assert response.json["data"]["createUser"]["user"]["username"] == "user2" - assert response.json["data"]["createUser"]["user"]["sshKeys"] == [] + assert response.json()["data"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] def test_graphql_add_without_password( @@ -391,13 +391,13 @@ def test_graphql_add_without_password( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_popen): @@ -414,13 +414,13 @@ def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_p }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None 
- assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None @pytest.mark.parametrize("username", invalid_usernames) @@ -440,13 +440,13 @@ def test_graphql_add_system_username( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 409 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 409 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_popen): @@ -463,15 +463,15 @@ def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 409 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 409 + assert response.json()["data"]["createUser"]["success"] is 
False - assert response.json["data"]["createUser"]["user"]["username"] == "user1" + assert response.json()["data"]["createUser"]["user"]["username"] == "user1" assert ( - response.json["data"]["createUser"]["user"]["sshKeys"][0] + response.json()["data"]["createUser"]["user"]["sshKeys"][0] == "ssh-rsa KEY user1@pc" ) @@ -490,15 +490,15 @@ def test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 409 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 409 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"]["username"] == "tester" + assert response.json()["data"]["createUser"]["user"]["username"] == "tester" assert ( - response.json["data"]["createUser"]["user"]["sshKeys"][0] + response.json()["data"]["createUser"]["user"]["sshKeys"][0] == "ssh-rsa KEY test@pc" ) @@ -517,13 +517,13 @@ def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert 
response.json()["data"]["createUser"]["user"] is None @pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "^-^"]) @@ -543,13 +543,13 @@ def test_graphql_add_invalid_username( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None API_DELETE_USER_MUTATION = """ @@ -572,7 +572,7 @@ def test_graphql_delete_user_unauthorized(client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_popen): @@ -584,11 +584,11 @@ def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["deleteUser"]["code"] == 200 - assert response.json["data"]["deleteUser"]["message"] is not None - assert response.json["data"]["deleteUser"]["success"] is True + assert response.json()["data"]["deleteUser"]["code"] == 200 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is True @pytest.mark.parametrize("username", ["", "def"]) @@ -603,11 +603,11 @@ def test_graphql_delete_nonexistent_users( }, ) assert response.status_code == 200 - assert
response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["deleteUser"]["code"] == 404 - assert response.json["data"]["deleteUser"]["message"] is not None - assert response.json["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["deleteUser"]["code"] == 404 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is False @pytest.mark.parametrize("username", invalid_usernames) @@ -622,14 +622,14 @@ def test_graphql_delete_system_users( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None assert ( - response.json["data"]["deleteUser"]["code"] == 404 - or response.json["data"]["deleteUser"]["code"] == 400 + response.json()["data"]["deleteUser"]["code"] == 404 + or response.json()["data"]["deleteUser"]["code"] == 400 ) - assert response.json["data"]["deleteUser"]["message"] is not None - assert response.json["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is False def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess_popen): @@ -641,11 +641,11 @@ def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["deleteUser"]["code"] == 400 - assert response.json["data"]["deleteUser"]["message"] is not None - assert response.json["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["deleteUser"]["code"] == 400 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is False API_UPDATE_USER_MUTATION = """ @@ -677,7 
+677,7 @@ def test_graphql_update_user_unauthorized(client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_update_user(authorized_client, some_users, mock_subprocess_popen): @@ -694,14 +694,14 @@ def test_graphql_update_user(authorized_client, some_users, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["updateUser"]["code"] == 200 - assert response.json["data"]["updateUser"]["message"] is not None - assert response.json["data"]["updateUser"]["success"] is True + assert response.json()["data"]["updateUser"]["code"] == 200 + assert response.json()["data"]["updateUser"]["message"] is not None + assert response.json()["data"]["updateUser"]["success"] is True - assert response.json["data"]["updateUser"]["user"]["username"] == "user1" - assert response.json["data"]["updateUser"]["user"]["sshKeys"] == [ + assert response.json()["data"]["updateUser"]["user"]["username"] == "user1" + assert response.json()["data"]["updateUser"]["user"]["sshKeys"] == [ "ssh-rsa KEY user1@pc" ] assert mock_subprocess_popen.call_count == 1 @@ -723,11 +723,11 @@ def test_graphql_update_nonexistent_user( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["updateUser"]["code"] == 404 - assert response.json["data"]["updateUser"]["message"] is not None - assert response.json["data"]["updateUser"]["success"] is False + assert response.json()["data"]["updateUser"]["code"] == 404 + assert response.json()["data"]["updateUser"]["message"] is not None + assert response.json()["data"]["updateUser"]["success"] is False - assert response.json["data"]["updateUser"]["user"] is None + assert 
response.json()["data"]["updateUser"]["user"] is None assert mock_subprocess_popen.call_count == 1 diff --git a/tests/test_system.py b/tests/test_system.py index b9c8649..90c1499 100644 --- a/tests/test_system.py +++ b/tests/test_system.py @@ -123,13 +123,13 @@ def test_get_timezone_unauthorized(client, turned_on): def test_get_timezone(authorized_client, turned_on): response = authorized_client.get("/system/configuration/timezone") assert response.status_code == 200 - assert response.get_json() == "Europe/Moscow" + assert response.json() == "Europe/Moscow" def test_get_timezone_on_undefined(authorized_client, undefined_config): response = authorized_client.get("/system/configuration/timezone") assert response.status_code == 200 - assert response.get_json() == "Europe/Uzhgorod" + assert response.json() == "Europe/Uzhgorod" def test_put_timezone_unauthorized(client, turned_on): @@ -159,7 +159,7 @@ def test_put_timezone_on_undefined(authorized_client, undefined_config): def test_put_timezone_without_timezone(authorized_client, turned_on): response = authorized_client.put("/system/configuration/timezone", json={}) - assert response.status_code == 400 + assert response.status_code == 422 assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -182,7 +182,7 @@ def test_get_auto_upgrade_unauthorized(client, turned_on): def test_get_auto_upgrade(authorized_client, turned_on): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": True, "allowReboot": True, } @@ -191,7 +191,7 @@ def test_get_auto_upgrade(authorized_client, turned_on): def test_get_auto_upgrade_on_undefined(authorized_client, undefined_config): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": True, "allowReboot": False, } @@ -200,7 
+200,7 @@ def test_get_auto_upgrade_on_undefined(authorized_client, undefined_config): def test_get_auto_upgrade_without_values(authorized_client, no_values): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": True, "allowReboot": False, } @@ -209,7 +209,7 @@ def test_get_auto_upgrade_without_values(authorized_client, no_values): def test_get_auto_upgrade_turned_off(authorized_client, turned_off): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": False, "allowReboot": False, } @@ -357,7 +357,7 @@ def test_get_system_version_unauthorized(client, mock_subprocess_check_output): def test_get_system_version(authorized_client, mock_subprocess_check_output): response = authorized_client.get("/system/version") assert response.status_code == 200 - assert response.get_json() == {"system_version": "Testing Linux"} + assert response.json() == {"system_version": "Testing Linux"} assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] @@ -384,7 +384,7 @@ def test_get_python_version_unauthorized(client, mock_subprocess_check_output): def test_get_python_version(authorized_client, mock_subprocess_check_output): response = authorized_client.get("/system/pythonVersion") assert response.status_code == 200 - assert response.get_json() == "Testing Linux" + assert response.json() == "Testing Linux" assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"] diff --git a/tests/test_users.py b/tests/test_users.py index 9374ef2..ebb3eff 100644 --- a/tests/test_users.py +++ b/tests/test_users.py @@ -121,31 +121,31 @@ def test_get_users_unauthorized(client, some_users, mock_subprocess_popen): def 
test_get_some_users(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.get("/users") assert response.status_code == 200 - assert response.json == ["user1", "user2", "user3"] + assert response.json() == ["user1", "user2", "user3"] def test_get_one_user(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.get("/users") assert response.status_code == 200 - assert response.json == ["user1"] + assert response.json() == ["user1"] def test_get_one_user_with_main(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.get("/users?withMainUser=true") assert response.status_code == 200 - assert response.json == ["tester", "user1"] + assert response.json().sort() == ["tester", "user1"].sort() def test_get_no_users(authorized_client, no_users, mock_subprocess_popen): response = authorized_client.get("/users") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_get_no_users_with_main(authorized_client, no_users, mock_subprocess_popen): response = authorized_client.get("/users?withMainUser=true") assert response.status_code == 200 - assert response.json == ["tester"] + assert response.json() == ["tester"] def test_get_undefined_users( @@ -153,7 +153,7 @@ def test_get_undefined_users( ): response = authorized_client.get("/users") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_post_users_unauthorized(client, some_users, mock_subprocess_popen): @@ -174,6 +174,7 @@ def test_post_one_user(authorized_client, one_user, mock_subprocess_popen): }, { "username": "user4", + "sshKeys": [], "hashedPassword": "NEW_HASHED", }, ] @@ -181,19 +182,19 @@ def test_post_one_user(authorized_client, one_user, mock_subprocess_popen): def test_post_without_username(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.post("/users", json={"password": "password"}) - assert 
response.status_code == 400 + assert response.status_code == 422 def test_post_without_password(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.post("/users", json={"username": "user4"}) - assert response.status_code == 400 + assert response.status_code == 422 def test_post_without_username_and_password( authorized_client, one_user, mock_subprocess_popen ): response = authorized_client.post("/users", json={}) - assert response.status_code == 400 + assert response.status_code == 422 @pytest.mark.parametrize("username", invalid_usernames) @@ -226,7 +227,7 @@ def test_post_user_to_undefined_users( ) assert response.status_code == 201 assert read_json(undefined_settings / "undefined.json")["users"] == [ - {"username": "user4", "hashedPassword": "NEW_HASHED"} + {"username": "user4", "sshKeys": [], "hashedPassword": "NEW_HASHED"} ] @@ -279,11 +280,6 @@ def test_delete_main_user(authorized_client, some_users, mock_subprocess_popen): assert response.status_code == 400 -def test_delete_without_argument(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.delete("/users/") - assert response.status_code == 404 - - def test_delete_just_delete(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.delete("/users") assert response.status_code == 405 -- 2.42.0 From a96f6bd06733199cf4af3b05acdce1f1d43c85c2 Mon Sep 17 00:00:00 2001 From: inexcode Date: Thu, 11 Aug 2022 23:11:00 +0400 Subject: [PATCH 09/50] Linting --- .pylintrc | 1 + selfprivacy_api/actions/api_tokens.py | 9 +-- selfprivacy_api/graphql/queries/system.py | 5 +- .../services/bitwarden/__init__.py | 2 +- .../services/generic_service_mover.py | 4 +- .../services/mailserver/__init__.py | 10 ++-- selfprivacy_api/services/service.py | 59 +++++++++++++------ selfprivacy_api/utils/network.py | 8 +-- 8 files changed, 55 insertions(+), 43 deletions(-) diff --git a/.pylintrc b/.pylintrc index c6d73d8..9135ea9 100644 --- a/.pylintrc
+++ b/.pylintrc @@ -1,2 +1,3 @@ [MASTER] init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))" +extension-pkg-whitelist=pydantic diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 3a57ef7..61c695d 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -3,6 +3,7 @@ from datetime import datetime from typing import Optional from pydantic import BaseModel + from selfprivacy_api.utils.auth import ( delete_token, generate_recovery_token, @@ -42,14 +43,10 @@ def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCa class NotFoundException(Exception): """Not found exception""" - pass - class CannotDeleteCallerException(Exception): """Cannot delete caller exception""" - pass - def delete_api_token(caller_token: str, token_name: str) -> None: """Delete the token""" @@ -98,14 +95,10 @@ def get_api_recovery_token_status() -> RecoveryTokenStatus: class InvalidExpirationDate(Exception): """Invalid expiration date exception""" - pass - class InvalidUsesLeft(Exception): """Invalid uses left exception""" - pass - def get_new_api_recovery_key( expiration_date: Optional[datetime] = None, uses_left: Optional[int] = None diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index b5fb6a6..decb3f0 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -75,14 +75,13 @@ def get_ssh_settings() -> SshSettings: return SshSettings( enable=settings.enable, password_authentication=settings.passwordAuthentication, - root_ssh_keys=settings.rootSshKeys, + root_ssh_keys=settings.rootKeys, ) def get_system_timezone() -> str: """Get system timezone""" - with ReadUserData() as user_data: - return system_actions.get_timezone() + return system_actions.get_timezone() @strawberry.type diff --git 
a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 21ba40a..599e0e4 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -35,7 +35,7 @@ class Bitwarden(Service): return "Bitwarden is a password manager." @staticmethod - def get_svg_icon(self) -> str: + def get_svg_icon() -> str: """Read SVG icon from file and return it as base64 encoded string.""" with open("selfprivacy_api/services/bitwarden/bitwarden.svg", "rb") as f: return base64.b64encode(f.read()).decode("utf-8") diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index f625f99..1a9c0ac 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -1,9 +1,7 @@ """Generic handler for moving services""" -import base64 import subprocess import time -import typing import pathlib import shutil @@ -149,7 +147,7 @@ def move_service( progress=20, ) current_progress = 20 - folder_percentage = 50 / len(folder_names) + folder_percentage = 50 // len(folder_names) for folder in folder_names: shutil.move( f"/volumes/{old_volume}/{folder.name}", diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index c97864d..3eaf8c6 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -120,8 +120,8 @@ class MailServer(Service): @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: - dkim_record = get_dkim_key() domain = get_domain() + dkim_record = get_dkim_key(domain) ip4 = get_ip4() if dkim_record is None: @@ -129,16 +129,16 @@ class MailServer(Service): return [ ServiceDnsRecord( - type="MX", name=domain, data=domain, ttl=3600, priority=10 + type="MX", name=domain, content=domain, ttl=3600, priority=10 ), ServiceDnsRecord( - type="TXT", name="_dmarc", 
data=f"v=DMARC1; p=none", ttl=3600 + type="TXT", name="_dmarc", content=f"v=DMARC1; p=none", ttl=3600 ), ServiceDnsRecord( - type="TXT", name=domain, data=f"v=spf1 a mx ip4:{ip4} -all", ttl=3600 + type="TXT", name=domain, content=f"v=spf1 a mx ip4:{ip4} -all", ttl=3600 ), ServiceDnsRecord( - type="TXT", name="selector._domainkey", data=dkim_record, ttl=3600 + type="TXT", name="selector._domainkey", content=dkim_record, ttl=3600 ), ] diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 43c4fbb..5627e17 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -3,6 +3,8 @@ from abc import ABC, abstractmethod from enum import Enum import typing +from pydantic import BaseModel + from selfprivacy_api.utils.block_devices import BlockDevice @@ -16,12 +18,12 @@ class ServiceStatus(Enum): OFF = "OFF" -class ServiceDnsRecord: +class ServiceDnsRecord(BaseModel): type: str name: str content: str ttl: int - priority: typing.Optional[int] + priority: typing.Optional[int] = None class Service(ABC): @@ -30,80 +32,99 @@ class Service(ABC): can be installed, configured and used by a user. 
""" + @staticmethod @abstractmethod - def get_id(self) -> str: + def get_id() -> str: pass + @staticmethod @abstractmethod - def get_display_name(self) -> str: + def get_display_name() -> str: pass + @staticmethod @abstractmethod - def get_description(self) -> str: + def get_description() -> str: pass + @staticmethod @abstractmethod - def get_svg_icon(self) -> str: + def get_svg_icon() -> str: pass + @staticmethod @abstractmethod def is_movable() -> bool: pass + @staticmethod @abstractmethod def is_required() -> bool: pass + @staticmethod @abstractmethod - def is_enabled(self) -> bool: + def is_enabled() -> bool: pass + @staticmethod @abstractmethod - def get_status(self) -> ServiceStatus: + def get_status() -> ServiceStatus: pass + @staticmethod @abstractmethod - def enable(self): + def enable(): pass + @staticmethod @abstractmethod - def disable(self): + def disable(): pass + @staticmethod @abstractmethod - def stop(self): + def stop(): pass + @staticmethod @abstractmethod - def start(self): + def start(): pass + @staticmethod @abstractmethod - def restart(self): + def restart(): pass + @staticmethod @abstractmethod - def get_configuration(self): + def get_configuration(): pass + @staticmethod @abstractmethod - def set_configuration(self, config_items): + def set_configuration(config_items): pass + @staticmethod @abstractmethod - def get_logs(self): + def get_logs(): pass + @staticmethod @abstractmethod - def get_storage_usage(self) -> int: + def get_storage_usage() -> int: pass + @staticmethod @abstractmethod - def get_dns_records(self) -> typing.List[ServiceDnsRecord]: + def get_dns_records() -> typing.List[ServiceDnsRecord]: pass + @staticmethod @abstractmethod - def get_location(self) -> str: + def get_location() -> str: pass @abstractmethod diff --git a/selfprivacy_api/utils/network.py b/selfprivacy_api/utils/network.py index 9b1ba99..c1b8a2b 100644 --- a/selfprivacy_api/utils/network.py +++ b/selfprivacy_api/utils/network.py @@ -5,7 +5,7 @@ import re from 
typing import Optional -def get_ip4() -> Optional[str]: +def get_ip4() -> str: """Get IPv4 address""" try: ip4 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( @@ -14,10 +14,10 @@ def get_ip4() -> Optional[str]: ip4 = re.search(r"inet (\d+\.\d+\.\d+\.\d+)\/\d+", ip4) except subprocess.CalledProcessError: ip4 = None - return ip4.group(1) if ip4 else None + return ip4.group(1) if ip4 else "" -def get_ip6() -> Optional[str]: +def get_ip6() -> str: """Get IPv6 address""" try: ip6 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( @@ -26,4 +26,4 @@ def get_ip6() -> Optional[str]: ip6 = re.search(r"inet6 (\S+)\/\d+", ip6) except subprocess.CalledProcessError: ip6 = None - return ip6.group(1) if ip6 else None + return ip6.group(1) if ip6 else "" -- 2.42.0 From 43675b2d1d1e8b9ff3a66b47afbb73ff3de63425 Mon Sep 17 00:00:00 2001 From: inexcode Date: Fri, 12 Aug 2022 17:43:04 +0400 Subject: [PATCH 10/50] Programmatical uvicorn start --- pyproject.toml | 4 ++-- selfprivacy_api/app.py | 5 +++++ setup.py | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1ffd18c..7f8d872 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,3 @@ [build-system] -requires = ["setuptools", "wheel", "portalocker", "flask-swagger", "flask-swagger-ui"] -build-backend = "setuptools.build_meta" \ No newline at end of file +requires = ["setuptools", "wheel", "portalocker"] +build-backend = "setuptools.build_meta" diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index a65b47b..60fac7f 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -5,6 +5,8 @@ from fastapi import FastAPI, Depends, Request, WebSocket, BackgroundTasks from fastapi.middleware.cors import CORSMiddleware from strawberry.fastapi import BaseContext, GraphQLRouter +import uvicorn + from selfprivacy_api.dependencies import get_api_version, get_graphql_context from selfprivacy_api.graphql.schema import schema 
from selfprivacy_api.migrations import run_migrations @@ -49,3 +51,6 @@ async def get_version(): async def startup(): run_migrations() init_restic() + +if __name__ == "__main__": + uvicorn.run("app:app", host="0.0.0.0", port=5050, log_level="info") diff --git a/setup.py b/setup.py index 5619621..eabc165 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="1.2.7", + version="2.0.0", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", -- 2.42.0 From e7df559787d461658fc19244906e87641cd06f99 Mon Sep 17 00:00:00 2001 From: inexcode Date: Fri, 12 Aug 2022 22:04:20 +0400 Subject: [PATCH 11/50] Change uvicorn run expression --- selfprivacy_api/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 60fac7f..3a73346 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -53,4 +53,4 @@ async def startup(): init_restic() if __name__ == "__main__": - uvicorn.run("app:app", host="0.0.0.0", port=5050, log_level="info") + uvicorn.run("selfprivacy_api.app:app", host="0.0.0.0", port=5050, log_level="info") -- 2.42.0 From 00badfbbf87ccc352ae46d8886bb8f2be4b3a5c6 Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 13 Aug 2022 01:29:18 +0400 Subject: [PATCH 12/50] Add some services endpoints --- selfprivacy_api/app.py | 1 + selfprivacy_api/graphql/common_types/dns.py | 13 ++ .../graphql/common_types/storage_usage.py | 24 ++++ .../graphql/queries/api_queries.py | 3 +- selfprivacy_api/graphql/queries/jobs.py | 45 +++++++ selfprivacy_api/graphql/queries/services.py | 117 ++++++++++++++++++ selfprivacy_api/graphql/queries/storage.py | 16 +-- selfprivacy_api/graphql/queries/system.py | 12 +- selfprivacy_api/graphql/schema.py | 15 ++- .../graphql/subscriptions/__init__.py | 0 selfprivacy_api/graphql/subscriptions/jobs.py | 76 ------------ selfprivacy_api/jobs/test.py | 4 +- 
selfprivacy_api/restic_controller/tasks.py | 4 +- selfprivacy_api/services/__init__.py | 39 ++++++ .../services/bitwarden/__init__.py | 12 +- .../services/generic_service_mover.py | 6 +- selfprivacy_api/services/gitea/__init__.py | 14 ++- .../services/mailserver/__init__.py | 9 +- .../services/nextcloud/__init__.py | 10 +- selfprivacy_api/services/ocserv/__init__.py | 9 ++ selfprivacy_api/services/pleroma/__init__.py | 8 +- selfprivacy_api/services/service.py | 5 + selfprivacy_api/utils/huey.py | 10 +- 23 files changed, 314 insertions(+), 138 deletions(-) create mode 100644 selfprivacy_api/graphql/common_types/dns.py create mode 100644 selfprivacy_api/graphql/common_types/storage_usage.py create mode 100644 selfprivacy_api/graphql/queries/jobs.py create mode 100644 selfprivacy_api/graphql/queries/services.py delete mode 100644 selfprivacy_api/graphql/subscriptions/__init__.py delete mode 100644 selfprivacy_api/graphql/subscriptions/jobs.py diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 3a73346..b5ed512 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -52,5 +52,6 @@ async def startup(): run_migrations() init_restic() + if __name__ == "__main__": uvicorn.run("selfprivacy_api.app:app", host="0.0.0.0", port=5050, log_level="info") diff --git a/selfprivacy_api/graphql/common_types/dns.py b/selfprivacy_api/graphql/common_types/dns.py new file mode 100644 index 0000000..c9f8413 --- /dev/null +++ b/selfprivacy_api/graphql/common_types/dns.py @@ -0,0 +1,13 @@ +import typing +import strawberry + + +@strawberry.type +class DnsRecord: + """DNS record""" + + record_type: str + name: str + content: str + ttl: int + priority: typing.Optional[int] diff --git a/selfprivacy_api/graphql/common_types/storage_usage.py b/selfprivacy_api/graphql/common_types/storage_usage.py new file mode 100644 index 0000000..f45966e --- /dev/null +++ b/selfprivacy_api/graphql/common_types/storage_usage.py @@ -0,0 +1,24 @@ +import typing +import strawberry + + 
+@strawberry.type +class StorageVolume: + """Stats and basic info about a volume or a system disk.""" + + total_space: str + free_space: str + used_space: str + root: bool + name: str + model: typing.Optional[str] + serial: typing.Optional[str] + type: str + usages: list["StorageUsageInterface"] + + +@strawberry.interface +class StorageUsageInterface: + used_space: str + volume: typing.Optional[StorageVolume] + title: str diff --git a/selfprivacy_api/graphql/queries/api_queries.py b/selfprivacy_api/graphql/queries/api_queries.py index ccdf89f..cbe7690 100644 --- a/selfprivacy_api/graphql/queries/api_queries.py +++ b/selfprivacy_api/graphql/queries/api_queries.py @@ -7,6 +7,7 @@ from strawberry.types import Info from selfprivacy_api.actions.api_tokens import get_api_tokens_with_caller_flag from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.utils import parse_date +from selfprivacy_api.dependencies import get_api_version as get_api_version_dependency from selfprivacy_api.utils.auth import ( get_recovery_token_status, @@ -17,7 +18,7 @@ from selfprivacy_api.utils.auth import ( def get_api_version() -> str: """Get API version""" - return "1.2.7" + return get_api_version_dependency() @strawberry.type diff --git a/selfprivacy_api/graphql/queries/jobs.py b/selfprivacy_api/graphql/queries/jobs.py new file mode 100644 index 0000000..4d2a3eb --- /dev/null +++ b/selfprivacy_api/graphql/queries/jobs.py @@ -0,0 +1,45 @@ +"""Jobs status""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +import datetime + +from selfprivacy_api.jobs import Jobs + + +@strawberry.type +class ApiJob: + name: str + description: str + status: str + status_text: typing.Optional[str] + progress: typing.Optional[int] + created_at: datetime.datetime + updated_at: datetime.datetime + finished_at: typing.Optional[datetime.datetime] + error: typing.Optional[str] + result: typing.Optional[str] + + +@strawberry.type +class Job: + @strawberry.field + def 
get_jobs(self) -> typing.List[ApiJob]: + + Jobs.get_instance().get_jobs() + + return [ + ApiJob( + name=job.name, + description=job.description, + status=job.status.name, + status_text=job.status_text, + progress=job.progress, + created_at=job.created_at, + updated_at=job.updated_at, + finished_at=job.finished_at, + error=job.error, + result=job.result, + ) + for job in Jobs.get_instance().get_jobs() + ] diff --git a/selfprivacy_api/graphql/queries/services.py b/selfprivacy_api/graphql/queries/services.py new file mode 100644 index 0000000..f5c6fff --- /dev/null +++ b/selfprivacy_api/graphql/queries/services.py @@ -0,0 +1,117 @@ +"""Services status""" +# pylint: disable=too-few-public-methods +from enum import Enum +import typing +import strawberry +import datetime + +from selfprivacy_api.graphql.common_types.dns import DnsRecord +from selfprivacy_api.graphql.common_types.storage_usage import ( + StorageUsageInterface, + StorageVolume, +) +from selfprivacy_api.services import get_all_services, get_service_by_id +from selfprivacy_api.services import Service as ServiceInterface +from selfprivacy_api.utils.block_devices import BlockDevices + + +@strawberry.enum +class ServiceStatusEnum(Enum): + RUNNING = "RUNNING" + DEGRADED = "DEGRADED" + ERROR = "ERROR" + STOPPED = "STOPPED" + OFF = "OFF" + + +@strawberry.type +class ServiceStorageUsage(StorageUsageInterface): + """Storage usage for a service""" + + service: typing.Optional["Service"] + + +def get_storage_usage(root: "Service") -> ServiceStorageUsage: + """Get storage usage for a service""" + service = get_service_by_id(root.id) + if service is None: + return ServiceStorageUsage( + service=service, + title="Not found", + used_space="0", + volume=get_volume_by_id("sda1"), + ) + return ServiceStorageUsage( + service=service_to_graphql_service(service), + title=service.get_display_name(), + used_space=str(service.get_storage_usage()), + volume=get_volume_by_id(service.get_location()), + ) + + +@strawberry.type +class 
Service: + storage_usage: ServiceStorageUsage = strawberry.field(resolver=get_storage_usage) + id: str + display_name: str + description: str + svg_icon: str + is_movable: bool + is_required: bool + is_enabled: bool + status: ServiceStatusEnum + url: typing.Optional[str] + dns_records: typing.Optional[typing.List[DnsRecord]] + + +def service_to_graphql_service(service: ServiceInterface) -> Service: + """Convert service to graphql service""" + return Service( + id=service.get_id(), + display_name=service.get_display_name(), + description=service.get_description(), + svg_icon=service.get_svg_icon(), + is_movable=service.is_movable(), + is_required=service.is_required(), + is_enabled=service.is_enabled(), + status=ServiceStatusEnum(service.get_status().value), + url=service.get_url(), + dns_records=[ + DnsRecord( + record_type=record.type, + name=record.name, + content=record.content, + ttl=record.ttl, + priority=record.priority, + ) + for record in service.get_dns_records() + ], + ) + + +def get_volume_by_id(volume_id: str) -> typing.Optional[StorageVolume]: + """Get volume by id""" + volume = BlockDevices().get_block_device(volume_id) + if volume is None: + return None + return StorageVolume( + total_space=str(volume.fssize) + if volume.fssize is not None + else str(volume.size), + free_space=str(volume.fsavail), + used_space=str(volume.fsused), + root=volume.name == "sda1", + name=volume.name, + model=volume.model, + serial=volume.serial, + type=volume.type, + usages=[], + ) + + +@strawberry.type +class Services: + @strawberry.field + def all_services(self, info) -> typing.List[Service]: + services = get_all_services() + return [service_to_graphql_service(service) for service in services] diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py index 6315b26..fcff066 100644 --- a/selfprivacy_api/graphql/queries/storage.py +++ b/selfprivacy_api/graphql/queries/storage.py @@ -2,23 +2,10 @@ # pylint: 
disable=too-few-public-methods import typing import strawberry +from selfprivacy_api.graphql.common_types.storage_usage import StorageVolume from selfprivacy_api.utils.block_devices import BlockDevices -@strawberry.type -class StorageVolume: - """Stats and basic info about a volume or a system disk.""" - - total_space: str - free_space: str - used_space: str - root: bool - name: str - model: typing.Optional[str] - serial: typing.Optional[str] - type: str - - @strawberry.type class Storage: """GraphQL queries to get storage information.""" @@ -38,6 +25,7 @@ class Storage: model=volume.model, serial=volume.serial, type=volume.type, + usages=[], ) for volume in BlockDevices().get_block_devices() ] diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index decb3f0..2997292 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -2,6 +2,7 @@ # pylint: disable=too-few-public-methods import typing import strawberry +from selfprivacy_api.graphql.common_types.dns import DnsRecord from selfprivacy_api.graphql.queries.common import Alert, Severity from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvider @@ -10,17 +11,6 @@ import selfprivacy_api.actions.system as system_actions import selfprivacy_api.actions.ssh as ssh_actions -@strawberry.type -class DnsRecord: - """DNS record""" - - recordType: str - name: str - content: str - ttl: int - priority: typing.Optional[int] - - @strawberry.type class SystemDomainInfo: """Information about the system domain""" diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 03d8021..aa7a6a4 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -10,12 +10,13 @@ from selfprivacy_api.graphql.mutations.storage_mutation import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from 
selfprivacy_api.graphql.queries.api_queries import Api +from selfprivacy_api.graphql.queries.jobs import Job +from selfprivacy_api.graphql.queries.services import Services from selfprivacy_api.graphql.queries.storage import Storage from selfprivacy_api.graphql.queries.system import System from selfprivacy_api.graphql.mutations.users_mutations import UserMutations from selfprivacy_api.graphql.queries.users import Users -from selfprivacy_api.graphql.subscriptions.jobs import JobSubscription from selfprivacy_api.jobs.test import test_job @@ -43,6 +44,16 @@ class Query: """Storage queries""" return Storage() + @strawberry.field(permission_classes=[IsAuthenticated]) + def jobs(self) -> Job: + """Jobs queries""" + return Job() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def services(self) -> Services: + """Services queries""" + return Services() + @strawberry.type class Mutation( @@ -67,4 +78,4 @@ class Mutation( pass -schema = strawberry.Schema(query=Query, mutation=Mutation, subscription=JobSubscription) +schema = strawberry.Schema(query=Query, mutation=Mutation) diff --git a/selfprivacy_api/graphql/subscriptions/__init__.py b/selfprivacy_api/graphql/subscriptions/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/selfprivacy_api/graphql/subscriptions/jobs.py b/selfprivacy_api/graphql/subscriptions/jobs.py deleted file mode 100644 index 2dfca07..0000000 --- a/selfprivacy_api/graphql/subscriptions/jobs.py +++ /dev/null @@ -1,76 +0,0 @@ -import asyncio -import datetime -from typing import AsyncGenerator -import typing - -import strawberry -from selfprivacy_api.graphql import IsAuthenticated - -from selfprivacy_api.jobs import Job, Jobs - - -@strawberry.type -class ApiJob: - name: str - description: str - status: str - status_text: typing.Optional[str] - progress: typing.Optional[int] - created_at: datetime.datetime - updated_at: datetime.datetime - finished_at: typing.Optional[datetime.datetime] - error: typing.Optional[str] - 
result: typing.Optional[str] - - -@strawberry.type -class JobSubscription: - @strawberry.subscription - async def count(self, target: int = 100) -> AsyncGenerator[int, None]: - for i in range(target): - yield i - await asyncio.sleep(0.5) - - @strawberry.subscription() - async def job_subscription(self) -> AsyncGenerator[typing.List[ApiJob], None]: - is_updated = True - - def callback(jobs: typing.List[Job]): - nonlocal is_updated - is_updated = True - - print("Subscribing to job updates...") - Jobs.get_instance().add_observer(callback) - yield [ - ApiJob( - name=job.name, - description=job.description, - status=job.status.name, - status_text=job.status_text, - progress=job.progress, - created_at=job.created_at, - updated_at=job.updated_at, - finished_at=job.finished_at, - error=job.error, - result=job.result, - ) - for job in Jobs.get_instance().get_jobs() - ] - while True: - if is_updated: - is_updated = False - yield [ - ApiJob( - name=job.name, - description=job.description, - status=job.status.name, - status_text=job.status_text, - progress=job.progress, - created_at=job.created_at, - updated_at=job.updated_at, - finished_at=job.finished_at, - error=job.error, - result=job.result, - ) - for job in Jobs.get_instance().get_jobs() - ] diff --git a/selfprivacy_api/jobs/test.py b/selfprivacy_api/jobs/test.py index d07ea6c..f3c03af 100644 --- a/selfprivacy_api/jobs/test.py +++ b/selfprivacy_api/jobs/test.py @@ -1,9 +1,7 @@ import time -from selfprivacy_api.utils.huey import Huey +from selfprivacy_api.utils.huey import huey from selfprivacy_api.jobs import JobStatus, Jobs -huey = Huey() - @huey.task() def test_job(): diff --git a/selfprivacy_api/restic_controller/tasks.py b/selfprivacy_api/restic_controller/tasks.py index 32eb87d..f583d8b 100644 --- a/selfprivacy_api/restic_controller/tasks.py +++ b/selfprivacy_api/restic_controller/tasks.py @@ -1,10 +1,8 @@ """Tasks for the restic controller.""" from huey import crontab -from selfprivacy_api.utils.huey import Huey 
+from selfprivacy_api.utils.huey import huey from . import ResticController, ResticStates -huey = Huey() - @huey.task() def init_restic(): diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py index e69de29..fbc2aed 100644 --- a/selfprivacy_api/services/__init__.py +++ b/selfprivacy_api/services/__init__.py @@ -0,0 +1,39 @@ +"""Services module.""" + +import typing +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.services.ocserv import Ocserv +from selfprivacy_api.services.service import Service + + +services = [ + Bitwarden(), + Gitea(), + MailServer(), + Nextcloud(), + Pleroma(), + Ocserv(), +] + + +def get_all_services() -> typing.List[Service]: + return services + + +def get_service_by_id(service_id: str) -> typing.Optional[Service]: + for service in services: + if service.get_id() == service_id: + return service + return None + + +def get_enabled_services() -> typing.List[Service]: + return [service for service in services if service.is_enabled()] + + +def get_disabled_services() -> typing.List[Service]: + return [service for service in services if not service.is_enabled()] diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 599e0e4..9fe9995 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -8,13 +8,11 @@ from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus -from selfprivacy_api.utils 
import ReadUserData, WriteUserData +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice -from selfprivacy_api.utils.huey import Huey +from selfprivacy_api.utils.huey import huey from selfprivacy_api.utils.network import get_ip4 -huey = Huey() - class Bitwarden(Service): """Class representing Bitwarden service.""" @@ -40,6 +38,12 @@ class Bitwarden(Service): with open("selfprivacy_api/services/bitwarden/bitwarden.svg", "rb") as f: return base64.b64encode(f.read()).decode("utf-8") + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://password.{domain}" + @staticmethod def is_movable() -> bool: return True diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index 1a9c0ac..6831ed7 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -7,12 +7,10 @@ import shutil from pydantic import BaseModel from selfprivacy_api.jobs import Job, JobStatus, Jobs -from selfprivacy_api.utils.huey import Huey +from selfprivacy_api.utils.huey import huey from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils import ReadUserData, WriteUserData -from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus - -huey = Huey() +from selfprivacy_api.services.service import Service, ServiceStatus class FolderMoveNames(BaseModel): diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index 9893bc8..ee0a058 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -3,18 +3,16 @@ import base64 import subprocess import typing -from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.jobs import Jobs from selfprivacy_api.services.generic_service_mover 
import FolderMoveNames, move_service from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus -from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice -from selfprivacy_api.utils.huey import Huey +from selfprivacy_api.utils.huey import huey from selfprivacy_api.utils.network import get_ip4 -huey = Huey() - class Gitea(Service): """Class representing Gitea service""" @@ -40,6 +38,12 @@ class Gitea(Service): with open("selfprivacy_api/services/gitea/gitea.svg", "rb") as f: return base64.b64encode(f.read()).decode("utf-8") + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://git.{domain}" + @staticmethod def is_movable() -> bool: return True diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index 3eaf8c6..31e250b 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -12,11 +12,9 @@ from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceS from selfprivacy_api.utils import ReadUserData, WriteUserData, get_dkim_key, get_domain from selfprivacy_api.utils import huey from selfprivacy_api.utils.block_devices import BlockDevice -from selfprivacy_api.utils.huey import Huey +from selfprivacy_api.utils.huey import huey from selfprivacy_api.utils.network import get_ip4 -huey = Huey() - class MailServer(Service): """Class representing mail service""" @@ -38,6 +36,11 @@ class MailServer(Service): with open("selfprivacy_api/services/mailserver/mailserver.svg", "rb") as f: return base64.b64encode(f.read()).decode("utf-8") + @staticmethod + def 
get_url() -> typing.Optional[str]: + """Return service url.""" + return None + @staticmethod def is_movable() -> bool: return True diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 40eaf7f..99efade 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -7,7 +7,7 @@ from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus -from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils.network import get_ip4 @@ -16,7 +16,7 @@ class Nextcloud(Service): """Class representing Nextcloud service.""" @staticmethod - def get_id(self) -> str: + def get_id() -> str: """Return service id.""" return "nextcloud" @@ -36,6 +36,12 @@ class Nextcloud(Service): with open("selfprivacy_api/services/nextcloud/nextcloud.svg", "rb") as f: return base64.b64encode(f.read()).decode("utf-8") + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://cloud.{domain}" + @staticmethod def is_movable() -> bool: return True diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index 9f1a9f6..e7bf74b 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -32,6 +32,11 @@ class Ocserv(Service): with open("selfprivacy_api/services/ocserv/ocserv.svg", "rb") as f: return base64.b64encode(f.read()).decode("utf-8") + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service 
url.""" + return None + @staticmethod def is_movable() -> bool: return False @@ -79,6 +84,10 @@ class Ocserv(Service): def get_configuration(): return {} + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + @staticmethod def get_logs(): return "" diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index c8b1bd5..97f76f9 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -7,7 +7,7 @@ from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus -from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils.network import get_ip4 @@ -32,6 +32,12 @@ class Pleroma(Service): with open("selfprivacy_api/services/pleroma/pleroma.svg", "rb") as f: return base64.b64encode(f.read()).decode("utf-8") + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://social.{domain}" + @staticmethod def is_movable() -> bool: return True diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 5627e17..bffa1a4 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -52,6 +52,11 @@ class Service(ABC): def get_svg_icon() -> str: pass + @staticmethod + @abstractmethod + def get_url() -> typing.Optional[str]: + pass + @staticmethod @abstractmethod def is_movable() -> bool: diff --git a/selfprivacy_api/utils/huey.py b/selfprivacy_api/utils/huey.py index f03435e..7b39d5a 
100644 --- a/selfprivacy_api/utils/huey.py +++ b/selfprivacy_api/utils/huey.py @@ -4,13 +4,5 @@ from huey import SqliteHuey HUEY_DATABASE = "/etc/nixos/userdata/tasks.db" # Singleton instance containing the huey database. -class Huey: - """Huey singleton.""" - __instance = None - - def __new__(cls): - """Create a new instance of the huey singleton.""" - if Huey.__instance is None: - Huey.__instance = SqliteHuey(HUEY_DATABASE) - return Huey.__instance +huey = SqliteHuey(HUEY_DATABASE) -- 2.42.0 From 1e901d1fcb64e4d28a60eda3990166275dfb465e Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 13 Aug 2022 01:34:48 +0400 Subject: [PATCH 13/50] add get working directory endpoint --- selfprivacy_api/graphql/queries/system.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index 2997292..1b6332e 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -1,5 +1,6 @@ """Common system information and settings""" # pylint: disable=too-few-public-methods +import os import typing import strawberry from selfprivacy_api.graphql.common_types.dns import DnsRecord @@ -135,3 +136,7 @@ class System: info: SystemInfo = SystemInfo() provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info) busy: bool = False + @strawberry.field + def working_directory(self) -> str: + """Get working directory""" + return os.getcwd() -- 2.42.0 From 1ed2b73ec814a912951009659d6da5700ce6deb6 Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 13 Aug 2022 01:42:11 +0400 Subject: [PATCH 14/50] Make icons python modules --- selfprivacy_api/services/bitwarden/__init__.py | 4 ++-- selfprivacy_api/services/bitwarden/icon.py | 5 +++++ selfprivacy_api/services/gitea/__init__.py | 4 ++-- selfprivacy_api/services/gitea/icon.py | 5 +++++ selfprivacy_api/services/mailserver/__init__.py | 4 ++-- selfprivacy_api/services/mailserver/icon.py | 5 +++++ 
selfprivacy_api/services/nextcloud/__init__.py | 4 ++-- selfprivacy_api/services/nextcloud/icon.py | 12 ++++++++++++ selfprivacy_api/services/ocserv/__init__.py | 4 ++-- selfprivacy_api/services/ocserv/icon.py | 5 +++++ selfprivacy_api/services/pleroma/__init__.py | 4 ++-- selfprivacy_api/services/pleroma/icon.py | 12 ++++++++++++ 12 files changed, 56 insertions(+), 12 deletions(-) create mode 100644 selfprivacy_api/services/bitwarden/icon.py create mode 100644 selfprivacy_api/services/gitea/icon.py create mode 100644 selfprivacy_api/services/mailserver/icon.py create mode 100644 selfprivacy_api/services/nextcloud/icon.py create mode 100644 selfprivacy_api/services/ocserv/icon.py create mode 100644 selfprivacy_api/services/pleroma/icon.py diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 9fe9995..a752400 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -12,6 +12,7 @@ from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils.huey import huey from selfprivacy_api.utils.network import get_ip4 +from selfprivacy_api.services.bitwarden.icon import BITWARDEN_ICON class Bitwarden(Service): @@ -35,8 +36,7 @@ class Bitwarden(Service): @staticmethod def get_svg_icon() -> str: """Read SVG icon from file and return it as base64 encoded string.""" - with open("selfprivacy_api/services/bitwarden/bitwarden.svg", "rb") as f: - return base64.b64encode(f.read()).decode("utf-8") + return base64.b64encode(BITWARDEN_ICON.encode("utf-8")).decode("utf-8") @staticmethod def get_url() -> typing.Optional[str]: diff --git a/selfprivacy_api/services/bitwarden/icon.py b/selfprivacy_api/services/bitwarden/icon.py new file mode 100644 index 0000000..f9280e0 --- /dev/null +++ b/selfprivacy_api/services/bitwarden/icon.py @@ -0,0 +1,5 @@ +BITWARDEN_ICON = """ + + + 
+""" diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index ee0a058..7916b60 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -12,6 +12,7 @@ from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils.huey import huey from selfprivacy_api.utils.network import get_ip4 +from selfprivacy_api.services.gitea.icon import GITEA_ICON class Gitea(Service): @@ -35,8 +36,7 @@ class Gitea(Service): @staticmethod def get_svg_icon() -> str: """Read SVG icon from file and return it as base64 encoded string.""" - with open("selfprivacy_api/services/gitea/gitea.svg", "rb") as f: - return base64.b64encode(f.read()).decode("utf-8") + return base64.b64encode(GITEA_ICON.encode("utf-8")).decode("utf-8") @staticmethod def get_url() -> typing.Optional[str]: diff --git a/selfprivacy_api/services/gitea/icon.py b/selfprivacy_api/services/gitea/icon.py new file mode 100644 index 0000000..569f96a --- /dev/null +++ b/selfprivacy_api/services/gitea/icon.py @@ -0,0 +1,5 @@ +GITEA_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index 31e250b..659d4c3 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -14,6 +14,7 @@ from selfprivacy_api.utils import huey from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils.huey import huey from selfprivacy_api.utils.network import get_ip4 +from selfprivacy_api.services.mailserver.icon import MAILSERVER_ICON class MailServer(Service): @@ -33,8 +34,7 @@ class MailServer(Service): @staticmethod def get_svg_icon() -> str: - with open("selfprivacy_api/services/mailserver/mailserver.svg", "rb") as f: - return base64.b64encode(f.read()).decode("utf-8") + return 
base64.b64encode(MAILSERVER_ICON.encode("utf-8")).decode("utf-8") @staticmethod def get_url() -> typing.Optional[str]: diff --git a/selfprivacy_api/services/mailserver/icon.py b/selfprivacy_api/services/mailserver/icon.py new file mode 100644 index 0000000..cb5b639 --- /dev/null +++ b/selfprivacy_api/services/mailserver/icon.py @@ -0,0 +1,5 @@ +MAILSERVER_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 99efade..fcdaf9f 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -10,6 +10,7 @@ from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceS from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils.network import get_ip4 +from selfprivacy_api.services.nextcloud.icon import NEXTCLOUD_ICON class Nextcloud(Service): @@ -33,8 +34,7 @@ class Nextcloud(Service): @staticmethod def get_svg_icon() -> str: """Read SVG icon from file and return it as base64 encoded string.""" - with open("selfprivacy_api/services/nextcloud/nextcloud.svg", "rb") as f: - return base64.b64encode(f.read()).decode("utf-8") + return base64.b64encode(NEXTCLOUD_ICON.encode("utf-8")).decode("utf-8") @staticmethod def get_url() -> typing.Optional[str]: diff --git a/selfprivacy_api/services/nextcloud/icon.py b/selfprivacy_api/services/nextcloud/icon.py new file mode 100644 index 0000000..d178640 --- /dev/null +++ b/selfprivacy_api/services/nextcloud/icon.py @@ -0,0 +1,12 @@ +NEXTCLOUD_ICON = """ + + + + + + + + + + +""" diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index e7bf74b..73d6474 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -10,6 +10,7 @@ from selfprivacy_api.services.service import Service, 
ServiceDnsRecord, ServiceS from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils.network import get_ip4 +from selfprivacy_api.services.ocserv.icon import OCSERV_ICON class Ocserv(Service): @@ -29,8 +30,7 @@ class Ocserv(Service): @staticmethod def get_svg_icon() -> str: - with open("selfprivacy_api/services/ocserv/ocserv.svg", "rb") as f: - return base64.b64encode(f.read()).decode("utf-8") + return base64.b64encode(OCSERV_ICON.encode("utf-8")).decode("utf-8") @staticmethod def get_url() -> typing.Optional[str]: diff --git a/selfprivacy_api/services/ocserv/icon.py b/selfprivacy_api/services/ocserv/icon.py new file mode 100644 index 0000000..6585c5e --- /dev/null +++ b/selfprivacy_api/services/ocserv/icon.py @@ -0,0 +1,5 @@ +OCSERV_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index 97f76f9..9dd333e 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -10,6 +10,7 @@ from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceS from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils.network import get_ip4 +from selfprivacy_api.services.pleroma.icon import PLEROMA_ICON class Pleroma(Service): @@ -29,8 +30,7 @@ class Pleroma(Service): @staticmethod def get_svg_icon() -> str: - with open("selfprivacy_api/services/pleroma/pleroma.svg", "rb") as f: - return base64.b64encode(f.read()).decode("utf-8") + return base64.b64encode(PLEROMA_ICON.encode("utf-8")).decode("utf-8") @staticmethod def get_url() -> typing.Optional[str]: diff --git a/selfprivacy_api/services/pleroma/icon.py b/selfprivacy_api/services/pleroma/icon.py new file mode 100644 index 0000000..c0c4d2b --- /dev/null +++ b/selfprivacy_api/services/pleroma/icon.py 
@@ -0,0 +1,12 @@ +PLEROMA_ICON = """ + + + + + + + + + + +""" -- 2.42.0 From 5e6c51a8bcd4b350ad588d2483a614cadd90aa4c Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 13 Aug 2022 01:42:25 +0400 Subject: [PATCH 15/50] add shell module --- api.nix | 64 +++++++++++++++++++++++++++++++++++++++++++++++++++++ default.nix | 2 ++ 2 files changed, 66 insertions(+) create mode 100644 api.nix create mode 100644 default.nix diff --git a/api.nix b/api.nix new file mode 100644 index 0000000..83bc695 --- /dev/null +++ b/api.nix @@ -0,0 +1,64 @@ +{ lib, python39Packages }: +with python39Packages; +buildPythonApplication { + pname = "selfprivacy-api"; + version = "2.0.0"; + + propagatedBuildInputs = [ + setuptools + portalocker + pytz + pytest + pytest-mock + pytest-datadir + huey + gevent + mnemonic + pydantic + typing-extensions + psutil + fastapi + uvicorn + (buildPythonPackage rec { + pname = "strawberry-graphql"; + version = "0.123.0"; + format = "pyproject"; + patches = [ + ./strawberry-graphql.patch + ]; + propagatedBuildInputs = [ + typing-extensions + python-multipart + python-dateutil + # flask + pydantic + pygments + poetry + # flask-cors + (buildPythonPackage rec { + pname = "graphql-core"; + version = "3.2.0"; + format = "setuptools"; + src = fetchPypi { + inherit pname version; + sha256 = "sha256-huKgvgCL/eGe94OI3opyWh2UKpGQykMcJKYIN5c4A84="; + }; + checkInputs = [ + pytest-asyncio + pytest-benchmark + pytestCheckHook + ]; + pythonImportsCheck = [ + "graphql" + ]; + }) + ]; + src = fetchPypi { + inherit pname version; + sha256 = "KsmZ5Xv8tUg6yBxieAEtvoKoRG60VS+iVGV0X6oCExo="; + }; + }) + ]; + + src = ./.; +} diff --git a/default.nix b/default.nix new file mode 100644 index 0000000..740c7ce --- /dev/null +++ b/default.nix @@ -0,0 +1,2 @@ +{ pkgs ? 
import {} }: +pkgs.callPackage ./api.nix {} -- 2.42.0 From 19b5c06fc6c0a23987377f17b1c94fc816459e40 Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 13 Aug 2022 01:52:36 +0400 Subject: [PATCH 16/50] Fix mountpoints bug --- selfprivacy_api/utils/block_devices.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index b33c7aa..f3b0911 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -170,7 +170,7 @@ class BlockDevices: "-J", "-b", "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", ] ) lsblk_output = lsblk_output.decode("utf-8") -- 2.42.0 From 9abc11f1879999a67d941b790b4c85221947ae6d Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 13 Aug 2022 02:12:28 +0400 Subject: [PATCH 17/50] Add services to volumes --- .../graphql/common_types/service.py | 108 ++++++++++++++++++ .../graphql/common_types/storage_usage.py | 9 ++ selfprivacy_api/graphql/queries/services.py | 107 +---------------- selfprivacy_api/graphql/queries/storage.py | 20 +++- selfprivacy_api/graphql/queries/system.py | 1 + selfprivacy_api/services/__init__.py | 6 +- 6 files changed, 145 insertions(+), 106 deletions(-) create mode 100644 selfprivacy_api/graphql/common_types/service.py diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py new file mode 100644 index 0000000..fc13974 --- /dev/null +++ b/selfprivacy_api/graphql/common_types/service.py @@ -0,0 +1,108 @@ +from enum import Enum +import typing +import strawberry +from selfprivacy_api.graphql.common_types.dns import DnsRecord + +from selfprivacy_api.graphql.common_types.storage_usage import ( + ServiceStorageUsage, + StorageVolume, +) +from selfprivacy_api.graphql.queries.services import get_volume_by_id +from 
selfprivacy_api.services import get_service_by_id, get_services_by_location +from selfprivacy_api.services import Service as ServiceInterface +from selfprivacy_api.utils.block_devices import BlockDevices + + +@strawberry.enum +class ServiceStatusEnum(Enum): + RUNNING = "RUNNING" + DEGRADED = "DEGRADED" + ERROR = "ERROR" + STOPPED = "STOPPED" + OFF = "OFF" + + +def get_storage_usage(root: "Service") -> ServiceStorageUsage: + """Get storage usage for a service""" + service = get_service_by_id(root.id) + if service is None: + return ServiceStorageUsage( + service=service, + title="Not found", + used_space="0", + volume=get_volume_by_id("sda1"), + ) + return ServiceStorageUsage( + service=service_to_graphql_service(service), + title=service.get_display_name(), + used_space=str(service.get_storage_usage()), + volume=get_volume_by_id(service.get_location()), + ) + + +@strawberry.type +class Service: + storage_usage: ServiceStorageUsage = strawberry.field(resolver=get_storage_usage) + id: str + display_name: str + description: str + svg_icon: str + is_movable: bool + is_required: bool + is_enabled: bool + status: ServiceStatusEnum + url: typing.Optional[str] + dns_records: typing.Optional[typing.List[DnsRecord]] + + +def service_to_graphql_service(service: ServiceInterface) -> Service: + """Convert service to graphql service""" + return Service( + id=service.get_id(), + display_name=service.get_display_name(), + description=service.get_description(), + svg_icon=service.get_svg_icon(), + is_movable=service.is_movable(), + is_required=service.is_required(), + is_enabled=service.is_enabled(), + status=ServiceStatusEnum(service.get_status().value), + url=service.get_url(), + dns_records=[ + DnsRecord( + record_type=record.type, + name=record.name, + content=record.content, + ttl=record.ttl, + priority=record.priority, + ) + for record in service.get_dns_records() + ], + ) + + +def get_volume_by_id(volume_id: str) -> typing.Optional[StorageVolume]: + """Get volume by id""" + 
volume = BlockDevices().get_block_device(volume_id) + if volume is None: + return None + return StorageVolume( + total_space=str(volume.fssize) + if volume.fssize is not None + else str(volume.size), + free_space=str(volume.fsavail), + used_space=str(volume.fsused), + root=volume.name == "sda1", + name=volume.name, + model=volume.model, + serial=volume.serial, + type=volume.type, + usages=[ + ServiceStorageUsage( + service=service_to_graphql_service(service), + title=service.get_display_name(), + used_space=str(service.get_storage_usage()), + volume=get_volume_by_id(service.get_location()), + ) + for service in get_services_by_location(volume.name) + ], + ) diff --git a/selfprivacy_api/graphql/common_types/storage_usage.py b/selfprivacy_api/graphql/common_types/storage_usage.py index f45966e..cdff53f 100644 --- a/selfprivacy_api/graphql/common_types/storage_usage.py +++ b/selfprivacy_api/graphql/common_types/storage_usage.py @@ -1,6 +1,8 @@ import typing import strawberry +from selfprivacy_api.graphql.common_types.service import Service + @strawberry.type class StorageVolume: @@ -22,3 +24,10 @@ class StorageUsageInterface: used_space: str volume: typing.Optional[StorageVolume] title: str + + +@strawberry.type +class ServiceStorageUsage(StorageUsageInterface): + """Storage usage for a service""" + + service: typing.Optional["Service"] diff --git a/selfprivacy_api/graphql/queries/services.py b/selfprivacy_api/graphql/queries/services.py index f5c6fff..7d622b7 100644 --- a/selfprivacy_api/graphql/queries/services.py +++ b/selfprivacy_api/graphql/queries/services.py @@ -1,112 +1,13 @@ """Services status""" # pylint: disable=too-few-public-methods -from enum import Enum import typing import strawberry -import datetime -from selfprivacy_api.graphql.common_types.dns import DnsRecord -from selfprivacy_api.graphql.common_types.storage_usage import ( - StorageUsageInterface, - StorageVolume, +from selfprivacy_api.graphql.common_types.service import ( + Service, + 
service_to_graphql_service, ) -from selfprivacy_api.services import get_all_services, get_service_by_id -from selfprivacy_api.services import Service as ServiceInterface -from selfprivacy_api.utils.block_devices import BlockDevices - - -@strawberry.enum -class ServiceStatusEnum(Enum): - RUNNING = "RUNNING" - DEGRADED = "DEGRADED" - ERROR = "ERROR" - STOPPED = "STOPPED" - OFF = "OFF" - - -@strawberry.type -class ServiceStorageUsage(StorageUsageInterface): - """Storage usage for a service""" - - service: typing.Optional["Service"] - - -def get_storage_usage(root: "Service") -> ServiceStorageUsage: - """Get storage usage for a service""" - service = get_service_by_id(root.id) - if service is None: - return ServiceStorageUsage( - service=service, - title="Not found", - used_space="0", - volume=get_volume_by_id("sda1"), - ) - return ServiceStorageUsage( - service=service_to_graphql_service(service), - title=service.get_display_name(), - used_space=str(service.get_storage_usage()), - volume=get_volume_by_id(service.get_location()), - ) - - -@strawberry.type -class Service: - storage_usage: ServiceStorageUsage = strawberry.field(resolver=get_storage_usage) - id: str - display_name: str - description: str - svg_icon: str - is_movable: bool - is_required: bool - is_enabled: bool - status: ServiceStatusEnum - url: typing.Optional[str] - dns_records: typing.Optional[typing.List[DnsRecord]] - - -def service_to_graphql_service(service: ServiceInterface) -> Service: - """Convert service to graphql service""" - return Service( - id=service.get_id(), - display_name=service.get_display_name(), - description=service.get_description(), - svg_icon=service.get_svg_icon(), - is_movable=service.is_movable(), - is_required=service.is_required(), - is_enabled=service.is_enabled(), - status=ServiceStatusEnum(service.get_status().value), - url=service.get_url(), - dns_records=[ - DnsRecord( - record_type=record.type, - name=record.name, - content=record.content, - ttl=record.ttl, - 
priority=record.priority, - ) - for record in service.get_dns_records() - ], - ) - - -def get_volume_by_id(volume_id: str) -> typing.Optional[StorageVolume]: - """Get volume by id""" - volume = BlockDevices().get_block_device(volume_id) - if volume is None: - return None - return StorageVolume( - total_space=str(volume.fssize) - if volume.fssize is not None - else str(volume.size), - free_space=str(volume.fsavail), - used_space=str(volume.fsused), - root=volume.name == "sda1", - name=volume.name, - model=volume.model, - serial=volume.serial, - type=volume.type, - usages=[], - ) +from selfprivacy_api.services import get_all_services @strawberry.type diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py index fcff066..12071ce 100644 --- a/selfprivacy_api/graphql/queries/storage.py +++ b/selfprivacy_api/graphql/queries/storage.py @@ -2,7 +2,15 @@ # pylint: disable=too-few-public-methods import typing import strawberry -from selfprivacy_api.graphql.common_types.storage_usage import StorageVolume +from selfprivacy_api.graphql.common_types.service import ( + service_to_graphql_service, + get_volume_by_id, +) +from selfprivacy_api.graphql.common_types.storage_usage import ( + ServiceStorageUsage, + StorageVolume, +) +from selfprivacy_api.services import get_services_by_location from selfprivacy_api.utils.block_devices import BlockDevices @@ -25,7 +33,15 @@ class Storage: model=volume.model, serial=volume.serial, type=volume.type, - usages=[], + usages=[ + ServiceStorageUsage( + service=service_to_graphql_service(service), + title=service.get_display_name(), + used_space=str(service.get_storage_usage()), + volume=get_volume_by_id(service.get_location()), + ) + for service in get_services_by_location(volume.name) + ], ) for volume in BlockDevices().get_block_devices() ] diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index 1b6332e..d17d283 100644 --- 
a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -136,6 +136,7 @@ class System: info: SystemInfo = SystemInfo() provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info) busy: bool = False + @strawberry.field def working_directory(self) -> str: """Get working directory""" diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py index fbc2aed..eb9b7be 100644 --- a/selfprivacy_api/services/__init__.py +++ b/selfprivacy_api/services/__init__.py @@ -10,7 +10,7 @@ from selfprivacy_api.services.ocserv import Ocserv from selfprivacy_api.services.service import Service -services = [ +services: list[Service] = [ Bitwarden(), Gitea(), MailServer(), @@ -37,3 +37,7 @@ def get_enabled_services() -> typing.List[Service]: def get_disabled_services() -> typing.List[Service]: return [service for service in services if not service.is_enabled()] + + +def get_services_by_location(location: str) -> typing.List[Service]: + return [service for service in services if service.get_location() == location] -- 2.42.0 From f750056ad802abb212fe3d21464a5aaf2699ad79 Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 13 Aug 2022 02:18:13 +0400 Subject: [PATCH 18/50] fix circular import --- selfprivacy_api/graphql/common_types/service.py | 10 ++++++++-- selfprivacy_api/graphql/common_types/storage_usage.py | 9 --------- selfprivacy_api/graphql/queries/storage.py | 2 +- 3 files changed, 9 insertions(+), 12 deletions(-) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index fc13974..c9cf50e 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -4,15 +4,21 @@ import strawberry from selfprivacy_api.graphql.common_types.dns import DnsRecord from selfprivacy_api.graphql.common_types.storage_usage import ( - ServiceStorageUsage, + StorageUsageInterface, StorageVolume, ) -from 
selfprivacy_api.graphql.queries.services import get_volume_by_id from selfprivacy_api.services import get_service_by_id, get_services_by_location from selfprivacy_api.services import Service as ServiceInterface from selfprivacy_api.utils.block_devices import BlockDevices +@strawberry.type +class ServiceStorageUsage(StorageUsageInterface): + """Storage usage for a service""" + + service: typing.Optional["Service"] + + @strawberry.enum class ServiceStatusEnum(Enum): RUNNING = "RUNNING" diff --git a/selfprivacy_api/graphql/common_types/storage_usage.py b/selfprivacy_api/graphql/common_types/storage_usage.py index cdff53f..f45966e 100644 --- a/selfprivacy_api/graphql/common_types/storage_usage.py +++ b/selfprivacy_api/graphql/common_types/storage_usage.py @@ -1,8 +1,6 @@ import typing import strawberry -from selfprivacy_api.graphql.common_types.service import Service - @strawberry.type class StorageVolume: @@ -24,10 +22,3 @@ class StorageUsageInterface: used_space: str volume: typing.Optional[StorageVolume] title: str - - -@strawberry.type -class ServiceStorageUsage(StorageUsageInterface): - """Storage usage for a service""" - - service: typing.Optional["Service"] diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py index 12071ce..73b9a8d 100644 --- a/selfprivacy_api/graphql/queries/storage.py +++ b/selfprivacy_api/graphql/queries/storage.py @@ -3,11 +3,11 @@ import typing import strawberry from selfprivacy_api.graphql.common_types.service import ( + ServiceStorageUsage, service_to_graphql_service, get_volume_by_id, ) from selfprivacy_api.graphql.common_types.storage_usage import ( - ServiceStorageUsage, StorageVolume, ) from selfprivacy_api.services import get_services_by_location -- 2.42.0 From bb14adb8bc5bdf17b988d6887d56f498c0707f51 Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 13 Aug 2022 02:26:47 +0400 Subject: [PATCH 19/50] Remove infinite recursion --- .../graphql/common_types/service.py | 38 
+++++++++++++++++-- .../graphql/common_types/storage_usage.py | 24 ------------ selfprivacy_api/graphql/queries/storage.py | 11 +----- 3 files changed, 35 insertions(+), 38 deletions(-) delete mode 100644 selfprivacy_api/graphql/common_types/storage_usage.py diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index c9cf50e..fc9f205 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -3,14 +3,44 @@ import typing import strawberry from selfprivacy_api.graphql.common_types.dns import DnsRecord -from selfprivacy_api.graphql.common_types.storage_usage import ( - StorageUsageInterface, - StorageVolume, -) from selfprivacy_api.services import get_service_by_id, get_services_by_location from selfprivacy_api.services import Service as ServiceInterface from selfprivacy_api.utils.block_devices import BlockDevices +def get_usages(root: "StorageVolume") -> list["StorageUsageInterface"]: + """Get usages of a volume""" + return [ + ServiceStorageUsage( + service=service_to_graphql_service(service), + title=service.get_display_name(), + used_space=str(service.get_storage_usage()), + volume=get_volume_by_id(service.get_location()), + ) + for service in get_services_by_location(root.name) + ] + +@strawberry.type +class StorageVolume: + """Stats and basic info about a volume or a system disk.""" + + total_space: str + free_space: str + used_space: str + root: bool + name: str + model: typing.Optional[str] + serial: typing.Optional[str] + type: str + usages: list["StorageUsageInterface"] = strawberry.field(resolver=get_usages) + + + +@strawberry.interface +class StorageUsageInterface: + used_space: str + volume: typing.Optional[StorageVolume] + title: str + @strawberry.type class ServiceStorageUsage(StorageUsageInterface): diff --git a/selfprivacy_api/graphql/common_types/storage_usage.py b/selfprivacy_api/graphql/common_types/storage_usage.py deleted file mode 
100644 index f45966e..0000000 --- a/selfprivacy_api/graphql/common_types/storage_usage.py +++ /dev/null @@ -1,24 +0,0 @@ -import typing -import strawberry - - -@strawberry.type -class StorageVolume: - """Stats and basic info about a volume or a system disk.""" - - total_space: str - free_space: str - used_space: str - root: bool - name: str - model: typing.Optional[str] - serial: typing.Optional[str] - type: str - usages: list["StorageUsageInterface"] - - -@strawberry.interface -class StorageUsageInterface: - used_space: str - volume: typing.Optional[StorageVolume] - title: str diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py index 73b9a8d..273b6a7 100644 --- a/selfprivacy_api/graphql/queries/storage.py +++ b/selfprivacy_api/graphql/queries/storage.py @@ -7,7 +7,7 @@ from selfprivacy_api.graphql.common_types.service import ( service_to_graphql_service, get_volume_by_id, ) -from selfprivacy_api.graphql.common_types.storage_usage import ( +from selfprivacy_api.graphql.common_types.service import ( StorageVolume, ) from selfprivacy_api.services import get_services_by_location @@ -33,15 +33,6 @@ class Storage: model=volume.model, serial=volume.serial, type=volume.type, - usages=[ - ServiceStorageUsage( - service=service_to_graphql_service(service), - title=service.get_display_name(), - used_space=str(service.get_storage_usage()), - volume=get_volume_by_id(service.get_location()), - ) - for service in get_services_by_location(volume.name) - ], ) for volume in BlockDevices().get_block_devices() ] -- 2.42.0 From 79bc2668e19701d8ef29498779b11ff260a6479e Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 13 Aug 2022 02:32:08 +0400 Subject: [PATCH 20/50] fix --- selfprivacy_api/graphql/common_types/service.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index fc9f205..1634f89 100644 --- 
a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -7,6 +7,7 @@ from selfprivacy_api.services import get_service_by_id, get_services_by_location from selfprivacy_api.services import Service as ServiceInterface from selfprivacy_api.utils.block_devices import BlockDevices + def get_usages(root: "StorageVolume") -> list["StorageUsageInterface"]: """Get usages of a volume""" return [ @@ -19,6 +20,7 @@ def get_usages(root: "StorageVolume") -> list["StorageUsageInterface"]: for service in get_services_by_location(root.name) ] + @strawberry.type class StorageVolume: """Stats and basic info about a volume or a system disk.""" @@ -34,7 +36,6 @@ class StorageVolume: usages: list["StorageUsageInterface"] = strawberry.field(resolver=get_usages) - @strawberry.interface class StorageUsageInterface: used_space: str @@ -132,13 +133,4 @@ def get_volume_by_id(volume_id: str) -> typing.Optional[StorageVolume]: model=volume.model, serial=volume.serial, type=volume.type, - usages=[ - ServiceStorageUsage( - service=service_to_graphql_service(service), - title=service.get_display_name(), - used_space=str(service.get_storage_usage()), - volume=get_volume_by_id(service.get_location()), - ) - for service in get_services_by_location(volume.name) - ], ) -- 2.42.0 From d8a8b2ec29c537255a8082c22da01496a31424be Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 13 Aug 2022 03:39:37 +0400 Subject: [PATCH 21/50] fixes --- selfprivacy_api/graphql/common_types/service.py | 11 ++++++++--- .../{storage_mutation.py => storage_mutations.py} | 0 selfprivacy_api/graphql/queries/storage.py | 7 +------ selfprivacy_api/graphql/schema.py | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) rename selfprivacy_api/graphql/mutations/{storage_mutation.py => storage_mutations.py} (100%) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 1634f89..65e418f 100644 --- 
a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -33,7 +33,10 @@ class StorageVolume: model: typing.Optional[str] serial: typing.Optional[str] type: str - usages: list["StorageUsageInterface"] = strawberry.field(resolver=get_usages) + @strawberry.field + def usages(self) -> list["StorageUsageInterface"]: + """Get usages of a volume""" + return get_usages(self) @strawberry.interface @@ -79,7 +82,6 @@ def get_storage_usage(root: "Service") -> ServiceStorageUsage: @strawberry.type class Service: - storage_usage: ServiceStorageUsage = strawberry.field(resolver=get_storage_usage) id: str display_name: str description: str @@ -90,7 +92,10 @@ class Service: status: ServiceStatusEnum url: typing.Optional[str] dns_records: typing.Optional[typing.List[DnsRecord]] - + @strawberry.field + def storage_usage(self) -> ServiceStorageUsage: + """Get storage usage for a service""" + return get_storage_usage(self) def service_to_graphql_service(service: ServiceInterface) -> Service: """Convert service to graphql service""" diff --git a/selfprivacy_api/graphql/mutations/storage_mutation.py b/selfprivacy_api/graphql/mutations/storage_mutations.py similarity index 100% rename from selfprivacy_api/graphql/mutations/storage_mutation.py rename to selfprivacy_api/graphql/mutations/storage_mutations.py diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py index 273b6a7..6800518 100644 --- a/selfprivacy_api/graphql/queries/storage.py +++ b/selfprivacy_api/graphql/queries/storage.py @@ -2,15 +2,10 @@ # pylint: disable=too-few-public-methods import typing import strawberry -from selfprivacy_api.graphql.common_types.service import ( - ServiceStorageUsage, - service_to_graphql_service, - get_volume_by_id, -) + from selfprivacy_api.graphql.common_types.service import ( StorageVolume, ) -from selfprivacy_api.services import get_services_by_location from selfprivacy_api.utils.block_devices import 
BlockDevices diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index aa7a6a4..271d066 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -6,7 +6,7 @@ from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations -from selfprivacy_api.graphql.mutations.storage_mutation import StorageMutations +from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from selfprivacy_api.graphql.queries.api_queries import Api -- 2.42.0 From 69557fcf50ef759a2aa94a312ac5dd510a5ce8a7 Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 13 Aug 2022 03:59:48 +0400 Subject: [PATCH 22/50] add task registry --- selfprivacy_api/task_registry.py | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 selfprivacy_api/task_registry.py diff --git a/selfprivacy_api/task_registry.py b/selfprivacy_api/task_registry.py new file mode 100644 index 0000000..82eaf06 --- /dev/null +++ b/selfprivacy_api/task_registry.py @@ -0,0 +1,4 @@ +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.jobs.test import test_job +from selfprivacy_api.restic_controller.tasks import * +from selfprivacy_api.services.generic_service_mover import move_service -- 2.42.0 From 1b1bb4966a1a9c9b8748dac023a7d1cf2de7237c Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 15 Aug 2022 22:37:02 +0400 Subject: [PATCH 23/50] Use jobs file to transfer data between threads --- .../graphql/common_types/service.py | 3 + selfprivacy_api/jobs/__init__.py | 130 ++++++------------ selfprivacy_api/utils/__init__.py | 10 ++ 3 files changed, 56 insertions(+), 87 deletions(-) diff --git a/selfprivacy_api/graphql/common_types/service.py 
b/selfprivacy_api/graphql/common_types/service.py index 65e418f..99310f7 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -33,6 +33,7 @@ class StorageVolume: model: typing.Optional[str] serial: typing.Optional[str] type: str + @strawberry.field def usages(self) -> list["StorageUsageInterface"]: """Get usages of a volume""" @@ -92,11 +93,13 @@ class Service: status: ServiceStatusEnum url: typing.Optional[str] dns_records: typing.Optional[typing.List[DnsRecord]] + @strawberry.field def storage_usage(self) -> ServiceStorageUsage: """Get storage usage for a service""" return get_storage_usage(self) + def service_to_graphql_service(service: ServiceInterface) -> Service: """Convert service to graphql service""" return Service( diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index c00fe79..5475e05 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -16,6 +16,7 @@ A job is a dictionary with the following keys: """ import typing import datetime +from uuid import UUID import asyncio import json import os @@ -23,6 +24,10 @@ import time import uuid from enum import Enum +from pydantic import BaseModel + +from selfprivacy_api.utils import ReadUserData, UserDataFiles, WriteUserData + class JobStatus(Enum): """ @@ -35,47 +40,22 @@ class JobStatus(Enum): ERROR = "ERROR" -class Job: +class Job(BaseModel): """ Job class. 
""" - def __init__( - self, - name: str, - description: str, - status: JobStatus, - status_text: typing.Optional[str], - progress: typing.Optional[int], - created_at: datetime.datetime, - updated_at: datetime.datetime, - finished_at: typing.Optional[datetime.datetime], - error: typing.Optional[str], - result: typing.Optional[str], - ): - self.id = str(uuid.uuid4()) - self.name = name - self.description = description - self.status = status - self.status_text = status_text or "" - self.progress = progress or 0 - self.created_at = created_at - self.updated_at = updated_at - self.finished_at = finished_at - self.error = error - self.result = result - - def __str__(self) -> str: - """ - Convert the job to a string. - """ - return f"{self.name} - {self.status}" - - def __repr__(self) -> str: - """ - Convert the job to a string. - """ - return f"{self.name} - {self.status}" + uid: UUID = uuid.uuid4() + name: str + description: str + status: JobStatus + status_text: typing.Optional[str] + progress: typing.Optional[int] + created_at: datetime.datetime + updated_at: datetime.datetime + finished_at: typing.Optional[datetime.datetime] + error: typing.Optional[str] + result: typing.Optional[str] class Jobs: @@ -95,8 +75,7 @@ class Jobs: if Jobs.__instance is None: raise Exception("Couldn't init Jobs singleton!") return Jobs.__instance - else: - return Jobs.__instance + return Jobs.__instance def __init__(self): """ @@ -106,43 +85,13 @@ class Jobs: raise Exception("This class is a singleton!") else: Jobs.__instance = self - self.jobs = [ - Job( - name="Init job", - description="Initial job", - status=JobStatus.FINISHED, - status_text="", - progress=100, - created_at=datetime.datetime.now(), - updated_at=datetime.datetime.now(), - finished_at=datetime.datetime.now(), - error=None, - result=None, - ) - ] - # Observers of the jobs list. 
- self.observers = [] - def add_observer(self, observer: typing.Callable[[typing.List[Job]], None]) -> None: + def reset(self) -> None: """ - Add an observer to the jobs list. + Reset the jobs list. """ - self.observers.append(observer) - - def remove_observer( - self, observer: typing.Callable[[typing.List[Job]], None] - ) -> None: - """ - Remove an observer from the jobs list. - """ - self.observers.remove(observer) - - def _notify_observers(self) -> None: - """ - Notify the observers of the jobs list. - """ - for observer in self.observers: - observer(self.jobs) + with WriteUserData(UserDataFiles.JOBS) as user_data: + user_data = [] def add( self, @@ -167,19 +116,20 @@ class Jobs: error=None, result=None, ) - self.jobs.append(job) - # Notify the observers. - self._notify_observers() - + with WriteUserData(UserDataFiles.JOBS) as user_data: + try: + user_data.append(job.dict()) + except json.decoder.JSONDecodeError: + user_data = [] + user_data.append(job.dict()) return job def remove(self, job: Job) -> None: """ Remove a job from the jobs list. """ - self.jobs.remove(job) - # Notify the observers. - self._notify_observers() + with WriteUserData(UserDataFiles.JOBS) as user_data: + user_data = [x for x in user_data if x["uid"] != job.uid] def update( self, @@ -207,11 +157,12 @@ class Jobs: job.updated_at = datetime.datetime.now() job.error = error job.result = result - if status == JobStatus.FINISHED or status == JobStatus.ERROR: + if status in (JobStatus.FINISHED, JobStatus.ERROR): job.finished_at = datetime.datetime.now() - # Notify the observers. - self._notify_observers() + with WriteUserData(UserDataFiles.JOBS) as user_data: + user_data = [x for x in user_data if x["uid"] != job.uid] + user_data.append(job.dict()) return job @@ -219,13 +170,18 @@ class Jobs: """ Get a job from the jobs list. 
""" - for job in self.jobs: - if job.id == id: - return job + with ReadUserData(UserDataFiles.JOBS) as user_data: + for job in user_data: + if job["uid"] == id: + return Job(**job) return None def get_jobs(self) -> typing.List[Job]: """ Get the jobs list. """ - return self.jobs + with ReadUserData(UserDataFiles.JOBS) as user_data: + try: + return [Job(**job) for job in user_data] + except json.decoder.JSONDecodeError: + return [] diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 8ab26d1..f34fdad 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -10,6 +10,7 @@ import portalocker USERDATA_FILE = "/etc/nixos/userdata/userdata.json" TOKENS_FILE = "/etc/nixos/userdata/tokens.json" +JOBS_FILE = "/etc/nixos/userdata/jobs.json" DOMAIN_FILE = "/var/domain" @@ -18,6 +19,7 @@ class UserDataFiles(Enum): USERDATA = 0 TOKENS = 1 + JOBS = 2 def get_domain(): @@ -35,6 +37,12 @@ class WriteUserData(object): self.userdata_file = open(USERDATA_FILE, "r+", encoding="utf-8") elif file_type == UserDataFiles.TOKENS: self.userdata_file = open(TOKENS_FILE, "r+", encoding="utf-8") + elif file_type == UserDataFiles.JOBS: + # Make sure file exists + if not os.path.isfile(JOBS_FILE): + with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: + jobs_file.write("[]") + self.userdata_file = open(JOBS_FILE, "r+", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_EX) @@ -60,6 +68,8 @@ class ReadUserData(object): self.userdata_file = open(USERDATA_FILE, "r", encoding="utf-8") elif file_type == UserDataFiles.TOKENS: self.userdata_file = open(TOKENS_FILE, "r", encoding="utf-8") + elif file_type == UserDataFiles.JOBS: + self.userdata_file = open(JOBS_FILE, "r", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_SH) -- 2.42.0 From f940a23e7e5acd1ace7099cae4d3f8f81c0ad080 Mon Sep 17 00:00:00 2001 
From: inexcode Date: Mon, 15 Aug 2022 22:51:01 +0400 Subject: [PATCH 24/50] Make sure jobs file exists on read --- selfprivacy_api/utils/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index f34fdad..1904f76 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -69,6 +69,10 @@ class ReadUserData(object): elif file_type == UserDataFiles.TOKENS: self.userdata_file = open(TOKENS_FILE, "r", encoding="utf-8") elif file_type == UserDataFiles.JOBS: + # Make sure file exists + if not os.path.isfile(JOBS_FILE): + with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: + jobs_file.write("[]") self.userdata_file = open(JOBS_FILE, "r", encoding="utf-8") else: raise ValueError("Unknown file type") -- 2.42.0 From af902923ab8a4c7e149f24c31ad53784d61dce10 Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 15 Aug 2022 23:12:50 +0400 Subject: [PATCH 25/50] replace uuid with str --- selfprivacy_api/jobs/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 5475e05..12430ee 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -45,7 +45,7 @@ class Job(BaseModel): Job class. """ - uid: UUID = uuid.uuid4() + uid: str = uuid.uuid4().urn name: str description: str status: JobStatus -- 2.42.0 From 5f34337fb40bb5d6d484f8907285eb249861b607 Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 15 Aug 2022 23:37:34 +0400 Subject: [PATCH 26/50] Serialize custom types --- selfprivacy_api/jobs/__init__.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 12430ee..e218d10 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -33,7 +33,6 @@ class JobStatus(Enum): """ Status of a job. 
""" - CREATED = "CREATED" RUNNING = "RUNNING" FINISHED = "FINISHED" @@ -44,8 +43,7 @@ class Job(BaseModel): """ Job class. """ - - uid: str = uuid.uuid4().urn + uid: UUID = uuid.uuid4() name: str description: str status: JobStatus @@ -118,10 +116,10 @@ class Jobs: ) with WriteUserData(UserDataFiles.JOBS) as user_data: try: - user_data.append(job.dict()) + user_data.append(json.loads(job.json())) except json.decoder.JSONDecodeError: user_data = [] - user_data.append(job.dict()) + user_data.append(json.loads(job.json())) return job def remove(self, job: Job) -> None: @@ -162,7 +160,7 @@ class Jobs: with WriteUserData(UserDataFiles.JOBS) as user_data: user_data = [x for x in user_data if x["uid"] != job.uid] - user_data.append(job.dict()) + user_data.append(json.loads(job.json())) return job -- 2.42.0 From 64425ac4439ffb2f541fe42977c1fb8cbb4fa2bb Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 15 Aug 2022 23:48:43 +0400 Subject: [PATCH 27/50] fix uid compare --- selfprivacy_api/jobs/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index e218d10..c87db68 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -127,7 +127,7 @@ class Jobs: Remove a job from the jobs list. 
""" with WriteUserData(UserDataFiles.JOBS) as user_data: - user_data = [x for x in user_data if x["uid"] != job.uid] + user_data = [x for x in user_data if x["uid"] != job.uid.urn] def update( self, @@ -159,7 +159,7 @@ class Jobs: job.finished_at = datetime.datetime.now() with WriteUserData(UserDataFiles.JOBS) as user_data: - user_data = [x for x in user_data if x["uid"] != job.uid] + user_data = [x for x in user_data if x["uid"] != job.uid.urn] user_data.append(json.loads(job.json())) return job -- 2.42.0 From 039dd2f80ee898ca173a2d264c2fc149efc632e2 Mon Sep 17 00:00:00 2001 From: inexcode Date: Tue, 16 Aug 2022 01:31:24 +0400 Subject: [PATCH 28/50] Fix jobs file --- selfprivacy_api/jobs/__init__.py | 27 ++++++++++++++++++--------- selfprivacy_api/utils/__init__.py | 4 ++-- tests/conftest.py | 13 ++++++++++--- tests/data/jobs.json | 1 + tests/test_jobs.py | 30 ++++++++++++++++++++++++++++++ 5 files changed, 61 insertions(+), 14 deletions(-) create mode 100644 tests/data/jobs.json create mode 100644 tests/test_jobs.py diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index c87db68..8202418 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -89,7 +89,7 @@ class Jobs: Reset the jobs list. """ with WriteUserData(UserDataFiles.JOBS) as user_data: - user_data = [] + user_data["jobs"] = [] def add( self, @@ -116,10 +116,11 @@ class Jobs: ) with WriteUserData(UserDataFiles.JOBS) as user_data: try: - user_data.append(json.loads(job.json())) + if "jobs" not in user_data: + user_data["jobs"] = [] + user_data["jobs"].append(json.loads(job.json())) except json.decoder.JSONDecodeError: - user_data = [] - user_data.append(json.loads(job.json())) + user_data["jobs"] = [json.loads(job.json())] return job def remove(self, job: Job) -> None: @@ -127,7 +128,9 @@ class Jobs: Remove a job from the jobs list. 
""" with WriteUserData(UserDataFiles.JOBS) as user_data: - user_data = [x for x in user_data if x["uid"] != job.uid.urn] + if "jobs" not in user_data: + user_data["jobs"] = [] + user_data["jobs"] = [x for x in user_data["jobs"] if x["uid"] != str(job.uid)] def update( self, @@ -159,8 +162,10 @@ class Jobs: job.finished_at = datetime.datetime.now() with WriteUserData(UserDataFiles.JOBS) as user_data: - user_data = [x for x in user_data if x["uid"] != job.uid.urn] - user_data.append(json.loads(job.json())) + if "jobs" not in user_data: + user_data["jobs"] = [] + user_data["jobs"] = [x for x in user_data["jobs"] if x["uid"] != str(job.uid)] + user_data["jobs"].append(json.loads(job.json())) return job @@ -169,7 +174,9 @@ class Jobs: Get a job from the jobs list. """ with ReadUserData(UserDataFiles.JOBS) as user_data: - for job in user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for job in user_data["jobs"]: if job["uid"] == id: return Job(**job) return None @@ -180,6 +187,8 @@ class Jobs: """ with ReadUserData(UserDataFiles.JOBS) as user_data: try: - return [Job(**job) for job in user_data] + if "jobs" not in user_data: + user_data["jobs"] = [] + return [Job(**job) for job in user_data["jobs"]] except json.decoder.JSONDecodeError: return [] diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 1904f76..14a9096 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -41,7 +41,7 @@ class WriteUserData(object): # Make sure file exists if not os.path.isfile(JOBS_FILE): with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: - jobs_file.write("[]") + jobs_file.write("{}") self.userdata_file = open(JOBS_FILE, "r+", encoding="utf-8") else: raise ValueError("Unknown file type") @@ -72,7 +72,7 @@ class ReadUserData(object): # Make sure file exists if not os.path.isfile(JOBS_FILE): with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: - jobs_file.write("[]") + jobs_file.write("{}") 
self.userdata_file = open(JOBS_FILE, "r", encoding="utf-8") else: raise ValueError("Unknown file type") diff --git a/tests/conftest.py b/tests/conftest.py index 2eca0f6..2ef4b28 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,6 +13,13 @@ def tokens_file(mocker, shared_datadir): ) return mock +@pytest.fixture +def jobs_file(mocker, shared_datadir): + """Mock tokens file.""" + mock = mocker.patch( + "selfprivacy_api.utils.JOBS_FILE", shared_datadir / "jobs.json" + ) + return mock @pytest.fixture def huey_database(mocker, shared_datadir): @@ -24,14 +31,14 @@ def huey_database(mocker, shared_datadir): @pytest.fixture -def client(tokens_file, huey_database): +def client(tokens_file, huey_database, jobs_file): from selfprivacy_api.app import app return TestClient(app) @pytest.fixture -def authorized_client(tokens_file, huey_database): +def authorized_client(tokens_file, huey_database, jobs_file): """Authorized test client fixture.""" from selfprivacy_api.app import app @@ -41,7 +48,7 @@ def authorized_client(tokens_file, huey_database): @pytest.fixture -def wrong_auth_client(tokens_file, huey_database): +def wrong_auth_client(tokens_file, huey_database, jobs_file): """Wrong token test client fixture.""" from selfprivacy_api.app import app diff --git a/tests/data/jobs.json b/tests/data/jobs.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tests/data/jobs.json @@ -0,0 +1 @@ +{} diff --git a/tests/test_jobs.py b/tests/test_jobs.py new file mode 100644 index 0000000..82c40d3 --- /dev/null +++ b/tests/test_jobs.py @@ -0,0 +1,30 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +import json +import pytest + +from selfprivacy_api.utils import WriteUserData, ReadUserData +from selfprivacy_api.jobs import Jobs, JobStatus + +def test_jobs(jobs_file, shared_datadir): + jobs = Jobs() + assert jobs.get_jobs() == [] + + test_job = jobs.add( + name="Test job", + description="This is a test job.", + status=JobStatus.CREATED, 
+ status_text="Status text", + progress=0, + ) + + assert jobs.get_jobs() == [test_job] + + jobs.update( + job=test_job, + status=JobStatus.RUNNING, + status_text="Status text", + progress=50, + ) + + assert jobs.get_jobs() == [test_job] -- 2.42.0 From 7fe51eb665f0f8ab18638af3cc4452f9c0d87b34 Mon Sep 17 00:00:00 2001 From: inexcode Date: Tue, 16 Aug 2022 01:44:22 +0400 Subject: [PATCH 29/50] Fix update function --- selfprivacy_api/jobs/__init__.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 8202418..10098ca 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -130,7 +130,10 @@ class Jobs: with WriteUserData(UserDataFiles.JOBS) as user_data: if "jobs" not in user_data: user_data["jobs"] = [] - user_data["jobs"] = [x for x in user_data["jobs"] if x["uid"] != str(job.uid)] + for i, j in enumerate(user_data["jobs"]): + if j["uid"] == str(job.uid): + del user_data["jobs"][i] + break def update( self, @@ -164,8 +167,10 @@ class Jobs: with WriteUserData(UserDataFiles.JOBS) as user_data: if "jobs" not in user_data: user_data["jobs"] = [] - user_data["jobs"] = [x for x in user_data["jobs"] if x["uid"] != str(job.uid)] - user_data["jobs"].append(json.loads(job.json())) + for i, j in enumerate(user_data["jobs"]): + if j["uid"] == str(job.uid): + user_data["jobs"][i] = json.loads(job.json()) + break return job -- 2.42.0 From 87c036de7ff3ec8dd017a9d8030626a60a0a29c9 Mon Sep 17 00:00:00 2001 From: inexcode Date: Thu, 18 Aug 2022 00:58:56 +0400 Subject: [PATCH 30/50] Add GraphQL endpoints related to binds --- selfprivacy_api/graphql/common_types/jobs.py | 49 ++ .../graphql/mutations/mutation_interface.py | 8 + .../graphql/mutations/services_mutations.py | 168 ++++++ .../graphql/mutations/storage_mutations.py | 40 ++ selfprivacy_api/graphql/queries/jobs.py | 40 +- selfprivacy_api/graphql/queries/services.py | 2 +- 
selfprivacy_api/graphql/queries/system.py | 9 +- selfprivacy_api/jobs/__init__.py | 33 +- selfprivacy_api/jobs/migrate_to_binds.py | 285 +++++++++++ selfprivacy_api/jobs/test.py | 1 + .../services/bitwarden/__init__.py | 7 +- selfprivacy_api/services/gitea/__init__.py | 7 +- .../services/mailserver/__init__.py | 5 +- .../services/nextcloud/__init__.py | 7 +- selfprivacy_api/services/ocserv/__init__.py | 4 +- selfprivacy_api/services/pleroma/__init__.py | 7 +- selfprivacy_api/services/service.py | 3 +- selfprivacy_api/utils/block_devices.py | 30 +- selfprivacy_api/utils/migrate_to_binds.py | 112 ---- tests/conftest.py | 6 +- tests/test_block_device_utils.py | 484 ++++++++++++++++++ tests/test_block_device_utils/no_devices.json | 54 ++ tests/test_block_device_utils/only_root.json | 59 +++ tests/test_block_device_utils/undefined.json | 52 ++ tests/test_jobs.py | 2 + tests/test_network_utils.py | 43 ++ 26 files changed, 1333 insertions(+), 184 deletions(-) create mode 100644 selfprivacy_api/graphql/common_types/jobs.py create mode 100644 selfprivacy_api/graphql/mutations/services_mutations.py create mode 100644 selfprivacy_api/jobs/migrate_to_binds.py delete mode 100644 selfprivacy_api/utils/migrate_to_binds.py create mode 100644 tests/test_block_device_utils.py create mode 100644 tests/test_block_device_utils/no_devices.json create mode 100644 tests/test_block_device_utils/only_root.json create mode 100644 tests/test_block_device_utils/undefined.json diff --git a/selfprivacy_api/graphql/common_types/jobs.py b/selfprivacy_api/graphql/common_types/jobs.py new file mode 100644 index 0000000..4b095c8 --- /dev/null +++ b/selfprivacy_api/graphql/common_types/jobs.py @@ -0,0 +1,49 @@ +"""Jobs status""" +# pylint: disable=too-few-public-methods +import datetime +import typing +import strawberry + +from selfprivacy_api.jobs import Job, Jobs + + +@strawberry.type +class ApiJob: + """Job type for GraphQL.""" + + uid: str + name: str + description: str + status: str + 
status_text: typing.Optional[str] + progress: typing.Optional[int] + created_at: datetime.datetime + updated_at: datetime.datetime + finished_at: typing.Optional[datetime.datetime] + error: typing.Optional[str] + result: typing.Optional[str] + + +def job_to_api_job(job: Job) -> ApiJob: + """Convert a Job from jobs controller to a GraphQL ApiJob.""" + return ApiJob( + uid=str(job.uid), + name=job.name, + description=job.description, + status=job.status.name, + status_text=job.status_text, + progress=job.progress, + created_at=job.created_at, + updated_at=job.updated_at, + finished_at=job.finished_at, + error=job.error, + result=job.result, + ) + + +def get_api_job_by_id(job_id: str) -> typing.Optional[ApiJob]: + """Get a job for GraphQL by its ID.""" + job = Jobs.get_instance().get_job(job_id) + if job is None: + return None + return job_to_api_job(job) diff --git a/selfprivacy_api/graphql/mutations/mutation_interface.py b/selfprivacy_api/graphql/mutations/mutation_interface.py index 32146fc..33a6b02 100644 --- a/selfprivacy_api/graphql/mutations/mutation_interface.py +++ b/selfprivacy_api/graphql/mutations/mutation_interface.py @@ -1,4 +1,7 @@ import strawberry +import typing + +from selfprivacy_api.graphql.common_types.jobs import ApiJob @strawberry.interface @@ -11,3 +14,8 @@ class MutationReturnInterface: @strawberry.type class GenericMutationReturn(MutationReturnInterface): pass + + +@strawberry.type +class GenericJobButationReturn(MutationReturnInterface): + job: typing.Optional[ApiJob] = None diff --git a/selfprivacy_api/graphql/mutations/services_mutations.py b/selfprivacy_api/graphql/mutations/services_mutations.py new file mode 100644 index 0000000..b3aee32 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/services_mutations.py @@ -0,0 +1,168 @@ +"""Services mutations""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from selfprivacy_api.graphql.common_types.jobs import job_to_api_job + +from 
selfprivacy_api.graphql.common_types.service import ( + Service, + service_to_graphql_service, +) +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericJobButationReturn, + GenericMutationReturn, +) + +from selfprivacy_api.services import get_service_by_id +from selfprivacy_api.utils.block_devices import BlockDevices + + +@strawberry.type +class ServiceMutationReturn(GenericMutationReturn): + """Service mutation return type.""" + + service: typing.Optional[Service] = None + + +@strawberry.input +class MoveServiceInput: + """Move service input type.""" + + service_id: str + location: str + + +@strawberry.type +class ServiceJobMutationReturn(GenericJobButationReturn): + """Service job mutation return type.""" + + service: typing.Optional[Service] = None + + +@strawberry.type +class ServicesMutations: + """Services mutations.""" + + @strawberry.mutation + def enable_service(self, service_id: str) -> ServiceMutationReturn: + """Enable service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.enable() + return ServiceMutationReturn( + success=True, + message="Service enabled.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation + def disable_service(self, service_id: str) -> ServiceMutationReturn: + """Disable service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.disable() + return ServiceMutationReturn( + success=True, + message="Service disabled.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation + def stop_service(self, service_id: str) -> ServiceMutationReturn: + """Stop service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not 
found.", + code=404, + ) + service.stop() + return ServiceMutationReturn( + success=True, + message="Service stopped.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation + def start_service(self, service_id: str) -> ServiceMutationReturn: + """Start service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.start() + return ServiceMutationReturn( + success=True, + message="Service started.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation + def restart_service(self, service_id: str) -> ServiceMutationReturn: + """Restart service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.restart() + return ServiceMutationReturn( + success=True, + message="Service restarted.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation + def move_service(self, input: MoveServiceInput) -> ServiceJobMutationReturn: + """Move service.""" + service = get_service_by_id(input.service_id) + if service is None: + return ServiceJobMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + if not service.is_movable(): + return ServiceJobMutationReturn( + success=False, + message="Service is not movable.", + code=400, + service=service_to_graphql_service(service), + ) + volume = BlockDevices().get_block_device(input.location) + if volume is None: + return ServiceJobMutationReturn( + success=False, + message="Volume not found.", + code=404, + service=service_to_graphql_service(service), + ) + job = service.move_to_volume(volume) + return ServiceJobMutationReturn( + success=True, + message="Service moved.", + code=200, + service=service_to_graphql_service(service), + job=job_to_api_job(job), + ) diff --git 
a/selfprivacy_api/graphql/mutations/storage_mutations.py b/selfprivacy_api/graphql/mutations/storage_mutations.py
index 1275945..84e97dc 100644
--- a/selfprivacy_api/graphql/mutations/storage_mutations.py
+++ b/selfprivacy_api/graphql/mutations/storage_mutations.py
@@ -1,10 +1,28 @@
 """Storage devices mutations"""
 import strawberry
 from selfprivacy_api.graphql import IsAuthenticated
+from selfprivacy_api.graphql.common_types.jobs import job_to_api_job
 from selfprivacy_api.utils.block_devices import BlockDevices
 from selfprivacy_api.graphql.mutations.mutation_interface import (
+    GenericJobButationReturn,
     GenericMutationReturn,
 )
+from selfprivacy_api.jobs.migrate_to_binds import (
+    BindMigrationConfig,
+    is_bind_migrated,
+    start_bind_migration,
+)
+
+
+@strawberry.input
+class MigrateToBindsInput:
+    """Migrate to binds input"""
+
+    email_block_device: str
+    bitwarden_block_device: str
+    gitea_block_device: str
+    nextcloud_block_device: str
+    pleroma_block_device: str
 
 
 @strawberry.type
@@ -59,3 +77,25 @@ class StorageMutations:
         return GenericMutationReturn(
             success=False, code=409, message="Volume not unmounted (already unmounted?)"
         )
+
+    def migrate_to_binds(self, input: MigrateToBindsInput) -> GenericJobButationReturn:
+        """Migrate to binds"""
+        if is_bind_migrated():
+            return GenericJobButationReturn(
+                success=False, code=409, message="Already migrated to binds"
+            )
+        job = start_bind_migration(
+            BindMigrationConfig(
+                email_block_device=input.email_block_device,
+                bitwarden_block_device=input.bitwarden_block_device,
+                gitea_block_device=input.gitea_block_device,
+                nextcloud_block_device=input.nextcloud_block_device,
+                pleroma_block_device=input.pleroma_block_device,
+            )
+        )
+        return GenericJobButationReturn(
+            success=True,
+            code=200,
+            message="Migration to binds started, rebuild the system to apply changes",
+            job=job_to_api_job(job),
+        )
diff --git a/selfprivacy_api/graphql/queries/jobs.py b/selfprivacy_api/graphql/queries/jobs.py
index
4d2a3eb..426c563 100644 --- a/selfprivacy_api/graphql/queries/jobs.py +++ b/selfprivacy_api/graphql/queries/jobs.py @@ -2,25 +2,15 @@ # pylint: disable=too-few-public-methods import typing import strawberry -import datetime +from selfprivacy_api.graphql.common_types.jobs import ( + ApiJob, + get_api_job_by_id, + job_to_api_job, +) from selfprivacy_api.jobs import Jobs -@strawberry.type -class ApiJob: - name: str - description: str - status: str - status_text: typing.Optional[str] - progress: typing.Optional[int] - created_at: datetime.datetime - updated_at: datetime.datetime - finished_at: typing.Optional[datetime.datetime] - error: typing.Optional[str] - result: typing.Optional[str] - - @strawberry.type class Job: @strawberry.field @@ -28,18 +18,8 @@ class Job: Jobs.get_instance().get_jobs() - return [ - ApiJob( - name=job.name, - description=job.description, - status=job.status.name, - status_text=job.status_text, - progress=job.progress, - created_at=job.created_at, - updated_at=job.updated_at, - finished_at=job.finished_at, - error=job.error, - result=job.result, - ) - for job in Jobs.get_instance().get_jobs() - ] + return [job_to_api_job(job) for job in Jobs.get_instance().get_jobs()] + + @strawberry.field + def get_job(self, job_id: str) -> typing.Optional[ApiJob]: + return get_api_job_by_id(job_id) diff --git a/selfprivacy_api/graphql/queries/services.py b/selfprivacy_api/graphql/queries/services.py index 7d622b7..5398f81 100644 --- a/selfprivacy_api/graphql/queries/services.py +++ b/selfprivacy_api/graphql/queries/services.py @@ -13,6 +13,6 @@ from selfprivacy_api.services import get_all_services @strawberry.type class Services: @strawberry.field - def all_services(self, info) -> typing.List[Service]: + def all_services(self) -> typing.List[Service]: services = get_all_services() return [service_to_graphql_service(service) for service in services] diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index 
d17d283..872b2a1 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -7,6 +7,8 @@ from selfprivacy_api.graphql.common_types.dns import DnsRecord from selfprivacy_api.graphql.queries.common import Alert, Severity from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvider +from selfprivacy_api.jobs import Jobs +from selfprivacy_api.jobs.migrate_to_binds import is_bind_migrated from selfprivacy_api.utils import ReadUserData import selfprivacy_api.actions.system as system_actions import selfprivacy_api.actions.ssh as ssh_actions @@ -103,6 +105,11 @@ class SystemInfo: system_version: str = strawberry.field(resolver=get_system_version) python_version: str = strawberry.field(resolver=get_python_version) + @strawberry.field + def using_binds(self) -> bool: + """Check if the system is using BINDs""" + return is_bind_migrated() + @strawberry.type class SystemProviderInfo: @@ -135,7 +142,7 @@ class System: settings: SystemSettings = SystemSettings() info: SystemInfo = SystemInfo() provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info) - busy: bool = False + busy: bool = Jobs.is_busy() @strawberry.field def working_directory(self) -> str: diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 10098ca..d060e32 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -33,6 +33,7 @@ class JobStatus(Enum): """ Status of a job. """ + CREATED = "CREATED" RUNNING = "RUNNING" FINISHED = "FINISHED" @@ -43,7 +44,9 @@ class Job(BaseModel): """ Job class. """ + uid: UUID = uuid.uuid4() + type_id: str name: str description: str status: JobStatus @@ -84,16 +87,18 @@ class Jobs: else: Jobs.__instance = self - def reset(self) -> None: + @staticmethod + def reset() -> None: """ Reset the jobs list. 
""" with WriteUserData(UserDataFiles.JOBS) as user_data: user_data["jobs"] = [] + @staticmethod def add( - self, name: str, + type_id: str, description: str, status: JobStatus = JobStatus.CREATED, status_text: str = "", @@ -104,6 +109,7 @@ class Jobs: """ job = Job( name=name, + type_id=type_id, description=description, status=status, status_text=status_text, @@ -135,8 +141,8 @@ class Jobs: del user_data["jobs"][i] break + @staticmethod def update( - self, job: Job, status: JobStatus, status_text: typing.Optional[str] = None, @@ -174,7 +180,8 @@ class Jobs: return job - def get_job(self, id: str) -> typing.Optional[Job]: + @staticmethod + def get_job(uid: str) -> typing.Optional[Job]: """ Get a job from the jobs list. """ @@ -182,11 +189,12 @@ class Jobs: if "jobs" not in user_data: user_data["jobs"] = [] for job in user_data["jobs"]: - if job["uid"] == id: + if job["uid"] == uid: return Job(**job) return None - def get_jobs(self) -> typing.List[Job]: + @staticmethod + def get_jobs() -> typing.List[Job]: """ Get the jobs list. """ @@ -197,3 +205,16 @@ class Jobs: return [Job(**job) for job in user_data["jobs"]] except json.decoder.JSONDecodeError: return [] + + @staticmethod + def is_busy() -> bool: + """ + Check if there is a job running. 
+ """ + with ReadUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for job in user_data["jobs"]: + if job["status"] == JobStatus.RUNNING.value: + return True + return False diff --git a/selfprivacy_api/jobs/migrate_to_binds.py b/selfprivacy_api/jobs/migrate_to_binds.py new file mode 100644 index 0000000..2d6a37a --- /dev/null +++ b/selfprivacy_api/jobs/migrate_to_binds.py @@ -0,0 +1,285 @@ +"""Function to perform migration of app data to binds.""" +import subprocess +import psutil +import pathlib +import shutil + +from pydantic import BaseModel +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.utils.block_devices import BlockDevices + + +class BindMigrationConfig(BaseModel): + """Config for bind migration. + For each service provide block device name. 
+ """ + + email_block_device: str + bitwarden_block_device: str + gitea_block_device: str + nextcloud_block_device: str + pleroma_block_device: str + + +def is_bind_migrated() -> bool: + """Check if bind migration was performed.""" + with ReadUserData() as user_data: + return user_data.get("useBinds", False) + + +def activate_binds(config: BindMigrationConfig): + """Activate binds.""" + # Activate binds in userdata + with WriteUserData() as user_data: + if "email" not in user_data: + user_data["email"] = {} + user_data["email"]["location"] = config.email_block_device + if "bitwarden" not in user_data: + user_data["bitwarden"] = {} + user_data["bitwarden"]["location"] = config.bitwarden_block_device + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["location"] = config.gitea_block_device + if "nextcloud" not in user_data: + user_data["nextcloud"] = {} + user_data["nextcloud"]["location"] = config.nextcloud_block_device + if "pleroma" not in user_data: + user_data["pleroma"] = {} + user_data["pleroma"]["location"] = config.pleroma_block_device + + user_data["useBinds"] = True + + +def move_folder( + data_path: pathlib.Path, bind_path: pathlib.Path, user: str, group: str +): + """Move folder from data to bind.""" + if data_path.exists(): + shutil.move(str(data_path), str(bind_path)) + else: + return + + data_path.mkdir(mode=0o750, parents=True, exist_ok=True) + + shutil.chown(str(bind_path), user=user, group=group) + shutil.chown(str(data_path), user=user, group=group) + + subprocess.run(["mount", "--bind", str(bind_path), str(data_path)], check=True) + + subprocess.run(["chown", "-R", f"{user}:{group}", str(data_path)], check=True) + + +@huey.task() +def migrate_to_binds(config: BindMigrationConfig, job: Job): + """Migrate app data to binds.""" + + # Exit if migration is already done + if is_bind_migrated(): + Jobs.update( + job=job, + status=JobStatus.ERROR, + error="Migration already done.", + ) + return + + Jobs.update( + job=job, + 
status=JobStatus.RUNNING, + progress=0, + status_text="Checking if all volumes are available.", + ) + # Get block devices. + block_devices = BlockDevices().get_block_devices() + block_device_names = [device.name for device in block_devices] + + # Get all unique required block devices + required_block_devices = [] + for block_device_name in config.__dict__.values(): + if block_device_name not in required_block_devices: + required_block_devices.append(block_device_name) + + # Check if all block devices from config are present. + for block_device_name in required_block_devices: + if block_device_name not in block_device_names: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not found.", + ) + return + + # Make sure all required block devices are mounted. + # sda1 is the root partition and is always mounted. + for block_device_name in required_block_devices: + if block_device_name == "sda1": + continue + block_device = BlockDevices().get_block_device(block_device_name) + if block_device is None: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not found.", + ) + return + if f"/volumes/{block_device_name}" not in block_device.mountpoints: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not mounted.", + ) + return + + # Make sure /volumes/sda1 exists. + pathlib.Path("/volumes/sda1").mkdir(parents=True, exist_ok=True) + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=5, + status_text="Activating binds in NixOS config.", + ) + + activate_binds(config) + + # Perform migration of Nextcloud. 
+ Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=10, + status_text="Migrating Nextcloud.", + ) + + Nextcloud().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/nextcloud"), + bind_path=pathlib.Path(f"/volumes/{config.nextcloud_block_device}/nextcloud"), + user="nextcloud", + group="nextcloud", + ) + + # Start Nextcloud + Nextcloud().start() + + # Perform migration of Bitwarden + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=28, + status_text="Migrating Bitwarden.", + ) + + Bitwarden().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/bitwarden"), + bind_path=pathlib.Path(f"/volumes/{config.bitwarden_block_device}/bitwarden"), + user="vaultwarden", + group="vaultwarden", + ) + + move_folder( + data_path=pathlib.Path("/var/lib/bitwarden_rs"), + bind_path=pathlib.Path( + f"/volumes/{config.bitwarden_block_device}/bitwarden_rs" + ), + user="vaultwarden", + group="vaultwarden", + ) + + # Start Bitwarden + Bitwarden().start() + + # Perform migration of Gitea + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=46, + status_text="Migrating Gitea.", + ) + + Gitea().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/gitea"), + bind_path=pathlib.Path(f"/volumes/{config.gitea_block_device}/gitea"), + user="gitea", + group="gitea", + ) + + Gitea().start() + + # Perform migration of Mail server + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=64, + status_text="Migrating Mail server.", + ) + + MailServer().stop() + + move_folder( + data_path=pathlib.Path("/var/vmail"), + bind_path=pathlib.Path(f"/volumes/{config.email_block_device}/vmail"), + user="virtualMail", + group="virtualMail", + ) + + move_folder( + data_path=pathlib.Path("/var/sieve"), + bind_path=pathlib.Path(f"/volumes/{config.email_block_device}/sieve"), + user="virtualMail", + group="virtualMail", + ) + + MailServer().start() + + # Perform migration of Pleroma + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + 
progress=82, + status_text="Migrating Pleroma.", + ) + + Pleroma().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/pleroma"), + bind_path=pathlib.Path(f"/volumes/{config.pleroma_block_device}/pleroma"), + user="pleroma", + group="pleroma", + ) + + Pleroma().start() + + Jobs.update( + job=job, + status=JobStatus.FINISHED, + progress=100, + status_text="Migration finished.", + result="Migration finished.", + ) + + +def start_bind_migration(config: BindMigrationConfig) -> Job: + """Start migration.""" + job = Jobs.add( + type_id="migrations.migrate_to_binds", + name="Migrate to binds", + description="Migration required to use the new disk space management.", + ) + migrate_to_binds(config, job) + return job diff --git a/selfprivacy_api/jobs/test.py b/selfprivacy_api/jobs/test.py index f3c03af..9d93fb7 100644 --- a/selfprivacy_api/jobs/test.py +++ b/selfprivacy_api/jobs/test.py @@ -6,6 +6,7 @@ from selfprivacy_api.jobs import JobStatus, Jobs @huey.task() def test_job(): job = Jobs.get_instance().add( + type_id="test", name="Test job", description="This is a test job.", status=JobStatus.CREATED, diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index a752400..a581ec9 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -137,9 +137,10 @@ class Bitwarden(Service): ), ] - def move_to_volume(self, volume: BlockDevice): + def move_to_volume(self, volume: BlockDevice) -> Job: job = Jobs.get_instance().add( - name="services.bitwarden.move", + type_id="services.bitwarden.move", + name="Move Bitwarden", description=f"Moving Bitwarden data to {volume.name}", ) @@ -155,7 +156,7 @@ class Bitwarden(Service): owner="vaultwarden", ), FolderMoveNames( - name="bitwarden", + name="bitwarden_rs", bind_location="/var/lib/bitwarden_rs", group="vaultwarden", owner="vaultwarden", diff --git a/selfprivacy_api/services/gitea/__init__.py 
b/selfprivacy_api/services/gitea/__init__.py index 7916b60..7a5db1b 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -3,7 +3,7 @@ import base64 import subprocess import typing -from selfprivacy_api.jobs import Jobs +from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status @@ -134,9 +134,10 @@ class Gitea(Service): ), ] - def move_to_volume(self, volume: BlockDevice): + def move_to_volume(self, volume: BlockDevice) -> Job: job = Jobs.get_instance().add( - name="services.gitea.move", + type_id="services.gitea.move", + name="Move Gitea", description=f"Moving Gitea data to {volume.name}", ) diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index 659d4c3..dfcaa7f 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -145,9 +145,10 @@ class MailServer(Service): ), ] - def move_to_volume(self, volume: BlockDevice): + def move_to_volume(self, volume: BlockDevice) -> Job: job = Jobs.get_instance().add( - name="services.mailserver.move", + type_id="services.mailserver.move", + name="Move Mail Server", description=f"Moving mailserver data to {volume.name}", ) diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index fcdaf9f..6bd616a 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -2,7 +2,7 @@ import base64 import subprocess import typing -from selfprivacy_api.jobs import Jobs +from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service from 
selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status @@ -142,9 +142,10 @@ class Nextcloud(Service): ), ] - def move_to_volume(self, volume: BlockDevice): + def move_to_volume(self, volume: BlockDevice) -> Job: job = Jobs.get_instance().add( - name="services.nextcloud.move", + type_id="services.nextcloud.move", + name="Move Nextcloud", description=f"Moving Nextcloud to volume {volume.name}", ) move_service( diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index 73d6474..2c86259 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -2,7 +2,7 @@ import base64 import subprocess import typing -from selfprivacy_api.jobs import Jobs +from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status @@ -104,5 +104,5 @@ class Ocserv(Service): def get_storage_usage() -> int: return 0 - def move_to_volume(self, volume: BlockDevice): + def move_to_volume(self, volume: BlockDevice) -> Job: raise NotImplementedError("ocserv service is not movable") diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index 9dd333e..1604fb4 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -2,7 +2,7 @@ import base64 import subprocess import typing -from selfprivacy_api.jobs import Jobs +from selfprivacy_api.jobs import Job, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status 
@@ -122,9 +122,10 @@ class Pleroma(Service): ), ] - def move_to_volume(self, volume: BlockDevice): + def move_to_volume(self, volume: BlockDevice) -> Job: job = Jobs.get_instance().add( - name="services.pleroma.move", + type_id="services.pleroma.move", + name="Move Pleroma", description=f"Moving Pleroma to volume {volume.name}", ) move_service( diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index bffa1a4..0defcff 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -4,6 +4,7 @@ from enum import Enum import typing from pydantic import BaseModel +from selfprivacy_api.jobs import Job from selfprivacy_api.utils.block_devices import BlockDevice @@ -133,5 +134,5 @@ class Service(ABC): pass @abstractmethod - def move_to_volume(self, volume: BlockDevice): + def move_to_volume(self, volume: BlockDevice) -> Job: pass diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index f3b0911..9d96d52 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -16,13 +16,13 @@ def get_block_device(device_name): "-J", "-b", "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE, MODEL,SERIAL,TYPE", - device_name, + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + f"/dev/{device_name}", ] ) lsblk_output = lsblk_output.decode("utf-8") lsblk_output = json.loads(lsblk_output) - return lsblk_output["blockdevices"] + return lsblk_output["blockdevices"][0] def resize_block_device(block_device) -> bool: @@ -30,9 +30,11 @@ def resize_block_device(block_device) -> bool: Resize a block device. Return True if successful. 
""" resize_command = ["resize2fs", block_device] - resize_process = subprocess.Popen(resize_command, shell=False) - resize_process.communicate() - return resize_process.returncode == 0 + try: + subprocess.check_output(resize_command, shell=False) + except subprocess.CalledProcessError: + return False + return True class BlockDevice: @@ -43,14 +45,14 @@ class BlockDevice: def __init__(self, block_device): self.name = block_device["name"] self.path = block_device["path"] - self.fsavail = block_device["fsavail"] - self.fssize = block_device["fssize"] + self.fsavail = str(block_device["fsavail"]) + self.fssize = str(block_device["fssize"]) self.fstype = block_device["fstype"] - self.fsused = block_device["fsused"] + self.fsused = str(block_device["fsused"]) self.mountpoints = block_device["mountpoints"] self.label = block_device["label"] self.uuid = block_device["uuid"] - self.size = block_device["size"] + self.size = str(block_device["size"]) self.model = block_device["model"] self.serial = block_device["serial"] self.type = block_device["type"] @@ -73,14 +75,14 @@ class BlockDevice: Update current data and return a dictionary of stats. 
""" device = get_block_device(self.name) - self.fsavail = device["fsavail"] - self.fssize = device["fssize"] + self.fsavail = str(device["fsavail"]) + self.fssize = str(device["fssize"]) self.fstype = device["fstype"] - self.fsused = device["fsused"] + self.fsused = str(device["fsused"]) self.mountpoints = device["mountpoints"] self.label = device["label"] self.uuid = device["uuid"] - self.size = device["size"] + self.size = str(device["size"]) self.model = device["model"] self.serial = device["serial"] self.type = device["type"] diff --git a/selfprivacy_api/utils/migrate_to_binds.py b/selfprivacy_api/utils/migrate_to_binds.py deleted file mode 100644 index 0159cf1..0000000 --- a/selfprivacy_api/utils/migrate_to_binds.py +++ /dev/null @@ -1,112 +0,0 @@ -"""Function to perform migration of app data to binds.""" -import subprocess -import psutil -import pathlib -import shutil -from selfprivacy_api.services.nextcloud import Nextcloud -from selfprivacy_api.utils import WriteUserData -from selfprivacy_api.utils.block_devices import BlockDevices - - -class BindMigrationConfig: - """Config for bind migration. - For each service provide block device name. - """ - - email_block_device: str - bitwarden_block_device: str - gitea_block_device: str - nextcloud_block_device: str - pleroma_block_device: str - - -def migrate_to_binds(config: BindMigrationConfig): - """Migrate app data to binds.""" - - # Get block devices. - block_devices = BlockDevices().get_block_devices() - block_device_names = [device.name for device in block_devices] - - # Get all unique required block devices - required_block_devices = [] - for block_device_name in config.__dict__.values(): - if block_device_name not in required_block_devices: - required_block_devices.append(block_device_name) - - # Check if all block devices from config are present. 
- for block_device_name in required_block_devices: - if block_device_name not in block_device_names: - raise Exception(f"Block device {block_device_name} is not present.") - - # Make sure all required block devices are mounted. - # sda1 is the root partition and is always mounted. - for block_device_name in required_block_devices: - if block_device_name == "sda1": - continue - block_device = BlockDevices().get_block_device(block_device_name) - if block_device is None: - raise Exception(f"Block device {block_device_name} is not present.") - if f"/volumes/{block_device_name}" not in block_device.mountpoints: - raise Exception(f"Block device {block_device_name} is not mounted.") - - # Activate binds in userdata - with WriteUserData() as user_data: - if "email" not in user_data: - user_data["email"] = {} - user_data["email"]["block_device"] = config.email_block_device - if "bitwarden" not in user_data: - user_data["bitwarden"] = {} - user_data["bitwarden"]["block_device"] = config.bitwarden_block_device - if "gitea" not in user_data: - user_data["gitea"] = {} - user_data["gitea"]["block_device"] = config.gitea_block_device - if "nextcloud" not in user_data: - user_data["nextcloud"] = {} - user_data["nextcloud"]["block_device"] = config.nextcloud_block_device - if "pleroma" not in user_data: - user_data["pleroma"] = {} - user_data["pleroma"]["block_device"] = config.pleroma_block_device - - user_data["useBinds"] = True - - # Make sure /volumes/sda1 exists. - pathlib.Path("/volumes/sda1").mkdir(parents=True, exist_ok=True) - - # Perform migration of Nextcloud. - # Data is moved from /var/lib/nextcloud to /volumes//nextcloud. - # /var/lib/nextcloud is removed and /volumes//nextcloud is mounted as bind mount. - - # Turn off Nextcloud - Nextcloud().stop() - - # Move data from /var/lib/nextcloud to /volumes//nextcloud. - # /var/lib/nextcloud is removed and /volumes//nextcloud is mounted as bind mount. 
- nextcloud_data_path = pathlib.Path("/var/lib/nextcloud") - nextcloud_bind_path = pathlib.Path( - f"/volumes/{config.nextcloud_block_device}/nextcloud" - ) - if nextcloud_data_path.exists(): - shutil.move(str(nextcloud_data_path), str(nextcloud_bind_path)) - else: - raise Exception("Nextcloud data path does not exist.") - - # Make sure folder /var/lib/nextcloud exists. - nextcloud_data_path.mkdir(mode=0o750, parents=True, exist_ok=True) - - # Make sure this folder is owned by user nextcloud and group nextcloud. - shutil.chown(nextcloud_bind_path, user="nextcloud", group="nextcloud") - shutil.chown(nextcloud_data_path, user="nextcloud", group="nextcloud") - - # Mount nextcloud bind mount. - subprocess.run( - ["mount", "--bind", str(nextcloud_bind_path), str(nextcloud_data_path)], - check=True, - ) - - # Recursively chown all files in nextcloud bind mount. - subprocess.run( - ["chown", "-R", "nextcloud:nextcloud", str(nextcloud_data_path)], check=True - ) - - # Start Nextcloud - Nextcloud().start() diff --git a/tests/conftest.py b/tests/conftest.py index 2ef4b28..df147dc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,14 +13,14 @@ def tokens_file(mocker, shared_datadir): ) return mock + @pytest.fixture def jobs_file(mocker, shared_datadir): """Mock tokens file.""" - mock = mocker.patch( - "selfprivacy_api.utils.JOBS_FILE", shared_datadir / "jobs.json" - ) + mock = mocker.patch("selfprivacy_api.utils.JOBS_FILE", shared_datadir / "jobs.json") return mock + @pytest.fixture def huey_database(mocker, shared_datadir): """Mock huey database.""" diff --git a/tests/test_block_device_utils.py b/tests/test_block_device_utils.py new file mode 100644 index 0000000..dbe84bc --- /dev/null +++ b/tests/test_block_device_utils.py @@ -0,0 +1,484 @@ +#!/usr/bin/env python3 +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import json +import subprocess +import pytest + +from 
selfprivacy_api.utils.block_devices import ( + BlockDevice, + BlockDevices, + get_block_device, + resize_block_device, +) +from tests.common import read_json + +SINGLE_LSBLK_OUTPUT = b""" +{ + "blockdevices": [ + { + "name": "sda1", + "path": "/dev/sda1", + "fsavail": "4614107136", + "fssize": "19814920192", + "fstype": "ext4", + "fsused": "14345314304", + "mountpoints": [ + "/nix/store", "/" + ], + "label": null, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": 20210236928, + "model": null, + "serial": null, + "type": "part" + } + ] +} +""" + + +@pytest.fixture +def lsblk_singular_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=SINGLE_LSBLK_OUTPUT + ) + return mock + + +@pytest.fixture +def failed_check_output_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", + autospec=True, + side_effect=subprocess.CalledProcessError( + returncode=1, cmd=["some", "command"] + ), + ) + return mock + + +@pytest.fixture +def only_root_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "only_root.json") + assert read_json(datadir / "only_root.json")["volumes"][0]["device"] == "/dev/sda1" + assert ( + read_json(datadir / "only_root.json")["volumes"][0]["mountPoint"] + == "/volumes/sda1" + ) + assert read_json(datadir / "only_root.json")["volumes"][0]["filesystem"] == "ext4" + return datadir + + +@pytest.fixture +def no_devices_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_devices.json") + assert read_json(datadir / "no_devices.json")["volumes"] == [] + return datadir + + +@pytest.fixture +def undefined_devices_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") + assert "volumes" not in read_json(datadir / "undefined.json") + return datadir + + +def test_create_block_device_object(lsblk_singular_mock): + output = get_block_device("sda1") + assert 
lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "/dev/sda1", + ] + assert output == json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0] + + +def test_resize_block_device(lsblk_singular_mock): + result = resize_block_device("sdb") + assert result is True + assert lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "resize2fs", + "sdb", + ] + + +def test_resize_block_device_failed(failed_check_output_mock): + result = resize_block_device("sdb") + assert result is False + assert failed_check_output_mock.call_count == 1 + assert failed_check_output_mock.call_args[0][0] == [ + "resize2fs", + "sdb", + ] + + +VOLUME_LSBLK_OUTPUT = b""" +{ + "blockdevices": [ + { + "name": "sdb", + "path": "/dev/sdb", + "fsavail": "11888545792", + "fssize": "12573614080", + "fstype": "ext4", + "fsused": "24047616", + "mountpoints": [ + "/volumes/sdb" + ], + "label": null, + "uuid": "fa9d0026-ee23-4047-b8b1-297ae16fa751", + "size": 12884901888, + "model": "Volume", + "serial": "21378102", + "type": "disk" + } + ] +} +""" + + +def test_create_block_device(lsblk_singular_mock): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + + assert block_device.name == "sdb" + assert block_device.path == "/dev/sdb" + assert block_device.fsavail == "11888545792" + assert block_device.fssize == "12573614080" + assert block_device.fstype == "ext4" + assert block_device.fsused == "24047616" + assert block_device.mountpoints == ["/volumes/sdb"] + assert block_device.label is None + assert block_device.uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" + assert block_device.size == "12884901888" + assert block_device.model == "Volume" + assert block_device.serial == "21378102" + assert block_device.type == "disk" + assert block_device.locked is False + assert str(block_device) == "sdb" 
+ assert ( + repr(block_device) + == "" + ) + assert hash(block_device) == hash("sdb") + + +def test_block_devices_equal(lsblk_singular_mock): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + block_device2 = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + + assert block_device == block_device2 + + +@pytest.fixture +def resize_block_mock(mocker): + mock = mocker.patch( + "selfprivacy_api.utils.block_devices.resize_block_device", + autospec=True, + return_value=True, + ) + return mock + + +def test_call_resize_from_block_device(lsblk_singular_mock, resize_block_mock): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + block_device.resize() + assert resize_block_mock.call_count == 1 + assert resize_block_mock.call_args[0][0] == "/dev/sdb" + assert lsblk_singular_mock.call_count == 0 + + +def test_get_stats_from_block_device(lsblk_singular_mock): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + stats = block_device.stats() + assert stats == { + "name": "sda1", + "path": "/dev/sda1", + "fsavail": "4614107136", + "fssize": "19814920192", + "fstype": "ext4", + "fsused": "14345314304", + "mountpoints": ["/nix/store", "/"], + "label": None, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": "20210236928", + "model": None, + "serial": None, + "type": "part", + } + assert lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "/dev/sda1", + ] + + +def test_mount_block_device(lsblk_singular_mock, only_root_in_userdata): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.mount() + assert result is False + volume = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + result = volume.mount() + assert result is True + assert ( + 
read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["device"] + == "/dev/sdb" + ) + assert ( + read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["mountPoint"] + == "/volumes/sdb" + ) + assert ( + read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["fsType"] + == "ext4" + ) + + +def test_mount_block_device_when_undefined( + lsblk_singular_mock, undefined_devices_in_userdata +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.mount() + assert result is True + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "device" + ] + == "/dev/sda1" + ) + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "mountPoint" + ] + == "/volumes/sda1" + ) + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "fsType" + ] + == "ext4" + ) + + +def test_unmount_block_device(lsblk_singular_mock, only_root_in_userdata): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.unmount() + assert result is True + volume = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + result = volume.unmount() + assert result is False + assert len(read_json(only_root_in_userdata / "only_root.json")["volumes"]) == 0 + + +def test_unmount_block_device_when_undefined( + lsblk_singular_mock, undefined_devices_in_userdata +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.unmount() + assert result is False + assert ( + len(read_json(undefined_devices_in_userdata / "undefined.json")["volumes"]) == 0 + ) + + +FULL_LSBLK_OUTPUT = b""" +{ + "blockdevices": [ + { + "name": "sda", + "path": "/dev/sda", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 20480786432, + "model": "QEMU HARDDISK", + 
"serial": "drive-scsi0-0-0-0", + "type": "disk", + "children": [ + { + "name": "sda1", + "path": "/dev/sda1", + "fsavail": "4605702144", + "fssize": "19814920192", + "fstype": "ext4", + "fsused": "14353719296", + "mountpoints": [ + "/nix/store", "/" + ], + "label": null, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": 20210236928, + "model": null, + "serial": null, + "type": "part" + },{ + "name": "sda14", + "path": "/dev/sda14", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 1048576, + "model": null, + "serial": null, + "type": "part" + },{ + "name": "sda15", + "path": "/dev/sda15", + "fsavail": null, + "fssize": null, + "fstype": "vfat", + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": "6B29-5BA7", + "size": 268435456, + "model": null, + "serial": null, + "type": "part" + } + ] + },{ + "name": "sdb", + "path": "/dev/sdb", + "fsavail": "11888545792", + "fssize": "12573614080", + "fstype": "ext4", + "fsused": "24047616", + "mountpoints": [ + "/volumes/sdb" + ], + "label": null, + "uuid": "fa9d0026-ee23-4047-b8b1-297ae16fa751", + "size": 12884901888, + "model": "Volume", + "serial": "21378102", + "type": "disk" + },{ + "name": "sr0", + "path": "/dev/sr0", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 1073741312, + "model": "QEMU DVD-ROM", + "serial": "QM00003", + "type": "rom" + } + ] +} +""" + + +@pytest.fixture +def lsblk_full_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=FULL_LSBLK_OUTPUT + ) + return mock + + +def test_get_block_devices(lsblk_full_mock): + block_devices = BlockDevices().get_block_devices() + assert len(block_devices) == 2 + assert block_devices[0].name == "sda1" + assert block_devices[0].path == "/dev/sda1" + assert block_devices[0].fsavail == "4605702144" + 
assert block_devices[0].fssize == "19814920192" + assert block_devices[0].fstype == "ext4" + assert block_devices[0].fsused == "14353719296" + assert block_devices[0].mountpoints == ["/nix/store", "/"] + assert block_devices[0].label is None + assert block_devices[0].uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_devices[0].size == "20210236928" + assert block_devices[0].model is None + assert block_devices[0].serial is None + assert block_devices[0].type == "part" + assert block_devices[1].name == "sdb" + assert block_devices[1].path == "/dev/sdb" + assert block_devices[1].fsavail == "11888545792" + assert block_devices[1].fssize == "12573614080" + assert block_devices[1].fstype == "ext4" + assert block_devices[1].fsused == "24047616" + assert block_devices[1].mountpoints == ["/volumes/sdb"] + assert block_devices[1].label is None + assert block_devices[1].uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" + assert block_devices[1].size == "12884901888" + assert block_devices[1].model == "Volume" + assert block_devices[1].serial == "21378102" + assert block_devices[1].type == "disk" + + +def test_get_block_device(lsblk_full_mock): + block_device = BlockDevices().get_block_device("sda1") + assert block_device is not None + assert block_device.name == "sda1" + assert block_device.path == "/dev/sda1" + assert block_device.fsavail == "4605702144" + assert block_device.fssize == "19814920192" + assert block_device.fstype == "ext4" + assert block_device.fsused == "14353719296" + assert block_device.mountpoints == ["/nix/store", "/"] + assert block_device.label is None + assert block_device.uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_device.size == "20210236928" + assert block_device.model is None + assert block_device.serial is None + assert block_device.type == "part" + + +def test_get_nonexistent_block_device(lsblk_full_mock): + block_device = BlockDevices().get_block_device("sda2") + assert block_device is None + + +def 
test_get_block_devices_by_mountpoint(lsblk_full_mock): + block_devices = BlockDevices().get_block_devices_by_mountpoint("/nix/store") + assert len(block_devices) == 1 + assert block_devices[0].name == "sda1" + assert block_devices[0].path == "/dev/sda1" + assert block_devices[0].fsavail == "4605702144" + assert block_devices[0].fssize == "19814920192" + assert block_devices[0].fstype == "ext4" + assert block_devices[0].fsused == "14353719296" + assert block_devices[0].mountpoints == ["/nix/store", "/"] + assert block_devices[0].label is None + assert block_devices[0].uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_devices[0].size == "20210236928" + assert block_devices[0].model is None + assert block_devices[0].serial is None + assert block_devices[0].type == "part" + + +def test_get_block_devices_by_mountpoint_no_match(lsblk_full_mock): + block_devices = BlockDevices().get_block_devices_by_mountpoint("/foo") + assert len(block_devices) == 0 diff --git a/tests/test_block_device_utils/no_devices.json b/tests/test_block_device_utils/no_devices.json new file mode 100644 index 0000000..97300ca --- /dev/null +++ b/tests/test_block_device_utils/no_devices.json @@ -0,0 +1,54 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + 
"allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "volumes": [ + ] +} diff --git a/tests/test_block_device_utils/only_root.json b/tests/test_block_device_utils/only_root.json new file mode 100644 index 0000000..0f8ec0d --- /dev/null +++ b/tests/test_block_device_utils/only_root.json @@ -0,0 +1,59 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "volumes": [ + { + "device": "/dev/sda1", + "mountPoint": "/volumes/sda1", + "filesystem": "ext4" + } + ] +} diff --git a/tests/test_block_device_utils/undefined.json b/tests/test_block_device_utils/undefined.json new file mode 100644 index 0000000..eb660cc --- /dev/null +++ b/tests/test_block_device_utils/undefined.json @@ -0,0 +1,52 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": 
"ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 82c40d3..d1df2cc 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -6,11 +6,13 @@ import pytest from selfprivacy_api.utils import WriteUserData, ReadUserData from selfprivacy_api.jobs import Jobs, JobStatus + def test_jobs(jobs_file, shared_datadir): jobs = Jobs() assert jobs.get_jobs() == [] test_job = jobs.add( + type_id="test", name="Test job", description="This is a test job.", status=JobStatus.CREATED, diff --git a/tests/test_network_utils.py b/tests/test_network_utils.py index a7c1511..0662584 100644 --- a/tests/test_network_utils.py +++ b/tests/test_network_utils.py @@ -2,6 +2,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=missing-function-docstring +import subprocess import pytest from selfprivacy_api.utils.network import get_ip4, get_ip6 @@ -30,6 +31,28 @@ def ip_process_mock(mocker): return mock +@pytest.fixture +def failed_ip_process_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", + autospec=True, + return_value=FAILED_OUTPUT_STRING, + ) + return mock + + +@pytest.fixture +def failed_subprocess_call(mocker): + mock = mocker.patch( + "subprocess.check_output", + autospec=True, + side_effect=subprocess.CalledProcessError( + returncode=1, cmd=["ip", "addr", "show", "dev", "eth0"] + ), + ) + return mock + + def test_get_ip4(ip_process_mock): """Test get IPv4 address""" ip4 = get_ip4() @@ -40,3 +63,23 @@ def test_get_ip6(ip_process_mock): """Test get IPv6 
address""" ip6 = get_ip6() assert ip6 == "fe80::9400:ff:fef1:34ae" + + +def test_failed_get_ip4(failed_ip_process_mock): + ip4 = get_ip4() + assert ip4 is "" + + +def test_failed_get_ip6(failed_ip_process_mock): + ip6 = get_ip6() + assert ip6 is "" + + +def test_failed_subprocess_get_ip4(failed_subprocess_call): + ip4 = get_ip4() + assert ip4 is "" + + +def test_failed_subprocess_get_ip6(failed_subprocess_call): + ip6 = get_ip6() + assert ip6 is "" -- 2.42.0 From e5584e0e1c140757a7c7bd01e71a3ced93cb9bdd Mon Sep 17 00:00:00 2001 From: inexcode Date: Thu, 18 Aug 2022 01:13:06 +0400 Subject: [PATCH 31/50] use exists() instead of isFile --- selfprivacy_api/utils/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 14a9096..83213d7 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -39,7 +39,7 @@ class WriteUserData(object): self.userdata_file = open(TOKENS_FILE, "r+", encoding="utf-8") elif file_type == UserDataFiles.JOBS: # Make sure file exists - if not os.path.isfile(JOBS_FILE): + if not os.path.exists(JOBS_FILE): with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: jobs_file.write("{}") self.userdata_file = open(JOBS_FILE, "r+", encoding="utf-8") @@ -70,7 +70,7 @@ class ReadUserData(object): self.userdata_file = open(TOKENS_FILE, "r", encoding="utf-8") elif file_type == UserDataFiles.JOBS: # Make sure file exists - if not os.path.isfile(JOBS_FILE): + if not os.path.exists(JOBS_FILE): with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: jobs_file.write("{}") self.userdata_file = open(JOBS_FILE, "r", encoding="utf-8") -- 2.42.0 From 19168dfdafb96bbd8daaf9ec7b825d339caf09f5 Mon Sep 17 00:00:00 2001 From: inexcode Date: Thu, 18 Aug 2022 01:38:38 +0400 Subject: [PATCH 32/50] Add jobs mocking to tests explicitly --- tests/conftest.py | 5 ++++- tests/test_block_device_utils.py | 32 ++++++++++++++++---------------- 
tests/test_jobs.py | 2 +- 3 files changed, 21 insertions(+), 18 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index df147dc..b4f812d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,7 +17,10 @@ def tokens_file(mocker, shared_datadir): @pytest.fixture def jobs_file(mocker, shared_datadir): """Mock tokens file.""" - mock = mocker.patch("selfprivacy_api.utils.JOBS_FILE", shared_datadir / "jobs.json") + mock = mocker.patch( + "selfprivacy_api.utils.JOBS_FILE", + shared_datadir / "jobs.json" + ) return mock diff --git a/tests/test_block_device_utils.py b/tests/test_block_device_utils.py index dbe84bc..a22dad0 100644 --- a/tests/test_block_device_utils.py +++ b/tests/test_block_device_utils.py @@ -85,7 +85,7 @@ def undefined_devices_in_userdata(mocker, datadir): return datadir -def test_create_block_device_object(lsblk_singular_mock): +def test_create_block_device_object(lsblk_singular_mock, authorized_client): output = get_block_device("sda1") assert lsblk_singular_mock.call_count == 1 assert lsblk_singular_mock.call_args[0][0] == [ @@ -99,7 +99,7 @@ def test_create_block_device_object(lsblk_singular_mock): assert output == json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0] -def test_resize_block_device(lsblk_singular_mock): +def test_resize_block_device(lsblk_singular_mock, authorized_client): result = resize_block_device("sdb") assert result is True assert lsblk_singular_mock.call_count == 1 @@ -109,7 +109,7 @@ def test_resize_block_device(lsblk_singular_mock): ] -def test_resize_block_device_failed(failed_check_output_mock): +def test_resize_block_device_failed(failed_check_output_mock, authorized_client): result = resize_block_device("sdb") assert result is False assert failed_check_output_mock.call_count == 1 @@ -144,7 +144,7 @@ VOLUME_LSBLK_OUTPUT = b""" """ -def test_create_block_device(lsblk_singular_mock): +def test_create_block_device(lsblk_singular_mock, authorized_client): block_device = 
BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) assert block_device.name == "sdb" @@ -169,7 +169,7 @@ def test_create_block_device(lsblk_singular_mock): assert hash(block_device) == hash("sdb") -def test_block_devices_equal(lsblk_singular_mock): +def test_block_devices_equal(lsblk_singular_mock, authorized_client): block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) block_device2 = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) @@ -186,7 +186,7 @@ def resize_block_mock(mocker): return mock -def test_call_resize_from_block_device(lsblk_singular_mock, resize_block_mock): +def test_call_resize_from_block_device(lsblk_singular_mock, resize_block_mock, authorized_client): block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) block_device.resize() assert resize_block_mock.call_count == 1 @@ -194,7 +194,7 @@ def test_call_resize_from_block_device(lsblk_singular_mock, resize_block_mock): assert lsblk_singular_mock.call_count == 0 -def test_get_stats_from_block_device(lsblk_singular_mock): +def test_get_stats_from_block_device(lsblk_singular_mock, authorized_client): block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) stats = block_device.stats() assert stats == { @@ -223,7 +223,7 @@ def test_get_stats_from_block_device(lsblk_singular_mock): ] -def test_mount_block_device(lsblk_singular_mock, only_root_in_userdata): +def test_mount_block_device(lsblk_singular_mock, only_root_in_userdata, authorized_client): block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) result = block_device.mount() assert result is False @@ -245,7 +245,7 @@ def test_mount_block_device(lsblk_singular_mock, only_root_in_userdata): def test_mount_block_device_when_undefined( - lsblk_singular_mock, undefined_devices_in_userdata + lsblk_singular_mock, undefined_devices_in_userdata, authorized_client ): block_device = 
BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) result = block_device.mount() @@ -270,7 +270,7 @@ def test_mount_block_device_when_undefined( ) -def test_unmount_block_device(lsblk_singular_mock, only_root_in_userdata): +def test_unmount_block_device(lsblk_singular_mock, only_root_in_userdata, authorized_client): block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) result = block_device.unmount() assert result is True @@ -281,7 +281,7 @@ def test_unmount_block_device(lsblk_singular_mock, only_root_in_userdata): def test_unmount_block_device_when_undefined( - lsblk_singular_mock, undefined_devices_in_userdata + lsblk_singular_mock, undefined_devices_in_userdata, authorized_client ): block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) result = block_device.unmount() @@ -407,7 +407,7 @@ def lsblk_full_mock(mocker): return mock -def test_get_block_devices(lsblk_full_mock): +def test_get_block_devices(lsblk_full_mock, authorized_client): block_devices = BlockDevices().get_block_devices() assert len(block_devices) == 2 assert block_devices[0].name == "sda1" @@ -438,7 +438,7 @@ def test_get_block_devices(lsblk_full_mock): assert block_devices[1].type == "disk" -def test_get_block_device(lsblk_full_mock): +def test_get_block_device(lsblk_full_mock, authorized_client): block_device = BlockDevices().get_block_device("sda1") assert block_device is not None assert block_device.name == "sda1" @@ -456,12 +456,12 @@ def test_get_block_device(lsblk_full_mock): assert block_device.type == "part" -def test_get_nonexistent_block_device(lsblk_full_mock): +def test_get_nonexistent_block_device(lsblk_full_mock, authorized_client): block_device = BlockDevices().get_block_device("sda2") assert block_device is None -def test_get_block_devices_by_mountpoint(lsblk_full_mock): +def test_get_block_devices_by_mountpoint(lsblk_full_mock, authorized_client): block_devices = 
BlockDevices().get_block_devices_by_mountpoint("/nix/store") assert len(block_devices) == 1 assert block_devices[0].name == "sda1" @@ -479,6 +479,6 @@ def test_get_block_devices_by_mountpoint(lsblk_full_mock): assert block_devices[0].type == "part" -def test_get_block_devices_by_mountpoint_no_match(lsblk_full_mock): +def test_get_block_devices_by_mountpoint_no_match(lsblk_full_mock, authorized_client): block_devices = BlockDevices().get_block_devices_by_mountpoint("/foo") assert len(block_devices) == 0 diff --git a/tests/test_jobs.py b/tests/test_jobs.py index d1df2cc..2732335 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -7,7 +7,7 @@ from selfprivacy_api.utils import WriteUserData, ReadUserData from selfprivacy_api.jobs import Jobs, JobStatus -def test_jobs(jobs_file, shared_datadir): +def test_jobs(authorized_client, jobs_file, shared_datadir): jobs = Jobs() assert jobs.get_jobs() == [] -- 2.42.0 From a67a0b3de2d3d1872e3f4a8413dcc3c85fbe65d5 Mon Sep 17 00:00:00 2001 From: inexcode Date: Thu, 18 Aug 2022 01:43:02 +0400 Subject: [PATCH 33/50] Fix type cast --- selfprivacy_api/services/generic_service_mover.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index 6831ed7..a214830 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -53,7 +53,7 @@ def move_service( ) return # Check if there is enough space on the new volume - if volume.fsavail < service.get_storage_usage(): + if int(volume.fsavail) < service.get_storage_usage(): Jobs.get_instance().update( job=job, status=JobStatus.ERROR, -- 2.42.0 From bb99c2ba587f8545ae82c4a8f8fa2acf1d1a36bb Mon Sep 17 00:00:00 2001 From: inexcode Date: Thu, 18 Aug 2022 03:27:10 +0400 Subject: [PATCH 34/50] Fix graphql field --- selfprivacy_api/graphql/queries/system.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff 
--git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index 872b2a1..ef1476a 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -142,7 +142,10 @@ class System: settings: SystemSettings = SystemSettings() info: SystemInfo = SystemInfo() provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info) - busy: bool = Jobs.is_busy() + @strawberry.field + def busy(self) -> bool: + """Check if the system is busy""" + return Jobs.is_busy() @strawberry.field def working_directory(self) -> str: -- 2.42.0 From 130ab61d4ba30602663982022309e23c397a2079 Mon Sep 17 00:00:00 2001 From: inexcode Date: Thu, 18 Aug 2022 03:35:06 +0400 Subject: [PATCH 35/50] Register volume migration mutation --- selfprivacy_api/graphql/mutations/storage_mutations.py | 1 + 1 file changed, 1 insertion(+) diff --git a/selfprivacy_api/graphql/mutations/storage_mutations.py b/selfprivacy_api/graphql/mutations/storage_mutations.py index 84e97dc..3c2955c 100644 --- a/selfprivacy_api/graphql/mutations/storage_mutations.py +++ b/selfprivacy_api/graphql/mutations/storage_mutations.py @@ -78,6 +78,7 @@ class StorageMutations: success=False, code=409, message="Volume not unmounted (already unmounted?)" ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) def migrate_to_binds(self, input: MigrateToBindsInput) -> GenericJobButationReturn: """Migrate to binds""" if not is_bind_migrated(): -- 2.42.0 From 807df0c1cc02669dbadba7fafc0851d5142693d4 Mon Sep 17 00:00:00 2001 From: inexcode Date: Fri, 19 Aug 2022 05:48:26 +0400 Subject: [PATCH 36/50] Fix Bind migration check --- selfprivacy_api/graphql/mutations/storage_mutations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/storage_mutations.py b/selfprivacy_api/graphql/mutations/storage_mutations.py index 3c2955c..1b6d74e 100644 --- a/selfprivacy_api/graphql/mutations/storage_mutations.py +++ 
b/selfprivacy_api/graphql/mutations/storage_mutations.py @@ -81,7 +81,7 @@ class StorageMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def migrate_to_binds(self, input: MigrateToBindsInput) -> GenericJobButationReturn: """Migrate to binds""" - if not is_bind_migrated(): + if is_bind_migrated(): return GenericJobButationReturn( success=False, code=409, message="Already migrated to binds" ) -- 2.42.0 From cd5ae809316944d37a431ff478a36f02f2fe4aa8 Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 20 Aug 2022 22:46:39 +0400 Subject: [PATCH 37/50] Fix postgresql not being migrated during migration to binds --- selfprivacy_api/jobs/migrate_to_binds.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/jobs/migrate_to_binds.py b/selfprivacy_api/jobs/migrate_to_binds.py index 2d6a37a..346023d 100644 --- a/selfprivacy_api/jobs/migrate_to_binds.py +++ b/selfprivacy_api/jobs/migrate_to_binds.py @@ -1,6 +1,5 @@ """Function to perform migration of app data to binds.""" import subprocess -import psutil import pathlib import shutil @@ -263,6 +262,13 @@ def migrate_to_binds(config: BindMigrationConfig, job: Job): group="pleroma", ) + move_folder( + data_path=pathlib.Path("/var/lib/postgresql"), + bind_path=pathlib.Path(f"/volumes/{config.pleroma_block_device}/postgresql"), + user="postgres", + group="postgres", + ) + Pleroma().start() Jobs.update( -- 2.42.0 From c92294350fb4353fa06e6eef334ccc9c204f7b7b Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 20 Aug 2022 22:47:32 +0400 Subject: [PATCH 38/50] Make Huey run immediately when testing --- selfprivacy_api/utils/huey.py | 8 +++++++- tests/conftest.py | 10 ++++++---- tests/test_common.py | 10 ++++++++++ 3 files changed, 23 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/utils/huey.py b/selfprivacy_api/utils/huey.py index 7b39d5a..034f7ba 100644 --- a/selfprivacy_api/utils/huey.py +++ b/selfprivacy_api/utils/huey.py @@ -1,8 +1,14 @@ """MiniHuey singleton.""" 
+import os from huey import SqliteHuey HUEY_DATABASE = "/etc/nixos/userdata/tasks.db" # Singleton instance containing the huey database. -huey = SqliteHuey(HUEY_DATABASE) +test_mode = os.environ.get("TEST_MODE") + +huey = SqliteHuey( + HUEY_DATABASE, + immediate=test_mode == "true", +) diff --git a/tests/conftest.py b/tests/conftest.py index b4f812d..ea7a66a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,10 +1,15 @@ """Tests configuration.""" # pylint: disable=redefined-outer-name # pylint: disable=unused-argument +import os import pytest from fastapi.testclient import TestClient +def pytest_generate_tests(metafunc): + os.environ["TEST_MODE"] = "true" + + @pytest.fixture def tokens_file(mocker, shared_datadir): """Mock tokens file.""" @@ -17,10 +22,7 @@ def tokens_file(mocker, shared_datadir): @pytest.fixture def jobs_file(mocker, shared_datadir): """Mock tokens file.""" - mock = mocker.patch( - "selfprivacy_api.utils.JOBS_FILE", - shared_datadir / "jobs.json" - ) + mock = mocker.patch("selfprivacy_api.utils.JOBS_FILE", shared_datadir / "jobs.json") return mock diff --git a/tests/test_common.py b/tests/test_common.py index e581bd4..e5d3f62 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,6 +1,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument import json +import os import pytest from selfprivacy_api.utils import WriteUserData, ReadUserData @@ -28,3 +29,12 @@ def test_write_invalid_user_data(): with pytest.raises(ValueError): with WriteUserData("invalid") as user_data: pass + + +@pytest.fixture +def test_mode(): + return os.environ.get("TEST_MODE") + + +def test_the_test_mode(test_mode): + assert test_mode == "true" -- 2.42.0 From 07f4da2f2306a4e3dbce29e135ea33ab92b4b21d Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 20 Aug 2022 22:48:44 +0400 Subject: [PATCH 39/50] Reorganize tests --- tests/test_block_device_utils.py | 12 +- .../{_test_system.py => test_system.py} | 115 +++++++++--------- 
tests/test_graphql/test_system/turned_on.json | 4 +- tests/test_jobs.py | 18 +++ tests/test_rest_endpoints/data/jobs.json | 1 + tests/test_rest_endpoints/data/tokens.json | 14 +++ .../services/data/tokens.json | 0 .../services/test_bitwarden.py | 0 .../test_bitwarden/enable_undefined.json | 0 .../services/test_bitwarden/turned_off.json | 0 .../services/test_bitwarden/turned_on.json | 0 .../services/test_bitwarden/undefined.json | 0 .../services/test_gitea.py | 0 .../services/test_gitea/enable_undefined.json | 0 .../services/test_gitea/turned_off.json | 0 .../services/test_gitea/turned_on.json | 0 .../services/test_gitea/undefined.json | 0 .../services/test_mailserver.py | 0 .../services/test_nextcloud.py | 0 .../test_nextcloud/enable_undefined.json | 0 .../services/test_nextcloud/turned_off.json | 0 .../services/test_nextcloud/turned_on.json | 0 .../services/test_nextcloud/undefined.json | 0 .../services/test_ocserv.py | 0 .../test_ocserv/enable_undefined.json | 0 .../services/test_ocserv/turned_off.json | 0 .../services/test_ocserv/turned_on.json | 0 .../services/test_ocserv/undefined.json | 0 .../services/test_pleroma.py | 0 .../test_pleroma/enable_undefined.json | 0 .../services/test_pleroma/turned_off.json | 0 .../services/test_pleroma/turned_on.json | 0 .../services/test_pleroma/undefined.json | 0 .../services/test_restic.py | 0 .../services/test_restic/no_values.json | 0 .../services/test_restic/some_values.json | 0 .../services/test_restic/undefined.json | 0 .../services/test_services.py | 70 ++++++----- .../services/test_ssh.py | 0 .../services/test_ssh/all_off.json | 0 .../test_ssh/root_and_admin_have_keys.json | 0 .../services/test_ssh/some_users.json | 0 .../services/test_ssh/turned_off.json | 0 .../services/test_ssh/turned_on.json | 0 .../services/test_ssh/undefined.json | 0 .../services/test_ssh/undefined_values.json | 0 tests/{ => test_rest_endpoints}/test_auth.py | 4 +- .../{ => test_rest_endpoints}/test_system.py | 0 .../test_system/domain | 0 
.../test_system/no_values.json | 0 .../test_system/turned_off.json | 0 .../test_system/turned_on.json | 0 .../test_system/undefined.json | 0 tests/{ => test_rest_endpoints}/test_users.py | 0 .../test_users/no_users.json | 0 .../test_users/one_user.json | 0 .../test_users/some_users.json | 0 .../test_users/undefined.json | 0 58 files changed, 143 insertions(+), 95 deletions(-) rename tests/test_graphql/{_test_system.py => test_system.py} (89%) create mode 100644 tests/test_rest_endpoints/data/jobs.json create mode 100644 tests/test_rest_endpoints/data/tokens.json rename tests/{ => test_rest_endpoints}/services/data/tokens.json (100%) rename tests/{ => test_rest_endpoints}/services/test_bitwarden.py (100%) rename tests/{ => test_rest_endpoints}/services/test_bitwarden/enable_undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_bitwarden/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_bitwarden/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_bitwarden/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_gitea.py (100%) rename tests/{ => test_rest_endpoints}/services/test_gitea/enable_undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_gitea/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_gitea/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_gitea/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_mailserver.py (100%) rename tests/{ => test_rest_endpoints}/services/test_nextcloud.py (100%) rename tests/{ => test_rest_endpoints}/services/test_nextcloud/enable_undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_nextcloud/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_nextcloud/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_nextcloud/undefined.json (100%) rename tests/{ => 
test_rest_endpoints}/services/test_ocserv.py (100%) rename tests/{ => test_rest_endpoints}/services/test_ocserv/enable_undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ocserv/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ocserv/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ocserv/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_pleroma.py (100%) rename tests/{ => test_rest_endpoints}/services/test_pleroma/enable_undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_pleroma/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_pleroma/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_pleroma/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_restic.py (100%) rename tests/{ => test_rest_endpoints}/services/test_restic/no_values.json (100%) rename tests/{ => test_rest_endpoints}/services/test_restic/some_values.json (100%) rename tests/{ => test_rest_endpoints}/services/test_restic/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_services.py (75%) rename tests/{ => test_rest_endpoints}/services/test_ssh.py (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/all_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/root_and_admin_have_keys.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/some_users.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/undefined_values.json (100%) rename tests/{ => test_rest_endpoints}/test_auth.py (99%) rename tests/{ => test_rest_endpoints}/test_system.py (100%) rename tests/{ => 
test_rest_endpoints}/test_system/domain (100%) rename tests/{ => test_rest_endpoints}/test_system/no_values.json (100%) rename tests/{ => test_rest_endpoints}/test_system/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/test_system/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/test_system/undefined.json (100%) rename tests/{ => test_rest_endpoints}/test_users.py (100%) rename tests/{ => test_rest_endpoints}/test_users/no_users.json (100%) rename tests/{ => test_rest_endpoints}/test_users/one_user.json (100%) rename tests/{ => test_rest_endpoints}/test_users/some_users.json (100%) rename tests/{ => test_rest_endpoints}/test_users/undefined.json (100%) diff --git a/tests/test_block_device_utils.py b/tests/test_block_device_utils.py index a22dad0..2676e6c 100644 --- a/tests/test_block_device_utils.py +++ b/tests/test_block_device_utils.py @@ -186,7 +186,9 @@ def resize_block_mock(mocker): return mock -def test_call_resize_from_block_device(lsblk_singular_mock, resize_block_mock, authorized_client): +def test_call_resize_from_block_device( + lsblk_singular_mock, resize_block_mock, authorized_client +): block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) block_device.resize() assert resize_block_mock.call_count == 1 @@ -223,7 +225,9 @@ def test_get_stats_from_block_device(lsblk_singular_mock, authorized_client): ] -def test_mount_block_device(lsblk_singular_mock, only_root_in_userdata, authorized_client): +def test_mount_block_device( + lsblk_singular_mock, only_root_in_userdata, authorized_client +): block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) result = block_device.mount() assert result is False @@ -270,7 +274,9 @@ def test_mount_block_device_when_undefined( ) -def test_unmount_block_device(lsblk_singular_mock, only_root_in_userdata, authorized_client): +def test_unmount_block_device( + lsblk_singular_mock, only_root_in_userdata, authorized_client +): block_device = 
BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) result = block_device.unmount() assert result is True diff --git a/tests/test_graphql/_test_system.py b/tests/test_graphql/test_system.py similarity index 89% rename from tests/test_graphql/_test_system.py rename to tests/test_graphql/test_system.py index 738f1f4..fd4937d 100644 --- a/tests/test_graphql/_test_system.py +++ b/tests/test_graphql/test_system.py @@ -124,6 +124,7 @@ def mock_dkim_key(mocker): autospec=True, return_value="I am a DKIM key", ) + return mock API_PYTHON_VERSION_INFO = """ @@ -137,7 +138,7 @@ def test_graphql_get_python_version_wrong_auth( wrong_auth_client, mock_subprocess_check_output ): """Test wrong auth""" - response = wrong_auth_client.get( + response = wrong_auth_client.post( "/graphql", json={ "query": generate_system_query([API_PYTHON_VERSION_INFO]), @@ -149,7 +150,7 @@ def test_graphql_get_python_version_wrong_auth( def test_graphql_get_python_version(authorized_client, mock_subprocess_check_output): """Test get python version""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_PYTHON_VERSION_INFO]), @@ -173,7 +174,7 @@ def test_graphql_get_system_version_unauthorized( wrong_auth_client, mock_subprocess_check_output ): """Test wrong auth""" - response = wrong_auth_client.get( + response = wrong_auth_client.post( "/graphql", json={ "query": generate_system_query([API_SYSTEM_VERSION_INFO]), @@ -188,7 +189,7 @@ def test_graphql_get_system_version_unauthorized( def test_graphql_get_system_version(authorized_client, mock_subprocess_check_output): """Test get system version""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_SYSTEM_VERSION_INFO]), @@ -209,7 +210,7 @@ domainInfo { hostname provider requiredDnsRecords { - type + recordType name content ttl @@ -219,14 +220,14 @@ domainInfo { """ -def dns_record(type="A", 
name="test.tld", content=None, ttl=3600, priority=None): +def dns_record(record_type="A", name="test-domain.tld", content=None, ttl=3600, priority=None): if content is None: - if type == "A": + if record_type == "A": content = "157.90.247.192" - elif type == "AAAA": + elif record_type == "AAAA": content = "fe80::9400:ff:fef1:34ae" return { - "type": type, + "recordType": record_type, "name": name, "content": content, "ttl": ttl, @@ -237,7 +238,7 @@ def dns_record(type="A", name="test.tld", content=None, ttl=3600, priority=None) def is_dns_record_in_array(records, dns_record) -> bool: for record in records: if ( - record["type"] == dns_record["type"] + record["recordType"] == dns_record["recordType"] and record["name"] == dns_record["name"] and record["content"] == dns_record["content"] and record["ttl"] == dns_record["ttl"] @@ -248,10 +249,10 @@ def is_dns_record_in_array(records, dns_record) -> bool: def test_graphql_get_domain( - authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on + authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on, mock_dkim_key ): """Test get domain""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_GET_DOMAIN_INFO]), @@ -263,53 +264,53 @@ def test_graphql_get_domain( assert ( response.json()["data"]["system"]["domainInfo"]["hostname"] == "test-instance" ) - assert response.json()["data"]["system"]["domainInfo"]["provider"] == "HETZNER" + assert response.json()["data"]["system"]["domainInfo"]["provider"] == "CLOUDFLARE" dns_records = response.json()["data"]["system"]["domainInfo"]["requiredDnsRecords"] assert is_dns_record_in_array(dns_records, dns_record()) - assert is_dns_record_in_array(dns_records, dns_record(type="AAAA")) - assert is_dns_record_in_array(dns_records, dns_record(name="api.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(record_type="AAAA")) + assert is_dns_record_in_array(dns_records, 
dns_record(name="api")) assert is_dns_record_in_array( - dns_records, dns_record(name="api.test.tld", type="AAAA") + dns_records, dns_record(name="api", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="cloud.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="cloud")) assert is_dns_record_in_array( - dns_records, dns_record(name="cloud.test.tld", type="AAAA") + dns_records, dns_record(name="cloud", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="git.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="git")) assert is_dns_record_in_array( - dns_records, dns_record(name="git.test.tld", type="AAAA") + dns_records, dns_record(name="git", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="meet.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="meet")) assert is_dns_record_in_array( - dns_records, dns_record(name="meet.test.tld", type="AAAA") + dns_records, dns_record(name="meet", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="password.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="password")) assert is_dns_record_in_array( - dns_records, dns_record(name="password.test.tld", type="AAAA") + dns_records, dns_record(name="password", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="social.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="social")) assert is_dns_record_in_array( - dns_records, dns_record(name="social.test.tld", type="AAAA") + dns_records, dns_record(name="social", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="vpn.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="vpn")) assert is_dns_record_in_array( - dns_records, dns_record(name="vpn.test.tld", type="AAAA") + dns_records, dns_record(name="vpn", 
record_type="AAAA") ) assert is_dns_record_in_array( dns_records, - dns_record(name="test.tld", type="MX", content="test.tld", priority=10), + dns_record(name="test-domain.tld", record_type="MX", content="test-domain.tld", priority=10), ) assert is_dns_record_in_array( dns_records, dns_record( - name="_dmarc.test.tld", type="TXT", content="v=DMARC1; p=none", ttl=18000 + name="_dmarc", record_type="TXT", content="v=DMARC1; p=none", ttl=18000 ), ) assert is_dns_record_in_array( dns_records, dns_record( - name="test.tld", - type="TXT", + name="test-domain.tld", + record_type="TXT", content="v=spf1 a mx ip4:157.90.247.192 -all", ttl=18000, ), @@ -317,8 +318,8 @@ def test_graphql_get_domain( assert is_dns_record_in_array( dns_records, dns_record( - name="selector._domainkey.test.tld", - type="TXT", + name="selector._domainkey", + record_type="TXT", content="I am a DKIM key", ttl=18000, ), @@ -334,7 +335,7 @@ settings { def test_graphql_get_timezone_unauthorized(client, turned_on): """Test get timezone without auth""" - response = client.get( + response = client.post( "/graphql", json={ "query": generate_system_query([API_GET_TIMEZONE]), @@ -346,7 +347,7 @@ def test_graphql_get_timezone_unauthorized(client, turned_on): def test_graphql_get_timezone(authorized_client, turned_on): """Test get timezone""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_GET_TIMEZONE]), @@ -359,7 +360,7 @@ def test_graphql_get_timezone(authorized_client, turned_on): def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): """Test get timezone when none is defined in config""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_GET_TIMEZONE]), @@ -484,7 +485,7 @@ def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned API_GET_AUTO_UPGRADE_SETTINGS_QUERY = """ settings { autoUpgrade { - 
enableAutoUpgrade + enable allowReboot } } @@ -493,10 +494,10 @@ settings { def test_graphql_get_auto_upgrade_unauthorized(client, turned_on): """Test get auto upgrade settings without auth""" - response = client.get( + response = client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 @@ -505,66 +506,66 @@ def test_graphql_get_auto_upgrade_unauthorized(client, turned_on): def test_graphql_get_auto_upgrade(authorized_client, turned_on): """Test get auto upgrade settings""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 assert response.json().get("data") is not None assert ( - response.json()["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True + response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True ) - assert response.json()["data"]["settings"]["autoUpgrade"]["allowReboot"] is True + assert response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] is True def test_graphql_get_auto_upgrade_on_undefined(authorized_client, undefined_config): """Test get auto upgrade settings when none is defined in config""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 assert response.json().get("data") is not None assert ( - response.json()["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True + response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True ) - assert response.json()["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert 
response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] is False def test_graphql_get_auto_upgrade_without_vlaues(authorized_client, no_values): """Test get auto upgrade settings without values""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 assert response.json().get("data") is not None assert ( - response.json()["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True + response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True ) - assert response.json()["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] is False def test_graphql_get_auto_upgrade_turned_off(authorized_client, turned_off): """Test get auto upgrade settings when turned off""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 assert response.json().get("data") is not None assert ( - response.json()["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is False + response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is False ) - assert response.json()["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] is False API_CHANGE_AUTO_UPGRADE_SETTINGS = """ @@ -805,7 +806,7 @@ def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ API_PULL_SYSTEM_CONFIGURATION_MUTATION = """ -mutation testPullSystemConfiguration() { +mutation testPullSystemConfiguration { pullRepositoryChanges { success message diff --git 
a/tests/test_graphql/test_system/turned_on.json b/tests/test_graphql/test_system/turned_on.json index 337e47f..99a023c 100644 --- a/tests/test_graphql/test_system/turned_on.json +++ b/tests/test_graphql/test_system/turned_on.json @@ -33,7 +33,7 @@ }, "username": "tester", "gitea": { - "enable": false + "enable": true }, "ocserv": { "enable": true @@ -49,4 +49,4 @@ "sshKeys": [ "ssh-rsa KEY test@pc" ] -} \ No newline at end of file +} diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 2732335..87f1386 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -30,3 +30,21 @@ def test_jobs(authorized_client, jobs_file, shared_datadir): ) assert jobs.get_jobs() == [test_job] + + +@pytest.fixture +def mock_subprocess_run(mocker): + mock = mocker.patch("subprocess.run", autospec=True) + return mock + + +@pytest.fixture +def mock_shutil_move(mocker): + mock = mocker.patch("shutil.move", autospec=True) + return mock + + +@pytest.fixture +def mock_shutil_chown(mocker): + mock = mocker.patch("shutil.chown", autospec=True) + return mock diff --git a/tests/test_rest_endpoints/data/jobs.json b/tests/test_rest_endpoints/data/jobs.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tests/test_rest_endpoints/data/jobs.json @@ -0,0 +1 @@ +{} diff --git a/tests/test_rest_endpoints/data/tokens.json b/tests/test_rest_endpoints/data/tokens.json new file mode 100644 index 0000000..9be9d02 --- /dev/null +++ b/tests/test_rest_endpoints/data/tokens.json @@ -0,0 +1,14 @@ +{ + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": "2022-01-14 08:31:10.789314" + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314" + } + ] +} \ No newline at end of file diff --git a/tests/services/data/tokens.json b/tests/test_rest_endpoints/services/data/tokens.json similarity index 100% rename from tests/services/data/tokens.json rename to tests/test_rest_endpoints/services/data/tokens.json diff --git 
a/tests/services/test_bitwarden.py b/tests/test_rest_endpoints/services/test_bitwarden.py similarity index 100% rename from tests/services/test_bitwarden.py rename to tests/test_rest_endpoints/services/test_bitwarden.py diff --git a/tests/services/test_bitwarden/enable_undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json similarity index 100% rename from tests/services/test_bitwarden/enable_undefined.json rename to tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json diff --git a/tests/services/test_bitwarden/turned_off.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json similarity index 100% rename from tests/services/test_bitwarden/turned_off.json rename to tests/test_rest_endpoints/services/test_bitwarden/turned_off.json diff --git a/tests/services/test_bitwarden/turned_on.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json similarity index 100% rename from tests/services/test_bitwarden/turned_on.json rename to tests/test_rest_endpoints/services/test_bitwarden/turned_on.json diff --git a/tests/services/test_bitwarden/undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json similarity index 100% rename from tests/services/test_bitwarden/undefined.json rename to tests/test_rest_endpoints/services/test_bitwarden/undefined.json diff --git a/tests/services/test_gitea.py b/tests/test_rest_endpoints/services/test_gitea.py similarity index 100% rename from tests/services/test_gitea.py rename to tests/test_rest_endpoints/services/test_gitea.py diff --git a/tests/services/test_gitea/enable_undefined.json b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json similarity index 100% rename from tests/services/test_gitea/enable_undefined.json rename to tests/test_rest_endpoints/services/test_gitea/enable_undefined.json diff --git a/tests/services/test_gitea/turned_off.json b/tests/test_rest_endpoints/services/test_gitea/turned_off.json 
similarity index 100% rename from tests/services/test_gitea/turned_off.json rename to tests/test_rest_endpoints/services/test_gitea/turned_off.json diff --git a/tests/services/test_gitea/turned_on.json b/tests/test_rest_endpoints/services/test_gitea/turned_on.json similarity index 100% rename from tests/services/test_gitea/turned_on.json rename to tests/test_rest_endpoints/services/test_gitea/turned_on.json diff --git a/tests/services/test_gitea/undefined.json b/tests/test_rest_endpoints/services/test_gitea/undefined.json similarity index 100% rename from tests/services/test_gitea/undefined.json rename to tests/test_rest_endpoints/services/test_gitea/undefined.json diff --git a/tests/services/test_mailserver.py b/tests/test_rest_endpoints/services/test_mailserver.py similarity index 100% rename from tests/services/test_mailserver.py rename to tests/test_rest_endpoints/services/test_mailserver.py diff --git a/tests/services/test_nextcloud.py b/tests/test_rest_endpoints/services/test_nextcloud.py similarity index 100% rename from tests/services/test_nextcloud.py rename to tests/test_rest_endpoints/services/test_nextcloud.py diff --git a/tests/services/test_nextcloud/enable_undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json similarity index 100% rename from tests/services/test_nextcloud/enable_undefined.json rename to tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json diff --git a/tests/services/test_nextcloud/turned_off.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json similarity index 100% rename from tests/services/test_nextcloud/turned_off.json rename to tests/test_rest_endpoints/services/test_nextcloud/turned_off.json diff --git a/tests/services/test_nextcloud/turned_on.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json similarity index 100% rename from tests/services/test_nextcloud/turned_on.json rename to 
tests/test_rest_endpoints/services/test_nextcloud/turned_on.json diff --git a/tests/services/test_nextcloud/undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json similarity index 100% rename from tests/services/test_nextcloud/undefined.json rename to tests/test_rest_endpoints/services/test_nextcloud/undefined.json diff --git a/tests/services/test_ocserv.py b/tests/test_rest_endpoints/services/test_ocserv.py similarity index 100% rename from tests/services/test_ocserv.py rename to tests/test_rest_endpoints/services/test_ocserv.py diff --git a/tests/services/test_ocserv/enable_undefined.json b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json similarity index 100% rename from tests/services/test_ocserv/enable_undefined.json rename to tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json diff --git a/tests/services/test_ocserv/turned_off.json b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json similarity index 100% rename from tests/services/test_ocserv/turned_off.json rename to tests/test_rest_endpoints/services/test_ocserv/turned_off.json diff --git a/tests/services/test_ocserv/turned_on.json b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json similarity index 100% rename from tests/services/test_ocserv/turned_on.json rename to tests/test_rest_endpoints/services/test_ocserv/turned_on.json diff --git a/tests/services/test_ocserv/undefined.json b/tests/test_rest_endpoints/services/test_ocserv/undefined.json similarity index 100% rename from tests/services/test_ocserv/undefined.json rename to tests/test_rest_endpoints/services/test_ocserv/undefined.json diff --git a/tests/services/test_pleroma.py b/tests/test_rest_endpoints/services/test_pleroma.py similarity index 100% rename from tests/services/test_pleroma.py rename to tests/test_rest_endpoints/services/test_pleroma.py diff --git a/tests/services/test_pleroma/enable_undefined.json 
b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json similarity index 100% rename from tests/services/test_pleroma/enable_undefined.json rename to tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json diff --git a/tests/services/test_pleroma/turned_off.json b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json similarity index 100% rename from tests/services/test_pleroma/turned_off.json rename to tests/test_rest_endpoints/services/test_pleroma/turned_off.json diff --git a/tests/services/test_pleroma/turned_on.json b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json similarity index 100% rename from tests/services/test_pleroma/turned_on.json rename to tests/test_rest_endpoints/services/test_pleroma/turned_on.json diff --git a/tests/services/test_pleroma/undefined.json b/tests/test_rest_endpoints/services/test_pleroma/undefined.json similarity index 100% rename from tests/services/test_pleroma/undefined.json rename to tests/test_rest_endpoints/services/test_pleroma/undefined.json diff --git a/tests/services/test_restic.py b/tests/test_rest_endpoints/services/test_restic.py similarity index 100% rename from tests/services/test_restic.py rename to tests/test_rest_endpoints/services/test_restic.py diff --git a/tests/services/test_restic/no_values.json b/tests/test_rest_endpoints/services/test_restic/no_values.json similarity index 100% rename from tests/services/test_restic/no_values.json rename to tests/test_rest_endpoints/services/test_restic/no_values.json diff --git a/tests/services/test_restic/some_values.json b/tests/test_rest_endpoints/services/test_restic/some_values.json similarity index 100% rename from tests/services/test_restic/some_values.json rename to tests/test_rest_endpoints/services/test_restic/some_values.json diff --git a/tests/services/test_restic/undefined.json b/tests/test_rest_endpoints/services/test_restic/undefined.json similarity index 100% rename from 
tests/services/test_restic/undefined.json rename to tests/test_rest_endpoints/services/test_restic/undefined.json diff --git a/tests/services/test_services.py b/tests/test_rest_endpoints/services/test_services.py similarity index 75% rename from tests/services/test_services.py rename to tests/test_rest_endpoints/services/test_services.py index 03ae104..1108e8c 100644 --- a/tests/services/test_services.py +++ b/tests/test_rest_endpoints/services/test_services.py @@ -12,68 +12,78 @@ def call_args_asserts(mocked_object): assert mocked_object.call_count == 7 assert mocked_object.call_args_list[0][0][0] == [ "systemctl", - "status", + "show", "dovecot2.service", ] assert mocked_object.call_args_list[1][0][0] == [ "systemctl", - "status", + "show", "postfix.service", ] assert mocked_object.call_args_list[2][0][0] == [ "systemctl", - "status", + "show", "vaultwarden.service", ] assert mocked_object.call_args_list[3][0][0] == [ "systemctl", - "status", + "show", "gitea.service", ] assert mocked_object.call_args_list[4][0][0] == [ "systemctl", - "status", + "show", "phpfpm-nextcloud.service", ] assert mocked_object.call_args_list[5][0][0] == [ "systemctl", - "status", + "show", "ocserv.service", ] assert mocked_object.call_args_list[6][0][0] == [ "systemctl", - "status", + "show", "pleroma.service", ] -class ProcessMock: - """Mock subprocess.Popen""" +SUCCESSFUL_STATUS = b""" +Type=oneshot +ExitType=main +Restart=no +NotifyAccess=none +RestartUSec=100ms +LoadState=loaded +ActiveState=active +FreezerState=running +SubState=exited +""" - def __init__(self, args, **kwargs): - self.args = args - self.kwargs = kwargs - - def communicate(): - return (b"", None) - - returncode = 0 - - -class BrokenServiceMock(ProcessMock): - returncode = 3 +FAILED_STATUS = b""" +Type=oneshot +ExitType=main +Restart=no +NotifyAccess=none +RestartUSec=100ms +LoadState=loaded +ActiveState=failed +FreezerState=running +SubState=exited +""" @pytest.fixture def mock_subproccess_popen(mocker): - mock = 
mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=SUCCESSFUL_STATUS + ) return mock @pytest.fixture def mock_broken_service(mocker): mock = mocker.patch( - "subprocess.Popen", autospec=True, return_value=BrokenServiceMock + "subprocess.check_output", autospec=True, return_value=FAILED_STATUS ) return mock @@ -116,13 +126,13 @@ def test_no_dkim_key(authorized_client, mock_broken_service): response = authorized_client.get("/services/status") assert response.status_code == 200 assert response.json() == { - "imap": 3, - "smtp": 3, + "imap": 1, + "smtp": 1, "http": 0, - "bitwarden": 3, - "gitea": 3, - "nextcloud": 3, - "ocserv": 3, - "pleroma": 3, + "bitwarden": 1, + "gitea": 1, + "nextcloud": 1, + "ocserv": 1, + "pleroma": 1, } call_args_asserts(mock_broken_service) diff --git a/tests/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py similarity index 100% rename from tests/services/test_ssh.py rename to tests/test_rest_endpoints/services/test_ssh.py diff --git a/tests/services/test_ssh/all_off.json b/tests/test_rest_endpoints/services/test_ssh/all_off.json similarity index 100% rename from tests/services/test_ssh/all_off.json rename to tests/test_rest_endpoints/services/test_ssh/all_off.json diff --git a/tests/services/test_ssh/root_and_admin_have_keys.json b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json similarity index 100% rename from tests/services/test_ssh/root_and_admin_have_keys.json rename to tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json diff --git a/tests/services/test_ssh/some_users.json b/tests/test_rest_endpoints/services/test_ssh/some_users.json similarity index 100% rename from tests/services/test_ssh/some_users.json rename to tests/test_rest_endpoints/services/test_ssh/some_users.json diff --git a/tests/services/test_ssh/turned_off.json 
b/tests/test_rest_endpoints/services/test_ssh/turned_off.json similarity index 100% rename from tests/services/test_ssh/turned_off.json rename to tests/test_rest_endpoints/services/test_ssh/turned_off.json diff --git a/tests/services/test_ssh/turned_on.json b/tests/test_rest_endpoints/services/test_ssh/turned_on.json similarity index 100% rename from tests/services/test_ssh/turned_on.json rename to tests/test_rest_endpoints/services/test_ssh/turned_on.json diff --git a/tests/services/test_ssh/undefined.json b/tests/test_rest_endpoints/services/test_ssh/undefined.json similarity index 100% rename from tests/services/test_ssh/undefined.json rename to tests/test_rest_endpoints/services/test_ssh/undefined.json diff --git a/tests/services/test_ssh/undefined_values.json b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json similarity index 100% rename from tests/services/test_ssh/undefined_values.json rename to tests/test_rest_endpoints/services/test_ssh/undefined_values.json diff --git a/tests/test_auth.py b/tests/test_rest_endpoints/test_auth.py similarity index 99% rename from tests/test_auth.py rename to tests/test_rest_endpoints/test_auth.py index 5430e3a..1083be5 100644 --- a/tests/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -2,12 +2,10 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring import datetime -import json -import re import pytest from mnemonic import Mnemonic -from .common import read_json, write_json +from tests.common import read_json, write_json TOKENS_FILE_CONTETS = { diff --git a/tests/test_system.py b/tests/test_rest_endpoints/test_system.py similarity index 100% rename from tests/test_system.py rename to tests/test_rest_endpoints/test_system.py diff --git a/tests/test_system/domain b/tests/test_rest_endpoints/test_system/domain similarity index 100% rename from tests/test_system/domain rename to tests/test_rest_endpoints/test_system/domain diff --git a/tests/test_system/no_values.json 
b/tests/test_rest_endpoints/test_system/no_values.json similarity index 100% rename from tests/test_system/no_values.json rename to tests/test_rest_endpoints/test_system/no_values.json diff --git a/tests/test_system/turned_off.json b/tests/test_rest_endpoints/test_system/turned_off.json similarity index 100% rename from tests/test_system/turned_off.json rename to tests/test_rest_endpoints/test_system/turned_off.json diff --git a/tests/test_system/turned_on.json b/tests/test_rest_endpoints/test_system/turned_on.json similarity index 100% rename from tests/test_system/turned_on.json rename to tests/test_rest_endpoints/test_system/turned_on.json diff --git a/tests/test_system/undefined.json b/tests/test_rest_endpoints/test_system/undefined.json similarity index 100% rename from tests/test_system/undefined.json rename to tests/test_rest_endpoints/test_system/undefined.json diff --git a/tests/test_users.py b/tests/test_rest_endpoints/test_users.py similarity index 100% rename from tests/test_users.py rename to tests/test_rest_endpoints/test_users.py diff --git a/tests/test_users/no_users.json b/tests/test_rest_endpoints/test_users/no_users.json similarity index 100% rename from tests/test_users/no_users.json rename to tests/test_rest_endpoints/test_users/no_users.json diff --git a/tests/test_users/one_user.json b/tests/test_rest_endpoints/test_users/one_user.json similarity index 100% rename from tests/test_users/one_user.json rename to tests/test_rest_endpoints/test_users/one_user.json diff --git a/tests/test_users/some_users.json b/tests/test_rest_endpoints/test_users/some_users.json similarity index 100% rename from tests/test_users/some_users.json rename to tests/test_rest_endpoints/test_users/some_users.json diff --git a/tests/test_users/undefined.json b/tests/test_rest_endpoints/test_users/undefined.json similarity index 100% rename from tests/test_users/undefined.json rename to tests/test_rest_endpoints/test_users/undefined.json -- 2.42.0 From 
3a5d4d5e86c59185c9a328ffc4ca1b6518af19a4 Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 20 Aug 2022 22:49:51 +0400 Subject: [PATCH 40/50] Implement DNS records getter --- selfprivacy_api/graphql/queries/system.py | 19 +++++- selfprivacy_api/services/__init__.py | 45 +++++++++++-- .../services/bitwarden/__init__.py | 10 ++- selfprivacy_api/services/gitea/__init__.py | 10 ++- .../services/mailserver/__init__.py | 65 +++++++++++++------ .../services/nextcloud/__init__.py | 10 ++- selfprivacy_api/services/ocserv/__init__.py | 17 ++++- selfprivacy_api/services/pleroma/__init__.py | 10 ++- selfprivacy_api/utils/auth.py | 2 +- 9 files changed, 148 insertions(+), 40 deletions(-) diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index ef1476a..e3f0eff 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -9,6 +9,7 @@ from selfprivacy_api.graphql.queries.common import Alert, Severity from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvider from selfprivacy_api.jobs import Jobs from selfprivacy_api.jobs.migrate_to_binds import is_bind_migrated +from selfprivacy_api.services import get_all_required_dns_records from selfprivacy_api.utils import ReadUserData import selfprivacy_api.actions.system as system_actions import selfprivacy_api.actions.ssh as ssh_actions @@ -21,7 +22,20 @@ class SystemDomainInfo: domain: str hostname: str provider: DnsProvider - required_dns_records: typing.List[DnsRecord] + @strawberry.field + def required_dns_records(self) -> typing.List[DnsRecord]: + """Collect all required DNS records for all services""" + return [ + DnsRecord( + record_type=record.type, + name=record.name, + content=record.content, + ttl=record.ttl, + priority=record.priority, + ) + for record in get_all_required_dns_records() + ] + def get_system_domain_info() -> SystemDomainInfo: @@ -31,8 +45,6 @@ def get_system_domain_info() -> SystemDomainInfo: 
domain=user_data["domain"], hostname=user_data["hostname"], provider=DnsProvider.CLOUDFLARE, - # TODO: get ip somehow - required_dns_records=[], ) @@ -142,6 +154,7 @@ class System: settings: SystemSettings = SystemSettings() info: SystemInfo = SystemInfo() provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info) + @strawberry.field def busy(self) -> bool: """Check if the system is busy""" diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py index eb9b7be..04d496f 100644 --- a/selfprivacy_api/services/__init__.py +++ b/selfprivacy_api/services/__init__.py @@ -7,8 +7,8 @@ from selfprivacy_api.services.mailserver import MailServer from selfprivacy_api.services.nextcloud import Nextcloud from selfprivacy_api.services.pleroma import Pleroma from selfprivacy_api.services.ocserv import Ocserv -from selfprivacy_api.services.service import Service - +from selfprivacy_api.services.service import Service, ServiceDnsRecord +import selfprivacy_api.utils.network as network_utils services: list[Service] = [ Bitwarden(), @@ -20,7 +20,7 @@ services: list[Service] = [ ] -def get_all_services() -> typing.List[Service]: +def get_all_services() -> list[Service]: return services @@ -31,13 +31,46 @@ def get_service_by_id(service_id: str) -> typing.Optional[Service]: return None -def get_enabled_services() -> typing.List[Service]: +def get_enabled_services() -> list[Service]: return [service for service in services if service.is_enabled()] -def get_disabled_services() -> typing.List[Service]: +def get_disabled_services() -> list[Service]: return [service for service in services if not service.is_enabled()] -def get_services_by_location(location: str) -> typing.List[Service]: +def get_services_by_location(location: str) -> list[Service]: return [service for service in services if service.get_location() == location] + +def get_all_required_dns_records() -> list[ServiceDnsRecord]: + ip4 = network_utils.get_ip4() + ip6 = 
network_utils.get_ip6() + dns_records: list[ServiceDnsRecord] = [ + ServiceDnsRecord( + type="A", + name="api", + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="api", + content=ip6, + ttl=3600, + ), + ServiceDnsRecord( + type="A", + name="meet", + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="meet", + content=ip6, + ttl=3600, + ), + ] + for service in get_enabled_services(): + dns_records += service.get_dns_records() + return dns_records diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index a581ec9..ea93de1 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -11,7 +11,7 @@ from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceS from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils.huey import huey -from selfprivacy_api.utils.network import get_ip4 +import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.bitwarden.icon import BITWARDEN_ICON @@ -132,7 +132,13 @@ class Bitwarden(Service): ServiceDnsRecord( type="A", name="password", - content=get_ip4(), + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="password", + content=network_utils.get_ip6(), ttl=3600, ), ] diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index 7a5db1b..d563164 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -11,7 +11,7 @@ from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceS from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils.huey import huey -from 
selfprivacy_api.utils.network import get_ip4 +import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.gitea.icon import GITEA_ICON @@ -129,7 +129,13 @@ class Gitea(Service): ServiceDnsRecord( type="A", name="git", - content=get_ip4(), + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="git", + content=network_utils.get_ip6(), ttl=3600, ), ] diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index dfcaa7f..ea085ba 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -9,11 +9,10 @@ from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus -from selfprivacy_api.utils import ReadUserData, WriteUserData, get_dkim_key, get_domain -from selfprivacy_api.utils import huey +import selfprivacy_api.utils as utils from selfprivacy_api.utils.block_devices import BlockDevice from selfprivacy_api.utils.huey import huey -from selfprivacy_api.utils.network import get_ip4 +import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.mailserver.icon import MAILSERVER_ICON @@ -58,21 +57,34 @@ class MailServer(Service): imap_status = get_service_status("dovecot2.service") smtp_status = get_service_status("postfix.service") - if ( - imap_status == ServiceStatus.RUNNING - and smtp_status == ServiceStatus.RUNNING - ): - return ServiceStatus.RUNNING - elif imap_status == ServiceStatus.ERROR or smtp_status == ServiceStatus.ERROR: - return ServiceStatus.ERROR + if imap_status == ServiceStatus.ACTIVE and smtp_status == ServiceStatus.ACTIVE: + return ServiceStatus.ACTIVE + elif imap_status == ServiceStatus.FAILED or 
smtp_status == ServiceStatus.FAILED: + return ServiceStatus.FAILED elif ( - imap_status == ServiceStatus.STOPPED or smtp_status == ServiceStatus.STOPPED + imap_status == ServiceStatus.RELOADING + or smtp_status == ServiceStatus.RELOADING ): - return ServiceStatus.STOPPED + return ServiceStatus.RELOADING + elif ( + imap_status == ServiceStatus.ACTIVATING + or smtp_status == ServiceStatus.ACTIVATING + ): + return ServiceStatus.ACTIVATING + elif ( + imap_status == ServiceStatus.DEACTIVATING + or smtp_status == ServiceStatus.DEACTIVATING + ): + return ServiceStatus.DEACTIVATING + elif ( + imap_status == ServiceStatus.INACTIVE + or smtp_status == ServiceStatus.INACTIVE + ): + return ServiceStatus.INACTIVE elif imap_status == ServiceStatus.OFF or smtp_status == ServiceStatus.OFF: return ServiceStatus.OFF else: - return ServiceStatus.DEGRADED + return ServiceStatus.FAILED @staticmethod def enable(): @@ -115,7 +127,7 @@ class MailServer(Service): @staticmethod def get_location() -> str: - with ReadUserData() as user_data: + with utils.ReadUserData() as user_data: if user_data.get("useBinds", False): return user_data.get("mailserver", {}).get("location", "sda1") else: @@ -123,25 +135,38 @@ class MailServer(Service): @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: - domain = get_domain() - dkim_record = get_dkim_key(domain) - ip4 = get_ip4() + domain = utils.get_domain() + dkim_record = utils.get_dkim_key(domain) + ip4 = network_utils.get_ip4() + ip6 = network_utils.get_ip6() if dkim_record is None: return [] return [ + ServiceDnsRecord( + type="A", + name=domain, + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name=domain, + content=ip6, + ttl=3600, + ), ServiceDnsRecord( type="MX", name=domain, content=domain, ttl=3600, priority=10 ), ServiceDnsRecord( - type="TXT", name="_dmarc", content=f"v=DMARC1; p=none", ttl=3600 + type="TXT", name="_dmarc", content=f"v=DMARC1; p=none", ttl=18000 ), ServiceDnsRecord( - type="TXT", 
name=domain, content=f"v=spf1 a mx ip4:{ip4} -all", ttl=3600 + type="TXT", name=domain, content=f"v=spf1 a mx ip4:{ip4} -all", ttl=18000 ), ServiceDnsRecord( - type="TXT", name="selector._domainkey", content=dkim_record, ttl=3600 + type="TXT", name="selector._domainkey", content=dkim_record, ttl=18000 ), ] diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 6bd616a..4057b49 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -9,7 +9,7 @@ from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice -from selfprivacy_api.utils.network import get_ip4 +import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.nextcloud.icon import NEXTCLOUD_ICON @@ -137,7 +137,13 @@ class Nextcloud(Service): ServiceDnsRecord( type="A", name="cloud", - content=get_ip4(), + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="cloud", + content=network_utils.get_ip6(), ttl=3600, ), ] diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index 2c86259..dcfacaa 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -9,8 +9,8 @@ from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.utils.block_devices import BlockDevice -from selfprivacy_api.utils.network import get_ip4 from selfprivacy_api.services.ocserv.icon import OCSERV_ICON +import selfprivacy_api.utils.network as 
network_utils class Ocserv(Service): @@ -98,7 +98,20 @@ class Ocserv(Service): @staticmethod def get_dns_records() -> typing.List[ServiceDnsRecord]: - return [] + return [ + ServiceDnsRecord( + type="A", + name="vpn", + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="vpn", + content=network_utils.get_ip6(), + ttl=3600, + ), + ] @staticmethod def get_storage_usage() -> int: diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index 1604fb4..97c11f5 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -9,7 +9,7 @@ from selfprivacy_api.services.generic_status_getter import get_service_status from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice -from selfprivacy_api.utils.network import get_ip4 +import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.pleroma.icon import PLEROMA_ICON @@ -117,7 +117,13 @@ class Pleroma(Service): ServiceDnsRecord( type="A", name="social", - content=get_ip4(), + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="social", + content=network_utils.get_ip6(), ttl=3600, ), ] diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index d059d38..ecaf9af 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -88,7 +88,7 @@ def is_token_name_pair_valid(token_name, token): return False -def get_token_name(token): +def get_token_name(token: str) -> typing.Optional[str]: """Return the name of the token provided""" with ReadUserData(UserDataFiles.TOKENS) as tokens: for t in tokens["tokens"]: -- 2.42.0 From bf1cd328956de095a090f6d22e600ee8d1a79e0c Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 20 Aug 2022 22:50:25 
+0400 Subject: [PATCH 41/50] Change the ServiceStatus to match systemctl show --- .../graphql/common_types/service.py | 10 +++--- selfprivacy_api/rest/services.py | 36 ++++++++++--------- .../services/generic_service_mover.py | 7 ++-- .../services/generic_status_getter.py | 35 +++++++++--------- selfprivacy_api/services/service.py | 10 +++--- 5 files changed, 53 insertions(+), 45 deletions(-) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 99310f7..c1246ca 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -56,10 +56,12 @@ class ServiceStorageUsage(StorageUsageInterface): @strawberry.enum class ServiceStatusEnum(Enum): - RUNNING = "RUNNING" - DEGRADED = "DEGRADED" - ERROR = "ERROR" - STOPPED = "STOPPED" + ACTIVE = "ACTIVE" + RELOADING = "RELOADING" + INACTIVE = "INACTIVE" + FAILED = "FAILED" + ACTIVATING = "ACTIVATING" + DEACTIVATING = "DEACTIVATING" OFF = "OFF" diff --git a/selfprivacy_api/rest/services.py b/selfprivacy_api/rest/services.py index d374e7b..c9d5ff9 100644 --- a/selfprivacy_api/rest/services.py +++ b/selfprivacy_api/rest/services.py @@ -36,11 +36,13 @@ router = APIRouter( def service_status_to_return_code(status: ServiceStatus): - if status == ServiceStatus.RUNNING: + """Converts service status object to return code for + compatibility with legacy api""" + if status == ServiceStatus.ACTIVE: return 0 - elif status == ServiceStatus.ERROR: + elif status == ServiceStatus.FAILED: return 1 - elif status == ServiceStatus.STOPPED: + elif status == ServiceStatus.INACTIVE: return 3 elif status == ServiceStatus.OFF: return 4 @@ -317,13 +319,13 @@ async def rest_send_ssh_key(input: SshKeyInput): """Send the SSH key""" try: create_ssh_key("root", input.public_key) - except KeyAlreadyExists: - raise HTTPException(status_code=409, detail="Key already exists") - except InvalidPublicKey: + except KeyAlreadyExists as error: + raise 
HTTPException(status_code=409, detail="Key already exists") from error + except InvalidPublicKey as error: raise HTTPException( status_code=400, detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", - ) + ) from error return { "status": 0, @@ -345,15 +347,15 @@ async def rest_get_ssh_keys(username: str): async def rest_add_ssh_key(username: str, input: SshKeyInput): try: create_ssh_key(username, input.public_key) - except KeyAlreadyExists: - raise HTTPException(status_code=409, detail="Key already exists") - except InvalidPublicKey: + except KeyAlreadyExists as error: + raise HTTPException(status_code=409, detail="Key already exists") from error + except InvalidPublicKey as error: raise HTTPException( status_code=400, detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", - ) - except UserNotFound: - raise HTTPException(status_code=404, detail="User not found") + ) from error + except UserNotFound as error: + raise HTTPException(status_code=404, detail="User not found") from error return { "message": "New SSH key successfully written", @@ -364,8 +366,8 @@ async def rest_add_ssh_key(username: str, input: SshKeyInput): async def rest_delete_ssh_key(username: str, input: SshKeyInput): try: remove_ssh_key(username, input.public_key) - except KeyNotFound: - raise HTTPException(status_code=404, detail="Key not found") - except UserNotFound: - raise HTTPException(status_code=404, detail="User not found") + except KeyNotFound as error: + raise HTTPException(status_code=404, detail="Key not found") from error + except UserNotFound as error: + raise HTTPException(status_code=404, detail="User not found") from error return {"message": "SSH key deleted"} diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index a214830..c7d7c3b 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -103,10 +103,13 @@ def 
move_service( progress=5, ) service.stop() - # Wait for Nextcloud to stop, check every second + # Wait for the service to stop, check every second # If it does not stop in 30 seconds, abort for _ in range(30): - if service.get_status() != ServiceStatus.RUNNING: + if service.get_status() not in ( + ServiceStatus.ACTIVATING, + ServiceStatus.DEACTIVATING, + ): break time.sleep(1) else: diff --git a/selfprivacy_api/services/generic_status_getter.py b/selfprivacy_api/services/generic_status_getter.py index d53011e..c17f4d6 100644 --- a/selfprivacy_api/services/generic_status_getter.py +++ b/selfprivacy_api/services/generic_status_getter.py @@ -1,6 +1,5 @@ """Generic service status fetcher using systemctl""" import subprocess -import typing from selfprivacy_api.services.service import ServiceStatus @@ -8,22 +7,22 @@ from selfprivacy_api.services.service import ServiceStatus def get_service_status(service: str) -> ServiceStatus: """ Return service status from systemd. - Use command return code to determine status. - - Return code 0 means service is running. - Return code 1 or 2 means service is in error stat. - Return code 3 means service is stopped. - Return code 4 means service is off. + Use systemctl show to get the status of a service. + Get ActiveState from the output. 
""" - service_status = subprocess.Popen(["systemctl", "status", service]) - service_status.communicate()[0] - if service_status.returncode == 0: - return ServiceStatus.RUNNING - elif service_status.returncode == 1 or service_status.returncode == 2: - return ServiceStatus.ERROR - elif service_status.returncode == 3: - return ServiceStatus.STOPPED - elif service_status.returncode == 4: + service_status = subprocess.check_output(["systemctl", "show", service]) + if b"LoadState=not-found" in service_status: return ServiceStatus.OFF - else: - return ServiceStatus.DEGRADED + if b"ActiveState=active" in service_status: + return ServiceStatus.ACTIVE + if b"ActiveState=inactive" in service_status: + return ServiceStatus.INACTIVE + if b"ActiveState=activating" in service_status: + return ServiceStatus.ACTIVATING + if b"ActiveState=deactivating" in service_status: + return ServiceStatus.DEACTIVATING + if b"ActiveState=failed" in service_status: + return ServiceStatus.FAILED + if b"ActiveState=reloading" in service_status: + return ServiceStatus.RELOADING + return ServiceStatus.OFF diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 0defcff..515e28f 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -12,10 +12,12 @@ from selfprivacy_api.utils.block_devices import BlockDevice class ServiceStatus(Enum): """Enum for service status""" - RUNNING = "RUNNING" - DEGRADED = "DEGRADED" - ERROR = "ERROR" - STOPPED = "STOPPED" + ACTIVE = "ACTIVE" + RELOADING = "RELOADING" + INACTIVE = "INACTIVE" + FAILED = "FAILED" + ACTIVATING = "ACTIVATING" + DEACTIVATING = "DEACTIVATING" OFF = "OFF" -- 2.42.0 From 2e22ad721982ce5911a7d4da01d9f5d3dcc4597e Mon Sep 17 00:00:00 2001 From: inexcode Date: Sat, 20 Aug 2022 22:50:42 +0400 Subject: [PATCH 42/50] Implement Pull repository changes in GraphQL --- .../graphql/mutations/system_mutations.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git 
a/selfprivacy_api/graphql/mutations/system_mutations.py b/selfprivacy_api/graphql/mutations/system_mutations.py index c19460f..daada17 100644 --- a/selfprivacy_api/graphql/mutations/system_mutations.py +++ b/selfprivacy_api/graphql/mutations/system_mutations.py @@ -111,3 +111,18 @@ class SystemMutations: message="System reboot has started", code=200, ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def pull_repository_changes(self) -> GenericMutationReturn: + result = system_actions.pull_repository_changes() + if result.status == 0: + return GenericMutationReturn( + success=True, + message="Repository changes pulled", + code=200, + ) + return GenericMutationReturn( + success=False, + message=f"Failed to pull repository changes:\n{result.data}", + code=500, + ) -- 2.42.0 From 50a309e2a2010635209abd3bd3c989da3353befa Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 22 Aug 2022 21:45:00 +0400 Subject: [PATCH 43/50] Add remove job by uid endpoint --- .../graphql/mutations/job_mutations.py | 27 ++++++++++ selfprivacy_api/graphql/queries/system.py | 2 +- selfprivacy_api/jobs/__init__.py | 11 ++++- selfprivacy_api/services/__init__.py | 49 ++++++++++--------- .../services/mailserver/__init__.py | 5 +- tests/test_graphql/test_system.py | 31 +++++++++--- 6 files changed, 91 insertions(+), 34 deletions(-) create mode 100644 selfprivacy_api/graphql/mutations/job_mutations.py diff --git a/selfprivacy_api/graphql/mutations/job_mutations.py b/selfprivacy_api/graphql/mutations/job_mutations.py new file mode 100644 index 0000000..d3a3498 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/job_mutations.py @@ -0,0 +1,27 @@ +"""Manipulate jobs""" +# pylint: disable=too-few-public-methods +import strawberry + +from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn +from selfprivacy_api.jobs import Jobs + + +@strawberry.type +class JobMutations: + """Mutations related to jobs""" + + @strawberry.mutation + def remove_job(self, 
job_id: str) -> GenericMutationReturn: + """Remove a job from the queue""" + result = Jobs().remove_by_uuid(job_id) + if result: + return GenericMutationReturn( + success=True, + code=200, + message="Job removed", + ) + return GenericMutationReturn( + success=False, + code=404, + message="Job not found", + ) diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index e3f0eff..0e2a7ec 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -22,6 +22,7 @@ class SystemDomainInfo: domain: str hostname: str provider: DnsProvider + @strawberry.field def required_dns_records(self) -> typing.List[DnsRecord]: """Collect all required DNS records for all services""" @@ -37,7 +38,6 @@ class SystemDomainInfo: ] - def get_system_domain_info() -> SystemDomainInfo: """Get basic system domain info""" with ReadUserData() as user_data: diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index d060e32..09ac254 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -130,6 +130,12 @@ class Jobs: return job def remove(self, job: Job) -> None: + """ + Remove a job from the jobs list. + """ + self.remove_by_uuid(str(job.uid)) + + def remove_by_uuid(self, job_uuid: str) -> bool: """ Remove a job from the jobs list. 
""" @@ -137,9 +143,10 @@ class Jobs: if "jobs" not in user_data: user_data["jobs"] = [] for i, j in enumerate(user_data["jobs"]): - if j["uid"] == str(job.uid): + if j["uid"] == job_uuid: del user_data["jobs"][i] - break + return True + return False @staticmethod def update( diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py index 04d496f..30f28a6 100644 --- a/selfprivacy_api/services/__init__.py +++ b/selfprivacy_api/services/__init__.py @@ -42,34 +42,35 @@ def get_disabled_services() -> list[Service]: def get_services_by_location(location: str) -> list[Service]: return [service for service in services if service.get_location() == location] + def get_all_required_dns_records() -> list[ServiceDnsRecord]: ip4 = network_utils.get_ip4() ip6 = network_utils.get_ip6() dns_records: list[ServiceDnsRecord] = [ - ServiceDnsRecord( - type="A", - name="api", - content=ip4, - ttl=3600, - ), - ServiceDnsRecord( - type="AAAA", - name="api", - content=ip6, - ttl=3600, - ), - ServiceDnsRecord( - type="A", - name="meet", - content=ip4, - ttl=3600, - ), - ServiceDnsRecord( - type="AAAA", - name="meet", - content=ip6, - ttl=3600, - ), + ServiceDnsRecord( + type="A", + name="api", + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="api", + content=ip6, + ttl=3600, + ), + ServiceDnsRecord( + type="A", + name="meet", + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="meet", + content=ip6, + ttl=3600, + ), ] for service in get_enabled_services(): dns_records += service.get_dns_records() diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index ea085ba..34972a9 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -163,7 +163,10 @@ class MailServer(Service): type="TXT", name="_dmarc", content=f"v=DMARC1; p=none", ttl=18000 ), ServiceDnsRecord( - type="TXT", name=domain, 
content=f"v=spf1 a mx ip4:{ip4} -all", ttl=18000 + type="TXT", + name=domain, + content=f"v=spf1 a mx ip4:{ip4} -all", + ttl=18000, ), ServiceDnsRecord( type="TXT", name="selector._domainkey", content=dkim_record, ttl=18000 diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index fd4937d..a021a16 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -220,7 +220,9 @@ domainInfo { """ -def dns_record(record_type="A", name="test-domain.tld", content=None, ttl=3600, priority=None): +def dns_record( + record_type="A", name="test-domain.tld", content=None, ttl=3600, priority=None +): if content is None: if record_type == "A": content = "157.90.247.192" @@ -298,7 +300,12 @@ def test_graphql_get_domain( ) assert is_dns_record_in_array( dns_records, - dns_record(name="test-domain.tld", record_type="MX", content="test-domain.tld", priority=10), + dns_record( + name="test-domain.tld", + record_type="MX", + content="test-domain.tld", + priority=10, + ), ) assert is_dns_record_in_array( dns_records, @@ -517,7 +524,10 @@ def test_graphql_get_auto_upgrade(authorized_client, turned_on): assert ( response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True ) - assert response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] is True + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is True + ) def test_graphql_get_auto_upgrade_on_undefined(authorized_client, undefined_config): @@ -533,7 +543,10 @@ def test_graphql_get_auto_upgrade_on_undefined(authorized_client, undefined_conf assert ( response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True ) - assert response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is False + ) def test_graphql_get_auto_upgrade_without_vlaues(authorized_client, no_values): @@ 
-549,7 +562,10 @@ def test_graphql_get_auto_upgrade_without_vlaues(authorized_client, no_values): assert ( response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True ) - assert response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is False + ) def test_graphql_get_auto_upgrade_turned_off(authorized_client, turned_off): @@ -565,7 +581,10 @@ def test_graphql_get_auto_upgrade_turned_off(authorized_client, turned_off): assert ( response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is False ) - assert response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is False + ) API_CHANGE_AUTO_UPGRADE_SETTINGS = """ -- 2.42.0 From cb5e04567d2346525ca9f70f120a278c8260db20 Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 22 Aug 2022 21:48:50 +0400 Subject: [PATCH 44/50] Add job mutations to GraphQL schema --- selfprivacy_api/graphql/schema.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 271d066..3edcea7 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -4,6 +4,7 @@ import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations +from selfprivacy_api.graphql.mutations.job_mutations import JobMutations from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations @@ -62,6 +63,7 @@ class Mutation( UserMutations, SshMutations, StorageMutations, + JobMutations, ): """Root schema for mutations""" -- 2.42.0 From 
8c878ea89814b37ce4eb9700683efac2155fd8ef Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 22 Aug 2022 22:28:12 +0400 Subject: [PATCH 45/50] Add service mutation endpoints to GraphQL --- selfprivacy_api/graphql/schema.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 3edcea7..661b058 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -6,6 +6,7 @@ from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations from selfprivacy_api.graphql.mutations.job_mutations import JobMutations from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn +from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations @@ -63,6 +64,7 @@ class Mutation( UserMutations, SshMutations, StorageMutations, + ServicesMutations, JobMutations, ): """Root schema for mutations""" -- 2.42.0 From ab9e8d81e5ae219d99d2a419a49c841c312147f0 Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 22 Aug 2022 23:32:37 +0400 Subject: [PATCH 46/50] Add auth to service mutations --- .../graphql/mutations/services_mutations.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/services_mutations.py b/selfprivacy_api/graphql/mutations/services_mutations.py index b3aee32..38a0d7f 100644 --- a/selfprivacy_api/graphql/mutations/services_mutations.py +++ b/selfprivacy_api/graphql/mutations/services_mutations.py @@ -2,6 +2,7 @@ # pylint: disable=too-few-public-methods import typing import strawberry +from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.common_types.jobs 
import job_to_api_job from selfprivacy_api.graphql.common_types.service import ( @@ -43,7 +44,7 @@ class ServiceJobMutationReturn(GenericJobButationReturn): class ServicesMutations: """Services mutations.""" - @strawberry.mutation + @strawberry.mutation(permission_classes=[IsAuthenticated]) def enable_service(self, service_id: str) -> ServiceMutationReturn: """Enable service.""" service = get_service_by_id(service_id) @@ -61,7 +62,7 @@ class ServicesMutations: service=service_to_graphql_service(service), ) - @strawberry.mutation + @strawberry.mutation(permission_classes=[IsAuthenticated]) def disable_service(self, service_id: str) -> ServiceMutationReturn: """Disable service.""" service = get_service_by_id(service_id) @@ -79,7 +80,7 @@ class ServicesMutations: service=service_to_graphql_service(service), ) - @strawberry.mutation + @strawberry.mutation(permission_classes=[IsAuthenticated]) def stop_service(self, service_id: str) -> ServiceMutationReturn: """Stop service.""" service = get_service_by_id(service_id) @@ -97,7 +98,7 @@ class ServicesMutations: service=service_to_graphql_service(service), ) - @strawberry.mutation + @strawberry.mutation(permission_classes=[IsAuthenticated]) def start_service(self, service_id: str) -> ServiceMutationReturn: """Start service.""" service = get_service_by_id(service_id) @@ -115,7 +116,7 @@ class ServicesMutations: service=service_to_graphql_service(service), ) - @strawberry.mutation + @strawberry.mutation(permission_classes=[IsAuthenticated]) def restart_service(self, service_id: str) -> ServiceMutationReturn: """Restart service.""" service = get_service_by_id(service_id) @@ -133,7 +134,7 @@ class ServicesMutations: service=service_to_graphql_service(service), ) - @strawberry.mutation + @strawberry.mutation(permission_classes=[IsAuthenticated]) def move_service(self, input: MoveServiceInput) -> ServiceJobMutationReturn: """Move service.""" service = get_service_by_id(input.service_id) -- 2.42.0 From 
28c6d983b95815137af8682ca50a4462b79e93e9 Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 22 Aug 2022 23:49:14 +0400 Subject: [PATCH 47/50] Fix ws auth --- selfprivacy_api/app.py | 9 +++---- selfprivacy_api/dependencies.py | 27 +------------------ selfprivacy_api/graphql/__init__.py | 6 ++++- .../graphql/mutations/api_mutations.py | 12 +++++++-- .../graphql/queries/api_queries.py | 6 ++++- selfprivacy_api/graphql/schema.py | 17 ++++++++++-- 6 files changed, 40 insertions(+), 37 deletions(-) diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index b5ed512..3436445 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -1,13 +1,12 @@ #!/usr/bin/env python3 """SelfPrivacy server management API""" -import os -from fastapi import FastAPI, Depends, Request, WebSocket, BackgroundTasks +from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware -from strawberry.fastapi import BaseContext, GraphQLRouter +from strawberry.fastapi import GraphQLRouter import uvicorn -from selfprivacy_api.dependencies import get_api_version, get_graphql_context +from selfprivacy_api.dependencies import get_api_version from selfprivacy_api.graphql.schema import schema from selfprivacy_api.migrations import run_migrations from selfprivacy_api.restic_controller.tasks import init_restic @@ -20,9 +19,9 @@ from selfprivacy_api.rest import ( ) app = FastAPI() + graphql_app = GraphQLRouter( schema, - context_getter=get_graphql_context, ) app.add_middleware( diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index c50d772..109e2ce 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -1,6 +1,4 @@ -from fastapi import Depends, FastAPI, HTTPException, status -from typing import Optional -from strawberry.fastapi import BaseContext +from fastapi import Depends, HTTPException, status from fastapi.security import APIKeyHeader from pydantic import BaseModel @@ -27,29 +25,6 @@ async def get_token_header( 
return TokenHeader(token=token) -class GraphQlContext(BaseContext): - def __init__(self, auth_token: Optional[str] = None): - self.auth_token = auth_token - self.is_authenticated = auth_token is not None - - -async def get_graphql_context( - token: str = Depends( - APIKeyHeader( - name="Authorization", - auto_error=False, - ) - ) -) -> GraphQlContext: - if token is None: - return GraphQlContext() - else: - token = token.replace("Bearer ", "") - if not is_token_valid(token): - return GraphQlContext() - return GraphQlContext(auth_token=token) - - def get_api_version() -> str: """Get API version""" return "2.0.0" diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index e01b158..89dc6e2 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -13,4 +13,8 @@ class IsAuthenticated(BasePermission): message = "You must be authenticated to access this resource." def has_permission(self, source: typing.Any, info: Info, **kwargs) -> bool: - return info.context.is_authenticated + return is_token_valid( + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") + ) diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index 15fa6dd..c6727db 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -116,7 +116,11 @@ class ApiMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def refresh_device_api_token(self, info: Info) -> DeviceApiTokenMutationReturn: """Refresh device api token""" - token = info.context.auth_token + token = ( + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") + ) if token is None: return DeviceApiTokenMutationReturn( success=False, @@ -142,7 +146,11 @@ class ApiMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def delete_device_api_token(self, device: str, 
info: Info) -> GenericMutationReturn: """Delete device api token""" - self_token = info.context.auth_token + self_token = ( + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") + ) try: delete_api_token(self_token, device) except NotFoundException: diff --git a/selfprivacy_api/graphql/queries/api_queries.py b/selfprivacy_api/graphql/queries/api_queries.py index cbe7690..7994a8f 100644 --- a/selfprivacy_api/graphql/queries/api_queries.py +++ b/selfprivacy_api/graphql/queries/api_queries.py @@ -85,7 +85,11 @@ class Api: creation_date=device.date, is_caller=device.is_caller, ) - for device in get_api_tokens_with_caller_flag(info.context.auth_token) + for device in get_api_tokens_with_caller_flag( + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") + ) ] recovery_key: ApiRecoveryKeyStatus = strawberry.field( diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 661b058..dff9304 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -1,6 +1,8 @@ """GraphQL API for SelfPrivacy.""" # pylint: disable=too-few-public-methods +import asyncio +from typing import AsyncGenerator import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations @@ -69,7 +71,7 @@ class Mutation( ): """Root schema for mutations""" - @strawberry.mutation + @strawberry.mutation(permission_classes=[IsAuthenticated]) def test_mutation(self) -> GenericMutationReturn: """Test mutation""" test_job() @@ -82,4 +84,15 @@ class Mutation( pass -schema = strawberry.Schema(query=Query, mutation=Mutation) +@strawberry.type +class Subscription: + """Root schema for subscriptions""" + + @strawberry.subscription(permission_classes=[IsAuthenticated]) + async def count(self, target: int = 100) -> AsyncGenerator[int, None]: + for i in range(target): + yield i + await asyncio.sleep(0.5) + + +schema = 
strawberry.Schema(query=Query, mutation=Mutation, subscription=Subscription) -- 2.42.0 From aafc77dce35f983202b2b0224cb74480747a98fb Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 22 Aug 2022 23:49:30 +0400 Subject: [PATCH 48/50] Add vscode launch dotflile --- .vscode/launch.json | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 .vscode/launch.json diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..a691ce0 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,19 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Python: FastAPI", + "type": "python", + "request": "launch", + "module": "uvicorn", + "args": [ + "selfprivacy_api.app:app" + ], + "jinja": true, + "justMyCode": false + } + ] +} -- 2.42.0 From ba434f4fb594a73b64974eea9c46516bfd5778d3 Mon Sep 17 00:00:00 2001 From: inexcode Date: Tue, 23 Aug 2022 00:06:12 +0400 Subject: [PATCH 49/50] Allow using query params to fetch GraphQL --- selfprivacy_api/graphql/__init__.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index 89dc6e2..7372197 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -13,8 +13,9 @@ class IsAuthenticated(BasePermission): message = "You must be authenticated to access this resource." 
def has_permission(self, source: typing.Any, info: Info, **kwargs) -> bool: - return is_token_valid( - info.context["request"] - .headers.get("Authorization", "") - .replace("Bearer ", "") - ) + token = info.context["request"].headers.get("Authorization") + if token is None: + token = info.context["request"].query_params.get("token") + if token is None: + return False + return is_token_valid(token.replace("Bearer ", "")) -- 2.42.0 From 15a900d009720eea660d2c41f4122722661d1903 Mon Sep 17 00:00:00 2001 From: inexcode Date: Wed, 24 Aug 2022 03:05:06 +0400 Subject: [PATCH 50/50] Add Jitsi to services --- selfprivacy_api/services/__init__.py | 14 +- .../services/generic_status_getter.py | 32 ++++ selfprivacy_api/services/jitsi/__init__.py | 142 ++++++++++++++++++ selfprivacy_api/services/jitsi/icon.py | 5 + .../services/mailserver/__init__.py | 39 +---- selfprivacy_api/services/mailserver/icon.py | 2 +- tests/test_graphql/test_system/turned_on.json | 3 + 7 files changed, 192 insertions(+), 45 deletions(-) create mode 100644 selfprivacy_api/services/jitsi/__init__.py create mode 100644 selfprivacy_api/services/jitsi/icon.py diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py index 30f28a6..a688734 100644 --- a/selfprivacy_api/services/__init__.py +++ b/selfprivacy_api/services/__init__.py @@ -3,6 +3,7 @@ import typing from selfprivacy_api.services.bitwarden import Bitwarden from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.jitsi import Jitsi from selfprivacy_api.services.mailserver import MailServer from selfprivacy_api.services.nextcloud import Nextcloud from selfprivacy_api.services.pleroma import Pleroma @@ -17,6 +18,7 @@ services: list[Service] = [ Nextcloud(), Pleroma(), Ocserv(), + Jitsi(), ] @@ -59,18 +61,6 @@ def get_all_required_dns_records() -> list[ServiceDnsRecord]: content=ip6, ttl=3600, ), - ServiceDnsRecord( - type="A", - name="meet", - content=ip4, - ttl=3600, - ), - ServiceDnsRecord( - 
type="AAAA", - name="meet", - content=ip6, - ttl=3600, - ), ] for service in get_enabled_services(): dns_records += service.get_dns_records() diff --git a/selfprivacy_api/services/generic_status_getter.py b/selfprivacy_api/services/generic_status_getter.py index c17f4d6..46720af 100644 --- a/selfprivacy_api/services/generic_status_getter.py +++ b/selfprivacy_api/services/generic_status_getter.py @@ -26,3 +26,35 @@ def get_service_status(service: str) -> ServiceStatus: if b"ActiveState=reloading" in service_status: return ServiceStatus.RELOADING return ServiceStatus.OFF + + +def get_service_status_from_several_units(services: list[str]) -> ServiceStatus: + """ + Fetch all service statuses for all services and return the worst status. + Statuses from worst to best: + - OFF + - FAILED + - RELOADING + - ACTIVATING + - DEACTIVATING + - INACTIVE + - ACTIVE + """ + service_statuses = [] + for service in services: + service_statuses.append(get_service_status(service)) + if ServiceStatus.OFF in service_statuses: + return ServiceStatus.OFF + if ServiceStatus.FAILED in service_statuses: + return ServiceStatus.FAILED + if ServiceStatus.RELOADING in service_statuses: + return ServiceStatus.RELOADING + if ServiceStatus.ACTIVATING in service_statuses: + return ServiceStatus.ACTIVATING + if ServiceStatus.DEACTIVATING in service_statuses: + return ServiceStatus.DEACTIVATING + if ServiceStatus.INACTIVE in service_statuses: + return ServiceStatus.INACTIVE + if ServiceStatus.ACTIVE in service_statuses: + return ServiceStatus.ACTIVE + return ServiceStatus.OFF diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py new file mode 100644 index 0000000..6b3a973 --- /dev/null +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -0,0 +1,142 @@ +"""Class representing Jitsi service""" +import base64 +import subprocess +import typing + +from selfprivacy_api.jobs import Job, Jobs +from selfprivacy_api.services.generic_service_mover import 
FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import ( + get_service_status, + get_service_status_from_several_units, +) +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import huey +import selfprivacy_api.utils.network as network_utils +from selfprivacy_api.services.jitsi.icon import JITSI_ICON + + +class Jitsi(Service): + """Class representing Jitsi service""" + + @staticmethod + def get_id() -> str: + """Return service id.""" + return "jitsi" + + @staticmethod + def get_display_name() -> str: + """Return service display name.""" + return "Jitsi" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "Jitsi is a free and open-source video conferencing solution." 
+ + @staticmethod + def get_svg_icon() -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + return base64.b64encode(JITSI_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://meet.{domain}" + + @staticmethod + def is_movable() -> bool: + return False + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("jitsi", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + return get_service_status_from_several_units( + ["jitsi-videobridge.service", "jicofo.service"] + ) + + @staticmethod + def enable(): + """Enable Jitsi service.""" + with WriteUserData() as user_data: + if "jitsi" not in user_data: + user_data["jitsi"] = {} + user_data["jitsi"]["enable"] = True + + @staticmethod + def disable(): + """Disable Gitea service.""" + with WriteUserData() as user_data: + if "jitsi" not in user_data: + user_data["jitsi"] = {} + user_data["jitsi"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "jitsi-videobridge.service"]) + subprocess.run(["systemctl", "stop", "jicofo.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "jitsi-videobridge.service"]) + subprocess.run(["systemctl", "start", "jicofo.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "jitsi-videobridge.service"]) + subprocess.run(["systemctl", "restart", "jicofo.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + storage_usage += get_storage_usage("/var/lib/jitsi-meet") + return 
storage_usage + + @staticmethod + def get_location() -> str: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + ip4 = network_utils.get_ip4() + ip6 = network_utils.get_ip6() + return [ + ServiceDnsRecord( + type="A", + name="meet", + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="meet", + content=ip6, + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + raise NotImplementedError("jitsi service is not movable") diff --git a/selfprivacy_api/services/jitsi/icon.py b/selfprivacy_api/services/jitsi/icon.py new file mode 100644 index 0000000..08bcbb1 --- /dev/null +++ b/selfprivacy_api/services/jitsi/icon.py @@ -0,0 +1,5 @@ +JITSI_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index 34972a9..1a72f33 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -7,7 +7,10 @@ import typing from selfprivacy_api.jobs import Job, JobStatus, Jobs from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service from selfprivacy_api.services.generic_size_counter import get_storage_usage -from selfprivacy_api.services.generic_status_getter import get_service_status +from selfprivacy_api.services.generic_status_getter import ( + get_service_status, + get_service_status_from_several_units, +) from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus import selfprivacy_api.utils as utils from selfprivacy_api.utils.block_devices import BlockDevice @@ -54,37 +57,9 @@ class MailServer(Service): @staticmethod def get_status() -> ServiceStatus: - imap_status = get_service_status("dovecot2.service") - smtp_status = get_service_status("postfix.service") - - if imap_status == ServiceStatus.ACTIVE and smtp_status == ServiceStatus.ACTIVE: - return ServiceStatus.ACTIVE - elif imap_status == 
ServiceStatus.FAILED or smtp_status == ServiceStatus.FAILED: - return ServiceStatus.FAILED - elif ( - imap_status == ServiceStatus.RELOADING - or smtp_status == ServiceStatus.RELOADING - ): - return ServiceStatus.RELOADING - elif ( - imap_status == ServiceStatus.ACTIVATING - or smtp_status == ServiceStatus.ACTIVATING - ): - return ServiceStatus.ACTIVATING - elif ( - imap_status == ServiceStatus.DEACTIVATING - or smtp_status == ServiceStatus.DEACTIVATING - ): - return ServiceStatus.DEACTIVATING - elif ( - imap_status == ServiceStatus.INACTIVE - or smtp_status == ServiceStatus.INACTIVE - ): - return ServiceStatus.INACTIVE - elif imap_status == ServiceStatus.OFF or smtp_status == ServiceStatus.OFF: - return ServiceStatus.OFF - else: - return ServiceStatus.FAILED + return get_service_status_from_several_units( + ["dovecot2.service", "postfix.service"] + ) @staticmethod def enable(): diff --git a/selfprivacy_api/services/mailserver/icon.py b/selfprivacy_api/services/mailserver/icon.py index cb5b639..a688ef3 100644 --- a/selfprivacy_api/services/mailserver/icon.py +++ b/selfprivacy_api/services/mailserver/icon.py @@ -1,5 +1,5 @@ MAILSERVER_ICON = """ - + """ diff --git a/tests/test_graphql/test_system/turned_on.json b/tests/test_graphql/test_system/turned_on.json index 99a023c..821875b 100644 --- a/tests/test_graphql/test_system/turned_on.json +++ b/tests/test_graphql/test_system/turned_on.json @@ -41,6 +41,9 @@ "pleroma": { "enable": true }, + "jitsi": { + "enable": true + }, "autoUpgrade": { "enable": true, "allowReboot": true -- 2.42.0