From 8cdacb73dd2dd4363a8a6e3b38d5be4dd0745a29 Mon Sep 17 00:00:00 2001 From: inexcode Date: Thu, 27 Oct 2022 17:01:11 +0300 Subject: [PATCH 001/129] refactor: Use singleton metaclass for all singleton classes --- selfprivacy_api/graphql/common_types/jobs.py | 2 +- .../graphql/mutations/job_mutations.py | 2 +- selfprivacy_api/graphql/queries/jobs.py | 4 +- selfprivacy_api/jobs/__init__.py | 34 +++------------- selfprivacy_api/jobs/test.py | 14 +++---- selfprivacy_api/migrations/__init__.py | 4 +- .../check_for_failed_binds_migration.py | 6 +-- selfprivacy_api/restic_controller/__init__.py | 11 +---- .../services/bitwarden/__init__.py | 2 +- .../services/generic_service_mover.py | 40 +++++++++---------- selfprivacy_api/services/gitea/__init__.py | 2 +- .../services/mailserver/__init__.py | 2 +- .../services/nextcloud/__init__.py | 2 +- selfprivacy_api/services/pleroma/__init__.py | 2 +- selfprivacy_api/utils/block_devices.py | 10 +---- selfprivacy_api/utils/singleton_metaclass.py | 23 +++++++++++ 16 files changed, 74 insertions(+), 86 deletions(-) create mode 100644 selfprivacy_api/utils/singleton_metaclass.py diff --git a/selfprivacy_api/graphql/common_types/jobs.py b/selfprivacy_api/graphql/common_types/jobs.py index 4b095c8..3019a70 100644 --- a/selfprivacy_api/graphql/common_types/jobs.py +++ b/selfprivacy_api/graphql/common_types/jobs.py @@ -43,7 +43,7 @@ def job_to_api_job(job: Job) -> ApiJob: def get_api_job_by_id(job_id: str) -> typing.Optional[ApiJob]: """Get a job for GraphQL by its ID.""" - job = Jobs.get_instance().get_job(job_id) + job = Jobs.get_job(job_id) if job is None: return None return job_to_api_job(job) diff --git a/selfprivacy_api/graphql/mutations/job_mutations.py b/selfprivacy_api/graphql/mutations/job_mutations.py index 1ac2447..acc5f3d 100644 --- a/selfprivacy_api/graphql/mutations/job_mutations.py +++ b/selfprivacy_api/graphql/mutations/job_mutations.py @@ -14,7 +14,7 @@ class JobMutations: 
@strawberry.mutation(permission_classes=[IsAuthenticated]) def remove_job(self, job_id: str) -> GenericMutationReturn: """Remove a job from the queue""" - result = Jobs.get_instance().remove_by_uid(job_id) + result = Jobs.remove_by_uid(job_id) if result: return GenericMutationReturn( success=True, diff --git a/selfprivacy_api/graphql/queries/jobs.py b/selfprivacy_api/graphql/queries/jobs.py index 426c563..49bcbd7 100644 --- a/selfprivacy_api/graphql/queries/jobs.py +++ b/selfprivacy_api/graphql/queries/jobs.py @@ -16,9 +16,9 @@ class Job: @strawberry.field def get_jobs(self) -> typing.List[ApiJob]: - Jobs.get_instance().get_jobs() + Jobs.get_jobs() - return [job_to_api_job(job) for job in Jobs.get_instance().get_jobs()] + return [job_to_api_job(job) for job in Jobs.get_jobs()] @strawberry.field def get_job(self, job_id: str) -> typing.Optional[ApiJob]: diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index f30fc5b..0dcfd66 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -17,10 +17,7 @@ A job is a dictionary with the following keys: import typing import datetime from uuid import UUID -import asyncio import json -import os -import time import uuid from enum import Enum @@ -64,29 +61,6 @@ class Jobs: Jobs class. """ - __instance = None - - @staticmethod - def get_instance(): - """ - Singleton method. - """ - if Jobs.__instance is None: - Jobs() - if Jobs.__instance is None: - raise Exception("Couldn't init Jobs singleton!") - return Jobs.__instance - return Jobs.__instance - - def __init__(self): - """ - Initialize the jobs list. - """ - if Jobs.__instance is not None: - raise Exception("This class is a singleton!") - else: - Jobs.__instance = self - @staticmethod def reset() -> None: """ @@ -130,13 +104,15 @@ class Jobs: user_data["jobs"] = [json.loads(job.json())] return job - def remove(self, job: Job) -> None: + @staticmethod + def remove(job: Job) -> None: """ Remove a job from the jobs list. 
""" - self.remove_by_uid(str(job.uid)) + Jobs.remove_by_uid(str(job.uid)) - def remove_by_uid(self, job_uuid: str) -> bool: + @staticmethod + def remove_by_uid(job_uuid: str) -> bool: """ Remove a job from the jobs list. """ diff --git a/selfprivacy_api/jobs/test.py b/selfprivacy_api/jobs/test.py index 9d93fb7..e3c38f4 100644 --- a/selfprivacy_api/jobs/test.py +++ b/selfprivacy_api/jobs/test.py @@ -5,7 +5,7 @@ from selfprivacy_api.jobs import JobStatus, Jobs @huey.task() def test_job(): - job = Jobs.get_instance().add( + job = Jobs.add( type_id="test", name="Test job", description="This is a test job.", @@ -14,42 +14,42 @@ def test_job(): progress=0, ) time.sleep(5) - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.RUNNING, status_text="Performing pre-move checks...", progress=5, ) time.sleep(5) - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.RUNNING, status_text="Performing pre-move checks...", progress=10, ) time.sleep(5) - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.RUNNING, status_text="Performing pre-move checks...", progress=15, ) time.sleep(5) - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.RUNNING, status_text="Performing pre-move checks...", progress=20, ) time.sleep(5) - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.RUNNING, status_text="Performing pre-move checks...", progress=25, ) time.sleep(5) - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.FINISHED, status_text="Job finished.", diff --git a/selfprivacy_api/migrations/__init__.py b/selfprivacy_api/migrations/__init__.py index 8209198..b051f04 100644 --- a/selfprivacy_api/migrations/__init__.py +++ b/selfprivacy_api/migrations/__init__.py @@ -8,7 +8,9 @@ at api.skippedMigrations in userdata.json and populating it with IDs of the migrations to skip. Adding DISABLE_ALL to that array disables the migrations module entirely. 
""" -from selfprivacy_api.migrations.check_for_failed_binds_migration import CheckForFailedBindsMigration +from selfprivacy_api.migrations.check_for_failed_binds_migration import ( + CheckForFailedBindsMigration, +) from selfprivacy_api.utils import ReadUserData from selfprivacy_api.migrations.fix_nixos_config_branch import FixNixosConfigBranch from selfprivacy_api.migrations.create_tokens_json import CreateTokensJson diff --git a/selfprivacy_api/migrations/check_for_failed_binds_migration.py b/selfprivacy_api/migrations/check_for_failed_binds_migration.py index 5871809..41d56b2 100644 --- a/selfprivacy_api/migrations/check_for_failed_binds_migration.py +++ b/selfprivacy_api/migrations/check_for_failed_binds_migration.py @@ -15,7 +15,7 @@ class CheckForFailedBindsMigration(Migration): def is_migration_needed(self): try: - jobs = Jobs.get_instance().get_jobs() + jobs = Jobs.get_jobs() # If there is a job with type_id "migrations.migrate_to_binds" and status is not "FINISHED", # then migration is needed and job is deleted for job in jobs: @@ -33,13 +33,13 @@ class CheckForFailedBindsMigration(Migration): # Get info about existing volumes # Write info about volumes to userdata.json try: - jobs = Jobs.get_instance().get_jobs() + jobs = Jobs.get_jobs() for job in jobs: if ( job.type_id == "migrations.migrate_to_binds" and job.status != JobStatus.FINISHED ): - Jobs.get_instance().remove(job) + Jobs.remove(job) with WriteUserData() as userdata: userdata["useBinds"] = False print("Done") diff --git a/selfprivacy_api/restic_controller/__init__.py b/selfprivacy_api/restic_controller/__init__.py index abb5dc8..b4efba2 100644 --- a/selfprivacy_api/restic_controller/__init__.py +++ b/selfprivacy_api/restic_controller/__init__.py @@ -7,6 +7,7 @@ from threading import Lock from enum import Enum import portalocker from selfprivacy_api.utils import ReadUserData +from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass class ResticStates(Enum): @@ -21,7 +22,7 @@ 
class ResticStates(Enum): INITIALIZING = 6 -class ResticController: +class ResticController(metaclass=SingletonMetaclass): """ States in wich the restic_controller may be - no backblaze key @@ -35,16 +36,8 @@ class ResticController: Current state can be fetched with get_state() """ - _instance = None - _lock = Lock() _initialized = False - def __new__(cls): - if not cls._instance: - with cls._lock: - cls._instance = super(ResticController, cls).__new__(cls) - return cls._instance - def __init__(self): if self._initialized: return diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index ea93de1..16d7746 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -144,7 +144,7 @@ class Bitwarden(Service): ] def move_to_volume(self, volume: BlockDevice) -> Job: - job = Jobs.get_instance().add( + job = Jobs.add( type_id="services.bitwarden.move", name="Move Bitwarden", description=f"Moving Bitwarden data to {volume.name}", diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index 8b3a759..6c1b426 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -29,7 +29,7 @@ def move_service( userdata_location: str, ): """Move a service to another volume.""" - job = Jobs.get_instance().update( + job = Jobs.update( job=job, status_text="Performing pre-move checks...", status=JobStatus.RUNNING, @@ -37,7 +37,7 @@ def move_service( service_name = service.get_display_name() with ReadUserData() as user_data: if not user_data.get("useBinds", False): - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.ERROR, error="Server is not using binds.", @@ -46,7 +46,7 @@ def move_service( # Check if we are on the same volume old_volume = service.get_location() if old_volume == volume.name: - Jobs.get_instance().update( + Jobs.update( 
job=job, status=JobStatus.ERROR, error=f"{service_name} is already on this volume.", @@ -54,7 +54,7 @@ def move_service( return # Check if there is enough space on the new volume if int(volume.fsavail) < service.get_storage_usage(): - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.ERROR, error="Not enough space on the new volume.", @@ -62,7 +62,7 @@ def move_service( return # Make sure the volume is mounted if volume.name != "sda1" and f"/volumes/{volume.name}" not in volume.mountpoints: - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.ERROR, error="Volume is not mounted.", @@ -71,14 +71,14 @@ def move_service( # Make sure current actual directory exists and if its user and group are correct for folder in folder_names: if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").exists(): - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.ERROR, error=f"{service_name} is not found.", ) return if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").is_dir(): - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.ERROR, error=f"{service_name} is not a directory.", @@ -88,7 +88,7 @@ def move_service( not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").owner() == folder.owner ): - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.ERROR, error=f"{service_name} owner is not {folder.owner}.", @@ -96,7 +96,7 @@ def move_service( return # Stop service - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.RUNNING, status_text=f"Stopping {service_name}...", @@ -113,7 +113,7 @@ def move_service( break time.sleep(1) else: - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.ERROR, error=f"{service_name} did not stop in 30 seconds.", @@ -121,7 +121,7 @@ def move_service( return # Unmount old volume - Jobs.get_instance().update( + Jobs.update( job=job, status_text="Unmounting old folder...", status=JobStatus.RUNNING, @@ -134,14 +134,14 @@ 
def move_service( check=True, ) except subprocess.CalledProcessError: - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.ERROR, error="Unable to unmount old volume.", ) return # Move data to new volume and set correct permissions - Jobs.get_instance().update( + Jobs.update( job=job, status_text="Moving data to new volume...", status=JobStatus.RUNNING, @@ -154,14 +154,14 @@ def move_service( f"/volumes/{old_volume}/{folder.name}", f"/volumes/{volume.name}/{folder.name}", ) - Jobs.get_instance().update( + Jobs.update( job=job, status_text="Moving data to new volume...", status=JobStatus.RUNNING, progress=current_progress + folder_percentage, ) - Jobs.get_instance().update( + Jobs.update( job=job, status_text=f"Making sure {service_name} owns its files...", status=JobStatus.RUNNING, @@ -180,14 +180,14 @@ def move_service( ) except subprocess.CalledProcessError as error: print(error.output) - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.RUNNING, error=f"Unable to set ownership of new volume. {service_name} may not be able to access its files. 
Continuing anyway.", ) # Mount new volume - Jobs.get_instance().update( + Jobs.update( job=job, status_text=f"Mounting {service_name} data...", status=JobStatus.RUNNING, @@ -207,7 +207,7 @@ def move_service( ) except subprocess.CalledProcessError as error: print(error.output) - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.ERROR, error="Unable to mount new volume.", @@ -215,7 +215,7 @@ def move_service( return # Update userdata - Jobs.get_instance().update( + Jobs.update( job=job, status_text="Finishing move...", status=JobStatus.RUNNING, @@ -227,7 +227,7 @@ def move_service( user_data[userdata_location]["location"] = volume.name # Start service service.start() - Jobs.get_instance().update( + Jobs.update( job=job, status=JobStatus.FINISHED, result=f"{service_name} moved successfully.", diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index c6389bd..aacda5f 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -141,7 +141,7 @@ class Gitea(Service): ] def move_to_volume(self, volume: BlockDevice) -> Job: - job = Jobs.get_instance().add( + job = Jobs.add( type_id="services.gitea.move", name="Move Gitea", description=f"Moving Gitea data to {volume.name}", diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index 1a72f33..78a2441 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -149,7 +149,7 @@ class MailServer(Service): ] def move_to_volume(self, volume: BlockDevice) -> Job: - job = Jobs.get_instance().add( + job = Jobs.add( type_id="services.mailserver.move", name="Move Mail Server", description=f"Moving mailserver data to {volume.name}", diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 4057b49..ad74354 100644 --- 
a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -149,7 +149,7 @@ class Nextcloud(Service): ] def move_to_volume(self, volume: BlockDevice) -> Job: - job = Jobs.get_instance().add( + job = Jobs.add( type_id="services.nextcloud.move", name="Move Nextcloud", description=f"Moving Nextcloud to volume {volume.name}", diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index 97c11f5..4d2b85e 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -129,7 +129,7 @@ class Pleroma(Service): ] def move_to_volume(self, volume: BlockDevice) -> Job: - job = Jobs.get_instance().add( + job = Jobs.add( type_id="services.pleroma.move", name="Move Pleroma", description=f"Moving Pleroma to volume {volume.name}", diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index 9d96d52..0de3d90 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -4,6 +4,7 @@ import json import typing from selfprivacy_api.utils import WriteUserData +from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass def get_block_device(device_name): @@ -147,16 +148,9 @@ class BlockDevice: return False -class BlockDevices: +class BlockDevices(metaclass=SingletonMetaclass): """Singleton holding all Block devices""" - _instance = None - - def __new__(cls, *args, **kwargs): - if not cls._instance: - cls._instance = super().__new__(cls) - return cls._instance - def __init__(self): self.block_devices = [] self.update() diff --git a/selfprivacy_api/utils/singleton_metaclass.py b/selfprivacy_api/utils/singleton_metaclass.py new file mode 100644 index 0000000..685cef6 --- /dev/null +++ b/selfprivacy_api/utils/singleton_metaclass.py @@ -0,0 +1,23 @@ +""" +Singleton is a creational design pattern, which ensures that only +one object of its kind exists and provides a 
single point of access +to it for any other code. +""" +from threading import Lock + + +class SingletonMetaclass(type): + """ + This is a thread-safe implementation of Singleton. + """ + + _instances = {} + _lock: Lock = Lock() + + def __call__(cls, *args, **kwargs): + with cls._lock: + if cls not in cls._instances: + cls._instances[cls] = super(SingletonMetaclass, cls).__call__( + *args, **kwargs + ) + return cls._instances[cls] From 19a4ec53773448f9797f0ad6b812e0e44dc1eef2 Mon Sep 17 00:00:00 2001 From: inexcode Date: Fri, 28 Oct 2022 11:09:36 +0300 Subject: [PATCH 002/129] feat: Add redis pool singleton --- selfprivacy_api/utils/redis_pool.py | 32 +++++++++++++++++++++++++++++ shell.nix | 1 + 2 files changed, 33 insertions(+) create mode 100644 selfprivacy_api/utils/redis_pool.py diff --git a/selfprivacy_api/utils/redis_pool.py b/selfprivacy_api/utils/redis_pool.py new file mode 100644 index 0000000..e4e98ac --- /dev/null +++ b/selfprivacy_api/utils/redis_pool.py @@ -0,0 +1,32 @@ +""" +Redis pool module for selfprivacy_api +""" +import redis.asyncio as redis +from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass + +REDIS_SOCKET = "/run/redis-sp-api/redis.sock" + + +class RedisPool(metaclass=SingletonMetaclass): + """ + Redis connection pool singleton. + """ + + def __init__(self): + self._pool = redis.ConnectionPool.from_url( + f"unix://{REDIS_SOCKET}", + decode_responses=True, + ) + self._pubsub_connection = self.get_connection() + + def get_connection(self): + """ + Get a connection from the pool. + """ + return redis.Redis(connection_pool=self._pool) + + def get_pubsub(self): + """ + Get a pubsub connection from the pool. 
+ """ + return self._pubsub_connection.pubsub() diff --git a/shell.nix b/shell.nix index 0ccb99d..b6d8e32 100644 --- a/shell.nix +++ b/shell.nix @@ -18,6 +18,7 @@ let black fastapi uvicorn + redis (buildPythonPackage rec { pname = "strawberry-graphql"; version = "0.123.0"; From 83736e1e8fea20ef90de7bf2d4985ffdf88a9bfa Mon Sep 17 00:00:00 2001 From: inexcode Date: Tue, 1 Nov 2022 11:38:37 +0300 Subject: [PATCH 003/129] docs: add a pull request section to contributing.md --- CONTRIBUTING.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 CONTRIBUTING.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..8a6b88a --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,20 @@ +# SelfPrivacy API contributors guide + +## Commit messages + +We follow [Convetional Commits](https://www.conventionalcommits.org/en/v1.0.0/) specification. Please read it before commiting. + +Useful plugins for IDEs: + +- [VSCode](https://marketplace.visualstudio.com/items?itemName=vivaxy.vscode-conventional-commits) +- [IntelliJ](https://plugins.jetbrains.com/plugin/13389-conventional-commit) + +## Code style + +We use [Black]( + https://pypi.org/project/black/ +) for code formatting. Please install it and run `black .` before commiting. + +## Pull requests + +Please request a review from at least one of the other maintainers. If you are not sure who to request, request a review from SelfPrivacy/Devs team. 
From 27255cb5336852f3311212410763963622f88efe Mon Sep 17 00:00:00 2001 From: inexcode Date: Tue, 1 Nov 2022 14:59:23 +0300 Subject: [PATCH 004/129] docs: add note about tracking time to CONTRIBUTING spent @5m --- CONTRIBUTING.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8a6b88a..7f82cfa 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -9,6 +9,21 @@ Useful plugins for IDEs: - [VSCode](https://marketplace.visualstudio.com/items?itemName=vivaxy.vscode-conventional-commits) - [IntelliJ](https://plugins.jetbrains.com/plugin/13389-conventional-commit) +### Track your time + +If you are working on a task, please track your time and add it to the commit message. For example: + +``` +feat: add new feature + +- did some work +- did some more work + +fixes #4, spent @1h30m +``` + +[Timewarrior](https://timewarrior.net/) is a good tool for tracking time. + ## Code style We use [Black]( From b0c26b876a28018fc9a6129e5555cc84bc2a59ab Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 26 Oct 2022 12:32:16 +0000 Subject: [PATCH 005/129] add providers migration --- selfprivacy_api/migrations/providers.py | 43 +++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 selfprivacy_api/migrations/providers.py diff --git a/selfprivacy_api/migrations/providers.py b/selfprivacy_api/migrations/providers.py new file mode 100644 index 0000000..c565569 --- /dev/null +++ b/selfprivacy_api/migrations/providers.py @@ -0,0 +1,43 @@ +from selfprivacy_api.migrations.migration import Migration +from selfprivacy_api.utils import ReadUserData, WriteUserData + + +class CreateProviderFields(Migration): + """Unhardcode providers""" + + def get_migration_name(self): + return "create_provider_fields" + + def get_migration_description(self): + return "Add DNS, backup and server provider fields to enable user to choose between different clouds and to make the deployment adapt to these preferences." 
+ + def is_migration_needed(self): + try: + with ReadUserData() as userdata: + return "dns" not in userdata + except Exception as e: + print(e) + return False + + def migrate(self): + # Write info about providers to userdata.json + try: + with WriteUserData() as userdata: + userdata["dns"] = { + "provider": "CLOUDFLARE", + "apiToken": userdata["cloudflare"]["apiToken"], + } + userdata["server"] = { + "provider": "HETZNER", + } + userdata["backup"] = { + "provider": "BACKBLAZE", + "accountId": userdata["backblaze"]["accountId"], + "accountKey": userdata["backblaze"]["accountKey"], + "bucket": userdata["backblaze"]["bucket"], + } + + print("Done") + except Exception as e: + print(e) + print("Error migrating provider fields") From decb98afe27e0e07cdc84e89687a21ef9b3b4ade Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 28 Oct 2022 10:17:57 +0000 Subject: [PATCH 006/129] add providers.py to the list of migrations Makes providers.py actually run. --- selfprivacy_api/migrations/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/selfprivacy_api/migrations/__init__.py b/selfprivacy_api/migrations/__init__.py index b051f04..7385548 100644 --- a/selfprivacy_api/migrations/__init__.py +++ b/selfprivacy_api/migrations/__init__.py @@ -18,6 +18,7 @@ from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import ( MigrateToSelfprivacyChannel, ) from selfprivacy_api.migrations.mount_volume import MountVolume +from selfprivacy_api.migrations.providers import CreateProviderFields migrations = [ FixNixosConfigBranch(), @@ -25,6 +26,7 @@ migrations = [ MigrateToSelfprivacyChannel(), MountVolume(), CheckForFailedBindsMigration(), + CreateProviderFields(), ] From 7d58eb3d928d85bd240bc12077679ec908257fa7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 28 Oct 2022 11:29:07 +0000 Subject: [PATCH 007/129] fix wrong cloudflare field key --- selfprivacy_api/migrations/providers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/selfprivacy_api/migrations/providers.py b/selfprivacy_api/migrations/providers.py index c565569..2cd5d5e 100644 --- a/selfprivacy_api/migrations/providers.py +++ b/selfprivacy_api/migrations/providers.py @@ -25,7 +25,7 @@ class CreateProviderFields(Migration): with WriteUserData() as userdata: userdata["dns"] = { "provider": "CLOUDFLARE", - "apiToken": userdata["cloudflare"]["apiToken"], + "apiKey": userdata["cloudflare"]["apiKey"], } userdata["server"] = { "provider": "HETZNER", From 9540e26ce1dc9c6a840321c35f9a5b7da9ca443b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 28 Oct 2022 11:55:21 +0000 Subject: [PATCH 008/129] add digitalocean and backblaze options --- selfprivacy_api/graphql/queries/providers.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py index 6d0381e..eba8df9 100644 --- a/selfprivacy_api/graphql/queries/providers.py +++ b/selfprivacy_api/graphql/queries/providers.py @@ -11,3 +11,9 @@ class DnsProvider(Enum): @strawberry.enum class ServerProvider(Enum): HETZNER = "HETZNER" + DIGITALOCEAN = "DIGITALOCEAN" + + +@strawberry.enum +class BackupProvider(Enum): + BACKBLAZE = "BACKBLAZE" From dd15e0ab658f1587ecc4ded2344891b1db404980 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 28 Oct 2022 12:34:58 +0000 Subject: [PATCH 009/129] reroute dns provider query --- selfprivacy_api/graphql/queries/system.py | 2 +- tests/test_graphql/test_system/turned_on.json | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index 0e2a7ec..6e681be 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -44,7 +44,7 @@ def get_system_domain_info() -> SystemDomainInfo: return SystemDomainInfo( domain=user_data["domain"], hostname=user_data["hostname"], - provider=DnsProvider.CLOUDFLARE, + 
provider=user_data["dns"]["provider"], ) diff --git a/tests/test_graphql/test_system/turned_on.json b/tests/test_graphql/test_system/turned_on.json index 821875b..7ad32c7 100644 --- a/tests/test_graphql/test_system/turned_on.json +++ b/tests/test_graphql/test_system/turned_on.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", @@ -51,5 +48,9 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + } } From 2ec0548c0927ecd91f8391c8ef97b0a02c9a7133 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 28 Oct 2022 12:45:19 +0000 Subject: [PATCH 010/129] add provider fields to the test jsons under system --- tests/test_graphql/test_system/no_values.json | 15 ++++++++++++++- tests/test_graphql/test_system/turned_off.json | 15 ++++++++++++++- tests/test_graphql/test_system/turned_on.json | 9 +++++++++ tests/test_graphql/test_system/undefined.json | 15 ++++++++++++++- 4 files changed, 51 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_system/no_values.json b/tests/test_graphql/test_system/no_values.json index 59e5e71..b903fea 100644 --- a/tests/test_graphql/test_system/no_values.json +++ b/tests/test_graphql/test_system/no_values.json @@ -46,5 +46,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_graphql/test_system/turned_off.json b/tests/test_graphql/test_system/turned_off.json index f451683..13c7556 100644 --- a/tests/test_graphql/test_system/turned_off.json +++ b/tests/test_graphql/test_system/turned_off.json @@ -48,5 +48,18 @@ 
"timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_graphql/test_system/turned_on.json b/tests/test_graphql/test_system/turned_on.json index 7ad32c7..d2b83b6 100644 --- a/tests/test_graphql/test_system/turned_on.json +++ b/tests/test_graphql/test_system/turned_on.json @@ -52,5 +52,14 @@ "dns": { "provider": "CLOUDFLARE", "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" } } diff --git a/tests/test_graphql/test_system/undefined.json b/tests/test_graphql/test_system/undefined.json index b67b296..69ff97c 100644 --- a/tests/test_graphql/test_system/undefined.json +++ b/tests/test_graphql/test_system/undefined.json @@ -43,5 +43,18 @@ }, "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file From f4a7986cfad3ee6ff45634dc9d3efd3accdf286e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 28 Oct 2022 12:54:48 +0000 Subject: [PATCH 011/129] Remove old cloudflare apiKey field from system test jsons Strangely, no test were broken. Probably more testing is needed for token-dependent methods. 
--- tests/test_graphql/test_system/no_values.json | 5 +---- tests/test_graphql/test_system/turned_off.json | 5 +---- tests/test_graphql/test_system/undefined.json | 5 +---- 3 files changed, 3 insertions(+), 12 deletions(-) diff --git a/tests/test_graphql/test_system/no_values.json b/tests/test_graphql/test_system/no_values.json index b903fea..e888ae2 100644 --- a/tests/test_graphql/test_system/no_values.json +++ b/tests/test_graphql/test_system/no_values.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", @@ -60,4 +57,4 @@ "accountKey": "KEY", "bucket": "bucket" } -} \ No newline at end of file +} diff --git a/tests/test_graphql/test_system/turned_off.json b/tests/test_graphql/test_system/turned_off.json index 13c7556..c490a4c 100644 --- a/tests/test_graphql/test_system/turned_off.json +++ b/tests/test_graphql/test_system/turned_off.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", @@ -62,4 +59,4 @@ "accountKey": "KEY", "bucket": "bucket" } -} \ No newline at end of file +} diff --git a/tests/test_graphql/test_system/undefined.json b/tests/test_graphql/test_system/undefined.json index 69ff97c..11dacd7 100644 --- a/tests/test_graphql/test_system/undefined.json +++ b/tests/test_graphql/test_system/undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", @@ -57,4 +54,4 @@ "accountKey": "KEY", "bucket": "bucket" } -} \ No newline at end of file +} From 5efa85f877ae6ddfe004ff89e106785124afb987 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 28 Oct 2022 13:03:07 +0000 Subject: [PATCH 012/129] add provider fields to the rest of test 
jsons --- tests/test_graphql/test_ssh/some_users.json | 15 ++++++++++++++- tests/test_graphql/test_users/no_users.json | 15 ++++++++++++++- tests/test_graphql/test_users/one_user.json | 15 ++++++++++++++- tests/test_graphql/test_users/some_users.json | 15 ++++++++++++++- tests/test_graphql/test_users/undefined.json | 15 ++++++++++++++- 5 files changed, 70 insertions(+), 5 deletions(-) diff --git a/tests/test_graphql/test_ssh/some_users.json b/tests/test_graphql/test_ssh/some_users.json index 569253a..987c64f 100644 --- a/tests/test_graphql/test_ssh/some_users.json +++ b/tests/test_graphql/test_ssh/some_users.json @@ -67,5 +67,18 @@ "username": "user3", "hashedPassword": "HASHED_PASSWORD_3" } - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_graphql/test_users/no_users.json b/tests/test_graphql/test_users/no_users.json index e5efe86..f205be3 100644 --- a/tests/test_graphql/test_users/no_users.json +++ b/tests/test_graphql/test_users/no_users.json @@ -50,5 +50,18 @@ "ssh-rsa KEY test@pc" ], "users": [ - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_graphql/test_users/one_user.json b/tests/test_graphql/test_users/one_user.json index 5df2108..3edaaa3 100644 --- a/tests/test_graphql/test_users/one_user.json +++ b/tests/test_graphql/test_users/one_user.json @@ -57,5 +57,18 @@ "ssh-rsa KEY user1@pc" ] } - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": 
"bucket" + } } \ No newline at end of file diff --git a/tests/test_graphql/test_users/some_users.json b/tests/test_graphql/test_users/some_users.json index 569253a..987c64f 100644 --- a/tests/test_graphql/test_users/some_users.json +++ b/tests/test_graphql/test_users/some_users.json @@ -67,5 +67,18 @@ "username": "user3", "hashedPassword": "HASHED_PASSWORD_3" } - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_graphql/test_users/undefined.json b/tests/test_graphql/test_users/undefined.json index 7b2cf8b..90379a5 100644 --- a/tests/test_graphql/test_users/undefined.json +++ b/tests/test_graphql/test_users/undefined.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file From 2d7bc0f15465f07f6cb1c8e9e854a9f0003bae2f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 28 Oct 2022 13:08:22 +0000 Subject: [PATCH 013/129] remove cloudflare tokens from the rest of the test jsons. 
nobody seems to care --- tests/test_graphql/test_ssh/some_users.json | 5 +---- tests/test_graphql/test_users/no_users.json | 5 +---- tests/test_graphql/test_users/one_user.json | 5 +---- tests/test_graphql/test_users/some_users.json | 5 +---- tests/test_graphql/test_users/undefined.json | 5 +---- 5 files changed, 5 insertions(+), 20 deletions(-) diff --git a/tests/test_graphql/test_ssh/some_users.json b/tests/test_graphql/test_ssh/some_users.json index 987c64f..2126748 100644 --- a/tests/test_graphql/test_ssh/some_users.json +++ b/tests/test_graphql/test_ssh/some_users.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", @@ -81,4 +78,4 @@ "accountKey": "KEY", "bucket": "bucket" } -} \ No newline at end of file +} diff --git a/tests/test_graphql/test_users/no_users.json b/tests/test_graphql/test_users/no_users.json index f205be3..d37c257 100644 --- a/tests/test_graphql/test_users/no_users.json +++ b/tests/test_graphql/test_users/no_users.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", @@ -64,4 +61,4 @@ "accountKey": "KEY", "bucket": "bucket" } -} \ No newline at end of file +} diff --git a/tests/test_graphql/test_users/one_user.json b/tests/test_graphql/test_users/one_user.json index 3edaaa3..a083273 100644 --- a/tests/test_graphql/test_users/one_user.json +++ b/tests/test_graphql/test_users/one_user.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", @@ -71,4 +68,4 @@ "accountKey": "KEY", "bucket": "bucket" } -} \ No newline at end of file +} diff --git a/tests/test_graphql/test_users/some_users.json 
b/tests/test_graphql/test_users/some_users.json index 987c64f..2126748 100644 --- a/tests/test_graphql/test_users/some_users.json +++ b/tests/test_graphql/test_users/some_users.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", @@ -81,4 +78,4 @@ "accountKey": "KEY", "bucket": "bucket" } -} \ No newline at end of file +} diff --git a/tests/test_graphql/test_users/undefined.json b/tests/test_graphql/test_users/undefined.json index 90379a5..5bf053c 100644 --- a/tests/test_graphql/test_users/undefined.json +++ b/tests/test_graphql/test_users/undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", @@ -62,4 +59,4 @@ "accountKey": "KEY", "bucket": "bucket" } -} \ No newline at end of file +} From 6c6f45781c34a7e504172881dd66404c9356c703 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 28 Oct 2022 13:32:33 +0000 Subject: [PATCH 014/129] add new fields to the rest of the test jsons. 
Missed some --- tests/test_block_device_utils/no_devices.json | 13 +++++++++++++ tests/test_block_device_utils/only_root.json | 15 ++++++++++++++- tests/test_block_device_utils/undefined.json | 15 ++++++++++++++- .../services/test_bitwarden/enable_undefined.json | 15 ++++++++++++++- .../services/test_bitwarden/turned_off.json | 15 ++++++++++++++- .../services/test_bitwarden/turned_on.json | 15 ++++++++++++++- .../services/test_bitwarden/undefined.json | 15 ++++++++++++++- .../services/test_gitea/enable_undefined.json | 15 ++++++++++++++- .../services/test_gitea/turned_off.json | 15 ++++++++++++++- .../services/test_gitea/turned_on.json | 15 ++++++++++++++- .../services/test_gitea/undefined.json | 15 ++++++++++++++- .../services/test_nextcloud/enable_undefined.json | 15 ++++++++++++++- .../services/test_nextcloud/turned_off.json | 15 ++++++++++++++- .../services/test_nextcloud/turned_on.json | 15 ++++++++++++++- .../services/test_nextcloud/undefined.json | 15 ++++++++++++++- .../services/test_ocserv/enable_undefined.json | 15 ++++++++++++++- .../services/test_ocserv/turned_off.json | 15 ++++++++++++++- .../services/test_ocserv/turned_on.json | 15 ++++++++++++++- .../services/test_ocserv/undefined.json | 15 ++++++++++++++- .../services/test_pleroma/enable_undefined.json | 15 ++++++++++++++- .../services/test_pleroma/turned_off.json | 15 ++++++++++++++- .../services/test_pleroma/turned_on.json | 15 ++++++++++++++- .../services/test_pleroma/undefined.json | 15 ++++++++++++++- .../services/test_restic/no_values.json | 15 ++++++++++++++- .../services/test_restic/some_values.json | 15 ++++++++++++++- .../services/test_restic/undefined.json | 15 ++++++++++++++- .../services/test_ssh/all_off.json | 15 ++++++++++++++- .../test_ssh/root_and_admin_have_keys.json | 15 ++++++++++++++- .../services/test_ssh/some_users.json | 15 ++++++++++++++- .../services/test_ssh/turned_off.json | 15 ++++++++++++++- .../services/test_ssh/turned_on.json | 15 ++++++++++++++- 
.../services/test_ssh/undefined.json | 15 ++++++++++++++- .../services/test_ssh/undefined_values.json | 15 ++++++++++++++- .../test_system/no_values.json | 15 ++++++++++++++- .../test_system/turned_off.json | 15 ++++++++++++++- .../test_system/turned_on.json | 15 ++++++++++++++- .../test_system/undefined.json | 15 ++++++++++++++- .../test_rest_endpoints/test_users/no_users.json | 15 ++++++++++++++- .../test_rest_endpoints/test_users/one_user.json | 15 ++++++++++++++- .../test_users/some_users.json | 15 ++++++++++++++- .../test_rest_endpoints/test_users/undefined.json | 15 ++++++++++++++- 41 files changed, 573 insertions(+), 40 deletions(-) diff --git a/tests/test_block_device_utils/no_devices.json b/tests/test_block_device_utils/no_devices.json index 97300ca..33280d9 100644 --- a/tests/test_block_device_utils/no_devices.json +++ b/tests/test_block_device_utils/no_devices.json @@ -49,6 +49,19 @@ "sshKeys": [ "ssh-rsa KEY test@pc" ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + }, "volumes": [ ] } diff --git a/tests/test_block_device_utils/only_root.json b/tests/test_block_device_utils/only_root.json index 0f8ec0d..7fee5b9 100644 --- a/tests/test_block_device_utils/only_root.json +++ b/tests/test_block_device_utils/only_root.json @@ -55,5 +55,18 @@ "mountPoint": "/volumes/sda1", "filesystem": "ext4" } - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } diff --git a/tests/test_block_device_utils/undefined.json b/tests/test_block_device_utils/undefined.json index eb660cc..9e600e2 100644 --- a/tests/test_block_device_utils/undefined.json +++ b/tests/test_block_device_utils/undefined.json @@ -48,5 +48,18 @@ "timezone": 
"Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } diff --git a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json index 05e04c1..f7a2c9b 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json @@ -47,5 +47,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json index 7b2cf8b..90379a5 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json index 337e47f..236f89f 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": 
[ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json index 625422b..9b5c2df 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json @@ -45,5 +45,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json index 07b0e78..5def931 100644 --- a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json @@ -47,5 +47,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_off.json b/tests/test_rest_endpoints/services/test_gitea/turned_off.json index 7b2cf8b..90379a5 100644 --- a/tests/test_rest_endpoints/services/test_gitea/turned_off.json +++ b/tests/test_rest_endpoints/services/test_gitea/turned_off.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + 
], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_on.json b/tests/test_rest_endpoints/services/test_gitea/turned_on.json index acb98ce..86690f5 100644 --- a/tests/test_rest_endpoints/services/test_gitea/turned_on.json +++ b/tests/test_rest_endpoints/services/test_gitea/turned_on.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/undefined.json b/tests/test_rest_endpoints/services/test_gitea/undefined.json index f689b2e..dca3d0d 100644 --- a/tests/test_rest_endpoints/services/test_gitea/undefined.json +++ b/tests/test_rest_endpoints/services/test_gitea/undefined.json @@ -45,5 +45,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json index 68127f0..8352918 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json @@ -47,5 +47,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": 
"CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json index 375e70f..f115c1b 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json index 7b2cf8b..90379a5 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json index fb02c69..cd10bca 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json @@ -40,5 +40,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": 
"TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json index 88d804d..bce258b 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json @@ -47,5 +47,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json index 6220561..f03d3de 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json +++ b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json index 375e70f..f115c1b 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json +++ b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + 
"provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/undefined.json b/tests/test_rest_endpoints/services/test_ocserv/undefined.json index f7e21bf..7d0d05f 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/undefined.json +++ b/tests/test_rest_endpoints/services/test_ocserv/undefined.json @@ -45,5 +45,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json index 20ab960..cb06c64 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json @@ -47,5 +47,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json index b6d5fd6..c74e28d 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json +++ b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, 
+ "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json index 6220561..f03d3de 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json +++ b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/undefined.json b/tests/test_rest_endpoints/services/test_pleroma/undefined.json index b909a95..325f2c4 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/undefined.json +++ b/tests/test_rest_endpoints/services/test_pleroma/undefined.json @@ -45,5 +45,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_restic/no_values.json b/tests/test_rest_endpoints/services/test_restic/no_values.json index c1ef7a0..26a0196 100644 --- a/tests/test_rest_endpoints/services/test_restic/no_values.json +++ b/tests/test_rest_endpoints/services/test_restic/no_values.json @@ -64,5 +64,18 @@ "username": "user3", "hashedPassword": "HASHED_PASSWORD_3" } - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", 
+ "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_restic/some_values.json b/tests/test_rest_endpoints/services/test_restic/some_values.json index a7dbf39..67a235d 100644 --- a/tests/test_rest_endpoints/services/test_restic/some_values.json +++ b/tests/test_rest_endpoints/services/test_restic/some_values.json @@ -67,5 +67,18 @@ "username": "user3", "hashedPassword": "HASHED_PASSWORD_3" } - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_restic/undefined.json b/tests/test_rest_endpoints/services/test_restic/undefined.json index 59e42a0..0e26dd7 100644 --- a/tests/test_rest_endpoints/services/test_restic/undefined.json +++ b/tests/test_rest_endpoints/services/test_restic/undefined.json @@ -62,5 +62,18 @@ "username": "user3", "hashedPassword": "HASHED_PASSWORD_3" } - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/all_off.json b/tests/test_rest_endpoints/services/test_ssh/all_off.json index e1b8510..dd50dc9 100644 --- a/tests/test_rest_endpoints/services/test_ssh/all_off.json +++ b/tests/test_rest_endpoints/services/test_ssh/all_off.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff 
--git a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json index 7b2cf8b..90379a5 100644 --- a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json +++ b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/some_users.json b/tests/test_rest_endpoints/services/test_ssh/some_users.json index 569253a..987c64f 100644 --- a/tests/test_rest_endpoints/services/test_ssh/some_users.json +++ b/tests/test_rest_endpoints/services/test_ssh/some_users.json @@ -67,5 +67,18 @@ "username": "user3", "hashedPassword": "HASHED_PASSWORD_3" } - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/turned_off.json b/tests/test_rest_endpoints/services/test_ssh/turned_off.json index b09395b..b73e817 100644 --- a/tests/test_rest_endpoints/services/test_ssh/turned_off.json +++ b/tests/test_rest_endpoints/services/test_ssh/turned_off.json @@ -42,5 +42,18 @@ "enable": true, "allowReboot": true }, - "timezone": "Europe/Moscow" + "timezone": "Europe/Moscow", + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git 
a/tests/test_rest_endpoints/services/test_ssh/turned_on.json b/tests/test_rest_endpoints/services/test_ssh/turned_on.json index 44b28ce..c9ea835 100644 --- a/tests/test_rest_endpoints/services/test_ssh/turned_on.json +++ b/tests/test_rest_endpoints/services/test_ssh/turned_on.json @@ -42,5 +42,18 @@ "enable": true, "allowReboot": true }, - "timezone": "Europe/Moscow" + "timezone": "Europe/Moscow", + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined.json b/tests/test_rest_endpoints/services/test_ssh/undefined.json index a214cc3..edb6a43 100644 --- a/tests/test_rest_endpoints/services/test_ssh/undefined.json +++ b/tests/test_rest_endpoints/services/test_ssh/undefined.json @@ -38,5 +38,18 @@ "enable": true, "allowReboot": true }, - "timezone": "Europe/Moscow" + "timezone": "Europe/Moscow", + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json index 235a220..40feec5 100644 --- a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json +++ b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json @@ -42,5 +42,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git 
a/tests/test_rest_endpoints/test_system/no_values.json b/tests/test_rest_endpoints/test_system/no_values.json index 59e5e71..b903fea 100644 --- a/tests/test_rest_endpoints/test_system/no_values.json +++ b/tests/test_rest_endpoints/test_system/no_values.json @@ -46,5 +46,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/turned_off.json b/tests/test_rest_endpoints/test_system/turned_off.json index f451683..13c7556 100644 --- a/tests/test_rest_endpoints/test_system/turned_off.json +++ b/tests/test_rest_endpoints/test_system/turned_off.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/turned_on.json b/tests/test_rest_endpoints/test_system/turned_on.json index 337e47f..236f89f 100644 --- a/tests/test_rest_endpoints/test_system/turned_on.json +++ b/tests/test_rest_endpoints/test_system/turned_on.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/undefined.json b/tests/test_rest_endpoints/test_system/undefined.json index b67b296..69ff97c 100644 --- 
a/tests/test_rest_endpoints/test_system/undefined.json +++ b/tests/test_rest_endpoints/test_system/undefined.json @@ -43,5 +43,18 @@ }, "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/no_users.json b/tests/test_rest_endpoints/test_users/no_users.json index e5efe86..f205be3 100644 --- a/tests/test_rest_endpoints/test_users/no_users.json +++ b/tests/test_rest_endpoints/test_users/no_users.json @@ -50,5 +50,18 @@ "ssh-rsa KEY test@pc" ], "users": [ - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/one_user.json b/tests/test_rest_endpoints/test_users/one_user.json index 5df2108..3edaaa3 100644 --- a/tests/test_rest_endpoints/test_users/one_user.json +++ b/tests/test_rest_endpoints/test_users/one_user.json @@ -57,5 +57,18 @@ "ssh-rsa KEY user1@pc" ] } - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/some_users.json b/tests/test_rest_endpoints/test_users/some_users.json index 569253a..987c64f 100644 --- a/tests/test_rest_endpoints/test_users/some_users.json +++ b/tests/test_rest_endpoints/test_users/some_users.json @@ -67,5 +67,18 @@ "username": "user3", "hashedPassword": "HASHED_PASSWORD_3" } - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + 
"provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/undefined.json b/tests/test_rest_endpoints/test_users/undefined.json index 7b2cf8b..90379a5 100644 --- a/tests/test_rest_endpoints/test_users/undefined.json +++ b/tests/test_rest_endpoints/test_users/undefined.json @@ -48,5 +48,18 @@ "timezone": "Europe/Moscow", "sshKeys": [ "ssh-rsa KEY test@pc" - ] + ], + "dns": { + "provider": "CLOUDFLARE", + "apiKey": "TOKEN" + }, + "server": { + "provider": "HETZNER" + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "bucket" + } } \ No newline at end of file From 589093b853802883babcaadbadea62316a778d78 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 31 Oct 2022 10:13:53 +0000 Subject: [PATCH 015/129] delete old cloudflare field --- tests/test_block_device_utils/no_devices.json | 3 --- tests/test_block_device_utils/only_root.json | 3 --- tests/test_block_device_utils/undefined.json | 3 --- .../services/test_bitwarden/enable_undefined.json | 3 --- .../services/test_bitwarden/turned_off.json | 3 --- .../test_rest_endpoints/services/test_bitwarden/turned_on.json | 3 --- .../test_rest_endpoints/services/test_bitwarden/undefined.json | 3 --- .../services/test_gitea/enable_undefined.json | 3 --- tests/test_rest_endpoints/services/test_gitea/turned_off.json | 3 --- tests/test_rest_endpoints/services/test_gitea/turned_on.json | 3 --- tests/test_rest_endpoints/services/test_gitea/undefined.json | 3 --- .../services/test_nextcloud/enable_undefined.json | 3 --- .../services/test_nextcloud/turned_off.json | 3 --- .../test_rest_endpoints/services/test_nextcloud/turned_on.json | 3 --- .../test_rest_endpoints/services/test_nextcloud/undefined.json | 3 --- .../services/test_ocserv/enable_undefined.json | 3 --- tests/test_rest_endpoints/services/test_ocserv/turned_off.json | 
3 --- tests/test_rest_endpoints/services/test_ocserv/turned_on.json | 3 --- tests/test_rest_endpoints/services/test_ocserv/undefined.json | 3 --- .../services/test_pleroma/enable_undefined.json | 3 --- .../test_rest_endpoints/services/test_pleroma/turned_off.json | 3 --- tests/test_rest_endpoints/services/test_pleroma/turned_on.json | 3 --- tests/test_rest_endpoints/services/test_pleroma/undefined.json | 3 --- tests/test_rest_endpoints/services/test_restic/no_values.json | 3 --- .../test_rest_endpoints/services/test_restic/some_values.json | 3 --- tests/test_rest_endpoints/services/test_restic/undefined.json | 3 --- tests/test_rest_endpoints/services/test_ssh/all_off.json | 3 --- .../services/test_ssh/root_and_admin_have_keys.json | 3 --- tests/test_rest_endpoints/services/test_ssh/some_users.json | 3 --- tests/test_rest_endpoints/services/test_ssh/turned_off.json | 3 --- tests/test_rest_endpoints/services/test_ssh/turned_on.json | 3 --- tests/test_rest_endpoints/services/test_ssh/undefined.json | 3 --- .../services/test_ssh/undefined_values.json | 3 --- tests/test_rest_endpoints/test_system/no_values.json | 3 --- tests/test_rest_endpoints/test_system/turned_off.json | 3 --- tests/test_rest_endpoints/test_system/turned_on.json | 3 --- tests/test_rest_endpoints/test_system/undefined.json | 3 --- tests/test_rest_endpoints/test_users/no_users.json | 3 --- tests/test_rest_endpoints/test_users/one_user.json | 3 --- tests/test_rest_endpoints/test_users/some_users.json | 3 --- tests/test_rest_endpoints/test_users/undefined.json | 3 --- 41 files changed, 123 deletions(-) diff --git a/tests/test_block_device_utils/no_devices.json b/tests/test_block_device_utils/no_devices.json index 33280d9..3949584 100644 --- a/tests/test_block_device_utils/no_devices.json +++ b/tests/test_block_device_utils/no_devices.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", 
"hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_block_device_utils/only_root.json b/tests/test_block_device_utils/only_root.json index 7fee5b9..5100fa8 100644 --- a/tests/test_block_device_utils/only_root.json +++ b/tests/test_block_device_utils/only_root.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_block_device_utils/undefined.json b/tests/test_block_device_utils/undefined.json index 9e600e2..426ae52 100644 --- a/tests/test_block_device_utils/undefined.json +++ b/tests/test_block_device_utils/undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json index f7a2c9b..63fef0e 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json @@ -10,9 +10,6 @@ }, "bitwarden": { }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json index 90379a5..3d4a8b3 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git 
a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json index 236f89f..26fc041 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json index 9b5c2df..a2d398a 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json @@ -8,9 +8,6 @@ "token": "TEST_TOKEN", "enableSwagger": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json index 5def931..6dcc74f 100644 --- a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_off.json b/tests/test_rest_endpoints/services/test_gitea/turned_off.json index 90379a5..3d4a8b3 100644 --- a/tests/test_rest_endpoints/services/test_gitea/turned_off.json +++ b/tests/test_rest_endpoints/services/test_gitea/turned_off.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", 
"hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_on.json b/tests/test_rest_endpoints/services/test_gitea/turned_on.json index 86690f5..94ba2b9 100644 --- a/tests/test_rest_endpoints/services/test_gitea/turned_on.json +++ b/tests/test_rest_endpoints/services/test_gitea/turned_on.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_gitea/undefined.json b/tests/test_rest_endpoints/services/test_gitea/undefined.json index dca3d0d..93fb946 100644 --- a/tests/test_rest_endpoints/services/test_gitea/undefined.json +++ b/tests/test_rest_endpoints/services/test_gitea/undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json index 8352918..e9b22e1 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json index f115c1b..4e3114f 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, 
"databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json index 90379a5..3d4a8b3 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json index cd10bca..5b49e74 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json @@ -8,9 +8,6 @@ "token": "TEST_TOKEN", "enableSwagger": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json index bce258b..b5e2108 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json index f03d3de..7bdef4a 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json +++ b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json @@ -11,9 +11,6 @@ "bitwarden": { 
"enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json index f115c1b..4e3114f 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json +++ b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_ocserv/undefined.json b/tests/test_rest_endpoints/services/test_ocserv/undefined.json index 7d0d05f..1d64a84 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/undefined.json +++ b/tests/test_rest_endpoints/services/test_ocserv/undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json index cb06c64..3d88cd4 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json index c74e28d..6a6a18e 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json +++ b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json @@ 
-11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json index f03d3de..7bdef4a 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json +++ b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_pleroma/undefined.json b/tests/test_rest_endpoints/services/test_pleroma/undefined.json index 325f2c4..fe994a0 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/undefined.json +++ b/tests/test_rest_endpoints/services/test_pleroma/undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_restic/no_values.json b/tests/test_rest_endpoints/services/test_restic/no_values.json index 26a0196..75c0122 100644 --- a/tests/test_rest_endpoints/services/test_restic/no_values.json +++ b/tests/test_rest_endpoints/services/test_restic/no_values.json @@ -8,9 +8,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_restic/some_values.json b/tests/test_rest_endpoints/services/test_restic/some_values.json index 67a235d..1f9042b 100644 --- a/tests/test_rest_endpoints/services/test_restic/some_values.json +++ b/tests/test_rest_endpoints/services/test_restic/some_values.json 
@@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_restic/undefined.json b/tests/test_rest_endpoints/services/test_restic/undefined.json index 0e26dd7..52ab6bb 100644 --- a/tests/test_rest_endpoints/services/test_restic/undefined.json +++ b/tests/test_rest_endpoints/services/test_restic/undefined.json @@ -6,9 +6,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_ssh/all_off.json b/tests/test_rest_endpoints/services/test_ssh/all_off.json index dd50dc9..862270c 100644 --- a/tests/test_rest_endpoints/services/test_ssh/all_off.json +++ b/tests/test_rest_endpoints/services/test_ssh/all_off.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json index 90379a5..3d4a8b3 100644 --- a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json +++ b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_ssh/some_users.json b/tests/test_rest_endpoints/services/test_ssh/some_users.json index 987c64f..62c2919 100644 --- a/tests/test_rest_endpoints/services/test_ssh/some_users.json +++ 
b/tests/test_rest_endpoints/services/test_ssh/some_users.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_ssh/turned_off.json b/tests/test_rest_endpoints/services/test_ssh/turned_off.json index b73e817..7b5ab1b 100644 --- a/tests/test_rest_endpoints/services/test_ssh/turned_off.json +++ b/tests/test_rest_endpoints/services/test_ssh/turned_off.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_ssh/turned_on.json b/tests/test_rest_endpoints/services/test_ssh/turned_on.json index c9ea835..cb7761c 100644 --- a/tests/test_rest_endpoints/services/test_ssh/turned_on.json +++ b/tests/test_rest_endpoints/services/test_ssh/turned_on.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined.json b/tests/test_rest_endpoints/services/test_ssh/undefined.json index edb6a43..db3a227 100644 --- a/tests/test_rest_endpoints/services/test_ssh/undefined.json +++ b/tests/test_rest_endpoints/services/test_ssh/undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json index 40feec5..8c36f13 100644 --- a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json +++ 
b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/test_system/no_values.json b/tests/test_rest_endpoints/test_system/no_values.json index b903fea..de42ac6 100644 --- a/tests/test_rest_endpoints/test_system/no_values.json +++ b/tests/test_rest_endpoints/test_system/no_values.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/test_system/turned_off.json b/tests/test_rest_endpoints/test_system/turned_off.json index 13c7556..a8f1d90 100644 --- a/tests/test_rest_endpoints/test_system/turned_off.json +++ b/tests/test_rest_endpoints/test_system/turned_off.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/test_system/turned_on.json b/tests/test_rest_endpoints/test_system/turned_on.json index 236f89f..26fc041 100644 --- a/tests/test_rest_endpoints/test_system/turned_on.json +++ b/tests/test_rest_endpoints/test_system/turned_on.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/test_system/undefined.json b/tests/test_rest_endpoints/test_system/undefined.json index 69ff97c..c4d9382 100644 --- a/tests/test_rest_endpoints/test_system/undefined.json +++ b/tests/test_rest_endpoints/test_system/undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": true }, - "cloudflare": { - "apiKey": 
"TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/test_users/no_users.json b/tests/test_rest_endpoints/test_users/no_users.json index f205be3..f52956a 100644 --- a/tests/test_rest_endpoints/test_users/no_users.json +++ b/tests/test_rest_endpoints/test_users/no_users.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/test_users/one_user.json b/tests/test_rest_endpoints/test_users/one_user.json index 3edaaa3..98e60f6 100644 --- a/tests/test_rest_endpoints/test_users/one_user.json +++ b/tests/test_rest_endpoints/test_users/one_user.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/test_users/some_users.json b/tests/test_rest_endpoints/test_users/some_users.json index 987c64f..62c2919 100644 --- a/tests/test_rest_endpoints/test_users/some_users.json +++ b/tests/test_rest_endpoints/test_users/some_users.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", diff --git a/tests/test_rest_endpoints/test_users/undefined.json b/tests/test_rest_endpoints/test_users/undefined.json index 90379a5..3d4a8b3 100644 --- a/tests/test_rest_endpoints/test_users/undefined.json +++ b/tests/test_rest_endpoints/test_users/undefined.json @@ -11,9 +11,6 @@ "bitwarden": { "enable": false }, - "cloudflare": { - "apiKey": "TOKEN" - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", From a723311b36c0ddb4647f4c412aa9d510b93a83b9 Mon Sep 17 00:00:00 
2001 From: Houkime <> Date: Mon, 31 Oct 2022 10:52:38 +0000 Subject: [PATCH 016/129] reroute system provider query --- selfprivacy_api/graphql/queries/system.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index 6e681be..112fa8c 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -133,7 +133,11 @@ class SystemProviderInfo: def get_system_provider_info() -> SystemProviderInfo: """Get system provider info""" - return SystemProviderInfo(provider=ServerProvider.HETZNER, id="UNKNOWN") + with ReadUserData() as user_data: + return SystemProviderInfo( + provider=user_data["server"]["provider"], + id="UNKNOWN" + ) @strawberry.type From 07af2e59bef34d98cab12f48c8ba509763f72b62 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 31 Oct 2022 10:59:46 +0000 Subject: [PATCH 017/129] tests: fix backblaze bucket --- tests/test_block_device_utils/no_devices.json | 2 +- tests/test_block_device_utils/only_root.json | 2 +- tests/test_block_device_utils/undefined.json | 2 +- tests/test_graphql/test_ssh/some_users.json | 2 +- tests/test_graphql/test_system/no_values.json | 2 +- tests/test_graphql/test_system/turned_off.json | 2 +- tests/test_graphql/test_system/turned_on.json | 2 +- tests/test_graphql/test_system/undefined.json | 2 +- tests/test_graphql/test_users/no_users.json | 2 +- tests/test_graphql/test_users/one_user.json | 2 +- tests/test_graphql/test_users/some_users.json | 2 +- tests/test_graphql/test_users/undefined.json | 2 +- .../services/test_bitwarden/enable_undefined.json | 2 +- .../test_rest_endpoints/services/test_bitwarden/turned_off.json | 2 +- .../test_rest_endpoints/services/test_bitwarden/turned_on.json | 2 +- .../test_rest_endpoints/services/test_bitwarden/undefined.json | 2 +- .../services/test_gitea/enable_undefined.json | 2 +- tests/test_rest_endpoints/services/test_gitea/turned_off.json | 2 +- 
tests/test_rest_endpoints/services/test_gitea/turned_on.json | 2 +- tests/test_rest_endpoints/services/test_gitea/undefined.json | 2 +- .../services/test_nextcloud/enable_undefined.json | 2 +- .../test_rest_endpoints/services/test_nextcloud/turned_off.json | 2 +- .../test_rest_endpoints/services/test_nextcloud/turned_on.json | 2 +- .../test_rest_endpoints/services/test_nextcloud/undefined.json | 2 +- .../services/test_ocserv/enable_undefined.json | 2 +- tests/test_rest_endpoints/services/test_ocserv/turned_off.json | 2 +- tests/test_rest_endpoints/services/test_ocserv/turned_on.json | 2 +- tests/test_rest_endpoints/services/test_ocserv/undefined.json | 2 +- .../services/test_pleroma/enable_undefined.json | 2 +- tests/test_rest_endpoints/services/test_pleroma/turned_off.json | 2 +- tests/test_rest_endpoints/services/test_pleroma/turned_on.json | 2 +- tests/test_rest_endpoints/services/test_pleroma/undefined.json | 2 +- tests/test_rest_endpoints/services/test_restic/no_values.json | 2 +- tests/test_rest_endpoints/services/test_restic/some_values.json | 2 +- tests/test_rest_endpoints/services/test_restic/undefined.json | 2 +- tests/test_rest_endpoints/services/test_ssh/all_off.json | 2 +- .../services/test_ssh/root_and_admin_have_keys.json | 2 +- tests/test_rest_endpoints/services/test_ssh/some_users.json | 2 +- tests/test_rest_endpoints/services/test_ssh/turned_off.json | 2 +- tests/test_rest_endpoints/services/test_ssh/turned_on.json | 2 +- tests/test_rest_endpoints/services/test_ssh/undefined.json | 2 +- .../test_rest_endpoints/services/test_ssh/undefined_values.json | 2 +- tests/test_rest_endpoints/test_system/no_values.json | 2 +- tests/test_rest_endpoints/test_system/turned_off.json | 2 +- tests/test_rest_endpoints/test_system/turned_on.json | 2 +- tests/test_rest_endpoints/test_system/undefined.json | 2 +- tests/test_rest_endpoints/test_users/no_users.json | 2 +- tests/test_rest_endpoints/test_users/one_user.json | 2 +- 
tests/test_rest_endpoints/test_users/some_users.json | 2 +- tests/test_rest_endpoints/test_users/undefined.json | 2 +- 50 files changed, 50 insertions(+), 50 deletions(-) diff --git a/tests/test_block_device_utils/no_devices.json b/tests/test_block_device_utils/no_devices.json index 3949584..0918cd8 100644 --- a/tests/test_block_device_utils/no_devices.json +++ b/tests/test_block_device_utils/no_devices.json @@ -57,7 +57,7 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" }, "volumes": [ ] diff --git a/tests/test_block_device_utils/only_root.json b/tests/test_block_device_utils/only_root.json index 5100fa8..9c064c0 100644 --- a/tests/test_block_device_utils/only_root.json +++ b/tests/test_block_device_utils/only_root.json @@ -64,6 +64,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } diff --git a/tests/test_block_device_utils/undefined.json b/tests/test_block_device_utils/undefined.json index 426ae52..e764ebc 100644 --- a/tests/test_block_device_utils/undefined.json +++ b/tests/test_block_device_utils/undefined.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_ssh/some_users.json b/tests/test_graphql/test_ssh/some_users.json index 2126748..6bc3525 100644 --- a/tests/test_graphql/test_ssh/some_users.json +++ b/tests/test_graphql/test_ssh/some_users.json @@ -76,6 +76,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system/no_values.json b/tests/test_graphql/test_system/no_values.json index e888ae2..110c762 100644 --- a/tests/test_graphql/test_system/no_values.json +++ b/tests/test_graphql/test_system/no_values.json @@ -55,6 +55,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - 
"bucket": "bucket" + "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system/turned_off.json b/tests/test_graphql/test_system/turned_off.json index c490a4c..3fdbb8a 100644 --- a/tests/test_graphql/test_system/turned_off.json +++ b/tests/test_graphql/test_system/turned_off.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system/turned_on.json b/tests/test_graphql/test_system/turned_on.json index d2b83b6..ab1723f 100644 --- a/tests/test_graphql/test_system/turned_on.json +++ b/tests/test_graphql/test_system/turned_on.json @@ -60,6 +60,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system/undefined.json b/tests/test_graphql/test_system/undefined.json index 11dacd7..1720527 100644 --- a/tests/test_graphql/test_system/undefined.json +++ b/tests/test_graphql/test_system/undefined.json @@ -52,6 +52,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_users/no_users.json b/tests/test_graphql/test_users/no_users.json index d37c257..27207fb 100644 --- a/tests/test_graphql/test_users/no_users.json +++ b/tests/test_graphql/test_users/no_users.json @@ -59,6 +59,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_users/one_user.json b/tests/test_graphql/test_users/one_user.json index a083273..bd3efc8 100644 --- a/tests/test_graphql/test_users/one_user.json +++ b/tests/test_graphql/test_users/one_user.json @@ -66,6 +66,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_users/some_users.json 
b/tests/test_graphql/test_users/some_users.json index 2126748..6bc3525 100644 --- a/tests/test_graphql/test_users/some_users.json +++ b/tests/test_graphql/test_users/some_users.json @@ -76,6 +76,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_users/undefined.json b/tests/test_graphql/test_users/undefined.json index 5bf053c..f0826c4 100644 --- a/tests/test_graphql/test_users/undefined.json +++ b/tests/test_graphql/test_users/undefined.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } diff --git a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json index 63fef0e..e359a16 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json @@ -56,6 +56,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json index 3d4a8b3..fcad1f5 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json index 26fc041..7ad9081 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json @@ 
-57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json index a2d398a..1deab31 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json @@ -54,6 +54,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json index 6dcc74f..fd67d96 100644 --- a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json @@ -56,6 +56,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_off.json b/tests/test_rest_endpoints/services/test_gitea/turned_off.json index 3d4a8b3..fcad1f5 100644 --- a/tests/test_rest_endpoints/services/test_gitea/turned_off.json +++ b/tests/test_rest_endpoints/services/test_gitea/turned_off.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_on.json b/tests/test_rest_endpoints/services/test_gitea/turned_on.json index 94ba2b9..50ce50b 100644 --- a/tests/test_rest_endpoints/services/test_gitea/turned_on.json +++ b/tests/test_rest_endpoints/services/test_gitea/turned_on.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": 
"bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/undefined.json b/tests/test_rest_endpoints/services/test_gitea/undefined.json index 93fb946..d6187cc 100644 --- a/tests/test_rest_endpoints/services/test_gitea/undefined.json +++ b/tests/test_rest_endpoints/services/test_gitea/undefined.json @@ -54,6 +54,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json index e9b22e1..8f00842 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json @@ -56,6 +56,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json index 4e3114f..8b6f466 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json index 3d4a8b3..fcad1f5 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file 
diff --git a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json index 5b49e74..dd13c56 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json @@ -49,6 +49,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json index b5e2108..a509d77 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json @@ -56,6 +56,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json index 7bdef4a..8542a7c 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json +++ b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json index 4e3114f..8b6f466 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json +++ b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/undefined.json 
b/tests/test_rest_endpoints/services/test_ocserv/undefined.json index 1d64a84..7ffae11 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/undefined.json +++ b/tests/test_rest_endpoints/services/test_ocserv/undefined.json @@ -54,6 +54,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json index 3d88cd4..5d359c2 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json @@ -56,6 +56,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json index 6a6a18e..74bfb7c 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json +++ b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json index 7bdef4a..8542a7c 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json +++ b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/undefined.json b/tests/test_rest_endpoints/services/test_pleroma/undefined.json index 
fe994a0..dcfe1e1 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/undefined.json +++ b/tests/test_rest_endpoints/services/test_pleroma/undefined.json @@ -54,6 +54,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_restic/no_values.json b/tests/test_rest_endpoints/services/test_restic/no_values.json index 75c0122..0a8a0a0 100644 --- a/tests/test_rest_endpoints/services/test_restic/no_values.json +++ b/tests/test_rest_endpoints/services/test_restic/no_values.json @@ -73,6 +73,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_restic/some_values.json b/tests/test_rest_endpoints/services/test_restic/some_values.json index 1f9042b..aa3a19f 100644 --- a/tests/test_rest_endpoints/services/test_restic/some_values.json +++ b/tests/test_rest_endpoints/services/test_restic/some_values.json @@ -76,6 +76,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_restic/undefined.json b/tests/test_rest_endpoints/services/test_restic/undefined.json index 52ab6bb..df6380a 100644 --- a/tests/test_rest_endpoints/services/test_restic/undefined.json +++ b/tests/test_rest_endpoints/services/test_restic/undefined.json @@ -71,6 +71,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/all_off.json b/tests/test_rest_endpoints/services/test_ssh/all_off.json index 862270c..97d7690 100644 --- a/tests/test_rest_endpoints/services/test_ssh/all_off.json +++ 
b/tests/test_rest_endpoints/services/test_ssh/all_off.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json index 3d4a8b3..fcad1f5 100644 --- a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json +++ b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/some_users.json b/tests/test_rest_endpoints/services/test_ssh/some_users.json index 62c2919..0706d9e 100644 --- a/tests/test_rest_endpoints/services/test_ssh/some_users.json +++ b/tests/test_rest_endpoints/services/test_ssh/some_users.json @@ -76,6 +76,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/turned_off.json b/tests/test_rest_endpoints/services/test_ssh/turned_off.json index 7b5ab1b..92b2196 100644 --- a/tests/test_rest_endpoints/services/test_ssh/turned_off.json +++ b/tests/test_rest_endpoints/services/test_ssh/turned_off.json @@ -51,6 +51,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/turned_on.json b/tests/test_rest_endpoints/services/test_ssh/turned_on.json index cb7761c..7434d16 100644 --- a/tests/test_rest_endpoints/services/test_ssh/turned_on.json +++ b/tests/test_rest_endpoints/services/test_ssh/turned_on.json @@ -51,6 +51,6 @@ "provider": "BACKBLAZE", 
"accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined.json b/tests/test_rest_endpoints/services/test_ssh/undefined.json index db3a227..290b396 100644 --- a/tests/test_rest_endpoints/services/test_ssh/undefined.json +++ b/tests/test_rest_endpoints/services/test_ssh/undefined.json @@ -47,6 +47,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json index 8c36f13..a570880 100644 --- a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json +++ b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json @@ -51,6 +51,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/no_values.json b/tests/test_rest_endpoints/test_system/no_values.json index de42ac6..442b62d 100644 --- a/tests/test_rest_endpoints/test_system/no_values.json +++ b/tests/test_rest_endpoints/test_system/no_values.json @@ -55,6 +55,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/turned_off.json b/tests/test_rest_endpoints/test_system/turned_off.json index a8f1d90..3a4a238 100644 --- a/tests/test_rest_endpoints/test_system/turned_off.json +++ b/tests/test_rest_endpoints/test_system/turned_off.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/turned_on.json 
b/tests/test_rest_endpoints/test_system/turned_on.json index 26fc041..7ad9081 100644 --- a/tests/test_rest_endpoints/test_system/turned_on.json +++ b/tests/test_rest_endpoints/test_system/turned_on.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/undefined.json b/tests/test_rest_endpoints/test_system/undefined.json index c4d9382..fd1efad 100644 --- a/tests/test_rest_endpoints/test_system/undefined.json +++ b/tests/test_rest_endpoints/test_system/undefined.json @@ -52,6 +52,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/no_users.json b/tests/test_rest_endpoints/test_users/no_users.json index f52956a..dadb584 100644 --- a/tests/test_rest_endpoints/test_users/no_users.json +++ b/tests/test_rest_endpoints/test_users/no_users.json @@ -59,6 +59,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/one_user.json b/tests/test_rest_endpoints/test_users/one_user.json index 98e60f6..71d6338 100644 --- a/tests/test_rest_endpoints/test_users/one_user.json +++ b/tests/test_rest_endpoints/test_users/one_user.json @@ -66,6 +66,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/some_users.json b/tests/test_rest_endpoints/test_users/some_users.json index 62c2919..0706d9e 100644 --- a/tests/test_rest_endpoints/test_users/some_users.json +++ b/tests/test_rest_endpoints/test_users/some_users.json @@ -76,6 +76,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - 
"bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/undefined.json b/tests/test_rest_endpoints/test_users/undefined.json index 3d4a8b3..fcad1f5 100644 --- a/tests/test_rest_endpoints/test_users/undefined.json +++ b/tests/test_rest_endpoints/test_users/undefined.json @@ -57,6 +57,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "bucket" + "bucket": "selfprivacy" } } \ No newline at end of file From 41e5f89b7b320d21f881bccc2e4b050672ee34d5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 31 Oct 2022 13:32:17 +0000 Subject: [PATCH 018/129] migrate backblaze settings to new json format And adjust methods and tests --- selfprivacy_api/rest/services.py | 17 ++++++----- tests/test_block_device_utils/no_devices.json | 5 ---- tests/test_block_device_utils/only_root.json | 5 ---- tests/test_block_device_utils/undefined.json | 5 ---- tests/test_graphql/test_ssh/some_users.json | 5 ---- tests/test_graphql/test_system/no_values.json | 5 ---- .../test_graphql/test_system/turned_off.json | 5 ---- tests/test_graphql/test_system/turned_on.json | 5 ---- tests/test_graphql/test_system/undefined.json | 5 ---- tests/test_graphql/test_users/no_users.json | 5 ---- tests/test_graphql/test_users/one_user.json | 5 ---- tests/test_graphql/test_users/some_users.json | 5 ---- tests/test_graphql/test_users/undefined.json | 5 ---- .../test_bitwarden/enable_undefined.json | 5 ---- .../services/test_bitwarden/turned_off.json | 5 ---- .../services/test_bitwarden/turned_on.json | 5 ---- .../services/test_bitwarden/undefined.json | 5 ---- .../services/test_gitea/enable_undefined.json | 5 ---- .../services/test_gitea/turned_off.json | 5 ---- .../services/test_gitea/turned_on.json | 5 ---- .../services/test_gitea/undefined.json | 5 ---- .../test_nextcloud/enable_undefined.json | 5 ---- .../services/test_nextcloud/turned_off.json | 5 ---- .../services/test_nextcloud/turned_on.json | 5 ---- 
.../services/test_nextcloud/undefined.json | 5 ---- .../test_ocserv/enable_undefined.json | 5 ---- .../services/test_ocserv/turned_off.json | 5 ---- .../services/test_ocserv/turned_on.json | 5 ---- .../services/test_ocserv/undefined.json | 5 ---- .../test_pleroma/enable_undefined.json | 5 ---- .../services/test_pleroma/turned_off.json | 5 ---- .../services/test_pleroma/turned_on.json | 5 ---- .../services/test_pleroma/undefined.json | 5 ---- .../services/test_restic.py | 29 +++++++++++-------- .../services/test_restic/no_values.json | 6 ---- .../services/test_restic/some_values.json | 9 ++---- .../services/test_restic/undefined.json | 6 ---- .../services/test_ssh/all_off.json | 5 ---- .../test_ssh/root_and_admin_have_keys.json | 5 ---- .../services/test_ssh/some_users.json | 5 ---- .../services/test_ssh/turned_off.json | 5 ---- .../services/test_ssh/turned_on.json | 5 ---- .../services/test_ssh/undefined.json | 5 ---- .../services/test_ssh/undefined_values.json | 5 ---- .../test_system/no_values.json | 5 ---- .../test_system/turned_off.json | 5 ---- .../test_system/turned_on.json | 5 ---- .../test_system/undefined.json | 5 ---- .../test_users/no_users.json | 5 ---- .../test_users/one_user.json | 5 ---- .../test_users/some_users.json | 5 ---- .../test_users/undefined.json | 5 ---- 52 files changed, 28 insertions(+), 274 deletions(-) diff --git a/selfprivacy_api/rest/services.py b/selfprivacy_api/rest/services.py index c9d5ff9..ca2bc9b 100644 --- a/selfprivacy_api/rest/services.py +++ b/selfprivacy_api/rest/services.py @@ -257,24 +257,25 @@ async def restore_restic_backup(backup: BackupRestoreInput): raise HTTPException(status_code=404, detail="Backup not found") -class BackblazeConfigInput(BaseModel): +class BackupConfigInput(BaseModel): accountId: str accountKey: str bucket: str @router.put("/restic/backblaze/config") -async def set_backblaze_config(backblaze_config: BackblazeConfigInput): +async def set_backblaze_config(backup_config: BackupConfigInput): with 
WriteUserData() as data: - if "backblaze" not in data: - data["backblaze"] = {} - data["backblaze"]["accountId"] = backblaze_config.accountId - data["backblaze"]["accountKey"] = backblaze_config.accountKey - data["backblaze"]["bucket"] = backblaze_config.bucket + if "backup" not in data: + data["backup"] = {} + data["backup"]["provider"] = "BACKBLAZE" + data["backup"]["accountId"] = backup_config.accountId + data["backup"]["accountKey"] = backup_config.accountKey + data["backup"]["bucket"] = backup_config.bucket restic_tasks.update_keys_from_userdata() - return "New Backblaze settings saved" + return "New backup settings saved" @router.post("/ssh/enable") diff --git a/tests/test_block_device_utils/no_devices.json b/tests/test_block_device_utils/no_devices.json index 0918cd8..c395b21 100644 --- a/tests/test_block_device_utils/no_devices.json +++ b/tests/test_block_device_utils/no_devices.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_block_device_utils/only_root.json b/tests/test_block_device_utils/only_root.json index 9c064c0..1026ed0 100644 --- a/tests/test_block_device_utils/only_root.json +++ b/tests/test_block_device_utils/only_root.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_block_device_utils/undefined.json b/tests/test_block_device_utils/undefined.json index e764ebc..f5edda8 100644 --- a/tests/test_block_device_utils/undefined.json +++ b/tests/test_block_device_utils/undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_graphql/test_ssh/some_users.json b/tests/test_graphql/test_ssh/some_users.json index 6bc3525..c02d216 
100644 --- a/tests/test_graphql/test_ssh/some_users.json +++ b/tests/test_graphql/test_ssh/some_users.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_graphql/test_system/no_values.json b/tests/test_graphql/test_system/no_values.json index 110c762..779691f 100644 --- a/tests/test_graphql/test_system/no_values.json +++ b/tests/test_graphql/test_system/no_values.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_graphql/test_system/turned_off.json b/tests/test_graphql/test_system/turned_off.json index 3fdbb8a..5fc287c 100644 --- a/tests/test_graphql/test_system/turned_off.json +++ b/tests/test_graphql/test_system/turned_off.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_graphql/test_system/turned_on.json b/tests/test_graphql/test_system/turned_on.json index ab1723f..c6b758b 100644 --- a/tests/test_graphql/test_system/turned_on.json +++ b/tests/test_graphql/test_system/turned_on.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_graphql/test_system/undefined.json b/tests/test_graphql/test_system/undefined.json index 1720527..2e31fea 100644 --- a/tests/test_graphql/test_system/undefined.json +++ b/tests/test_graphql/test_system/undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_graphql/test_users/no_users.json 
b/tests/test_graphql/test_users/no_users.json index 27207fb..a40fb88 100644 --- a/tests/test_graphql/test_users/no_users.json +++ b/tests/test_graphql/test_users/no_users.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_graphql/test_users/one_user.json b/tests/test_graphql/test_users/one_user.json index bd3efc8..7e1cced 100644 --- a/tests/test_graphql/test_users/one_user.json +++ b/tests/test_graphql/test_users/one_user.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_graphql/test_users/some_users.json b/tests/test_graphql/test_users/some_users.json index 6bc3525..c02d216 100644 --- a/tests/test_graphql/test_users/some_users.json +++ b/tests/test_graphql/test_users/some_users.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_graphql/test_users/undefined.json b/tests/test_graphql/test_users/undefined.json index f0826c4..ae9cd9e 100644 --- a/tests/test_graphql/test_users/undefined.json +++ b/tests/test_graphql/test_users/undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json index e359a16..1a95e85 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - 
}, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json index fcad1f5..c1691ea 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json index 7ad9081..42999d8 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json index 1deab31..ee288c2 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json +++ b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json index fd67d96..f9fb878 100644 --- a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": 
false diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_off.json b/tests/test_rest_endpoints/services/test_gitea/turned_off.json index fcad1f5..c1691ea 100644 --- a/tests/test_rest_endpoints/services/test_gitea/turned_off.json +++ b/tests/test_rest_endpoints/services/test_gitea/turned_off.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_on.json b/tests/test_rest_endpoints/services/test_gitea/turned_on.json index 50ce50b..f9a1eaf 100644 --- a/tests/test_rest_endpoints/services/test_gitea/turned_on.json +++ b/tests/test_rest_endpoints/services/test_gitea/turned_on.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_gitea/undefined.json b/tests/test_rest_endpoints/services/test_gitea/undefined.json index d6187cc..a50a070 100644 --- a/tests/test_rest_endpoints/services/test_gitea/undefined.json +++ b/tests/test_rest_endpoints/services/test_gitea/undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json index 8f00842..19f1f2d 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json 
b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json index 8b6f466..b80ad9e 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json index fcad1f5..c1691ea 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json index dd13c56..46c09f3 100644 --- a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json +++ b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json index a509d77..e080110 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json 
b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json index 8542a7c..1c08123 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json +++ b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json index 8b6f466..b80ad9e 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json +++ b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_ocserv/undefined.json b/tests/test_rest_endpoints/services/test_ocserv/undefined.json index 7ffae11..12eb73a 100644 --- a/tests/test_rest_endpoints/services/test_ocserv/undefined.json +++ b/tests/test_rest_endpoints/services/test_ocserv/undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json index 5d359c2..0903875 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json +++ b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json index 
74bfb7c..813c01f 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json +++ b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json index 8542a7c..1c08123 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json +++ b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_pleroma/undefined.json b/tests/test_rest_endpoints/services/test_pleroma/undefined.json index dcfe1e1..77d8ad2 100644 --- a/tests/test_rest_endpoints/services/test_pleroma/undefined.json +++ b/tests/test_rest_endpoints/services/test_pleroma/undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_restic.py b/tests/test_rest_endpoints/services/test_restic.py index 9502be5..844ff34 100644 --- a/tests/test_rest_endpoints/services/test_restic.py +++ b/tests/test_rest_endpoints/services/test_restic.py @@ -161,7 +161,7 @@ def mock_restic_tasks(mocker): @pytest.fixture def undefined_settings(mocker, datadir): mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "backblaze" not in read_json(datadir / "undefined.json") + assert "backup" not in read_json(datadir / "undefined.json") return datadir @@ -170,20 +170,22 @@ def some_settings(mocker, datadir): mocker.patch( "selfprivacy_api.utils.USERDATA_FILE", new=datadir / 
"some_values.json" ) - assert "backblaze" in read_json(datadir / "some_values.json") - assert read_json(datadir / "some_values.json")["backblaze"]["accountId"] == "ID" - assert read_json(datadir / "some_values.json")["backblaze"]["accountKey"] == "KEY" - assert read_json(datadir / "some_values.json")["backblaze"]["bucket"] == "BUCKET" + assert "backup" in read_json(datadir / "some_values.json") + assert read_json(datadir / "some_values.json")["backup"]["provider"] == "BACKBLAZE" + assert read_json(datadir / "some_values.json")["backup"]["accountId"] == "ID" + assert read_json(datadir / "some_values.json")["backup"]["accountKey"] == "KEY" + assert read_json(datadir / "some_values.json")["backup"]["bucket"] == "BUCKET" return datadir @pytest.fixture def no_values(mocker, datadir): mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_values.json") - assert "backblaze" in read_json(datadir / "no_values.json") - assert "accountId" not in read_json(datadir / "no_values.json")["backblaze"] - assert "accountKey" not in read_json(datadir / "no_values.json")["backblaze"] - assert "bucket" not in read_json(datadir / "no_values.json")["backblaze"] + assert "backup" in read_json(datadir / "no_values.json") + assert "provider" not in read_json(datadir / "no_values.json")["backup"] + assert "accountId" not in read_json(datadir / "no_values.json")["backup"] + assert "accountKey" not in read_json(datadir / "no_values.json")["backup"] + assert "bucket" not in read_json(datadir / "no_values.json")["backup"] return datadir @@ -462,7 +464,8 @@ def test_set_backblaze_config( ) assert response.status_code == 200 assert mock_restic_tasks.update_keys_from_userdata.call_count == 1 - assert read_json(some_settings / "some_values.json")["backblaze"] == { + assert read_json(some_settings / "some_values.json")["backup"] == { + "provider": "BACKBLAZE", "accountId": "123", "accountKey": "456", "bucket": "789", @@ -478,7 +481,8 @@ def test_set_backblaze_config_on_undefined( ) 
assert response.status_code == 200 assert mock_restic_tasks.update_keys_from_userdata.call_count == 1 - assert read_json(undefined_settings / "undefined.json")["backblaze"] == { + assert read_json(undefined_settings / "undefined.json")["backup"] == { + "provider": "BACKBLAZE", "accountId": "123", "accountKey": "456", "bucket": "789", @@ -494,7 +498,8 @@ def test_set_backblaze_config_on_no_values( ) assert response.status_code == 200 assert mock_restic_tasks.update_keys_from_userdata.call_count == 1 - assert read_json(no_values / "no_values.json")["backblaze"] == { + assert read_json(no_values / "no_values.json")["backup"] == { + "provider": "BACKBLAZE", "accountId": "123", "accountKey": "456", "bucket": "789", diff --git a/tests/test_rest_endpoints/services/test_restic/no_values.json b/tests/test_rest_endpoints/services/test_restic/no_values.json index 0a8a0a0..3b4a2f5 100644 --- a/tests/test_rest_endpoints/services/test_restic/no_values.json +++ b/tests/test_rest_endpoints/services/test_restic/no_values.json @@ -1,6 +1,4 @@ { - "backblaze": { - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false @@ -70,9 +68,5 @@ "provider": "HETZNER" }, "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_restic/some_values.json b/tests/test_rest_endpoints/services/test_restic/some_values.json index aa3a19f..c003d10 100644 --- a/tests/test_rest_endpoints/services/test_restic/some_values.json +++ b/tests/test_rest_endpoints/services/test_restic/some_values.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "BUCKET" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false @@ -76,6 +71,6 @@ "provider": "BACKBLAZE", "accountId": "ID", "accountKey": "KEY", - "bucket": "selfprivacy" + "bucket": "BUCKET" } -} \ No newline at end of file +} diff --git 
a/tests/test_rest_endpoints/services/test_restic/undefined.json b/tests/test_rest_endpoints/services/test_restic/undefined.json index df6380a..5bd1220 100644 --- a/tests/test_rest_endpoints/services/test_restic/undefined.json +++ b/tests/test_rest_endpoints/services/test_restic/undefined.json @@ -66,11 +66,5 @@ }, "server": { "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/all_off.json b/tests/test_rest_endpoints/services/test_ssh/all_off.json index 97d7690..051d364 100644 --- a/tests/test_rest_endpoints/services/test_ssh/all_off.json +++ b/tests/test_rest_endpoints/services/test_ssh/all_off.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json index fcad1f5..c1691ea 100644 --- a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json +++ b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_ssh/some_users.json b/tests/test_rest_endpoints/services/test_ssh/some_users.json index 0706d9e..df6380a 100644 --- a/tests/test_rest_endpoints/services/test_ssh/some_users.json +++ b/tests/test_rest_endpoints/services/test_ssh/some_users.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git 
a/tests/test_rest_endpoints/services/test_ssh/turned_off.json b/tests/test_rest_endpoints/services/test_ssh/turned_off.json index 92b2196..3856c80 100644 --- a/tests/test_rest_endpoints/services/test_ssh/turned_off.json +++ b/tests/test_rest_endpoints/services/test_ssh/turned_off.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_ssh/turned_on.json b/tests/test_rest_endpoints/services/test_ssh/turned_on.json index 7434d16..e60c57f 100644 --- a/tests/test_rest_endpoints/services/test_ssh/turned_on.json +++ b/tests/test_rest_endpoints/services/test_ssh/turned_on.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined.json b/tests/test_rest_endpoints/services/test_ssh/undefined.json index 290b396..7c9af37 100644 --- a/tests/test_rest_endpoints/services/test_ssh/undefined.json +++ b/tests/test_rest_endpoints/services/test_ssh/undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json index a570880..b7b03d3 100644 --- a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json +++ b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/test_system/no_values.json b/tests/test_rest_endpoints/test_system/no_values.json index 442b62d..5c1431e 
100644 --- a/tests/test_rest_endpoints/test_system/no_values.json +++ b/tests/test_rest_endpoints/test_system/no_values.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/test_system/turned_off.json b/tests/test_rest_endpoints/test_system/turned_off.json index 3a4a238..2336f36 100644 --- a/tests/test_rest_endpoints/test_system/turned_off.json +++ b/tests/test_rest_endpoints/test_system/turned_off.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/test_system/turned_on.json b/tests/test_rest_endpoints/test_system/turned_on.json index 7ad9081..42999d8 100644 --- a/tests/test_rest_endpoints/test_system/turned_on.json +++ b/tests/test_rest_endpoints/test_system/turned_on.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/test_system/undefined.json b/tests/test_rest_endpoints/test_system/undefined.json index fd1efad..6b9f3fd 100644 --- a/tests/test_rest_endpoints/test_system/undefined.json +++ b/tests/test_rest_endpoints/test_system/undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/test_users/no_users.json b/tests/test_rest_endpoints/test_users/no_users.json index dadb584..5929a79 100644 --- a/tests/test_rest_endpoints/test_users/no_users.json +++ b/tests/test_rest_endpoints/test_users/no_users.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", 
"enableSwagger": false diff --git a/tests/test_rest_endpoints/test_users/one_user.json b/tests/test_rest_endpoints/test_users/one_user.json index 71d6338..6c553bc 100644 --- a/tests/test_rest_endpoints/test_users/one_user.json +++ b/tests/test_rest_endpoints/test_users/one_user.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/test_users/some_users.json b/tests/test_rest_endpoints/test_users/some_users.json index 0706d9e..df6380a 100644 --- a/tests/test_rest_endpoints/test_users/some_users.json +++ b/tests/test_rest_endpoints/test_users/some_users.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false diff --git a/tests/test_rest_endpoints/test_users/undefined.json b/tests/test_rest_endpoints/test_users/undefined.json index fcad1f5..c1691ea 100644 --- a/tests/test_rest_endpoints/test_users/undefined.json +++ b/tests/test_rest_endpoints/test_users/undefined.json @@ -1,9 +1,4 @@ { - "backblaze": { - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, "api": { "token": "TEST_TOKEN", "enableSwagger": false From f7b7e5a0be6995bc671a750395142cff5fa5a745 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 16 Nov 2022 13:54:54 +0000 Subject: [PATCH 019/129] migrate Jobs to redis --- selfprivacy_api/jobs/__init__.py | 114 +++++++++++++++++++------------ tests/test_jobs.py | 3 +- 2 files changed, 71 insertions(+), 46 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 0dcfd66..7ecc8c9 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -17,13 +17,16 @@ A job is a dictionary with the following keys: import typing import datetime from uuid import UUID -import json import uuid from enum import Enum from 
pydantic import BaseModel -from selfprivacy_api.utils import ReadUserData, UserDataFiles, WriteUserData +from selfprivacy_api.utils.redis_pool import RedisPool +import asyncio + + +loop = asyncio.get_event_loop() class JobStatus(Enum): @@ -66,8 +69,11 @@ class Jobs: """ Reset the jobs list. """ - with WriteUserData(UserDataFiles.JOBS) as user_data: - user_data["jobs"] = [] + r = RedisPool().get_connection() + jobs = Jobs.get_jobs() + for job in jobs: + loop.run_until_complete(r.delete(redis_key_from_uuid(job.uid))) + loop.run_until_complete(r.delete("jobs")) @staticmethod def add( @@ -95,13 +101,10 @@ class Jobs: error=None, result=None, ) - with WriteUserData(UserDataFiles.JOBS) as user_data: - try: - if "jobs" not in user_data: - user_data["jobs"] = [] - user_data["jobs"].append(json.loads(job.json())) - except json.decoder.JSONDecodeError: - user_data["jobs"] = [json.loads(job.json())] + r = RedisPool().get_connection() + store_job_as_hash(r, redis_key_from_uuid(job.uid), job) + coroutine = r.lpush("jobs", redis_key_from_uuid(job.uid)) + loop.run_until_complete(coroutine) return job @staticmethod @@ -116,13 +119,10 @@ class Jobs: """ Remove a job from the jobs list. 
""" - with WriteUserData(UserDataFiles.JOBS) as user_data: - if "jobs" not in user_data: - user_data["jobs"] = [] - for i, j in enumerate(user_data["jobs"]): - if j["uid"] == job_uuid: - del user_data["jobs"][i] - return True + r = RedisPool().get_connection() + key = redis_key_from_uuid(job_uuid) + loop.run_until_complete(r.delete(key)) + loop.run_until_complete(r.lrem("jobs", 0, key)) return False @staticmethod @@ -154,13 +154,10 @@ class Jobs: if status in (JobStatus.FINISHED, JobStatus.ERROR): job.finished_at = datetime.datetime.now() - with WriteUserData(UserDataFiles.JOBS) as user_data: - if "jobs" not in user_data: - user_data["jobs"] = [] - for i, j in enumerate(user_data["jobs"]): - if j["uid"] == str(job.uid): - user_data["jobs"][i] = json.loads(job.json()) - break + r = RedisPool().get_connection() + key = redis_key_from_uuid(job.uid) + if exists_sync(r, key): + store_job_as_hash(r, key, job) return job @@ -169,12 +166,10 @@ class Jobs: """ Get a job from the jobs list. """ - with ReadUserData(UserDataFiles.JOBS) as user_data: - if "jobs" not in user_data: - user_data["jobs"] = [] - for job in user_data["jobs"]: - if job["uid"] == uid: - return Job(**job) + r = RedisPool().get_connection() + key = redis_key_from_uuid(uid) + if exists_sync(r, key): + return job_from_hash(r, key) return None @staticmethod @@ -182,23 +177,54 @@ class Jobs: """ Get the jobs list. """ - with ReadUserData(UserDataFiles.JOBS) as user_data: - try: - if "jobs" not in user_data: - user_data["jobs"] = [] - return [Job(**job) for job in user_data["jobs"]] - except json.decoder.JSONDecodeError: - return [] + r = RedisPool().get_connection() + jobs = loop.run_until_complete(r.lrange("jobs", 0, -1)) + return [job_from_hash(r, job_key) for job_key in jobs] @staticmethod def is_busy() -> bool: """ Check if there is a job running. 
""" - with ReadUserData(UserDataFiles.JOBS) as user_data: - if "jobs" not in user_data: - user_data["jobs"] = [] - for job in user_data["jobs"]: - if job["status"] == JobStatus.RUNNING.value: - return True + for job in Jobs.get_jobs(): + if job["status"] == JobStatus.RUNNING.value: + return True return False + + +def redis_key_from_uuid(uuid): + return "jobs:" + str(uuid) + + +def store_job_as_hash(r, redis_key, model): + for key, value in model.dict().items(): + if isinstance(value, uuid.UUID): + value = str(value) + if isinstance(value, datetime.datetime): + value = value.isoformat() + if isinstance(value, JobStatus): + value = value.value + coroutine = r.hset(redis_key, key, str(value)) + loop.run_until_complete(coroutine) + + +def job_from_hash(r, redis_key): + if exists_sync(r, redis_key): + job_dict = loop.run_until_complete(r.hgetall(redis_key)) + for date in [ + "created_at", + "updated_at", + "finished_at", + ]: + if job_dict[date] != "None": + job_dict[date] = datetime.datetime.fromisoformat(job_dict[date]) + for key in job_dict.keys(): + if job_dict[key] == "None": + job_dict[key] = None + + return Job(**job_dict) + return None + + +def exists_sync(r, key): + return loop.run_until_complete(r.exists(key)) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 87f1386..65d58e4 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -1,14 +1,13 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument -import json import pytest -from selfprivacy_api.utils import WriteUserData, ReadUserData from selfprivacy_api.jobs import Jobs, JobStatus def test_jobs(authorized_client, jobs_file, shared_datadir): jobs = Jobs() + jobs.reset() assert jobs.get_jobs() == [] test_job = jobs.add( From 5afa2338ca3513d4d5e0c15783d503762951ca81 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 23 Nov 2022 12:32:46 +0000 Subject: [PATCH 020/129] Migrate Jobs to redis Jobs API shall now use redis to store and retrieve jobs. 
This will make it possible to add pubsub for jobs updates. For now it uses blocking api of redis. --- selfprivacy_api/jobs/__init__.py | 24 +++++++++--------------- selfprivacy_api/utils/redis_pool.py | 2 +- 2 files changed, 10 insertions(+), 16 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 7ecc8c9..7c16afd 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -23,10 +23,6 @@ from enum import Enum from pydantic import BaseModel from selfprivacy_api.utils.redis_pool import RedisPool -import asyncio - - -loop = asyncio.get_event_loop() class JobStatus(Enum): @@ -72,8 +68,8 @@ class Jobs: r = RedisPool().get_connection() jobs = Jobs.get_jobs() for job in jobs: - loop.run_until_complete(r.delete(redis_key_from_uuid(job.uid))) - loop.run_until_complete(r.delete("jobs")) + r.delete(redis_key_from_uuid(job.uid)) + r.delete("jobs") @staticmethod def add( @@ -103,8 +99,7 @@ class Jobs: ) r = RedisPool().get_connection() store_job_as_hash(r, redis_key_from_uuid(job.uid), job) - coroutine = r.lpush("jobs", redis_key_from_uuid(job.uid)) - loop.run_until_complete(coroutine) + r.lpush("jobs", redis_key_from_uuid(job.uid)) return job @staticmethod @@ -121,8 +116,8 @@ class Jobs: """ r = RedisPool().get_connection() key = redis_key_from_uuid(job_uuid) - loop.run_until_complete(r.delete(key)) - loop.run_until_complete(r.lrem("jobs", 0, key)) + r.delete(key) + r.lrem("jobs", 0, key) return False @staticmethod @@ -178,7 +173,7 @@ class Jobs: Get the jobs list. 
""" r = RedisPool().get_connection() - jobs = loop.run_until_complete(r.lrange("jobs", 0, -1)) + jobs = r.lrange("jobs", 0, -1) return [job_from_hash(r, job_key) for job_key in jobs] @staticmethod @@ -204,13 +199,12 @@ def store_job_as_hash(r, redis_key, model): value = value.isoformat() if isinstance(value, JobStatus): value = value.value - coroutine = r.hset(redis_key, key, str(value)) - loop.run_until_complete(coroutine) + r.hset(redis_key, key, str(value)) def job_from_hash(r, redis_key): if exists_sync(r, redis_key): - job_dict = loop.run_until_complete(r.hgetall(redis_key)) + job_dict = r.hgetall(redis_key) for date in [ "created_at", "updated_at", @@ -227,4 +221,4 @@ def job_from_hash(r, redis_key): def exists_sync(r, key): - return loop.run_until_complete(r.exists(key)) + return r.exists(key) diff --git a/selfprivacy_api/utils/redis_pool.py b/selfprivacy_api/utils/redis_pool.py index e4e98ac..a87049c 100644 --- a/selfprivacy_api/utils/redis_pool.py +++ b/selfprivacy_api/utils/redis_pool.py @@ -1,7 +1,7 @@ """ Redis pool module for selfprivacy_api """ -import redis.asyncio as redis +import redis from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass REDIS_SOCKET = "/run/redis-sp-api/redis.sock" From 244851c7cc818f6471bc5c66ff595e73e5d14287 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 23 Nov 2022 15:04:39 +0000 Subject: [PATCH 021/129] jobs: remove 'jobs' list, and use 'jobs:' prefix Less complexity, easier to add redis-native TTL --- selfprivacy_api/jobs/__init__.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 7c16afd..4fa820c 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -69,7 +69,6 @@ class Jobs: jobs = Jobs.get_jobs() for job in jobs: r.delete(redis_key_from_uuid(job.uid)) - r.delete("jobs") @staticmethod def add( @@ -99,7 +98,6 @@ class Jobs: ) r = RedisPool().get_connection() 
store_job_as_hash(r, redis_key_from_uuid(job.uid), job) - r.lpush("jobs", redis_key_from_uuid(job.uid)) return job @staticmethod @@ -117,7 +115,6 @@ class Jobs: r = RedisPool().get_connection() key = redis_key_from_uuid(job_uuid) r.delete(key) - r.lrem("jobs", 0, key) return False @staticmethod @@ -151,7 +148,7 @@ class Jobs: r = RedisPool().get_connection() key = redis_key_from_uuid(job.uid) - if exists_sync(r, key): + if r.exists(key): store_job_as_hash(r, key, job) return job @@ -163,7 +160,7 @@ class Jobs: """ r = RedisPool().get_connection() key = redis_key_from_uuid(uid) - if exists_sync(r, key): + if r.exists(key): return job_from_hash(r, key) return None @@ -173,7 +170,7 @@ class Jobs: Get the jobs list. """ r = RedisPool().get_connection() - jobs = r.lrange("jobs", 0, -1) + jobs = r.keys("jobs:*") return [job_from_hash(r, job_key) for job_key in jobs] @staticmethod @@ -203,7 +200,7 @@ def store_job_as_hash(r, redis_key, model): def job_from_hash(r, redis_key): - if exists_sync(r, redis_key): + if r.exists(redis_key): job_dict = r.hgetall(redis_key) for date in [ "created_at", @@ -218,7 +215,3 @@ def job_from_hash(r, redis_key): return Job(**job_dict) return None - - -def exists_sync(r, key): - return r.exists(key) From d6ef01c0c7f91665925a20fde4ec3c37e9e5a6dc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 23 Nov 2022 16:29:50 +0000 Subject: [PATCH 022/129] Add TTL to storage of finished or failed jobs Defaulting to 10 days. 
--- selfprivacy_api/jobs/__init__.py | 4 ++++ tests/test_jobs.py | 14 ++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 4fa820c..9e4d961 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -24,6 +24,8 @@ from pydantic import BaseModel from selfprivacy_api.utils.redis_pool import RedisPool +JOB_EXPIRATION_SECONDS = 10 * 24 * 60 * 60 # ten days + class JobStatus(Enum): """ @@ -150,6 +152,8 @@ class Jobs: key = redis_key_from_uuid(job.uid) if r.exists(key): store_job_as_hash(r, key, job) + if status in (JobStatus.FINISHED, JobStatus.ERROR): + r.expire(key, JOB_EXPIRATION_SECONDS) return job diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 65d58e4..371dca4 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -3,6 +3,7 @@ import pytest from selfprivacy_api.jobs import Jobs, JobStatus +import selfprivacy_api.jobs as jobsmodule def test_jobs(authorized_client, jobs_file, shared_datadir): @@ -30,6 +31,19 @@ def test_jobs(authorized_client, jobs_file, shared_datadir): assert jobs.get_jobs() == [test_job] + backup = jobsmodule.JOB_EXPIRATION_SECONDS + jobsmodule.JOB_EXPIRATION_SECONDS = 0 + + jobs.update( + job=test_job, + status=JobStatus.FINISHED, + status_text="Yaaay!", + progress=100, + ) + + assert jobs.get_jobs() == [] + jobsmodule.JOB_EXPIRATION_SECONDS = backup + @pytest.fixture def mock_subprocess_run(mocker): From 2e9cdf15ab96dcd58f279b795e1c886c0e4f4660 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 28 Nov 2022 11:33:28 +0000 Subject: [PATCH 023/129] Autostart a redis instance in nix shell. As a part of test environment, an unprivileged redis server is started on localhost:6379. Redis connection pool detects when it is run in a development nix shell and uses this port instead of a production unix socket. This way, redis tests pass even on computers without redis installed. 
--- selfprivacy_api/utils/redis_pool.py | 17 +++++++++++++---- shell.nix | 5 +++++ 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/utils/redis_pool.py b/selfprivacy_api/utils/redis_pool.py index a87049c..2f2cf21 100644 --- a/selfprivacy_api/utils/redis_pool.py +++ b/selfprivacy_api/utils/redis_pool.py @@ -3,6 +3,7 @@ Redis pool module for selfprivacy_api """ import redis from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass +from os import environ REDIS_SOCKET = "/run/redis-sp-api/redis.sock" @@ -13,10 +14,18 @@ class RedisPool(metaclass=SingletonMetaclass): """ def __init__(self): - self._pool = redis.ConnectionPool.from_url( - f"unix://{REDIS_SOCKET}", - decode_responses=True, - ) + if "USE_REDIS_PORT" in environ.keys(): + self._pool = redis.ConnectionPool( + host="127.0.0.1", + port=int(environ["USE_REDIS_PORT"]), + decode_responses=True, + ) + + else: + self._pool = redis.ConnectionPool.from_url( + f"unix://{REDIS_SOCKET}", + decode_responses=True, + ) self._pubsub_connection = self.get_connection() def get_connection(self): diff --git a/shell.nix b/shell.nix index b6d8e32..4589192 100644 --- a/shell.nix +++ b/shell.nix @@ -64,9 +64,14 @@ pkgs.mkShell { buildInputs = [ sp-python pkgs.black + pkgs.redis ]; shellHook = '' PYTHONPATH=${sp-python}/${sp-python.sitePackages} + # envs set with export and as attributes are treated differently. + # for example. printenv will not fetch the value of an attribute. 
+ export USE_REDIS_PORT=6379 + redis-server --port $USE_REDIS_PORT >/dev/null & # maybe set more env-vars ''; } From 57794c95354cc70df1e7e3ce4aefe5f5f72b11bc Mon Sep 17 00:00:00 2001 From: Inex Code Date: Mon, 28 Nov 2022 15:49:48 +0300 Subject: [PATCH 024/129] style(graphql): Add missing comma \ --- selfprivacy_api/graphql/queries/system.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index 112fa8c..cc30fd7 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -136,7 +136,7 @@ def get_system_provider_info() -> SystemProviderInfo: with ReadUserData() as user_data: return SystemProviderInfo( provider=user_data["server"]["provider"], - id="UNKNOWN" + id="UNKNOWN", ) From 144f95fb34cea438a0251edfc077f48b14584f92 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 30 Nov 2022 11:20:08 +0000 Subject: [PATCH 025/129] bind nix-shell's redis to localhost For security, mainly. Also, limit to one process. A follow up to #22 --- shell.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/shell.nix b/shell.nix index 4589192..668da69 100644 --- a/shell.nix +++ b/shell.nix @@ -71,7 +71,8 @@ pkgs.mkShell { # envs set with export and as attributes are treated differently. # for example. printenv will not fetch the value of an attribute. 
export USE_REDIS_PORT=6379 - redis-server --port $USE_REDIS_PORT >/dev/null & + pkill redis-server + redis-server --bind 127.0.0.1 --port $USE_REDIS_PORT >/dev/null & # maybe set more env-vars ''; } From c21b6cb071c49afbfa1fc90c8df9b29d95337711 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 30 Nov 2022 14:17:53 +0000 Subject: [PATCH 026/129] jobs: dedicated reset test --- tests/test_jobs.py | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 371dca4..220cebc 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -6,11 +6,22 @@ from selfprivacy_api.jobs import Jobs, JobStatus import selfprivacy_api.jobs as jobsmodule -def test_jobs(authorized_client, jobs_file, shared_datadir): - jobs = Jobs() +def test_add_reset(jobs): + test_job = jobs.add( + type_id="test", + name="Test job", + description="This is a test job.", + status=JobStatus.CREATED, + status_text="Status text", + progress=0, + ) + assert jobs.get_jobs() == [test_job] jobs.reset() assert jobs.get_jobs() == [] + +def test_jobs(jobs): + test_job = jobs.add( type_id="test", name="Test job", @@ -19,7 +30,6 @@ def test_jobs(authorized_client, jobs_file, shared_datadir): status_text="Status text", progress=0, ) - assert jobs.get_jobs() == [test_job] jobs.update( @@ -46,18 +56,9 @@ def test_jobs(authorized_client, jobs_file, shared_datadir): @pytest.fixture -def mock_subprocess_run(mocker): - mock = mocker.patch("subprocess.run", autospec=True) - return mock - - -@pytest.fixture -def mock_shutil_move(mocker): - mock = mocker.patch("shutil.move", autospec=True) - return mock - - -@pytest.fixture -def mock_shutil_chown(mocker): - mock = mocker.patch("shutil.chown", autospec=True) - return mock +def jobs(): + j = Jobs() + j.reset() + assert j.get_jobs() == [] + yield j + j.reset() From b6eeec23ccc185f3542488f4858892095b5a4487 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 30 Nov 2022 14:31:37 +0000 
Subject: [PATCH 027/129] jobs: singlejob fixture --- tests/test_jobs.py | 43 ++++++++++++++++++++----------------------- 1 file changed, 20 insertions(+), 23 deletions(-) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 220cebc..63b06d1 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -6,31 +6,14 @@ from selfprivacy_api.jobs import Jobs, JobStatus import selfprivacy_api.jobs as jobsmodule -def test_add_reset(jobs): - test_job = jobs.add( - type_id="test", - name="Test job", - description="This is a test job.", - status=JobStatus.CREATED, - status_text="Status text", - progress=0, - ) - assert jobs.get_jobs() == [test_job] - jobs.reset() - assert jobs.get_jobs() == [] +def test_add_reset(jobs_with_one_job): + jobs_with_one_job.reset() + assert jobs_with_one_job.get_jobs() == [] -def test_jobs(jobs): - - test_job = jobs.add( - type_id="test", - name="Test job", - description="This is a test job.", - status=JobStatus.CREATED, - status_text="Status text", - progress=0, - ) - assert jobs.get_jobs() == [test_job] +def test_jobs(jobs_with_one_job): + jobs = jobs_with_one_job + test_job = jobs_with_one_job.get_jobs()[0] jobs.update( job=test_job, @@ -62,3 +45,17 @@ def jobs(): assert j.get_jobs() == [] yield j j.reset() + + +@pytest.fixture +def jobs_with_one_job(jobs): + test_job = jobs.add( + type_id="test", + name="Test job", + description="This is a test job.", + status=JobStatus.CREATED, + status_text="Status text", + progress=0, + ) + assert jobs.get_jobs() == [test_job] + return jobs From 106a083ca2f6d27484f83da5853d106b462d8071 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 30 Nov 2022 14:47:56 +0000 Subject: [PATCH 028/129] jobs: simplify reset Also ups test coverage --- selfprivacy_api/jobs/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 9e4d961..a3007a4 100644 --- a/selfprivacy_api/jobs/__init__.py +++ 
b/selfprivacy_api/jobs/__init__.py @@ -67,10 +67,9 @@ class Jobs: """ Reset the jobs list. """ - r = RedisPool().get_connection() jobs = Jobs.get_jobs() for job in jobs: - r.delete(redis_key_from_uuid(job.uid)) + Jobs.remove(job) @staticmethod def add( From 7acbba996057220e94bad77254af88d1e90a9e82 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 30 Nov 2022 15:12:42 +0000 Subject: [PATCH 029/129] jobs: minimal update test --- tests/test_jobs.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 63b06d1..c7eb3d6 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -11,6 +11,15 @@ def test_add_reset(jobs_with_one_job): assert jobs_with_one_job.get_jobs() == [] +def test_minimal_update(jobs_with_one_job): + jobs = jobs_with_one_job + test_job = jobs_with_one_job.get_jobs()[0] + + jobs.update(job=test_job, status=JobStatus.ERROR) + + assert jobs.get_jobs() == [test_job] + + def test_jobs(jobs_with_one_job): jobs = jobs_with_one_job test_job = jobs_with_one_job.get_jobs()[0] From f51e378ff0061001517425604e6f2c8be3144ae9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 30 Nov 2022 15:21:57 +0000 Subject: [PATCH 030/129] jobs: test updating more fields --- tests/test_jobs.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index c7eb3d6..df5c952 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -26,6 +26,8 @@ def test_jobs(jobs_with_one_job): jobs.update( job=test_job, + name="Write Tests", + description="An oddly satisfying experience", status=JobStatus.RUNNING, status_text="Status text", progress=50, From 870d2c408de938e1b35fa215ccf683e09716126d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 30 Nov 2022 15:37:59 +0000 Subject: [PATCH 031/129] jobs: test nofail at nonexistent update --- tests/test_jobs.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index df5c952..7ef9a9b 100644 --- 
a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -20,6 +20,16 @@ def test_minimal_update(jobs_with_one_job): assert jobs.get_jobs() == [test_job] +def test_remove_update_nonexistent(jobs_with_one_job): + test_job = jobs_with_one_job.get_jobs()[0] + + jobs_with_one_job.remove(test_job) + assert jobs_with_one_job.get_jobs() == [] + + result = jobs_with_one_job.update(job=test_job, status=JobStatus.ERROR) + assert result == test_job # even though we might consider changing this behavior + + def test_jobs(jobs_with_one_job): jobs = jobs_with_one_job test_job = jobs_with_one_job.get_jobs()[0] From 14c4ae26abc3916fc3f792f012a83ee1e327a2cd Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 30 Nov 2022 16:22:46 +0000 Subject: [PATCH 032/129] explicitly mark helper functions private I thought about making them private class members, but that would get unreadable and do more harm than good. --- selfprivacy_api/jobs/__init__.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index a3007a4..6ad8493 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -98,7 +98,7 @@ class Jobs: result=None, ) r = RedisPool().get_connection() - store_job_as_hash(r, redis_key_from_uuid(job.uid), job) + _store_job_as_hash(r, _redis_key_from_uuid(job.uid), job) return job @staticmethod @@ -114,7 +114,7 @@ class Jobs: Remove a job from the jobs list. 
""" r = RedisPool().get_connection() - key = redis_key_from_uuid(job_uuid) + key = _redis_key_from_uuid(job_uuid) r.delete(key) return False @@ -148,9 +148,9 @@ class Jobs: job.finished_at = datetime.datetime.now() r = RedisPool().get_connection() - key = redis_key_from_uuid(job.uid) + key = _redis_key_from_uuid(job.uid) if r.exists(key): - store_job_as_hash(r, key, job) + _store_job_as_hash(r, key, job) if status in (JobStatus.FINISHED, JobStatus.ERROR): r.expire(key, JOB_EXPIRATION_SECONDS) @@ -162,9 +162,9 @@ class Jobs: Get a job from the jobs list. """ r = RedisPool().get_connection() - key = redis_key_from_uuid(uid) + key = _redis_key_from_uuid(uid) if r.exists(key): - return job_from_hash(r, key) + return _job_from_hash(r, key) return None @staticmethod @@ -174,7 +174,7 @@ class Jobs: """ r = RedisPool().get_connection() jobs = r.keys("jobs:*") - return [job_from_hash(r, job_key) for job_key in jobs] + return [_job_from_hash(r, job_key) for job_key in jobs] @staticmethod def is_busy() -> bool: @@ -187,11 +187,11 @@ class Jobs: return False -def redis_key_from_uuid(uuid): +def _redis_key_from_uuid(uuid): return "jobs:" + str(uuid) -def store_job_as_hash(r, redis_key, model): +def _store_job_as_hash(r, redis_key, model): for key, value in model.dict().items(): if isinstance(value, uuid.UUID): value = str(value) @@ -202,7 +202,7 @@ def store_job_as_hash(r, redis_key, model): r.hset(redis_key, key, str(value)) -def job_from_hash(r, redis_key): +def _job_from_hash(r, redis_key): if r.exists(redis_key): job_dict = r.hgetall(redis_key) for date in [ From 5c86706f4ba3279a84aa463d4b7a4cf368397dc8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 30 Nov 2022 16:41:20 +0000 Subject: [PATCH 033/129] Jobs: fix value access in is_busy() Also added a test for is_busy() that highlighted this bug. 
--- selfprivacy_api/jobs/__init__.py | 2 +- tests/test_jobs.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 6ad8493..4267819 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -182,7 +182,7 @@ class Jobs: Check if there is a job running. """ for job in Jobs.get_jobs(): - if job["status"] == JobStatus.RUNNING.value: + if job.status == JobStatus.RUNNING: return True return False diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 7ef9a9b..3474fc3 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -33,6 +33,7 @@ def test_remove_update_nonexistent(jobs_with_one_job): def test_jobs(jobs_with_one_job): jobs = jobs_with_one_job test_job = jobs_with_one_job.get_jobs()[0] + assert not jobs.is_busy() jobs.update( job=test_job, @@ -44,6 +45,7 @@ def test_jobs(jobs_with_one_job): ) assert jobs.get_jobs() == [test_job] + assert jobs.is_busy() backup = jobsmodule.JOB_EXPIRATION_SECONDS jobsmodule.JOB_EXPIRATION_SECONDS = 0 From 063dfafc196ad094765fe70f4a9ee34affa68026 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 30 Nov 2022 17:06:43 +0000 Subject: [PATCH 034/129] Jobs: fix return value of remove_by_uid And add a test for said return value. 
--- selfprivacy_api/jobs/__init__.py | 4 +++- tests/test_jobs.py | 9 +++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 4267819..1547b84 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -115,7 +115,9 @@ class Jobs: """ r = RedisPool().get_connection() key = _redis_key_from_uuid(job_uuid) - r.delete(key) + if (r.exists(key)): + r.delete(key) + return True return False @staticmethod diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 3474fc3..d0f506c 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -20,6 +20,15 @@ def test_minimal_update(jobs_with_one_job): assert jobs.get_jobs() == [test_job] +def test_remove_by_uid(jobs_with_one_job): + test_job = jobs_with_one_job.get_jobs()[0] + uid_str = str(test_job.uid) + + assert jobs_with_one_job.remove_by_uid(uid_str) + assert jobs_with_one_job.get_jobs() == [] + assert not jobs_with_one_job.remove_by_uid(uid_str) + + def test_remove_update_nonexistent(jobs_with_one_job): test_job = jobs_with_one_job.get_jobs()[0] From d47368cbe9de081b9c5ebe952c6c7bb66debfebe Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 30 Nov 2022 17:26:38 +0000 Subject: [PATCH 035/129] Jobs: test get_job() return values Coverage is now at 99% --- tests/test_jobs.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index d0f506c..56e4aa3 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -39,6 +39,16 @@ def test_remove_update_nonexistent(jobs_with_one_job): assert result == test_job # even though we might consider changing this behavior +def test_remove_get_nonexistent(jobs_with_one_job): + test_job = jobs_with_one_job.get_jobs()[0] + uid_str = str(test_job.uid) + assert jobs_with_one_job.get_job(uid_str) == test_job + + jobs_with_one_job.remove(test_job) + + assert jobs_with_one_job.get_job(uid_str) is None + + def 
test_jobs(jobs_with_one_job): jobs = jobs_with_one_job test_job = jobs_with_one_job.get_jobs()[0] From 16e0bd56cedb27493ffcd1399e769eadf9bd68d8 Mon Sep 17 00:00:00 2001 From: def Date: Wed, 16 Nov 2022 19:12:38 +0200 Subject: [PATCH 036/129] refactor(repository): Tokens repository JSON backend (#18) Co-authored-by: def Co-authored-by: Inex Code Reviewed-on: https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/pulls/18 Co-authored-by: def Co-committed-by: def --- selfprivacy_api/models/__init__.py | 0 .../models/tokens/new_device_key.py | 46 ++ selfprivacy_api/models/tokens/recovery_key.py | 56 ++ selfprivacy_api/models/tokens/token.py | 33 + selfprivacy_api/repositories/__init__.py | 0 .../repositories/tokens/__init__.py | 8 + .../tokens/abstract_tokens_repository.py | 93 +++ .../repositories/tokens/exceptions.py | 14 + .../tokens/json_tokens_repository.py | 238 +++++++ .../tokens/redis_tokens_repository.py | 15 + .../test_repository/test_tokens_repository.py | 582 ++++++++++++++++++ .../test_tokens_repository/empty_keys.json | 9 + .../test_tokens_repository/null_keys.json | 26 + .../test_tokens_repository/tokens.json | 35 ++ tests/test_graphql/test_users.py | 1 - 15 files changed, 1155 insertions(+), 1 deletion(-) create mode 100644 selfprivacy_api/models/__init__.py create mode 100644 selfprivacy_api/models/tokens/new_device_key.py create mode 100644 selfprivacy_api/models/tokens/recovery_key.py create mode 100644 selfprivacy_api/models/tokens/token.py create mode 100644 selfprivacy_api/repositories/__init__.py create mode 100644 selfprivacy_api/repositories/tokens/__init__.py create mode 100644 selfprivacy_api/repositories/tokens/abstract_tokens_repository.py create mode 100644 selfprivacy_api/repositories/tokens/exceptions.py create mode 100644 selfprivacy_api/repositories/tokens/json_tokens_repository.py create mode 100644 selfprivacy_api/repositories/tokens/redis_tokens_repository.py create mode 100644 
tests/test_graphql/test_repository/test_tokens_repository.py create mode 100644 tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json create mode 100644 tests/test_graphql/test_repository/test_tokens_repository/null_keys.json create mode 100644 tests/test_graphql/test_repository/test_tokens_repository/tokens.json diff --git a/selfprivacy_api/models/__init__.py b/selfprivacy_api/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/models/tokens/new_device_key.py b/selfprivacy_api/models/tokens/new_device_key.py new file mode 100644 index 0000000..dda926c --- /dev/null +++ b/selfprivacy_api/models/tokens/new_device_key.py @@ -0,0 +1,46 @@ +""" +New device key used to obtain access token. +""" +from datetime import datetime, timedelta +import secrets +from pydantic import BaseModel +from mnemonic import Mnemonic + + +class NewDeviceKey(BaseModel): + """ + Recovery key used to obtain access token. + + Recovery key has a key string, date of creation, date of expiration. + """ + + key: str + created_at: datetime + expires_at: datetime + + def is_valid(self) -> bool: + """ + Check if the recovery key is valid. + """ + if self.expires_at < datetime.now(): + return False + return True + + def as_mnemonic(self) -> str: + """ + Get the recovery key as a mnemonic. + """ + return Mnemonic(language="english").to_mnemonic(bytes.fromhex(self.key)) + + @staticmethod + def generate() -> "NewDeviceKey": + """ + Factory to generate a random token. + """ + creation_date = datetime.now() + key = secrets.token_bytes(16).hex() + return NewDeviceKey( + key=key, + created_at=creation_date, + expires_at=datetime.now() + timedelta(minutes=10), + ) diff --git a/selfprivacy_api/models/tokens/recovery_key.py b/selfprivacy_api/models/tokens/recovery_key.py new file mode 100644 index 0000000..098aceb --- /dev/null +++ b/selfprivacy_api/models/tokens/recovery_key.py @@ -0,0 +1,56 @@ +""" +Recovery key used to obtain access token. 
+ +Recovery key has a token string, date of creation, optional date of expiration and optional count of uses left. +""" +from datetime import datetime +import secrets +from typing import Optional +from pydantic import BaseModel +from mnemonic import Mnemonic + + +class RecoveryKey(BaseModel): + """ + Recovery key used to obtain access token. + + Recovery key has a key string, date of creation, optional date of expiration and optional count of uses left. + """ + + key: str + created_at: datetime + expires_at: Optional[datetime] + uses_left: Optional[int] + + def is_valid(self) -> bool: + """ + Check if the recovery key is valid. + """ + if self.expires_at is not None and self.expires_at < datetime.now(): + return False + if self.uses_left is not None and self.uses_left <= 0: + return False + return True + + def as_mnemonic(self) -> str: + """ + Get the recovery key as a mnemonic. + """ + return Mnemonic(language="english").to_mnemonic(bytes.fromhex(self.key)) + + @staticmethod + def generate( + expiration: Optional[datetime], + uses_left: Optional[int], + ) -> "RecoveryKey": + """ + Factory to generate a random token. + """ + creation_date = datetime.now() + key = secrets.token_bytes(24).hex() + return RecoveryKey( + key=key, + created_at=creation_date, + expires_at=expiration, + uses_left=uses_left, + ) diff --git a/selfprivacy_api/models/tokens/token.py b/selfprivacy_api/models/tokens/token.py new file mode 100644 index 0000000..4c34f58 --- /dev/null +++ b/selfprivacy_api/models/tokens/token.py @@ -0,0 +1,33 @@ +""" +Model of the access token. + +Access token has a token string, device name and date of creation. +""" +from datetime import datetime +import secrets +from pydantic import BaseModel + + +class Token(BaseModel): + """ + Model of the access token. + + Access token has a token string, device name and date of creation. 
+ """ + + token: str + device_name: str + created_at: datetime + + @staticmethod + def generate(device_name: str) -> "Token": + """ + Factory to generate a random token. + """ + creation_date = datetime.now() + token = secrets.token_urlsafe(32) + return Token( + token=token, + device_name=device_name, + created_at=creation_date, + ) diff --git a/selfprivacy_api/repositories/__init__.py b/selfprivacy_api/repositories/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/repositories/tokens/__init__.py b/selfprivacy_api/repositories/tokens/__init__.py new file mode 100644 index 0000000..9941bdc --- /dev/null +++ b/selfprivacy_api/repositories/tokens/__init__.py @@ -0,0 +1,8 @@ +from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( + AbstractTokensRepository, +) +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) + +repository = JsonTokensRepository() diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py new file mode 100644 index 0000000..3cf6e1d --- /dev/null +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -0,0 +1,93 @@ +from abc import ABC, abstractmethod +from datetime import datetime +from typing import Optional + +from selfprivacy_api.models.tokens.token import Token +from selfprivacy_api.models.tokens.recovery_key import RecoveryKey +from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey + + +class AbstractTokensRepository(ABC): + @abstractmethod + def get_token_by_token_string(self, token_string: str) -> Optional[Token]: + """Get the token by token""" + + @abstractmethod + def get_token_by_name(self, token_name: str) -> Optional[Token]: + """Get the token by name""" + + @abstractmethod + def get_tokens(self) -> list[Token]: + """Get the tokens""" + + @abstractmethod + def create_token(self, device_name: str) -> Token: + 
"""Create new token""" + + @abstractmethod + def delete_token(self, input_token: Token) -> None: + """Delete the token""" + + @abstractmethod + def refresh_token(self, input_token: Token) -> Token: + """Refresh the token""" + + def is_token_valid(self, token_string: str) -> bool: + """Check if the token is valid""" + token = self.get_token_by_token_string(token_string) + if token is None: + return False + return True + + def is_token_name_exists(self, token_name: str) -> bool: + """Check if the token name exists""" + token = self.get_token_by_name(token_name) + if token is None: + return False + return True + + def is_token_name_pair_valid(self, token_name: str, token_string: str) -> bool: + """Check if the token name and token are valid""" + token = self.get_token_by_name(token_name) + if token is None: + return False + return token.token == token_string + + @abstractmethod + def get_recovery_key(self) -> Optional[RecoveryKey]: + """Get the recovery key""" + + @abstractmethod + def create_recovery_key( + self, + expiration: Optional[datetime], + uses_left: Optional[int], + ) -> RecoveryKey: + """Create the recovery key""" + + @abstractmethod + def use_mnemonic_recovery_key( + self, mnemonic_phrase: str, device_name: str + ) -> Token: + """Use the mnemonic recovery key and create a new token with the given name""" + + def is_recovery_key_valid(self) -> bool: + """Check if the recovery key is valid""" + recovery_key = self.get_recovery_key() + if recovery_key is None: + return False + return recovery_key.is_valid() + + @abstractmethod + def get_new_device_key(self) -> NewDeviceKey: + """Creates and returns the new device key""" + + @abstractmethod + def delete_new_device_key(self) -> None: + """Delete the new device key""" + + @abstractmethod + def use_mnemonic_new_device_key( + self, mnemonic_phrase: str, device_name: str + ) -> Token: + """Use the mnemonic new device key""" diff --git a/selfprivacy_api/repositories/tokens/exceptions.py 
b/selfprivacy_api/repositories/tokens/exceptions.py new file mode 100644 index 0000000..6b419c7 --- /dev/null +++ b/selfprivacy_api/repositories/tokens/exceptions.py @@ -0,0 +1,14 @@ +class TokenNotFound(Exception): + """Token not found!""" + + +class RecoveryKeyNotFound(Exception): + """Recovery key not found!""" + + +class InvalidMnemonic(Exception): + """Phrase is not mnemonic!""" + + +class NewDeviceKeyNotFound(Exception): + """New device key not found!""" diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py new file mode 100644 index 0000000..aad3158 --- /dev/null +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -0,0 +1,238 @@ +""" +temporary legacy +""" +from typing import Optional +from datetime import datetime +from mnemonic import Mnemonic + +from selfprivacy_api.utils import UserDataFiles, WriteUserData, ReadUserData +from selfprivacy_api.models.tokens.token import Token +from selfprivacy_api.models.tokens.recovery_key import RecoveryKey +from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey +from selfprivacy_api.repositories.tokens.exceptions import ( + TokenNotFound, + RecoveryKeyNotFound, + InvalidMnemonic, + NewDeviceKeyNotFound, +) +from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( + AbstractTokensRepository, +) + +DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" + + +class JsonTokensRepository(AbstractTokensRepository): + def get_token_by_token_string(self, token_string: str) -> Optional[Token]: + """Get the token by token""" + with ReadUserData(UserDataFiles.TOKENS) as tokens_file: + for userdata_token in tokens_file["tokens"]: + if userdata_token["token"] == token_string: + + return Token( + token=token_string, + device_name=userdata_token["name"], + created_at=userdata_token["date"], + ) + + raise TokenNotFound("Token not found!") + + def get_token_by_name(self, token_name: str) -> Optional[Token]: + 
"""Get the token by name""" + with ReadUserData(UserDataFiles.TOKENS) as tokens_file: + for userdata_token in tokens_file["tokens"]: + if userdata_token["name"] == token_name: + + return Token( + token=userdata_token["token"], + device_name=token_name, + created_at=userdata_token["date"], + ) + + raise TokenNotFound("Token not found!") + + def get_tokens(self) -> list[Token]: + """Get the tokens""" + tokens_list = [] + + with ReadUserData(UserDataFiles.TOKENS) as tokens_file: + for userdata_token in tokens_file["tokens"]: + tokens_list.append( + Token( + token=userdata_token["token"], + device_name=userdata_token["name"], + created_at=userdata_token["date"], + ) + ) + + return tokens_list + + def create_token(self, device_name: str) -> Token: + """Create new token""" + new_token = Token.generate(device_name) + + with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + tokens_file["tokens"].append( + { + "token": new_token.token, + "name": new_token.device_name, + "date": new_token.created_at.strftime(DATETIME_FORMAT), + } + ) + return new_token + + def delete_token(self, input_token: Token) -> None: + """Delete the token""" + with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + for userdata_token in tokens_file["tokens"]: + if userdata_token["token"] == input_token.token: + tokens_file["tokens"].remove(userdata_token) + return + + raise TokenNotFound("Token not found!") + + def refresh_token(self, input_token: Token) -> Token: + """Change the token field of the existing token""" + new_token = Token.generate(device_name=input_token.device_name) + + with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + for userdata_token in tokens_file["tokens"]: + + if userdata_token["name"] == input_token.device_name: + userdata_token["token"] = new_token.token + userdata_token["date"] = ( + new_token.created_at.strftime(DATETIME_FORMAT), + ) + + return new_token + + raise TokenNotFound("Token not found!") + + def get_recovery_key(self) -> Optional[RecoveryKey]: + 
"""Get the recovery key""" + with ReadUserData(UserDataFiles.TOKENS) as tokens_file: + + if ( + "recovery_token" not in tokens_file + or tokens_file["recovery_token"] is None + ): + return + + recovery_key = RecoveryKey( + key=tokens_file["recovery_token"].get("token"), + created_at=tokens_file["recovery_token"].get("date"), + expires_at=tokens_file["recovery_token"].get("expitation"), + uses_left=tokens_file["recovery_token"].get("uses_left"), + ) + + return recovery_key + + def create_recovery_key( + self, + expiration: Optional[datetime], + uses_left: Optional[int], + ) -> RecoveryKey: + """Create the recovery key""" + + recovery_key = RecoveryKey.generate(expiration, uses_left) + + with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + tokens_file["recovery_token"] = { + "token": recovery_key.key, + "date": recovery_key.created_at.strftime(DATETIME_FORMAT), + "expiration": recovery_key.expires_at, + "uses_left": recovery_key.uses_left, + } + + return recovery_key + + def use_mnemonic_recovery_key( + self, mnemonic_phrase: str, device_name: str + ) -> Token: + """Use the mnemonic recovery key and create a new token with the given name""" + recovery_key = self.get_recovery_key() + + if recovery_key is None: + raise RecoveryKeyNotFound("Recovery key not found") + + if not recovery_key.is_valid(): + raise RecoveryKeyNotFound("Recovery key not found") + + recovery_token = bytes.fromhex(recovery_key.key) + + if not Mnemonic(language="english").check(mnemonic_phrase): + raise InvalidMnemonic("Phrase is not mnemonic!") + + phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) + if phrase_bytes != recovery_token: + raise RecoveryKeyNotFound("Recovery key not found") + + new_token = Token.generate(device_name=device_name) + + with WriteUserData(UserDataFiles.TOKENS) as tokens: + tokens["tokens"].append( + { + "token": new_token.token, + "name": new_token.device_name, + "date": new_token.created_at.strftime(DATETIME_FORMAT), + } + ) + + if 
"recovery_token" in tokens: + if ( + "uses_left" in tokens["recovery_token"] + and tokens["recovery_token"]["uses_left"] is not None + ): + tokens["recovery_token"]["uses_left"] -= 1 + return new_token + + def get_new_device_key(self) -> NewDeviceKey: + """Creates and returns the new device key""" + new_device_key = NewDeviceKey.generate() + + with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + tokens_file["new_device"] = { + "token": new_device_key.key, + "date": new_device_key.created_at.strftime(DATETIME_FORMAT), + "expiration": new_device_key.expires_at.strftime(DATETIME_FORMAT), + } + + return new_device_key + + def delete_new_device_key(self) -> None: + """Delete the new device key""" + with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + if "new_device" in tokens_file: + del tokens_file["new_device"] + return + + def use_mnemonic_new_device_key( + self, mnemonic_phrase: str, device_name: str + ) -> Token: + """Use the mnemonic new device key""" + + with ReadUserData(UserDataFiles.TOKENS) as tokens_file: + if "new_device" not in tokens_file or tokens_file["new_device"] is None: + raise NewDeviceKeyNotFound("New device key not found") + + new_device_key = NewDeviceKey( + key=tokens_file["new_device"]["token"], + created_at=tokens_file["new_device"]["date"], + expires_at=tokens_file["new_device"]["expiration"], + ) + + token = bytes.fromhex(new_device_key.key) + + if not Mnemonic(language="english").check(mnemonic_phrase): + raise InvalidMnemonic("Phrase is not mnemonic!") + + phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) + if bytes(phrase_bytes) != bytes(token): + raise NewDeviceKeyNotFound("Phrase is not token!") + + new_token = Token.generate(device_name=device_name) + with WriteUserData(UserDataFiles.TOKENS) as tokens: + if "new_device" in tokens: + del tokens["new_device"] + + return new_token diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py 
b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py new file mode 100644 index 0000000..0186c11 --- /dev/null +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -0,0 +1,15 @@ +""" +Token repository using Redis as backend. +""" +from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( + AbstractTokensRepository, +) + + +class RedisTokensRepository(AbstractTokensRepository): + """ + Token repository using Redis as a backend + """ + + def __init__(self) -> None: + raise NotImplementedError diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py new file mode 100644 index 0000000..878e242 --- /dev/null +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -0,0 +1,582 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring + +from datetime import datetime, timezone + +import pytest + +from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey +from selfprivacy_api.models.tokens.recovery_key import RecoveryKey +from selfprivacy_api.models.tokens.token import Token +from selfprivacy_api.repositories.tokens.exceptions import ( + InvalidMnemonic, + RecoveryKeyNotFound, + TokenNotFound, + NewDeviceKeyNotFound, +) +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) +from tests.common import read_json + + +ORIGINAL_TOKEN_CONTENT = [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698", + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z", + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z", + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": 
"forth_token", + "date": "2022-07-15T17:41:31.675698", + }, +] + + +@pytest.fixture +def tokens(mocker, datadir): + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json") + assert read_json(datadir / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT + return datadir + + +@pytest.fixture +def empty_keys(mocker, datadir): + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "empty_keys.json") + assert read_json(datadir / "empty_keys.json")["tokens"] == [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698", + } + ] + return datadir + + +@pytest.fixture +def null_keys(mocker, datadir): + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json") + assert read_json(datadir / "null_keys.json")["recovery_token"] is None + assert read_json(datadir / "null_keys.json")["new_device"] is None + return datadir + + +class RecoveryKeyMockReturnNotValid: + def is_valid() -> bool: + return False + + +@pytest.fixture +def mock_new_device_key_generate(mocker): + mock = mocker.patch( + "selfprivacy_api.repositories.tokens.json_tokens_repository.NewDeviceKey.generate", + autospec=True, + return_value=NewDeviceKey( + key="43478d05b35e4781598acd76e33832bb", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ), + ) + return mock + + +@pytest.fixture +def mock_generate_token(mocker): + mock = mocker.patch( + "selfprivacy_api.repositories.tokens.json_tokens_repository.Token.generate", + autospec=True, + return_value=Token( + token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", + device_name="newdevice", + created_at=datetime(2022, 11, 14, 6, 6, 32, 777123), + ), + ) + return mock + + +@pytest.fixture +def mock_get_recovery_key_return_not_valid(mocker): + mock = mocker.patch( + "selfprivacy_api.repositories.tokens.json_tokens_repository.JsonTokensRepository.get_recovery_key", + autospec=True, + 
return_value=RecoveryKeyMockReturnNotValid, + ) + return mock + + +@pytest.fixture +def mock_token_generate(mocker): + mock = mocker.patch( + "selfprivacy_api.repositories.tokens.json_tokens_repository.Token.generate", + autospec=True, + return_value=Token( + token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", + device_name="IamNewDevice", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ), + ) + return mock + + +@pytest.fixture +def mock_recovery_key_generate(mocker): + mock = mocker.patch( + "selfprivacy_api.repositories.tokens.json_tokens_repository.RecoveryKey.generate", + autospec=True, + return_value=RecoveryKey( + key="889bf49c1d3199d71a2e704718772bd53a422020334db051", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=None, + uses_left=1, + ), + ) + return mock + + +############### +# Test tokens # +############### + + +def test_get_token_by_token_string(tokens): + repo = JsonTokensRepository() + + assert repo.get_token_by_token_string( + token_string="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI" + ) == Token( + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + +def test_get_token_by_non_existent_token_string(tokens): + repo = JsonTokensRepository() + + with pytest.raises(TokenNotFound): + assert repo.get_token_by_token_string(token_string="iamBadtoken") is None + + +def test_get_token_by_name(tokens): + repo = JsonTokensRepository() + + assert repo.get_token_by_name(token_name="primary_token") is not None + assert repo.get_token_by_name(token_name="primary_token") == Token( + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + +def test_get_token_by_non_existent_name(tokens): + repo = JsonTokensRepository() + + with pytest.raises(TokenNotFound): + assert repo.get_token_by_name(token_name="badname") is None + + +def 
test_get_tokens(tokens): + repo = JsonTokensRepository() + + assert repo.get_tokens() == [ + Token( + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ), + Token( + token="3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + device_name="second_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698, tzinfo=timezone.utc), + ), + Token( + token="LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + device_name="third_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698, tzinfo=timezone.utc), + ), + Token( + token="dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + device_name="forth_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ), + ] + + +def test_get_tokens_when_one(empty_keys): + repo = JsonTokensRepository() + + assert repo.get_tokens() == [ + Token( + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + ] + + +def test_create_token(tokens, mock_token_generate): + repo = JsonTokensRepository() + + assert repo.create_token(device_name="IamNewDevice") == Token( + token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", + device_name="IamNewDevice", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + +def test_delete_token(tokens): + repo = JsonTokensRepository() + input_token = Token( + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + repo.delete_token(input_token) + assert read_json(tokens / "tokens.json")["tokens"] == [ + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z", + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z", + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": 
"forth_token", + "date": "2022-07-15T17:41:31.675698", + }, + ] + + +def test_delete_not_found_token(tokens): + repo = JsonTokensRepository() + input_token = Token( + token="imbadtoken", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + with pytest.raises(TokenNotFound): + assert repo.delete_token(input_token) is None + + assert read_json(tokens / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT + + +def test_refresh_token(tokens, mock_token_generate): + repo = JsonTokensRepository() + input_token = Token( + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + assert repo.refresh_token(input_token) == Token( + token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", + device_name="IamNewDevice", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + +def test_refresh_not_found_token(tokens, mock_token_generate): + repo = JsonTokensRepository() + input_token = Token( + token="idontknowwhoiam", + device_name="tellmewhoiam?", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + with pytest.raises(TokenNotFound): + assert repo.refresh_token(input_token) is None + + +################ +# Recovery key # +################ + + +def test_get_recovery_key(tokens): + repo = JsonTokensRepository() + + assert repo.get_recovery_key() == RecoveryKey( + key="ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + created_at=datetime(2022, 11, 11, 11, 48, 54, 228038), + expires_at=None, + uses_left=2, + ) + + +def test_get_recovery_key_when_empty(empty_keys): + repo = JsonTokensRepository() + + assert repo.get_recovery_key() is None + + +def test_create_recovery_key(tokens, mock_recovery_key_generate): + repo = JsonTokensRepository() + + assert repo.create_recovery_key(uses_left=1, expiration=None) is not None + assert read_json(tokens / "tokens.json")["recovery_token"] == { + "token": 
"889bf49c1d3199d71a2e704718772bd53a422020334db051", + "date": "2022-07-15T17:41:31.675698", + "expiration": None, + "uses_left": 1, + } + + +def test_use_mnemonic_recovery_key_when_empty( + empty_keys, mock_recovery_key_generate, mock_token_generate +): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyNotFound): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + device_name="primary_token", + ) + is None + ) + + +def test_use_mnemonic_not_valid_recovery_key( + tokens, mock_get_recovery_key_return_not_valid +): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyNotFound): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + device_name="primary_token", + ) + is None + ) + + +def test_use_mnemonic_not_mnemonic_recovery_key(tokens): + repo = JsonTokensRepository() + + with pytest.raises(InvalidMnemonic): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="sorry, it was joke", + device_name="primary_token", + ) + is None + ) + + +def test_use_not_mnemonic_recovery_key(tokens): + repo = JsonTokensRepository() + + with pytest.raises(InvalidMnemonic): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="please come back", + device_name="primary_token", + ) + is None + ) + + +def test_use_not_found_mnemonic_recovery_key(tokens): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyNotFound): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + device_name="primary_token", + ) + is None + ) + + +def test_use_menemonic_recovery_key_when_empty(empty_keys): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyNotFound): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="captain ribbon 
toddler settle symbol minute step broccoli bless universe divide bulb", + device_name="primary_token", + ) + is None + ) + + +def test_use_menemonic_recovery_key_when_null(null_keys): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyNotFound): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + device_name="primary_token", + ) + is None + ) + + +def test_use_mnemonic_recovery_key(tokens, mock_generate_token): + repo = JsonTokensRepository() + + assert repo.use_mnemonic_recovery_key( + mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", + device_name="newdevice", + ) == Token( + token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", + device_name="newdevice", + created_at=datetime(2022, 11, 14, 6, 6, 32, 777123), + ) + + assert read_json(tokens / "tokens.json")["tokens"] == [ + { + "date": "2022-07-15 17:41:31.675698", + "name": "primary_token", + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z", + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z", + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698", + }, + { + "date": "2022-11-14T06:06:32.777123", + "name": "newdevice", + "token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", + }, + ] + + assert read_json(tokens / "tokens.json")["recovery_token"] == { + "date": "2022-11-11T11:48:54.228038", + "expiration": None, + "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + "uses_left": 1, + } + + +################## +# New device key # +################## + + +def test_get_new_device_key(tokens, 
mock_new_device_key_generate): + repo = JsonTokensRepository() + + assert repo.get_new_device_key() is not None + assert read_json(tokens / "tokens.json")["new_device"] == { + "date": "2022-07-15T17:41:31.675698", + "expiration": "2022-07-15T17:41:31.675698", + "token": "43478d05b35e4781598acd76e33832bb", + } + + +def test_delete_new_device_key(tokens): + repo = JsonTokensRepository() + + assert repo.delete_new_device_key() is None + assert "new_device" not in read_json(tokens / "tokens.json") + + +def test_delete_new_device_key_when_empty(empty_keys): + repo = JsonTokensRepository() + + repo.delete_new_device_key() + assert "new_device" not in read_json(empty_keys / "empty_keys.json") + + +def test_use_invalid_mnemonic_new_device_key( + tokens, mock_new_device_key_generate, datadir, mock_token_generate +): + repo = JsonTokensRepository() + + with pytest.raises(InvalidMnemonic): + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="oh-no", + ) + is None + ) + + +def test_use_not_exists_mnemonic_new_device_key( + tokens, mock_new_device_key_generate, mock_token_generate +): + repo = JsonTokensRepository() + + with pytest.raises(NewDeviceKeyNotFound): + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", + ) + is None + ) + + +def test_use_mnemonic_new_device_key( + tokens, mock_new_device_key_generate, mock_token_generate +): + repo = JsonTokensRepository() + + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + ) + is not None + ) + # assert read_json(datadir / "tokens.json")["new_device"] == [] + + +def test_use_mnemonic_new_device_key_when_empty(empty_keys): + repo = JsonTokensRepository() + + with pytest.raises(NewDeviceKeyNotFound): + assert ( + 
repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + ) + is None + ) + + +def test_use_mnemonic_new_device_key_when_null(null_keys): + repo = JsonTokensRepository() + + with pytest.raises(NewDeviceKeyNotFound): + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + ) + is None + ) diff --git a/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json b/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json new file mode 100644 index 0000000..2131ddf --- /dev/null +++ b/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json @@ -0,0 +1,9 @@ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + } + ] +} diff --git a/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json b/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json new file mode 100644 index 0000000..45e6f90 --- /dev/null +++ b/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json @@ -0,0 +1,26 @@ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z" + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z" + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698" + } + ], + "recovery_token": null, + "new_device": null +} diff --git a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json 
b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json new file mode 100644 index 0000000..bb1805c --- /dev/null +++ b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json @@ -0,0 +1,35 @@ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z" + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z" + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698" + } + ], + "recovery_token": { + "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + "date": "2022-11-11T11:48:54.228038", + "expiration": null, + "uses_left": 2 + }, + "new_device": { + "token": "2237238de23dc71ab558e317bdb8ff8e", + "date": "2022-10-26 20:50:47.973212", + "expiration": "2022-10-26 21:00:47.974153" + } +} diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index c36dcb2..7a65736 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -516,7 +516,6 @@ def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_ }, }, ) - assert response.status_code == 200 assert response.json().get("data") is not None assert response.json()["data"]["createUser"]["message"] is not None From 4cfe0515eaef29a33c087e90263714660c6246e3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 12:01:39 +0000 Subject: [PATCH 037/129] test(tokens-repo): split between abstract api and backend-specific tests --- .../test_json_tokens_repository.py | 154 ++++++++++++++++++ .../empty_keys.json | 9 + .../null_keys.json | 26 +++ .../test_json_tokens_repository/tokens.json | 35 ++++ .../test_repository/test_tokens_repository.py | 123 
++++++-------- 5 files changed, 272 insertions(+), 75 deletions(-) create mode 100644 tests/test_graphql/test_repository/test_json_tokens_repository.py create mode 100644 tests/test_graphql/test_repository/test_json_tokens_repository/empty_keys.json create mode 100644 tests/test_graphql/test_repository/test_json_tokens_repository/null_keys.json create mode 100644 tests/test_graphql/test_repository/test_json_tokens_repository/tokens.json diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository.py b/tests/test_graphql/test_repository/test_json_tokens_repository.py new file mode 100644 index 0000000..e90b3dc --- /dev/null +++ b/tests/test_graphql/test_repository/test_json_tokens_repository.py @@ -0,0 +1,154 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +""" +tests that restrict json token repository implementation +""" + +import pytest + + +from datetime import datetime + +from selfprivacy_api.models.tokens.token import Token +from selfprivacy_api.repositories.tokens.exceptions import ( + TokenNotFound, +) +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) +from tests.common import read_json + +from test_tokens_repository import ORIGINAL_TOKEN_CONTENT +from test_tokens_repository import ( + tokens, + mock_recovery_key_generate, + mock_generate_token, + mock_new_device_key_generate, + empty_keys, +) + + +def test_delete_token(tokens): + repo = JsonTokensRepository() + input_token = Token( + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + repo.delete_token(input_token) + assert read_json(tokens / "tokens.json")["tokens"] == [ + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z", + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": 
"third_token", + "date": "2022-07-15T17:41:31.675698Z", + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698", + }, + ] + + +def test_delete_not_found_token(tokens): + repo = JsonTokensRepository() + input_token = Token( + token="imbadtoken", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + with pytest.raises(TokenNotFound): + assert repo.delete_token(input_token) is None + + assert read_json(tokens / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT + + +def test_create_recovery_key(tokens, mock_recovery_key_generate): + repo = JsonTokensRepository() + + assert repo.create_recovery_key(uses_left=1, expiration=None) is not None + assert read_json(tokens / "tokens.json")["recovery_token"] == { + "token": "889bf49c1d3199d71a2e704718772bd53a422020334db051", + "date": "2022-07-15T17:41:31.675698", + "expiration": None, + "uses_left": 1, + } + + +def test_use_mnemonic_recovery_key(tokens, mock_generate_token): + repo = JsonTokensRepository() + + assert repo.use_mnemonic_recovery_key( + mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", + device_name="newdevice", + ) == Token( + token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", + device_name="newdevice", + created_at=datetime(2022, 11, 14, 6, 6, 32, 777123), + ) + + assert read_json(tokens / "tokens.json")["tokens"] == [ + { + "date": "2022-07-15 17:41:31.675698", + "name": "primary_token", + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z", + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z", + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": 
"2022-07-15T17:41:31.675698", + }, + { + "date": "2022-11-14T06:06:32.777123", + "name": "newdevice", + "token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", + }, + ] + assert read_json(tokens / "tokens.json")["recovery_token"] == { + "date": "2022-11-11T11:48:54.228038", + "expiration": None, + "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + "uses_left": 1, + } + + +def test_get_new_device_key(tokens, mock_new_device_key_generate): + repo = JsonTokensRepository() + + assert repo.get_new_device_key() is not None + assert read_json(tokens / "tokens.json")["new_device"] == { + "date": "2022-07-15T17:41:31.675698", + "expiration": "2022-07-15T17:41:31.675698", + "token": "43478d05b35e4781598acd76e33832bb", + } + + +def test_delete_new_device_key(tokens): + repo = JsonTokensRepository() + + assert repo.delete_new_device_key() is None + assert "new_device" not in read_json(tokens / "tokens.json") + + +def test_delete_new_device_key_when_empty(empty_keys): + repo = JsonTokensRepository() + + repo.delete_new_device_key() + assert "new_device" not in read_json(empty_keys / "empty_keys.json") diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository/empty_keys.json b/tests/test_graphql/test_repository/test_json_tokens_repository/empty_keys.json new file mode 100644 index 0000000..2131ddf --- /dev/null +++ b/tests/test_graphql/test_repository/test_json_tokens_repository/empty_keys.json @@ -0,0 +1,9 @@ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + } + ] +} diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository/null_keys.json b/tests/test_graphql/test_repository/test_json_tokens_repository/null_keys.json new file mode 100644 index 0000000..45e6f90 --- /dev/null +++ b/tests/test_graphql/test_repository/test_json_tokens_repository/null_keys.json @@ -0,0 +1,26 @@ +{ + "tokens": [ + { + "token": 
"KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z" + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z" + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698" + } + ], + "recovery_token": null, + "new_device": null +} diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository/tokens.json b/tests/test_graphql/test_repository/test_json_tokens_repository/tokens.json new file mode 100644 index 0000000..bb1805c --- /dev/null +++ b/tests/test_graphql/test_repository/test_json_tokens_repository/tokens.json @@ -0,0 +1,35 @@ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z" + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z" + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698" + } + ], + "recovery_token": { + "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + "date": "2022-11-11T11:48:54.228038", + "expiration": null, + "uses_left": 2 + }, + "new_device": { + "token": "2237238de23dc71ab558e317bdb8ff8e", + "date": "2022-10-26 20:50:47.973212", + "expiration": "2022-10-26 21:00:47.974153" + } +} diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 878e242..5a74bf4 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py 
+++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -236,34 +236,6 @@ def test_create_token(tokens, mock_token_generate): ) -def test_delete_token(tokens): - repo = JsonTokensRepository() - input_token = Token( - token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) - - repo.delete_token(input_token) - assert read_json(tokens / "tokens.json")["tokens"] == [ - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z", - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z", - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698", - }, - ] - - def test_delete_not_found_token(tokens): repo = JsonTokensRepository() input_token = Token( @@ -274,7 +246,7 @@ def test_delete_not_found_token(tokens): with pytest.raises(TokenNotFound): assert repo.delete_token(input_token) is None - assert read_json(tokens / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT + # assert read_json(tokens / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT def test_refresh_token(tokens, mock_token_generate): @@ -330,12 +302,12 @@ def test_create_recovery_key(tokens, mock_recovery_key_generate): repo = JsonTokensRepository() assert repo.create_recovery_key(uses_left=1, expiration=None) is not None - assert read_json(tokens / "tokens.json")["recovery_token"] == { - "token": "889bf49c1d3199d71a2e704718772bd53a422020334db051", - "date": "2022-07-15T17:41:31.675698", - "expiration": None, - "uses_left": 1, - } + # assert read_json(tokens / "tokens.json")["recovery_token"] == { + # "token": "889bf49c1d3199d71a2e704718772bd53a422020334db051", + # "date": "2022-07-15T17:41:31.675698", + # "expiration": None, + # "uses_left": 1, + # } def test_use_mnemonic_recovery_key_when_empty( 
@@ -433,6 +405,7 @@ def test_use_menemonic_recovery_key_when_null(null_keys): ) +# agnostic test mixed with an implementation test def test_use_mnemonic_recovery_key(tokens, mock_generate_token): repo = JsonTokensRepository() @@ -445,40 +418,40 @@ def test_use_mnemonic_recovery_key(tokens, mock_generate_token): created_at=datetime(2022, 11, 14, 6, 6, 32, 777123), ) - assert read_json(tokens / "tokens.json")["tokens"] == [ - { - "date": "2022-07-15 17:41:31.675698", - "name": "primary_token", - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z", - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z", - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698", - }, - { - "date": "2022-11-14T06:06:32.777123", - "name": "newdevice", - "token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", - }, - ] + # assert read_json(tokens / "tokens.json")["tokens"] == [ + # { + # "date": "2022-07-15 17:41:31.675698", + # "name": "primary_token", + # "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + # }, + # { + # "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + # "name": "second_token", + # "date": "2022-07-15 17:41:31.675698Z", + # }, + # { + # "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + # "name": "third_token", + # "date": "2022-07-15T17:41:31.675698Z", + # }, + # { + # "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + # "name": "forth_token", + # "date": "2022-07-15T17:41:31.675698", + # }, + # { + # "date": "2022-11-14T06:06:32.777123", + # "name": "newdevice", + # "token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", + # }, + # ] - assert read_json(tokens / "tokens.json")["recovery_token"] == { - "date": "2022-11-11T11:48:54.228038", - "expiration": None, 
- "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - "uses_left": 1, - } + # assert read_json(tokens / "tokens.json")["recovery_token"] == { + # "date": "2022-11-11T11:48:54.228038", + # "expiration": None, + # "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + # "uses_left": 1, + # } ################## @@ -490,25 +463,25 @@ def test_get_new_device_key(tokens, mock_new_device_key_generate): repo = JsonTokensRepository() assert repo.get_new_device_key() is not None - assert read_json(tokens / "tokens.json")["new_device"] == { - "date": "2022-07-15T17:41:31.675698", - "expiration": "2022-07-15T17:41:31.675698", - "token": "43478d05b35e4781598acd76e33832bb", - } + # assert read_json(tokens / "tokens.json")["new_device"] == { + # "date": "2022-07-15T17:41:31.675698", + # "expiration": "2022-07-15T17:41:31.675698", + # "token": "43478d05b35e4781598acd76e33832bb", + # } def test_delete_new_device_key(tokens): repo = JsonTokensRepository() assert repo.delete_new_device_key() is None - assert "new_device" not in read_json(tokens / "tokens.json") + # assert "new_device" not in read_json(tokens / "tokens.json") def test_delete_new_device_key_when_empty(empty_keys): repo = JsonTokensRepository() repo.delete_new_device_key() - assert "new_device" not in read_json(empty_keys / "empty_keys.json") + # assert "new_device" not in read_json(empty_keys / "empty_keys.json") def test_use_invalid_mnemonic_new_device_key( From 8a05a55b80cf5267a1a5e7c646acaa1be3167217 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 12:02:33 +0000 Subject: [PATCH 038/129] test(tokens-repo): parameterized fixture --- .../test_repository/test_tokens_repository.py | 66 +++++++++++++++---- 1 file changed, 55 insertions(+), 11 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 5a74bf4..cfeddb3 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py 
+++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -18,6 +18,9 @@ from selfprivacy_api.repositories.tokens.exceptions import ( from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, ) +from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( + RedisTokensRepository, +) from tests.common import read_json @@ -44,6 +47,13 @@ ORIGINAL_TOKEN_CONTENT = [ }, ] +ORIGINAL_DEVICE_NAMES = [ + "primary_token", + "second_token", + "third_token", + "forth_token", +] + @pytest.fixture def tokens(mocker, datadir): @@ -145,25 +155,59 @@ def mock_recovery_key_generate(mocker): return mock +@pytest.fixture +def empty_json_repo(tokens): + repo = JsonTokensRepository() + for token in repo.get_tokens(): + repo.delete_token(token) + assert repo.get_tokens() == [] + return repo + + +@pytest.fixture +def empty_redis_repo(): + repo = RedisTokensRepository() + for token in repo.get_tokens(): + repo.delete_token(token) + assert repo.get_tokens() == [] + return repo + + +@pytest.fixture(params=["json", "redis"]) +def empty_repo(request, empty_json_repo): + if request.param == "json": + return empty_json_repo + if request.param == "redis": + # return empty_redis_repo + return empty_json_repo + else: + raise NotImplementedError + + +@pytest.fixture +def some_tokens_repo(empty_repo): + for name in ORIGINAL_DEVICE_NAMES: + empty_repo.create_token(name) + assert len(empty_repo.get_tokens()) == len(ORIGINAL_DEVICE_NAMES) + for i, t in enumerate(empty_repo.get_tokens()): + assert t.device_name == ORIGINAL_DEVICE_NAMES[i] + return empty_repo + + ############### # Test tokens # ############### -def test_get_token_by_token_string(tokens): - repo = JsonTokensRepository() +def test_get_token_by_token_string(some_tokens_repo): + repo = some_tokens_repo + test_token = repo.get_tokens()[2] - assert repo.get_token_by_token_string( - token_string="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI" - ) == Token( - 
token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) + assert repo.get_token_by_token_string(token_string=test_token.token) == test_token -def test_get_token_by_non_existent_token_string(tokens): - repo = JsonTokensRepository() +def test_get_token_by_non_existent_token_string(some_tokens_repo): + repo = some_tokens_repo with pytest.raises(TokenNotFound): assert repo.get_token_by_token_string(token_string="iamBadtoken") is None From 55ad2484b82238c6df70f7d53a5cd45df92d201b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 12:10:50 +0000 Subject: [PATCH 039/129] test(tokens-repo): agnostic test for getting by name --- .../test_repository/test_tokens_repository.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index cfeddb3..8e1b8e3 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -213,19 +213,15 @@ def test_get_token_by_non_existent_token_string(some_tokens_repo): assert repo.get_token_by_token_string(token_string="iamBadtoken") is None -def test_get_token_by_name(tokens): - repo = JsonTokensRepository() +def test_get_token_by_name(some_tokens_repo): + repo = some_tokens_repo assert repo.get_token_by_name(token_name="primary_token") is not None - assert repo.get_token_by_name(token_name="primary_token") == Token( - token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) + assert repo.get_token_by_name(token_name="primary_token") == repo.get_tokens()[0] -def test_get_token_by_non_existent_name(tokens): - repo = JsonTokensRepository() +def test_get_token_by_non_existent_name(some_tokens_repo): + repo = some_tokens_repo with 
pytest.raises(TokenNotFound): assert repo.get_token_by_name(token_name="badname") is None From 2e2d344f43c16339b9fb870609e60db4c4964184 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 13:03:48 +0000 Subject: [PATCH 040/129] test(tokens-repo): get_tokens metaproperties test --- .../test_repository/test_tokens_repository.py | 34 +++++-------------- 1 file changed, 9 insertions(+), 25 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 8e1b8e3..0fc3194 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -227,31 +227,15 @@ def test_get_token_by_non_existent_name(some_tokens_repo): assert repo.get_token_by_name(token_name="badname") is None -def test_get_tokens(tokens): - repo = JsonTokensRepository() - - assert repo.get_tokens() == [ - Token( - token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ), - Token( - token="3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - device_name="second_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698, tzinfo=timezone.utc), - ), - Token( - token="LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - device_name="third_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698, tzinfo=timezone.utc), - ), - Token( - token="dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - device_name="forth_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ), - ] +def test_get_tokens(some_tokens_repo): + repo = some_tokens_repo + tokenstrings = [] + # we cannot insert tokens directly via api, so we check meta-properties instead + for token in some_tokens_repo.get_tokens(): + len(token.token) == 43 # assuming secrets.token_urlsafe + assert token.token not in tokenstrings + tokenstrings.append(token.token) + assert token.created_at.day == 
datetime.today().day def test_get_tokens_when_one(empty_keys): From 3921d9fe4c68347fc1de93036a85e0be0a0665aa Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 14:48:33 +0000 Subject: [PATCH 041/129] test(tokens-repo): agnostic token creation test --- .../test_repository/test_tokens_repository.py | 25 ++++++++----------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 0fc3194..957ceb4 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -129,7 +129,7 @@ def mock_get_recovery_key_return_not_valid(mocker): @pytest.fixture def mock_token_generate(mocker): mock = mocker.patch( - "selfprivacy_api.repositories.tokens.json_tokens_repository.Token.generate", + "selfprivacy_api.models.tokens.token.Token.generate", autospec=True, return_value=Token( token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", @@ -238,26 +238,21 @@ def test_get_tokens(some_tokens_repo): assert token.created_at.day == datetime.today().day -def test_get_tokens_when_one(empty_keys): - repo = JsonTokensRepository() - - assert repo.get_tokens() == [ - Token( - token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) - ] - - -def test_create_token(tokens, mock_token_generate): - repo = JsonTokensRepository() +def test_create_token(empty_repo, mock_token_generate): + repo = empty_repo assert repo.create_token(device_name="IamNewDevice") == Token( token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", device_name="IamNewDevice", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) + assert repo.get_tokens() == [ + Token( + token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", + device_name="IamNewDevice", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + ] def 
test_delete_not_found_token(tokens): From db55685488a28639df3fb153742960772857f3e4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 14:51:28 +0000 Subject: [PATCH 042/129] test(tokens-repo): use 'repo' for consistency --- tests/test_graphql/test_repository/test_tokens_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 957ceb4..c561775 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -231,7 +231,7 @@ def test_get_tokens(some_tokens_repo): repo = some_tokens_repo tokenstrings = [] # we cannot insert tokens directly via api, so we check meta-properties instead - for token in some_tokens_repo.get_tokens(): + for token in repo.get_tokens(): len(token.token) == 43 # assuming secrets.token_urlsafe assert token.token not in tokenstrings tokenstrings.append(token.token) From b43c4014e21df8bf31204050a8f564c31770ccfe Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 14:56:54 +0000 Subject: [PATCH 043/129] test(tokens-repo): agnostic delete not found --- .../test_graphql/test_repository/test_tokens_repository.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index c561775..1480b7f 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -255,8 +255,9 @@ def test_create_token(empty_repo, mock_token_generate): ] -def test_delete_not_found_token(tokens): - repo = JsonTokensRepository() +def test_delete_not_found_token(some_tokens_repo): + repo = some_tokens_repo + tokens = repo.get_tokens() input_token = Token( token="imbadtoken", device_name="primary_token", @@ -265,7 
+266,7 @@ def test_delete_not_found_token(tokens): with pytest.raises(TokenNotFound): assert repo.delete_token(input_token) is None - # assert read_json(tokens / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT + assert repo.get_tokens() == tokens def test_refresh_token(tokens, mock_token_generate): From fa54220327b416a6a03b6288705ebfdfbc9cbb88 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 15:00:35 +0000 Subject: [PATCH 044/129] test(tokens-repo): agnostic refresh token --- .../test_repository/test_tokens_repository.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 1480b7f..50aeed2 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -269,13 +269,9 @@ def test_delete_not_found_token(some_tokens_repo): assert repo.get_tokens() == tokens -def test_refresh_token(tokens, mock_token_generate): - repo = JsonTokensRepository() - input_token = Token( - token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) +def test_refresh_token(some_tokens_repo, mock_token_generate): + repo = some_tokens_repo + input_token = some_tokens_repo.get_tokens()[0] assert repo.refresh_token(input_token) == Token( token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", From c86eb8b786cb9975cebdaf586adce5cb347dc7ed Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 15:35:29 +0000 Subject: [PATCH 045/129] test(tokens-repo): agnostic refresh token nonexistent --- tests/test_graphql/test_repository/test_tokens_repository.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 50aeed2..0e25a68 
100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -280,8 +280,8 @@ def test_refresh_token(some_tokens_repo, mock_token_generate): ) -def test_refresh_not_found_token(tokens, mock_token_generate): - repo = JsonTokensRepository() +def test_refresh_not_found_token(some_tokens_repo, mock_token_generate): + repo = some_tokens_repo input_token = Token( token="idontknowwhoiam", device_name="tellmewhoiam?", From 6f400911fc7239f7a60566ec317dae019cb8b3b5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 16:12:35 +0000 Subject: [PATCH 046/129] test(tokens-repo): agnostic recovery keys testing --- .../test_repository/test_tokens_repository.py | 35 +++++++------------ 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 0e25a68..bcd7570 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -143,7 +143,7 @@ def mock_token_generate(mocker): @pytest.fixture def mock_recovery_key_generate(mocker): mock = mocker.patch( - "selfprivacy_api.repositories.tokens.json_tokens_repository.RecoveryKey.generate", + "selfprivacy_api.models.tokens.recovery_key.RecoveryKey.generate", autospec=True, return_value=RecoveryKey( key="889bf49c1d3199d71a2e704718772bd53a422020334db051", @@ -156,7 +156,7 @@ def mock_recovery_key_generate(mocker): @pytest.fixture -def empty_json_repo(tokens): +def empty_json_repo(empty_keys): repo = JsonTokensRepository() for token in repo.get_tokens(): repo.delete_token(token) @@ -297,33 +297,22 @@ def test_refresh_not_found_token(some_tokens_repo, mock_token_generate): ################ -def test_get_recovery_key(tokens): - repo = JsonTokensRepository() - - assert repo.get_recovery_key() == RecoveryKey( - 
key="ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - created_at=datetime(2022, 11, 11, 11, 48, 54, 228038), - expires_at=None, - uses_left=2, - ) - - -def test_get_recovery_key_when_empty(empty_keys): - repo = JsonTokensRepository() +def test_get_recovery_key_when_empty(empty_repo): + repo = empty_repo assert repo.get_recovery_key() is None -def test_create_recovery_key(tokens, mock_recovery_key_generate): - repo = JsonTokensRepository() +def test_create_get_recovery_key(some_tokens_repo, mock_recovery_key_generate): + repo = some_tokens_repo assert repo.create_recovery_key(uses_left=1, expiration=None) is not None - # assert read_json(tokens / "tokens.json")["recovery_token"] == { - # "token": "889bf49c1d3199d71a2e704718772bd53a422020334db051", - # "date": "2022-07-15T17:41:31.675698", - # "expiration": None, - # "uses_left": 1, - # } + assert repo.get_recovery_key() == RecoveryKey( + key="889bf49c1d3199d71a2e704718772bd53a422020334db051", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=None, + uses_left=1, + ) def test_use_mnemonic_recovery_key_when_empty( From 732e72d414889b3ff8a192de0637b1c641c652d9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 16:54:47 +0000 Subject: [PATCH 047/129] test(tokens-repo): mnemonic non-null invalid --- .../test_repository/test_tokens_repository.py | 46 ++++++++++--------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index bcd7570..6a745be 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -83,11 +83,6 @@ def null_keys(mocker, datadir): return datadir -class RecoveryKeyMockReturnNotValid: - def is_valid() -> bool: - return False - - @pytest.fixture def mock_new_device_key_generate(mocker): mock = mocker.patch( @@ -117,11 +112,16 @@ def 
mock_generate_token(mocker): @pytest.fixture -def mock_get_recovery_key_return_not_valid(mocker): +def mock_recovery_key_generate_invalid(mocker): mock = mocker.patch( - "selfprivacy_api.repositories.tokens.json_tokens_repository.JsonTokensRepository.get_recovery_key", + "selfprivacy_api.models.tokens.recovery_key.RecoveryKey.generate", autospec=True, - return_value=RecoveryKeyMockReturnNotValid, + return_value=RecoveryKey( + key="889bf49c1d3199d71a2e704718772bd53a422020334db051", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=None, + uses_left=0, + ), ) return mock @@ -315,10 +315,8 @@ def test_create_get_recovery_key(some_tokens_repo, mock_recovery_key_generate): ) -def test_use_mnemonic_recovery_key_when_empty( - empty_keys, mock_recovery_key_generate, mock_token_generate -): - repo = JsonTokensRepository() +def test_use_mnemonic_recovery_key_when_empty(empty_repo): + repo = empty_repo with pytest.raises(RecoveryKeyNotFound): assert ( @@ -331,9 +329,10 @@ def test_use_mnemonic_recovery_key_when_empty( def test_use_mnemonic_not_valid_recovery_key( - tokens, mock_get_recovery_key_return_not_valid + some_tokens_repo, mock_recovery_key_generate_invalid ): - repo = JsonTokensRepository() + repo = some_tokens_repo + assert repo.create_recovery_key(uses_left=0, expiration=None) is not None with pytest.raises(RecoveryKeyNotFound): assert ( @@ -345,8 +344,9 @@ def test_use_mnemonic_not_valid_recovery_key( ) -def test_use_mnemonic_not_mnemonic_recovery_key(tokens): - repo = JsonTokensRepository() +def test_use_mnemonic_not_mnemonic_recovery_key(some_tokens_repo): + repo = some_tokens_repo + assert repo.create_recovery_key(uses_left=1, expiration=None) is not None with pytest.raises(InvalidMnemonic): assert ( @@ -358,8 +358,9 @@ def test_use_mnemonic_not_mnemonic_recovery_key(tokens): ) -def test_use_not_mnemonic_recovery_key(tokens): - repo = JsonTokensRepository() +def test_use_not_mnemonic_recovery_key(some_tokens_repo): + repo = some_tokens_repo + 
assert repo.create_recovery_key(uses_left=1, expiration=None) is not None with pytest.raises(InvalidMnemonic): assert ( @@ -371,8 +372,9 @@ def test_use_not_mnemonic_recovery_key(tokens): ) -def test_use_not_found_mnemonic_recovery_key(tokens): - repo = JsonTokensRepository() +def test_use_not_found_mnemonic_recovery_key(some_tokens_repo): + repo = some_tokens_repo + assert repo.create_recovery_key(uses_left=1, expiration=None) is not None with pytest.raises(RecoveryKeyNotFound): assert ( @@ -384,8 +386,8 @@ def test_use_not_found_mnemonic_recovery_key(tokens): ) -def test_use_menemonic_recovery_key_when_empty(empty_keys): - repo = JsonTokensRepository() +def test_use_mnemonic_recovery_key_when_empty(empty_repo): + repo = empty_repo with pytest.raises(RecoveryKeyNotFound): assert ( From b9c570720b412f05906fb3ffda449af5f695de39 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 17:07:17 +0000 Subject: [PATCH 048/129] test(tokens-repo): move null recovery token test to json tests Because the null state seems to be specific to json and not reproducible in abstract case. 
--- .../test_json_tokens_repository.py | 15 +++++++++++++++ .../test_repository/test_tokens_repository.py | 13 ------------- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository.py b/tests/test_graphql/test_repository/test_json_tokens_repository.py index e90b3dc..a12c0de 100644 --- a/tests/test_graphql/test_repository/test_json_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_json_tokens_repository.py @@ -13,6 +13,7 @@ from datetime import datetime from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.repositories.tokens.exceptions import ( TokenNotFound, + RecoveryKeyNotFound, ) from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, @@ -26,6 +27,7 @@ from test_tokens_repository import ( mock_generate_token, mock_new_device_key_generate, empty_keys, + null_keys, ) @@ -82,6 +84,19 @@ def test_create_recovery_key(tokens, mock_recovery_key_generate): } +def test_use_mnemonic_recovery_key_when_null(null_keys): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyNotFound): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + device_name="primary_token", + ) + is None + ) + + def test_use_mnemonic_recovery_key(tokens, mock_generate_token): repo = JsonTokensRepository() diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 6a745be..8fa6c47 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -399,19 +399,6 @@ def test_use_mnemonic_recovery_key_when_empty(empty_repo): ) -def test_use_menemonic_recovery_key_when_null(null_keys): - repo = JsonTokensRepository() - - with pytest.raises(RecoveryKeyNotFound): - assert ( - 
repo.use_mnemonic_recovery_key( - mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", - device_name="primary_token", - ) - is None - ) - - # agnostic test mixed with an implementation test def test_use_mnemonic_recovery_key(tokens, mock_generate_token): repo = JsonTokensRepository() From dd525fe72377be4197ae64b3c292efa25d64cea0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 7 Dec 2022 17:30:31 +0000 Subject: [PATCH 049/129] test(tokens-repo): agnostic use recovery token converted json-reading asserts to backend-agnostic asserts --- .../test_repository/test_tokens_repository.py | 74 +++++++++---------- 1 file changed, 35 insertions(+), 39 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 8fa6c47..f7aae99 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -155,6 +155,21 @@ def mock_recovery_key_generate(mocker): return mock +@pytest.fixture +def mock_recovery_key_generate_for_mnemonic(mocker): + mock = mocker.patch( + "selfprivacy_api.models.tokens.recovery_key.RecoveryKey.generate", + autospec=True, + return_value=RecoveryKey( + key="ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=None, + uses_left=1, + ), + ) + return mock + + @pytest.fixture def empty_json_repo(empty_keys): repo = JsonTokensRepository() @@ -400,52 +415,33 @@ def test_use_mnemonic_recovery_key_when_empty(empty_repo): # agnostic test mixed with an implementation test -def test_use_mnemonic_recovery_key(tokens, mock_generate_token): - repo = JsonTokensRepository() +def test_use_mnemonic_recovery_key( + some_tokens_repo, mock_recovery_key_generate_for_mnemonic, mock_generate_token +): + repo = some_tokens_repo + assert repo.create_recovery_key(uses_left=1, expiration=None) is 
not None - assert repo.use_mnemonic_recovery_key( - mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", - device_name="newdevice", - ) == Token( + test_token = Token( token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", device_name="newdevice", created_at=datetime(2022, 11, 14, 6, 6, 32, 777123), ) - # assert read_json(tokens / "tokens.json")["tokens"] == [ - # { - # "date": "2022-07-15 17:41:31.675698", - # "name": "primary_token", - # "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - # }, - # { - # "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - # "name": "second_token", - # "date": "2022-07-15 17:41:31.675698Z", - # }, - # { - # "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - # "name": "third_token", - # "date": "2022-07-15T17:41:31.675698Z", - # }, - # { - # "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - # "name": "forth_token", - # "date": "2022-07-15T17:41:31.675698", - # }, - # { - # "date": "2022-11-14T06:06:32.777123", - # "name": "newdevice", - # "token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", - # }, - # ] + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", + device_name="newdevice", + ) + == test_token + ) - # assert read_json(tokens / "tokens.json")["recovery_token"] == { - # "date": "2022-11-11T11:48:54.228038", - # "expiration": None, - # "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - # "uses_left": 1, - # } + assert test_token in repo.get_tokens() + assert repo.get_recovery_key() == RecoveryKey( + key="ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=None, + uses_left=0, + ) ################## From f96d8b7d7cdd39cda92a1b91dd51bbdf3925c0e9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 9 Dec 2022 
10:06:12 +0000 Subject: [PATCH 050/129] test(tokens-repo): make another mock token generator agnostic --- tests/test_graphql/test_repository/test_tokens_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index f7aae99..2a5b367 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -100,7 +100,7 @@ def mock_new_device_key_generate(mocker): @pytest.fixture def mock_generate_token(mocker): mock = mocker.patch( - "selfprivacy_api.repositories.tokens.json_tokens_repository.Token.generate", + "selfprivacy_api.models.tokens.token.Token.generate", autospec=True, return_value=Token( token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", From dc778b545e9cec45325e635378e89084b242cda1 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 9 Dec 2022 10:12:52 +0000 Subject: [PATCH 051/129] test(tokens-repo): get new device key --- .../test_repository/test_tokens_repository.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 2a5b367..118162d 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -86,7 +86,7 @@ def null_keys(mocker, datadir): @pytest.fixture def mock_new_device_key_generate(mocker): mock = mocker.patch( - "selfprivacy_api.repositories.tokens.json_tokens_repository.NewDeviceKey.generate", + "selfprivacy_api.models.tokens.new_device_key.NewDeviceKey.generate", autospec=True, return_value=NewDeviceKey( key="43478d05b35e4781598acd76e33832bb", @@ -449,15 +449,14 @@ def test_use_mnemonic_recovery_key( ################## -def test_get_new_device_key(tokens, mock_new_device_key_generate): 
- repo = JsonTokensRepository() +def test_get_new_device_key(some_tokens_repo, mock_new_device_key_generate): + repo = some_tokens_repo - assert repo.get_new_device_key() is not None - # assert read_json(tokens / "tokens.json")["new_device"] == { - # "date": "2022-07-15T17:41:31.675698", - # "expiration": "2022-07-15T17:41:31.675698", - # "token": "43478d05b35e4781598acd76e33832bb", - # } + assert repo.get_new_device_key() == NewDeviceKey( + key="43478d05b35e4781598acd76e33832bb", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) def test_delete_new_device_key(tokens): From 73584872f053a36ae5abacc15b7cd623e75d7506 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 9 Dec 2022 10:41:22 +0000 Subject: [PATCH 052/129] test(tokens-repo): agnosticise simple new device key tests the state of json file is tested separately in test_json_tokens_repository.py --- .../test_repository/test_tokens_repository.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 118162d..2c74a47 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -206,6 +206,7 @@ def some_tokens_repo(empty_repo): assert len(empty_repo.get_tokens()) == len(ORIGINAL_DEVICE_NAMES) for i, t in enumerate(empty_repo.get_tokens()): assert t.device_name == ORIGINAL_DEVICE_NAMES[i] + assert empty_repo.get_new_device_key() is not None return empty_repo @@ -459,18 +460,17 @@ def test_get_new_device_key(some_tokens_repo, mock_new_device_key_generate): ) -def test_delete_new_device_key(tokens): - repo = JsonTokensRepository() +def test_delete_new_device_key(some_tokens_repo): + repo = some_tokens_repo assert repo.delete_new_device_key() is None - # assert "new_device" not in read_json(tokens / "tokens.json") + # 
we cannot say if there is ot not without creating it? -def test_delete_new_device_key_when_empty(empty_keys): - repo = JsonTokensRepository() +def test_delete_new_device_key_when_empty(empty_repo): + repo = empty_repo - repo.delete_new_device_key() - # assert "new_device" not in read_json(empty_keys / "empty_keys.json") + assert repo.delete_new_device_key() is None def test_use_invalid_mnemonic_new_device_key( From 3feebd5290c5b5e7788a4a5b8db015aea46eef03 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 9 Dec 2022 10:51:32 +0000 Subject: [PATCH 053/129] test(tokens-repo): invalid mnemonic new device key --- .../test_graphql/test_repository/test_tokens_repository.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 2c74a47..28741ef 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -473,10 +473,8 @@ def test_delete_new_device_key_when_empty(empty_repo): assert repo.delete_new_device_key() is None -def test_use_invalid_mnemonic_new_device_key( - tokens, mock_new_device_key_generate, datadir, mock_token_generate -): - repo = JsonTokensRepository() +def test_use_invalid_mnemonic_new_device_key(some_tokens_repo): + repo = some_tokens_repo with pytest.raises(InvalidMnemonic): assert ( From cf7b7eb8a70f7d471b2032d00411cf0a33249c36 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 9 Dec 2022 10:57:11 +0000 Subject: [PATCH 054/129] test(tokens-repo): notfound mnemonic new device key --- tests/test_graphql/test_repository/test_tokens_repository.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 28741ef..6619e1f 100644 --- 
a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -487,9 +487,10 @@ def test_use_invalid_mnemonic_new_device_key(some_tokens_repo): def test_use_not_exists_mnemonic_new_device_key( - tokens, mock_new_device_key_generate, mock_token_generate + empty_repo, mock_new_device_key_generate ): - repo = JsonTokensRepository() + repo = empty_repo + assert repo.get_new_device_key() is not None with pytest.raises(NewDeviceKeyNotFound): assert ( From ce411e92912df70771f3ae68cf78ebfacb396200 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 9 Dec 2022 11:28:07 +0000 Subject: [PATCH 055/129] test(tokens-repo): using a mnemonic device key --- .../test_repository/test_tokens_repository.py | 31 +++++++++++++++++-- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 6619e1f..677170b 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -97,6 +97,21 @@ def mock_new_device_key_generate(mocker): return mock +# mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", +@pytest.fixture +def mock_new_device_key_generate_for_mnemonic(mocker): + mock = mocker.patch( + "selfprivacy_api.models.tokens.new_device_key.NewDeviceKey.generate", + autospec=True, + return_value=NewDeviceKey( + key="2237238de23dc71ab558e317bdb8ff8e", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ), + ) + return mock + + @pytest.fixture def mock_generate_token(mocker): mock = mocker.patch( @@ -503,9 +518,10 @@ def test_use_not_exists_mnemonic_new_device_key( def test_use_mnemonic_new_device_key( - tokens, mock_new_device_key_generate, mock_token_generate + empty_repo, 
mock_new_device_key_generate_for_mnemonic ): - repo = JsonTokensRepository() + repo = empty_repo + assert repo.get_new_device_key() is not None assert ( repo.use_mnemonic_new_device_key( @@ -514,7 +530,16 @@ def test_use_mnemonic_new_device_key( ) is not None ) - # assert read_json(datadir / "tokens.json")["new_device"] == [] + + # we must delete the key after use + with pytest.raises(NewDeviceKeyNotFound): + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + ) + is None + ) def test_use_mnemonic_new_device_key_when_empty(empty_keys): From be13d6163e4eac35ae526e1de0ad714d883fac9d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 9 Dec 2022 11:30:33 +0000 Subject: [PATCH 056/129] test(tokens-repo): use a mnemonic device key on an empty repo --- tests/test_graphql/test_repository/test_tokens_repository.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 677170b..3bd35b9 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -542,8 +542,8 @@ def test_use_mnemonic_new_device_key( ) -def test_use_mnemonic_new_device_key_when_empty(empty_keys): - repo = JsonTokensRepository() +def test_use_mnemonic_new_device_key_when_empty(empty_repo): + repo = empty_repo with pytest.raises(NewDeviceKeyNotFound): assert ( From 84bfa333fa9cf2283bfd524e76f225f69098ecd9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 9 Dec 2022 11:35:29 +0000 Subject: [PATCH 057/129] test(tokens-repo): move new device key null test to json tests --- .../test_repository/test_json_tokens_repository.py | 14 ++++++++++++++ .../test_repository/test_tokens_repository.py | 13 ------------- 2 files changed, 14 insertions(+), 13 deletions(-) diff 
--git a/tests/test_graphql/test_repository/test_json_tokens_repository.py b/tests/test_graphql/test_repository/test_json_tokens_repository.py index a12c0de..feb3b50 100644 --- a/tests/test_graphql/test_repository/test_json_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_json_tokens_repository.py @@ -14,6 +14,7 @@ from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.repositories.tokens.exceptions import ( TokenNotFound, RecoveryKeyNotFound, + NewDeviceKeyNotFound, ) from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, @@ -167,3 +168,16 @@ def test_delete_new_device_key_when_empty(empty_keys): repo.delete_new_device_key() assert "new_device" not in read_json(empty_keys / "empty_keys.json") + + +def test_use_mnemonic_new_device_key_when_null(null_keys): + repo = JsonTokensRepository() + + with pytest.raises(NewDeviceKeyNotFound): + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + ) + is None + ) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 3bd35b9..c16f5a3 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -553,16 +553,3 @@ def test_use_mnemonic_new_device_key_when_empty(empty_repo): ) is None ) - - -def test_use_mnemonic_new_device_key_when_null(null_keys): - repo = JsonTokensRepository() - - with pytest.raises(NewDeviceKeyNotFound): - assert ( - repo.use_mnemonic_new_device_key( - device_name="imnew", - mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", - ) - is None - ) From 4492bbe99599149c1f248fb3ddfe76db9aecadac Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 9 Dec 2022 11:48:11 +0000 Subject: [PATCH 058/129] 
test(tokens-repo): move null keys and tokens fixtures to json tests and remove corresponding json files from the folder --- .../test_json_tokens_repository.py | 17 +++++++-- .../test_repository/test_tokens_repository.py | 15 -------- .../test_tokens_repository/null_keys.json | 26 -------------- .../test_tokens_repository/tokens.json | 35 ------------------- 4 files changed, 15 insertions(+), 78 deletions(-) delete mode 100644 tests/test_graphql/test_repository/test_tokens_repository/null_keys.json delete mode 100644 tests/test_graphql/test_repository/test_tokens_repository/tokens.json diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository.py b/tests/test_graphql/test_repository/test_json_tokens_repository.py index feb3b50..ac4a3d0 100644 --- a/tests/test_graphql/test_repository/test_json_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_json_tokens_repository.py @@ -23,15 +23,28 @@ from tests.common import read_json from test_tokens_repository import ORIGINAL_TOKEN_CONTENT from test_tokens_repository import ( - tokens, mock_recovery_key_generate, mock_generate_token, mock_new_device_key_generate, empty_keys, - null_keys, ) +@pytest.fixture +def tokens(mocker, datadir): + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json") + assert read_json(datadir / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT + return datadir + + +@pytest.fixture +def null_keys(mocker, datadir): + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json") + assert read_json(datadir / "null_keys.json")["recovery_token"] is None + assert read_json(datadir / "null_keys.json")["new_device"] is None + return datadir + + def test_delete_token(tokens): repo = JsonTokensRepository() input_token = Token( diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index c16f5a3..a7affd6 100644 --- 
a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -55,13 +55,6 @@ ORIGINAL_DEVICE_NAMES = [ ] -@pytest.fixture -def tokens(mocker, datadir): - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json") - assert read_json(datadir / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT - return datadir - - @pytest.fixture def empty_keys(mocker, datadir): mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "empty_keys.json") @@ -75,14 +68,6 @@ def empty_keys(mocker, datadir): return datadir -@pytest.fixture -def null_keys(mocker, datadir): - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json") - assert read_json(datadir / "null_keys.json")["recovery_token"] is None - assert read_json(datadir / "null_keys.json")["new_device"] is None - return datadir - - @pytest.fixture def mock_new_device_key_generate(mocker): mock = mocker.patch( diff --git a/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json b/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json deleted file mode 100644 index 45e6f90..0000000 --- a/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z" - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z" - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698" - } - ], - "recovery_token": null, - "new_device": null -} diff --git a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json 
b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json deleted file mode 100644 index bb1805c..0000000 --- a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z" - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z" - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698" - } - ], - "recovery_token": { - "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - "date": "2022-11-11T11:48:54.228038", - "expiration": null, - "uses_left": 2 - }, - "new_device": { - "token": "2237238de23dc71ab558e317bdb8ff8e", - "date": "2022-10-26 20:50:47.973212", - "expiration": "2022-10-26 21:00:47.974153" - } -} From c12dca9d9b15b2a55ec6919d3926c3de92c4b065 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 9 Dec 2022 11:56:47 +0000 Subject: [PATCH 059/129] refactor(tokens-repo): delete unused timezone import --- tests/test_graphql/test_repository/test_tokens_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index a7affd6..40d3e7e 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -2,7 +2,7 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -from datetime import datetime, timezone +from datetime import datetime import pytest From 0d748d7ab13e53590c1eace07eb4173c247a273d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: 
Fri, 9 Dec 2022 12:23:36 +0000 Subject: [PATCH 060/129] test(tokens-repo): move original token content to json tests --- .../test_json_tokens_repository.py | 26 +++++++++++++++++-- .../test_repository/test_tokens_repository.py | 23 ---------------- 2 files changed, 24 insertions(+), 25 deletions(-) diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository.py b/tests/test_graphql/test_repository/test_json_tokens_repository.py index ac4a3d0..af8c844 100644 --- a/tests/test_graphql/test_repository/test_json_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_json_tokens_repository.py @@ -19,9 +19,8 @@ from selfprivacy_api.repositories.tokens.exceptions import ( from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, ) -from tests.common import read_json -from test_tokens_repository import ORIGINAL_TOKEN_CONTENT +from tests.common import read_json from test_tokens_repository import ( mock_recovery_key_generate, mock_generate_token, @@ -29,6 +28,29 @@ from test_tokens_repository import ( empty_keys, ) +ORIGINAL_TOKEN_CONTENT = [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698", + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z", + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z", + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698", + }, +] + @pytest.fixture def tokens(mocker, datadir): diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 40d3e7e..0372b92 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ 
-24,29 +24,6 @@ from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( from tests.common import read_json -ORIGINAL_TOKEN_CONTENT = [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698", - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z", - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z", - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698", - }, -] - ORIGINAL_DEVICE_NAMES = [ "primary_token", "second_token", From b856a2aad3b533301965958d6f7a786ea7f487cb Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 9 Dec 2022 12:25:20 +0000 Subject: [PATCH 061/129] test(tokens-repo): re-add delete token test --- .../test_repository/test_tokens_repository.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 0372b92..df56444 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -248,6 +248,20 @@ def test_create_token(empty_repo, mock_token_generate): ] +def test_delete_token(some_tokens_repo): + repo = some_tokens_repo + original_tokens = repo.get_tokens() + input_token = original_tokens[1] + + repo.delete_token(input_token) + + tokens_after_delete = repo.get_tokens() + for token in original_tokens: + if token != input_token: + assert token in tokens_after_delete + assert len(original_tokens) == len(tokens_after_delete) + 1 + + def test_delete_not_found_token(some_tokens_repo): repo = some_tokens_repo tokens = repo.get_tokens() From ff264ec808f18509d1138f756911f6a241a80391 Mon Sep 17 00:00:00 2001 From: Houkime <> 
Date: Mon, 12 Dec 2022 10:06:31 +0000 Subject: [PATCH 062/129] refactor(tokens-repo): simplify getting tokens get_token_by_token_string and get_token_by_name are no longer tied to json. --- .../tokens/json_tokens_repository.py | 26 ++++++------------- 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index aad3158..30d8021 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -25,29 +25,19 @@ DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" class JsonTokensRepository(AbstractTokensRepository): def get_token_by_token_string(self, token_string: str) -> Optional[Token]: """Get the token by token""" - with ReadUserData(UserDataFiles.TOKENS) as tokens_file: - for userdata_token in tokens_file["tokens"]: - if userdata_token["token"] == token_string: - - return Token( - token=token_string, - device_name=userdata_token["name"], - created_at=userdata_token["date"], - ) + tokens = self.get_tokens() + for token in tokens: + if token.token == token_string: + return token raise TokenNotFound("Token not found!") def get_token_by_name(self, token_name: str) -> Optional[Token]: """Get the token by name""" - with ReadUserData(UserDataFiles.TOKENS) as tokens_file: - for userdata_token in tokens_file["tokens"]: - if userdata_token["name"] == token_name: - - return Token( - token=userdata_token["token"], - device_name=token_name, - created_at=userdata_token["date"], - ) + tokens = self.get_tokens() + for token in tokens: + if token.device_name == token_name: + return token raise TokenNotFound("Token not found!") From 4e60d1d37ac14040ebb170743a672164018d3fc4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 12 Dec 2022 10:15:33 +0000 Subject: [PATCH 063/129] refactor(tokens-repo): move token getters to abstract class Not performance-optimal, but not in critical 
path either. 100 tokens max irl? --- .../tokens/abstract_tokens_repository.py | 15 +++++++++++++-- .../tokens/json_tokens_repository.py | 18 ------------------ 2 files changed, 13 insertions(+), 20 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 3cf6e1d..2840917 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -3,18 +3,29 @@ from datetime import datetime from typing import Optional from selfprivacy_api.models.tokens.token import Token +from selfprivacy_api.repositories.tokens.exceptions import TokenNotFound from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey class AbstractTokensRepository(ABC): - @abstractmethod def get_token_by_token_string(self, token_string: str) -> Optional[Token]: """Get the token by token""" + tokens = self.get_tokens() + for token in tokens: + if token.token == token_string: + return token + + raise TokenNotFound("Token not found!") - @abstractmethod def get_token_by_name(self, token_name: str) -> Optional[Token]: """Get the token by name""" + tokens = self.get_tokens() + for token in tokens: + if token.device_name == token_name: + return token + + raise TokenNotFound("Token not found!") @abstractmethod def get_tokens(self) -> list[Token]: diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 30d8021..86e756a 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -23,24 +23,6 @@ DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" class JsonTokensRepository(AbstractTokensRepository): - def get_token_by_token_string(self, token_string: str) -> Optional[Token]: - """Get the 
token by token""" - tokens = self.get_tokens() - for token in tokens: - if token.token == token_string: - return token - - raise TokenNotFound("Token not found!") - - def get_token_by_name(self, token_name: str) -> Optional[Token]: - """Get the token by name""" - tokens = self.get_tokens() - for token in tokens: - if token.device_name == token_name: - return token - - raise TokenNotFound("Token not found!") - def get_tokens(self) -> list[Token]: """Get the tokens""" tokens_list = [] From 27a7c24bc35f415977c611c1e00a2909d4c5cbc5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 12 Dec 2022 10:42:16 +0000 Subject: [PATCH 064/129] refactor(tokens-repo): separate token storing --- .../repositories/tokens/json_tokens_repository.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 86e756a..e011b62 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -43,6 +43,11 @@ class JsonTokensRepository(AbstractTokensRepository): """Create new token""" new_token = Token.generate(device_name) + self.__store_token(new_token) + + return new_token + + def __store_token(self, new_token: Token): with WriteUserData(UserDataFiles.TOKENS) as tokens_file: tokens_file["tokens"].append( { @@ -51,7 +56,6 @@ class JsonTokensRepository(AbstractTokensRepository): "date": new_token.created_at.strftime(DATETIME_FORMAT), } ) - return new_token def delete_token(self, input_token: Token) -> None: """Delete the token""" From 572ec75c39db21c05282bde443276092b14a5f0a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 12 Dec 2022 11:15:25 +0000 Subject: [PATCH 065/129] refactor(tokens-repo): rewrite token refresh now it is not json-dependent. 
--- .../repositories/tokens/json_tokens_repository.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index e011b62..2f4a0b1 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -71,16 +71,10 @@ class JsonTokensRepository(AbstractTokensRepository): """Change the token field of the existing token""" new_token = Token.generate(device_name=input_token.device_name) - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - for userdata_token in tokens_file["tokens"]: - - if userdata_token["name"] == input_token.device_name: - userdata_token["token"] = new_token.token - userdata_token["date"] = ( - new_token.created_at.strftime(DATETIME_FORMAT), - ) - - return new_token + if input_token in self.get_tokens(): + self.delete_token(input_token) + self.__store_token(new_token) + return new_token raise TokenNotFound("Token not found!") From 682cd4ae87ad73cb3a23655bec6cbd29ed66d606 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 12 Dec 2022 11:50:04 +0000 Subject: [PATCH 066/129] refactor(tokens-repo): move create_token to abstract class --- .../tokens/abstract_tokens_repository.py | 10 +++++++++- .../repositories/tokens/json_tokens_repository.py | 13 +++---------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 2840917..bb77c1d 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -31,9 +31,13 @@ class AbstractTokensRepository(ABC): def get_tokens(self) -> list[Token]: """Get the tokens""" - @abstractmethod def create_token(self, device_name: str) -> Token: """Create 
new token""" + new_token = Token.generate(device_name) + + self._store_token(new_token) + + return new_token @abstractmethod def delete_token(self, input_token: Token) -> None: @@ -102,3 +106,7 @@ class AbstractTokensRepository(ABC): self, mnemonic_phrase: str, device_name: str ) -> Token: """Use the mnemonic new device key""" + + @abstractmethod + def _store_token(self, new_token: Token): + """Store a token directly""" diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 2f4a0b1..963e3a9 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -39,15 +39,8 @@ class JsonTokensRepository(AbstractTokensRepository): return tokens_list - def create_token(self, device_name: str) -> Token: - """Create new token""" - new_token = Token.generate(device_name) - - self.__store_token(new_token) - - return new_token - - def __store_token(self, new_token: Token): + def _store_token(self, new_token: Token): + """Store a token directly""" with WriteUserData(UserDataFiles.TOKENS) as tokens_file: tokens_file["tokens"].append( { @@ -73,7 +66,7 @@ class JsonTokensRepository(AbstractTokensRepository): if input_token in self.get_tokens(): self.delete_token(input_token) - self.__store_token(new_token) + self._store_token(new_token) return new_token raise TokenNotFound("Token not found!") From 9a49067e53b1c7c9634ebcb0fec622fede60c725 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 12 Dec 2022 11:55:49 +0000 Subject: [PATCH 067/129] refactor(tokens-repo): move token refreshing to parent class --- .../repositories/tokens/abstract_tokens_repository.py | 11 +++++++++-- .../repositories/tokens/json_tokens_repository.py | 11 ----------- 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py 
b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index bb77c1d..29c96a4 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -43,9 +43,16 @@ class AbstractTokensRepository(ABC): def delete_token(self, input_token: Token) -> None: """Delete the token""" - @abstractmethod def refresh_token(self, input_token: Token) -> Token: - """Refresh the token""" + """Change the token field of the existing token""" + new_token = Token.generate(device_name=input_token.device_name) + + if input_token in self.get_tokens(): + self.delete_token(input_token) + self._store_token(new_token) + return new_token + + raise TokenNotFound("Token not found!") def is_token_valid(self, token_string: str) -> bool: """Check if the token is valid""" diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 963e3a9..c7d716f 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -60,17 +60,6 @@ class JsonTokensRepository(AbstractTokensRepository): raise TokenNotFound("Token not found!") - def refresh_token(self, input_token: Token) -> Token: - """Change the token field of the existing token""" - new_token = Token.generate(device_name=input_token.device_name) - - if input_token in self.get_tokens(): - self.delete_token(input_token) - self._store_token(new_token) - return new_token - - raise TokenNotFound("Token not found!") - def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" with ReadUserData(UserDataFiles.TOKENS) as tokens_file: From 671203e99043fe3eeb478ff07c67bb75814a3534 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 12 Dec 2022 14:06:24 +0000 Subject: [PATCH 068/129] refactor(tokens-repository): dissect use_mnemonic_recovery_key() --- 
.../tokens/json_tokens_repository.py | 42 +++++++------------ 1 file changed, 16 insertions(+), 26 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index c7d716f..1ebdca8 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -102,41 +102,31 @@ class JsonTokensRepository(AbstractTokensRepository): self, mnemonic_phrase: str, device_name: str ) -> Token: """Use the mnemonic recovery key and create a new token with the given name""" - recovery_key = self.get_recovery_key() - - if recovery_key is None: + if not self.is_recovery_key_valid(): raise RecoveryKeyNotFound("Recovery key not found") - if not recovery_key.is_valid(): + recovery_hex_key = self.get_recovery_key().key + if not self._assert_mnemonic(recovery_hex_key, mnemonic_phrase): raise RecoveryKeyNotFound("Recovery key not found") - recovery_token = bytes.fromhex(recovery_key.key) + new_token = self.create_token(device_name=device_name) + self._decrement_recovery_token() + + return new_token + + def _decrement_recovery_token(self): + if self.is_recovery_key_valid(): + with WriteUserData(UserDataFiles.TOKENS) as tokens: + tokens["recovery_token"]["uses_left"] -= 1 + + def _assert_mnemonic(self, hex_key: str, mnemonic_phrase: str): + recovery_token = bytes.fromhex(hex_key) if not Mnemonic(language="english").check(mnemonic_phrase): raise InvalidMnemonic("Phrase is not mnemonic!") phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) - if phrase_bytes != recovery_token: - raise RecoveryKeyNotFound("Recovery key not found") - - new_token = Token.generate(device_name=device_name) - - with WriteUserData(UserDataFiles.TOKENS) as tokens: - tokens["tokens"].append( - { - "token": new_token.token, - "name": new_token.device_name, - "date": new_token.created_at.strftime(DATETIME_FORMAT), - } - ) - - if 
"recovery_token" in tokens: - if ( - "uses_left" in tokens["recovery_token"] - and tokens["recovery_token"]["uses_left"] is not None - ): - tokens["recovery_token"]["uses_left"] -= 1 - return new_token + return phrase_bytes == recovery_token def get_new_device_key(self) -> NewDeviceKey: """Creates and returns the new device key""" From 772c0dfc64f149f94a1a90352b3286af75333a36 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 12 Dec 2022 14:22:36 +0000 Subject: [PATCH 069/129] refactor(tokens-repository): move use_mnemonic_recovery_key() to abstract class --- .../tokens/abstract_tokens_repository.py | 35 +++++++++++++++++-- .../tokens/json_tokens_repository.py | 27 +------------- 2 files changed, 34 insertions(+), 28 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 29c96a4..82a0189 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -1,9 +1,14 @@ from abc import ABC, abstractmethod from datetime import datetime from typing import Optional +from mnemonic import Mnemonic from selfprivacy_api.models.tokens.token import Token -from selfprivacy_api.repositories.tokens.exceptions import TokenNotFound +from selfprivacy_api.repositories.tokens.exceptions import ( + TokenNotFound, + InvalidMnemonic, + RecoveryKeyNotFound, +) from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey @@ -87,11 +92,22 @@ class AbstractTokensRepository(ABC): ) -> RecoveryKey: """Create the recovery key""" - @abstractmethod def use_mnemonic_recovery_key( self, mnemonic_phrase: str, device_name: str ) -> Token: """Use the mnemonic recovery key and create a new token with the given name""" + if not self.is_recovery_key_valid(): + raise RecoveryKeyNotFound("Recovery key not found") + + recovery_hex_key = 
self.get_recovery_key().key + if not self._assert_mnemonic(recovery_hex_key, mnemonic_phrase): + raise RecoveryKeyNotFound("Recovery key not found") + + new_token = self.create_token(device_name=device_name) + + self._decrement_recovery_token() + + return new_token def is_recovery_key_valid(self) -> bool: """Check if the recovery key is valid""" @@ -117,3 +133,18 @@ class AbstractTokensRepository(ABC): @abstractmethod def _store_token(self, new_token: Token): """Store a token directly""" + + @abstractmethod + def _decrement_recovery_token(self): + """Decrement recovery key use count by one""" + + # TODO: find a proper place for it + def _assert_mnemonic(self, hex_key: str, mnemonic_phrase: str): + """Return true if hex string matches the phrase, false otherwise + Raise an InvalidMnemonic error if not mnemonic""" + recovery_token = bytes.fromhex(hex_key) + if not Mnemonic(language="english").check(mnemonic_phrase): + raise InvalidMnemonic("Phrase is not mnemonic!") + + phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) + return phrase_bytes == recovery_token diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 1ebdca8..50d8869 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -11,7 +11,6 @@ from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey from selfprivacy_api.repositories.tokens.exceptions import ( TokenNotFound, - RecoveryKeyNotFound, InvalidMnemonic, NewDeviceKeyNotFound, ) @@ -98,36 +97,12 @@ class JsonTokensRepository(AbstractTokensRepository): return recovery_key - def use_mnemonic_recovery_key( - self, mnemonic_phrase: str, device_name: str - ) -> Token: - """Use the mnemonic recovery key and create a new token with the given name""" - if not 
self.is_recovery_key_valid(): - raise RecoveryKeyNotFound("Recovery key not found") - - recovery_hex_key = self.get_recovery_key().key - if not self._assert_mnemonic(recovery_hex_key, mnemonic_phrase): - raise RecoveryKeyNotFound("Recovery key not found") - - new_token = self.create_token(device_name=device_name) - - self._decrement_recovery_token() - - return new_token - def _decrement_recovery_token(self): + """Decrement recovery key use count by one""" if self.is_recovery_key_valid(): with WriteUserData(UserDataFiles.TOKENS) as tokens: tokens["recovery_token"]["uses_left"] -= 1 - def _assert_mnemonic(self, hex_key: str, mnemonic_phrase: str): - recovery_token = bytes.fromhex(hex_key) - if not Mnemonic(language="english").check(mnemonic_phrase): - raise InvalidMnemonic("Phrase is not mnemonic!") - - phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) - return phrase_bytes == recovery_token - def get_new_device_key(self) -> NewDeviceKey: """Creates and returns the new device key""" new_device_key = NewDeviceKey.generate() From 4498003aca13019f59fd058f46b9439c61f9cb88 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 12 Dec 2022 15:19:00 +0000 Subject: [PATCH 070/129] refactor(tokens-repository): dissect use_mnemonic_new_device_key() --- .../tokens/json_tokens_repository.py | 28 +++++++++---------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 50d8869..55316d6 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -123,33 +123,31 @@ class JsonTokensRepository(AbstractTokensRepository): del tokens_file["new_device"] return - def use_mnemonic_new_device_key( - self, mnemonic_phrase: str, device_name: str - ) -> Token: - """Use the mnemonic new device key""" - + def _get_stored_new_device_key(self) -> 
Optional[NewDeviceKey]: + """Retrieves new device key that is already stored.""" with ReadUserData(UserDataFiles.TOKENS) as tokens_file: if "new_device" not in tokens_file or tokens_file["new_device"] is None: - raise NewDeviceKeyNotFound("New device key not found") + return new_device_key = NewDeviceKey( key=tokens_file["new_device"]["token"], created_at=tokens_file["new_device"]["date"], expires_at=tokens_file["new_device"]["expiration"], ) + return new_device_key - token = bytes.fromhex(new_device_key.key) + def use_mnemonic_new_device_key( + self, mnemonic_phrase: str, device_name: str + ) -> Token: + """Use the mnemonic new device key""" + new_device_key = self._get_stored_new_device_key() + if not new_device_key: + raise NewDeviceKeyNotFound - if not Mnemonic(language="english").check(mnemonic_phrase): - raise InvalidMnemonic("Phrase is not mnemonic!") - - phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) - if bytes(phrase_bytes) != bytes(token): + if not self._assert_mnemonic(new_device_key.key, mnemonic_phrase): raise NewDeviceKeyNotFound("Phrase is not token!") new_token = Token.generate(device_name=device_name) - with WriteUserData(UserDataFiles.TOKENS) as tokens: - if "new_device" in tokens: - del tokens["new_device"] + self.delete_new_device_key() return new_token From 2797c6f88f4309b72695e86409028bf335129bb9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 12 Dec 2022 15:36:38 +0000 Subject: [PATCH 071/129] fix(tokens-repository): use_mnemonic_new_device_key() now stores a token --- .../repositories/tokens/json_tokens_repository.py | 2 +- .../test_repository/test_tokens_repository.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 55316d6..12826ba 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ 
b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -147,7 +147,7 @@ class JsonTokensRepository(AbstractTokensRepository): if not self._assert_mnemonic(new_device_key.key, mnemonic_phrase): raise NewDeviceKeyNotFound("Phrase is not token!") - new_token = Token.generate(device_name=device_name) + new_token = self.create_token(device_name=device_name) self.delete_new_device_key() return new_token diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index df56444..bdad6d8 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -499,14 +499,14 @@ def test_use_mnemonic_new_device_key( repo = empty_repo assert repo.get_new_device_key() is not None - assert ( - repo.use_mnemonic_new_device_key( - device_name="imnew", - mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", - ) - is not None + new_token = repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", ) + assert new_token.device_name == "imnew" + assert new_token in repo.get_tokens() + # we must delete the key after use with pytest.raises(NewDeviceKeyNotFound): assert ( From ca822cdf6fe1b4e84c2c33d940044b2cb6db3318 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 12 Dec 2022 15:43:58 +0000 Subject: [PATCH 072/129] refactor(tokens-repository): move use_mnemonic_new_device_key() to abstract class --- .../tokens/abstract_tokens_repository.py | 17 ++++++++++++++++- .../tokens/json_tokens_repository.py | 19 ------------------- 2 files changed, 16 insertions(+), 20 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 82a0189..a67d62d 100644 --- 
a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -8,6 +8,7 @@ from selfprivacy_api.repositories.tokens.exceptions import ( TokenNotFound, InvalidMnemonic, RecoveryKeyNotFound, + NewDeviceKeyNotFound, ) from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey @@ -124,11 +125,21 @@ class AbstractTokensRepository(ABC): def delete_new_device_key(self) -> None: """Delete the new device key""" - @abstractmethod def use_mnemonic_new_device_key( self, mnemonic_phrase: str, device_name: str ) -> Token: """Use the mnemonic new device key""" + new_device_key = self._get_stored_new_device_key() + if not new_device_key: + raise NewDeviceKeyNotFound + + if not self._assert_mnemonic(new_device_key.key, mnemonic_phrase): + raise NewDeviceKeyNotFound("Phrase is not token!") + + new_token = self.create_token(device_name=device_name) + self.delete_new_device_key() + + return new_token @abstractmethod def _store_token(self, new_token: Token): @@ -138,6 +149,10 @@ class AbstractTokensRepository(ABC): def _decrement_recovery_token(self): """Decrement recovery key use count by one""" + @abstractmethod + def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]: + """Retrieves new device key that is already stored.""" + # TODO: find a proper place for it def _assert_mnemonic(self, hex_key: str, mnemonic_phrase: str): """Return true if hex string matches the phrase, false otherwise diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 12826ba..b4c0ab2 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -3,7 +3,6 @@ temporary legacy """ from typing import Optional from datetime import datetime -from mnemonic import Mnemonic from 
selfprivacy_api.utils import UserDataFiles, WriteUserData, ReadUserData from selfprivacy_api.models.tokens.token import Token @@ -11,8 +10,6 @@ from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey from selfprivacy_api.repositories.tokens.exceptions import ( TokenNotFound, - InvalidMnemonic, - NewDeviceKeyNotFound, ) from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, @@ -135,19 +132,3 @@ class JsonTokensRepository(AbstractTokensRepository): expires_at=tokens_file["new_device"]["expiration"], ) return new_device_key - - def use_mnemonic_new_device_key( - self, mnemonic_phrase: str, device_name: str - ) -> Token: - """Use the mnemonic new device key""" - new_device_key = self._get_stored_new_device_key() - if not new_device_key: - raise NewDeviceKeyNotFound - - if not self._assert_mnemonic(new_device_key.key, mnemonic_phrase): - raise NewDeviceKeyNotFound("Phrase is not token!") - - new_token = self.create_token(device_name=device_name) - self.delete_new_device_key() - - return new_token From f2fa47466bf3d8aab6f857cd4291fe2034750838 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 14:29:19 +0000 Subject: [PATCH 073/129] feat(tokens-repo):empty implementation of redis token repo But it initializes and fails tests! --- .../tokens/redis_tokens_repository.py | 54 ++++++++++++++++++- .../test_repository/test_tokens_repository.py | 6 +-- 2 files changed, 56 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 0186c11..13d49c9 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -1,9 +1,18 @@ """ Token repository using Redis as backend. 
""" +from typing import Optional +from datetime import datetime + from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, ) +from selfprivacy_api.utils.redis_pool import RedisPool +from selfprivacy_api.models.tokens.token import Token +from selfprivacy_api.models.tokens.recovery_key import RecoveryKey +from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey + +TOKENS_PREFIX = "token_repo:tokens:" class RedisTokensRepository(AbstractTokensRepository): @@ -11,5 +20,48 @@ class RedisTokensRepository(AbstractTokensRepository): Token repository using Redis as a backend """ - def __init__(self) -> None: + def __init__(self): + self.connection = RedisPool().get_connection() + + def token_key_for_device(device_name: str): + return TOKENS_PREFIX + str(hash(device_name)) + + def get_tokens(self) -> list[Token]: + """Get the tokens""" + raise NotImplementedError + + def delete_token(self, input_token: Token) -> None: + """Delete the token""" + raise NotImplementedError + + def get_recovery_key(self) -> Optional[RecoveryKey]: + """Get the recovery key""" + raise NotImplementedError + + def create_recovery_key( + self, + expiration: Optional[datetime], + uses_left: Optional[int], + ) -> RecoveryKey: + """Create the recovery key""" + raise NotImplementedError + + def get_new_device_key(self) -> NewDeviceKey: + """Creates and returns the new device key""" + raise NotImplementedError + + def delete_new_device_key(self) -> None: + """Delete the new device key""" + raise NotImplementedError + + def _store_token(self, new_token: Token): + """Store a token directly""" + raise NotImplementedError + + def _decrement_recovery_token(self): + """Decrement recovery key use count by one""" + raise NotImplementedError + + def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]: + """Retrieves new device key that is already stored.""" raise NotImplementedError diff --git 
a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index bdad6d8..dff1799 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -166,12 +166,12 @@ def empty_redis_repo(): @pytest.fixture(params=["json", "redis"]) -def empty_repo(request, empty_json_repo): +def empty_repo(request, empty_json_repo, empty_redis_repo): if request.param == "json": return empty_json_repo if request.param == "redis": - # return empty_redis_repo - return empty_json_repo + return empty_redis_repo + # return empty_json_repo else: raise NotImplementedError From 256c16fa9fea641d96e0685d038fca7bba30b73a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 14:48:43 +0000 Subject: [PATCH 074/129] feat(tokens-repo): redis get tokens --- .../tokens/redis_tokens_repository.py | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 13d49c9..184fa8f 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -28,7 +28,9 @@ class RedisTokensRepository(AbstractTokensRepository): def get_tokens(self) -> list[Token]: """Get the tokens""" - raise NotImplementedError + r = self.connection + token_keys = r.keys(TOKENS_PREFIX + "*") + return [self._token_from_hash(key) for key in token_keys] def delete_token(self, input_token: Token) -> None: """Delete the token""" @@ -65,3 +67,20 @@ class RedisTokensRepository(AbstractTokensRepository): def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]: """Retrieves new device key that is already stored.""" raise NotImplementedError + + def _token_from_hash(self, redis_key: str) -> Token: + r = self.connection + if r.exists(redis_key): + 
token_dict = r.hgetall(redis_key) + for date in [ + "created_at", + ]: + if token_dict[date] != "None": + token_dict[date] = datetime.datetime.fromisoformat(token_dict[date]) + for key in token_dict.keys(): + if token_dict[key] == "None": + token_dict[key] = None + + return Token(**token_dict) + return None + From d8e3cd67e0517c717a4f0c8c264dc7b9fec3eb70 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 15:21:32 +0000 Subject: [PATCH 075/129] feat(tokens-repo): redis store token --- .../repositories/tokens/redis_tokens_repository.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 184fa8f..72a3cec 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -23,6 +23,7 @@ class RedisTokensRepository(AbstractTokensRepository): def __init__(self): self.connection = RedisPool().get_connection() + @staticmethod def token_key_for_device(device_name: str): return TOKENS_PREFIX + str(hash(device_name)) @@ -58,7 +59,8 @@ class RedisTokensRepository(AbstractTokensRepository): def _store_token(self, new_token: Token): """Store a token directly""" - raise NotImplementedError + key = RedisTokensRepository.token_key_for_device(new_token.device_name) + self._store_token_as_hash(key, new_token) def _decrement_recovery_token(self): """Decrement recovery key use count by one""" @@ -76,7 +78,7 @@ class RedisTokensRepository(AbstractTokensRepository): "created_at", ]: if token_dict[date] != "None": - token_dict[date] = datetime.datetime.fromisoformat(token_dict[date]) + token_dict[date] = datetime.fromisoformat(token_dict[date]) for key in token_dict.keys(): if token_dict[key] == "None": token_dict[key] = None @@ -84,3 +86,9 @@ class RedisTokensRepository(AbstractTokensRepository): return Token(**token_dict) return None + 
def _store_token_as_hash(self, redis_key, model): + r = self.connection + for key, value in model.dict().items(): + if isinstance(value, datetime): + value = value.isoformat() + r.hset(redis_key, key, str(value)) From ba6a5261fa39e428c76b73394d8a17f263ff8ffb Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 15:34:25 +0000 Subject: [PATCH 076/129] refactor(tokens-repo): redis token key func --- .../repositories/tokens/redis_tokens_repository.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 72a3cec..44c32f3 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -57,9 +57,13 @@ class RedisTokensRepository(AbstractTokensRepository): """Delete the new device key""" raise NotImplementedError + @staticmethod + def _token_redis_key(token: Token) -> str: + return RedisTokensRepository.token_key_for_device(token.device_name) + def _store_token(self, new_token: Token): """Store a token directly""" - key = RedisTokensRepository.token_key_for_device(new_token.device_name) + key = RedisTokensRepository._token_redis_key(new_token) self._store_token_as_hash(key, new_token) def _decrement_recovery_token(self): From 647e02f25b3fe84e1b3f4f48f91961ab0349bde4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 16:16:55 +0000 Subject: [PATCH 077/129] feat(tokens-repo): redis delete token --- .../repositories/tokens/redis_tokens_repository.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 44c32f3..86d5e51 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -35,7 
+35,9 @@ class RedisTokensRepository(AbstractTokensRepository): def delete_token(self, input_token: Token) -> None: """Delete the token""" - raise NotImplementedError + r = self.connection + key = RedisTokensRepository._token_redis_key(input_token) + r.delete(key) def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" From e504585437868ba788cd87751494f090d24a79b5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 16:29:38 +0000 Subject: [PATCH 078/129] test(tokens-repo): do not require order --- .../test_repository/test_tokens_repository.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index dff1799..0a0382b 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -181,8 +181,8 @@ def some_tokens_repo(empty_repo): for name in ORIGINAL_DEVICE_NAMES: empty_repo.create_token(name) assert len(empty_repo.get_tokens()) == len(ORIGINAL_DEVICE_NAMES) - for i, t in enumerate(empty_repo.get_tokens()): - assert t.device_name == ORIGINAL_DEVICE_NAMES[i] + for name in ORIGINAL_DEVICE_NAMES: + assert empty_repo.get_token_by_name(name) is not None assert empty_repo.get_new_device_key() is not None return empty_repo @@ -209,8 +209,10 @@ def test_get_token_by_non_existent_token_string(some_tokens_repo): def test_get_token_by_name(some_tokens_repo): repo = some_tokens_repo - assert repo.get_token_by_name(token_name="primary_token") is not None - assert repo.get_token_by_name(token_name="primary_token") == repo.get_tokens()[0] + token = repo.get_token_by_name(token_name="primary_token") + assert token is not None + assert token.device_name == "primary_token" + assert token in repo.get_tokens() def test_get_token_by_non_existent_name(some_tokens_repo): From 3cb7f295934f20e5bc7af1f0d40a4e185892159e Mon Sep 17 
00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 17:03:22 +0000 Subject: [PATCH 079/129] refactor(tokens-repo): detach preparing a dict before a model cast --- .../tokens/redis_tokens_repository.py | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 86d5e51..accaacc 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -76,18 +76,22 @@ class RedisTokensRepository(AbstractTokensRepository): """Retrieves new device key that is already stored.""" raise NotImplementedError + @staticmethod + def _prepare_model_dict(d: dict): + for date in [ + "created_at", + ]: + if d[date] != "None": + d[date] = datetime.fromisoformat(d[date]) + for key in d.keys(): + if d[key] == "None": + d[key] = None + def _token_from_hash(self, redis_key: str) -> Token: r = self.connection if r.exists(redis_key): token_dict = r.hgetall(redis_key) - for date in [ - "created_at", - ]: - if token_dict[date] != "None": - token_dict[date] = datetime.fromisoformat(token_dict[date]) - for key in token_dict.keys(): - if token_dict[key] == "None": - token_dict[key] = None + RedisTokensRepository._prepare_model_dict(token_dict) return Token(**token_dict) return None From b98ccb88d162276f5bdc11978c954a47887c5d66 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 17:10:32 +0000 Subject: [PATCH 080/129] refactor(tokens-repo): separate getting model dict --- .../repositories/tokens/redis_tokens_repository.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index accaacc..af45384 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ 
b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -87,12 +87,17 @@ class RedisTokensRepository(AbstractTokensRepository): if d[key] == "None": d[key] = None - def _token_from_hash(self, redis_key: str) -> Token: + def _model_dict_from_hash(self, redis_key: str) -> Optional[dict]: r = self.connection if r.exists(redis_key): token_dict = r.hgetall(redis_key) RedisTokensRepository._prepare_model_dict(token_dict) + return token_dict + return None + def _token_from_hash(self, redis_key: str) -> Optional[Token]: + token_dict = self._model_dict_from_hash(redis_key) + if token_dict is not None: return Token(**token_dict) return None From 9ffd67fa1947f0ad2835b78adbf94240d84ec507 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 17:20:09 +0000 Subject: [PATCH 081/129] feat(tokens-repo): get new device key --- .../repositories/tokens/redis_tokens_repository.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index af45384..f99d215 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -13,6 +13,7 @@ from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey TOKENS_PREFIX = "token_repo:tokens:" +NEW_DEVICE_KEY_REDIS_KEY = "token_repo:new_device_key" class RedisTokensRepository(AbstractTokensRepository): @@ -53,7 +54,9 @@ class RedisTokensRepository(AbstractTokensRepository): def get_new_device_key(self) -> NewDeviceKey: """Creates and returns the new device key""" - raise NotImplementedError + new_device_key = NewDeviceKey.generate() + self._store_model_as_hash(NEW_DEVICE_KEY_REDIS_KEY, new_device_key) + return NewDeviceKey def delete_new_device_key(self) -> None: """Delete the new device key""" @@ -66,7 +69,7 @@ class 
RedisTokensRepository(AbstractTokensRepository): def _store_token(self, new_token: Token): """Store a token directly""" key = RedisTokensRepository._token_redis_key(new_token) - self._store_token_as_hash(key, new_token) + self._store_model_as_hash(key, new_token) def _decrement_recovery_token(self): """Decrement recovery key use count by one""" @@ -101,7 +104,7 @@ class RedisTokensRepository(AbstractTokensRepository): return Token(**token_dict) return None - def _store_token_as_hash(self, redis_key, model): + def _store_model_as_hash(self, redis_key, model): r = self.connection for key, value in model.dict().items(): if isinstance(value, datetime): From 95e200bfc5b7dc99ee22ca92154a294ef2dfe661 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 17:31:32 +0000 Subject: [PATCH 082/129] feat(tokens-repo): reset function --- .../repositories/tokens/redis_tokens_repository.py | 5 +++++ tests/test_graphql/test_repository/test_tokens_repository.py | 3 +-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index f99d215..be3615f 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -11,6 +11,7 @@ from selfprivacy_api.utils.redis_pool import RedisPool from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey +from selfprivacy_api.repositories.tokens.exceptions import TokenNotFound TOKENS_PREFIX = "token_repo:tokens:" NEW_DEVICE_KEY_REDIS_KEY = "token_repo:new_device_key" @@ -40,6 +41,10 @@ class RedisTokensRepository(AbstractTokensRepository): key = RedisTokensRepository._token_redis_key(input_token) r.delete(key) + def reset(self): + for token in self.get_tokens(): + self.delete_token(token) + def 
get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" raise NotImplementedError diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 0a0382b..05ad77b 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -159,8 +159,7 @@ def empty_json_repo(empty_keys): @pytest.fixture def empty_redis_repo(): repo = RedisTokensRepository() - for token in repo.get_tokens(): - repo.delete_token(token) + repo.reset() assert repo.get_tokens() == [] return repo From bf6c230ae08c01aa97316ac31bce182143948edf Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 17:41:47 +0000 Subject: [PATCH 083/129] fix(tokens-repo): raise token not found when deleting nonexistent token even if device name exists --- selfprivacy_api/repositories/tokens/redis_tokens_repository.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index be3615f..8432709 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -39,6 +39,8 @@ class RedisTokensRepository(AbstractTokensRepository): """Delete the token""" r = self.connection key = RedisTokensRepository._token_redis_key(input_token) + if input_token not in self.get_tokens(): + raise TokenNotFound r.delete(key) def reset(self): From 257096084f872f160e327bf1d2513b6ce3762578 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 17:51:51 +0000 Subject: [PATCH 084/129] refactor(tokens-repo): split out date field detection --- .../repositories/tokens/redis_tokens_repository.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py 
b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 8432709..3a23911 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -87,10 +87,15 @@ class RedisTokensRepository(AbstractTokensRepository): raise NotImplementedError @staticmethod - def _prepare_model_dict(d: dict): - for date in [ + def _is_date_key(key: str): + return key in [ "created_at", - ]: + ] + + @staticmethod + def _prepare_model_dict(d: dict): + date_keys = [key for key in d.keys() if RedisTokensRepository._is_date_key(key)] + for date in date_keys: if d[date] != "None": d[date] = datetime.fromisoformat(d[date]) for key in d.keys(): From 4579fec569ff6da3c2f1b789b47c76be8d46e290 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 18:01:34 +0000 Subject: [PATCH 085/129] feat(tokens-repo): get recovery key --- .../repositories/tokens/redis_tokens_repository.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 3a23911..ae91b32 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -15,6 +15,7 @@ from selfprivacy_api.repositories.tokens.exceptions import TokenNotFound TOKENS_PREFIX = "token_repo:tokens:" NEW_DEVICE_KEY_REDIS_KEY = "token_repo:new_device_key" +RECOVERY_KEY_REDIS_KEY = "token_repo:recovery_key" class RedisTokensRepository(AbstractTokensRepository): @@ -49,7 +50,10 @@ class RedisTokensRepository(AbstractTokensRepository): def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" - raise NotImplementedError + r = self.connection + if r.exists(RECOVERY_KEY_REDIS_KEY): + return self._recovery_key_from_hash(RECOVERY_KEY_REDIS_KEY) + return None def create_recovery_key( self, @@ -90,6 +94,7 @@ class 
RedisTokensRepository(AbstractTokensRepository): def _is_date_key(key: str): return key in [ "created_at", + "expires_at", ] @staticmethod @@ -116,6 +121,12 @@ class RedisTokensRepository(AbstractTokensRepository): return Token(**token_dict) return None + def _recovery_key_from_hash(self, redis_key: str) -> Optional[RecoveryKey]: + token_dict = self._model_dict_from_hash(redis_key) + if token_dict is not None: + return RecoveryKey(**token_dict) + return None + def _store_model_as_hash(self, redis_key, model): r = self.connection for key, value in model.dict().items(): From 8dfb3eb9369d266f2e35e10f7597a8c182de557b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 18:06:11 +0000 Subject: [PATCH 086/129] feat(tokens-repo): fuller reset --- .../repositories/tokens/redis_tokens_repository.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index ae91b32..ad9d26b 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -47,6 +47,9 @@ class RedisTokensRepository(AbstractTokensRepository): def reset(self): for token in self.get_tokens(): self.delete_token(token) + self.delete_new_device_key() + r = self.connection + r.delete(RECOVERY_KEY_REDIS_KEY) def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" @@ -71,7 +74,8 @@ class RedisTokensRepository(AbstractTokensRepository): def delete_new_device_key(self) -> None: """Delete the new device key""" - raise NotImplementedError + r = self.connection + r.delete(NEW_DEVICE_KEY_REDIS_KEY) @staticmethod def _token_redis_key(token: Token) -> str: From eba1d01b3d648d23ecced682b8b0d613ad77f911 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 18:10:48 +0000 Subject: [PATCH 087/129] feat(tokens-repo): recovery key creation --- 
.../repositories/tokens/redis_tokens_repository.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index ad9d26b..4c3c46f 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -64,7 +64,9 @@ class RedisTokensRepository(AbstractTokensRepository): uses_left: Optional[int], ) -> RecoveryKey: """Create the recovery key""" - raise NotImplementedError + recovery_key = RecoveryKey.generate(expiration=expiration, uses_left=uses_left) + self._store_model_as_hash(RECOVERY_KEY_REDIS_KEY, recovery_key) + return recovery_key def get_new_device_key(self) -> NewDeviceKey: """Creates and returns the new device key""" From 13e84e2697ca38b5846e77328086d059248d85c5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 18:22:19 +0000 Subject: [PATCH 088/129] feat(tokens-repo): recovery key uses decrement --- .../repositories/tokens/redis_tokens_repository.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 4c3c46f..0804898 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -90,7 +90,10 @@ class RedisTokensRepository(AbstractTokensRepository): def _decrement_recovery_token(self): """Decrement recovery key use count by one""" - raise NotImplementedError + if self.is_recovery_key_valid(): + uses_left = self.get_recovery_key().uses_left + r = self.connection + r.hset(RECOVERY_KEY_REDIS_KEY, "uses_left", uses_left - 1) def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]: """Retrieves new device key that is already stored.""" From fda5d315a961afb0b9b20fad26cd83cad6b3033e Mon Sep 17 
00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 18:27:58 +0000 Subject: [PATCH 089/129] fix(tokens-repo): return device key instead of NewDeviceKey class --- selfprivacy_api/repositories/tokens/redis_tokens_repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 0804898..930b043 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -72,7 +72,7 @@ class RedisTokensRepository(AbstractTokensRepository): """Creates and returns the new device key""" new_device_key = NewDeviceKey.generate() self._store_model_as_hash(NEW_DEVICE_KEY_REDIS_KEY, new_device_key) - return NewDeviceKey + return new_device_key def delete_new_device_key(self) -> None: """Delete the new device key""" From 6f6a9f5ef070befa4b9e0ca9fd80713c6de59567 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 18:34:12 +0000 Subject: [PATCH 090/129] test(tokens-repo): do not require order in test_delete_not_found_token --- .../test_graphql/test_repository/test_tokens_repository.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 05ad77b..43f7626 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -265,7 +265,7 @@ def test_delete_token(some_tokens_repo): def test_delete_not_found_token(some_tokens_repo): repo = some_tokens_repo - tokens = repo.get_tokens() + initial_tokens = repo.get_tokens() input_token = Token( token="imbadtoken", device_name="primary_token", @@ -274,7 +274,10 @@ def test_delete_not_found_token(some_tokens_repo): with pytest.raises(TokenNotFound): assert repo.delete_token(input_token) 
is None - assert repo.get_tokens() == tokens + new_tokens = repo.get_tokens() + assert len(new_tokens) == len(initial_tokens) + for token in initial_tokens: + assert token in new_tokens def test_refresh_token(some_tokens_repo, mock_token_generate): From 0ae7c43ebf2b11c6b7edb8ab9086d80f3cd88cdc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 18:45:12 +0000 Subject: [PATCH 091/129] refactor(tokens-repo): break out generic hash_as_model casting --- .../repositories/tokens/redis_tokens_repository.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 930b043..833679f 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -124,17 +124,17 @@ class RedisTokensRepository(AbstractTokensRepository): return token_dict return None - def _token_from_hash(self, redis_key: str) -> Optional[Token]: + def _hash_as_model(self, redis_key: str, model_class): token_dict = self._model_dict_from_hash(redis_key) if token_dict is not None: - return Token(**token_dict) + return model_class(**token_dict) return None + def _token_from_hash(self, redis_key: str) -> Optional[Token]: + return self._hash_as_model(redis_key, Token) + def _recovery_key_from_hash(self, redis_key: str) -> Optional[RecoveryKey]: - token_dict = self._model_dict_from_hash(redis_key) - if token_dict is not None: - return RecoveryKey(**token_dict) - return None + return self._hash_as_model(redis_key, RecoveryKey) def _store_model_as_hash(self, redis_key, model): r = self.connection From 5a25e2a2706ef2492e225ec1d0777c9944dd0152 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 14 Dec 2022 18:55:26 +0000 Subject: [PATCH 092/129] feat(tokens-repo): getting stored device key --- .../repositories/tokens/redis_tokens_repository.py | 5 ++++- 1 file changed, 4 
insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 833679f..b1fb4b0 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -97,7 +97,7 @@ class RedisTokensRepository(AbstractTokensRepository): def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]: """Retrieves new device key that is already stored.""" - raise NotImplementedError + return self._new_device_key_from_hash(NEW_DEVICE_KEY_REDIS_KEY) @staticmethod def _is_date_key(key: str): @@ -136,6 +136,9 @@ class RedisTokensRepository(AbstractTokensRepository): def _recovery_key_from_hash(self, redis_key: str) -> Optional[RecoveryKey]: return self._hash_as_model(redis_key, RecoveryKey) + def _new_device_key_from_hash(self, redis_key: str) -> Optional[NewDeviceKey]: + return self._hash_as_model(redis_key, NewDeviceKey) + def _store_model_as_hash(self, redis_key, model): r = self.connection for key, value in model.dict().items(): From ab70687c6196f021e6800e8ea424517c1b9a1130 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 19 Dec 2022 12:57:32 +0000 Subject: [PATCH 093/129] fix(tokens-repository) :make NONE-uses-left eternally valid and test for it --- .../tokens/json_tokens_repository.py | 3 +- .../tokens/redis_tokens_repository.py | 5 +- .../test_repository/test_tokens_repository.py | 74 +++++++------------ 3 files changed, 32 insertions(+), 50 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index b4c0ab2..c599e0f 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -98,7 +98,8 @@ class JsonTokensRepository(AbstractTokensRepository): """Decrement recovery key use count by one""" if 
self.is_recovery_key_valid(): with WriteUserData(UserDataFiles.TOKENS) as tokens: - tokens["recovery_token"]["uses_left"] -= 1 + if tokens["recovery_token"]["uses_left"] is not None: + tokens["recovery_token"]["uses_left"] -= 1 def get_new_device_key(self) -> NewDeviceKey: """Creates and returns the new device key""" diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index b1fb4b0..2fb6180 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -92,8 +92,9 @@ class RedisTokensRepository(AbstractTokensRepository): """Decrement recovery key use count by one""" if self.is_recovery_key_valid(): uses_left = self.get_recovery_key().uses_left - r = self.connection - r.hset(RECOVERY_KEY_REDIS_KEY, "uses_left", uses_left - 1) + if uses_left is not None: + r = self.connection + r.hset(RECOVERY_KEY_REDIS_KEY, "uses_left", uses_left - 1) def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]: """Retrieves new device key that is already stored.""" diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 43f7626..7a80b03 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -3,6 +3,7 @@ # pylint: disable=missing-function-docstring from datetime import datetime +from mnemonic import Mnemonic import pytest @@ -132,21 +133,6 @@ def mock_recovery_key_generate(mocker): return mock -@pytest.fixture -def mock_recovery_key_generate_for_mnemonic(mocker): - mock = mocker.patch( - "selfprivacy_api.models.tokens.recovery_key.RecoveryKey.generate", - autospec=True, - return_value=RecoveryKey( - key="ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - expires_at=None, - 
uses_left=1, - ), - ) - return mock - - @pytest.fixture def empty_json_repo(empty_keys): repo = JsonTokensRepository() @@ -397,46 +383,40 @@ def test_use_not_found_mnemonic_recovery_key(some_tokens_repo): ) -def test_use_mnemonic_recovery_key_when_empty(empty_repo): - repo = empty_repo - - with pytest.raises(RecoveryKeyNotFound): - assert ( - repo.use_mnemonic_recovery_key( - mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", - device_name="primary_token", - ) - is None - ) +@pytest.fixture(params=["recovery_uses_1", "recovery_eternal"]) +def recovery_key_uses_left(request): + if request.param == "recovery_uses_1": + return 1 + if request.param == "recovery_eternal": + return None -# agnostic test mixed with an implementation test -def test_use_mnemonic_recovery_key( - some_tokens_repo, mock_recovery_key_generate_for_mnemonic, mock_generate_token -): +def test_use_mnemonic_recovery_key(some_tokens_repo, recovery_key_uses_left): repo = some_tokens_repo - assert repo.create_recovery_key(uses_left=1, expiration=None) is not None - - test_token = Token( - token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", - device_name="newdevice", - created_at=datetime(2022, 11, 14, 6, 6, 32, 777123), - ) - assert ( - repo.use_mnemonic_recovery_key( - mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", - device_name="newdevice", - ) - == test_token + repo.create_recovery_key(uses_left=recovery_key_uses_left, expiration=None) + is not None + ) + assert repo.is_recovery_key_valid() + recovery_key = repo.get_recovery_key() + + token = repo.use_mnemonic_recovery_key( + mnemonic_phrase=Mnemonic(language="english").to_mnemonic( + bytes.fromhex(recovery_key.key) + ), + device_name="newdevice", ) - assert test_token in repo.get_tokens() + assert token.device_name == "newdevice" + assert token in repo.get_tokens() + new_uses = None + if 
recovery_key_uses_left is not None: + new_uses = recovery_key_uses_left - 1 assert repo.get_recovery_key() == RecoveryKey( - key="ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + key=recovery_key.key, + created_at=recovery_key.created_at, expires_at=None, - uses_left=0, + uses_left=new_uses, ) From a97705ef256f9c25a11e7e571664dbc9b8def7cf Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 19 Dec 2022 17:37:44 +0000 Subject: [PATCH 094/129] fix(tokens-repository): fix getting and setting recovery token expiration date --- .../tokens/json_tokens_repository.py | 7 +++++-- .../test_repository/test_tokens_repository.py | 21 ++++++++++++++++++- tests/test_models.py | 10 +++++++++ 3 files changed, 35 insertions(+), 3 deletions(-) create mode 100644 tests/test_models.py diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index c599e0f..e43a3eb 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -69,7 +69,7 @@ class JsonTokensRepository(AbstractTokensRepository): recovery_key = RecoveryKey( key=tokens_file["recovery_token"].get("token"), created_at=tokens_file["recovery_token"].get("date"), - expires_at=tokens_file["recovery_token"].get("expitation"), + expires_at=tokens_file["recovery_token"].get("expiration"), uses_left=tokens_file["recovery_token"].get("uses_left"), ) @@ -85,10 +85,13 @@ class JsonTokensRepository(AbstractTokensRepository): recovery_key = RecoveryKey.generate(expiration, uses_left) with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + expiration = recovery_key.expires_at + if expiration is not None: + expiration = expiration.strftime(DATETIME_FORMAT) tokens_file["recovery_token"] = { "token": recovery_key.key, "date": recovery_key.created_at.strftime(DATETIME_FORMAT), - "expiration": recovery_key.expires_at, + 
"expiration": expiration, "uses_left": recovery_key.uses_left, } diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 7a80b03..85cee00 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -2,7 +2,7 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -from datetime import datetime +from datetime import datetime, timedelta from mnemonic import Mnemonic import pytest @@ -341,6 +341,25 @@ def test_use_mnemonic_not_valid_recovery_key( ) +def test_use_mnemonic_expired_recovery_key( + some_tokens_repo, +): + repo = some_tokens_repo + expiration = datetime.now() - timedelta(minutes=5) + assert repo.create_recovery_key(uses_left=2, expiration=expiration) is not None + recovery_key = repo.get_recovery_key() + assert recovery_key.expires_at == expiration + assert not repo.is_recovery_key_valid() + + with pytest.raises(RecoveryKeyNotFound): + token = repo.use_mnemonic_recovery_key( + mnemonic_phrase=Mnemonic(language="english").to_mnemonic( + bytes.fromhex(recovery_key.key) + ), + device_name="newdevice", + ) + + def test_use_mnemonic_not_mnemonic_recovery_key(some_tokens_repo): repo = some_tokens_repo assert repo.create_recovery_key(uses_left=1, expiration=None) is not None diff --git a/tests/test_models.py b/tests/test_models.py new file mode 100644 index 0000000..078dfd5 --- /dev/null +++ b/tests/test_models.py @@ -0,0 +1,10 @@ +import pytest +from datetime import datetime, timedelta + +from selfprivacy_api.models.tokens.recovery_key import RecoveryKey + + +def test_recovery_key_expired(): + expiration = datetime.now() - timedelta(minutes=5) + key = RecoveryKey.generate(expiration=expiration, uses_left=2) + assert not key.is_valid() From 009a89fa020f5eb471a53ca6fb183e9b1469cb88 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Dec 2022 10:24:32 +0000 Subject: 
[PATCH 095/129] refactor(tokens-repo): use token repo for graphql use_recovery_api_key --- .../graphql/mutations/api_mutations.py | 28 +++++++++++++------ 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index c6727db..1a67212 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -22,10 +22,19 @@ from selfprivacy_api.utils.auth import ( delete_new_device_auth_token, get_new_device_auth_token, refresh_token, - use_mnemonic_recoverery_token, use_new_device_auth_token, ) +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) +from selfprivacy_api.repositories.tokens.exceptions import ( + RecoveryKeyNotFound, + InvalidMnemonic, +) + +TOKEN_REPO = JsonTokensRepository() + @strawberry.type class ApiKeyMutationReturn(MutationReturnInterface): @@ -98,20 +107,21 @@ class ApiMutations: self, input: UseRecoveryKeyInput ) -> DeviceApiTokenMutationReturn: """Use recovery key""" - token = use_mnemonic_recoverery_token(input.key, input.deviceName) - if token is None: + try: + token = TOKEN_REPO.use_mnemonic_recovery_key(input.key, input.deviceName) + return DeviceApiTokenMutationReturn( + success=True, + message="Recovery key used", + code=200, + token=token.token, + ) + except (RecoveryKeyNotFound, InvalidMnemonic): return DeviceApiTokenMutationReturn( success=False, message="Recovery key not found", code=404, token=None, ) - return DeviceApiTokenMutationReturn( - success=True, - message="Recovery key used", - code=200, - token=token, - ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def refresh_device_api_token(self, info: Info) -> DeviceApiTokenMutationReturn: From d3bf867bb54f9afd23b07fabcdac7c0c43b429ee Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Dec 2022 13:05:00 +0000 Subject: [PATCH 096/129] fix(tokens-repo): do not 
change the date on token refresh --- .../tokens/abstract_tokens_repository.py | 1 + .../test_repository/test_tokens_repository.py | 14 ++++++++------ 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index a67d62d..4f86e61 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -52,6 +52,7 @@ class AbstractTokensRepository(ABC): def refresh_token(self, input_token: Token) -> Token: """Change the token field of the existing token""" new_token = Token.generate(device_name=input_token.device_name) + new_token.created_at = input_token.created_at if input_token in self.get_tokens(): self.delete_token(input_token) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 85cee00..1a26247 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -266,15 +266,17 @@ def test_delete_not_found_token(some_tokens_repo): assert token in new_tokens -def test_refresh_token(some_tokens_repo, mock_token_generate): +def test_refresh_token(some_tokens_repo): repo = some_tokens_repo input_token = some_tokens_repo.get_tokens()[0] - assert repo.refresh_token(input_token) == Token( - token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", - device_name="IamNewDevice", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) + output_token = repo.refresh_token(input_token) + + assert output_token.token != input_token.token + assert output_token.device_name == input_token.device_name + assert output_token.created_at == input_token.created_at + + assert output_token in repo.get_tokens() def test_refresh_not_found_token(some_tokens_repo, mock_token_generate): From 
39277419acbe61b70757cb7999148f0acb4f26f3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Dec 2022 13:09:51 +0000 Subject: [PATCH 097/129] refactor(tokens-repo): use tokens-repo to refresh tokens --- .../graphql/mutations/api_mutations.py | 24 ++++++++++--------- tests/test_graphql/test_api_devices.py | 24 ++++++++++++++----- 2 files changed, 31 insertions(+), 17 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index 1a67212..3626d83 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -21,7 +21,6 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( from selfprivacy_api.utils.auth import ( delete_new_device_auth_token, get_new_device_auth_token, - refresh_token, use_new_device_auth_token, ) @@ -126,32 +125,35 @@ class ApiMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def refresh_device_api_token(self, info: Info) -> DeviceApiTokenMutationReturn: """Refresh device api token""" - token = ( + token_string = ( info.context["request"] .headers.get("Authorization", "") .replace("Bearer ", "") ) - if token is None: + if token_string is None: return DeviceApiTokenMutationReturn( success=False, message="Token not found", code=404, token=None, ) - new_token = refresh_token(token) - if new_token is None: + + try: + old_token = TOKEN_REPO.get_token_by_token_string(token_string) + new_token = TOKEN_REPO.refresh_token(old_token) + return DeviceApiTokenMutationReturn( + success=True, + message="Token refreshed", + code=200, + token=new_token.token, + ) + except: return DeviceApiTokenMutationReturn( success=False, message="Token not found", code=404, token=None, ) - return DeviceApiTokenMutationReturn( - success=True, - message="Token refreshed", - code=200, - token=new_token, - ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def delete_device_api_token(self, device: str, 
info: Info) -> GenericMutationReturn: diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index d8dc974..07cf42a 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -2,8 +2,14 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring import datetime +import pytest from mnemonic import Mnemonic +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) +from selfprivacy_api.models.tokens.token import Token + from tests.common import generate_api_query, read_json, write_json TOKENS_FILE_CONTETS = { @@ -30,6 +36,11 @@ devices { """ +@pytest.fixture +def token_repo(): + return JsonTokensRepository() + + def test_graphql_tokens_info(authorized_client, tokens_file): response = authorized_client.post( "/graphql", @@ -170,7 +181,7 @@ def test_graphql_refresh_token_unauthorized(client, tokens_file): assert response.json()["data"] is None -def test_graphql_refresh_token(authorized_client, tokens_file): +def test_graphql_refresh_token(authorized_client, tokens_file, token_repo): response = authorized_client.post( "/graphql", json={"query": REFRESH_TOKEN_MUTATION}, @@ -180,11 +191,12 @@ def test_graphql_refresh_token(authorized_client, tokens_file): assert response.json()["data"]["refreshDeviceApiToken"]["success"] is True assert response.json()["data"]["refreshDeviceApiToken"]["message"] is not None assert response.json()["data"]["refreshDeviceApiToken"]["code"] == 200 - assert read_json(tokens_file)["tokens"][0] == { - "token": response.json()["data"]["refreshDeviceApiToken"]["token"], - "name": "test_token", - "date": "2022-01-14 08:31:10.789314", - } + token = token_repo.get_token_by_name("test_token") + assert token == Token( + token=response.json()["data"]["refreshDeviceApiToken"]["token"], + device_name="test_token", + created_at=datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), + ) NEW_DEVICE_KEY_MUTATION = """ From 
16f71b0b09933b80420346df3f72de7fa6662aab Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Dec 2022 13:51:39 +0000 Subject: [PATCH 098/129] refactor(tokens-repo): use tokens-repo to create recovery token --- selfprivacy_api/actions/api_tokens.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 61c695d..33ab286 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -2,11 +2,11 @@ from datetime import datetime from typing import Optional from pydantic import BaseModel +from mnemonic import Mnemonic from selfprivacy_api.utils.auth import ( delete_token, - generate_recovery_token, get_recovery_token_status, get_tokens_info, is_recovery_token_exists, @@ -17,6 +17,12 @@ from selfprivacy_api.utils.auth import ( get_token_name, ) +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) + +TOKEN_REPO = JsonTokensRepository() + class TokenInfoWithIsCaller(BaseModel): """Token info""" @@ -112,5 +118,6 @@ def get_new_api_recovery_key( if uses_left <= 0: raise InvalidUsesLeft("Uses must be greater than 0") - key = generate_recovery_token(expiration_date, uses_left) - return key + key = TOKEN_REPO.create_recovery_key(expiration_date, uses_left) + mnemonic_phrase = Mnemonic(language="english").to_mnemonic(bytes.fromhex(key.key)) + return mnemonic_phrase From 3021584adc57aa5a0abff28009339cde4b940304 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Dec 2022 15:23:42 +0000 Subject: [PATCH 099/129] refactor(tokens-repo): delete refresh_token() from auth --- selfprivacy_api/actions/api_tokens.py | 10 ++++++---- selfprivacy_api/graphql/mutations/api_mutations.py | 8 ++++---- selfprivacy_api/utils/auth.py | 11 ----------- tests/test_rest_endpoints/test_auth.py | 8 +++++++- 4 files changed, 17 insertions(+), 20 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py 
b/selfprivacy_api/actions/api_tokens.py index 33ab286..38cd208 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -13,13 +13,13 @@ from selfprivacy_api.utils.auth import ( is_recovery_token_valid, is_token_name_exists, is_token_name_pair_valid, - refresh_token, get_token_name, ) from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, ) +from selfprivacy_api.repositories.tokens.exceptions import TokenNotFound TOKEN_REPO = JsonTokensRepository() @@ -65,10 +65,12 @@ def delete_api_token(caller_token: str, token_name: str) -> None: def refresh_api_token(caller_token: str) -> str: """Refresh the token""" - new_token = refresh_token(caller_token) - if new_token is None: + try: + old_token = TOKEN_REPO.get_token_by_token_string(caller_token) + new_token = TOKEN_REPO.refresh_token(old_token) + except TokenNotFound: raise NotFoundException("Token not found") - return new_token + return new_token.token class RecoveryTokenStatus(BaseModel): diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index 3626d83..9f04317 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -11,6 +11,7 @@ from selfprivacy_api.actions.api_tokens import ( NotFoundException, delete_api_token, get_new_api_recovery_key, + refresh_api_token, ) from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( @@ -139,15 +140,14 @@ class ApiMutations: ) try: - old_token = TOKEN_REPO.get_token_by_token_string(token_string) - new_token = TOKEN_REPO.refresh_token(old_token) + new_token = refresh_api_token(token_string) return DeviceApiTokenMutationReturn( success=True, message="Token refreshed", code=200, - token=new_token.token, + token=new_token, ) - except: + except NotFoundException: return DeviceApiTokenMutationReturn( success=False, 
message="Token not found", diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index ecaf9af..1788f8f 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -145,17 +145,6 @@ def delete_token(token_name): tokens["tokens"] = [t for t in tokens["tokens"] if t["name"] != token_name] -def refresh_token(token: str) -> typing.Optional[str]: - """Change the token field of the existing token""" - new_token = _generate_token() - with WriteUserData(UserDataFiles.TOKENS) as tokens: - for t in tokens["tokens"]: - if t["token"] == token: - t["token"] = new_token - return new_token - return None - - def is_recovery_token_exists(): """Check if recovery token exists""" with ReadUserData(UserDataFiles.TOKENS) as tokens: diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 1083be5..12de0cf 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -5,6 +5,12 @@ import datetime import pytest from mnemonic import Mnemonic +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) + +TOKEN_REPO = JsonTokensRepository() + from tests.common import read_json, write_json @@ -97,7 +103,7 @@ def test_refresh_token(authorized_client, tokens_file): response = authorized_client.post("/auth/tokens") assert response.status_code == 200 new_token = response.json()["token"] - assert read_json(tokens_file)["tokens"][0]["token"] == new_token + assert TOKEN_REPO.get_token_by_token_string(new_token) is not None # new device From 20410ec790661cc2d3fb6e86a7cc9bf19faca5f3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Dec 2022 16:10:41 +0000 Subject: [PATCH 100/129] fix(tokens-repo): fix name pair validation being able to raise a notfound error --- .../repositories/tokens/abstract_tokens_repository.py | 7 +++++-- .../test_repository/test_tokens_repository.py | 8 ++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) 
diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 4f86e61..3b23a84 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -77,8 +77,11 @@ class AbstractTokensRepository(ABC): def is_token_name_pair_valid(self, token_name: str, token_string: str) -> bool: """Check if the token name and token are valid""" - token = self.get_token_by_name(token_name) - if token is None: + try: + token = self.get_token_by_name(token_name) + if token is None: + return False + except TokenNotFound: return False return token.token == token_string diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 1a26247..d3650fe 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -207,6 +207,14 @@ def test_get_token_by_non_existent_name(some_tokens_repo): assert repo.get_token_by_name(token_name="badname") is None +def test_is_token_name_pair_valid(some_tokens_repo): + repo = some_tokens_repo + token = repo.get_tokens()[0] + assert repo.is_token_name_pair_valid(token.device_name, token.token) + assert not repo.is_token_name_pair_valid(token.device_name, "gibberish") + assert not repo.is_token_name_pair_valid("gibberish", token.token) + + def test_get_tokens(some_tokens_repo): repo = some_tokens_repo tokenstrings = [] From 2a239e35ad3d1cf04cd23c0de569df7f282d5983 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Dec 2022 16:17:56 +0000 Subject: [PATCH 101/129] refactor(tokens-repo): delete is_token_name_pair_valid from auth --- selfprivacy_api/actions/api_tokens.py | 3 +-- selfprivacy_api/utils/auth.py | 9 --------- 2 files changed, 1 insertion(+), 11 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py 
b/selfprivacy_api/actions/api_tokens.py index 38cd208..820d7c6 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -12,7 +12,6 @@ from selfprivacy_api.utils.auth import ( is_recovery_token_exists, is_recovery_token_valid, is_token_name_exists, - is_token_name_pair_valid, get_token_name, ) @@ -56,7 +55,7 @@ class CannotDeleteCallerException(Exception): def delete_api_token(caller_token: str, token_name: str) -> None: """Delete the token""" - if is_token_name_pair_valid(token_name, caller_token): + if TOKEN_REPO.is_token_name_pair_valid(token_name, caller_token): raise CannotDeleteCallerException("Cannot delete caller's token") if not is_token_name_exists(token_name): raise NotFoundException("Token not found") diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 1788f8f..a6c6299 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -79,15 +79,6 @@ def is_token_name_exists(token_name): return token_name in [t["name"] for t in tokens["tokens"]] -def is_token_name_pair_valid(token_name, token): - """Check if token name and token pair exists""" - with ReadUserData(UserDataFiles.TOKENS) as tokens: - for t in tokens["tokens"]: - if t["name"] == token_name and t["token"] == token: - return True - return False - - def get_token_name(token: str) -> typing.Optional[str]: """Return the name of the token provided""" with ReadUserData(UserDataFiles.TOKENS) as tokens: From 22a309466e59974f04e85c5076febfb76579d6ad Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Dec 2022 17:04:18 +0000 Subject: [PATCH 102/129] fix(tokens-repo): fix is_name_exists() being fallible --- .../repositories/tokens/abstract_tokens_repository.py | 5 +---- .../test_graphql/test_repository/test_tokens_repository.py | 7 +++++++ 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py 
b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 3b23a84..03c65bd 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -70,10 +70,7 @@ class AbstractTokensRepository(ABC): def is_token_name_exists(self, token_name: str) -> bool: """Check if the token name exists""" - token = self.get_token_by_name(token_name) - if token is None: - return False - return True + return token_name in [token.device_name for token in self.get_tokens()] def is_token_name_pair_valid(self, token_name: str, token_string: str) -> bool: """Check if the token name and token are valid""" diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index d3650fe..95ba7c0 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -215,6 +215,13 @@ def test_is_token_name_pair_valid(some_tokens_repo): assert not repo.is_token_name_pair_valid("gibberish", token.token) +def test_is_token_name_exists(some_tokens_repo): + repo = some_tokens_repo + token = repo.get_tokens()[0] + assert repo.is_token_name_exists(token.device_name) + assert not repo.is_token_name_exists("gibberish") + + def test_get_tokens(some_tokens_repo): repo = some_tokens_repo tokenstrings = [] From 5a1f64b1e7b06bfdf6e03be8c8b355b3e4c4e781 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Dec 2022 17:09:49 +0000 Subject: [PATCH 103/129] refactor(tokens-repo): delete is_name_exists() from auth utils --- selfprivacy_api/actions/api_tokens.py | 3 +-- selfprivacy_api/utils/auth.py | 6 ------ 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 820d7c6..8044a76 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ 
-11,7 +11,6 @@ from selfprivacy_api.utils.auth import ( get_tokens_info, is_recovery_token_exists, is_recovery_token_valid, - is_token_name_exists, get_token_name, ) @@ -57,7 +56,7 @@ def delete_api_token(caller_token: str, token_name: str) -> None: """Delete the token""" if TOKEN_REPO.is_token_name_pair_valid(token_name, caller_token): raise CannotDeleteCallerException("Cannot delete caller's token") - if not is_token_name_exists(token_name): + if not TOKEN_REPO.is_token_name_exists(token_name): raise NotFoundException("Token not found") delete_token(token_name) diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index a6c6299..1d6939e 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -73,12 +73,6 @@ def is_token_valid(token): return False -def is_token_name_exists(token_name): - """Check if token name exists""" - with ReadUserData(UserDataFiles.TOKENS) as tokens: - return token_name in [t["name"] for t in tokens["tokens"]] - - def get_token_name(token: str) -> typing.Optional[str]: """Return the name of the token provided""" with ReadUserData(UserDataFiles.TOKENS) as tokens: From a2ac47b0f5604b0e49274840e1598ca071e81036 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 23 Dec 2022 11:14:52 +0000 Subject: [PATCH 104/129] refactor(tokens-repo): delete delete_token from auth utils --- selfprivacy_api/actions/api_tokens.py | 4 ++-- selfprivacy_api/utils/auth.py | 6 ------ 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 8044a76..bd1d4fe 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -6,7 +6,6 @@ from mnemonic import Mnemonic from selfprivacy_api.utils.auth import ( - delete_token, get_recovery_token_status, get_tokens_info, is_recovery_token_exists, @@ -58,7 +57,8 @@ def delete_api_token(caller_token: str, token_name: str) -> None: raise 
CannotDeleteCallerException("Cannot delete caller's token") if not TOKEN_REPO.is_token_name_exists(token_name): raise NotFoundException("Token not found") - delete_token(token_name) + token = TOKEN_REPO.get_token_by_name(token_name) + TOKEN_REPO.delete_token(token) def refresh_api_token(caller_token: str) -> str: diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 1d6939e..e1c3b7e 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -124,12 +124,6 @@ def create_token(name): return token -def delete_token(token_name): - """Delete token""" - with WriteUserData(UserDataFiles.TOKENS) as tokens: - tokens["tokens"] = [t for t in tokens["tokens"] if t["name"] != token_name] - - def is_recovery_token_exists(): """Check if recovery token exists""" with ReadUserData(UserDataFiles.TOKENS) as tokens: From f928ca160a843a22eb1aec79b3156334fc780631 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 23 Dec 2022 12:00:35 +0000 Subject: [PATCH 105/129] refactor(tokens-repo): use token repo in get_api_recovery_token_status action --- selfprivacy_api/actions/api_tokens.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index bd1d4fe..399452a 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -6,10 +6,7 @@ from mnemonic import Mnemonic from selfprivacy_api.utils.auth import ( - get_recovery_token_status, get_tokens_info, - is_recovery_token_exists, - is_recovery_token_valid, get_token_name, ) @@ -83,18 +80,16 @@ class RecoveryTokenStatus(BaseModel): def get_api_recovery_token_status() -> RecoveryTokenStatus: """Get the recovery token status""" - if not is_recovery_token_exists(): + token = TOKEN_REPO.get_recovery_key() + if token is None: return RecoveryTokenStatus(exists=False, valid=False) - status = get_recovery_token_status() - if status is None: - return 
RecoveryTokenStatus(exists=False, valid=False) - is_valid = is_recovery_token_valid() + is_valid = TOKEN_REPO.is_recovery_key_valid() return RecoveryTokenStatus( exists=True, valid=is_valid, - date=status["date"], - expiration=status["expiration"], - uses_left=status["uses_left"], + date=token.created_at, + expiration=token.expires_at, + uses_left=token.uses_left, ) From e817de6228c60a6a9e9ace7015e63e476b9e9523 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 23 Dec 2022 12:29:32 +0000 Subject: [PATCH 106/129] refactor(tokens-repo): use token repo in get_api_tokens_with_caller_flag --- selfprivacy_api/actions/api_tokens.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 399452a..03de653 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -4,12 +4,6 @@ from typing import Optional from pydantic import BaseModel from mnemonic import Mnemonic - -from selfprivacy_api.utils.auth import ( - get_tokens_info, - get_token_name, -) - from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, ) @@ -28,13 +22,13 @@ class TokenInfoWithIsCaller(BaseModel): def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCaller]: """Get the tokens info""" - caller_name = get_token_name(caller_token) - tokens = get_tokens_info() + caller_name = TOKEN_REPO.get_token_by_token_string(caller_token).device_name + tokens = TOKEN_REPO.get_tokens() return [ TokenInfoWithIsCaller( - name=token.name, - date=token.date, - is_caller=token.name == caller_name, + name=token.device_name, + date=token.created_at, + is_caller=token.device_name == caller_name, ) for token in tokens ] From 5dedbda41f2183d43620ac35d2013b267073c80c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 23 Dec 2022 12:41:29 +0000 Subject: [PATCH 107/129] refactor(tokens-repo): delete get_tokens_info() from 
auth utils --- selfprivacy_api/utils/auth.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index e1c3b7e..3383f8d 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -89,18 +89,6 @@ class BasicTokenInfo(BaseModel): date: datetime -def get_tokens_info(): - """Get all tokens info without tokens themselves""" - with ReadUserData(UserDataFiles.TOKENS) as tokens: - return [ - BasicTokenInfo( - name=t["name"], - date=parse_date(t["date"]), - ) - for t in tokens["tokens"] - ] - - def _generate_token(): """Generates new token and makes sure it is unique""" token = secrets.token_urlsafe(32) From 3f6aa9bd06876707e47a2ed20816d095b6c3d7e9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 10:13:37 +0000 Subject: [PATCH 108/129] refactor(tokens-repo): delete delete_new_device_auth_token from auth utils --- selfprivacy_api/actions/api_tokens.py | 4 ++++ selfprivacy_api/graphql/mutations/api_mutations.py | 2 +- selfprivacy_api/rest/api_auth.py | 2 +- selfprivacy_api/utils/auth.py | 7 ------- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 03de653..a90aa12 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -110,3 +110,7 @@ def get_new_api_recovery_key( key = TOKEN_REPO.create_recovery_key(expiration_date, uses_left) mnemonic_phrase = Mnemonic(language="english").to_mnemonic(bytes.fromhex(key.key)) return mnemonic_phrase + + +def delete_new_device_auth_token() -> None: + TOKEN_REPO.delete_new_device_key() diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index 9f04317..0c83eab 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -12,6 +12,7 @@ from selfprivacy_api.actions.api_tokens 
import ( delete_api_token, get_new_api_recovery_key, refresh_api_token, + delete_new_device_auth_token, ) from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( @@ -20,7 +21,6 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( ) from selfprivacy_api.utils.auth import ( - delete_new_device_auth_token, get_new_device_auth_token, use_new_device_auth_token, ) diff --git a/selfprivacy_api/rest/api_auth.py b/selfprivacy_api/rest/api_auth.py index f73056c..8209ef6 100644 --- a/selfprivacy_api/rest/api_auth.py +++ b/selfprivacy_api/rest/api_auth.py @@ -12,12 +12,12 @@ from selfprivacy_api.actions.api_tokens import ( get_api_tokens_with_caller_flag, get_new_api_recovery_key, refresh_api_token, + delete_new_device_auth_token, ) from selfprivacy_api.dependencies import TokenHeader, get_token_header from selfprivacy_api.utils.auth import ( - delete_new_device_auth_token, get_new_device_auth_token, use_mnemonic_recoverery_token, use_new_device_auth_token, diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 3383f8d..847cd30 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -257,13 +257,6 @@ def _get_new_device_auth_token(): return new_device["token"] -def delete_new_device_auth_token(): - """Delete new device auth token""" - with WriteUserData(UserDataFiles.TOKENS) as tokens: - if "new_device" in tokens: - del tokens["new_device"] - - def use_new_device_auth_token(mnemonic_phrase, name): """Use the new device auth token by converting the mnemonic string to a byte array. If the mnemonic phrase is valid then generate a device token and return it. 
From cb1906144c22be4b363e9ca874558e3d1b5d214d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 10:27:10 +0000 Subject: [PATCH 109/129] refactor(tokens-repo): delete get_new_device_auth_token from auth utils --- selfprivacy_api/actions/api_tokens.py | 8 ++++++++ .../graphql/mutations/api_mutations.py | 2 +- selfprivacy_api/rest/api_auth.py | 2 +- selfprivacy_api/utils/auth.py | 16 ---------------- 4 files changed, 10 insertions(+), 18 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index a90aa12..b13c13e 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -114,3 +114,11 @@ def get_new_api_recovery_key( def delete_new_device_auth_token() -> None: TOKEN_REPO.delete_new_device_key() + + +def get_new_device_auth_token() -> str: + """Generate and store a new device auth token which is valid for 10 minutes + and return a mnemonic phrase representation + """ + key = TOKEN_REPO.get_new_device_key() + return Mnemonic(language="english").to_mnemonic(bytes.fromhex(key.key)) diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index 0c83eab..c2075c3 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -13,6 +13,7 @@ from selfprivacy_api.actions.api_tokens import ( get_new_api_recovery_key, refresh_api_token, delete_new_device_auth_token, + get_new_device_auth_token, ) from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( @@ -21,7 +22,6 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( ) from selfprivacy_api.utils.auth import ( - get_new_device_auth_token, use_new_device_auth_token, ) diff --git a/selfprivacy_api/rest/api_auth.py b/selfprivacy_api/rest/api_auth.py index 8209ef6..a860798 100644 --- a/selfprivacy_api/rest/api_auth.py +++ 
b/selfprivacy_api/rest/api_auth.py @@ -13,12 +13,12 @@ from selfprivacy_api.actions.api_tokens import ( get_new_api_recovery_key, refresh_api_token, delete_new_device_auth_token, + get_new_device_auth_token, ) from selfprivacy_api.dependencies import TokenHeader, get_token_header from selfprivacy_api.utils.auth import ( - get_new_device_auth_token, use_mnemonic_recoverery_token, use_new_device_auth_token, ) diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 847cd30..f27a826 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -227,22 +227,6 @@ def use_mnemonic_recoverery_token(mnemonic_phrase, name): return token -def get_new_device_auth_token() -> str: - """Generate a new device auth token which is valid for 10 minutes - and return a mnemonic phrase representation - Write token to the new_device of the tokens.json file. - """ - token = secrets.token_bytes(16) - token_str = token.hex() - with WriteUserData(UserDataFiles.TOKENS) as tokens: - tokens["new_device"] = { - "token": token_str, - "date": str(datetime.now()), - "expiration": str(datetime.now() + timedelta(minutes=10)), - } - return Mnemonic(language="english").to_mnemonic(token) - - def _get_new_device_auth_token(): """Get new device auth token. 
If it is expired, return None""" with ReadUserData(UserDataFiles.TOKENS) as tokens: From 29723b9f3a1d028bbe10da26b6694d36ce4c6aec Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 10:53:27 +0000 Subject: [PATCH 110/129] refactor(tokens-repo): delete use_mnemonic_recoverery_token from auth utils --- selfprivacy_api/actions/api_tokens.py | 22 ++++++++++- .../graphql/mutations/api_mutations.py | 9 +++-- selfprivacy_api/rest/api_auth.py | 6 +-- selfprivacy_api/utils/auth.py | 39 ------------------- 4 files changed, 29 insertions(+), 47 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index b13c13e..394d3d9 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -7,7 +7,11 @@ from mnemonic import Mnemonic from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, ) -from selfprivacy_api.repositories.tokens.exceptions import TokenNotFound +from selfprivacy_api.repositories.tokens.exceptions import ( + TokenNotFound, + RecoveryKeyNotFound, + InvalidMnemonic, +) TOKEN_REPO = JsonTokensRepository() @@ -112,6 +116,22 @@ def get_new_api_recovery_key( return mnemonic_phrase +def use_mnemonic_recovery_token(mnemonic_phrase, name): + """Use the recovery token by converting the mnemonic word list to a byte array. + If the recovery token if invalid itself, return None + If the binary representation of phrase not matches + the byte array of the recovery token, return None. + If the mnemonic phrase is valid then generate a device token and return it. + Substract 1 from uses_left if it exists. + mnemonic_phrase is a string representation of the mnemonic word list. 
+ """ + try: + token = TOKEN_REPO.use_mnemonic_recovery_key(mnemonic_phrase, name) + return token.token + except (RecoveryKeyNotFound, InvalidMnemonic): + return None + + def delete_new_device_auth_token() -> None: TOKEN_REPO.delete_new_device_key() diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index c2075c3..45961c8 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -11,6 +11,7 @@ from selfprivacy_api.actions.api_tokens import ( NotFoundException, delete_api_token, get_new_api_recovery_key, + use_mnemonic_recovery_token, refresh_api_token, delete_new_device_auth_token, get_new_device_auth_token, @@ -107,15 +108,15 @@ class ApiMutations: self, input: UseRecoveryKeyInput ) -> DeviceApiTokenMutationReturn: """Use recovery key""" - try: - token = TOKEN_REPO.use_mnemonic_recovery_key(input.key, input.deviceName) + token = use_mnemonic_recovery_token(input.key, input.deviceName) + if token is not None: return DeviceApiTokenMutationReturn( success=True, message="Recovery key used", code=200, - token=token.token, + token=token, ) - except (RecoveryKeyNotFound, InvalidMnemonic): + else: return DeviceApiTokenMutationReturn( success=False, message="Recovery key not found", diff --git a/selfprivacy_api/rest/api_auth.py b/selfprivacy_api/rest/api_auth.py index a860798..ab96bee 100644 --- a/selfprivacy_api/rest/api_auth.py +++ b/selfprivacy_api/rest/api_auth.py @@ -8,10 +8,11 @@ from selfprivacy_api.actions.api_tokens import ( InvalidUsesLeft, NotFoundException, delete_api_token, + refresh_api_token, get_api_recovery_token_status, get_api_tokens_with_caller_flag, get_new_api_recovery_key, - refresh_api_token, + use_mnemonic_recovery_token, delete_new_device_auth_token, get_new_device_auth_token, ) @@ -19,7 +20,6 @@ from selfprivacy_api.actions.api_tokens import ( from selfprivacy_api.dependencies import TokenHeader, get_token_header 
from selfprivacy_api.utils.auth import ( - use_mnemonic_recoverery_token, use_new_device_auth_token, ) @@ -99,7 +99,7 @@ class UseTokenInput(BaseModel): @router.post("/recovery_token/use") async def rest_use_recovery_token(input: UseTokenInput): - token = use_mnemonic_recoverery_token(input.token, input.device) + token = use_mnemonic_recovery_token(input.token, input.device) if token is None: raise HTTPException(status_code=404, detail="Token not found") return {"token": token} diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index f27a826..11836b1 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -188,45 +188,6 @@ def generate_recovery_token( return Mnemonic(language="english").to_mnemonic(recovery_token) -def use_mnemonic_recoverery_token(mnemonic_phrase, name): - """Use the recovery token by converting the mnemonic word list to a byte array. - If the recovery token if invalid itself, return None - If the binary representation of phrase not matches - the byte array of the recovery token, return None. - If the mnemonic phrase is valid then generate a device token and return it. - Substract 1 from uses_left if it exists. - mnemonic_phrase is a string representation of the mnemonic word list. 
- """ - if not is_recovery_token_valid(): - return None - recovery_token_str = _get_recovery_token() - if recovery_token_str is None: - return None - recovery_token = bytes.fromhex(recovery_token_str) - if not Mnemonic(language="english").check(mnemonic_phrase): - return None - phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) - if phrase_bytes != recovery_token: - return None - token = _generate_token() - name = _validate_token_name(name) - with WriteUserData(UserDataFiles.TOKENS) as tokens: - tokens["tokens"].append( - { - "token": token, - "name": name, - "date": str(datetime.now()), - } - ) - if "recovery_token" in tokens: - if ( - "uses_left" in tokens["recovery_token"] - and tokens["recovery_token"]["uses_left"] is not None - ): - tokens["recovery_token"]["uses_left"] -= 1 - return token - - def _get_new_device_auth_token(): """Get new device auth token. If it is expired, return None""" with ReadUserData(UserDataFiles.TOKENS) as tokens: From c037a12f4d2a882b2a29de1f1ef6be679d079be7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 12:31:09 +0000 Subject: [PATCH 111/129] refactor(tokens-repo): break out _store_new_device_key() --- .../repositories/tokens/abstract_tokens_repository.py | 8 +++++++- .../repositories/tokens/json_tokens_repository.py | 7 +------ .../repositories/tokens/redis_tokens_repository.py | 6 ++---- 3 files changed, 10 insertions(+), 11 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 03c65bd..bee76b7 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -118,9 +118,15 @@ class AbstractTokensRepository(ABC): return False return recovery_key.is_valid() - @abstractmethod def get_new_device_key(self) -> NewDeviceKey: """Creates and returns the new device key""" + new_device_key = 
NewDeviceKey.generate() + self._store_new_device_key(new_device_key) + + return new_device_key + + def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None: + """Store new device key directly""" @abstractmethod def delete_new_device_key(self) -> None: diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index e43a3eb..394c046 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -104,10 +104,7 @@ class JsonTokensRepository(AbstractTokensRepository): if tokens["recovery_token"]["uses_left"] is not None: tokens["recovery_token"]["uses_left"] -= 1 - def get_new_device_key(self) -> NewDeviceKey: - """Creates and returns the new device key""" - new_device_key = NewDeviceKey.generate() - + def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None: with WriteUserData(UserDataFiles.TOKENS) as tokens_file: tokens_file["new_device"] = { "token": new_device_key.key, @@ -115,8 +112,6 @@ class JsonTokensRepository(AbstractTokensRepository): "expiration": new_device_key.expires_at.strftime(DATETIME_FORMAT), } - return new_device_key - def delete_new_device_key(self) -> None: """Delete the new device key""" with WriteUserData(UserDataFiles.TOKENS) as tokens_file: diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 2fb6180..d665553 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -68,11 +68,9 @@ class RedisTokensRepository(AbstractTokensRepository): self._store_model_as_hash(RECOVERY_KEY_REDIS_KEY, recovery_key) return recovery_key - def get_new_device_key(self) -> NewDeviceKey: - """Creates and returns the new device key""" - new_device_key = NewDeviceKey.generate() + def 
_store_new_device_key(self, new_device_key: NewDeviceKey) -> None: + """Store new device key directly""" self._store_model_as_hash(NEW_DEVICE_KEY_REDIS_KEY, new_device_key) - return new_device_key def delete_new_device_key(self) -> None: """Delete the new device key""" From 5d4ed7343542b0d61392b82200ed92b0480ac4f8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 14:01:36 +0000 Subject: [PATCH 112/129] fix(tokens-repo): properly expire new device keys --- .../tokens/abstract_tokens_repository.py | 3 ++ .../test_repository/test_tokens_repository.py | 37 ++++++++++++++++--- tests/test_models.py | 8 ++++ 3 files changed, 42 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index bee76b7..7601ef4 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -140,6 +140,9 @@ class AbstractTokensRepository(ABC): if not new_device_key: raise NewDeviceKeyNotFound + if not new_device_key.is_valid(): + raise NewDeviceKeyNotFound + if not self._assert_mnemonic(new_device_key.key, mnemonic_phrase): raise NewDeviceKeyNotFound("Phrase is not token!") diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 95ba7c0..a95878d 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -513,15 +513,20 @@ def test_use_not_exists_mnemonic_new_device_key( ) -def test_use_mnemonic_new_device_key( - empty_repo, mock_new_device_key_generate_for_mnemonic -): +def mnemonic_from_hex(hexkey): + return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey)) + + +def test_use_mnemonic_new_device_key(empty_repo): repo = empty_repo - assert repo.get_new_device_key() is not None + key = 
repo.get_new_device_key() + assert key is not None + + mnemonic_phrase = mnemonic_from_hex(key.key) new_token = repo.use_mnemonic_new_device_key( device_name="imnew", - mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + mnemonic_phrase=mnemonic_phrase, ) assert new_token.device_name == "imnew" @@ -532,12 +537,32 @@ def test_use_mnemonic_new_device_key( assert ( repo.use_mnemonic_new_device_key( device_name="imnew", - mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + mnemonic_phrase=mnemonic_phrase, ) is None ) +def test_use_mnemonic_expired_new_device_key( + some_tokens_repo, +): + repo = some_tokens_repo + expiration = datetime.now() - timedelta(minutes=5) + + key = repo.get_new_device_key() + assert key is not None + assert key.expires_at is not None + key.expires_at = expiration + assert not key.is_valid() + repo._store_new_device_key(key) + + with pytest.raises(NewDeviceKeyNotFound): + token = repo.use_mnemonic_new_device_key( + mnemonic_phrase=mnemonic_from_hex(key.key), + device_name="imnew", + ) + + def test_use_mnemonic_new_device_key_when_empty(empty_repo): repo = empty_repo diff --git a/tests/test_models.py b/tests/test_models.py index 078dfd5..2263e82 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -2,9 +2,17 @@ import pytest from datetime import datetime, timedelta from selfprivacy_api.models.tokens.recovery_key import RecoveryKey +from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey def test_recovery_key_expired(): expiration = datetime.now() - timedelta(minutes=5) key = RecoveryKey.generate(expiration=expiration, uses_left=2) assert not key.is_valid() + + +def test_new_device_key_expired(): + expiration = datetime.now() - timedelta(minutes=5) + key = NewDeviceKey.generate() + key.expires_at = expiration + assert not key.is_valid() From f33d5155b04840f5c2ebe3e830d017fb09f311f7 Mon Sep 17 00:00:00 2001 From: 
Houkime <> Date: Mon, 26 Dec 2022 14:03:55 +0000 Subject: [PATCH 113/129] refactor(tokens-repo): delete use_new_device_auth_token from auth utils --- selfprivacy_api/actions/api_tokens.py | 13 ++++++++++++ .../graphql/mutations/api_mutations.py | 4 +--- selfprivacy_api/rest/api_auth.py | 4 +--- selfprivacy_api/utils/auth.py | 21 ------------------- 4 files changed, 15 insertions(+), 27 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 394d3d9..3b180e8 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -11,6 +11,7 @@ from selfprivacy_api.repositories.tokens.exceptions import ( TokenNotFound, RecoveryKeyNotFound, InvalidMnemonic, + NewDeviceKeyNotFound, ) TOKEN_REPO = JsonTokensRepository() @@ -142,3 +143,15 @@ def get_new_device_auth_token() -> str: """ key = TOKEN_REPO.get_new_device_key() return Mnemonic(language="english").to_mnemonic(bytes.fromhex(key.key)) + + +def use_new_device_auth_token(mnemonic_phrase, name) -> str: + """Use the new device auth token by converting the mnemonic string to a byte array. + If the mnemonic phrase is valid then generate a device token and return it. + New device auth token must be deleted. 
+ """ + try: + token = TOKEN_REPO.use_mnemonic_new_device_key(mnemonic_phrase, name) + return token.token + except (NewDeviceKeyNotFound, InvalidMnemonic): + return None diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index 45961c8..0c413fb 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -15,6 +15,7 @@ from selfprivacy_api.actions.api_tokens import ( refresh_api_token, delete_new_device_auth_token, get_new_device_auth_token, + use_new_device_auth_token, ) from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( @@ -22,9 +23,6 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( MutationReturnInterface, ) -from selfprivacy_api.utils.auth import ( - use_new_device_auth_token, -) from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, diff --git a/selfprivacy_api/rest/api_auth.py b/selfprivacy_api/rest/api_auth.py index ab96bee..275dac3 100644 --- a/selfprivacy_api/rest/api_auth.py +++ b/selfprivacy_api/rest/api_auth.py @@ -15,13 +15,11 @@ from selfprivacy_api.actions.api_tokens import ( use_mnemonic_recovery_token, delete_new_device_auth_token, get_new_device_auth_token, + use_new_device_auth_token, ) from selfprivacy_api.dependencies import TokenHeader, get_token_header -from selfprivacy_api.utils.auth import ( - use_new_device_auth_token, -) router = APIRouter( prefix="/auth", diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 11836b1..53dffd7 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -200,24 +200,3 @@ def _get_new_device_auth_token(): if datetime.now() > expiration: return None return new_device["token"] - - -def use_new_device_auth_token(mnemonic_phrase, name): - """Use the new device auth token by converting the mnemonic string to a 
byte array. - If the mnemonic phrase is valid then generate a device token and return it. - New device auth token must be deleted. - """ - token_str = _get_new_device_auth_token() - if token_str is None: - return None - token = bytes.fromhex(token_str) - if not Mnemonic(language="english").check(mnemonic_phrase): - return None - phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) - if phrase_bytes != token: - return None - token = create_token(name) - with WriteUserData(UserDataFiles.TOKENS) as tokens: - if "new_device" in tokens: - del tokens["new_device"] - return token From 7cf295450b048b08c5fa0333cbe27cd6d9f0660f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 14:13:46 +0000 Subject: [PATCH 114/129] refactor(tokens-repo): do not use tokens repo directly from graphql --- selfprivacy_api/graphql/mutations/api_mutations.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index 0c413fb..49c49ad 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -24,17 +24,6 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( ) -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) -from selfprivacy_api.repositories.tokens.exceptions import ( - RecoveryKeyNotFound, - InvalidMnemonic, -) - -TOKEN_REPO = JsonTokensRepository() - - @strawberry.type class ApiKeyMutationReturn(MutationReturnInterface): key: typing.Optional[str] From 69577c2854d334cf3bfb9be566aa032615ad1285 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 14:29:05 +0000 Subject: [PATCH 115/129] refactor(tokens-repo): delete get_recovery_key_status from auth utils --- selfprivacy_api/graphql/queries/api_queries.py | 17 ++++++++--------- selfprivacy_api/utils/auth.py | 17 ----------------- 2 files changed, 8 insertions(+), 
26 deletions(-) diff --git a/selfprivacy_api/graphql/queries/api_queries.py b/selfprivacy_api/graphql/queries/api_queries.py index 7994a8f..7cf7c3e 100644 --- a/selfprivacy_api/graphql/queries/api_queries.py +++ b/selfprivacy_api/graphql/queries/api_queries.py @@ -4,13 +4,14 @@ import datetime import typing import strawberry from strawberry.types import Info -from selfprivacy_api.actions.api_tokens import get_api_tokens_with_caller_flag +from selfprivacy_api.actions.api_tokens import ( + get_api_tokens_with_caller_flag, + get_api_recovery_token_status, +) from selfprivacy_api.graphql import IsAuthenticated -from selfprivacy_api.utils import parse_date from selfprivacy_api.dependencies import get_api_version as get_api_version_dependency from selfprivacy_api.utils.auth import ( - get_recovery_token_status, is_recovery_token_exists, is_recovery_token_valid, ) @@ -51,7 +52,7 @@ def get_recovery_key_status() -> ApiRecoveryKeyStatus: expiration_date=None, uses_left=None, ) - status = get_recovery_token_status() + status = get_api_recovery_token_status() if status is None: return ApiRecoveryKeyStatus( exists=False, @@ -63,11 +64,9 @@ def get_recovery_key_status() -> ApiRecoveryKeyStatus: return ApiRecoveryKeyStatus( exists=True, valid=is_recovery_token_valid(), - creation_date=parse_date(status["date"]), - expiration_date=parse_date(status["expiration"]) - if status["expiration"] is not None - else None, - uses_left=status["uses_left"] if status["uses_left"] is not None else None, + creation_date=status.date, + expiration_date=status.expiration, + uses_left=status.uses_left, ) diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 53dffd7..53df508 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -132,23 +132,6 @@ def is_recovery_token_valid(): return datetime.now() < parse_date(recovery_token["expiration"]) -def get_recovery_token_status(): - """Get recovery token date of creation, expiration and uses left""" - 
with ReadUserData(UserDataFiles.TOKENS) as tokens: - if "recovery_token" not in tokens: - return None - recovery_token = tokens["recovery_token"] - return { - "date": recovery_token["date"], - "expiration": recovery_token["expiration"] - if "expiration" in recovery_token - else None, - "uses_left": recovery_token["uses_left"] - if "uses_left" in recovery_token - else None, - } - - def _get_recovery_token(): """Get recovery token""" with ReadUserData(UserDataFiles.TOKENS) as tokens: From b11e5a5f77dfa0d8af54581a18868aa097a0c528 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 14:51:37 +0000 Subject: [PATCH 116/129] refactor(tokens-repo): delete recovery key quieries from auth utils --- .../graphql/queries/api_queries.py | 17 ++-------------- selfprivacy_api/utils/auth.py | 20 ------------------- 2 files changed, 2 insertions(+), 35 deletions(-) diff --git a/selfprivacy_api/graphql/queries/api_queries.py b/selfprivacy_api/graphql/queries/api_queries.py index 7cf7c3e..cf56231 100644 --- a/selfprivacy_api/graphql/queries/api_queries.py +++ b/selfprivacy_api/graphql/queries/api_queries.py @@ -11,11 +11,6 @@ from selfprivacy_api.actions.api_tokens import ( from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.dependencies import get_api_version as get_api_version_dependency -from selfprivacy_api.utils.auth import ( - is_recovery_token_exists, - is_recovery_token_valid, -) - def get_api_version() -> str: """Get API version""" @@ -44,16 +39,8 @@ class ApiRecoveryKeyStatus: def get_recovery_key_status() -> ApiRecoveryKeyStatus: """Get recovery key status""" - if not is_recovery_token_exists(): - return ApiRecoveryKeyStatus( - exists=False, - valid=False, - creation_date=None, - expiration_date=None, - uses_left=None, - ) status = get_api_recovery_token_status() - if status is None: + if status is None or not status.exists: return ApiRecoveryKeyStatus( exists=False, valid=False, @@ -63,7 +50,7 @@ def get_recovery_key_status() -> 
ApiRecoveryKeyStatus: ) return ApiRecoveryKeyStatus( exists=True, - valid=is_recovery_token_valid(), + valid=status.valid, creation_date=status.date, expiration_date=status.expiration, uses_left=status.uses_left, diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 53df508..72c791b 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -112,26 +112,6 @@ def create_token(name): return token -def is_recovery_token_exists(): - """Check if recovery token exists""" - with ReadUserData(UserDataFiles.TOKENS) as tokens: - return "recovery_token" in tokens - - -def is_recovery_token_valid(): - """Check if recovery token is valid""" - with ReadUserData(UserDataFiles.TOKENS) as tokens: - if "recovery_token" not in tokens: - return False - recovery_token = tokens["recovery_token"] - if "uses_left" in recovery_token and recovery_token["uses_left"] is not None: - if recovery_token["uses_left"] <= 0: - return False - if "expiration" not in recovery_token or recovery_token["expiration"] is None: - return True - return datetime.now() < parse_date(recovery_token["expiration"]) - - def _get_recovery_token(): """Get recovery token""" with ReadUserData(UserDataFiles.TOKENS) as tokens: From b3d6251d11e02d1e5f091f69e6a2c2e1432681fe Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 14:54:07 +0000 Subject: [PATCH 117/129] refactor(tokens-repo): delete generare recovery key from auth utils --- selfprivacy_api/utils/auth.py | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 72c791b..6cdaf09 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -120,37 +120,6 @@ def _get_recovery_token(): return tokens["recovery_token"]["token"] -def generate_recovery_token( - expiration: typing.Optional[datetime], uses_left: typing.Optional[int] -) -> str: - """Generate a 24 bytes recovery token and return a 
mneomnic word list. - Write a string representation of the recovery token to the tokens.json file. - """ - # expires must be a date or None - # uses_left must be an integer or None - if expiration is not None: - if not isinstance(expiration, datetime): - raise TypeError("expires must be a datetime object") - if uses_left is not None: - if not isinstance(uses_left, int): - raise TypeError("uses_left must be an integer") - if uses_left <= 0: - raise ValueError("uses_left must be greater than 0") - - recovery_token = secrets.token_bytes(24) - recovery_token_str = recovery_token.hex() - with WriteUserData(UserDataFiles.TOKENS) as tokens: - tokens["recovery_token"] = { - "token": recovery_token_str, - "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")), - "expiration": expiration.strftime("%Y-%m-%dT%H:%M:%S.%f") - if expiration is not None - else None, - "uses_left": uses_left if uses_left is not None else None, - } - return Mnemonic(language="english").to_mnemonic(recovery_token) - - def _get_new_device_auth_token(): """Get new device auth token. 
If it is expired, return None""" with ReadUserData(UserDataFiles.TOKENS) as tokens: From 25326b75ca9be6cfc119197ed7db29bdc29bee90 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 14:55:40 +0000 Subject: [PATCH 118/129] refactor(tokens-repo): delete create token from auth utils --- selfprivacy_api/utils/auth.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 6cdaf09..860fa0d 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -97,21 +97,6 @@ def _generate_token(): return token -def create_token(name): - """Create new token""" - token = _generate_token() - name = _validate_token_name(name) - with WriteUserData(UserDataFiles.TOKENS) as tokens: - tokens["tokens"].append( - { - "token": token, - "name": name, - "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")), - } - ) - return token - - def _get_recovery_token(): """Get recovery token""" with ReadUserData(UserDataFiles.TOKENS) as tokens: From 87ea88c50ac57a8fc1eb854db05a647ade493227 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 14:56:39 +0000 Subject: [PATCH 119/129] refactor(tokens-repo): delete get token name from auth utils --- selfprivacy_api/utils/auth.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 860fa0d..2995aea 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -73,15 +73,6 @@ def is_token_valid(token): return False -def get_token_name(token: str) -> typing.Optional[str]: - """Return the name of the token provided""" - with ReadUserData(UserDataFiles.TOKENS) as tokens: - for t in tokens["tokens"]: - if t["token"] == token: - return t["name"] - return None - - class BasicTokenInfo(BaseModel): """Token info""" From 7d9bccf4ec67b83a017caa19495092b1ad33c176 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 15:18:02 +0000 Subject: 
[PATCH 120/129] fix(tokens-repo): make is_token_valid infallible --- .../repositories/tokens/abstract_tokens_repository.py | 5 +---- .../test_graphql/test_repository/test_tokens_repository.py | 7 +++++++ 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 7601ef4..dcd2b0d 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -63,10 +63,7 @@ class AbstractTokensRepository(ABC): def is_token_valid(self, token_string: str) -> bool: """Check if the token is valid""" - token = self.get_token_by_token_string(token_string) - if token is None: - return False - return True + return token_string in [token.token for token in self.get_tokens()] def is_token_name_exists(self, token_name: str) -> bool: """Check if the token name exists""" diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index a95878d..dd4f0ef 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -207,6 +207,13 @@ def test_get_token_by_non_existent_name(some_tokens_repo): assert repo.get_token_by_name(token_name="badname") is None +def test_is_token_valid(some_tokens_repo): + repo = some_tokens_repo + token = repo.get_tokens()[0] + assert repo.is_token_valid(token.token) + assert not repo.is_token_valid("gibberish") + + def test_is_token_name_pair_valid(some_tokens_repo): repo = some_tokens_repo token = repo.get_tokens()[0] From 8235c3595c63f611903c91be0d0f8cab34c07d0a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 15:20:58 +0000 Subject: [PATCH 121/129] refactor(tokens-repo): delete is_token_valid from auth utils --- selfprivacy_api/actions/api_tokens.py | 5 +++++ 
selfprivacy_api/dependencies.py | 2 +- selfprivacy_api/graphql/__init__.py | 2 +- selfprivacy_api/utils/auth.py | 7 ------- 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 3b180e8..57828bc 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -39,6 +39,11 @@ def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCa ] +def is_token_valid(token) -> bool: + """Check if token is valid""" + return TOKEN_REPO.is_token_valid(token) + + class NotFoundException(Exception): """Not found exception""" diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 9568a40..1348f65 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -2,7 +2,7 @@ from fastapi import Depends, HTTPException, status from fastapi.security import APIKeyHeader from pydantic import BaseModel -from selfprivacy_api.utils.auth import is_token_valid +from selfprivacy_api.actions.api_tokens import is_token_valid class TokenHeader(BaseModel): diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index 7372197..6124a1a 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -4,7 +4,7 @@ import typing from strawberry.permission import BasePermission from strawberry.types import Info -from selfprivacy_api.utils.auth import is_token_valid +from selfprivacy_api.actions.api_tokens import is_token_valid class IsAuthenticated(BasePermission): diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 2995aea..48cf450 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -66,13 +66,6 @@ def _validate_token_name(name): return name -def is_token_valid(token): - """Check if token is valid""" - if token in _get_tokens(): - return True - return False - - class BasicTokenInfo(BaseModel): 
"""Token info""" From 450ff41ebddff1000c3970347fc0cacac55841be Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 15:51:12 +0000 Subject: [PATCH 122/129] feat(tokens-repo): make device names unique before storage --- .../tokens/abstract_tokens_repository.py | 18 +++++++++++++++++- .../test_repository/test_tokens_repository.py | 11 +++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index dcd2b0d..931f64d 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -2,6 +2,8 @@ from abc import ABC, abstractmethod from datetime import datetime from typing import Optional from mnemonic import Mnemonic +from secrets import randbelow +import re from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.repositories.tokens.exceptions import ( @@ -39,7 +41,8 @@ class AbstractTokensRepository(ABC): def create_token(self, device_name: str) -> Token: """Create new token""" - new_token = Token.generate(device_name) + unique_name = self._make_unique_device_name(device_name) + new_token = Token.generate(unique_name) self._store_token(new_token) @@ -160,6 +163,19 @@ class AbstractTokensRepository(ABC): def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]: """Retrieves new device key that is already stored.""" + def _make_unique_device_name(self, name: str) -> str: + """Token name must be an alphanumeric string and not empty. + Replace invalid characters with '_' + If name exists, add a random number to the end of the name until it is unique. 
+ """ + if not re.match("^[a-zA-Z0-9]*$", name): + name = re.sub("[^a-zA-Z0-9]", "_", name) + if name == "": + name = "Unknown device" + while self.is_token_name_exists(name): + name += str(randbelow(10)) + return name + # TODO: find a proper place for it def _assert_mnemonic(self, hex_key: str, mnemonic_phrase: str): """Return true if hex string matches the phrase, false otherwise diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index dd4f0ef..b9a9277 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -257,6 +257,17 @@ def test_create_token(empty_repo, mock_token_generate): ] +def test_create_token_existing(some_tokens_repo): + repo = some_tokens_repo + old_token = repo.get_tokens()[0] + + new_token = repo.create_token(device_name=old_token.device_name) + assert new_token.device_name != old_token.device_name + + assert old_token in repo.get_tokens() + assert new_token in repo.get_tokens() + + def test_delete_token(some_tokens_repo): repo = some_tokens_repo original_tokens = repo.get_tokens() From 3ecfb2eacbb157e2c5a22e30913df3778e3eecfd Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 15:54:33 +0000 Subject: [PATCH 123/129] refactor(tokens-repo): delete auth utils --- selfprivacy_api/utils/auth.py | 103 ---------------------------------- 1 file changed, 103 deletions(-) delete mode 100644 selfprivacy_api/utils/auth.py diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py deleted file mode 100644 index 48cf450..0000000 --- a/selfprivacy_api/utils/auth.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python3 -"""Token management utils""" -import secrets -from datetime import datetime, timedelta -import re -import typing - -from pydantic import BaseModel -from mnemonic import Mnemonic - -from . 
import ReadUserData, UserDataFiles, WriteUserData, parse_date - -""" -Token are stored in the tokens.json file. -File contains device tokens, recovery token and new device auth token. -File structure: -{ - "tokens": [ - { - "token": "device token", - "name": "device name", - "date": "date of creation", - } - ], - "recovery_token": { - "token": "recovery token", - "date": "date of creation", - "expiration": "date of expiration", - "uses_left": "number of uses left" - }, - "new_device": { - "token": "new device auth token", - "date": "date of creation", - "expiration": "date of expiration", - } -} -Recovery token may or may not have expiration date and uses_left. -There may be no recovery token at all. -Device tokens must be unique. -""" - - -def _get_tokens(): - """Get all tokens as list of tokens of every device""" - with ReadUserData(UserDataFiles.TOKENS) as tokens: - return [token["token"] for token in tokens["tokens"]] - - -def _get_token_names(): - """Get all token names""" - with ReadUserData(UserDataFiles.TOKENS) as tokens: - return [t["name"] for t in tokens["tokens"]] - - -def _validate_token_name(name): - """Token name must be an alphanumeric string and not empty. - Replace invalid characters with '_' - If token name exists, add a random number to the end of the name until it is unique. 
- """ - if not re.match("^[a-zA-Z0-9]*$", name): - name = re.sub("[^a-zA-Z0-9]", "_", name) - if name == "": - name = "Unknown device" - while name in _get_token_names(): - name += str(secrets.randbelow(10)) - return name - - -class BasicTokenInfo(BaseModel): - """Token info""" - - name: str - date: datetime - - -def _generate_token(): - """Generates new token and makes sure it is unique""" - token = secrets.token_urlsafe(32) - while token in _get_tokens(): - token = secrets.token_urlsafe(32) - return token - - -def _get_recovery_token(): - """Get recovery token""" - with ReadUserData(UserDataFiles.TOKENS) as tokens: - if "recovery_token" not in tokens: - return None - return tokens["recovery_token"]["token"] - - -def _get_new_device_auth_token(): - """Get new device auth token. If it is expired, return None""" - with ReadUserData(UserDataFiles.TOKENS) as tokens: - if "new_device" not in tokens: - return None - new_device = tokens["new_device"] - if "expiration" not in new_device: - return None - expiration = parse_date(new_device["expiration"]) - if datetime.now() > expiration: - return None - return new_device["token"] From 67872d7c556314b2a58bdbb8daa028746ebbf3d5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 26 Dec 2022 16:22:16 +0000 Subject: [PATCH 124/129] test(tokens-repo): use mnemonic_from_hex consistently --- .../test_repository/test_tokens_repository.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index b9a9277..020a868 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -33,6 +33,10 @@ ORIGINAL_DEVICE_NAMES = [ ] +def mnemonic_from_hex(hexkey): + return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey)) + + @pytest.fixture def empty_keys(mocker, datadir): 
mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "empty_keys.json") @@ -388,9 +392,7 @@ def test_use_mnemonic_expired_recovery_key( with pytest.raises(RecoveryKeyNotFound): token = repo.use_mnemonic_recovery_key( - mnemonic_phrase=Mnemonic(language="english").to_mnemonic( - bytes.fromhex(recovery_key.key) - ), + mnemonic_phrase=mnemonic_from_hex(recovery_key.key), device_name="newdevice", ) @@ -455,9 +457,7 @@ def test_use_mnemonic_recovery_key(some_tokens_repo, recovery_key_uses_left): recovery_key = repo.get_recovery_key() token = repo.use_mnemonic_recovery_key( - mnemonic_phrase=Mnemonic(language="english").to_mnemonic( - bytes.fromhex(recovery_key.key) - ), + mnemonic_phrase=mnemonic_from_hex(recovery_key.key), device_name="newdevice", ) @@ -531,10 +531,6 @@ def test_use_not_exists_mnemonic_new_device_key( ) -def mnemonic_from_hex(hexkey): - return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey)) - - def test_use_mnemonic_new_device_key(empty_repo): repo = empty_repo key = repo.get_new_device_key() From 999dd95cab5d4d6b23a812b74378b2bd02aad873 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 27 Dec 2022 07:10:14 +0300 Subject: [PATCH 125/129] ci: Add redis to CI pipeline --- .drone.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.drone.yml b/.drone.yml index 0f5f93a..24ab5da 100644 --- a/.drone.yml +++ b/.drone.yml @@ -5,12 +5,16 @@ name: default steps: - name: Run Tests and Generate Coverage Report commands: + - kill $(ps aux | grep '[r]edis-server 127.0.0.1:6389' | awk '{print $2}') + - redis-server --bind 127.0.0.1 --port 6389 >/dev/null & - coverage run -m pytest -q - coverage xml - sonar-scanner -Dsonar.projectKey=SelfPrivacy-REST-API -Dsonar.sources=. 
-Dsonar.host.url=http://analyzer.lan:9000 -Dsonar.login="$SONARQUBE_TOKEN" environment: SONARQUBE_TOKEN: from_secret: SONARQUBE_TOKEN + USE_REDIS_PORT: 6389 + - name: Run Bandit Checks commands: From cb403a94bdcfa9b853273d297e2b4d3ffac23402 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 30 Dec 2022 21:06:16 +0300 Subject: [PATCH 126/129] fix: typing --- selfprivacy_api/actions/api_tokens.py | 2 +- selfprivacy_api/jobs/__init__.py | 49 ++++++++++--------- .../tokens/abstract_tokens_repository.py | 11 +++-- .../tokens/json_tokens_repository.py | 8 +-- .../tokens/redis_tokens_repository.py | 46 ++++++++++------- 5 files changed, 67 insertions(+), 49 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 57828bc..38133fd 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -150,7 +150,7 @@ def get_new_device_auth_token() -> str: return Mnemonic(language="english").to_mnemonic(bytes.fromhex(key.key)) -def use_new_device_auth_token(mnemonic_phrase, name) -> str: +def use_new_device_auth_token(mnemonic_phrase, name) -> Optional[str]: """Use the new device auth token by converting the mnemonic string to a byte array. If the mnemonic phrase is valid then generate a device token and return it. New device auth token must be deleted. diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 1547b84..fe4a053 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -97,8 +97,8 @@ class Jobs: error=None, result=None, ) - r = RedisPool().get_connection() - _store_job_as_hash(r, _redis_key_from_uuid(job.uid), job) + redis = RedisPool().get_connection() + _store_job_as_hash(redis, _redis_key_from_uuid(job.uid), job) return job @staticmethod @@ -113,10 +113,10 @@ class Jobs: """ Remove a job from the jobs list. 
""" - r = RedisPool().get_connection() + redis = RedisPool().get_connection() key = _redis_key_from_uuid(job_uuid) - if (r.exists(key)): - r.delete(key) + if redis.exists(key): + redis.delete(key) return True return False @@ -149,12 +149,12 @@ class Jobs: if status in (JobStatus.FINISHED, JobStatus.ERROR): job.finished_at = datetime.datetime.now() - r = RedisPool().get_connection() + redis = RedisPool().get_connection() key = _redis_key_from_uuid(job.uid) - if r.exists(key): - _store_job_as_hash(r, key, job) + if redis.exists(key): + _store_job_as_hash(redis, key, job) if status in (JobStatus.FINISHED, JobStatus.ERROR): - r.expire(key, JOB_EXPIRATION_SECONDS) + redis.expire(key, JOB_EXPIRATION_SECONDS) return job @@ -163,10 +163,10 @@ class Jobs: """ Get a job from the jobs list. """ - r = RedisPool().get_connection() + redis = RedisPool().get_connection() key = _redis_key_from_uuid(uid) - if r.exists(key): - return _job_from_hash(r, key) + if redis.exists(key): + return _job_from_hash(redis, key) return None @staticmethod @@ -174,9 +174,14 @@ class Jobs: """ Get the jobs list. 
""" - r = RedisPool().get_connection() - jobs = r.keys("jobs:*") - return [_job_from_hash(r, job_key) for job_key in jobs] + redis = RedisPool().get_connection() + job_keys = redis.keys("jobs:*") + jobs = [] + for job_key in job_keys: + job = _job_from_hash(redis, job_key) + if job is not None: + jobs.append(job) + return jobs @staticmethod def is_busy() -> bool: @@ -189,11 +194,11 @@ class Jobs: return False -def _redis_key_from_uuid(uuid): - return "jobs:" + str(uuid) +def _redis_key_from_uuid(uuid_string): + return "jobs:" + str(uuid_string) -def _store_job_as_hash(r, redis_key, model): +def _store_job_as_hash(redis, redis_key, model): for key, value in model.dict().items(): if isinstance(value, uuid.UUID): value = str(value) @@ -201,12 +206,12 @@ def _store_job_as_hash(r, redis_key, model): value = value.isoformat() if isinstance(value, JobStatus): value = value.value - r.hset(redis_key, key, str(value)) + redis.hset(redis_key, key, str(value)) -def _job_from_hash(r, redis_key): - if r.exists(redis_key): - job_dict = r.hgetall(redis_key) +def _job_from_hash(redis, redis_key): + if redis.exists(redis_key): + job_dict = redis.hgetall(redis_key) for date in [ "created_at", "updated_at", diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 931f64d..3a20ede 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -17,7 +17,7 @@ from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey class AbstractTokensRepository(ABC): - def get_token_by_token_string(self, token_string: str) -> Optional[Token]: + def get_token_by_token_string(self, token_string: str) -> Token: """Get the token by token""" tokens = self.get_tokens() for token in tokens: @@ -26,7 +26,7 @@ class AbstractTokensRepository(ABC): raise TokenNotFound("Token not found!") - def get_token_by_name(self, 
token_name: str) -> Optional[Token]: + def get_token_by_name(self, token_name: str) -> Token: """Get the token by name""" tokens = self.get_tokens() for token in tokens: @@ -101,7 +101,12 @@ class AbstractTokensRepository(ABC): if not self.is_recovery_key_valid(): raise RecoveryKeyNotFound("Recovery key not found") - recovery_hex_key = self.get_recovery_key().key + recovery_key = self.get_recovery_key() + + if recovery_key is None: + raise RecoveryKeyNotFound("Recovery key not found") + + recovery_hex_key = recovery_key.key if not self._assert_mnemonic(recovery_hex_key, mnemonic_phrase): raise RecoveryKeyNotFound("Recovery key not found") diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 394c046..77e1311 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -85,13 +85,13 @@ class JsonTokensRepository(AbstractTokensRepository): recovery_key = RecoveryKey.generate(expiration, uses_left) with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - expiration = recovery_key.expires_at - if expiration is not None: - expiration = expiration.strftime(DATETIME_FORMAT) + key_expiration: Optional[str] = None + if recovery_key.expires_at is not None: + key_expiration = recovery_key.expires_at.strftime(DATETIME_FORMAT) tokens_file["recovery_token"] = { "token": recovery_key.key, "date": recovery_key.created_at.strftime(DATETIME_FORMAT), - "expiration": expiration, + "expiration": key_expiration, "uses_left": recovery_key.uses_left, } diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index d665553..c72e231 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -32,29 +32,34 @@ class 
RedisTokensRepository(AbstractTokensRepository): def get_tokens(self) -> list[Token]: """Get the tokens""" - r = self.connection - token_keys = r.keys(TOKENS_PREFIX + "*") - return [self._token_from_hash(key) for key in token_keys] + redis = self.connection + token_keys = redis.keys(TOKENS_PREFIX + "*") + tokens = [] + for key in token_keys: + token = self._token_from_hash(key) + if token is not None: + tokens.append(token) + return tokens def delete_token(self, input_token: Token) -> None: """Delete the token""" - r = self.connection + redis = self.connection key = RedisTokensRepository._token_redis_key(input_token) if input_token not in self.get_tokens(): raise TokenNotFound - r.delete(key) + redis.delete(key) def reset(self): for token in self.get_tokens(): self.delete_token(token) self.delete_new_device_key() - r = self.connection - r.delete(RECOVERY_KEY_REDIS_KEY) + redis = self.connection + redis.delete(RECOVERY_KEY_REDIS_KEY) def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" - r = self.connection - if r.exists(RECOVERY_KEY_REDIS_KEY): + redis = self.connection + if redis.exists(RECOVERY_KEY_REDIS_KEY): return self._recovery_key_from_hash(RECOVERY_KEY_REDIS_KEY) return None @@ -74,8 +79,8 @@ class RedisTokensRepository(AbstractTokensRepository): def delete_new_device_key(self) -> None: """Delete the new device key""" - r = self.connection - r.delete(NEW_DEVICE_KEY_REDIS_KEY) + redis = self.connection + redis.delete(NEW_DEVICE_KEY_REDIS_KEY) @staticmethod def _token_redis_key(token: Token) -> str: @@ -89,10 +94,13 @@ class RedisTokensRepository(AbstractTokensRepository): def _decrement_recovery_token(self): """Decrement recovery key use count by one""" if self.is_recovery_key_valid(): - uses_left = self.get_recovery_key().uses_left + recovery_key = self.get_recovery_key() + if recovery_key is None: + return + uses_left = recovery_key.uses_left if uses_left is not None: - r = self.connection - r.hset(RECOVERY_KEY_REDIS_KEY, 
"uses_left", uses_left - 1) + redis = self.connection + redis.hset(RECOVERY_KEY_REDIS_KEY, "uses_left", uses_left - 1) def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]: """Retrieves new device key that is already stored.""" @@ -116,9 +124,9 @@ class RedisTokensRepository(AbstractTokensRepository): d[key] = None def _model_dict_from_hash(self, redis_key: str) -> Optional[dict]: - r = self.connection - if r.exists(redis_key): - token_dict = r.hgetall(redis_key) + redis = self.connection + if redis.exists(redis_key): + token_dict = redis.hgetall(redis_key) RedisTokensRepository._prepare_model_dict(token_dict) return token_dict return None @@ -139,8 +147,8 @@ class RedisTokensRepository(AbstractTokensRepository): return self._hash_as_model(redis_key, NewDeviceKey) def _store_model_as_hash(self, redis_key, model): - r = self.connection + redis = self.connection for key, value in model.dict().items(): if isinstance(value, datetime): value = value.isoformat() - r.hset(redis_key, key, str(value)) + redis.hset(redis_key, key, str(value)) From 24353ca56acddd4a75c37326b0339b9ee2584eb2 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 30 Dec 2022 21:10:55 +0300 Subject: [PATCH 127/129] chore: bump API version --- selfprivacy_api/dependencies.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 1348f65..f27bef3 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.0.9" + return "2.1.0" diff --git a/setup.py b/setup.py index eabc165..9b819c4 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.0.0", + version="2.1.0", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From bcfb8e62e9908268cde8f397c7d9bc645ba99d2f Mon Sep 17 
00:00:00 2001 From: def Date: Wed, 16 Nov 2022 19:12:38 +0200 Subject: [PATCH 128/129] refactor(repository): Tokens repository JSON backend (#18) Co-authored-by: def Co-authored-by: Inex Code Reviewed-on: https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/pulls/18 Co-authored-by: def Co-committed-by: def --- .../test_tokens_repository/null_keys.json | 26 ++++++++++++++ .../test_tokens_repository/tokens.json | 35 +++++++++++++++++++ 2 files changed, 61 insertions(+) create mode 100644 tests/test_graphql/test_repository/test_tokens_repository/null_keys.json create mode 100644 tests/test_graphql/test_repository/test_tokens_repository/tokens.json diff --git a/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json b/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json new file mode 100644 index 0000000..45e6f90 --- /dev/null +++ b/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json @@ -0,0 +1,26 @@ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z" + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z" + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698" + } + ], + "recovery_token": null, + "new_device": null +} diff --git a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json new file mode 100644 index 0000000..bb1805c --- /dev/null +++ b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json @@ -0,0 +1,35 @@ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": 
"2022-07-15 17:41:31.675698" + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z" + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z" + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698" + } + ], + "recovery_token": { + "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + "date": "2022-11-11T11:48:54.228038", + "expiration": null, + "uses_left": 2 + }, + "new_device": { + "token": "2237238de23dc71ab558e317bdb8ff8e", + "date": "2022-10-26 20:50:47.973212", + "expiration": "2022-10-26 21:00:47.974153" + } +} From 2fc635da71389402ebc67dbc6a2f4e5a898fb688 Mon Sep 17 00:00:00 2001 From: def Date: Thu, 29 Dec 2022 20:30:21 +0200 Subject: [PATCH 129/129] docs: add CONTRIBUTING.md --- CONTRIBUTING.md | 83 ++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 68 insertions(+), 15 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7f82cfa..45ebd2a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,13 +1,76 @@ # SelfPrivacy API contributors guide -## Commit messages +Instructions for [VScode](https://code.visualstudio.com) or [VScodium](https://github.com/VSCodium/vscodium) under Unix-like platform. -We follow [Convetional Commits](https://www.conventionalcommits.org/en/v1.0.0/) specification. Please read it before commiting. +1. **To get started, create an account for yourself on the** [**SelfPrivacy Gitea**](https://git.selfprivacy.org/user/sign_up). 
Proceed to fork +the [repository](https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api), and clone it on your local computer: + + ```git clone https://git.selfprivacy.org/your_user_name/selfprivacy-rest-api``` + +2. **Install Nix** + + ```sh <(curl -L https://nixos.org/nix/install)``` + + For detailed installation information, please review and follow: [link](https://nixos.org/manual/nix/stable/installation/installing-binary.html#installing-a-binary-distribution). + +3. **Change directory to the cloned repository and start a nix shell:** + + ```cd selfprivacy-rest-api && nix-shell``` + + Nix will install all of the necessary packages for development work; all further actions will take place only within nix-shell. + +4. **Install these plugins for VScode/VScodium** + + Required: ```ms-python.python```, ```ms-python.vscode-pylance``` + + Optional, but highly recommended: ```ms-python.black-formatter```, ```bbenoist.Nix```, ```ryanluker.vscode-coverage-gutters``` + +5. **Set the path to the python interpreter from the nix store.** To do this, execute the command: + + ```whereis python``` + + Copy the path that starts with ```/nix/store/``` and ends with ```env/bin/python``` + + ```/nix/store/???-python3-3.9.??-env/bin/python``` + + Click on the python version selection in the lower right corner, and replace the path to the interpreter in the project with the one you copied from the terminal. + +6. **Congratulations :) Now you can develop new changes and test the project locally in a Nix environment.** + +## What do you need to know before starting development work? +- RestAPI is no longer utilized; the project has moved to [GraphQL](https://graphql.org). However, the API functionality still works on Rest. + + +## What to do after making changes to the repository?
+ +**Run unit tests** using ```pytest .``` +Make sure that all tests pass successfully and the API works correctly. For convenience, you can use the built-in VScode interface. + +To check the code coverage percentage, execute the command: + +```coverage run -m pytest && coverage xml && coverage report``` + +Next, use the recommended extension ```ryanluker.vscode-coverage-gutters```, navigate to one of the test files, and click the "watch" button on the bottom panel of VScode. + +**Format (lint) the code.** We use [black](https://pypi.org/project/black/) formatting; enter +```black .``` to automatically format files, or use the recommended extension. + +**And please remember, we have adopted a** [**commit naming convention**](https://www.conventionalcommits.org/en/v1.0.0/); follow the link for more information. + +Please request a review from at least one of the other maintainers. If you are not sure who to request, request a review from the SelfPrivacy/Devs team. + +## Helpful links! + +**SelfPrivacy Contributor chat :3** + +- [**Telegram:** @selfprivacy_dev](https://t.me/selfprivacy_dev) +- [**Matrix:** #dev:selfprivacy.org](https://matrix.to/#/#dev:selfprivacy.org) + +**Helpful material to review:** + +- [GraphQL Query Language Documentation](https://graphql.org/) +- [Documentation Strawberry - python library for working with GraphQL](https://strawberry.rocks/docs/) +- [Nix Documentation](https://nixos.org/guides/ad-hoc-developer-environments.html) ### Track your time @@ -23,13 +86,3 @@ fixes #4, spent @1h30m ``` [Timewarrior](https://timewarrior.net/) is a good tool for tracking time. - -## Code style - -We use [Black]( - https://pypi.org/project/black/ -) for code formatting. Please install it and run `black .` before commiting. - -## Pull requests - -Please request a review from at least one of the other maintainers. If you are not sure who to request, request a review from SelfPrivacy/Devs team.