From 75e3143c82c77bacaacd2d424484b53f070b20d3 Mon Sep 17 00:00:00 2001 From: inexcode Date: Fri, 24 Jun 2022 15:26:51 +0300 Subject: [PATCH 01/39] strawberry init --- .vscode/settings.json | 4 +- selfprivacy_api/graphql/__init__.py | 0 selfprivacy_api/graphql/queries/__init__.py | 0 selfprivacy_api/graphql/queries/api.py | 25 +++++++++ selfprivacy_api/graphql/queries/common.py | 26 +++++++++ selfprivacy_api/graphql/queries/providers.py | 13 +++++ selfprivacy_api/graphql/queries/system.py | 59 ++++++++++++++++++++ shell.nix | 24 ++++++++ strawberry-graphql.patch | 12 ++++ 9 files changed, 162 insertions(+), 1 deletion(-) create mode 100644 selfprivacy_api/graphql/__init__.py create mode 100644 selfprivacy_api/graphql/queries/__init__.py create mode 100644 selfprivacy_api/graphql/queries/api.py create mode 100644 selfprivacy_api/graphql/queries/common.py create mode 100644 selfprivacy_api/graphql/queries/providers.py create mode 100644 selfprivacy_api/graphql/queries/system.py create mode 100644 strawberry-graphql.patch diff --git a/.vscode/settings.json b/.vscode/settings.json index ccb092d..8f927dc 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -6,5 +6,7 @@ "tests" ], "python.testing.unittestEnabled": false, - "python.testing.pytestEnabled": true + "python.testing.pytestEnabled": true, + "python.languageServer": "Pylance", + "python.analysis.typeCheckingMode": "basic" } \ No newline at end of file diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/graphql/queries/__init__.py b/selfprivacy_api/graphql/queries/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/graphql/queries/api.py b/selfprivacy_api/graphql/queries/api.py new file mode 100644 index 0000000..741a21e --- /dev/null +++ b/selfprivacy_api/graphql/queries/api.py @@ -0,0 +1,25 @@ +"""API access status""" +import datetime +import string +import typing +import strawberry + +@strawberry.type +class ApiDevice: + name: str + creation_date: datetime.datetime + is_caller: bool + +@strawberry.type +class ApiRecoveryKeyStatus: + exists: bool + valid: bool + creation_date: datetime.datetime + expiration_date: typing.Optional[datetime.datetime] + uses_left: typing.Optional[int] + +@strawberry.type +class Api: + version: str + devices: typing.List[ApiDevice] + recovery_key: ApiRecoveryKeyStatus diff --git a/selfprivacy_api/graphql/queries/common.py b/selfprivacy_api/graphql/queries/common.py new file mode 100644 index 0000000..9c9c1cb --- /dev/null +++ b/selfprivacy_api/graphql/queries/common.py @@ -0,0 +1,26 @@ +"""Common types and enums used by different types of queries.""" +from enum import Enum +import datetime +import typing +import strawberry + +@strawberry.enum +class Severity(Enum): + """ + Severity of an alert. + """ + INFO = "INFO" + WARNING = "WARNING" + ERROR = "ERROR" + CRITICAL = "CRITICAL" + SUCCESS = "SUCCESS" + +@strawberry.type +class Alert: + """ + Alert type. 
+ """ + severity: Severity + title: str + message: str + timestamp: typing.Optional[datetime.datetime] diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py new file mode 100644 index 0000000..6d61bac --- /dev/null +++ b/selfprivacy_api/graphql/queries/providers.py @@ -0,0 +1,13 @@ +"""Enums representing different service providers.""" +from enum import Enum +import datetime +import typing +import strawberry + +@strawberry.enum +class DnsProvider(Enum): + CLOUDFLARE = "CLOUDFLARE" + +@strawberry.enum +class ServerProvider(Enum): + HETZNER = "HETZNER" diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py new file mode 100644 index 0000000..5ab3a36 --- /dev/null +++ b/selfprivacy_api/graphql/queries/system.py @@ -0,0 +1,59 @@ +import typing +import strawberry + +from selfprivacy_api.graphql.queries.common import Alert +from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvider + +@strawberry.type +class DnsRecord: + recordType: str + name: str + content: str + ttl: int + priority: typing.Optional[int] + +@strawberry.type +class SystemDomainInfo: + domain: str + hostname: str + provider: DnsProvider + required_dns_records: typing.List[DnsRecord] + +@strawberry.type +class AutoUpgradeOptions: + enable: bool + allow_reboot: bool + +@strawberry.type +class SshSettings: + enable: bool + password_authentication: bool + root_ssh_keys: typing.List[str] + +@strawberry.type +class SystemSettings: + auto_upgrade: AutoUpgradeOptions + ssh: SshSettings + timezone: str + +@strawberry.type +class SystemInfo: + system_version: str + python_version: str + +@strawberry.type +class SystemProviderInfo: + provider: ServerProvider + id: str + +@strawberry.type +class System: + """ + Base system type which represents common system status + """ + status: Alert + domain: SystemDomainInfo + settings: SystemSettings + info: SystemInfo + provider: SystemProviderInfo + busy: bool \ No newline at end of file diff --git a/shell.nix b/shell.nix index 79f3623..947af0b 100644 --- a/shell.nix +++ b/shell.nix @@ -16,6 +16,30 @@ let mnemonic coverage pylint + pydantic + typing-extensions + (buildPythonPackage rec { + pname = "strawberry-graphql"; + version = "0.114.5"; + format = "pyproject"; + patches = [ + ./strawberry-graphql.patch + ]; + propagatedBuildInputs = [ + typing-extensions + graphql-core + python-multipart + python-dateutil + flask + pydantic + pygments + poetry + ]; + src = fetchPypi { + inherit pname version; + sha256 = "b6e007281cf29a66eeba66a512744853d8aa53b4ca2525befb6f350bb7b24df6"; + }; + }) ]); in pkgs.mkShell { diff --git a/strawberry-graphql.patch b/strawberry-graphql.patch new file mode 100644 index 0000000..48a04d4 --- /dev/null +++ b/strawberry-graphql.patch @@ -0,0 +1,12 @@ +diff --git a/pyproject.toml b/pyproject.toml +index 3283fce..89d3e8c 100644 +--- a/pyproject.toml ++++ b/pyproject.toml +@@ -45,7 +45,6 @@ python-multipart = "^0.0.5" + sanic = {version = ">=20.12.2,<22.0.0", optional = true} + aiohttp = {version = "^3.7.4.post0", optional = true} + fastapi = {version = ">=0.65.2", optional = true} +-"backports.cached-property" = "^1.0.1" + + [tool.poetry.dev-dependencies] + pytest = "^7.1" From 99beee40d6130838ecb09ae67c78ddf5f6dafc03 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 16:05:18 +0300 Subject: [PATCH 02/39] Add integration with flask --- selfprivacy_api/app.py | 11 +++++++++++ selfprivacy_api/graphql/__init__.py | 15 +++++++++++++++ 
selfprivacy_api/graphql/queries/api.py | 5 ++++- selfprivacy_api/graphql/queries/system.py | 11 ++++++++++- selfprivacy_api/resolvers/api.py | 5 +++++ selfprivacy_api/resources/common.py | 4 ++-- 6 files changed, 47 insertions(+), 4 deletions(-) create mode 100644 selfprivacy_api/resolvers/api.py diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 49e5bae..fd3f60c 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -9,6 +9,8 @@ from flask_restful import Api from flask_swagger import swagger from flask_swagger_ui import get_swaggerui_blueprint +from strawberry.flask.views import AsyncGraphQLView + from selfprivacy_api.resources.users import User, Users from selfprivacy_api.resources.common import ApiVersion from selfprivacy_api.resources.system import api_system @@ -21,6 +23,8 @@ from selfprivacy_api.migrations import run_migrations from selfprivacy_api.utils.auth import is_token_valid +from selfprivacy_api.graphql import schema + swagger_blueprint = get_swaggerui_blueprint( "/api/docs", "/api/swagger.json", config={"app_name": "SelfPrivacy API"} ) @@ -83,6 +87,13 @@ def create_app(test_config=None): return jsonify(swag) return jsonify({}), 404 + app.add_url_rule( + "/graphql", + view_func=AsyncGraphQLView.as_view( + "graphql", shema=schema + ) + ) + if app.config["ENABLE_SWAGGER"] == "1": app.register_blueprint(swagger_blueprint, url_prefix="/api/docs") diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index e69de29..5a08c3e 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -0,0 +1,15 @@ +"""GraphQL API for SelfPrivacy.""" +# pylint: disable=too-few-public-methods +import typing +import strawberry + +from selfprivacy_api.graphql.queries.system import System +from selfprivacy_api.graphql.queries.api import Api + +@strawberry.type +class Query: + """Root schema for queries""" + system: System + api: Api + +schema = strawberry.Schema(query=Query) diff --git a/selfprivacy_api/graphql/queries/api.py b/selfprivacy_api/graphql/queries/api.py index 741a21e..21576a8 100644 --- a/selfprivacy_api/graphql/queries/api.py +++ b/selfprivacy_api/graphql/queries/api.py @@ -1,9 +1,12 @@ """API access status""" +# pylint: disable=too-few-public-methods import datetime import string import typing import strawberry +from selfprivacy_api.resolvers.api import get_api_version + @strawberry.type class ApiDevice: name: str @@ -20,6 +23,6 @@ class ApiRecoveryKeyStatus: @strawberry.type class Api: - version: str + version: str = strawberry.field(resolver=get_api_version) devices: typing.List[ApiDevice] recovery_key: ApiRecoveryKeyStatus diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index 5ab3a36..f0b84f4 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -1,3 +1,5 @@ +"""Common system information and settings""" +# pylint: disable=too-few-public-methods import typing import strawberry @@ -6,6 +8,7 @@ from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvide @strawberry.type class DnsRecord: + """DNS record""" recordType: str name: str content: str @@ -14,6 +17,7 @@ class DnsRecord: @strawberry.type class SystemDomainInfo: + """Information about the system domain""" domain: str hostname: str provider: DnsProvider @@ -21,28 +25,33 @@ class SystemDomainInfo: @strawberry.type class AutoUpgradeOptions: + """Automatic upgrade options""" enable: bool allow_reboot: bool @strawberry.type 
class SshSettings: + """SSH settings and root SSH keys""" enable: bool password_authentication: bool root_ssh_keys: typing.List[str] @strawberry.type class SystemSettings: + """Common system settings""" auto_upgrade: AutoUpgradeOptions ssh: SshSettings timezone: str @strawberry.type class SystemInfo: + """System components versions""" system_version: str python_version: str @strawberry.type class SystemProviderInfo: + """Information about the VPS/Dedicated server provider""" provider: ServerProvider id: str @@ -56,4 +65,4 @@ class System: settings: SystemSettings info: SystemInfo provider: SystemProviderInfo - busy: bool \ No newline at end of file + busy: bool diff --git a/selfprivacy_api/resolvers/api.py b/selfprivacy_api/resolvers/api.py new file mode 100644 index 0000000..245a635 --- /dev/null +++ b/selfprivacy_api/resolvers/api.py @@ -0,0 +1,5 @@ +"""Resolvers for API module""" + +def get_api_version() -> str: + """Get API version""" + return "1.2.7" diff --git a/selfprivacy_api/resources/common.py b/selfprivacy_api/resources/common.py index ce0dedf..7c8937b 100644 --- a/selfprivacy_api/resources/common.py +++ b/selfprivacy_api/resources/common.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """Unassigned views""" from flask_restful import Resource - +from selfprivacy_api.resolvers.api import get_api_version class ApiVersion(Resource): """SelfPrivacy API version""" @@ -23,4 +23,4 @@ class ApiVersion(Resource): 401: description: Unauthorized """ - return {"version": "1.2.7"} + return {"version": get_api_version()} From 992a7837d4c47bf6796f39d6d1d0e07ce7ed0f70 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 16:12:56 +0300 Subject: [PATCH 03/39] Update strawberry patch to remove backport --- strawberry-graphql.patch | 40 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/strawberry-graphql.patch b/strawberry-graphql.patch index 48a04d4..35e63a7 100644 --- a/strawberry-graphql.patch +++ b/strawberry-graphql.patch @@ -7,6 +7,44 @@ index 3283fce..89d3e8c 100644 aiohttp = {version = "^3.7.4.post0", optional = true} fastapi = {version = ">=0.65.2", optional = true} -"backports.cached-property" = "^1.0.1" - + [tool.poetry.dev-dependencies] pytest = "^7.1" +diff --git a/strawberry/field.py b/strawberry/field.py +index 4e7ee4b..7272188 100644 +--- a/strawberry/field.py ++++ b/strawberry/field.py +@@ -18,7 +18,6 @@ from typing import ( + overload, + ) + +-from backports.cached_property import cached_property + from typing_extensions import Literal + + from strawberry.annotation import StrawberryAnnotation +diff --git a/strawberry/types/fields/resolver.py b/strawberry/types/fields/resolver.py +index 0409233..be97594 100644 +--- a/strawberry/types/fields/resolver.py ++++ b/strawberry/types/fields/resolver.py +@@ -6,8 +6,6 @@ import sys + from inspect import isasyncgenfunction, iscoroutinefunction + from typing import Callable, Dict, Generic, List, Mapping, Optional, TypeVar, Union + +-from backports.cached_property import cached_property +- + from strawberry.annotation import StrawberryAnnotation + from strawberry.arguments import StrawberryArgument + from strawberry.exceptions import MissingArgumentsAnnotationsError +diff --git a/strawberry/types/info.py b/strawberry/types/info.py +index a172c04..6bbb578 100644 +--- a/strawberry/types/info.py ++++ b/strawberry/types/info.py +@@ -2,8 +2,6 @@ import dataclasses + import warnings + from typing import TYPE_CHECKING, Any, Dict, Generic, List, Optional, TypeVar, Union + +-from 
backports.cached_property import cached_property +- + from graphql import GraphQLResolveInfo, OperationDefinitionNode + from graphql.language import FieldNode + from graphql.pyutils.path import Path From c20b0c94f495bb33914aed3f10250ccc5e5128fb Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 16:17:18 +0300 Subject: [PATCH 04/39] Update strawberry patch --- strawberry-graphql.patch | 37 ++++++++++++++++++++++++++----------- 1 file changed, 26 insertions(+), 11 deletions(-) diff --git a/strawberry-graphql.patch b/strawberry-graphql.patch index 35e63a7..3608342 100644 --- a/strawberry-graphql.patch +++ b/strawberry-graphql.patch @@ -7,42 +7,57 @@ index 3283fce..89d3e8c 100644 aiohttp = {version = "^3.7.4.post0", optional = true} fastapi = {version = ">=0.65.2", optional = true} -"backports.cached-property" = "^1.0.1" - + [tool.poetry.dev-dependencies] pytest = "^7.1" diff --git a/strawberry/field.py b/strawberry/field.py -index 4e7ee4b..7272188 100644 +index 4e7ee4b..06c2044 100644 --- a/strawberry/field.py +++ b/strawberry/field.py -@@ -18,7 +18,6 @@ from typing import ( +@@ -1,5 +1,6 @@ + import builtins + import dataclasses ++from functools import cached_property + import inspect + import sys + from typing import ( +@@ -18,7 +19,6 @@ from typing import ( overload, ) - + -from backports.cached_property import cached_property from typing_extensions import Literal - + from strawberry.annotation import StrawberryAnnotation diff --git a/strawberry/types/fields/resolver.py b/strawberry/types/fields/resolver.py -index 0409233..be97594 100644 +index 0409233..f4fbe9a 100644 --- a/strawberry/types/fields/resolver.py +++ b/strawberry/types/fields/resolver.py -@@ -6,8 +6,6 @@ import sys +@@ -1,13 +1,12 @@ + from __future__ import annotations as _ + + import builtins ++from functools import cached_property + import inspect + import sys from inspect import isasyncgenfunction, iscoroutinefunction from typing import Callable, Dict, Generic, List, Mapping, Optional, TypeVar, Union - + -from backports.cached_property import cached_property - from strawberry.annotation import StrawberryAnnotation from strawberry.arguments import StrawberryArgument from strawberry.exceptions import MissingArgumentsAnnotationsError diff --git a/strawberry/types/info.py b/strawberry/types/info.py -index a172c04..6bbb578 100644 +index a172c04..475a3ee 100644 --- a/strawberry/types/info.py +++ b/strawberry/types/info.py -@@ -2,8 +2,6 @@ import dataclasses +@@ -1,9 +1,8 @@ + import dataclasses ++from functools import cached_property import warnings from typing import TYPE_CHECKING, Any, Dict, Generic, List, Optional, TypeVar, Union - + -from backports.cached_property import cached_property - from graphql import GraphQLResolveInfo, OperationDefinitionNode From fc971292c2994dbb4df068b3a0f44252a8f7930a Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 17:49:52 +0300 Subject: [PATCH 05/39] add __init__.py to resolvers --- selfprivacy_api/resolvers/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 selfprivacy_api/resolvers/__init__.py diff --git a/selfprivacy_api/resolvers/__init__.py b/selfprivacy_api/resolvers/__init__.py new file mode 100644 index 0000000..e69de29 From a6ad9aaf90c206c9ae9f8d2228c3b7f456c6e658 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 18:02:40 +0300 Subject: [PATCH 06/39] rename folder --- selfprivacy_api/graphql/queries/api.py | 3 +-- selfprivacy_api/{resolvers => resolve_functions}/__init__.py | 0 selfprivacy_api/{resolvers => 
resolve_functions}/api.py | 0 selfprivacy_api/resources/common.py | 2 +- 4 files changed, 2 insertions(+), 3 deletions(-) rename selfprivacy_api/{resolvers => resolve_functions}/__init__.py (100%) rename selfprivacy_api/{resolvers => resolve_functions}/api.py (100%) diff --git a/selfprivacy_api/graphql/queries/api.py b/selfprivacy_api/graphql/queries/api.py index 21576a8..67113c6 100644 --- a/selfprivacy_api/graphql/queries/api.py +++ b/selfprivacy_api/graphql/queries/api.py @@ -1,11 +1,10 @@ """API access status""" # pylint: disable=too-few-public-methods import datetime -import string import typing import strawberry -from selfprivacy_api.resolvers.api import get_api_version +from selfprivacy_api.resolve_functions.api import get_api_version @strawberry.type class ApiDevice: diff --git a/selfprivacy_api/resolvers/__init__.py b/selfprivacy_api/resolve_functions/__init__.py similarity index 100% rename from selfprivacy_api/resolvers/__init__.py rename to selfprivacy_api/resolve_functions/__init__.py diff --git a/selfprivacy_api/resolvers/api.py b/selfprivacy_api/resolve_functions/api.py similarity index 100% rename from selfprivacy_api/resolvers/api.py rename to selfprivacy_api/resolve_functions/api.py diff --git a/selfprivacy_api/resources/common.py b/selfprivacy_api/resources/common.py index 7c8937b..60ce503 100644 --- a/selfprivacy_api/resources/common.py +++ b/selfprivacy_api/resources/common.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """Unassigned views""" from flask_restful import Resource -from selfprivacy_api.resolvers.api import get_api_version +from selfprivacy_api.resolve_functions.api import get_api_version class ApiVersion(Resource): """SelfPrivacy API version""" From 28db251f1fc91d3de5d801adb23044da76b2d91c Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 18:13:54 +0300 Subject: [PATCH 07/39] rollback the rename --- selfprivacy_api/graphql/queries/api.py | 2 +- selfprivacy_api/{resolve_functions => resolvers}/__init__.py | 0 selfprivacy_api/{resolve_functions => resolvers}/api.py | 0 selfprivacy_api/resources/common.py | 2 +- 4 files changed, 2 insertions(+), 2 deletions(-) rename selfprivacy_api/{resolve_functions => resolvers}/__init__.py (100%) rename selfprivacy_api/{resolve_functions => resolvers}/api.py (100%) diff --git a/selfprivacy_api/graphql/queries/api.py b/selfprivacy_api/graphql/queries/api.py index 67113c6..c890465 100644 --- a/selfprivacy_api/graphql/queries/api.py +++ b/selfprivacy_api/graphql/queries/api.py @@ -4,7 +4,7 @@ import datetime import typing import strawberry -from selfprivacy_api.resolve_functions.api import get_api_version +from selfprivacy_api.resolvers.api import get_api_version @strawberry.type class ApiDevice: diff --git a/selfprivacy_api/resolve_functions/__init__.py b/selfprivacy_api/resolvers/__init__.py similarity index 100% rename from selfprivacy_api/resolve_functions/__init__.py rename to selfprivacy_api/resolvers/__init__.py diff --git a/selfprivacy_api/resolve_functions/api.py b/selfprivacy_api/resolvers/api.py similarity index 100% rename from selfprivacy_api/resolve_functions/api.py rename to selfprivacy_api/resolvers/api.py diff --git a/selfprivacy_api/resources/common.py b/selfprivacy_api/resources/common.py index 60ce503..7c8937b 100644 --- a/selfprivacy_api/resources/common.py +++ b/selfprivacy_api/resources/common.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """Unassigned views""" from flask_restful import Resource -from selfprivacy_api.resolve_functions.api import get_api_version +from selfprivacy_api.resolvers.api 
import get_api_version class ApiVersion(Resource): """SelfPrivacy API version""" From 01dea50c1f7cf28733c35a900f8d14bfc4fb58ee Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 18:21:13 +0300 Subject: [PATCH 08/39] tmp allow access to graphql without auth --- selfprivacy_api/app.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index fd3f60c..37be69c 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -51,6 +51,9 @@ def create_app(test_config=None): pass elif request.path.startswith("/auth/recovery_token/use"): pass + # TODO: REMOVE THIS + elif request.path.startswith("/graphql"): + pass else: auth = request.headers.get("Authorization") if auth is None: From 17b8334c6ebe3d6036812b2d115f51e88c68c190 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 18:23:09 +0300 Subject: [PATCH 09/39] typo --- selfprivacy_api/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 37be69c..6deba7c 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -93,7 +93,7 @@ def create_app(test_config=None): app.add_url_rule( "/graphql", view_func=AsyncGraphQLView.as_view( - "graphql", shema=schema + "graphql", schema=schema ) ) From 9b25bc0d53f60b8488ee93d699af5ce2abdf5d1e Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 19:17:03 +0300 Subject: [PATCH 10/39] Add api status resolvers --- selfprivacy_api/graphql/__init__.py | 5 +- selfprivacy_api/graphql/queries/api.py | 11 +++-- selfprivacy_api/resolvers/api.py | 48 +++++++++++++++++++ .../resources/api_auth/recovery_token.py | 10 ++++ 4 files changed, 69 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index 5a08c3e..0897434 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -10,6 +10,9 @@ from selfprivacy_api.graphql.queries.api import Api class Query: """Root schema for queries""" system: System - api: Api + @strawberry.field + def api(self) -> Api: + """API access status""" + return Api() schema = strawberry.Schema(query=Query) diff --git a/selfprivacy_api/graphql/queries/api.py b/selfprivacy_api/graphql/queries/api.py index c890465..7424bee 100644 --- a/selfprivacy_api/graphql/queries/api.py +++ b/selfprivacy_api/graphql/queries/api.py @@ -4,24 +4,27 @@ import datetime import typing import strawberry -from selfprivacy_api.resolvers.api import get_api_version +from selfprivacy_api.resolvers.api import get_api_version, get_devices, get_recovery_key_status @strawberry.type class ApiDevice: + """A single device with SelfPrivacy app installed""" name: str creation_date: datetime.datetime is_caller: bool @strawberry.type class ApiRecoveryKeyStatus: + """Recovery key status""" exists: bool valid: bool - creation_date: datetime.datetime + creation_date: typing.Optional[datetime.datetime] expiration_date: typing.Optional[datetime.datetime] uses_left: typing.Optional[int] @strawberry.type class Api: + """API access status""" version: str = strawberry.field(resolver=get_api_version) - devices: typing.List[ApiDevice] - recovery_key: ApiRecoveryKeyStatus + devices: typing.List[ApiDevice] = strawberry.field(resolver=get_devices) + recovery_key: ApiRecoveryKeyStatus = strawberry.field(resolver=get_recovery_key_status) diff --git a/selfprivacy_api/resolvers/api.py b/selfprivacy_api/resolvers/api.py index 245a635..bbcebf9 100644 --- a/selfprivacy_api/resolvers/api.py +++ 
b/selfprivacy_api/resolvers/api.py @@ -1,5 +1,53 @@ """Resolvers for API module""" +import datetime +import typing +from flask import request + +from selfprivacy_api.graphql.queries.api import ApiDevice, ApiRecoveryKeyStatus + +from selfprivacy_api.utils.auth import ( + get_recovery_token_status, + get_tokens_info, + is_recovery_token_exists, + is_recovery_token_valid, + is_token_name_exists, + is_token_name_pair_valid, + refresh_token, + get_token_name, +) def get_api_version() -> str: """Get API version""" return "1.2.7" + +def get_devices() -> typing.List[ApiDevice]: + """Get list of devices""" + caller_name = get_token_name(request.headers.get("Authorization").split(" ")[1]) + tokens = get_tokens_info() + return [ + ApiDevice( + name=token["name"], + creation_date=datetime.datetime.strptime(token["date"], "%Y-%m-%dT%H:%M:%S.%fZ"), + is_caller=token["name"] == caller_name, + ) + for token in tokens + ] + +def get_recovery_key_status() -> ApiRecoveryKeyStatus: + """Get recovery key status""" + if not is_recovery_token_exists(): + return ApiRecoveryKeyStatus( + exists=False, valid=False, creation_date=None, expiration_date=None, uses_left=None + ) + status = get_recovery_token_status() + if status is None: + return ApiRecoveryKeyStatus( + exists=False, valid=False, creation_date=None, expiration_date=None, uses_left=None + ) + return ApiRecoveryKeyStatus( + exists=True, + valid=is_recovery_token_valid(), + creation_date=datetime.datetime.strptime(status["creation_date"], "%Y-%m-%dT%H:%M:%S.%fZ"), + expiration_date=datetime.datetime.strptime(status["expiration_date"], "%Y-%m-%dT%H:%M:%S.%fZ") if status["expiration_date"] is not None else None, + uses_left=status["uses_left"] if status["uses_left"] is not None else None, + ) diff --git a/selfprivacy_api/resources/api_auth/recovery_token.py b/selfprivacy_api/resources/api_auth/recovery_token.py index fbd80d9..e97c87a 100644 --- a/selfprivacy_api/resources/api_auth/recovery_token.py +++ b/selfprivacy_api/resources/api_auth/recovery_token.py @@ -60,6 +60,16 @@ class RecoveryToken(Resource): "uses_left": None, } status = get_recovery_token_status() + # check if status is None + if status is None: + return { + "exists": False, + "valid": False, + "date": None, + "expiration": None, + "uses_left": None, + } + if not is_recovery_token_valid(): return { "exists": True, From 766edc657a69e8170c76b7ed3646923ebce56f20 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 19:24:10 +0300 Subject: [PATCH 11/39] resolve circular import --- selfprivacy_api/graphql/__init__.py | 11 ++++++++++- selfprivacy_api/graphql/queries/api.py | 8 -------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index 0897434..d5cd8a9 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -2,9 +2,18 @@ # pylint: disable=too-few-public-methods import typing import strawberry +from selfprivacy_api.graphql.queries.api import ApiDevice, ApiRecoveryKeyStatus from selfprivacy_api.graphql.queries.system import System -from selfprivacy_api.graphql.queries.api import Api + +from selfprivacy_api.resolvers.api import get_api_version, get_devices, get_recovery_key_status + +@strawberry.type +class Api: + """API access status""" + version: str = strawberry.field(resolver=get_api_version) + devices: typing.List[ApiDevice] = strawberry.field(resolver=get_devices) + recovery_key: ApiRecoveryKeyStatus = strawberry.field(resolver=get_recovery_key_status) 
@strawberry.type class Query: diff --git a/selfprivacy_api/graphql/queries/api.py b/selfprivacy_api/graphql/queries/api.py index 7424bee..2e46d50 100644 --- a/selfprivacy_api/graphql/queries/api.py +++ b/selfprivacy_api/graphql/queries/api.py @@ -4,7 +4,6 @@ import datetime import typing import strawberry -from selfprivacy_api.resolvers.api import get_api_version, get_devices, get_recovery_key_status @strawberry.type class ApiDevice: @@ -21,10 +20,3 @@ class ApiRecoveryKeyStatus: creation_date: typing.Optional[datetime.datetime] expiration_date: typing.Optional[datetime.datetime] uses_left: typing.Optional[int] - -@strawberry.type -class Api: - """API access status""" - version: str = strawberry.field(resolver=get_api_version) - devices: typing.List[ApiDevice] = strawberry.field(resolver=get_devices) - recovery_key: ApiRecoveryKeyStatus = strawberry.field(resolver=get_recovery_key_status) From 6ca723867ef7bc2823feb8e83fc789e610d1deaf Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 19:28:58 +0300 Subject: [PATCH 12/39] once again --- selfprivacy_api/app.py | 2 +- selfprivacy_api/graphql/__init__.py | 27 ------------------- selfprivacy_api/graphql/queries/api.py | 23 ++++++---------- selfprivacy_api/graphql/queries/api_fields.py | 22 +++++++++++++++ selfprivacy_api/graphql/query.py | 18 +++++++++++++ selfprivacy_api/resolvers/api.py | 2 +- 6 files changed, 50 insertions(+), 44 deletions(-) create mode 100644 selfprivacy_api/graphql/queries/api_fields.py create mode 100644 selfprivacy_api/graphql/query.py diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 6deba7c..594217b 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -23,7 +23,7 @@ from selfprivacy_api.migrations import run_migrations from selfprivacy_api.utils.auth import is_token_valid -from selfprivacy_api.graphql import schema +from selfprivacy_api.graphql.query import schema swagger_blueprint = get_swaggerui_blueprint( "/api/docs", "/api/swagger.json", config={"app_name": "SelfPrivacy API"} diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index d5cd8a9..e69de29 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -1,27 +0,0 @@ -"""GraphQL API for SelfPrivacy.""" -# pylint: disable=too-few-public-methods -import typing -import strawberry -from selfprivacy_api.graphql.queries.api import ApiDevice, ApiRecoveryKeyStatus - -from selfprivacy_api.graphql.queries.system import System - -from selfprivacy_api.resolvers.api import get_api_version, get_devices, get_recovery_key_status - -@strawberry.type -class Api: - """API access status""" - version: str = strawberry.field(resolver=get_api_version) - devices: typing.List[ApiDevice] = strawberry.field(resolver=get_devices) - recovery_key: ApiRecoveryKeyStatus = strawberry.field(resolver=get_recovery_key_status) - -@strawberry.type -class Query: - """Root schema for queries""" - system: System - @strawberry.field - def api(self) -> Api: - """API access status""" - return Api() - -schema = strawberry.Schema(query=Query) diff --git a/selfprivacy_api/graphql/queries/api.py b/selfprivacy_api/graphql/queries/api.py index 2e46d50..dbea5a1 100644 --- a/selfprivacy_api/graphql/queries/api.py +++ b/selfprivacy_api/graphql/queries/api.py @@ -1,22 +1,15 @@ """API access status""" # pylint: disable=too-few-public-methods -import datetime import typing import strawberry +from selfprivacy_api.graphql.queries.api_fields import ApiDevice, ApiRecoveryKeyStatus +from 
selfprivacy_api.resolvers.api import get_api_version, get_devices, get_recovery_key_status + @strawberry.type -class ApiDevice: - """A single device with SelfPrivacy app installed""" - name: str - creation_date: datetime.datetime - is_caller: bool - -@strawberry.type -class ApiRecoveryKeyStatus: - """Recovery key status""" - exists: bool - valid: bool - creation_date: typing.Optional[datetime.datetime] - expiration_date: typing.Optional[datetime.datetime] - uses_left: typing.Optional[int] +class Api: + """API access status""" + version: str = strawberry.field(resolver=get_api_version) + devices: typing.List[ApiDevice] = strawberry.field(resolver=get_devices) + recovery_key: ApiRecoveryKeyStatus = strawberry.field(resolver=get_recovery_key_status) diff --git a/selfprivacy_api/graphql/queries/api_fields.py b/selfprivacy_api/graphql/queries/api_fields.py new file mode 100644 index 0000000..2e46d50 --- /dev/null +++ b/selfprivacy_api/graphql/queries/api_fields.py @@ -0,0 +1,22 @@ +"""API access status""" +# pylint: disable=too-few-public-methods +import datetime +import typing +import strawberry + + +@strawberry.type +class ApiDevice: + """A single device with SelfPrivacy app installed""" + name: str + creation_date: datetime.datetime + is_caller: bool + +@strawberry.type +class ApiRecoveryKeyStatus: + """Recovery key status""" + exists: bool + valid: bool + creation_date: typing.Optional[datetime.datetime] + expiration_date: typing.Optional[datetime.datetime] + uses_left: typing.Optional[int] diff --git a/selfprivacy_api/graphql/query.py b/selfprivacy_api/graphql/query.py new file mode 100644 index 0000000..80b3d78 --- /dev/null +++ b/selfprivacy_api/graphql/query.py @@ -0,0 +1,18 @@ +"""GraphQL API for SelfPrivacy.""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from selfprivacy_api.graphql.queries.api import Api + +from selfprivacy_api.graphql.queries.system import System + +@strawberry.type +class Query: + """Root schema for queries""" + system: System + @strawberry.field + def api(self) -> Api: + """API access status""" + return Api() + +schema = strawberry.Schema(query=Query) diff --git a/selfprivacy_api/resolvers/api.py b/selfprivacy_api/resolvers/api.py index bbcebf9..f375dc8 100644 --- a/selfprivacy_api/resolvers/api.py +++ b/selfprivacy_api/resolvers/api.py @@ -3,7 +3,7 @@ import datetime import typing from flask import request -from selfprivacy_api.graphql.queries.api import ApiDevice, ApiRecoveryKeyStatus +from selfprivacy_api.graphql.queries.api_fields import ApiDevice, ApiRecoveryKeyStatus from selfprivacy_api.utils.auth import ( get_recovery_token_status, From 71c70592b222577b136ea36e663b3971c016706a Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 19:35:42 +0300 Subject: [PATCH 13/39] fixes --- selfprivacy_api/resolvers/api.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/resolvers/api.py b/selfprivacy_api/resolvers/api.py index f375dc8..871559e 100644 --- a/selfprivacy_api/resolvers/api.py +++ b/selfprivacy_api/resolvers/api.py @@ -22,7 +22,7 @@ def get_api_version() -> str: def get_devices() -> typing.List[ApiDevice]: """Get list of devices""" - caller_name = get_token_name(request.headers.get("Authorization").split(" ")[1]) + caller_name = get_token_name(request.headers.get("Authorization").split(" ")[1] if request.headers.get("Authorization") is not None else None) tokens = get_tokens_info() return [ ApiDevice( @@ -47,7 +47,7 @@ def get_recovery_key_status() -> ApiRecoveryKeyStatus: 
return ApiRecoveryKeyStatus( exists=True, valid=is_recovery_token_valid(), - creation_date=datetime.datetime.strptime(status["creation_date"], "%Y-%m-%dT%H:%M:%S.%fZ"), - expiration_date=datetime.datetime.strptime(status["expiration_date"], "%Y-%m-%dT%H:%M:%S.%fZ") if status["expiration_date"] is not None else None, + creation_date=datetime.datetime.strptime(status["date"], "%Y-%m-%dT%H:%M:%S.%fZ"), + expiration_date=datetime.datetime.strptime(status["expiration"], "%Y-%m-%dT%H:%M:%S.%fZ") if status["expiration"] is not None else None, uses_left=status["uses_left"] if status["uses_left"] is not None else None, ) From e2ac429975035bdf0c593fc912decad201b231fb Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 19:50:30 +0300 Subject: [PATCH 14/39] parser --- selfprivacy_api/resolvers/api.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/resolvers/api.py b/selfprivacy_api/resolvers/api.py index 871559e..c6068b8 100644 --- a/selfprivacy_api/resolvers/api.py +++ b/selfprivacy_api/resolvers/api.py @@ -16,6 +16,11 @@ from selfprivacy_api.utils.auth import ( get_token_name, ) +def parse_date(date_str: str) -> datetime.datetime: + """Parse date string which can be in + %Y-%m-%dT%H:%M:%S.%fZ or %Y-%m-%d %H:%M:%S.%f format""" + return datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%fZ") if date_str.endswith("Z") else datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f") + def get_api_version() -> str: """Get API version""" return "1.2.7" @@ -27,7 +32,7 @@ def get_devices() -> typing.List[ApiDevice]: return [ ApiDevice( name=token["name"], - creation_date=datetime.datetime.strptime(token["date"], "%Y-%m-%dT%H:%M:%S.%fZ"), + creation_date=parse_date(token["date"]), is_caller=token["name"] == caller_name, ) for token in tokens @@ -47,7 +52,7 @@ def get_recovery_key_status() -> ApiRecoveryKeyStatus: return ApiRecoveryKeyStatus( exists=True, valid=is_recovery_token_valid(), - creation_date=datetime.datetime.strptime(status["date"], "%Y-%m-%dT%H:%M:%S.%fZ"), - expiration_date=datetime.datetime.strptime(status["expiration"], "%Y-%m-%dT%H:%M:%S.%fZ") if status["expiration"] is not None else None, + creation_date=parse_date(status["date"]), + expiration_date=parse_date(status["expiration"]) if status["expiration"] is not None else None, uses_left=status["uses_left"] if status["uses_left"] is not None else None, ) From 517a769e5b4018de3be5d300d76df61452cfd039 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 20:08:58 +0300 Subject: [PATCH 15/39] add auth check --- selfprivacy_api/app.py | 2 +- selfprivacy_api/graphql/__init__.py | 37 ++++++++++ selfprivacy_api/graphql/queries/api.py | 67 ++++++++++++++++++- selfprivacy_api/graphql/queries/api_fields.py | 22 ------ selfprivacy_api/graphql/query.py | 18 ----- selfprivacy_api/resolvers/__init__.py | 0 selfprivacy_api/resolvers/api.py | 58 ---------------- selfprivacy_api/resources/common.py | 2 +- selfprivacy_api/utils/__init__.py | 6 ++ 9 files changed, 110 insertions(+), 102 deletions(-) delete mode 100644 selfprivacy_api/graphql/queries/api_fields.py delete mode 100644 selfprivacy_api/graphql/query.py delete mode 100644 selfprivacy_api/resolvers/__init__.py delete mode 100644 selfprivacy_api/resolvers/api.py diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 594217b..6deba7c 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -23,7 +23,7 @@ from selfprivacy_api.migrations import run_migrations from selfprivacy_api.utils.auth import 
is_token_valid -from selfprivacy_api.graphql.query import schema +from selfprivacy_api.graphql import schema swagger_blueprint = get_swaggerui_blueprint( "/api/docs", "/api/swagger.json", config={"app_name": "SelfPrivacy API"} diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index e69de29..762a324 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -0,0 +1,37 @@ +"""GraphQL API for SelfPrivacy.""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from strawberry.permission import BasePermission +from strawberry.types import Info +from flask import request + +from selfprivacy_api.graphql.queries.api import Api +from selfprivacy_api.graphql.queries.system import System +from selfprivacy_api.utils.auth import is_token_valid + +class IsAuthenticated(BasePermission): + """Is authenticated permission""" + message = "You must be authenticated to access this resource." + + def has_permission(self, source: typing.Any, info: Info, **kwargs) -> bool: + auth = request.headers.get("Authorization") + if auth is None: + return False + # Strip Bearer from auth header + auth = auth.replace("Bearer ", "") + if not is_token_valid(auth): + return False + return True + + +@strawberry.type +class Query: + """Root schema for queries""" + system: System + @strawberry.field(permission_classes=[IsAuthenticated]) + def api(self) -> Api: + """API access status""" + return Api() + +schema = strawberry.Schema(query=Query) diff --git a/selfprivacy_api/graphql/queries/api.py b/selfprivacy_api/graphql/queries/api.py index dbea5a1..a79222a 100644 --- a/selfprivacy_api/graphql/queries/api.py +++ b/selfprivacy_api/graphql/queries/api.py @@ -1,11 +1,74 @@ """API access status""" # pylint: disable=too-few-public-methods +import datetime import typing +from flask import request import strawberry +from selfprivacy_api.utils import parse_date -from selfprivacy_api.graphql.queries.api_fields import ApiDevice, ApiRecoveryKeyStatus -from selfprivacy_api.resolvers.api import get_api_version, get_devices, get_recovery_key_status +from selfprivacy_api.utils.auth import ( + get_recovery_token_status, + get_tokens_info, + is_recovery_token_exists, + is_recovery_token_valid, + is_token_name_exists, + is_token_name_pair_valid, + refresh_token, + get_token_name, +) +def get_api_version() -> str: + """Get API version""" + return "1.2.7" + +@strawberry.type +class ApiDevice: + """A single device with SelfPrivacy app installed""" + name: str + creation_date: datetime.datetime + is_caller: bool + +def get_devices() -> typing.List[ApiDevice]: + """Get list of devices""" + caller_name = get_token_name(request.headers.get("Authorization").split(" ")[1] if request.headers.get("Authorization") is not None else None) + tokens = get_tokens_info() + return [ + ApiDevice( + name=token["name"], + creation_date=parse_date(token["date"]), + is_caller=token["name"] == caller_name, + ) + for token in tokens + ] + + +@strawberry.type +class ApiRecoveryKeyStatus: + """Recovery key status""" + exists: bool + valid: bool + creation_date: typing.Optional[datetime.datetime] + expiration_date: typing.Optional[datetime.datetime] + uses_left: typing.Optional[int] + +def get_recovery_key_status() -> ApiRecoveryKeyStatus: + """Get recovery key status""" + if not is_recovery_token_exists(): + return ApiRecoveryKeyStatus( + exists=False, valid=False, creation_date=None, expiration_date=None, uses_left=None + ) + status = get_recovery_token_status() + if status is 
None: + return ApiRecoveryKeyStatus( + exists=False, valid=False, creation_date=None, expiration_date=None, uses_left=None + ) + return ApiRecoveryKeyStatus( + exists=True, + valid=is_recovery_token_valid(), + creation_date=parse_date(status["date"]), + expiration_date=parse_date(status["expiration"]) if status["expiration"] is not None else None, + uses_left=status["uses_left"] if status["uses_left"] is not None else None, + ) @strawberry.type class Api: diff --git a/selfprivacy_api/graphql/queries/api_fields.py b/selfprivacy_api/graphql/queries/api_fields.py deleted file mode 100644 index 2e46d50..0000000 --- a/selfprivacy_api/graphql/queries/api_fields.py +++ /dev/null @@ -1,22 +0,0 @@ -"""API access status""" -# pylint: disable=too-few-public-methods -import datetime -import typing -import strawberry - - -@strawberry.type -class ApiDevice: - """A single device with SelfPrivacy app installed""" - name: str - creation_date: datetime.datetime - is_caller: bool - -@strawberry.type -class ApiRecoveryKeyStatus: - """Recovery key status""" - exists: bool - valid: bool - creation_date: typing.Optional[datetime.datetime] - expiration_date: typing.Optional[datetime.datetime] - uses_left: typing.Optional[int] diff --git a/selfprivacy_api/graphql/query.py b/selfprivacy_api/graphql/query.py deleted file mode 100644 index 80b3d78..0000000 --- a/selfprivacy_api/graphql/query.py +++ /dev/null @@ -1,18 +0,0 @@ -"""GraphQL API for SelfPrivacy.""" -# pylint: disable=too-few-public-methods -import typing -import strawberry -from selfprivacy_api.graphql.queries.api import Api - -from selfprivacy_api.graphql.queries.system import System - -@strawberry.type -class Query: - """Root schema for queries""" - system: System - @strawberry.field - def api(self) -> Api: - """API access status""" - return Api() - -schema = strawberry.Schema(query=Query) diff --git a/selfprivacy_api/resolvers/__init__.py b/selfprivacy_api/resolvers/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/selfprivacy_api/resolvers/api.py b/selfprivacy_api/resolvers/api.py deleted file mode 100644 index c6068b8..0000000 --- a/selfprivacy_api/resolvers/api.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Resolvers for API module""" -import datetime -import typing -from flask import request - -from selfprivacy_api.graphql.queries.api_fields import ApiDevice, ApiRecoveryKeyStatus - -from selfprivacy_api.utils.auth import ( - get_recovery_token_status, - get_tokens_info, - is_recovery_token_exists, - is_recovery_token_valid, - is_token_name_exists, - is_token_name_pair_valid, - refresh_token, - get_token_name, -) - -def parse_date(date_str: str) -> datetime.datetime: - """Parse date string which can be in - %Y-%m-%dT%H:%M:%S.%fZ or %Y-%m-%d %H:%M:%S.%f format""" - return datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%fZ") if date_str.endswith("Z") else datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f") - -def get_api_version() -> str: - """Get API version""" - return "1.2.7" - -def get_devices() -> typing.List[ApiDevice]: - """Get list of devices""" - caller_name = get_token_name(request.headers.get("Authorization").split(" ")[1] if request.headers.get("Authorization") is not None else None) - tokens = get_tokens_info() - return [ - ApiDevice( - name=token["name"], - creation_date=parse_date(token["date"]), - is_caller=token["name"] == caller_name, - ) - for token in tokens - ] - -def get_recovery_key_status() -> ApiRecoveryKeyStatus: - """Get recovery key status""" - if not is_recovery_token_exists(): - return 
ApiRecoveryKeyStatus( - exists=False, valid=False, creation_date=None, expiration_date=None, uses_left=None - ) - status = get_recovery_token_status() - if status is None: - return ApiRecoveryKeyStatus( - exists=False, valid=False, creation_date=None, expiration_date=None, uses_left=None - ) - return ApiRecoveryKeyStatus( - exists=True, - valid=is_recovery_token_valid(), - creation_date=parse_date(status["date"]), - expiration_date=parse_date(status["expiration"]) if status["expiration"] is not None else None, - uses_left=status["uses_left"] if status["uses_left"] is not None else None, - ) diff --git a/selfprivacy_api/resources/common.py b/selfprivacy_api/resources/common.py index 7c8937b..93c1dde 100644 --- a/selfprivacy_api/resources/common.py +++ b/selfprivacy_api/resources/common.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """Unassigned views""" from flask_restful import Resource -from selfprivacy_api.resolvers.api import get_api_version +from selfprivacy_api.graphql.queries.api import get_api_version class ApiVersion(Resource): """SelfPrivacy API version""" diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 5322fae..556ee60 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -1,5 +1,6 @@ #!/usr/bin/env python3 """Various utility functions""" +import datetime from enum import Enum import json import portalocker @@ -119,3 +120,8 @@ def is_username_forbidden(username): return True return False + +def parse_date(date_str: str) -> datetime.datetime: + """Parse date string which can be in + %Y-%m-%dT%H:%M:%S.%fZ or %Y-%m-%d %H:%M:%S.%f format""" + return datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%fZ") if date_str.endswith("Z") else datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f") From 07e723dec8f61b9a987136f751809060db9f4df4 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 20:12:32 +0300 Subject: [PATCH 16/39] more precise permission control --- selfprivacy_api/app.py | 2 +- selfprivacy_api/graphql/__init__.py | 15 --------------- selfprivacy_api/graphql/queries/api.py | 5 +++-- selfprivacy_api/graphql/schema.py | 19 +++++++++++++++++++ 4 files changed, 23 insertions(+), 18 deletions(-) create mode 100644 selfprivacy_api/graphql/schema.py diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 6deba7c..082ddee 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -23,7 +23,7 @@ from selfprivacy_api.migrations import run_migrations from selfprivacy_api.utils.auth import is_token_valid -from selfprivacy_api.graphql import schema +from selfprivacy_api.graphql.schema import schema swagger_blueprint = get_swaggerui_blueprint( "/api/docs", "/api/swagger.json", config={"app_name": "SelfPrivacy API"} diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index 762a324..c1ae87b 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -1,13 +1,10 @@ """GraphQL API for SelfPrivacy.""" # pylint: disable=too-few-public-methods import typing -import strawberry from strawberry.permission import BasePermission from strawberry.types import Info from flask import request -from selfprivacy_api.graphql.queries.api import Api -from selfprivacy_api.graphql.queries.system import System from selfprivacy_api.utils.auth import is_token_valid class IsAuthenticated(BasePermission): @@ -23,15 +20,3 @@ class IsAuthenticated(BasePermission): if not is_token_valid(auth): return False return True - - 
-@strawberry.type -class Query: - """Root schema for queries""" - system: System - @strawberry.field(permission_classes=[IsAuthenticated]) - def api(self) -> Api: - """API access status""" - return Api() - -schema = strawberry.Schema(query=Query) diff --git a/selfprivacy_api/graphql/queries/api.py b/selfprivacy_api/graphql/queries/api.py index a79222a..5431d7d 100644 --- a/selfprivacy_api/graphql/queries/api.py +++ b/selfprivacy_api/graphql/queries/api.py @@ -4,6 +4,7 @@ import datetime import typing from flask import request import strawberry +from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.utils import parse_date from selfprivacy_api.utils.auth import ( @@ -74,5 +75,5 @@ def get_recovery_key_status() -> ApiRecoveryKeyStatus: class Api: """API access status""" version: str = strawberry.field(resolver=get_api_version) - devices: typing.List[ApiDevice] = strawberry.field(resolver=get_devices) - recovery_key: ApiRecoveryKeyStatus = strawberry.field(resolver=get_recovery_key_status) + devices: typing.List[ApiDevice] = strawberry.field(resolver=get_devices, permission_classes=[IsAuthenticated]) + recovery_key: ApiRecoveryKeyStatus = strawberry.field(resolver=get_recovery_key_status, permission_classes=[IsAuthenticated]) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py new file mode 100644 index 0000000..d3105a5 --- /dev/null +++ b/selfprivacy_api/graphql/schema.py @@ -0,0 +1,19 @@ +"""GraphQL API for SelfPrivacy.""" +# pylint: disable=too-few-public-methods +import typing +import strawberry + +from selfprivacy_api.graphql.queries.api import Api +from selfprivacy_api.graphql.queries.system import System + + +@strawberry.type +class Query: + """Root schema for queries""" + system: System + @strawberry.field + def api(self) -> Api: + """API access status""" + return Api() + +schema = strawberry.Schema(query=Query) From c6a3588e3374c3a66b5a737934a1684dc6a4352b Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 20:25:49 +0300 Subject: [PATCH 17/39] add CORS --- selfprivacy_api/app.py | 2 ++ shell.nix | 26 ++++++++++++++------------ 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 082ddee..5c21111 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -8,6 +8,7 @@ from flask import Flask, request, jsonify from flask_restful import Api from flask_swagger import swagger from flask_swagger_ui import get_swaggerui_blueprint +from flask_cors import CORS from strawberry.flask.views import AsyncGraphQLView @@ -34,6 +35,7 @@ def create_app(test_config=None): """Initiate Flask app and bind routes""" app = Flask(__name__) api = Api(app) + CORS(app) if test_config is None: app.config["ENABLE_SWAGGER"] = os.environ.get("ENABLE_SWAGGER", "0") diff --git a/shell.nix b/shell.nix index 947af0b..2735de1 100644 --- a/shell.nix +++ b/shell.nix @@ -1,23 +1,24 @@ { pkgs ? 
import {} }: let sp-python = pkgs.python39.withPackages (p: with p; [ - flask - flask-restful - setuptools - portalocker - flask-swagger - flask-swagger-ui - pytz - pytest - pytest-mock - pytest-datadir - huey + flask + flask-restful + setuptools + portalocker + flask-swagger + flask-swagger-ui + pytz + pytest + pytest-mock + pytest-datadir + huey gevent mnemonic coverage pylint pydantic typing-extensions + flask-cors (buildPythonPackage rec { pname = "strawberry-graphql"; version = "0.114.5"; @@ -34,6 +35,7 @@ let pydantic pygments poetry + flask-cors ]; src = fetchPypi { inherit pname version; @@ -51,4 +53,4 @@ pkgs.mkShell { PYTHONPATH=${sp-python}/${sp-python.sitePackages} # maybe set more env-vars ''; -} \ No newline at end of file +} From 80e5550f7da0f636b850ab91ec3016563138343c Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 21:14:20 +0300 Subject: [PATCH 18/39] add basic system getters --- selfprivacy_api/app.py | 5 +- selfprivacy_api/graphql/__init__.py | 2 + selfprivacy_api/graphql/queries/api.py | 38 ++++++- selfprivacy_api/graphql/queries/common.py | 4 + selfprivacy_api/graphql/queries/providers.py | 2 + selfprivacy_api/graphql/queries/system.py | 106 ++++++++++++++++-- selfprivacy_api/graphql/schema.py | 3 + selfprivacy_api/resources/common.py | 1 + selfprivacy_api/resources/system.py | 10 +- selfprivacy_api/utils/__init__.py | 7 +- tests/common.py | 11 ++ tests/test_auth.py | 12 +- tests/test_graphql/data/tokens.json | 14 +++ tests/test_graphql/test_api.py | 112 +++++++++++++++++++ 14 files changed, 294 insertions(+), 33 deletions(-) create mode 100644 tests/common.py create mode 100644 tests/test_graphql/data/tokens.json create mode 100644 tests/test_graphql/test_api.py diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 5c21111..77281e8 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -93,10 +93,7 @@ def create_app(test_config=None): return jsonify({}), 404 app.add_url_rule( - "/graphql", - view_func=AsyncGraphQLView.as_view( - "graphql", schema=schema - ) + "/graphql", view_func=AsyncGraphQLView.as_view("graphql", schema=schema) ) if app.config["ENABLE_SWAGGER"] == "1": diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index c1ae87b..5e332f3 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -7,8 +7,10 @@ from flask import request from selfprivacy_api.utils.auth import is_token_valid + class IsAuthenticated(BasePermission): """Is authenticated permission""" + message = "You must be authenticated to access this resource." 
def has_permission(self, source: typing.Any, info: Info, **kwargs) -> bool: diff --git a/selfprivacy_api/graphql/queries/api.py b/selfprivacy_api/graphql/queries/api.py index 5431d7d..b2a81d2 100644 --- a/selfprivacy_api/graphql/queries/api.py +++ b/selfprivacy_api/graphql/queries/api.py @@ -18,20 +18,28 @@ from selfprivacy_api.utils.auth import ( get_token_name, ) + def get_api_version() -> str: """Get API version""" return "1.2.7" + @strawberry.type class ApiDevice: """A single device with SelfPrivacy app installed""" + name: str creation_date: datetime.datetime is_caller: bool + def get_devices() -> typing.List[ApiDevice]: """Get list of devices""" - caller_name = get_token_name(request.headers.get("Authorization").split(" ")[1] if request.headers.get("Authorization") is not None else None) + caller_name = get_token_name( + request.headers.get("Authorization").split(" ")[1] + if request.headers.get("Authorization") is not None + else None + ) tokens = get_tokens_info() return [ ApiDevice( @@ -46,34 +54,52 @@ def get_devices() -> typing.List[ApiDevice]: @strawberry.type class ApiRecoveryKeyStatus: """Recovery key status""" + exists: bool valid: bool creation_date: typing.Optional[datetime.datetime] expiration_date: typing.Optional[datetime.datetime] uses_left: typing.Optional[int] + def get_recovery_key_status() -> ApiRecoveryKeyStatus: """Get recovery key status""" if not is_recovery_token_exists(): return ApiRecoveryKeyStatus( - exists=False, valid=False, creation_date=None, expiration_date=None, uses_left=None + exists=False, + valid=False, + creation_date=None, + expiration_date=None, + uses_left=None, ) status = get_recovery_token_status() if status is None: return ApiRecoveryKeyStatus( - exists=False, valid=False, creation_date=None, expiration_date=None, uses_left=None + exists=False, + valid=False, + creation_date=None, + expiration_date=None, + uses_left=None, ) return ApiRecoveryKeyStatus( exists=True, valid=is_recovery_token_valid(), creation_date=parse_date(status["date"]), - expiration_date=parse_date(status["expiration"]) if status["expiration"] is not None else None, + expiration_date=parse_date(status["expiration"]) + if status["expiration"] is not None + else None, uses_left=status["uses_left"] if status["uses_left"] is not None else None, ) + @strawberry.type class Api: """API access status""" + version: str = strawberry.field(resolver=get_api_version) - devices: typing.List[ApiDevice] = strawberry.field(resolver=get_devices, permission_classes=[IsAuthenticated]) - recovery_key: ApiRecoveryKeyStatus = strawberry.field(resolver=get_recovery_key_status, permission_classes=[IsAuthenticated]) + devices: typing.List[ApiDevice] = strawberry.field( + resolver=get_devices, permission_classes=[IsAuthenticated] + ) + recovery_key: ApiRecoveryKeyStatus = strawberry.field( + resolver=get_recovery_key_status, permission_classes=[IsAuthenticated] + ) diff --git a/selfprivacy_api/graphql/queries/common.py b/selfprivacy_api/graphql/queries/common.py index 9c9c1cb..a1abbdc 100644 --- a/selfprivacy_api/graphql/queries/common.py +++ b/selfprivacy_api/graphql/queries/common.py @@ -4,22 +4,26 @@ import datetime import typing import strawberry + @strawberry.enum class Severity(Enum): """ Severity of an alert. """ + INFO = "INFO" WARNING = "WARNING" ERROR = "ERROR" CRITICAL = "CRITICAL" SUCCESS = "SUCCESS" + @strawberry.type class Alert: """ Alert type. 
""" + severity: Severity title: str message: str diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py index 6d61bac..774d465 100644 --- a/selfprivacy_api/graphql/queries/providers.py +++ b/selfprivacy_api/graphql/queries/providers.py @@ -4,10 +4,12 @@ import datetime import typing import strawberry + @strawberry.enum class DnsProvider(Enum): CLOUDFLARE = "CLOUDFLARE" + @strawberry.enum class ServerProvider(Enum): HETZNER = "HETZNER" diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index f0b84f4..53f0abe 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -1,68 +1,158 @@ """Common system information and settings""" # pylint: disable=too-few-public-methods +import subprocess import typing import strawberry from selfprivacy_api.graphql.queries.common import Alert from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvider +from selfprivacy_api.utils import ReadUserData + @strawberry.type class DnsRecord: """DNS record""" + recordType: str name: str content: str ttl: int priority: typing.Optional[int] + @strawberry.type class SystemDomainInfo: """Information about the system domain""" + domain: str hostname: str provider: DnsProvider required_dns_records: typing.List[DnsRecord] + +def get_system_domain_info() -> SystemDomainInfo: + """Get basic system domain info""" + with ReadUserData() as user_data: + return SystemDomainInfo( + domain=user_data["domain"], + hostname=user_data["hostname"], + provider=DnsProvider.CLOUDFLARE, + # TODO: get ip somehow + required_dns_records=[], + ) + + @strawberry.type class AutoUpgradeOptions: """Automatic upgrade options""" + enable: bool allow_reboot: bool + +def get_auto_upgrade_options() -> AutoUpgradeOptions: + """Get automatic upgrade options""" + with ReadUserData() as user_data: + if "autoUpgrade" not in user_data: + return AutoUpgradeOptions(enable=True, allow_reboot=False) + if "enable" not in user_data["autoUpgrade"]: + user_data["autoUpgrade"]["enable"] = True + if "allowReboot" not in user_data["autoUpgrade"]: + user_data["autoUpgrade"]["allowReboot"] = False + return AutoUpgradeOptions( + enable=user_data["autoUpgrade"]["enable"], + allow_reboot=user_data["autoUpgrade"]["allowReboot"], + ) + + @strawberry.type class SshSettings: """SSH settings and root SSH keys""" + enable: bool password_authentication: bool root_ssh_keys: typing.List[str] + +def get_ssh_settings() -> SshSettings: + """Get SSH settings""" + with ReadUserData() as user_data: + if "ssh" not in user_data: + return SshSettings( + enable=False, password_authentication=False, root_ssh_keys=[] + ) + if "enable" not in user_data["ssh"]: + user_data["ssh"]["enable"] = False + if "passwordAuthentication" not in user_data["ssh"]: + user_data["ssh"]["passwordAuthentication"] = False + if "rootKeys" not in user_data["ssh"]: + user_data["ssh"]["rootKeys"] = [] + return SshSettings( + enable=user_data["ssh"]["enable"], + password_authentication=user_data["ssh"]["passwordAuthentication"], + root_ssh_keys=user_data["ssh"]["rootKeys"], + ) + + +def get_system_timezone() -> str: + """Get system timezone""" + with ReadUserData() as user_data: + if "timezone" not in user_data: + return "Europe/Uzhgorod" + return user_data["timezone"] + + @strawberry.type class SystemSettings: """Common system settings""" - auto_upgrade: AutoUpgradeOptions - ssh: SshSettings - timezone: str + + auto_upgrade: AutoUpgradeOptions = 
strawberry.field( + resolver=get_auto_upgrade_options + ) + ssh: SshSettings = strawberry.field(resolver=get_ssh_settings) + timezone: str = strawberry.field(resolver=get_system_timezone) + + +def get_system_version() -> str: + """Get system version""" + return subprocess.check_output(["uname", "-a"]).decode("utf-8").strip() + + +def get_python_version() -> str: + """Get Python version""" + return subprocess.check_output(["python", "-V"]).decode("utf-8").strip() + @strawberry.type class SystemInfo: """System components versions""" - system_version: str - python_version: str + + system_version: str = strawberry.field(resolver=get_system_version) + python_version: str = strawberry.field(resolver=get_python_version) + @strawberry.type class SystemProviderInfo: """Information about the VPS/Dedicated server provider""" + provider: ServerProvider id: str + +def get_system_provider_info() -> SystemProviderInfo: + """Get system provider info""" + return SystemProviderInfo(provider=ServerProvider.HETZNER, id="UNKNOWN") + + @strawberry.type class System: """ Base system type which represents common system status """ + status: Alert - domain: SystemDomainInfo + domain: SystemDomainInfo = strawberry.field(resolver=get_system_domain_info) settings: SystemSettings info: SystemInfo - provider: SystemProviderInfo - busy: bool + provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info) + busy: bool = False diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index d3105a5..e11e67e 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -10,10 +10,13 @@ from selfprivacy_api.graphql.queries.system import System @strawberry.type class Query: """Root schema for queries""" + system: System + @strawberry.field def api(self) -> Api: """API access status""" return Api() + schema = strawberry.Schema(query=Query) diff --git a/selfprivacy_api/resources/common.py b/selfprivacy_api/resources/common.py index 93c1dde..9ec060a 100644 --- a/selfprivacy_api/resources/common.py +++ b/selfprivacy_api/resources/common.py @@ -3,6 +3,7 @@ from flask_restful import Resource from selfprivacy_api.graphql.queries.api import get_api_version + class ApiVersion(Resource): """SelfPrivacy API version""" diff --git a/selfprivacy_api/resources/system.py b/selfprivacy_api/resources/system.py index db988da..958616e 100644 --- a/selfprivacy_api/resources/system.py +++ b/selfprivacy_api/resources/system.py @@ -5,6 +5,10 @@ import subprocess import pytz from flask import Blueprint from flask_restful import Resource, Api, reqparse +from selfprivacy_api.graphql.queries.system import ( + get_python_version, + get_system_version, +) from selfprivacy_api.utils import WriteUserData, ReadUserData @@ -256,9 +260,7 @@ class SystemVersion(Resource): description: Unauthorized """ return { - "system_version": subprocess.check_output(["uname", "-a"]) - .decode("utf-8") - .strip() + "system_version": get_system_version(), } @@ -279,7 +281,7 @@ class PythonVersion(Resource): 401: description: Unauthorized """ - return subprocess.check_output(["python", "-V"]).decode("utf-8").strip() + return get_python_version() class PullRepositoryChanges(Resource): diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 556ee60..b0d0acc 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -121,7 +121,12 @@ def is_username_forbidden(username): return False + def parse_date(date_str: str) -> datetime.datetime: """Parse 
date string which can be in %Y-%m-%dT%H:%M:%S.%fZ or %Y-%m-%d %H:%M:%S.%f format""" - return datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%fZ") if date_str.endswith("Z") else datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f") + return ( + datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%fZ") + if date_str.endswith("Z") + else datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f") + ) diff --git a/tests/common.py b/tests/common.py new file mode 100644 index 0000000..c1b33fe --- /dev/null +++ b/tests/common.py @@ -0,0 +1,11 @@ +import json + + +def read_json(file_path): + with open(file_path, "r", encoding="utf-8") as file: + return json.load(file) + + +def write_json(file_path, data): + with open(file_path, "w", encoding="utf-8") as file: + json.dump(data, file, indent=4) diff --git a/tests/test_auth.py b/tests/test_auth.py index 819a385..10720ca 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -6,6 +6,8 @@ import re import pytest from mnemonic import Mnemonic +from .common import read_json, write_json + TOKENS_FILE_CONTETS = { "tokens": [ @@ -23,16 +25,6 @@ TOKENS_FILE_CONTETS = { } -def read_json(file_path): - with open(file_path, "r", encoding="utf-8") as file: - return json.load(file) - - -def write_json(file_path, data): - with open(file_path, "w", encoding="utf-8") as file: - json.dump(data, file, indent=4) - - def test_get_tokens_info(authorized_client, tokens_file): response = authorized_client.get("/auth/tokens") assert response.status_code == 200 diff --git a/tests/test_graphql/data/tokens.json b/tests/test_graphql/data/tokens.json new file mode 100644 index 0000000..9be9d02 --- /dev/null +++ b/tests/test_graphql/data/tokens.json @@ -0,0 +1,14 @@ +{ + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": "2022-01-14 08:31:10.789314" + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314" + } + ] +} \ No newline at end of file diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py new file mode 100644 index 0000000..fb0aec8 --- /dev/null +++ b/tests/test_graphql/test_api.py @@ -0,0 +1,112 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +import json +import pytest + +TOKENS_FILE_CONTETS = { + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": "2022-01-14 08:31:10.789314", + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314", + }, + ] +} + + +def test_graphql_get_api_version(authorized_client): + response = authorized_client.get( + "/graphql", + json={ + "query": """ + query { + api { + version + } + } + """ + }, + ) + assert response.status_code == 200 + assert "version" in response.get_json()["data"]["api"] + + +def test_graphql_api_version_unauthorized(client): + response = client.get( + "/graphql", + json={ + "query": """ + query { + api { + version + } + } + """ + }, + ) + assert response.status_code == 200 + assert "version" in response.get_json()["data"]["api"] + + +def test_graphql_tokens_info(authorized_client, tokens_file): + response = authorized_client.get( + "/graphql", + json={ + "query": """ + query { + api { + devices { + creationDate + isCaller + name + } + } + } + """ + }, + ) + assert response.status_code == 200 + assert response.json == { + "data": { + "api": { + "devices": [ + { + "creationDate": "2022-01-14T08:31:10.789314", + "isCaller": True, + "name": "test_token", + }, + { + "creationDate": "2022-01-14T08:31:10.789314", + 
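                        # Strawberry exposes the snake_case Python fields
                        # (creation_date, is_caller) as camelCase GraphQL fields,
                        # hence the camelCase keys expected here.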
"isCaller": False, + "name": "test_token2", + }, + ] + } + } + } + + +def test_graphql_tokens_info_unauthorized(client, tokens_file): + response = client.get( + "/graphql", + json={ + "query": """ + query { + api { + devices { + creationDate + isCaller + name + } + } + } + """ + }, + ) + assert response.status_code == 200 + assert response.json["data"] is None From 45c3e3003d01941ccca61846e81b591f8b7d7992 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Jun 2022 21:18:21 +0300 Subject: [PATCH 19/39] hhh --- selfprivacy_api/graphql/queries/system.py | 13 +++++++++---- selfprivacy_api/graphql/schema.py | 5 ++++- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index 53f0abe..06405a4 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -4,7 +4,7 @@ import subprocess import typing import strawberry -from selfprivacy_api.graphql.queries.common import Alert +from selfprivacy_api.graphql.queries.common import Alert, Severity from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvider from selfprivacy_api.utils import ReadUserData @@ -150,9 +150,14 @@ class System: Base system type which represents common system status """ - status: Alert + status: Alert = strawberry.field(resolver=lambda: Alert( + severity=Severity.INFO, + title="Test message", + message="Test message", + timestamp=None + )) domain: SystemDomainInfo = strawberry.field(resolver=get_system_domain_info) - settings: SystemSettings - info: SystemInfo + settings: SystemSettings = SystemSettings() + info: SystemInfo = SystemInfo() provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info) busy: bool = False diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index e11e67e..3e3fac7 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -11,7 +11,10 @@ from selfprivacy_api.graphql.queries.system import System class Query: """Root schema for queries""" - system: System + @strawberry.field + def system(self) -> System: + """System queries""" + return System() @strawberry.field def api(self) -> Api: From 503a39f390fa01eb27ee383e81c41aff39c80481 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 29 Jun 2022 20:39:46 +0300 Subject: [PATCH 20/39] API keys graphql tests --- .pylintrc | 2 + selfprivacy_api/graphql/mutations/__init__.py | 0 .../graphql/mutations/api_mutations.py | 52 ++ .../graphql/mutations/mutation_interface.py | 7 + .../queries/{api.py => api_queries.py} | 0 selfprivacy_api/graphql/schema.py | 12 +- selfprivacy_api/resources/common.py | 2 +- selfprivacy_api/utils/auth.py | 3 +- tests/common.py | 8 +- tests/test_auth.py | 5 +- tests/test_graphql/__init__.py | 0 tests/test_graphql/test_api.py | 109 +--- tests/test_graphql/test_api_devices.py | 446 +++++++++++++++ tests/test_graphql/test_api_recovery.py | 534 ++++++++++++++++++ tests/test_graphql/test_api_version.py | 28 + 15 files changed, 1114 insertions(+), 94 deletions(-) create mode 100644 .pylintrc create mode 100644 selfprivacy_api/graphql/mutations/__init__.py create mode 100644 selfprivacy_api/graphql/mutations/api_mutations.py create mode 100644 selfprivacy_api/graphql/mutations/mutation_interface.py rename selfprivacy_api/graphql/queries/{api.py => api_queries.py} (100%) create mode 100644 tests/test_graphql/__init__.py create mode 100644 tests/test_graphql/test_api_devices.py create mode 100644 
tests/test_graphql/test_api_recovery.py create mode 100644 tests/test_graphql/test_api_version.py diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..6a584c2 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,2 @@ +[MASTER] +init-hook='import sys; sys.path.append("/path/to/root")' diff --git a/selfprivacy_api/graphql/mutations/__init__.py b/selfprivacy_api/graphql/mutations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py new file mode 100644 index 0000000..6604f7e --- /dev/null +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -0,0 +1,52 @@ +"""API access mutations""" +# pylint: disable=too-few-public-methods +import datetime +import typing +from flask import request +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.mutations.mutation_interface import MutationReturnInterface +from selfprivacy_api.utils import parse_date + +from selfprivacy_api.utils.auth import ( + generate_recovery_token +) + +@strawberry.type +class ApiKeyMutationReturn(MutationReturnInterface): + key: typing.Optional[str] + +@strawberry.input +class RecoveryKeyLimitsInput: + """Recovery key limits input""" + expiration_date: typing.Optional[datetime.datetime] + uses: typing.Optional[int] + +@strawberry.type +class ApiMutations: + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def getNewRecoveryApiKey(self, limits: RecoveryKeyLimitsInput) -> ApiKeyMutationReturn: + """Generate recovery key""" + if limits.expiration_date is not None: + if limits.expiration_date < datetime.datetime.now(): + return ApiKeyMutationReturn( + success=False, + message="Expiration date must be in the future", + code=400, + key=None, + ) + if limits.uses is not None: + if limits.uses < 1: + return ApiKeyMutationReturn( + success=False, + message="Uses must be greater than 0", + code=400, + key=None, + ) + key = generate_recovery_token(limits.expiration_date, limits.uses) + return ApiKeyMutationReturn( + success=True, + message="Recovery key generated", + code=200, + key=key, + ) diff --git a/selfprivacy_api/graphql/mutations/mutation_interface.py b/selfprivacy_api/graphql/mutations/mutation_interface.py new file mode 100644 index 0000000..f5c212b --- /dev/null +++ b/selfprivacy_api/graphql/mutations/mutation_interface.py @@ -0,0 +1,7 @@ +import strawberry + +@strawberry.interface +class MutationReturnInterface: + success: bool + message: str + code: int diff --git a/selfprivacy_api/graphql/queries/api.py b/selfprivacy_api/graphql/queries/api_queries.py similarity index 100% rename from selfprivacy_api/graphql/queries/api.py rename to selfprivacy_api/graphql/queries/api_queries.py diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 3e3fac7..71c9c18 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -2,8 +2,10 @@ # pylint: disable=too-few-public-methods import typing import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations -from selfprivacy_api.graphql.queries.api import Api +from selfprivacy_api.graphql.queries.api_queries import Api from selfprivacy_api.graphql.queries.system import System @@ -11,7 +13,7 @@ from selfprivacy_api.graphql.queries.system import System class Query: """Root schema for queries""" - @strawberry.field + 
@strawberry.field(permission_classes=[IsAuthenticated]) def system(self) -> System: """System queries""" return System() @@ -21,5 +23,9 @@ class Query: """API access status""" return Api() +@strawberry.type +class Mutation(ApiMutations): + """Root schema for mutations""" + pass -schema = strawberry.Schema(query=Query) +schema = strawberry.Schema(query=Query, mutation=Mutation) diff --git a/selfprivacy_api/resources/common.py b/selfprivacy_api/resources/common.py index 9ec060a..f78aad6 100644 --- a/selfprivacy_api/resources/common.py +++ b/selfprivacy_api/resources/common.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """Unassigned views""" from flask_restful import Resource -from selfprivacy_api.graphql.queries.api import get_api_version +from selfprivacy_api.graphql.queries.api_queries import get_api_version class ApiVersion(Resource): diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 8e5a5b2..8810149 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -3,6 +3,7 @@ import secrets from datetime import datetime, timedelta import re +import typing from mnemonic import Mnemonic @@ -190,7 +191,7 @@ def _get_recovery_token(): return tokens["recovery_token"]["token"] -def generate_recovery_token(expiration=None, uses_left=None): +def generate_recovery_token(expiration: typing.Optional[datetime], uses_left: typing.Optional[int]) -> str: """Generate a 24 bytes recovery token and return a mneomnic word list. Write a string representation of the recovery token to the tokens.json file. """ diff --git a/tests/common.py b/tests/common.py index c1b33fe..950c850 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1,5 +1,5 @@ import json - +from mnemonic import Mnemonic def read_json(file_path): with open(file_path, "r", encoding="utf-8") as file: @@ -9,3 +9,9 @@ def read_json(file_path): def write_json(file_path, data): with open(file_path, "w", encoding="utf-8") as file: json.dump(data, file, indent=4) + +def generate_api_query(query_array): + return "query TestApi {\n api {" + "\n".join(query_array) + "}\n}" + +def mnemonic_to_hex(mnemonic): + return Mnemonic(language="english").to_entropy(mnemonic).hex() diff --git a/tests/test_auth.py b/tests/test_auth.py index 10720ca..4d78f62 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,5 +1,6 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument +# pylint: disable=missing-function-docstring import datetime import json import re @@ -383,7 +384,7 @@ def test_generate_recovery_token_with_expiration_date( def test_generate_recovery_token_with_expiration_in_the_past( - authorized_client, client, tokens_file + authorized_client, tokens_file ): # Server must return 400 if expiration date is in the past expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) @@ -397,7 +398,7 @@ def test_generate_recovery_token_with_expiration_in_the_past( def test_generate_recovery_token_with_invalid_time_format( - authorized_client, client, tokens_file + authorized_client, tokens_file ): # Server must return 400 if expiration date is in the past expiration_date = "invalid_time_format" diff --git a/tests/test_graphql/__init__.py b/tests/test_graphql/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py index fb0aec8..81b6175 100644 --- a/tests/test_graphql/test_api.py +++ b/tests/test_graphql/test_api.py @@ -1,8 +1,13 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument 
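
For reference, the generate_api_query helper added to tests/common.py in this patch just splices the requested field selections into a fixed query shell; this is what the rewritten tests below send to /graphql:

    from tests.common import generate_api_query

    # The helper wraps the selections in "query TestApi { api { ... } }":
    assert generate_api_query(["version"]) == "query TestApi {\n api {version}\n}"
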
-import json +# pylint: disable=missing-function-docstring import pytest +from tests.common import generate_api_query +from tests.test_graphql.test_api_devices import API_DEVICES_QUERY +from tests.test_graphql.test_api_recovery import API_RECOVERY_QUERY +from tests.test_graphql.test_api_version import API_VERSION_QUERY + TOKENS_FILE_CONTETS = { "tokens": [ { @@ -18,95 +23,27 @@ TOKENS_FILE_CONTETS = { ] } - -def test_graphql_get_api_version(authorized_client): +def test_graphql_get_entire_api_data(authorized_client, tokens_file): response = authorized_client.get( "/graphql", json={ - "query": """ - query { - api { - version - } - } - """ + "query": generate_api_query([API_VERSION_QUERY, API_DEVICES_QUERY, API_RECOVERY_QUERY]) }, ) assert response.status_code == 200 + assert response.json.get("data") is not None assert "version" in response.get_json()["data"]["api"] - - -def test_graphql_api_version_unauthorized(client): - response = client.get( - "/graphql", - json={ - "query": """ - query { - api { - version - } - } - """ - }, - ) - assert response.status_code == 200 - assert "version" in response.get_json()["data"]["api"] - - -def test_graphql_tokens_info(authorized_client, tokens_file): - response = authorized_client.get( - "/graphql", - json={ - "query": """ - query { - api { - devices { - creationDate - isCaller - name - } - } - } - """ - }, - ) - assert response.status_code == 200 - assert response.json == { - "data": { - "api": { - "devices": [ - { - "creationDate": "2022-01-14T08:31:10.789314", - "isCaller": True, - "name": "test_token", - }, - { - "creationDate": "2022-01-14T08:31:10.789314", - "isCaller": False, - "name": "test_token2", - }, - ] - } - } - } - - -def test_graphql_tokens_info_unauthorized(client, tokens_file): - response = client.get( - "/graphql", - json={ - "query": """ - query { - api { - devices { - creationDate - isCaller - name - } - } - } - """ - }, - ) - assert response.status_code == 200 - assert response.json["data"] is None + assert response.json["data"]["api"]["devices"] is not None + assert len(response.json["data"]["api"]["devices"]) == 2 + assert response.json["data"]["api"]["devices"][0]["creationDate"] == "2022-01-14T08:31:10.789314" + assert response.json["data"]["api"]["devices"][0]["isCaller"] is True + assert response.json["data"]["api"]["devices"][0]["name"] == "test_token" + assert response.json["data"]["api"]["devices"][1]["creationDate"] == "2022-01-14T08:31:10.789314" + assert response.json["data"]["api"]["devices"][1]["isCaller"] is False + assert response.json["data"]["api"]["devices"][1]["name"] == "test_token2" + assert response.json["data"]["api"]["recoveryKey"] is not None + assert response.json["data"]["api"]["recoveryKey"]["exists"] is False + assert response.json["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is None + assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py new file mode 100644 index 0000000..0406371 --- /dev/null +++ b/tests/test_graphql/test_api_devices.py @@ -0,0 +1,446 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import datetime +import json +import pytest +from mnemonic import Mnemonic + +from tests.common import generate_api_query, read_json, write_json + +TOKENS_FILE_CONTETS = 
{ + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": "2022-01-14 08:31:10.789314", + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314", + }, + ] +} + +API_DEVICES_QUERY = """ +devices { + creationDate + isCaller + name +} +""" + +def test_graphql_tokens_info(authorized_client, tokens_file): + response = authorized_client.get( + "/graphql", + json={ + "query": generate_api_query([API_DEVICES_QUERY]) + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["api"]["devices"] is not None + assert len(response.json["data"]["api"]["devices"]) == 2 + assert response.json["data"]["api"]["devices"][0]["creationDate"] == "2022-01-14T08:31:10.789314" + assert response.json["data"]["api"]["devices"][0]["isCaller"] is True + assert response.json["data"]["api"]["devices"][0]["name"] == "test_token" + assert response.json["data"]["api"]["devices"][1]["creationDate"] == "2022-01-14T08:31:10.789314" + assert response.json["data"]["api"]["devices"][1]["isCaller"] is False + assert response.json["data"]["api"]["devices"][1]["name"] == "test_token2" + +def test_graphql_tokens_info_unauthorized(client, tokens_file): + response = client.get( + "/graphql", + json={ + "query": generate_api_query([API_DEVICES_QUERY]) + }, + ) + assert response.status_code == 200 + assert response.json["data"] is None + +DELETE_TOKEN_MUTATION = """ +mutation DeleteToken($device: String!) { + deleteDeviceApiToken(device: $device) { + success + message + code + } +} +""" + +def test_graphql_delete_token_unauthorized(client, tokens_file): + response = client.post( + "/graphql", + json={ + "query": DELETE_TOKEN_MUTATION, + "variables": { + "device": "test_token", + }, + }, + ) + assert response.status_code == 200 + assert response.json["data"] is None + +def test_graphql_delete_token(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": DELETE_TOKEN_MUTATION, + "variables": { + "device": "test_token", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["deleteDeviceApiToken"]["success"] is True + assert response.json["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json["data"]["deleteDeviceApiToken"]["code"] == 200 + assert read_json(tokens_file) == { + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314", + } + ] + } + +def test_graphql_delete_self_token(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": DELETE_TOKEN_MUTATION, + "variables": { + "device": "test_token", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["deleteDeviceApiToken"]["success"] is False + assert response.json["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json["data"]["deleteDeviceApiToken"]["code"] == 400 + assert read_json(tokens_file) == TOKENS_FILE_CONTETS + +def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": DELETE_TOKEN_MUTATION, + "variables": { + "device": "test_token3", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["deleteDeviceApiToken"]["success"] is False + assert 
response.json["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json["data"]["deleteDeviceApiToken"]["code"] == 404 + assert read_json(tokens_file) == TOKENS_FILE_CONTETS + +REFRESH_TOKEN_MUTATION = """ +mutation RefreshToken { + refreshDeviceApiToken { + success + message + code + } +} +""" + +def test_graphql_refresh_token_unauthorized(client, tokens_file): + response = client.post( + "/graphql", + json={ + "query": REFRESH_TOKEN_MUTATION + }, + ) + assert response.status_code == 200 + assert response.json["data"] is None + +def test_graphql_refresh_token(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": REFRESH_TOKEN_MUTATION + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["refreshDeviceApiToken"]["success"] is True + assert response.json["data"]["refreshDeviceApiToken"]["message"] is not None + assert response.json["data"]["refreshDeviceApiToken"]["code"] == 200 + assert read_json(tokens_file) == { + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314", + } + ] + } + +NEW_DEVICE_KEY_MUTATION = """ +mutation NewDeviceKey { + getNewDeviceApiKey { + success + message + code + key + } +} +""" + +def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): + response = client.post( + "/graphql", + json={ + "query": NEW_DEVICE_KEY_MUTATION + }, + ) + assert response.status_code == 200 + assert response.json["data"] is None + +def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": NEW_DEVICE_KEY_MUTATION + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["getNewDeviceApiToken"]["success"] is True + assert response.json["data"]["getNewDeviceApiToken"]["message"] is not None + assert response.json["data"]["getNewDeviceApiToken"]["code"] == 200 + assert response.json["data"]["getNewDeviceApiToken"]["key"].split(" ").__len__() == 12 + token = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiToken"]["key"]).hex() + assert read_json(tokens_file)["new_device"]["token"] == token + +INVALIDATE_NEW_DEVICE_KEY_MUTATION = """ +mutation InvalidateNewDeviceKey { + invalidateNewDeviceApiKey { + success + message + code + } +} +""" + +def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): + response = client.post( + "/graphql", + json={ + "query": DELETE_TOKEN_MUTATION, + "variables": { + "device": "test_token", + }, + }, + ) + assert response.status_code == 200 + assert response.json["data"] is None + +def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": NEW_DEVICE_KEY_MUTATION + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["getNewDeviceApiToken"]["success"] is True + assert response.json["data"]["getNewDeviceApiToken"]["message"] is not None + assert response.json["data"]["getNewDeviceApiToken"]["code"] == 200 + assert response.json["data"]["getNewDeviceApiToken"]["key"].split(" ").__len__() == 12 + token = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiToken"]["key"]).hex() + assert read_json(tokens_file)["new_device"]["token"] == token + response = authorized_client.post( + 
"/graphql", + json={ + "query": INVALIDATE_NEW_DEVICE_KEY_MUTATION + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["invalidateNewDeviceApiKey"]["success"] is True + assert response.json["data"]["invalidateNewDeviceApiKey"]["message"] is not None + assert response.json["data"]["invalidateNewDeviceApiKey"]["code"] == 200 + assert read_json(tokens_file) == TOKENS_FILE_CONTETS + +AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """ +mutation AuthorizeWithNewDeviceKey($input: UseNewDeviceKeyInput!) { + authorizeWithNewDeviceApiKey(inupt: $input) { + success + message + code + token + } +} +""" + +def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": NEW_DEVICE_KEY_MUTATION + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["getNewDeviceApiToken"]["success"] is True + assert response.json["data"]["getNewDeviceApiToken"]["message"] is not None + assert response.json["data"]["getNewDeviceApiToken"]["code"] == 200 + assert response.json["data"]["getNewDeviceApiToken"]["key"].split(" ").__len__() == 12 + key = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiToken"]["key"]).hex() + assert read_json(tokens_file)["new_device"]["token"] == key + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "inupt": { + "key": key, + "deviceName": "test_token", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is True + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + token = response.json["data"]["authorizeWithNewDeviceApiKey"]["token"] + assert read_json(tokens_file)["tokens"][2]["token"] == token + assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" + +def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "inupt": { + "key": "invalid_token", + "deviceName": "test_token", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert read_json(tokens_file) == TOKENS_FILE_CONTETS + +def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": NEW_DEVICE_KEY_MUTATION + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["getNewDeviceApiToken"]["success"] is True + assert response.json["data"]["getNewDeviceApiToken"]["message"] is not None + assert response.json["data"]["getNewDeviceApiToken"]["code"] == 200 + assert response.json["data"]["getNewDeviceApiToken"]["key"].split(" ").__len__() == 12 + key = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiToken"]["key"]).hex() + assert 
read_json(tokens_file)["new_device"]["token"] == key + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "inupt": { + "key": key, + "deviceName": "test_token", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is True + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + assert read_json(tokens_file) == TOKENS_FILE_CONTETS + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "inupt": { + "key": key, + "deviceName": "test_token2", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert read_json(tokens_file) == TOKENS_FILE_CONTETS + +def test_graphql_get_and_authorize_key_after_12_minutes(client, authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": NEW_DEVICE_KEY_MUTATION + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["getNewDeviceApiToken"]["success"] is True + assert response.json["data"]["getNewDeviceApiToken"]["message"] is not None + assert response.json["data"]["getNewDeviceApiToken"]["code"] == 200 + assert response.json["data"]["getNewDeviceApiToken"]["key"].split(" ").__len__() == 12 + key = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiToken"]["key"]).hex() + assert read_json(tokens_file)["new_device"]["token"] == key + + file_data = read_json(tokens_file) + file_data["new_device"]["expiration"] = str( + datetime.datetime.now() - datetime.timedelta(minutes=13) + ) + write_json(tokens_file, file_data) + + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "inupt": { + "key": key, + "deviceName": "test_token", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + +def test_graphql_authorize_without_token(client, tokens_file): + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "inupt": { + "deviceName": "test_token", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py new file mode 100644 index 0000000..8ac8560 --- /dev/null +++ b/tests/test_graphql/test_api_recovery.py @@ -0,0 +1,534 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import json +import pytest +import datetime + +from tests.common import generate_api_query, mnemonic_to_hex, read_json, write_json + +TOKENS_FILE_CONTETS = { + "tokens": [ + { + "token": "TEST_TOKEN", + 
"name": "test_token", + "date": "2022-01-14 08:31:10.789314", + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314", + }, + ] +} + +API_RECOVERY_QUERY = """ +recoveryKey { + exists + valid + creationDate + expirationDate + usesLeft +} +""" + +def test_graphql_recovery_key_status_unauthorized(client, tokens_file): + response = client.post( + "/graphql", + json={ + "query": generate_api_query([API_RECOVERY_QUERY]) + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + +def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_file): + response = authorized_client.get( + "/graphql", + json={ + "query": generate_api_query([API_RECOVERY_QUERY]) + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["api"]["recoveryKey"] is not None + assert response.json["data"]["api"]["recoveryKey"]["exists"] is False + assert response.json["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is None + assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + +API_RECOVERY_KEY_GENERATE_MUTATION = """ +mutation TestGenerateRecoveryKey($limits: RecoveryKeyLimitsInput!) { + getNewRecoveryApiKey(limits: $limits) { + success + message + code + key + } +} +""" + +API_RECOVERY_KEY_USE_MUTATION = """ +mutation TestUseRecoveryKey($input: UseRecoveryKeyInput!) { + useRecoveryApiKey(input: $input) { + success + message + code + token + } +} +""" +def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "uses": None, + "expirationDate": None, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + assert read_json(tokens_file)["recovery_token"] is not None + time_generated = read_json(tokens_file)["recovery_token"]["date"] + assert time_generated is not None + key = response.json["data"]["getNewRecoveryApiKey"]["key"] + assert ( + datetime.datetime.strptime( + time_generated, "%Y-%m-%dT%H:%M:%S.%fZ" + ) - datetime.timedelta(seconds=5) < datetime.datetime.now() + ) + + # Try to get token status + response = authorized_client.get( + "/graphql", + json={ + "query": generate_api_query([API_RECOVERY_QUERY]) + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["api"]["recoveryKey"] is not None + assert response.json["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + + # Try to use token + response = client.post( + "/graphql", + json={ + "query": 
API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "token": key, + "tokenName": "test_token", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["useRecoveryKey"]["success"] is True + assert response.json["data"]["useRecoveryKey"]["message"] is not None + assert response.json["data"]["useRecoveryKey"]["code"] == 200 + assert response.json["data"]["useRecoveryKey"]["token"] is not None + assert response.json["data"]["useRecoveryKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] + assert read_json(tokens_file)["tokens"][2]["name"] == "test_token" + + # Try to use token again + response = client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "token": key, + "tokenName": "test_token2", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["useRecoveryKey"]["success"] is True + assert response.json["data"]["useRecoveryKey"]["message"] is not None + assert response.json["data"]["useRecoveryKey"]["code"] == 200 + assert response.json["data"]["useRecoveryKey"]["token"] is not None + assert response.json["data"]["useRecoveryKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] + assert read_json(tokens_file)["tokens"][3]["name"] == "test_token2" + +def test_graphql_generate_recovery_key_with_expiration_date(client, authorized_client, tokens_file): + expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) + expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "uses": None, + "expirationDate": expiration_date_str, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + assert read_json(tokens_file)["recovery_token"] is not None + + key = response.json["data"]["getNewRecoveryApiKey"]["key"] + assert read_json(tokens_file)["recovery_token"]["expirationDate"] == expiration_date_str + assert read_json(tokens_file)["recovery_token"]["token"] == mnemonic_to_hex(key) + + time_generated = read_json(tokens_file)["recovery_token"]["date"] + assert time_generated is not None + assert ( + datetime.datetime.strptime( + time_generated, "%Y-%m-%dT%H:%M:%S.%fZ" + ) - datetime.timedelta(seconds=5) < datetime.datetime.now() + ) + + # Try to get token status + response = authorized_client.get( + "/graphql", + json={ + "query": generate_api_query([API_RECOVERY_QUERY]) + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["api"]["recoveryKey"] is not None + assert response.json["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] == expiration_date_str + assert 
response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + + # Try to use token + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "token": key, + "tokenName": "test_token", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["useRecoveryKey"]["success"] is True + assert response.json["data"]["useRecoveryKey"]["message"] is not None + assert response.json["data"]["useRecoveryKey"]["code"] == 200 + assert response.json["data"]["useRecoveryKey"]["token"] is not None + assert response.json["data"]["useRecoveryKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] + + # Try to use token again + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "token": key, + "tokenName": "test_token2", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["useRecoveryKey"]["success"] is True + assert response.json["data"]["useRecoveryKey"]["message"] is not None + assert response.json["data"]["useRecoveryKey"]["code"] == 200 + assert response.json["data"]["useRecoveryKey"]["token"] is not None + assert response.json["data"]["useRecoveryKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] + + # Try to use token after expiration date + new_data = read_json(tokens_file) + new_data["recovery_token"]["expirationDate"] = datetime.datetime.now() - datetime.timedelta(minutes=5) + write_json(tokens_file, new_data) + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "token": key, + "tokenName": "test_token3", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["useRecoveryKey"]["success"] is False + assert response.json["data"]["useRecoveryKey"]["message"] is not None + assert response.json["data"]["useRecoveryKey"]["code"] == 404 + assert response.json["data"]["useRecoveryKey"]["token"] is None + + assert read_json(tokens_file)["tokens"] == new_data["tokens"] + + # Try to get token status + response = authorized_client.get( + "/graphql", + json={ + "query": generate_api_query([API_RECOVERY_QUERY]) + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["api"]["recoveryKey"] is not None + assert response.json["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] == new_data["recovery_token"]["expiration"] + assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + +def test_graphql_generate_recoevry_key_with_expiration_in_the_past(authorized_client, tokens_file): + expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) + expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "uses": None, + "expirationDate": expiration_date_str, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert 
response.json["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + + assert read_json(tokens_file)["tokens"] == [] + assert "recovery_token" not in read_json(tokens_file) + +def test_graphql_generate_recovery_key_with_invalid_time_format(authorized_client, tokens_file): + expiration_date = "invalid_time_format" + expiration_date_str = expiration_date + + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "uses": None, + "expirationDate": expiration_date_str, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + + assert read_json(tokens_file)["tokens"] == [] + assert "recovery_token" not in read_json(tokens_file) + +def test_graphql_generate_recovery_key_with_limited_uses(authorized_client, tokens_file): + + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "expirationDate": None, + "uses": 2, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + + mnemonic_key = response.json["data"]["getNewRecoveryApiKey"]["key"] + key = mnemonic_to_hex(mnemonic_key) + + assert read_json(tokens_file)["recovery_token"]["token"] == key + assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 + + # Try to get token status + response = authorized_client.get( + "/graphql", + json={ + "query": generate_api_query([API_RECOVERY_QUERY]) + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["api"]["recoveryKey"] is not None + assert response.json["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 2 + + # Try to use token + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "token": mnemonic_key, + "tokenName": "test_token1", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["useRecoveryKey"]["success"] is True + assert response.json["data"]["useRecoveryKey"]["message"] is not None + assert response.json["data"]["useRecoveryKey"]["code"] == 200 + assert response.json["data"]["useRecoveryKey"]["token"] is not None + + # Try to get token status + response = authorized_client.get( + "/graphql", + json={ + "query": 
generate_api_query([API_RECOVERY_QUERY]) + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["api"]["recoveryKey"] is not None + assert response.json["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 1 + + # Try to use token + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "token": mnemonic_key, + "tokenName": "test_token2", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["useRecoveryKey"]["success"] is True + assert response.json["data"]["useRecoveryKey"]["message"] is not None + assert response.json["data"]["useRecoveryKey"]["code"] == 200 + assert response.json["data"]["useRecoveryKey"]["token"] is not None + + # Try to get token status + response = authorized_client.get( + "/graphql", + json={ + "query": generate_api_query([API_RECOVERY_QUERY]) + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["api"]["recoveryKey"] is not None + assert response.json["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is not None + assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 0 + + # Try to use token + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "token": mnemonic_key, + "tokenName": "test_token3", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["useRecoveryKey"]["success"] is False + assert response.json["data"]["useRecoveryKey"]["message"] is not None + assert response.json["data"]["useRecoveryKey"]["code"] == 404 + assert response.json["data"]["useRecoveryKey"]["token"] is None + +def test_graphql_generate_recovery_key_with_negative_uses(authorized_client, tokens_file): + # Try to get token status + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "uses": -1, + "expirationDate": None, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + +def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): + # Try to get token status + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "uses": 0, + "expirationDate": None, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert 
response.json["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None diff --git a/tests/test_graphql/test_api_version.py b/tests/test_graphql/test_api_version.py new file mode 100644 index 0000000..a45aa3a --- /dev/null +++ b/tests/test_graphql/test_api_version.py @@ -0,0 +1,28 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring + +from tests.common import generate_api_query + +API_VERSION_QUERY = "version" + +def test_graphql_get_api_version(authorized_client): + response = authorized_client.get( + "/graphql", + json={ + "query": generate_api_query([API_VERSION_QUERY]) + }, + ) + assert response.status_code == 200 + assert "version" in response.get_json()["data"]["api"] + + +def test_graphql_api_version_unauthorized(client): + response = client.get( + "/graphql", + json={ + "query": generate_api_query([API_VERSION_QUERY]) + }, + ) + assert response.status_code == 200 + assert "version" in response.get_json()["data"]["api"] From 376bf1ef77fb99ed9caa6bd8e0df5c10c153d774 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 5 Jul 2022 08:14:37 +0300 Subject: [PATCH 21/39] Add more tests --- .../resources/services/mailserver.py | 16 +- selfprivacy_api/utils/__init__.py | 12 + selfprivacy_api/utils/network.py | 22 ++ tests/common.py | 3 + tests/test_graphql/test_system.py | 227 ++++++++++++++++++ tests/test_graphql/test_system/domain | 1 + tests/test_graphql/test_system/no_values.json | 50 ++++ .../test_graphql/test_system/turned_off.json | 52 ++++ tests/test_graphql/test_system/turned_on.json | 52 ++++ tests/test_graphql/test_system/undefined.json | 47 ++++ tests/test_network_utils.py | 37 +++ 11 files changed, 509 insertions(+), 10 deletions(-) create mode 100644 selfprivacy_api/utils/network.py create mode 100644 tests/test_graphql/test_system.py create mode 100644 tests/test_graphql/test_system/domain create mode 100644 tests/test_graphql/test_system/no_values.json create mode 100644 tests/test_graphql/test_system/turned_off.json create mode 100644 tests/test_graphql/test_system/turned_on.json create mode 100644 tests/test_graphql/test_system/undefined.json create mode 100644 tests/test_network_utils.py diff --git a/selfprivacy_api/resources/services/mailserver.py b/selfprivacy_api/resources/services/mailserver.py index 1185d20..bf42c7d 100644 --- a/selfprivacy_api/resources/services/mailserver.py +++ b/selfprivacy_api/resources/services/mailserver.py @@ -7,7 +7,7 @@ from flask_restful import Resource from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import get_domain +from selfprivacy_api.utils import get_dkim_key, get_domain class DKIMKey(Resource): @@ -31,15 +31,11 @@ class DKIMKey(Resource): """ domain = get_domain() - if os.path.exists("/var/dkim/" + domain + ".selector.txt"): - cat_process = subprocess.Popen( - ["cat", "/var/dkim/" + domain + ".selector.txt"], stdout=subprocess.PIPE - ) - dkim = cat_process.communicate()[0] - dkim = base64.b64encode(dkim) - dkim = str(dkim, "utf-8") - return dkim - return "DKIM file not found", 404 + dkim = get_dkim_key(domain) + if dkim is None: + return "DKIM file not found", 404 + dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") + return dkim api.add_resource(DKIMKey, "/mailserver/dkim") diff --git 
a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index b0d0acc..adb0409 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -3,6 +3,8 @@ import datetime from enum import Enum import json +import os +import subprocess import portalocker @@ -130,3 +132,13 @@ def parse_date(date_str: str) -> datetime.datetime: if date_str.endswith("Z") else datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f") ) + +def get_dkim_key(domain): + """Get DKIM key from /var/dkim/.selector.txt""" + if os.path.exists("/var/dkim/" + domain + ".selector.txt"): + cat_process = subprocess.Popen( + ["cat", "/var/dkim/" + domain + ".selector.txt"], stdout=subprocess.PIPE + ) + dkim = cat_process.communicate()[0] + return str(dkim, "utf-8") + return None diff --git a/selfprivacy_api/utils/network.py b/selfprivacy_api/utils/network.py new file mode 100644 index 0000000..1aa4644 --- /dev/null +++ b/selfprivacy_api/utils/network.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python3 +"""Network utils""" +import subprocess +import re + +def get_ip4(): + """Get IPv4 address""" + try: + ip4 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode("utf-8") + ip4 = re.search(r"inet (\d+\.\d+\.\d+\.\d+)\/\d+", ip4) + except subprocess.CalledProcessError: + ip4 = None + return ip4.group(1) if ip4 else None + +def get_ip6(): + """Get IPv6 address""" + try: + ip6 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode("utf-8") + ip6 = re.search(r"inet6 (\S+)\/\d+", ip6) + except subprocess.CalledProcessError: + ip6 = None + return ip6.group(1) if ip6 else None diff --git a/tests/common.py b/tests/common.py index 950c850..d3dda69 100644 --- a/tests/common.py +++ b/tests/common.py @@ -13,5 +13,8 @@ def write_json(file_path, data): def generate_api_query(query_array): return "query TestApi {\n api {" + "\n".join(query_array) + "}\n}" +def generate_system_query(query_array): + return "query TestSystem {\n system {" + "\n".join(query_array) + "}\n}" + def mnemonic_to_hex(mnemonic): return Mnemonic(language="english").to_entropy(mnemonic).hex() diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py new file mode 100644 index 0000000..661a68b --- /dev/null +++ b/tests/test_graphql/test_system.py @@ -0,0 +1,227 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import json +import pytest +import datetime + +from tests.common import generate_system_query, read_json, write_json + +@pytest.fixture +def domain_file(mocker, datadir): + mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain") + return datadir + + +@pytest.fixture +def turned_on(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") + assert read_json(datadir / "turned_on.json")["autoUpgrade"]["enable"] == True + assert read_json(datadir / "turned_on.json")["autoUpgrade"]["allowReboot"] == True + assert read_json(datadir / "turned_on.json")["timezone"] == "Europe/Moscow" + return datadir + + +@pytest.fixture +def turned_off(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") + assert read_json(datadir / "turned_off.json")["autoUpgrade"]["enable"] == False + assert read_json(datadir / "turned_off.json")["autoUpgrade"]["allowReboot"] == False + assert read_json(datadir / "turned_off.json")["timezone"] == "Europe/Moscow" + return datadir + + +@pytest.fixture +def undefined_config(mocker, 
datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") + assert "autoUpgrade" not in read_json(datadir / "undefined.json") + assert "timezone" not in read_json(datadir / "undefined.json") + return datadir + + +@pytest.fixture +def no_values(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_values.json") + assert "enable" not in read_json(datadir / "no_values.json")["autoUpgrade"] + assert "allowReboot" not in read_json(datadir / "no_values.json")["autoUpgrade"] + return datadir + + +class ProcessMock: + """Mock subprocess.Popen""" + + def __init__(self, args, **kwargs): + self.args = args + self.kwargs = kwargs + + def communicate(): + return (b"", None) + + returncode = 0 + + +class BrokenServiceMock(ProcessMock): + def communicate(): + return (b"Testing error", None) + + returncode = 3 + + +@pytest.fixture +def mock_subprocess_popen(mocker): + mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + return mock + + +@pytest.fixture +def mock_os_chdir(mocker): + mock = mocker.patch("os.chdir", autospec=True) + return mock + + +@pytest.fixture +def mock_broken_service(mocker): + mock = mocker.patch( + "subprocess.Popen", autospec=True, return_value=BrokenServiceMock + ) + return mock + + +@pytest.fixture +def mock_subprocess_check_output(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=b"Testing Linux" + ) + return mock + +@pytest.fixture +def mock_get_ip4(mocker): + mock = mocker.patch("selfprivacy_api.utils.get_ip4", autospec=True, return_value="157.90.247.192") + return mock + +@pytest.fixture +def mock_get_ip6(mocker): + mock = mocker.patch("selfprivacy_api.utils.get_ip6", autospec=True, return_value="fe80::9400:ff:fef1:34ae") + return mock + +@pytest.fixture +def mock_dkim_key(mocker): + mock = mocker.patch("selfprivacy_api.utils.get_dkim_key", autospec=True, return_value="I am a DKIM key") + +API_PYTHON_VERSION_INFO = """ +info { + pythonVersion +} +""" + + +def test_graphql_wrong_auth(wrong_auth_client): + """Test wrong auth""" + response = wrong_auth_client.get( + "/graphql", + json={ + "query": generate_system_query([API_PYTHON_VERSION_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + +API_GET_DOMAIN_INFO = """ +domainInfo { + domain + hostname + provider + requiredDnsRecords { + type + name + content + ttl + priority + } +} +""" + +def dns_record(type="A", name="test.tld", content=None, ttl=3600, priority=None): + if content is None: + if type == "A": + content = "157.90.247.192" + elif type == "AAAA": + content = "fe80::9400:ff:fef1:34ae" + return { + "type": type, + "name": name, + "content": content, + "ttl": ttl, + "priority": priority, + } + +def test_graphql_get_domain(authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on): + """Test get domain""" + response = authorized_client.get( + "/graphql", + json={ + "query": generate_system_query([API_GET_DOMAIN_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["system"]["domainInfo"]["domain"] == "test.tld" + assert response.json["data"]["system"]["domainInfo"]["hostname"] == "test-instance" + assert response.json["data"]["system"]["domainInfo"]["provider"] == "HETZNER" + assert response.json["data"]["system"]["domainInfo"]["requiredDnsRecords"] == [ + dns_record(), + dns_record(type="AAAA"), + dns_record(name="api.test.tld"), + 
dns_record(name="api.test.tld", type="AAAA"), + dns_record(name="cloud.test.tld"), + dns_record(name="cloud.test.tld", type="AAAA"), + dns_record(name="git.test.tld"), + dns_record(name="git.test.tld", type="AAAA"), + dns_record(name="meet.test.tld"), + dns_record(name="meet.test.tld", type="AAAA"), + dns_record(name="password.test.tld"), + dns_record(name="password.test.tld", type="AAAA"), + dns_record(name="social.test.tld"), + dns_record(name="social.test.tld", type="AAAA"), + dns_record(name="vpn.test.tld"), + dns_record(name="vpn.test.tld", type="AAAA"), + dns_record(name="test.tld", type="MX", content="test.tld", priority=10), + dns_record(name="_dmarc.test.tld", type="TXT", content="v=DMARC1; p=none", ttl=18000), + dns_record(name="test.tld", type="TXT", content="v=spf1 a mx ip4:157.90.247.192 -all", ttl=18000), + dns_record(name="selector._domainkey.test.tld", type="TXT", content="I am a DKIM key", ttl=18000), + ] + +API_GET_TIMEZONE = """ +settings { + timezone +} +""" + +def test_graphql_get_timezone_unauthorized(unauthorized_client, turned_on): + """Test get timezone""" + response = unauthorized_client.get( + "/graphql", + json={ + "query": generate_system_query([API_GET_TIMEZONE]), + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + +def test_graphql_get_timezone(authorized_client, turned_on): + """Test get timezone""" + response = authorized_client.get( + "/graphql", + json={ + "query": generate_system_query([API_GET_TIMEZONE]), + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" + +API_GET_PYTHON_VERSION = """ +info { + pythonVersion +} +""" diff --git a/tests/test_graphql/test_system/domain b/tests/test_graphql/test_system/domain new file mode 100644 index 0000000..3679d0d --- /dev/null +++ b/tests/test_graphql/test_system/domain @@ -0,0 +1 @@ +test-domain.tld \ No newline at end of file diff --git a/tests/test_graphql/test_system/no_values.json b/tests/test_graphql/test_system/no_values.json new file mode 100644 index 0000000..59e5e71 --- /dev/null +++ b/tests/test_graphql/test_system/no_values.json @@ -0,0 +1,50 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} \ No newline at end of file diff --git a/tests/test_graphql/test_system/turned_off.json b/tests/test_graphql/test_system/turned_off.json new file mode 100644 index 0000000..f451683 --- /dev/null +++ b/tests/test_graphql/test_system/turned_off.json @@ -0,0 +1,52 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + 
"cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": false, + "allowReboot": false + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} \ No newline at end of file diff --git a/tests/test_graphql/test_system/turned_on.json b/tests/test_graphql/test_system/turned_on.json new file mode 100644 index 0000000..337e47f --- /dev/null +++ b/tests/test_graphql/test_system/turned_on.json @@ -0,0 +1,52 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} \ No newline at end of file diff --git a/tests/test_graphql/test_system/undefined.json b/tests/test_graphql/test_system/undefined.json new file mode 100644 index 0000000..b67b296 --- /dev/null +++ b/tests/test_graphql/test_system/undefined.json @@ -0,0 +1,47 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} \ No newline at end of file diff --git a/tests/test_network_utils.py b/tests/test_network_utils.py new file mode 100644 index 0000000..b8f9c0d --- /dev/null +++ b/tests/test_network_utils.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import pytest + +from selfprivacy_api.utils.network import get_ip4, get_ip6 + +OUTPUT_STRING = b""" +2: eth0: mtu 1500 qdisc fq_codel state UP group default qlen 1000 + link/ether 96:00:00:f1:34:ae brd ff:ff:ff:ff:ff:ff + altname enp0s3 + altname ens3 + inet 157.90.247.192/32 brd 
157.90.247.192 scope global dynamic eth0 + valid_lft 46061sec preferred_lft 35261sec + inet6 fe80::9400:ff:fef1:34ae/64 scope link + valid_lft forever preferred_lft forever +""" + +FAILED_OUTPUT_STRING = b""" +Device "eth0" does not exist. +""" + +@pytest.fixture +def ip_process_mock(mocker): + mock = mocker.patch("subprocess.check_output", autospec=True, return_value=OUTPUT_STRING) + return mock + +def test_get_ip4(ip_process_mock): + """Test get IPv4 address""" + ip4 = get_ip4() + assert ip4 == "157.90.247.192" + +def test_get_ip6(ip_process_mock): + """Test get IPv6 address""" + ip6 = get_ip6() + assert ip6 == "fe80::9400:ff:fef1:34ae" From 5711cf66b053197f7fc70e4d0e1f29370d871bda Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 5 Jul 2022 15:11:41 +0300 Subject: [PATCH 22/39] Api fixes --- .../graphql/mutations/api_mutations.py | 137 +++++++++++++++++- .../graphql/mutations/mutation_interface.py | 4 + selfprivacy_api/services/__init__.py | 0 selfprivacy_api/utils/auth.py | 4 +- tests/test_graphql/test_api_devices.py | 99 ++++++------- tests/test_graphql/test_system.py | 8 +- 6 files changed, 193 insertions(+), 59 deletions(-) create mode 100644 selfprivacy_api/services/__init__.py diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index 6604f7e..cd5d53b 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -5,27 +5,50 @@ import typing from flask import request import strawberry from selfprivacy_api.graphql import IsAuthenticated -from selfprivacy_api.graphql.mutations.mutation_interface import MutationReturnInterface -from selfprivacy_api.utils import parse_date +from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn, MutationReturnInterface from selfprivacy_api.utils.auth import ( - generate_recovery_token + delete_new_device_auth_token, + delete_token, + generate_recovery_token, + get_new_device_auth_token, + is_token_name_exists, + is_token_name_pair_valid, + refresh_token, + use_mnemonic_recoverery_token, + use_new_device_auth_token ) @strawberry.type class ApiKeyMutationReturn(MutationReturnInterface): key: typing.Optional[str] +@strawberry.type +class DeviceApiTokenMutationReturn(MutationReturnInterface): + token: typing.Optional[str] + @strawberry.input class RecoveryKeyLimitsInput: """Recovery key limits input""" expiration_date: typing.Optional[datetime.datetime] uses: typing.Optional[int] +@strawberry.input +class UseRecoveryKeyInput: + """Use recovery key input""" + key: str + deviceName: str + +@strawberry.input +class UseNewDeviceKeyInput: + """Use new device key input""" + key: str + deviceName: str + @strawberry.type class ApiMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) - def getNewRecoveryApiKey(self, limits: RecoveryKeyLimitsInput) -> ApiKeyMutationReturn: + def get_new_recovery_api_key(self, limits: RecoveryKeyLimitsInput) -> ApiKeyMutationReturn: """Generate recovery key""" if limits.expiration_date is not None: if limits.expiration_date < datetime.datetime.now(): @@ -50,3 +73,109 @@ class ApiMutations: code=200, key=key, ) + + @strawberry.mutation() + def use_recovery_api_key(self, input: UseRecoveryKeyInput) -> DeviceApiTokenMutationReturn: + """Use recovery key""" + token = use_mnemonic_recoverery_token(input.key, input.deviceName) + if token is None: + return DeviceApiTokenMutationReturn( + success=False, + message="Recovery key not found", + code=404, + token=None, + ) 
+ return DeviceApiTokenMutationReturn( + success=True, + message="Recovery key used", + code=200, + token=None, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def refresh_device_api_token(self) -> DeviceApiTokenMutationReturn: + """Refresh device api token""" + token = request.headers.get("Authorization").split(" ")[1] if request.headers.get("Authorization") is not None else None + if token is None: + return DeviceApiTokenMutationReturn( + success=False, + message="Token not found", + code=404, + token=None, + ) + new_token = refresh_token(token) + if new_token is None: + return DeviceApiTokenMutationReturn( + success=False, + message="Token not found", + code=404, + token=None, + ) + return DeviceApiTokenMutationReturn( + success=True, + message="Token refreshed", + code=200, + token=new_token, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def delete_device_api_token(self, device: str) -> GenericMutationReturn: + """Delete device api token""" + self_token = request.headers.get("Authorization").split(" ")[1] if request.headers.get("Authorization") is not None else None + if self_token is not None and is_token_name_pair_valid(device, self_token): + return GenericMutationReturn( + success=False, + message="Cannot delete caller's token", + code=400, + ) + if not is_token_name_exists(device): + return GenericMutationReturn( + success=False, + message="Token not found", + code=404, + ) + delete_token(device) + return GenericMutationReturn( + success=True, + message="Token deleted", + code=200, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def get_new_device_api_key(self) -> ApiKeyMutationReturn: + """Generate device api key""" + key = get_new_device_auth_token() + return ApiKeyMutationReturn( + success=True, + message="Device api key generated", + code=200, + key=key, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def invalidate_new_device_api_key(self) -> GenericMutationReturn: + """Invalidate new device api key""" + delete_new_device_auth_token() + return GenericMutationReturn( + success=True, + message="New device key deleted", + code=200, + ) + + @strawberry.mutation() + def authorize_with_new_device_api_key(self, input: UseNewDeviceKeyInput) -> DeviceApiTokenMutationReturn: + """Authorize with new device api key""" + token = use_new_device_auth_token(input.key, input.deviceName) + if token is None: + return DeviceApiTokenMutationReturn( + success=False, + message="Token not found", + code=404, + token=None, + ) + return DeviceApiTokenMutationReturn( + success=True, + message="Token used", + code=200, + token=token, + ) diff --git a/selfprivacy_api/graphql/mutations/mutation_interface.py b/selfprivacy_api/graphql/mutations/mutation_interface.py index f5c212b..1cf310c 100644 --- a/selfprivacy_api/graphql/mutations/mutation_interface.py +++ b/selfprivacy_api/graphql/mutations/mutation_interface.py @@ -5,3 +5,7 @@ class MutationReturnInterface: success: bool message: str code: int + +@strawberry.type +class GenericMutationReturn(MutationReturnInterface): + pass diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 8810149..30fa7de 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -133,7 +133,7 @@ def delete_token(token_name): tokens["tokens"] = [t for t in tokens["tokens"] if t["name"] != token_name] -def 
refresh_token(token): +def refresh_token(token: str) -> typing.Optional[str]: """Change the token field of the existing token""" new_token = _generate_token() with WriteUserData(UserDataFiles.TOKENS) as tokens: @@ -259,7 +259,7 @@ def use_mnemonic_recoverery_token(mnemonic_phrase, name): return token -def get_new_device_auth_token(): +def get_new_device_auth_token() -> str: """Generate a new device auth token which is valid for 10 minutes and return a mnemonic phrase representation Write token to the new_device of the tokens.json file. diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 0406371..e5bf7ad 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -88,7 +88,7 @@ def test_graphql_delete_token(authorized_client, tokens_file): json={ "query": DELETE_TOKEN_MUTATION, "variables": { - "device": "test_token", + "device": "test_token2", }, }, ) @@ -101,7 +101,7 @@ def test_graphql_delete_token(authorized_client, tokens_file): "tokens": [ { "token": "TEST_TOKEN", - "name": "test_token2", + "name": "test_token", "date": "2022-01-14 08:31:10.789314", } ] @@ -147,6 +147,7 @@ mutation RefreshToken { success message code + token } } """ @@ -173,14 +174,10 @@ def test_graphql_refresh_token(authorized_client, tokens_file): assert response.json["data"]["refreshDeviceApiToken"]["success"] is True assert response.json["data"]["refreshDeviceApiToken"]["message"] is not None assert response.json["data"]["refreshDeviceApiToken"]["code"] == 200 - assert read_json(tokens_file) == { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", - } - ] + assert read_json(tokens_file)["tokens"][0] == { + "token": response.json["data"]["refreshDeviceApiToken"]["token"], + "name": "test_token", + "date": "2022-01-14 08:31:10.789314", } NEW_DEVICE_KEY_MUTATION = """ @@ -213,11 +210,11 @@ def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiToken"]["success"] is True - assert response.json["data"]["getNewDeviceApiToken"]["message"] is not None - assert response.json["data"]["getNewDeviceApiToken"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiToken"]["key"].split(" ").__len__() == 12 - token = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiToken"]["key"]).hex() + assert response.json["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 + assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + token = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token INVALIDATE_NEW_DEVICE_KEY_MUTATION = """ @@ -252,11 +249,11 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiToken"]["success"] is True - assert response.json["data"]["getNewDeviceApiToken"]["message"] is not None - assert response.json["data"]["getNewDeviceApiToken"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiToken"]["key"].split(" ").__len__() == 12 - token = 
Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiToken"]["key"]).hex() + assert response.json["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 + assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + token = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = authorized_client.post( "/graphql", @@ -273,7 +270,7 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """ mutation AuthorizeWithNewDeviceKey($input: UseNewDeviceKeyInput!) { - authorizeWithNewDeviceApiKey(inupt: $input) { + authorizeWithNewDeviceApiKey(input: $input) { success message code @@ -291,20 +288,21 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiToken"]["success"] is True - assert response.json["data"]["getNewDeviceApiToken"]["message"] is not None - assert response.json["data"]["getNewDeviceApiToken"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiToken"]["key"].split(" ").__len__() == 12 - key = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiToken"]["key"]).hex() + assert response.json["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json["data"]["getNewDeviceApiKey"]["key"] + assert mnemonic_key.split(" ").__len__() == 12 + key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key response = client.post( "/graphql", json={ "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, "variables": { - "inupt": { - "key": key, - "deviceName": "test_token", + "input": { + "key": mnemonic_key, + "deviceName": "new_device", } }, }, @@ -324,7 +322,7 @@ def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): json={ "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, "variables": { - "inupt": { + "input": { "key": "invalid_token", "deviceName": "test_token", } @@ -347,20 +345,21 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiToken"]["success"] is True - assert response.json["data"]["getNewDeviceApiToken"]["message"] is not None - assert response.json["data"]["getNewDeviceApiToken"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiToken"]["key"].split(" ").__len__() == 12 - key = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiToken"]["key"]).hex() + assert response.json["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json["data"]["getNewDeviceApiKey"]["key"] + assert mnemonic_key.split(" ").__len__() == 12 + key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert 
read_json(tokens_file)["new_device"]["token"] == key response = client.post( "/graphql", json={ "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, "variables": { - "inupt": { - "key": key, - "deviceName": "test_token", + "input": { + "key": mnemonic_key, + "deviceName": "new_token", } }, }, @@ -370,14 +369,16 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is True assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert read_json(tokens_file)["tokens"][2]["token"] == response.json["data"]["authorizeWithNewDeviceApiKey"]["token"] + assert read_json(tokens_file)["tokens"][2]["name"] == "new_token" + response = client.post( "/graphql", json={ "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, "variables": { - "inupt": { - "key": key, + "input": { + "key": mnemonic_key, "deviceName": "test_token2", } }, @@ -388,7 +389,7 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert read_json(tokens_file)["tokens"].__len__() == 3 def test_graphql_get_and_authorize_key_after_12_minutes(client, authorized_client, tokens_file): response = authorized_client.post( @@ -399,11 +400,11 @@ def test_graphql_get_and_authorize_key_after_12_minutes(client, authorized_clien ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiToken"]["success"] is True - assert response.json["data"]["getNewDeviceApiToken"]["message"] is not None - assert response.json["data"]["getNewDeviceApiToken"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiToken"]["key"].split(" ").__len__() == 12 - key = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiToken"]["key"]).hex() + assert response.json["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 + assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + key = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]).hex() assert read_json(tokens_file)["new_device"]["token"] == key file_data = read_json(tokens_file) @@ -417,7 +418,7 @@ def test_graphql_get_and_authorize_key_after_12_minutes(client, authorized_clien json={ "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, "variables": { - "inupt": { + "input": { "key": key, "deviceName": "test_token", } @@ -436,7 +437,7 @@ def test_graphql_authorize_without_token(client, tokens_file): json={ "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, "variables": { - "inupt": { + "input": { "deviceName": "test_token", } }, diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index 661a68b..308c981 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -96,12 +96,12 @@ def mock_subprocess_check_output(mocker): @pytest.fixture def mock_get_ip4(mocker): - mock = 
mocker.patch("selfprivacy_api.utils.get_ip4", autospec=True, return_value="157.90.247.192") + mock = mocker.patch("selfprivacy_api.utils.network.get_ip4", autospec=True, return_value="157.90.247.192") return mock @pytest.fixture def mock_get_ip6(mocker): - mock = mocker.patch("selfprivacy_api.utils.get_ip6", autospec=True, return_value="fe80::9400:ff:fef1:34ae") + mock = mocker.patch("selfprivacy_api.utils.network.get_ip6", autospec=True, return_value="fe80::9400:ff:fef1:34ae") return mock @pytest.fixture @@ -197,9 +197,9 @@ settings { } """ -def test_graphql_get_timezone_unauthorized(unauthorized_client, turned_on): +def test_graphql_get_timezone_unauthorized(client, turned_on): """Test get timezone""" - response = unauthorized_client.get( + response = client.get( "/graphql", json={ "query": generate_system_query([API_GET_TIMEZONE]), From e5405dfc6bf7943bd3c6d73d521e575ef9f1ca55 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 5 Jul 2022 15:54:21 +0300 Subject: [PATCH 23/39] linting --- tests/conftest.py | 11 ++++++ tests/test_graphql/test_api_devices.py | 2 - tests/test_graphql/test_system.py | 53 +++++++++++++++----------- 3 files changed, 41 insertions(+), 25 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 7a6fdea..9acdd24 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,6 @@ +"""Tests configuration.""" +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument import pytest from flask import testing from selfprivacy_api.app import create_app @@ -5,6 +8,7 @@ from selfprivacy_api.app import create_app @pytest.fixture def tokens_file(mocker, shared_datadir): + """Mock tokens file.""" mock = mocker.patch( "selfprivacy_api.utils.TOKENS_FILE", shared_datadir / "tokens.json" ) @@ -13,6 +17,7 @@ def tokens_file(mocker, shared_datadir): @pytest.fixture def app(): + """Flask application.""" app = create_app( { "ENABLE_SWAGGER": "1", @@ -24,10 +29,12 @@ def app(): @pytest.fixture def client(app, tokens_file): + """Flask unauthorized test client.""" return app.test_client() class AuthorizedClient(testing.FlaskClient): + """Flask authorized test client.""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.token = "TEST_TOKEN" @@ -40,6 +47,7 @@ class AuthorizedClient(testing.FlaskClient): class WrongAuthClient(testing.FlaskClient): + """Flask client with wrong token""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.token = "WRONG_TOKEN" @@ -53,16 +61,19 @@ class WrongAuthClient(testing.FlaskClient): @pytest.fixture def authorized_client(app, tokens_file): + """Authorized test client fixture.""" app.test_client_class = AuthorizedClient return app.test_client() @pytest.fixture def wrong_auth_client(app, tokens_file): + """Wrong token test client fixture.""" app.test_client_class = WrongAuthClient return app.test_client() @pytest.fixture def runner(app, tokens_file): + """Flask test runner.""" return app.test_cli_runner() diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index e5bf7ad..37cb2d2 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -2,8 +2,6 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring import datetime -import json -import pytest from mnemonic import Mnemonic from tests.common import generate_api_query, read_json, write_json diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index 308c981..b7d6862 100644 --- 
a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -127,7 +127,7 @@ def test_graphql_wrong_auth(wrong_auth_client): assert response.json.get("data") is None API_GET_DOMAIN_INFO = """ -domainInfo { +domainInfo() { domain hostname provider @@ -155,6 +155,12 @@ def dns_record(type="A", name="test.tld", content=None, ttl=3600, priority=None) "priority": priority, } +def is_dns_record_in_array(records, dns_record) -> bool: + for record in records: + if record["type"] == dns_record["type"] and record["name"] == dns_record["name"] and record["content"] == dns_record["content"] and record["ttl"] == dns_record["ttl"] and record["priority"] == dns_record["priority"]: + return True + return False + def test_graphql_get_domain(authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on): """Test get domain""" response = authorized_client.get( @@ -168,28 +174,29 @@ def test_graphql_get_domain(authorized_client, domain_file, mock_get_ip4, mock_g assert response.json["data"]["system"]["domainInfo"]["domain"] == "test.tld" assert response.json["data"]["system"]["domainInfo"]["hostname"] == "test-instance" assert response.json["data"]["system"]["domainInfo"]["provider"] == "HETZNER" - assert response.json["data"]["system"]["domainInfo"]["requiredDnsRecords"] == [ - dns_record(), - dns_record(type="AAAA"), - dns_record(name="api.test.tld"), - dns_record(name="api.test.tld", type="AAAA"), - dns_record(name="cloud.test.tld"), - dns_record(name="cloud.test.tld", type="AAAA"), - dns_record(name="git.test.tld"), - dns_record(name="git.test.tld", type="AAAA"), - dns_record(name="meet.test.tld"), - dns_record(name="meet.test.tld", type="AAAA"), - dns_record(name="password.test.tld"), - dns_record(name="password.test.tld", type="AAAA"), - dns_record(name="social.test.tld"), - dns_record(name="social.test.tld", type="AAAA"), - dns_record(name="vpn.test.tld"), - dns_record(name="vpn.test.tld", type="AAAA"), - dns_record(name="test.tld", type="MX", content="test.tld", priority=10), - dns_record(name="_dmarc.test.tld", type="TXT", content="v=DMARC1; p=none", ttl=18000), - dns_record(name="test.tld", type="TXT", content="v=spf1 a mx ip4:157.90.247.192 -all", ttl=18000), - dns_record(name="selector._domainkey.test.tld", type="TXT", content="I am a DKIM key", ttl=18000), - ] + dns_records = response.json["data"]["system"]["domainInfo"]["requiredDnsRecords"] + assert is_dns_record_in_array(dns_records, dns_record()) + assert is_dns_record_in_array(dns_records, dns_record(type="AAAA")) + assert is_dns_record_in_array(dns_records, dns_record(name="api.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="api.test.tld", type="AAAA")) + assert is_dns_record_in_array(dns_records, dns_record(name="cloud.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="cloud.test.tld", type="AAAA")) + assert is_dns_record_in_array(dns_records, dns_record(name="git.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="git.test.tld", type="AAAA")) + assert is_dns_record_in_array(dns_records, dns_record(name="meet.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="meet.test.tld", type="AAAA")) + assert is_dns_record_in_array(dns_records, dns_record(name="password.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="password.test.tld", type="AAAA")) + assert is_dns_record_in_array(dns_records, dns_record(name="social.test.tld")) + assert is_dns_record_in_array(dns_records, 
dns_record(name="social.test.tld", type="AAAA")) + assert is_dns_record_in_array(dns_records, dns_record(name="vpn.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="vpn.test.tld", type="AAAA")) + assert is_dns_record_in_array(dns_records, dns_record(name="test.tld", type="MX", content="test.tld", priority=10)) + assert is_dns_record_in_array(dns_records, dns_record(name="_dmarc.test.tld", type="TXT", content="v=DMARC1; p=none", ttl=18000)) + assert is_dns_record_in_array(dns_records, dns_record(name="test.tld", type="TXT", content="v=spf1 a mx ip4:157.90.247.192 -all", ttl=18000)) + assert is_dns_record_in_array(dns_records, dns_record(name="selector._domainkey.test.tld", type="TXT", content="I am a DKIM key", ttl=18000)) + +def test_graphql API_GET_TIMEZONE = """ settings { From 63f3b2f4d1717280deda8fb0d8f2f5b045d99d1b Mon Sep 17 00:00:00 2001 From: Inex Code Date: Thu, 7 Jul 2022 14:49:04 +0300 Subject: [PATCH 24/39] Update tests for detlaff --- tests/test_graphql/test_system.py | 63 ++++++++++++++++++++++++++----- tests/test_system.py | 5 ++- 2 files changed, 56 insertions(+), 12 deletions(-) diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index b7d6862..7ed4d6a 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -54,14 +54,15 @@ class ProcessMock: self.args = args self.kwargs = kwargs - def communicate(): + def communicate(self): return (b"", None) returncode = 0 class BrokenServiceMock(ProcessMock): - def communicate(): + """Mock subprocess.Popen for broken service""" + def communicate(self): return (b"Testing error", None) returncode = 3 @@ -114,7 +115,6 @@ info { } """ - def test_graphql_wrong_auth(wrong_auth_client): """Test wrong auth""" response = wrong_auth_client.get( @@ -127,7 +127,7 @@ def test_graphql_wrong_auth(wrong_auth_client): assert response.json.get("data") is None API_GET_DOMAIN_INFO = """ -domainInfo() { +domainInfo { domain hostname provider @@ -196,8 +196,6 @@ def test_graphql_get_domain(authorized_client, domain_file, mock_get_ip4, mock_g assert is_dns_record_in_array(dns_records, dns_record(name="test.tld", type="TXT", content="v=spf1 a mx ip4:157.90.247.192 -all", ttl=18000)) assert is_dns_record_in_array(dns_records, dns_record(name="selector._domainkey.test.tld", type="TXT", content="I am a DKIM key", ttl=18000)) -def test_graphql - API_GET_TIMEZONE = """ settings { timezone @@ -205,7 +203,7 @@ settings { """ def test_graphql_get_timezone_unauthorized(client, turned_on): - """Test get timezone""" + """Test get timezone without auth""" response = client.get( "/graphql", json={ @@ -227,8 +225,53 @@ def test_graphql_get_timezone(authorized_client, turned_on): assert response.json.get("data") is not None assert response.json["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" -API_GET_PYTHON_VERSION = """ -info { - pythonVersion +def test_graphql_get_timezone_on_undefined(authorized_client, undefiened_config): + """Test get timezone when none is defined in config""" + response = authorized_client.get( + "/graphql", + json={ + "query": generate_system_query([API_GET_TIMEZONE]), + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["system"]["settings"]["timezone"] == "Europe/Uzhgorod" + + +API_CHANGE_TIMEZONE_MUTATION = """ +mutation changeTimezone($timezone: String!) 
{ + changeTimezone(timezone: $timezone) { + success + message + code + timezone + } +} +""" + +def test_graphql_change_timezone_unauthorized(client, turned_on): + """Test change timezone without auth""" + response = client.post( + "/graphql", + json={ + "query": API_CHANGE_TIMEZONE_MUTATION, + "variables": { + "timezone": "Europe/Moscow", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + + +API_CHANGE_SERVER_SETTINGS = """ +mutation changeServerSettings($settings: SystemSettingsInput!) { + changeAutoUpgradeSettings(settings: $settings) { + success + message + code + enableAutoUpgrade + allowReboot + } } """ diff --git a/tests/test_system.py b/tests/test_system.py index 9ed2617..ac108aa 100644 --- a/tests/test_system.py +++ b/tests/test_system.py @@ -60,14 +60,15 @@ class ProcessMock: self.args = args self.kwargs = kwargs - def communicate(): + def communicate(self): return (b"", None) returncode = 0 class BrokenServiceMock(ProcessMock): - def communicate(): + """Mock subprocess.Popen""" + def communicate(self): return (b"Testing error", None) returncode = 3 From 9bd2896db874259bf3d99bc6e7e05dedac81b631 Mon Sep 17 00:00:00 2001 From: def Date: Thu, 7 Jul 2022 15:53:19 +0200 Subject: [PATCH 25/39] fix recovery tests --- .../graphql/mutations/api_mutations.py | 42 +++- .../graphql/mutations/mutation_interface.py | 2 + selfprivacy_api/graphql/queries/system.py | 14 +- selfprivacy_api/graphql/schema.py | 3 + .../resources/services/mailserver.py | 2 +- selfprivacy_api/utils/__init__.py | 1 + selfprivacy_api/utils/auth.py | 4 +- selfprivacy_api/utils/network.py | 10 +- tests/common.py | 4 + tests/conftest.py | 2 + tests/test_graphql/test_api.py | 15 +- tests/test_graphql/test_api_devices.py | 103 +++++--- tests/test_graphql/test_api_recovery.py | 230 ++++++++++-------- tests/test_graphql/test_api_version.py | 9 +- tests/test_graphql/test_system.py | 112 +++++++-- tests/test_network_utils.py | 7 +- tests/test_system.py | 5 +- 17 files changed, 372 insertions(+), 193 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index cd5d53b..d516049 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -5,7 +5,10 @@ import typing from flask import request import strawberry from selfprivacy_api.graphql import IsAuthenticated -from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn, MutationReturnInterface +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericMutationReturn, + MutationReturnInterface, +) from selfprivacy_api.utils.auth import ( delete_new_device_auth_token, @@ -16,39 +19,50 @@ from selfprivacy_api.utils.auth import ( is_token_name_pair_valid, refresh_token, use_mnemonic_recoverery_token, - use_new_device_auth_token + use_new_device_auth_token, ) + @strawberry.type class ApiKeyMutationReturn(MutationReturnInterface): key: typing.Optional[str] + @strawberry.type class DeviceApiTokenMutationReturn(MutationReturnInterface): token: typing.Optional[str] + @strawberry.input class RecoveryKeyLimitsInput: """Recovery key limits input""" + expiration_date: typing.Optional[datetime.datetime] uses: typing.Optional[int] + @strawberry.input class UseRecoveryKeyInput: """Use recovery key input""" + key: str deviceName: str + @strawberry.input class UseNewDeviceKeyInput: """Use new device key input""" + key: str deviceName: str + @strawberry.type class 
ApiMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) - def get_new_recovery_api_key(self, limits: RecoveryKeyLimitsInput) -> ApiKeyMutationReturn: + def get_new_recovery_api_key( + self, limits: RecoveryKeyLimitsInput + ) -> ApiKeyMutationReturn: """Generate recovery key""" if limits.expiration_date is not None: if limits.expiration_date < datetime.datetime.now(): @@ -75,7 +89,9 @@ class ApiMutations: ) @strawberry.mutation() - def use_recovery_api_key(self, input: UseRecoveryKeyInput) -> DeviceApiTokenMutationReturn: + def use_recovery_api_key( + self, input: UseRecoveryKeyInput + ) -> DeviceApiTokenMutationReturn: """Use recovery key""" token = use_mnemonic_recoverery_token(input.key, input.deviceName) if token is None: @@ -89,13 +105,17 @@ class ApiMutations: success=True, message="Recovery key used", code=200, - token=None, + token=token, ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def refresh_device_api_token(self) -> DeviceApiTokenMutationReturn: """Refresh device api token""" - token = request.headers.get("Authorization").split(" ")[1] if request.headers.get("Authorization") is not None else None + token = ( + request.headers.get("Authorization").split(" ")[1] + if request.headers.get("Authorization") is not None + else None + ) if token is None: return DeviceApiTokenMutationReturn( success=False, @@ -121,7 +141,11 @@ class ApiMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def delete_device_api_token(self, device: str) -> GenericMutationReturn: """Delete device api token""" - self_token = request.headers.get("Authorization").split(" ")[1] if request.headers.get("Authorization") is not None else None + self_token = ( + request.headers.get("Authorization").split(" ")[1] + if request.headers.get("Authorization") is not None + else None + ) if self_token is not None and is_token_name_pair_valid(device, self_token): return GenericMutationReturn( success=False, @@ -163,7 +187,9 @@ class ApiMutations: ) @strawberry.mutation() - def authorize_with_new_device_api_key(self, input: UseNewDeviceKeyInput) -> DeviceApiTokenMutationReturn: + def authorize_with_new_device_api_key( + self, input: UseNewDeviceKeyInput + ) -> DeviceApiTokenMutationReturn: """Authorize with new device api key""" token = use_new_device_auth_token(input.key, input.deviceName) if token is None: diff --git a/selfprivacy_api/graphql/mutations/mutation_interface.py b/selfprivacy_api/graphql/mutations/mutation_interface.py index 1cf310c..32146fc 100644 --- a/selfprivacy_api/graphql/mutations/mutation_interface.py +++ b/selfprivacy_api/graphql/mutations/mutation_interface.py @@ -1,11 +1,13 @@ import strawberry + @strawberry.interface class MutationReturnInterface: success: bool message: str code: int + @strawberry.type class GenericMutationReturn(MutationReturnInterface): pass diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index 06405a4..cadf074 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -150,12 +150,14 @@ class System: Base system type which represents common system status """ - status: Alert = strawberry.field(resolver=lambda: Alert( - severity=Severity.INFO, - title="Test message", - message="Test message", - timestamp=None - )) + status: Alert = strawberry.field( + resolver=lambda: Alert( + severity=Severity.INFO, + title="Test message", + message="Test message", + timestamp=None, + ) + ) domain: SystemDomainInfo = 
strawberry.field(resolver=get_system_domain_info) settings: SystemSettings = SystemSettings() info: SystemInfo = SystemInfo() diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 71c9c18..5aba9b3 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -23,9 +23,12 @@ class Query: """API access status""" return Api() + @strawberry.type class Mutation(ApiMutations): """Root schema for mutations""" + pass + schema = strawberry.Schema(query=Query, mutation=Mutation) diff --git a/selfprivacy_api/resources/services/mailserver.py b/selfprivacy_api/resources/services/mailserver.py index bf42c7d..01fa574 100644 --- a/selfprivacy_api/resources/services/mailserver.py +++ b/selfprivacy_api/resources/services/mailserver.py @@ -33,7 +33,7 @@ class DKIMKey(Resource): dkim = get_dkim_key(domain) if dkim is None: - return "DKIM file not found", 404 + return "DKIM file not found", 404 dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") return dkim diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index adb0409..81dc354 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -133,6 +133,7 @@ def parse_date(date_str: str) -> datetime.datetime: else datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f") ) + def get_dkim_key(domain): """Get DKIM key from /var/dkim/.selector.txt""" if os.path.exists("/var/dkim/" + domain + ".selector.txt"): diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 30fa7de..6fff698 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -191,7 +191,9 @@ def _get_recovery_token(): return tokens["recovery_token"]["token"] -def generate_recovery_token(expiration: typing.Optional[datetime], uses_left: typing.Optional[int]) -> str: +def generate_recovery_token( + expiration: typing.Optional[datetime], uses_left: typing.Optional[int] +) -> str: """Generate a 24 bytes recovery token and return a mneomnic word list. Write a string representation of the recovery token to the tokens.json file. 
""" diff --git a/selfprivacy_api/utils/network.py b/selfprivacy_api/utils/network.py index 1aa4644..5081f0e 100644 --- a/selfprivacy_api/utils/network.py +++ b/selfprivacy_api/utils/network.py @@ -3,19 +3,25 @@ import subprocess import re + def get_ip4(): """Get IPv4 address""" try: - ip4 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode("utf-8") + ip4 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( + "utf-8" + ) ip4 = re.search(r"inet (\d+\.\d+\.\d+\.\d+)\/\d+", ip4) except subprocess.CalledProcessError: ip4 = None return ip4.group(1) if ip4 else None + def get_ip6(): """Get IPv6 address""" try: - ip6 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode("utf-8") + ip6 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( + "utf-8" + ) ip6 = re.search(r"inet6 (\S+)\/\d+", ip6) except subprocess.CalledProcessError: ip6 = None diff --git a/tests/common.py b/tests/common.py index d3dda69..01975e8 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1,6 +1,7 @@ import json from mnemonic import Mnemonic + def read_json(file_path): with open(file_path, "r", encoding="utf-8") as file: return json.load(file) @@ -10,11 +11,14 @@ def write_json(file_path, data): with open(file_path, "w", encoding="utf-8") as file: json.dump(data, file, indent=4) + def generate_api_query(query_array): return "query TestApi {\n api {" + "\n".join(query_array) + "}\n}" + def generate_system_query(query_array): return "query TestSystem {\n system {" + "\n".join(query_array) + "}\n}" + def mnemonic_to_hex(mnemonic): return Mnemonic(language="english").to_entropy(mnemonic).hex() diff --git a/tests/conftest.py b/tests/conftest.py index 9acdd24..fb31456 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -35,6 +35,7 @@ def client(app, tokens_file): class AuthorizedClient(testing.FlaskClient): """Flask authorized test client.""" + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.token = "TEST_TOKEN" @@ -48,6 +49,7 @@ class AuthorizedClient(testing.FlaskClient): class WrongAuthClient(testing.FlaskClient): """Flask client with wrong token""" + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.token = "WRONG_TOKEN" diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py index 81b6175..031e052 100644 --- a/tests/test_graphql/test_api.py +++ b/tests/test_graphql/test_api.py @@ -23,11 +23,14 @@ TOKENS_FILE_CONTETS = { ] } + def test_graphql_get_entire_api_data(authorized_client, tokens_file): response = authorized_client.get( "/graphql", json={ - "query": generate_api_query([API_VERSION_QUERY, API_DEVICES_QUERY, API_RECOVERY_QUERY]) + "query": generate_api_query( + [API_VERSION_QUERY, API_DEVICES_QUERY, API_RECOVERY_QUERY] + ) }, ) assert response.status_code == 200 @@ -35,10 +38,16 @@ def test_graphql_get_entire_api_data(authorized_client, tokens_file): assert "version" in response.get_json()["data"]["api"] assert response.json["data"]["api"]["devices"] is not None assert len(response.json["data"]["api"]["devices"]) == 2 - assert response.json["data"]["api"]["devices"][0]["creationDate"] == "2022-01-14T08:31:10.789314" + assert ( + response.json["data"]["api"]["devices"][0]["creationDate"] + == "2022-01-14T08:31:10.789314" + ) assert response.json["data"]["api"]["devices"][0]["isCaller"] is True assert response.json["data"]["api"]["devices"][0]["name"] == "test_token" - assert response.json["data"]["api"]["devices"][1]["creationDate"] == "2022-01-14T08:31:10.789314" 
+ assert ( + response.json["data"]["api"]["devices"][1]["creationDate"] + == "2022-01-14T08:31:10.789314" + ) assert response.json["data"]["api"]["devices"][1]["isCaller"] is False assert response.json["data"]["api"]["devices"][1]["name"] == "test_token2" assert response.json["data"]["api"]["recoveryKey"] is not None diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 37cb2d2..627d06a 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -29,34 +29,39 @@ devices { } """ + def test_graphql_tokens_info(authorized_client, tokens_file): response = authorized_client.get( "/graphql", - json={ - "query": generate_api_query([API_DEVICES_QUERY]) - }, + json={"query": generate_api_query([API_DEVICES_QUERY])}, ) assert response.status_code == 200 assert response.json.get("data") is not None assert response.json["data"]["api"]["devices"] is not None assert len(response.json["data"]["api"]["devices"]) == 2 - assert response.json["data"]["api"]["devices"][0]["creationDate"] == "2022-01-14T08:31:10.789314" + assert ( + response.json["data"]["api"]["devices"][0]["creationDate"] + == "2022-01-14T08:31:10.789314" + ) assert response.json["data"]["api"]["devices"][0]["isCaller"] is True assert response.json["data"]["api"]["devices"][0]["name"] == "test_token" - assert response.json["data"]["api"]["devices"][1]["creationDate"] == "2022-01-14T08:31:10.789314" + assert ( + response.json["data"]["api"]["devices"][1]["creationDate"] + == "2022-01-14T08:31:10.789314" + ) assert response.json["data"]["api"]["devices"][1]["isCaller"] is False assert response.json["data"]["api"]["devices"][1]["name"] == "test_token2" + def test_graphql_tokens_info_unauthorized(client, tokens_file): response = client.get( "/graphql", - json={ - "query": generate_api_query([API_DEVICES_QUERY]) - }, + json={"query": generate_api_query([API_DEVICES_QUERY])}, ) assert response.status_code == 200 assert response.json["data"] is None + DELETE_TOKEN_MUTATION = """ mutation DeleteToken($device: String!) { deleteDeviceApiToken(device: $device) { @@ -67,6 +72,7 @@ mutation DeleteToken($device: String!) 
{ } """ + def test_graphql_delete_token_unauthorized(client, tokens_file): response = client.post( "/graphql", @@ -80,6 +86,7 @@ def test_graphql_delete_token_unauthorized(client, tokens_file): assert response.status_code == 200 assert response.json["data"] is None + def test_graphql_delete_token(authorized_client, tokens_file): response = authorized_client.post( "/graphql", @@ -105,6 +112,7 @@ def test_graphql_delete_token(authorized_client, tokens_file): ] } + def test_graphql_delete_self_token(authorized_client, tokens_file): response = authorized_client.post( "/graphql", @@ -122,6 +130,7 @@ def test_graphql_delete_self_token(authorized_client, tokens_file): assert response.json["data"]["deleteDeviceApiToken"]["code"] == 400 assert read_json(tokens_file) == TOKENS_FILE_CONTETS + def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): response = authorized_client.post( "/graphql", @@ -139,6 +148,7 @@ def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): assert response.json["data"]["deleteDeviceApiToken"]["code"] == 404 assert read_json(tokens_file) == TOKENS_FILE_CONTETS + REFRESH_TOKEN_MUTATION = """ mutation RefreshToken { refreshDeviceApiToken { @@ -150,22 +160,20 @@ mutation RefreshToken { } """ + def test_graphql_refresh_token_unauthorized(client, tokens_file): response = client.post( "/graphql", - json={ - "query": REFRESH_TOKEN_MUTATION - }, + json={"query": REFRESH_TOKEN_MUTATION}, ) assert response.status_code == 200 assert response.json["data"] is None + def test_graphql_refresh_token(authorized_client, tokens_file): response = authorized_client.post( "/graphql", - json={ - "query": REFRESH_TOKEN_MUTATION - }, + json={"query": REFRESH_TOKEN_MUTATION}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -178,6 +186,7 @@ def test_graphql_refresh_token(authorized_client, tokens_file): "date": "2022-01-14 08:31:10.789314", } + NEW_DEVICE_KEY_MUTATION = """ mutation NewDeviceKey { getNewDeviceApiKey { @@ -189,22 +198,20 @@ mutation NewDeviceKey { } """ + def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): response = client.post( "/graphql", - json={ - "query": NEW_DEVICE_KEY_MUTATION - }, + json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json["data"] is None + def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): response = authorized_client.post( "/graphql", - json={ - "query": NEW_DEVICE_KEY_MUTATION - }, + json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -212,9 +219,14 @@ def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 - token = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]).hex() + token = ( + Mnemonic(language="english") + .to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .hex() + ) assert read_json(tokens_file)["new_device"]["token"] == token + INVALIDATE_NEW_DEVICE_KEY_MUTATION = """ mutation InvalidateNewDeviceKey { invalidateNewDeviceApiKey { @@ -225,6 +237,7 @@ mutation InvalidateNewDeviceKey { } """ + def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): response = client.post( "/graphql", @@ -238,12 
+251,11 @@ def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): assert response.status_code == 200 assert response.json["data"] is None + def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): response = authorized_client.post( "/graphql", - json={ - "query": NEW_DEVICE_KEY_MUTATION - }, + json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -251,13 +263,15 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 - token = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]).hex() + token = ( + Mnemonic(language="english") + .to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .hex() + ) assert read_json(tokens_file)["new_device"]["token"] == token response = authorized_client.post( "/graphql", - json={ - "query": INVALIDATE_NEW_DEVICE_KEY_MUTATION - }, + json={"query": INVALIDATE_NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -266,6 +280,7 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): assert response.json["data"]["invalidateNewDeviceApiKey"]["code"] == 200 assert read_json(tokens_file) == TOKENS_FILE_CONTETS + AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """ mutation AuthorizeWithNewDeviceKey($input: UseNewDeviceKeyInput!) { authorizeWithNewDeviceApiKey(input: $input) { @@ -277,12 +292,11 @@ mutation AuthorizeWithNewDeviceKey($input: UseNewDeviceKeyInput!) 
{ } """ + def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_file): response = authorized_client.post( "/graphql", - json={ - "query": NEW_DEVICE_KEY_MUTATION - }, + json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -314,6 +328,7 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ assert read_json(tokens_file)["tokens"][2]["token"] == token assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" + def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): response = client.post( "/graphql", @@ -334,12 +349,11 @@ def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file) == TOKENS_FILE_CONTETS + def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_file): response = authorized_client.post( "/graphql", - json={ - "query": NEW_DEVICE_KEY_MUTATION - }, + json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -367,7 +381,10 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is True assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json["data"]["authorizeWithNewDeviceApiKey"]["token"] + assert ( + read_json(tokens_file)["tokens"][2]["token"] + == response.json["data"]["authorizeWithNewDeviceApiKey"]["token"] + ) assert read_json(tokens_file)["tokens"][2]["name"] == "new_token" response = client.post( @@ -389,12 +406,13 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file)["tokens"].__len__() == 3 -def test_graphql_get_and_authorize_key_after_12_minutes(client, authorized_client, tokens_file): + +def test_graphql_get_and_authorize_key_after_12_minutes( + client, authorized_client, tokens_file +): response = authorized_client.post( "/graphql", - json={ - "query": NEW_DEVICE_KEY_MUTATION - }, + json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -402,7 +420,11 @@ def test_graphql_get_and_authorize_key_after_12_minutes(client, authorized_clien assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 - key = Mnemonic(language="english").to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]).hex() + key = ( + Mnemonic(language="english") + .to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .hex() + ) assert read_json(tokens_file)["new_device"]["token"] == key file_data = read_json(tokens_file) @@ -429,6 +451,7 @@ def test_graphql_get_and_authorize_key_after_12_minutes(client, authorized_clien assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + def test_graphql_authorize_without_token(client, tokens_file): response = client.post( "/graphql", diff 
--git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 8ac8560..0021e5d 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -32,22 +32,20 @@ recoveryKey { } """ + def test_graphql_recovery_key_status_unauthorized(client, tokens_file): response = client.post( "/graphql", - json={ - "query": generate_api_query([API_RECOVERY_QUERY]) - }, + json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 assert response.json.get("data") is None + def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_file): response = authorized_client.get( "/graphql", - json={ - "query": generate_api_query([API_RECOVERY_QUERY]) - }, + json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -58,6 +56,7 @@ def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_ assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + API_RECOVERY_KEY_GENERATE_MUTATION = """ mutation TestGenerateRecoveryKey($limits: RecoveryKeyLimitsInput!) { getNewRecoveryApiKey(limits: $limits) { @@ -79,6 +78,8 @@ mutation TestUseRecoveryKey($input: UseRecoveryKeyInput!) { } } """ + + def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): response = authorized_client.post( "/graphql", @@ -98,23 +99,23 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + assert ( + response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + ) assert read_json(tokens_file)["recovery_token"] is not None time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None key = response.json["data"]["getNewRecoveryApiKey"]["key"] assert ( - datetime.datetime.strptime( - time_generated, "%Y-%m-%dT%H:%M:%S.%fZ" - ) - datetime.timedelta(seconds=5) < datetime.datetime.now() + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%fZ") + - datetime.timedelta(seconds=5) + < datetime.datetime.now() ) # Try to get token status response = authorized_client.get( "/graphql", - json={ - "query": generate_api_query([API_RECOVERY_QUERY]) - }, + json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -132,19 +133,22 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): "query": API_RECOVERY_KEY_USE_MUTATION, "variables": { "input": { - "token": key, - "tokenName": "test_token", + "key": key, + "deviceName": "test_token", }, }, }, ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryKey"]["success"] is True - assert response.json["data"]["useRecoveryKey"]["message"] is not None - assert response.json["data"]["useRecoveryKey"]["code"] == 200 - assert response.json["data"]["useRecoveryKey"]["token"] is not None - assert response.json["data"]["useRecoveryKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] + assert 
response.json["data"]["useRecoveryApiKey"]["success"] is True + assert response.json["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert ( + response.json["data"]["useRecoveryApiKey"]["token"] + == read_json(tokens_file)["tokens"][2]["token"] + ) assert read_json(tokens_file)["tokens"][2]["name"] == "test_token" # Try to use token again @@ -154,24 +158,30 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): "query": API_RECOVERY_KEY_USE_MUTATION, "variables": { "input": { - "token": key, - "tokenName": "test_token2", + "key": key, + "deviceName": "test_token2", }, }, }, ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryKey"]["success"] is True - assert response.json["data"]["useRecoveryKey"]["message"] is not None - assert response.json["data"]["useRecoveryKey"]["code"] == 200 - assert response.json["data"]["useRecoveryKey"]["token"] is not None - assert response.json["data"]["useRecoveryKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] + assert response.json["data"]["useRecoveryApiKey"]["success"] is True + assert response.json["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert ( + response.json["data"]["useRecoveryApiKey"]["token"] + == read_json(tokens_file)["tokens"][3]["token"] + ) assert read_json(tokens_file)["tokens"][3]["name"] == "test_token2" -def test_graphql_generate_recovery_key_with_expiration_date(client, authorized_client, tokens_file): + +def test_graphql_generate_recovery_key_with_expiration_date( + client, authorized_client, tokens_file +): expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f") response = authorized_client.post( "/graphql", json={ @@ -190,27 +200,27 @@ def test_graphql_generate_recovery_key_with_expiration_date(client, authorized_c assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + assert ( + response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + ) assert read_json(tokens_file)["recovery_token"] is not None key = response.json["data"]["getNewRecoveryApiKey"]["key"] - assert read_json(tokens_file)["recovery_token"]["expirationDate"] == expiration_date_str + assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str assert read_json(tokens_file)["recovery_token"]["token"] == mnemonic_to_hex(key) time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None assert ( - datetime.datetime.strptime( - time_generated, "%Y-%m-%dT%H:%M:%S.%fZ" - ) - datetime.timedelta(seconds=5) < datetime.datetime.now() + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%fZ") + - datetime.timedelta(seconds=5) + < datetime.datetime.now() ) # Try to get token status response = authorized_client.get( "/graphql", - json={ - "query": 
generate_api_query([API_RECOVERY_QUERY]) - }, + json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -218,7 +228,10 @@ def test_graphql_generate_recovery_key_with_expiration_date(client, authorized_c assert response.json["data"]["api"]["recoveryKey"]["exists"] is True assert response.json["data"]["api"]["recoveryKey"]["valid"] is True assert response.json["data"]["api"]["recoveryKey"]["creationDate"] == time_generated - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] == expiration_date_str + assert ( + response.json["data"]["api"]["recoveryKey"]["expirationDate"] + == expiration_date_str + ) assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None # Try to use token @@ -228,19 +241,22 @@ def test_graphql_generate_recovery_key_with_expiration_date(client, authorized_c "query": API_RECOVERY_KEY_USE_MUTATION, "variables": { "input": { - "token": key, - "tokenName": "test_token", + "key": key, + "deviceName": "test_token", }, }, }, ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryKey"]["success"] is True - assert response.json["data"]["useRecoveryKey"]["message"] is not None - assert response.json["data"]["useRecoveryKey"]["code"] == 200 - assert response.json["data"]["useRecoveryKey"]["token"] is not None - assert response.json["data"]["useRecoveryKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] + assert response.json["data"]["useRecoveryApiKey"]["success"] is True + assert response.json["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert ( + response.json["data"]["useRecoveryApiKey"]["token"] + == read_json(tokens_file)["tokens"][2]["token"] + ) # Try to use token again response = authorized_client.post( @@ -249,23 +265,28 @@ def test_graphql_generate_recovery_key_with_expiration_date(client, authorized_c "query": API_RECOVERY_KEY_USE_MUTATION, "variables": { "input": { - "token": key, - "tokenName": "test_token2", + "key": key, + "deviceName": "test_token2", }, }, }, ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryKey"]["success"] is True - assert response.json["data"]["useRecoveryKey"]["message"] is not None - assert response.json["data"]["useRecoveryKey"]["code"] == 200 - assert response.json["data"]["useRecoveryKey"]["token"] is not None - assert response.json["data"]["useRecoveryKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] + assert response.json["data"]["useRecoveryApiKey"]["success"] is True + assert response.json["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert ( + response.json["data"]["useRecoveryApiKey"]["token"] + == read_json(tokens_file)["tokens"][3]["token"] + ) # Try to use token after expiration date new_data = read_json(tokens_file) - new_data["recovery_token"]["expirationDate"] = datetime.datetime.now() - datetime.timedelta(minutes=5) + new_data["recovery_token"][ + "expirationDate" + ] = datetime.datetime.now() - datetime.timedelta(minutes=5) write_json(tokens_file, new_data) response = authorized_client.post( "/graphql", @@ -273,27 +294,25 @@ def 
test_graphql_generate_recovery_key_with_expiration_date(client, authorized_c "query": API_RECOVERY_KEY_USE_MUTATION, "variables": { "input": { - "token": key, - "tokenName": "test_token3", + "key": key, + "deviceName": "test_token3", }, }, }, ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryKey"]["success"] is False - assert response.json["data"]["useRecoveryKey"]["message"] is not None - assert response.json["data"]["useRecoveryKey"]["code"] == 404 - assert response.json["data"]["useRecoveryKey"]["token"] is None + assert response.json["data"]["useRecoveryApiKey"]["success"] is False + assert response.json["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json["data"]["useRecoveryApiKey"]["code"] == 404 + assert response.json["data"]["useRecoveryApiKey"]["token"] is None assert read_json(tokens_file)["tokens"] == new_data["tokens"] # Try to get token status response = authorized_client.get( "/graphql", - json={ - "query": generate_api_query([API_RECOVERY_QUERY]) - }, + json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -301,12 +320,18 @@ def test_graphql_generate_recovery_key_with_expiration_date(client, authorized_c assert response.json["data"]["api"]["recoveryKey"]["exists"] is True assert response.json["data"]["api"]["recoveryKey"]["valid"] is False assert response.json["data"]["api"]["recoveryKey"]["creationDate"] == time_generated - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] == new_data["recovery_token"]["expiration"] + assert ( + response.json["data"]["api"]["recoveryKey"]["expirationDate"] + == new_data["recovery_token"]["expiration"] + ) assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None -def test_graphql_generate_recoevry_key_with_expiration_in_the_past(authorized_client, tokens_file): + +def test_graphql_generate_recovery_key_with_expiration_in_the_past( + authorized_client, tokens_file +): expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f") response = authorized_client.post( "/graphql", @@ -326,11 +351,12 @@ def test_graphql_generate_recoevry_key_with_expiration_in_the_past(authorized_cl assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None - - assert read_json(tokens_file)["tokens"] == [] assert "recovery_token" not in read_json(tokens_file) -def test_graphql_generate_recovery_key_with_invalid_time_format(authorized_client, tokens_file): + +def test_graphql_generate_recovery_key_with_invalid_time_format( + authorized_client, tokens_file +): expiration_date = "invalid_time_format" expiration_date_str = expiration_date @@ -347,16 +373,14 @@ def test_graphql_generate_recovery_key_with_invalid_time_format(authorized_clien }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json.get("data") 
is None - assert read_json(tokens_file)["tokens"] == [] assert "recovery_token" not in read_json(tokens_file) -def test_graphql_generate_recovery_key_with_limited_uses(authorized_client, tokens_file): + +def test_graphql_generate_recovery_key_with_limited_uses( + authorized_client, tokens_file +): response = authorized_client.post( "/graphql", @@ -386,9 +410,7 @@ def test_graphql_generate_recovery_key_with_limited_uses(authorized_client, toke # Try to get token status response = authorized_client.get( "/graphql", - json={ - "query": generate_api_query([API_RECOVERY_QUERY]) - }, + json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -406,25 +428,23 @@ def test_graphql_generate_recovery_key_with_limited_uses(authorized_client, toke "query": API_RECOVERY_KEY_USE_MUTATION, "variables": { "input": { - "token": mnemonic_key, - "tokenName": "test_token1", + "key": mnemonic_key, + "deviceName": "test_token1", }, }, }, ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryKey"]["success"] is True - assert response.json["data"]["useRecoveryKey"]["message"] is not None - assert response.json["data"]["useRecoveryKey"]["code"] == 200 - assert response.json["data"]["useRecoveryKey"]["token"] is not None + assert response.json["data"]["useRecoveryApiKey"]["success"] is True + assert response.json["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json["data"]["useRecoveryApiKey"]["token"] is not None # Try to get token status response = authorized_client.get( "/graphql", - json={ - "query": generate_api_query([API_RECOVERY_QUERY]) - }, + json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -442,25 +462,23 @@ def test_graphql_generate_recovery_key_with_limited_uses(authorized_client, toke "query": API_RECOVERY_KEY_USE_MUTATION, "variables": { "input": { - "token": mnemonic_key, - "tokenName": "test_token2", + "key": mnemonic_key, + "deviceName": "test_token2", }, }, }, ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryKey"]["success"] is True - assert response.json["data"]["useRecoveryKey"]["message"] is not None - assert response.json["data"]["useRecoveryKey"]["code"] == 200 - assert response.json["data"]["useRecoveryKey"]["token"] is not None + assert response.json["data"]["useRecoveryApiKey"]["success"] is True + assert response.json["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json["data"]["useRecoveryApiKey"]["token"] is not None # Try to get token status response = authorized_client.get( "/graphql", - json={ - "query": generate_api_query([API_RECOVERY_QUERY]) - }, + json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 assert response.json.get("data") is not None @@ -468,7 +486,7 @@ def test_graphql_generate_recovery_key_with_limited_uses(authorized_client, toke assert response.json["data"]["api"]["recoveryKey"]["exists"] is True assert response.json["data"]["api"]["recoveryKey"]["valid"] is False assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is not None + assert 
response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 0 # Try to use token @@ -478,20 +496,23 @@ def test_graphql_generate_recovery_key_with_limited_uses(authorized_client, toke "query": API_RECOVERY_KEY_USE_MUTATION, "variables": { "input": { - "token": mnemonic_key, - "tokenName": "test_token3", + "key": mnemonic_key, + "deviceName": "test_token3", }, }, }, ) assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryKey"]["success"] is False - assert response.json["data"]["useRecoveryKey"]["message"] is not None - assert response.json["data"]["useRecoveryKey"]["code"] == 404 - assert response.json["data"]["useRecoveryKey"]["token"] is None + assert response.json["data"]["useRecoveryApiKey"]["success"] is False + assert response.json["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json["data"]["useRecoveryApiKey"]["code"] == 404 + assert response.json["data"]["useRecoveryApiKey"]["token"] is None -def test_graphql_generate_recovery_key_with_negative_uses(authorized_client, tokens_file): + +def test_graphql_generate_recovery_key_with_negative_uses( + authorized_client, tokens_file +): # Try to get token status response = authorized_client.post( "/graphql", @@ -512,6 +533,7 @@ def test_graphql_generate_recovery_key_with_negative_uses(authorized_client, tok assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): # Try to get token status response = authorized_client.post( diff --git a/tests/test_graphql/test_api_version.py b/tests/test_graphql/test_api_version.py index a45aa3a..8f76035 100644 --- a/tests/test_graphql/test_api_version.py +++ b/tests/test_graphql/test_api_version.py @@ -6,12 +6,11 @@ from tests.common import generate_api_query API_VERSION_QUERY = "version" + def test_graphql_get_api_version(authorized_client): response = authorized_client.get( "/graphql", - json={ - "query": generate_api_query([API_VERSION_QUERY]) - }, + json={"query": generate_api_query([API_VERSION_QUERY])}, ) assert response.status_code == 200 assert "version" in response.get_json()["data"]["api"] @@ -20,9 +19,7 @@ def test_graphql_get_api_version(authorized_client): def test_graphql_api_version_unauthorized(client): response = client.get( "/graphql", - json={ - "query": generate_api_query([API_VERSION_QUERY]) - }, + json={"query": generate_api_query([API_VERSION_QUERY])}, ) assert response.status_code == 200 assert "version" in response.get_json()["data"]["api"] diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index 7ed4d6a..4641329 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -7,6 +7,7 @@ import datetime from tests.common import generate_system_query, read_json, write_json + @pytest.fixture def domain_file(mocker, datadir): mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain") @@ -54,7 +55,7 @@ class ProcessMock: self.args = args self.kwargs = kwargs - def communicate(self): + def communicate(): return (b"", None) returncode = 0 @@ -62,7 +63,8 @@ class ProcessMock: class BrokenServiceMock(ProcessMock): """Mock subprocess.Popen for broken service""" - def communicate(self): + + def communicate(): return (b"Testing error", None) returncode = 3 @@ -95,19 +97,35 @@ def 
mock_subprocess_check_output(mocker): ) return mock + @pytest.fixture def mock_get_ip4(mocker): - mock = mocker.patch("selfprivacy_api.utils.network.get_ip4", autospec=True, return_value="157.90.247.192") + mock = mocker.patch( + "selfprivacy_api.utils.network.get_ip4", + autospec=True, + return_value="157.90.247.192", + ) return mock + @pytest.fixture def mock_get_ip6(mocker): - mock = mocker.patch("selfprivacy_api.utils.network.get_ip6", autospec=True, return_value="fe80::9400:ff:fef1:34ae") + mock = mocker.patch( + "selfprivacy_api.utils.network.get_ip6", + autospec=True, + return_value="fe80::9400:ff:fef1:34ae", + ) return mock + @pytest.fixture def mock_dkim_key(mocker): - mock = mocker.patch("selfprivacy_api.utils.get_dkim_key", autospec=True, return_value="I am a DKIM key") + mock = mocker.patch( + "selfprivacy_api.utils.get_dkim_key", + autospec=True, + return_value="I am a DKIM key", + ) + API_PYTHON_VERSION_INFO = """ info { @@ -115,6 +133,7 @@ info { } """ + def test_graphql_wrong_auth(wrong_auth_client): """Test wrong auth""" response = wrong_auth_client.get( @@ -126,6 +145,7 @@ def test_graphql_wrong_auth(wrong_auth_client): assert response.status_code == 200 assert response.json.get("data") is None + API_GET_DOMAIN_INFO = """ domainInfo { domain @@ -141,6 +161,7 @@ domainInfo { } """ + def dns_record(type="A", name="test.tld", content=None, ttl=3600, priority=None): if content is None: if type == "A": @@ -155,13 +176,23 @@ def dns_record(type="A", name="test.tld", content=None, ttl=3600, priority=None) "priority": priority, } + def is_dns_record_in_array(records, dns_record) -> bool: for record in records: - if record["type"] == dns_record["type"] and record["name"] == dns_record["name"] and record["content"] == dns_record["content"] and record["ttl"] == dns_record["ttl"] and record["priority"] == dns_record["priority"]: + if ( + record["type"] == dns_record["type"] + and record["name"] == dns_record["name"] + and record["content"] == dns_record["content"] + and record["ttl"] == dns_record["ttl"] + and record["priority"] == dns_record["priority"] + ): return True return False -def test_graphql_get_domain(authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on): + +def test_graphql_get_domain( + authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on +): """Test get domain""" response = authorized_client.get( "/graphql", @@ -178,23 +209,62 @@ def test_graphql_get_domain(authorized_client, domain_file, mock_get_ip4, mock_g assert is_dns_record_in_array(dns_records, dns_record()) assert is_dns_record_in_array(dns_records, dns_record(type="AAAA")) assert is_dns_record_in_array(dns_records, dns_record(name="api.test.tld")) - assert is_dns_record_in_array(dns_records, dns_record(name="api.test.tld", type="AAAA")) + assert is_dns_record_in_array( + dns_records, dns_record(name="api.test.tld", type="AAAA") + ) assert is_dns_record_in_array(dns_records, dns_record(name="cloud.test.tld")) - assert is_dns_record_in_array(dns_records, dns_record(name="cloud.test.tld", type="AAAA")) + assert is_dns_record_in_array( + dns_records, dns_record(name="cloud.test.tld", type="AAAA") + ) assert is_dns_record_in_array(dns_records, dns_record(name="git.test.tld")) - assert is_dns_record_in_array(dns_records, dns_record(name="git.test.tld", type="AAAA")) + assert is_dns_record_in_array( + dns_records, dns_record(name="git.test.tld", type="AAAA") + ) assert is_dns_record_in_array(dns_records, dns_record(name="meet.test.tld")) - assert is_dns_record_in_array(dns_records, 
dns_record(name="meet.test.tld", type="AAAA")) + assert is_dns_record_in_array( + dns_records, dns_record(name="meet.test.tld", type="AAAA") + ) assert is_dns_record_in_array(dns_records, dns_record(name="password.test.tld")) - assert is_dns_record_in_array(dns_records, dns_record(name="password.test.tld", type="AAAA")) + assert is_dns_record_in_array( + dns_records, dns_record(name="password.test.tld", type="AAAA") + ) assert is_dns_record_in_array(dns_records, dns_record(name="social.test.tld")) - assert is_dns_record_in_array(dns_records, dns_record(name="social.test.tld", type="AAAA")) + assert is_dns_record_in_array( + dns_records, dns_record(name="social.test.tld", type="AAAA") + ) assert is_dns_record_in_array(dns_records, dns_record(name="vpn.test.tld")) - assert is_dns_record_in_array(dns_records, dns_record(name="vpn.test.tld", type="AAAA")) - assert is_dns_record_in_array(dns_records, dns_record(name="test.tld", type="MX", content="test.tld", priority=10)) - assert is_dns_record_in_array(dns_records, dns_record(name="_dmarc.test.tld", type="TXT", content="v=DMARC1; p=none", ttl=18000)) - assert is_dns_record_in_array(dns_records, dns_record(name="test.tld", type="TXT", content="v=spf1 a mx ip4:157.90.247.192 -all", ttl=18000)) - assert is_dns_record_in_array(dns_records, dns_record(name="selector._domainkey.test.tld", type="TXT", content="I am a DKIM key", ttl=18000)) + assert is_dns_record_in_array( + dns_records, dns_record(name="vpn.test.tld", type="AAAA") + ) + assert is_dns_record_in_array( + dns_records, + dns_record(name="test.tld", type="MX", content="test.tld", priority=10), + ) + assert is_dns_record_in_array( + dns_records, + dns_record( + name="_dmarc.test.tld", type="TXT", content="v=DMARC1; p=none", ttl=18000 + ), + ) + assert is_dns_record_in_array( + dns_records, + dns_record( + name="test.tld", + type="TXT", + content="v=spf1 a mx ip4:157.90.247.192 -all", + ttl=18000, + ), + ) + assert is_dns_record_in_array( + dns_records, + dns_record( + name="selector._domainkey.test.tld", + type="TXT", + content="I am a DKIM key", + ttl=18000, + ), + ) + API_GET_TIMEZONE = """ settings { @@ -202,6 +272,7 @@ settings { } """ + def test_graphql_get_timezone_unauthorized(client, turned_on): """Test get timezone without auth""" response = client.get( @@ -213,6 +284,7 @@ def test_graphql_get_timezone_unauthorized(client, turned_on): assert response.status_code == 200 assert response.json.get("data") is None + def test_graphql_get_timezone(authorized_client, turned_on): """Test get timezone""" response = authorized_client.get( @@ -225,7 +297,8 @@ def test_graphql_get_timezone(authorized_client, turned_on): assert response.json.get("data") is not None assert response.json["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" -def test_graphql_get_timezone_on_undefined(authorized_client, undefiened_config): + +def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): """Test get timezone when none is defined in config""" response = authorized_client.get( "/graphql", @@ -249,6 +322,7 @@ mutation changeTimezone($timezone: String!) { } """ + def test_graphql_change_timezone_unauthorized(client, turned_on): """Test change timezone without auth""" response = client.post( diff --git a/tests/test_network_utils.py b/tests/test_network_utils.py index b8f9c0d..a7c1511 100644 --- a/tests/test_network_utils.py +++ b/tests/test_network_utils.py @@ -21,16 +21,21 @@ FAILED_OUTPUT_STRING = b""" Device "eth0" does not exist. 
""" + @pytest.fixture def ip_process_mock(mocker): - mock = mocker.patch("subprocess.check_output", autospec=True, return_value=OUTPUT_STRING) + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=OUTPUT_STRING + ) return mock + def test_get_ip4(ip_process_mock): """Test get IPv4 address""" ip4 = get_ip4() assert ip4 == "157.90.247.192" + def test_get_ip6(ip_process_mock): """Test get IPv6 address""" ip6 = get_ip6() diff --git a/tests/test_system.py b/tests/test_system.py index ac108aa..b9c8649 100644 --- a/tests/test_system.py +++ b/tests/test_system.py @@ -60,7 +60,7 @@ class ProcessMock: self.args = args self.kwargs = kwargs - def communicate(self): + def communicate(): return (b"", None) returncode = 0 @@ -68,7 +68,8 @@ class ProcessMock: class BrokenServiceMock(ProcessMock): """Mock subprocess.Popen""" - def communicate(self): + + def communicate(): return (b"Testing error", None) returncode = 3 From e3354c73ef08e71408edaa4fdaf886e35ec61986 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 8 Jul 2022 18:28:08 +0300 Subject: [PATCH 26/39] Change datetime formats, more tests --- .../graphql/mutations/api_mutations.py | 44 +- selfprivacy_api/graphql/queries/system.py | 2 +- .../resources/api_auth/recovery_token.py | 9 +- selfprivacy_api/utils/__init__.py | 30 +- selfprivacy_api/utils/auth.py | 21 +- tests/test_auth.py | 32 +- tests/test_graphql/test_api_recovery.py | 46 +- tests/test_graphql/test_system.py | 518 +++++++++++++++++- 8 files changed, 615 insertions(+), 87 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index d516049..e0d1057 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -37,8 +37,8 @@ class DeviceApiTokenMutationReturn(MutationReturnInterface): class RecoveryKeyLimitsInput: """Recovery key limits input""" - expiration_date: typing.Optional[datetime.datetime] - uses: typing.Optional[int] + expiration_date: typing.Optional[datetime.datetime] = None + uses: typing.Optional[int] = None @strawberry.input @@ -61,26 +61,30 @@ class UseNewDeviceKeyInput: class ApiMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def get_new_recovery_api_key( - self, limits: RecoveryKeyLimitsInput + self, limits: typing.Optional[RecoveryKeyLimitsInput] = None ) -> ApiKeyMutationReturn: """Generate recovery key""" - if limits.expiration_date is not None: - if limits.expiration_date < datetime.datetime.now(): - return ApiKeyMutationReturn( - success=False, - message="Expiration date must be in the future", - code=400, - key=None, - ) - if limits.uses is not None: - if limits.uses < 1: - return ApiKeyMutationReturn( - success=False, - message="Uses must be greater than 0", - code=400, - key=None, - ) - key = generate_recovery_token(limits.expiration_date, limits.uses) + if limits is not None: + if limits.expiration_date is not None: + if limits.expiration_date < datetime.datetime.now(): + return ApiKeyMutationReturn( + success=False, + message="Expiration date must be in the future", + code=400, + key=None, + ) + if limits.uses is not None: + if limits.uses < 1: + return ApiKeyMutationReturn( + success=False, + message="Uses must be greater than 0", + code=400, + key=None, + ) + if limits is not None: + key = generate_recovery_token(limits.expiration_date, limits.uses) + else: + key = generate_recovery_token(None, None) return ApiKeyMutationReturn( success=True, message="Recovery key generated", 
diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index cadf074..a235e4d 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -158,7 +158,7 @@ class System: timestamp=None, ) ) - domain: SystemDomainInfo = strawberry.field(resolver=get_system_domain_info) + domain_info: SystemDomainInfo = strawberry.field(resolver=get_system_domain_info) settings: SystemSettings = SystemSettings() info: SystemInfo = SystemInfo() provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info) diff --git a/selfprivacy_api/resources/api_auth/recovery_token.py b/selfprivacy_api/resources/api_auth/recovery_token.py index e97c87a..912a50b 100644 --- a/selfprivacy_api/resources/api_auth/recovery_token.py +++ b/selfprivacy_api/resources/api_auth/recovery_token.py @@ -4,6 +4,7 @@ from datetime import datetime from flask_restful import Resource, reqparse from selfprivacy_api.resources.api_auth import api +from selfprivacy_api.utils import parse_date from selfprivacy_api.utils.auth import ( is_recovery_token_exists, is_recovery_token_valid, @@ -129,19 +130,17 @@ class RecoveryToken(Resource): # Convert expiration date to datetime and return 400 if it is not valid if args["expiration"]: try: - expiration = datetime.strptime( - args["expiration"], "%Y-%m-%dT%H:%M:%S.%fZ" - ) + expiration = parse_date(args["expiration"]) # Retrun 400 if expiration date is in the past if expiration < datetime.now(): return {"message": "Expiration date cannot be in the past"}, 400 except ValueError: return { - "error": "Invalid expiration date. Use YYYY-MM-DDTHH:MM:SS.SSSZ" + "error": "Invalid expiration date. Use YYYY-MM-DDTHH:MM:SS.SSS" }, 400 else: expiration = None - if args["uses"] != None and args["uses"] < 1: + if args["uses"] is not None and args["uses"] < 1: return {"message": "Uses must be greater than 0"}, 400 # Generate recovery token token = generate_recovery_token(expiration, args["uses"]) diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 81dc354..c80dd99 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -125,13 +125,29 @@ def is_username_forbidden(username): def parse_date(date_str: str) -> datetime.datetime: - """Parse date string which can be in - %Y-%m-%dT%H:%M:%S.%fZ or %Y-%m-%d %H:%M:%S.%f format""" - return ( - datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%fZ") - if date_str.endswith("Z") - else datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f") - ) + """Parse date string which can be in one of these formats: + - %Y-%m-%dT%H:%M:%S.%fZ + - %Y-%m-%dT%H:%M:%S.%f + - %Y-%m-%d %H:%M:%S.%fZ + - %Y-%m-%d %H:%M:%S.%f + """ + try: + return datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%fZ") + except ValueError: + pass + try: + return datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f") + except ValueError: + pass + try: + return datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%fZ") + except ValueError: + pass + try: + return datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f") + except ValueError: + pass + raise ValueError("Invalid date string") def get_dkim_key(domain): diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index 6fff698..f512948 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -7,7 +7,7 @@ import typing from mnemonic import Mnemonic -from . import ReadUserData, UserDataFiles, WriteUserData +from . 
import ReadUserData, UserDataFiles, WriteUserData, parse_date """ Token are stored in the tokens.json file. @@ -121,7 +121,7 @@ def create_token(name): { "token": token, "name": name, - "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")), + "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")), } ) return token @@ -161,9 +161,7 @@ def is_recovery_token_valid(): return False if "expiration" not in recovery_token or recovery_token["expiration"] is None: return True - return datetime.now() < datetime.strptime( - recovery_token["expiration"], "%Y-%m-%dT%H:%M:%S.%fZ" - ) + return datetime.now() < parse_date(recovery_token["expiration"]) def get_recovery_token_status(): @@ -213,8 +211,8 @@ def generate_recovery_token( with WriteUserData(UserDataFiles.TOKENS) as tokens: tokens["recovery_token"] = { "token": recovery_token_str, - "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")), - "expiration": expiration.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")), + "expiration": expiration.strftime("%Y-%m-%dT%H:%M:%S.%f") if expiration is not None else None, "uses_left": uses_left if uses_left is not None else None, @@ -285,14 +283,7 @@ def _get_new_device_auth_token(): new_device = tokens["new_device"] if "expiration" not in new_device: return None - if new_device["expiration"].endswith("Z"): - expiration = datetime.strptime( - new_device["expiration"], "%Y-%m-%dT%H:%M:%S.%fZ" - ) - else: - expiration = datetime.strptime( - new_device["expiration"], "%Y-%m-%d %H:%M:%S.%f" - ) + expiration = parse_date(new_device["expiration"]) if datetime.now() > expiration: return None return new_device["token"] diff --git a/tests/test_auth.py b/tests/test_auth.py index 4d78f62..d209c9c 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -25,6 +25,13 @@ TOKENS_FILE_CONTETS = { ] } +DATE_FORMATS = [ + "%Y-%m-%dT%H:%M:%S.%fZ", + "%Y-%m-%dT%H:%M:%S.%f", + "%Y-%m-%d %H:%M:%S.%fZ", + "%Y-%m-%d %H:%M:%S.%f", +] + def test_get_tokens_info(authorized_client, tokens_file): response = authorized_client.get("/auth/tokens") @@ -261,7 +268,7 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): assert time_generated is not None # Assert that the token was generated near the current time assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) < datetime.datetime.now() ) @@ -298,14 +305,14 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" +@pytest.mark.parametrize("timeformat", DATE_FORMATS) def test_generate_recovery_token_with_expiration_date( - authorized_client, client, tokens_file + authorized_client, client, tokens_file, timeformat ): # Generate token with expiration date # Generate expiration date in the future - # Expiration date format is YYYY-MM-DDTHH:MM:SS.SSSZ expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + expiration_date_str = expiration_date.strftime(timeformat) response = authorized_client.post( "/auth/recovery_token", json={"expiration": expiration_date_str}, @@ -315,13 +322,15 @@ def test_generate_recovery_token_with_expiration_date( mnemonic_token = response.json["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert 
read_json(tokens_file)["recovery_token"]["token"] == token - assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str + assert datetime.datetime.strptime( + read_json(tokens_file)["recovery_token"]["expiration"], "%Y-%m-%dT%H:%M:%S.%f" + ) == datetime.datetime.strptime(expiration_date_str, timeformat) time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None # Assert that the token was generated near the current time assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) < datetime.datetime.now() ) @@ -333,7 +342,7 @@ def test_generate_recovery_token_with_expiration_date( "exists": True, "valid": True, "date": time_generated, - "expiration": expiration_date_str, + "expiration": expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f"), "uses_left": None, } @@ -360,7 +369,7 @@ def test_generate_recovery_token_with_expiration_date( # Try to use token after expiration date new_data = read_json(tokens_file) new_data["recovery_token"]["expiration"] = datetime.datetime.now().strftime( - "%Y-%m-%dT%H:%M:%S.%fZ" + "%Y-%m-%dT%H:%M:%S.%f" ) write_json(tokens_file, new_data) recovery_response = client.post( @@ -383,12 +392,13 @@ def test_generate_recovery_token_with_expiration_date( } +@pytest.mark.parametrize("timeformat", DATE_FORMATS) def test_generate_recovery_token_with_expiration_in_the_past( - authorized_client, tokens_file + authorized_client, tokens_file, timeformat ): # Server must return 400 if expiration date is in the past expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + expiration_date_str = expiration_date.strftime(timeformat) response = authorized_client.post( "/auth/recovery_token", json={"expiration": expiration_date_str}, @@ -429,7 +439,7 @@ def test_generate_recovery_token_with_limited_uses( time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) < datetime.datetime.now() ) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 0021e5d..2d1e16a 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -2,6 +2,7 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring import json +from time import strftime import pytest import datetime @@ -58,7 +59,7 @@ def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_ API_RECOVERY_KEY_GENERATE_MUTATION = """ -mutation TestGenerateRecoveryKey($limits: RecoveryKeyLimitsInput!) 
{ +mutation TestGenerateRecoveryKey($limits: RecoveryKeyLimitsInput) { getNewRecoveryApiKey(limits: $limits) { success message @@ -85,12 +86,6 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): "/graphql", json={ "query": API_RECOVERY_KEY_GENERATE_MUTATION, - "variables": { - "limits": { - "uses": None, - "expirationDate": None, - }, - }, }, ) assert response.status_code == 200 @@ -107,7 +102,7 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): assert time_generated is not None key = response.json["data"]["getNewRecoveryApiKey"]["key"] assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) < datetime.datetime.now() ) @@ -122,7 +117,9 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): assert response.json["data"]["api"]["recoveryKey"] is not None assert response.json["data"]["api"]["recoveryKey"]["exists"] is True assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + assert response.json["data"]["api"]["recoveryKey"][ + "creationDate" + ] == time_generated.replace("Z", "") assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None @@ -134,7 +131,7 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): "variables": { "input": { "key": key, - "deviceName": "test_token", + "deviceName": "new_test_token", }, }, }, @@ -149,7 +146,7 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): response.json["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] ) - assert read_json(tokens_file)["tokens"][2]["name"] == "test_token" + assert read_json(tokens_file)["tokens"][2]["name"] == "new_test_token" # Try to use token again response = client.post( @@ -159,7 +156,7 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): "variables": { "input": { "key": key, - "deviceName": "test_token2", + "deviceName": "new_test_token2", }, }, }, @@ -174,7 +171,7 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): response.json["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] ) - assert read_json(tokens_file)["tokens"][3]["name"] == "test_token2" + assert read_json(tokens_file)["tokens"][3]["name"] == "new_test_token2" def test_graphql_generate_recovery_key_with_expiration_date( @@ -188,7 +185,6 @@ def test_graphql_generate_recovery_key_with_expiration_date( "query": API_RECOVERY_KEY_GENERATE_MUTATION, "variables": { "limits": { - "uses": None, "expirationDate": expiration_date_str, }, }, @@ -212,7 +208,7 @@ def test_graphql_generate_recovery_key_with_expiration_date( time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) < datetime.datetime.now() ) @@ -227,7 +223,9 @@ def test_graphql_generate_recovery_key_with_expiration_date( assert response.json["data"]["api"]["recoveryKey"] is not None assert response.json["data"]["api"]["recoveryKey"]["exists"] is True assert 
response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + assert response.json["data"]["api"]["recoveryKey"][ + "creationDate" + ] == time_generated.replace("Z", "") assert ( response.json["data"]["api"]["recoveryKey"]["expirationDate"] == expiration_date_str @@ -242,7 +240,7 @@ def test_graphql_generate_recovery_key_with_expiration_date( "variables": { "input": { "key": key, - "deviceName": "test_token", + "deviceName": "new_test_token", }, }, }, @@ -266,7 +264,7 @@ def test_graphql_generate_recovery_key_with_expiration_date( "variables": { "input": { "key": key, - "deviceName": "test_token2", + "deviceName": "new_test_token2", }, }, }, @@ -284,9 +282,9 @@ def test_graphql_generate_recovery_key_with_expiration_date( # Try to use token after expiration date new_data = read_json(tokens_file) - new_data["recovery_token"][ - "expirationDate" - ] = datetime.datetime.now() - datetime.timedelta(minutes=5) + new_data["recovery_token"]["expiration"] = ( + datetime.datetime.now() - datetime.timedelta(minutes=5) + ).strftime("%Y-%m-%dT%H:%M:%S.%f") write_json(tokens_file, new_data) response = authorized_client.post( "/graphql", @@ -295,7 +293,7 @@ def test_graphql_generate_recovery_key_with_expiration_date( "variables": { "input": { "key": key, - "deviceName": "test_token3", + "deviceName": "new_test_token3", }, }, }, @@ -339,7 +337,6 @@ def test_graphql_generate_recovery_key_with_expiration_in_the_past( "query": API_RECOVERY_KEY_GENERATE_MUTATION, "variables": { "limits": { - "uses": None, "expirationDate": expiration_date_str, }, }, @@ -366,7 +363,6 @@ def test_graphql_generate_recovery_key_with_invalid_time_format( "query": API_RECOVERY_KEY_GENERATE_MUTATION, "variables": { "limits": { - "uses": None, "expirationDate": expiration_date_str, }, }, @@ -521,7 +517,6 @@ def test_graphql_generate_recovery_key_with_negative_uses( "variables": { "limits": { "uses": -1, - "expirationDate": None, }, }, }, @@ -543,7 +538,6 @@ def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_ "variables": { "limits": { "uses": 0, - "expirationDate": None, }, }, }, diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index 4641329..75b2c28 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -338,8 +338,170 @@ def test_graphql_change_timezone_unauthorized(client, turned_on): assert response.json.get("data") is None -API_CHANGE_SERVER_SETTINGS = """ -mutation changeServerSettings($settings: SystemSettingsInput!) 
{ +def test_graphql_change_timezone(authorized_client, turned_on): + """Test change timezone""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_TIMEZONE_MUTATION, + "variables": { + "timezone": "Europe/Helsinki", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["changeTimezone"]["success"] is True + assert response.json["data"]["changeTimezone"]["message"] is not None + assert response.json["data"]["changeTimezone"]["code"] == 200 + assert response.json["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Helsinki" + + +def test_graphql_change_timezone_on_undefined(authorized_client, undefined_config): + """Test change timezone when none is defined in config""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_TIMEZONE_MUTATION, + "variables": { + "timezone": "Europe/Helsinki", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["changeTimezone"]["success"] is True + assert response.json["data"]["changeTimezone"]["message"] is not None + assert response.json["data"]["changeTimezone"]["code"] == 200 + assert response.json["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert ( + read_json(undefined_config / "undefined.json")["timezone"] == "Europe/Helsinki" + ) + + +def test_graphql_change_timezone_without_timezone(authorized_client, turned_on): + """Test change timezone without timezone""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_TIMEZONE_MUTATION, + "variables": { + "timezone": "", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["changeTimezone"]["success"] is False + assert response.json["data"]["changeTimezone"]["message"] is not None + assert response.json["data"]["changeTimezone"]["code"] == 400 + assert response.json["data"]["changeTimezone"]["timezone"] is None + assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" + + +def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned_on): + """Test change timezone with invalid timezone""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_TIMEZONE_MUTATION, + "variables": { + "timezone": "Invlaid/Timezone", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["changeTimezone"]["success"] is False + assert response.json["data"]["changeTimezone"]["message"] is not None + assert response.json["data"]["changeTimezone"]["code"] == 400 + assert response.json["data"]["changeTimezone"]["timezone"] is None + assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" + + +API_GET_AUTO_UPGRADE_SETTINGS_QUERY = """ +settings { + autoUpgrade { + enableAutoUpgrade + allowReboot + } +} +""" + + +def test_graphql_get_auto_upgrade_unauthorized(client, turned_on): + """Test get auto upgrade settings without auth""" + response = client.get( + "/graphql", + json={ + "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + + +def test_graphql_get_auto_upgrade(authorized_client, turned_on): + """Test get auto upgrade settings""" + response = authorized_client.get( + "/graphql", + 
json={ + "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True + assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is True + + +def test_graphql_get_auto_upgrade_on_undefined(authorized_client, undefined_config): + """Test get auto upgrade settings when none is defined in config""" + response = authorized_client.get( + "/graphql", + json={ + "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True + assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + + +def test_graphql_get_auto_upgrade_without_vlaues(authorized_client, no_values): + """Test get auto upgrade settings without values""" + response = authorized_client.get( + "/graphql", + json={ + "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True + assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + + +def test_graphql_get_auto_upgrade_turned_off(authorized_client, turned_off): + """Test get auto upgrade settings when turned off""" + response = authorized_client.get( + "/graphql", + json={ + "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert ( + response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is False + ) + assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + + +API_CHANGE_AUTO_UPGRADE_SETTINGS = """ +mutation changeServerSettings($settings: AutoUpgradeSettingsInput!) { changeAutoUpgradeSettings(settings: $settings) { success message @@ -349,3 +511,355 @@ mutation changeServerSettings($settings: SystemSettingsInput!) 
{ } } """ + + +def test_graphql_change_auto_upgrade_unauthorized(client, turned_on): + """Test change auto upgrade settings without auth""" + response = client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": { + "enableAutoUpgrade": True, + "allowReboot": True, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + + +def test_graphql_change_auto_upgrade(authorized_client, turned_on): + """Test change auto upgrade settings""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": { + "enableAutoUpgrade": False, + "allowReboot": True, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + ) + assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["enable"] is False + assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["allowReboot"] is True + + +def test_graphql_change_auto_upgrade_on_undefined(authorized_client, undefined_config): + """Test change auto upgrade settings when none is defined in config""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": { + "enableAutoUpgrade": False, + "allowReboot": True, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + ) + assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert ( + read_json(undefined_config / "undefined.json")["autoUpgrade"]["enable"] is False + ) + assert ( + read_json(undefined_config / "undefined.json")["autoUpgrade"]["allowReboot"] + is True + ) + + +def test_graphql_change_auto_upgrade_without_vlaues(authorized_client, no_values): + """Test change auto upgrade settings without values""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": { + "enableAutoUpgrade": True, + "allowReboot": True, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + ) + assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert read_json(no_values / "no_values.json")["autoUpgrade"]["enable"] is True + assert read_json(no_values / "no_values.json")["autoUpgrade"]["allowReboot"] is True + + +def 
test_graphql_change_auto_upgrade_turned_off(authorized_client, turned_off): + """Test change auto upgrade settings when turned off""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": { + "enableAutoUpgrade": True, + "allowReboot": True, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + ) + assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True + assert ( + read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True + ) + + +def test_grphql_change_auto_upgrade_without_enable(authorized_client, turned_off): + """Test change auto upgrade settings without enable""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": { + "allowReboot": True, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + ) + assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False + assert ( + read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True + ) + + +def test_graphql_change_auto_upgrade_without_allow_reboot( + authorized_client, turned_off +): + """Test change auto upgrade settings without allow reboot""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": { + "enableAutoUpgrade": True, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + ) + assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True + assert ( + read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False + ) + + +def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_off): + """Test change auto upgrade settings with empty input""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": {}, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True + assert 
response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + ) + assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False + assert ( + read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False + ) + + +API_REBUILD_SYSTEM_MUTATION = """ +mutation rebuildSystem() { + runSystemRebuild { + success + message + code + } +} +""" + + +def test_graphql_system_rebuild_unauthorized(client, mock_subprocess_popen): + """Test system rebuild without authorization""" + response = client.post( + "/graphql", + json={ + "query": API_REBUILD_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): + """Test system rebuild""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_REBUILD_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["runSystemRebuild"]["success"] is True + assert response.json["data"]["runSystemRebuild"]["message"] is not None + assert response.json["data"]["runSystemRebuild"]["code"] == 200 + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == [ + "systemctl", + "start", + "sp-nixos-rebuild.service", + ] + + +API_UPGRADE_SYSTEM_MUTATION = """ +mutation upgradeSystem() { + runSystemUpgrade { + success + message + code + } +} +""" + + +def test_graphql_system_upgrade_unauthorized(client, mock_subprocess_popen): + """Test system upgrade without authorization""" + response = client.post( + "/graphql", + json={ + "query": API_UPGRADE_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): + """Test system upgrade""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_UPGRADE_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["runSystemUpgrade"]["success"] is True + assert response.json["data"]["runSystemUpgrade"]["message"] is not None + assert response.json["data"]["runSystemUpgrade"]["code"] == 200 + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == [ + "systemctl", + "start", + "sp-nixos-upgrade.service", + ] + + +API_ROLLBACK_SYSTEM_MUTATION = """ +mutation rollbackSystem() { + runSystemRollback { + success + message + code + } +} +""" + + +def test_graphql_system_rollback_unauthorized(client, mock_subprocess_popen): + """Test system rollback without authorization""" + response = client.post( + "/graphql", + json={ + "query": API_ROLLBACK_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_system_rollback(authorized_client, mock_subprocess_popen): + """Test system rollback""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_ROLLBACK_SYSTEM_MUTATION, + }, + ) + assert 
response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["runSystemRollback"]["success"] is True + assert response.json["data"]["runSystemRollback"]["message"] is not None + assert response.json["data"]["runSystemRollback"]["code"] == 200 + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == [ + "systemctl", + "start", + "sp-nixos-rollback.service", + ] From eb21b65bbc60385961bff3c12637abe8ec83222f Mon Sep 17 00:00:00 2001 From: Inex Code Date: Mon, 11 Jul 2022 16:42:51 +0300 Subject: [PATCH 27/39] More system tests Co-authored-by: Detlaff --- tests/test_graphql/test_system.py | 190 +++++++------- tests/test_graphql/test_system_nixos_tasks.py | 231 ++++++++++++++++++ 2 files changed, 324 insertions(+), 97 deletions(-) create mode 100644 tests/test_graphql/test_system_nixos_tasks.py diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index 75b2c28..fe76095 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -2,6 +2,7 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring import json +import os import pytest import datetime @@ -134,7 +135,9 @@ info { """ -def test_graphql_wrong_auth(wrong_auth_client): +def test_graphql_get_python_version_wrong_auth( + wrong_auth_client, mock_subprocess_check_output +): """Test wrong auth""" response = wrong_auth_client.get( "/graphql", @@ -146,6 +149,62 @@ def test_graphql_wrong_auth(wrong_auth_client): assert response.json.get("data") is None +def test_graphql_get_python_version(authorized_client, mock_subprocess_check_output): + """Test get python version""" + response = authorized_client.get( + "/graphql", + json={ + "query": generate_system_query([API_PYTHON_VERSION_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["system"]["info"]["pythonVersion"] == "Testing Linux" + assert mock_subprocess_check_output.call_count == 1 + assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"] + + +API_SYSTEM_VERSION_INFO = """ +info { + systemVersion +} +""" + + +def test_graphql_get_system_version_unauthorized( + wrong_auth_client, mock_subprocess_check_output +): + """Test wrong auth""" + response = wrong_auth_client.get( + "/graphql", + json={ + "query": generate_system_query([API_SYSTEM_VERSION_INFO]), + }, + ) + + assert response.status_code == 200 + assert response.json.get("data") is None + + assert mock_subprocess_check_output.call_count == 0 + + +def test_graphql_get_system_version(authorized_client, mock_subprocess_check_output): + """Test get system version""" + response = authorized_client.get( + "/graphql", + json={ + "query": generate_system_query([API_SYSTEM_VERSION_INFO]), + }, + ) + + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["sytem"]["info"]["systemVersion"] == "Testing Linux" + assert mock_subprocess_check_output.call_count == 1 + assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] + + API_GET_DOMAIN_INFO = """ domainInfo { domain @@ -730,9 +789,9 @@ def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ ) -API_REBUILD_SYSTEM_MUTATION = """ -mutation rebuildSystem() { - runSystemRebuild { +API_PULL_SYSTEM_CONFIGURATION_MUTATION = """ +mutation testPullSystemConfiguration() { + pullRepositoryChanges { success message code @@ -741,125 +800,62 @@ mutation 
rebuildSystem() { """ -def test_graphql_system_rebuild_unauthorized(client, mock_subprocess_popen): - """Test system rebuild without authorization""" +def test_graphql_pull_system_configuration_unauthorized(client, mock_subprocess_popen): response = client.post( "/graphql", json={ - "query": API_REBUILD_SYSTEM_MUTATION, + "query": API_PULL_SYSTEM_CONFIGURATION_MUTATION, }, ) + assert response.status_code == 200 assert response.json.get("data") is None assert mock_subprocess_popen.call_count == 0 -def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): - """Test system rebuild""" +def test_graphql_pull_system_configuration( + authorized_client, mock_subprocess_popen, mock_os_chdir +): + current_dir = os.getcwd() response = authorized_client.post( "/graphql", json={ - "query": API_REBUILD_SYSTEM_MUTATION, + "query": API_PULL_SYSTEM_CONFIGURATION_MUTATION, }, ) + assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["runSystemRebuild"]["success"] is True - assert response.json["data"]["runSystemRebuild"]["message"] is not None - assert response.json["data"]["runSystemRebuild"]["code"] == 200 + assert response.json["data"]["pullRepositoryChanges"]["success"] is True + assert response.json["data"]["pullRepositoryChanges"]["message"] is not None + assert response.json["data"]["pullRepositoryChanges"]["code"] == 200 + assert mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == [ - "systemctl", - "start", - "sp-nixos-rebuild.service", - ] + assert mock_subprocess_popen.call_args[0][0] == ["git", "pull"] + assert mock_os_chdir.call_count == 2 + assert mock_os_chdir.call_args_list[0][0][0] == "/etc/nixos" + assert mock_os_chdir.call_args_list[1][0][0] == current_dir -API_UPGRADE_SYSTEM_MUTATION = """ -mutation upgradeSystem() { - runSystemUpgrade { - success - message - code - } -} -""" +def test_graphql_pull_system_broken_repo( + authorized_client, mock_broken_service, mock_os_chdir +): + current_dir = os.getcwd() - -def test_graphql_system_upgrade_unauthorized(client, mock_subprocess_popen): - """Test system upgrade without authorization""" - response = client.post( - "/graphql", - json={ - "query": API_UPGRADE_SYSTEM_MUTATION, - }, - ) - assert response.status_code == 200 - assert response.json.get("data") is None - assert mock_subprocess_popen.call_count == 0 - - -def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): - """Test system upgrade""" response = authorized_client.post( "/graphql", json={ - "query": API_UPGRADE_SYSTEM_MUTATION, + "query": API_PULL_SYSTEM_CONFIGURATION_MUTATION, }, ) + assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["runSystemUpgrade"]["success"] is True - assert response.json["data"]["runSystemUpgrade"]["message"] is not None - assert response.json["data"]["runSystemUpgrade"]["code"] == 200 - assert mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == [ - "systemctl", - "start", - "sp-nixos-upgrade.service", - ] + assert response.json["data"]["pullRepositoryChanges"]["success"] is False + assert response.json["data"]["pullRepositoryChanges"]["message"] is not None + assert response.json["data"]["pullRepositoryChanges"]["code"] == 500 - -API_ROLLBACK_SYSTEM_MUTATION = """ -mutation rollbackSystem() { - runSystemRollback { - success - message - code - } -} -""" - - -def test_graphql_system_rollback_unauthorized(client, mock_subprocess_popen): - 
"""Test system rollback without authorization""" - response = client.post( - "/graphql", - json={ - "query": API_ROLLBACK_SYSTEM_MUTATION, - }, - ) - assert response.status_code == 200 - assert response.json.get("data") is None - assert mock_subprocess_popen.call_count == 0 - - -def test_graphql_system_rollback(authorized_client, mock_subprocess_popen): - """Test system rollback""" - response = authorized_client.post( - "/graphql", - json={ - "query": API_ROLLBACK_SYSTEM_MUTATION, - }, - ) - assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["runSystemRollback"]["success"] is True - assert response.json["data"]["runSystemRollback"]["message"] is not None - assert response.json["data"]["runSystemRollback"]["code"] == 200 - assert mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == [ - "systemctl", - "start", - "sp-nixos-rollback.service", - ] + assert mock_broken_service.call_count == 1 + assert mock_os_chdir.call_count == 2 + assert mock_os_chdir.call_args_list[0][0][0] == "/etc/nixos" + assert mock_os_chdir.call_args_list[1][0][0] == current_dir diff --git a/tests/test_graphql/test_system_nixos_tasks.py b/tests/test_graphql/test_system_nixos_tasks.py new file mode 100644 index 0000000..3778887 --- /dev/null +++ b/tests/test_graphql/test_system_nixos_tasks.py @@ -0,0 +1,231 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import pytest + + +@pytest.fixture +def domain_file(mocker, datadir): + mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain") + return datadir + + +class ProcessMock: + """Mock subprocess.Popen""" + + def __init__(self, args, **kwargs): + self.args = args + self.kwargs = kwargs + + def communicate(): # pylint: disable=no-method-argument + return (b"", None) + + returncode = 0 + + +class BrokenServiceMock(ProcessMock): + """Mock subprocess.Popen for broken service""" + + def communicate(): # pylint: disable=no-method-argument + return (b"Testing error", None) + + returncode = 3 + + +@pytest.fixture +def mock_subprocess_popen(mocker): + mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + return mock + + +@pytest.fixture +def mock_os_chdir(mocker): + mock = mocker.patch("os.chdir", autospec=True) + return mock + + +@pytest.fixture +def mock_subprocess_check_output(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=b"Testing Linux" + ) + return mock + + +API_REBUILD_SYSTEM_MUTATION = """ +mutation rebuildSystem() { + runSystemRebuild { + success + message + code + } +} +""" + + +def test_graphql_system_rebuild_unauthorized(client, mock_subprocess_popen): + """Test system rebuild without authorization""" + response = client.post( + "/graphql", + json={ + "query": API_REBUILD_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): + """Test system rebuild""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_REBUILD_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["runSystemRebuild"]["success"] is True + assert response.json["data"]["runSystemRebuild"]["message"] is not None + assert response.json["data"]["runSystemRebuild"]["code"] == 
200 + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == [ + "systemctl", + "start", + "sp-nixos-rebuild.service", + ] + + +API_UPGRADE_SYSTEM_MUTATION = """ +mutation upgradeSystem() { + runSystemUpgrade { + success + message + code + } +} +""" + + +def test_graphql_system_upgrade_unauthorized(client, mock_subprocess_popen): + """Test system upgrade without authorization""" + response = client.post( + "/graphql", + json={ + "query": API_UPGRADE_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): + """Test system upgrade""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_UPGRADE_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["runSystemUpgrade"]["success"] is True + assert response.json["data"]["runSystemUpgrade"]["message"] is not None + assert response.json["data"]["runSystemUpgrade"]["code"] == 200 + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == [ + "systemctl", + "start", + "sp-nixos-upgrade.service", + ] + + +API_ROLLBACK_SYSTEM_MUTATION = """ +mutation rollbackSystem() { + runSystemRollback { + success + message + code + } +} +""" + + +def test_graphql_system_rollback_unauthorized(client, mock_subprocess_popen): + """Test system rollback without authorization""" + response = client.post( + "/graphql", + json={ + "query": API_ROLLBACK_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_system_rollback(authorized_client, mock_subprocess_popen): + """Test system rollback""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_ROLLBACK_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert response.json["data"]["runSystemRollback"]["success"] is True + assert response.json["data"]["runSystemRollback"]["message"] is not None + assert response.json["data"]["runSystemRollback"]["code"] == 200 + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == [ + "systemctl", + "start", + "sp-nixos-rollback.service", + ] + + +API_REBOOT_SYSTEM_MUTATION = """ +mutation system { + rebootSystem { + success + message + code + } +} +""" + + +def test_graphql_reboot_system_unauthorized(client, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_REBOOT_SYSTEM_MUTATION, + }, + ) + + assert response.status_code == 200 + assert response.json.get("data") is None + + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_reboot_system(authorized_client, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REBOOT_SYSTEM_MUTATION, + }, + ) + + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["rebootSystem"]["success"] is True + assert response.json["data"]["rebootSystem"]["message"] is not None + assert response.json["data"]["rebootSystem"]["code"] == 200 + + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == ["reboot"] From 26f9393d953aac3ae564519af71841e3cd9c8cfa Mon Sep 17 00:00:00 
2001 From: Inex Code Date: Tue, 12 Jul 2022 16:24:29 +0300 Subject: [PATCH 28/39] Implement change system settings Co-authored-by: Detlaff --- .../graphql/mutations/system_mutations.py | 86 +++++++++++++++++++ selfprivacy_api/graphql/schema.py | 3 +- tests/test_graphql/test_system.py | 4 +- 3 files changed, 90 insertions(+), 3 deletions(-) create mode 100644 selfprivacy_api/graphql/mutations/system_mutations.py diff --git a/selfprivacy_api/graphql/mutations/system_mutations.py b/selfprivacy_api/graphql/mutations/system_mutations.py new file mode 100644 index 0000000..d080af3 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/system_mutations.py @@ -0,0 +1,86 @@ +"""System management mutations""" +# pylint: disable=too-few-public-methods +import typing +import pytz +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.mutations.mutation_interface import ( + MutationReturnInterface, +) +from selfprivacy_api.utils import ReadUserData, WriteUserData + + +@strawberry.type +class TimezoneMutationReturn(MutationReturnInterface): + """Return type of the timezone mutation, contains timezone""" + + timezone: typing.Optional[str] + + +@strawberry.type +class AutoUpgradeSettingsMutationReturn(MutationReturnInterface): + """Return type autoUpgrade Settings""" + + enableAutoUpgrade: bool + allowReboot: bool + + +@strawberry.input +class AutoUpgradeSettingsInput: + """Input type for auto upgrade settings""" + + enableAutoUpgrade: typing.Optional[bool] = None + allowReboot: typing.Optional[bool] = None + + +@strawberry.type +class SystemMutations: + """Mutations related to system settings""" + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def change_timezone(self, timezone: str) -> TimezoneMutationReturn: + """Change the timezone of the server. 
Timezone is a tzdatabase name.""" + if timezone not in pytz.all_timezones: + return TimezoneMutationReturn( + success=False, + message="Invalid timezone", + code=400, + timezone=None, + ) + with WriteUserData() as data: + data["timezone"] = timezone + return TimezoneMutationReturn( + success=True, + message="Timezone changed", + code=200, + timezone=timezone, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def change_auto_upgrade_settings( + self, settings: AutoUpgradeSettingsInput + ) -> AutoUpgradeSettingsMutationReturn: + """Change auto upgrade settings of the server.""" + with WriteUserData() as data: + if "autoUpgrade" not in data: + data["autoUpgrade"] = {} + if "enable" not in data["autoUpgrade"]: + data["autoUpgrade"]["enable"] = True + if "allowReboot" not in data["autoUpgrade"]: + data["autoUpgrade"]["allowReboot"] = False + + if settings.enableAutoUpgrade is not None: + data["autoUpgrade"]["enable"] = settings.enableAutoUpgrade + if settings.allowReboot is not None: + data["autoUpgrade"]["allowReboot"] = settings.allowReboot + + auto_upgrade = data["autoUpgrade"]["enable"] + allow_reboot = data["autoUpgrade"]["allowReboot"] + + return AutoUpgradeSettingsMutationReturn( + success=True, + message="Auto-upgrade settings changed", + code=200, + enableAutoUpgrade=auto_upgrade, + allowReboot=allow_reboot, + ) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 5aba9b3..69735a2 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -4,6 +4,7 @@ import typing import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations +from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from selfprivacy_api.graphql.queries.api_queries import Api from selfprivacy_api.graphql.queries.system import System @@ -25,7 +26,7 @@ class Query: @strawberry.type -class Mutation(ApiMutations): +class Mutation(ApiMutations, SystemMutations): """Root schema for mutations""" pass diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index fe76095..d5cf6e6 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -200,7 +200,7 @@ def test_graphql_get_system_version(authorized_client, mock_subprocess_check_out assert response.status_code == 200 assert response.json.get("data") is not None - assert response.json["data"]["sytem"]["info"]["systemVersion"] == "Testing Linux" + assert response.json["data"]["system"]["info"]["systemVersion"] == "Testing Linux" assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] @@ -775,7 +775,7 @@ def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json.get("data") is not None assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 From 5532114668abbd0bd69fec8781a89e3ba8290268 Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 25 Jul 2022 17:08:31 +0300 Subject: [PATCH 29/39] Add volume management --- .../graphql/mutations/storage_mutation.py | 24 +++ selfprivacy_api/graphql/queries/storage.py | 31 +++ selfprivacy_api/graphql/schema.py | 9 +- 
.../services/nextcloud/__init__.py | 96 ++++++++++ .../services/nextcloud/nextcloud.svg | 10 + selfprivacy_api/services/service.py | 80 ++++++++ selfprivacy_api/utils/__init__.py | 2 +- selfprivacy_api/utils/block_devices.py | 176 ++++++++++++++++++ shell.nix | 2 + .../{test_system.py => _test_system.py} | 0 ...s_tasks.py => _test_system_nixos_tasks.py} | 0 11 files changed, 428 insertions(+), 2 deletions(-) create mode 100644 selfprivacy_api/graphql/mutations/storage_mutation.py create mode 100644 selfprivacy_api/graphql/queries/storage.py create mode 100644 selfprivacy_api/services/nextcloud/__init__.py create mode 100644 selfprivacy_api/services/nextcloud/nextcloud.svg create mode 100644 selfprivacy_api/services/service.py create mode 100644 selfprivacy_api/utils/block_devices.py rename tests/test_graphql/{test_system.py => _test_system.py} (100%) rename tests/test_graphql/{test_system_nixos_tasks.py => _test_system_nixos_tasks.py} (100%) diff --git a/selfprivacy_api/graphql/mutations/storage_mutation.py b/selfprivacy_api/graphql/mutations/storage_mutation.py new file mode 100644 index 0000000..97f632e --- /dev/null +++ b/selfprivacy_api/graphql/mutations/storage_mutation.py @@ -0,0 +1,24 @@ +"""Storage devices mutations""" +import typing +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.utils.block_devices import BlockDevices +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericMutationReturn, +) + + +@strawberry.type +class StorageMutations: + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def resize_volume(self, name: str) -> GenericMutationReturn: + """Resize volume""" + volume = BlockDevices().get_block_device(name) + if volume is None: + return GenericMutationReturn( + success=False, code=404, message="Volume not found" + ) + volume.resize() + return GenericMutationReturn( + success=True, code=200, message="Volume resize started" + ) diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py new file mode 100644 index 0000000..0058a20 --- /dev/null +++ b/selfprivacy_api/graphql/queries/storage.py @@ -0,0 +1,31 @@ +"""Storage queries.""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from selfprivacy_api.utils.block_devices import BlockDevices + + +@strawberry.type +class StorageVolume: + total_space: int + free_space: int + used_space: int + root: bool + name: str + + +@strawberry.type +class Storage: + @strawberry.field + def volumes(self) -> typing.List[StorageVolume]: + """Get list of volumes""" + return [ + StorageVolume( + total_space=volume.fssize if volume.fssize is not None else volume.size, + free_space=volume.fsavail, + used_space=volume.fsused, + root=volume.name == "sda1", + name=volume.name, + ) + for volume in BlockDevices().get_block_devices() + ] diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index 69735a2..c4daac3 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -4,9 +4,11 @@ import typing import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations +from selfprivacy_api.graphql.mutations.storage_mutation import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from selfprivacy_api.graphql.queries.api_queries import Api +from selfprivacy_api.graphql.queries.storage import Storage from 
selfprivacy_api.graphql.queries.system import System @@ -24,9 +26,14 @@ class Query: """API access status""" return Api() + @strawberry.field(permission_classes=[IsAuthenticated]) + def storage(self) -> Storage: + """Storage queries""" + return Storage() + @strawberry.type -class Mutation(ApiMutations, SystemMutations): +class Mutation(ApiMutations, SystemMutations, StorageMutations): """Root schema for mutations""" pass diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py new file mode 100644 index 0000000..525f657 --- /dev/null +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -0,0 +1,96 @@ +"""Class representing Nextcloud service.""" +import base64 +import subprocess +import psutil +from selfprivacy_api.services.service import Service, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData + + +class Nextcloud(Service): + """Class representing Nextcloud service.""" + + def get_id(self) -> str: + """Return service id.""" + return "nextcloud" + + def get_display_name(self) -> str: + """Return service display name.""" + return "Nextcloud" + + def get_description(self) -> str: + """Return service description.""" + return "Nextcloud is a cloud storage service that offers a web interface and a desktop client." + + def get_svg_icon(self) -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + with open("selfprivacy_api/services/nextcloud/nextcloud.svg", "rb") as f: + return base64.b64encode(f.read()).decode("utf-8") + + def is_enabled(self) -> bool: + with ReadUserData() as user_data: + return user_data.get("nextcloud", {}).get("enable", False) + + def get_status(self) -> ServiceStatus: + """ + Return Nextcloud status from systemd. + Use command return code to determine status. + + Return code 0 means service is running. + Return code 1 or 2 means service is in error stat. + Return code 3 means service is stopped. + Return code 4 means service is off. 
+ """ + service_status = subprocess.Popen( + ["systemctl", "status", "phpfpm-nextcloud.service"] + ) + service_status.communicate()[0] + if service_status.returncode == 0: + return ServiceStatus.RUNNING + elif service_status.returncode == 1 or service_status.returncode == 2: + return ServiceStatus.ERROR + elif service_status.returncode == 3: + return ServiceStatus.STOPPED + elif service_status.returncode == 4: + return ServiceStatus.OFF + else: + return ServiceStatus.DEGRADED + + def enable(self): + """Enable Nextcloud service.""" + with WriteUserData() as user_data: + if "nextcloud" not in user_data: + user_data["nextcloud"] = {} + user_data["nextcloud"]["enable"] = True + + def disable(self): + """Disable Nextcloud service.""" + with WriteUserData() as user_data: + if "nextcloud" not in user_data: + user_data["nextcloud"] = {} + user_data["nextcloud"]["enable"] = False + + def stop(self): + """Stop Nextcloud service.""" + subprocess.Popen(["systemctl", "stop", "phpfpm-nextcloud.service"]) + + def start(self): + """Start Nextcloud service.""" + subprocess.Popen(["systemctl", "start", "phpfpm-nextcloud.service"]) + + def restart(self): + """Restart Nextcloud service.""" + subprocess.Popen(["systemctl", "restart", "phpfpm-nextcloud.service"]) + + def get_configuration(self) -> dict: + """Return Nextcloud configuration.""" + return {} + + def set_configuration(self, config_items): + return super().set_configuration(config_items) + + def get_logs(self): + """Return Nextcloud logs.""" + return "" + + def get_storage_usage(self): + return psutil.disk_usage("/var/lib/nextcloud").used diff --git a/selfprivacy_api/services/nextcloud/nextcloud.svg b/selfprivacy_api/services/nextcloud/nextcloud.svg new file mode 100644 index 0000000..d7dbcb5 --- /dev/null +++ b/selfprivacy_api/services/nextcloud/nextcloud.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py new file mode 100644 index 0000000..971358b --- /dev/null +++ b/selfprivacy_api/services/service.py @@ -0,0 +1,80 @@ +"""Abstract class for a service running on a server""" +from abc import ABC, abstractmethod +from enum import Enum + + +class ServiceStatus(Enum): + """Enum for service status""" + + RUNNING = "RUNNING" + DEGRADED = "DEGRADED" + ERROR = "ERROR" + STOPPED = "STOPPED" + OFF = "OFF" + + +class Service(ABC): + """ + Service here is some software that is hosted on the server and + can be installed, configured and used by a user. 
+ """ + + @abstractmethod + def get_id(self) -> str: + pass + + @abstractmethod + def get_display_name(self) -> str: + pass + + @abstractmethod + def get_description(self) -> str: + pass + + @abstractmethod + def get_svg_icon(self) -> str: + pass + + @abstractmethod + def is_enabled(self) -> bool: + pass + + @abstractmethod + def get_status(self) -> ServiceStatus: + pass + + @abstractmethod + def enable(self): + pass + + @abstractmethod + def disable(self): + pass + + @abstractmethod + def stop(self): + pass + + @abstractmethod + def start(self): + pass + + @abstractmethod + def restart(self): + pass + + @abstractmethod + def get_configuration(self): + pass + + @abstractmethod + def set_configuration(self, config_items): + pass + + @abstractmethod + def get_logs(self): + pass + + @abstractmethod + def get_storage_usage(self): + pass diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index c80dd99..1adb189 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -65,7 +65,7 @@ class ReadUserData(object): portalocker.lock(self.userdata_file, portalocker.LOCK_SH) self.data = json.load(self.userdata_file) - def __enter__(self): + def __enter__(self) -> dict: return self.data def __exit__(self, *args): diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py new file mode 100644 index 0000000..83937fd --- /dev/null +++ b/selfprivacy_api/utils/block_devices.py @@ -0,0 +1,176 @@ +"""Wrapper for block device functions.""" +import subprocess +import json +import typing + + +def get_block_device(device_name): + """ + Return a block device by name. + """ + lsblk_output = subprocess.check_output( + [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE", + device_name, + ] + ) + lsblk_output = lsblk_output.decode("utf-8") + lsblk_output = json.loads(lsblk_output) + return lsblk_output["blockdevices"] + + +def resize_block_device(block_device) -> bool: + """ + Resize a block device. Return True if successful. + """ + resize_command = ["resize2fs", block_device] + resize_process = subprocess.Popen(resize_command, shell=False) + resize_process.communicate() + return resize_process.returncode == 0 + + +class BlockDevice: + """ + A block device. + """ + + def __init__(self, block_device): + self.name = block_device["name"] + self.path = block_device["path"] + self.fsavail = block_device["fsavail"] + self.fssize = block_device["fssize"] + self.fstype = block_device["fstype"] + self.fsused = block_device["fsused"] + self.mountpoint = block_device["mountpoint"] + self.label = block_device["label"] + self.uuid = block_device["uuid"] + self.size = block_device["size"] + self.locked = False + + def __str__(self): + return self.name + + def __repr__(self): + return f"" + + def __eq__(self, other): + return self.name == other.name + + def __hash__(self): + return hash(self.name) + + def stats(self) -> typing.Dict[str, typing.Any]: + """ + Update current data and return a dictionary of stats. 
+ """ + device = get_block_device(self.name) + self.fsavail = device["fsavail"] + self.fssize = device["fssize"] + self.fstype = device["fstype"] + self.fsused = device["fsused"] + self.mountpoint = device["mountpoint"] + self.label = device["label"] + self.uuid = device["uuid"] + self.size = device["size"] + + return { + "name": self.name, + "path": self.path, + "fsavail": self.fsavail, + "fssize": self.fssize, + "fstype": self.fstype, + "fsused": self.fsused, + "mountpoint": self.mountpoint, + "label": self.label, + "uuid": self.uuid, + "size": self.size, + } + + def resize(self): + """ + Resize the block device. + """ + if not self.locked: + self.locked = True + resize_block_device(self.path) + self.locked = False + + +class BlockDevices: + """Singleton holding all Block devices""" + + _instance = None + + def __new__(cls, *args, **kwargs): + if not cls._instance: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self): + self.block_devices = [] + self.update() + + def update(self) -> None: + """ + Update the list of block devices. + """ + devices = [] + lsblk_output = subprocess.check_output( + [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE", + ] + ) + lsblk_output = lsblk_output.decode("utf-8") + lsblk_output = json.loads(lsblk_output) + for device in lsblk_output["blockdevices"]: + if device["fstype"] is None: + if "children" in device: + for child in device["children"]: + if child["fstype"] == "ext4": + device = child + break + devices.append(device) + # Add new devices and delete non-existent devices + for device in devices: + if device["name"] not in [ + block_device.name for block_device in self.block_devices + ]: + self.block_devices.append(BlockDevice(device)) + for block_device in self.block_devices: + if block_device.name not in [device["name"] for device in devices]: + self.block_devices.remove(block_device) + + def get_block_device(self, name: str) -> typing.Optional[BlockDevice]: + """ + Return a block device by name. + """ + for block_device in self.block_devices: + if block_device.name == name: + return block_device + return None + + def get_block_devices(self) -> typing.List[BlockDevice]: + """ + Return a list of block devices. + """ + return self.block_devices + + def get_block_devices_by_mountpoint( + self, mountpoint: str + ) -> typing.List[BlockDevice]: + """ + Return a list of block devices with a given mountpoint. 
+ """ + block_devices = [] + for block_device in self.block_devices: + if block_device.mountpoint == mountpoint: + block_devices.append(block_device) + return block_devices diff --git a/shell.nix b/shell.nix index 2735de1..1f5b25c 100644 --- a/shell.nix +++ b/shell.nix @@ -19,6 +19,8 @@ let pydantic typing-extensions flask-cors + psutil + black (buildPythonPackage rec { pname = "strawberry-graphql"; version = "0.114.5"; diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/_test_system.py similarity index 100% rename from tests/test_graphql/test_system.py rename to tests/test_graphql/_test_system.py diff --git a/tests/test_graphql/test_system_nixos_tasks.py b/tests/test_graphql/_test_system_nixos_tasks.py similarity index 100% rename from tests/test_graphql/test_system_nixos_tasks.py rename to tests/test_graphql/_test_system_nixos_tasks.py From a6fe72608fc1cc7a58f9c0f83fc39e6965378261 Mon Sep 17 00:00:00 2001 From: inexcode Date: Mon, 25 Jul 2022 17:17:57 +0300 Subject: [PATCH 30/39] Bytes from int to str --- selfprivacy_api/graphql/queries/storage.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py index 0058a20..31ef354 100644 --- a/selfprivacy_api/graphql/queries/storage.py +++ b/selfprivacy_api/graphql/queries/storage.py @@ -7,9 +7,9 @@ from selfprivacy_api.utils.block_devices import BlockDevices @strawberry.type class StorageVolume: - total_space: int - free_space: int - used_space: int + total_space: str + free_space: str + used_space: str root: bool name: str @@ -21,9 +21,9 @@ class Storage: """Get list of volumes""" return [ StorageVolume( - total_space=volume.fssize if volume.fssize is not None else volume.size, - free_space=volume.fsavail, - used_space=volume.fsused, + total_space=str(volume.fssize) if volume.fssize is not None else str(volume.size), + free_space=str(volume.fsavail), + used_space=str(volume.fsused), root=volume.name == "sda1", name=volume.name, ) From e3245cd26aa9fd79c60259a4558dc2d456102b03 Mon Sep 17 00:00:00 2001 From: inexcode Date: Tue, 26 Jul 2022 15:33:44 +0300 Subject: [PATCH 31/39] Add mount volume migration --- selfprivacy_api/migrations/__init__.py | 7 ++- .../migrate_to_selfprivacy_channel.py | 4 -- selfprivacy_api/migrations/mount_volume.py | 48 +++++++++++++++++++ 3 files changed, 51 insertions(+), 8 deletions(-) create mode 100644 selfprivacy_api/migrations/mount_volume.py diff --git a/selfprivacy_api/migrations/__init__.py b/selfprivacy_api/migrations/__init__.py index 4eeebab..ea78e4c 100644 --- a/selfprivacy_api/migrations/__init__.py +++ b/selfprivacy_api/migrations/__init__.py @@ -11,11 +11,10 @@ Adding DISABLE_ALL to that array disables the migrations module entirely. 
from selfprivacy_api.utils import ReadUserData from selfprivacy_api.migrations.fix_nixos_config_branch import FixNixosConfigBranch from selfprivacy_api.migrations.create_tokens_json import CreateTokensJson -from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import ( - MigrateToSelfprivacyChannel, -) +from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import MigrateToSelfprivacyChannel +from selfprivacy_api.migrations.mount_volume import MountVolume -migrations = [FixNixosConfigBranch(), CreateTokensJson(), MigrateToSelfprivacyChannel()] +migrations = [FixNixosConfigBranch(), CreateTokensJson(), MigrateToSelfprivacyChannel(), MountVolume()] def run_migrations(): diff --git a/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py b/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py index 5f98f39..9bfd670 100644 --- a/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py +++ b/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py @@ -15,20 +15,16 @@ class MigrateToSelfprivacyChannel(Migration): def is_migration_needed(self): try: - print("Checking if migration is needed") output = subprocess.check_output( ["nix-channel", "--list"], start_new_session=True ) output = output.decode("utf-8") - print(output) first_line = output.split("\n", maxsplit=1)[0] - print(first_line) return first_line.startswith("nixos") and ( first_line.endswith("nixos-21.11") or first_line.endswith("nixos-21.05") ) except subprocess.CalledProcessError: return False - return False def migrate(self): # Change the channel and update them. diff --git a/selfprivacy_api/migrations/mount_volume.py b/selfprivacy_api/migrations/mount_volume.py new file mode 100644 index 0000000..1334336 --- /dev/null +++ b/selfprivacy_api/migrations/mount_volume.py @@ -0,0 +1,48 @@ +import os +import subprocess + +from selfprivacy_api.migrations.migration import Migration +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.block_devices import BlockDevices + +class MountVolume(Migration): + """Mount volume.""" + + def get_migration_name(self): + return "mount_volume" + + def get_migration_description(self): + return "Mount volume if it is not mounted." 
+ + def is_migration_needed(self): + try: + with ReadUserData() as userdata: + return "volumes" not in userdata + except Exception as e: + print(e) + return False + + def migrate(self): + # Get info about existing volumes + # Write info about volumes to userdata.json + try: + volumes = BlockDevices().get_block_devices() + # If there is an unmounted volume sdb, + # Write it to userdata.json + is_there_a_volume = False + for volume in volumes: + if volume.name == "sdb": + is_there_a_volume = True + break + with WriteUserData() as userdata: + userdata["volumes"] = [] + if is_there_a_volume: + userdata["volumes"].append({ + "device": "/etc/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4", + }) + print("Done") + except Exception as e: + print(e) + print("Error mounting volume") From 1f64a76723d98786da2b1456d4c31633e1a9077f Mon Sep 17 00:00:00 2001 From: inexcode Date: Tue, 26 Jul 2022 15:52:28 +0300 Subject: [PATCH 32/39] Fix typo --- selfprivacy_api/migrations/mount_volume.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/selfprivacy_api/migrations/mount_volume.py b/selfprivacy_api/migrations/mount_volume.py index 1334336..368049c 100644 --- a/selfprivacy_api/migrations/mount_volume.py +++ b/selfprivacy_api/migrations/mount_volume.py @@ -38,7 +38,7 @@ class MountVolume(Migration): userdata["volumes"] = [] if is_there_a_volume: userdata["volumes"].append({ - "device": "/etc/sdb", + "device": "/dev/sdb", "mountPoint": "/volumes/sdb", "fsType": "ext4", }) From 67c8486c9bd644e15f27c7c33084fb273c6ce1cc Mon Sep 17 00:00:00 2001 From: Inex Code Date: Sat, 30 Jul 2022 17:48:33 +0300 Subject: [PATCH 33/39] Add more fields to GraphQL storage query --- .../graphql/mutations/storage_mutation.py | 38 ++++++++++++++ selfprivacy_api/graphql/queries/storage.py | 14 ++++- selfprivacy_api/migrations/__init__.py | 11 +++- selfprivacy_api/migrations/mount_volume.py | 13 +++-- selfprivacy_api/services/service.py | 13 +++++ selfprivacy_api/utils/block_devices.py | 52 ++++++++++++++++++- 6 files changed, 131 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/storage_mutation.py b/selfprivacy_api/graphql/mutations/storage_mutation.py index 97f632e..ff69aea 100644 --- a/selfprivacy_api/graphql/mutations/storage_mutation.py +++ b/selfprivacy_api/graphql/mutations/storage_mutation.py @@ -22,3 +22,41 @@ class StorageMutations: return GenericMutationReturn( success=True, code=200, message="Volume resize started" ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def mount_volume(self, name: str) -> GenericMutationReturn: + """Mount volume""" + volume = BlockDevices().get_block_device(name) + if volume is None: + return GenericMutationReturn( + success=False, code=404, message="Volume not found" + ) + is_success = volume.mount() + if is_success: + return GenericMutationReturn( + success=True, + code=200, + message="Volume mounted, rebuild the system to apply changes", + ) + return GenericMutationReturn( + success=False, code=409, message="Volume not mounted (already mounted?)" + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def unmount_volume(self, name: str) -> GenericMutationReturn: + """Unmount volume""" + volume = BlockDevices().get_block_device(name) + if volume is None: + return GenericMutationReturn( + success=False, code=404, message="Volume not found" + ) + is_success = volume.unmount() + if is_success: + return GenericMutationReturn( + success=True, + code=200, + message="Volume unmounted, rebuild the system to apply 
changes", + ) + return GenericMutationReturn( + success=False, code=409, message="Volume not unmounted (already unmounted?)" + ) diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py index 31ef354..e645456 100644 --- a/selfprivacy_api/graphql/queries/storage.py +++ b/selfprivacy_api/graphql/queries/storage.py @@ -7,25 +7,37 @@ from selfprivacy_api.utils.block_devices import BlockDevices @strawberry.type class StorageVolume: + """Stats and basic info about a volume or a system disk.""" + total_space: str free_space: str used_space: str root: bool name: str + model: str + serial: str + type: str @strawberry.type class Storage: + """GraphQL queries to get storage information.""" + @strawberry.field def volumes(self) -> typing.List[StorageVolume]: """Get list of volumes""" return [ StorageVolume( - total_space=str(volume.fssize) if volume.fssize is not None else str(volume.size), + total_space=str(volume.fssize) + if volume.fssize is not None + else str(volume.size), free_space=str(volume.fsavail), used_space=str(volume.fsused), root=volume.name == "sda1", name=volume.name, + model=volume.model, + serial=volume.serial, + type=volume.type, ) for volume in BlockDevices().get_block_devices() ] diff --git a/selfprivacy_api/migrations/__init__.py b/selfprivacy_api/migrations/__init__.py index ea78e4c..2149e69 100644 --- a/selfprivacy_api/migrations/__init__.py +++ b/selfprivacy_api/migrations/__init__.py @@ -11,10 +11,17 @@ Adding DISABLE_ALL to that array disables the migrations module entirely. from selfprivacy_api.utils import ReadUserData from selfprivacy_api.migrations.fix_nixos_config_branch import FixNixosConfigBranch from selfprivacy_api.migrations.create_tokens_json import CreateTokensJson -from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import MigrateToSelfprivacyChannel +from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import ( + MigrateToSelfprivacyChannel, +) from selfprivacy_api.migrations.mount_volume import MountVolume -migrations = [FixNixosConfigBranch(), CreateTokensJson(), MigrateToSelfprivacyChannel(), MountVolume()] +migrations = [ + FixNixosConfigBranch(), + CreateTokensJson(), + MigrateToSelfprivacyChannel(), + MountVolume(), +] def run_migrations(): diff --git a/selfprivacy_api/migrations/mount_volume.py b/selfprivacy_api/migrations/mount_volume.py index 368049c..27fba83 100644 --- a/selfprivacy_api/migrations/mount_volume.py +++ b/selfprivacy_api/migrations/mount_volume.py @@ -5,6 +5,7 @@ from selfprivacy_api.migrations.migration import Migration from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.utils.block_devices import BlockDevices + class MountVolume(Migration): """Mount volume.""" @@ -37,11 +38,13 @@ class MountVolume(Migration): with WriteUserData() as userdata: userdata["volumes"] = [] if is_there_a_volume: - userdata["volumes"].append({ - "device": "/dev/sdb", - "mountPoint": "/volumes/sdb", - "fsType": "ext4", - }) + userdata["volumes"].append( + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4", + } + ) print("Done") except Exception as e: print(e) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 971358b..a0e6ae6 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -1,6 +1,7 @@ """Abstract class for a service running on a server""" from abc import ABC, abstractmethod from enum import Enum +import typing class ServiceStatus(Enum): @@ -13,6 +14,14 
@@ class ServiceStatus(Enum): OFF = "OFF" +class ServiceDnsRecord: + type: str + name: str + content: str + ttl: int + priority: typing.Optional[int] + + class Service(ABC): """ Service here is some software that is hosted on the server and @@ -78,3 +87,7 @@ class Service(ABC): @abstractmethod def get_storage_usage(self): pass + + @abstractmethod + def get_dns_records(self) -> typing.List[ServiceDnsRecord]: + pass diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index 83937fd..e6adddc 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -3,6 +3,8 @@ import subprocess import json import typing +from selfprivacy_api.utils import WriteUserData + def get_block_device(device_name): """ @@ -14,7 +16,7 @@ def get_block_device(device_name): "-J", "-b", "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE, MODEL,SERIAL,TYPE", device_name, ] ) @@ -49,6 +51,9 @@ class BlockDevice: self.label = block_device["label"] self.uuid = block_device["uuid"] self.size = block_device["size"] + self.model = block_device["model"] + self.serial = block_device["serial"] + self.type = block_device["type"] self.locked = False def __str__(self): @@ -76,6 +81,9 @@ class BlockDevice: self.label = device["label"] self.uuid = device["uuid"] self.size = device["size"] + self.model = device["model"] + self.serial = device["serial"] + self.type = device["type"] return { "name": self.name, @@ -88,6 +96,9 @@ class BlockDevice: "label": self.label, "uuid": self.uuid, "size": self.size, + "model": self.model, + "serial": self.serial, + "type": self.type, } def resize(self): @@ -99,6 +110,40 @@ class BlockDevice: resize_block_device(self.path) self.locked = False + def mount(self) -> bool: + """ + Mount the block device. + """ + with WriteUserData() as user_data: + if "volumes" not in user_data: + user_data["volumes"] = [] + # Check if the volume is already mounted + for volume in user_data["volumes"]: + if volume["device"] == self.path: + return False + user_data["volumes"].append( + { + "device": self.path, + "mountPoint": f"/volumes/{self.name}", + "fsType": self.fstype, + } + ) + return True + + def unmount(self) -> bool: + """ + Unmount the block device. 
+ """ + with WriteUserData() as user_data: + if "volumes" not in user_data: + user_data["volumes"] = [] + # Check if the volume is already mounted + for volume in user_data["volumes"]: + if volume["device"] == self.path: + user_data["volumes"].remove(volume) + return True + return False + class BlockDevices: """Singleton holding all Block devices""" @@ -125,12 +170,15 @@ class BlockDevices: "-J", "-b", "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", ] ) lsblk_output = lsblk_output.decode("utf-8") lsblk_output = json.loads(lsblk_output) for device in lsblk_output["blockdevices"]: + # Ignore devices with type "rom" + if device["type"] == "rom": + continue if device["fstype"] is None: if "children" in device: for child in device["children"]: From 8ea6548710577fd4c6bc03808f26b42dba9718e2 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Sat, 30 Jul 2022 18:01:51 +0300 Subject: [PATCH 34/39] Fix typing --- selfprivacy_api/graphql/queries/storage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py index e645456..6315b26 100644 --- a/selfprivacy_api/graphql/queries/storage.py +++ b/selfprivacy_api/graphql/queries/storage.py @@ -14,8 +14,8 @@ class StorageVolume: used_space: str root: bool name: str - model: str - serial: str + model: typing.Optional[str] + serial: typing.Optional[str] type: str From bec99f29ecf48ab5243026ff2ccb3a7db5f98b22 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Sat, 30 Jul 2022 18:24:21 +0300 Subject: [PATCH 35/39] Add a jobs singleton --- selfprivacy_api/jobs/__init__.py | 184 +++++++++++++++++++++++++++++++ 1 file changed, 184 insertions(+) create mode 100644 selfprivacy_api/jobs/__init__.py diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py new file mode 100644 index 0000000..a467583 --- /dev/null +++ b/selfprivacy_api/jobs/__init__.py @@ -0,0 +1,184 @@ +""" +Jobs controller. It handles the jobs that are created by the user. +This is a singleton class holding the jobs list. +Jobs can be added and removed. +A single job can be updated. +A job is a dictionary with the following keys: + - id: unique identifier of the job + - name: name of the job + - description: description of the job + - status: status of the job + - created_at: date of creation of the job + - updated_at: date of last update of the job + - finished_at: date of finish of the job + - error: error message if the job failed + - result: result of the job +""" +import typing +import datetime +import json +import os +import time +import uuid +from enum import Enum + + +class JobStatus(Enum): + """ + Status of a job. + """ + + CREATED = "CREATED" + RUNNING = "RUNNING" + FINISHED = "FINISHED" + ERROR = "ERROR" + + +class Job: + """ + Job class. + """ + + def __init__( + self, + name: str, + description: str, + status: JobStatus, + created_at: datetime.datetime, + updated_at: datetime.datetime, + finished_at: typing.Optional[datetime.datetime], + error: typing.Optional[str], + result: typing.Optional[str], + ): + self.id = str(uuid.uuid4()) + self.name = name + self.description = description + self.status = status + self.created_at = created_at + self.updated_at = updated_at + self.finished_at = finished_at + self.error = error + self.result = result + + def to_dict(self) -> dict: + """ + Convert the job to a dictionary. 
+ """ + return { + "id": self.id, + "name": self.name, + "description": self.description, + "status": self.status, + "created_at": self.created_at, + "updated_at": self.updated_at, + "finished_at": self.finished_at, + "error": self.error, + "result": self.result, + } + + def to_json(self) -> str: + """ + Convert the job to a JSON string. + """ + return json.dumps(self.to_dict()) + + def __str__(self) -> str: + """ + Convert the job to a string. + """ + return self.to_json() + + def __repr__(self) -> str: + """ + Convert the job to a string. + """ + return self.to_json() + + +class Jobs: + """ + Jobs class. + """ + + __instance = None + + @staticmethod + def get_instance(): + """ + Singleton method. + """ + if Jobs.__instance is None: + Jobs() + return Jobs.__instance + + def __init__(self): + """ + Initialize the jobs list. + """ + if Jobs.__instance is not None: + raise Exception("This class is a singleton!") + else: + Jobs.__instance = self + self.jobs = [] + + def add( + self, name: str, description: str, status: JobStatus = JobStatus.CREATED + ) -> Job: + """ + Add a job to the jobs list. + """ + job = Job( + name=name, + description=description, + status=status, + created_at=datetime.datetime.now(), + updated_at=datetime.datetime.now(), + finished_at=None, + error=None, + result=None, + ) + self.jobs.append(job) + return job + + def remove(self, job: Job) -> None: + """ + Remove a job from the jobs list. + """ + self.jobs.remove(job) + + def update( + self, + job: Job, + name: typing.Optional[str], + description: typing.Optional[str], + status: JobStatus, + error: typing.Optional[str], + result: typing.Optional[str], + ) -> Job: + """ + Update a job in the jobs list. + """ + if name is not None: + job.name = name + if description is not None: + job.description = description + job.status = status + job.updated_at = datetime.datetime.now() + job.error = error + job.result = result + return job + + def get_job(self, id: str) -> typing.Optional[Job]: + """ + Get a job from the jobs list. + """ + for job in self.jobs: + if job.id == id: + return job + return None + + def get_jobs(self) -> list: + """ + Get the jobs list. + """ + return self.jobs From 5be240d357f7be6fd7738b7b869b2cf2f1fd4f31 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Mon, 1 Aug 2022 13:28:12 +0300 Subject: [PATCH 36/39] Update Strawberry and backport graphql-core to Nixos 21.11 --- shell.nix | 24 +++++++++++++++---- strawberry-graphql.patch | 51 ++++++++++++++++++++++++++++++++-------- 2 files changed, 61 insertions(+), 14 deletions(-) diff --git a/shell.nix b/shell.nix index 1f5b25c..e754a11 100644 --- a/shell.nix +++ b/shell.nix @@ -1,4 +1,4 @@ -{ pkgs ? import {} }: +{ pkgs ? 
import { } }: let sp-python = pkgs.python39.withPackages (p: with p; [ flask @@ -23,14 +23,13 @@ let black (buildPythonPackage rec { pname = "strawberry-graphql"; - version = "0.114.5"; + version = "0.123.0"; format = "pyproject"; patches = [ ./strawberry-graphql.patch ]; propagatedBuildInputs = [ typing-extensions - graphql-core python-multipart python-dateutil flask @@ -38,10 +37,27 @@ let pygments poetry flask-cors + (buildPythonPackage rec { + pname = "graphql-core"; + version = "3.2.0"; + format = "setuptools"; + src = fetchPypi { + inherit pname version; + sha256 = "sha256-huKgvgCL/eGe94OI3opyWh2UKpGQykMcJKYIN5c4A84="; + }; + checkInputs = [ + pytest-asyncio + pytest-benchmark + pytestCheckHook + ]; + pythonImportsCheck = [ + "graphql" + ]; + }) ]; src = fetchPypi { inherit pname version; - sha256 = "b6e007281cf29a66eeba66a512744853d8aa53b4ca2525befb6f350bb7b24df6"; + sha256 = "KsmZ5Xv8tUg6yBxieAEtvoKoRG60VS+iVGV0X6oCExo="; }; }) ]); diff --git a/strawberry-graphql.patch b/strawberry-graphql.patch index 3608342..a731522 100644 --- a/strawberry-graphql.patch +++ b/strawberry-graphql.patch @@ -1,8 +1,8 @@ diff --git a/pyproject.toml b/pyproject.toml -index 3283fce..89d3e8c 100644 +index 0cbf2ef..7736e92 100644 --- a/pyproject.toml +++ b/pyproject.toml -@@ -45,7 +45,6 @@ python-multipart = "^0.0.5" +@@ -51,7 +51,6 @@ python-multipart = "^0.0.5" sanic = {version = ">=20.12.2,<22.0.0", optional = true} aiohttp = {version = "^3.7.4.post0", optional = true} fastapi = {version = ">=0.65.2", optional = true} @@ -10,8 +10,38 @@ index 3283fce..89d3e8c 100644 [tool.poetry.dev-dependencies] pytest = "^7.1" +diff --git a/strawberry/directive.py b/strawberry/directive.py +index 491e390..26ba345 100644 +--- a/strawberry/directive.py ++++ b/strawberry/directive.py +@@ -1,10 +1,10 @@ + from __future__ import annotations + + import dataclasses ++from functools import cached_property + import inspect + from typing import Any, Callable, List, Optional, TypeVar + +-from backports.cached_property import cached_property + from typing_extensions import Annotated + + from graphql import DirectiveLocation +diff --git a/strawberry/extensions/tracing/datadog.py b/strawberry/extensions/tracing/datadog.py +index 01fba20..7c06950 100644 +--- a/strawberry/extensions/tracing/datadog.py ++++ b/strawberry/extensions/tracing/datadog.py +@@ -1,8 +1,8 @@ + import hashlib ++from functools import cached_property + from inspect import isawaitable + from typing import Optional + +-from backports.cached_property import cached_property + from ddtrace import tracer + + from strawberry.extensions import Extension diff --git a/strawberry/field.py b/strawberry/field.py -index 4e7ee4b..06c2044 100644 +index 80ed12a..f1bf2e9 100644 --- a/strawberry/field.py +++ b/strawberry/field.py @@ -1,5 +1,6 @@ @@ -30,24 +60,25 @@ index 4e7ee4b..06c2044 100644 from strawberry.annotation import StrawberryAnnotation diff --git a/strawberry/types/fields/resolver.py b/strawberry/types/fields/resolver.py -index 0409233..f4fbe9a 100644 +index c5b3edd..f4112ce 100644 --- a/strawberry/types/fields/resolver.py +++ b/strawberry/types/fields/resolver.py -@@ -1,13 +1,12 @@ +@@ -1,6 +1,7 @@ from __future__ import annotations as _ import builtins +from functools import cached_property import inspect import sys - from inspect import isasyncgenfunction, iscoroutinefunction - from typing import Callable, Dict, Generic, List, Mapping, Optional, TypeVar, Union + import warnings +@@ -22,7 +23,6 @@ from typing import ( # type: ignore[attr-defined] + _eval_type, + ) 
-from backports.cached_property import cached_property -- + from typing_extensions import Annotated, Protocol, get_args, get_origin + from strawberry.annotation import StrawberryAnnotation - from strawberry.arguments import StrawberryArgument - from strawberry.exceptions import MissingArgumentsAnnotationsError diff --git a/strawberry/types/info.py b/strawberry/types/info.py index a172c04..475a3ee 100644 --- a/strawberry/types/info.py From 337cf298845351d152337f11a1219837bf87aa7f Mon Sep 17 00:00:00 2001 From: def Date: Mon, 1 Aug 2022 13:40:40 +0300 Subject: [PATCH 37/39] Add GraphQJ user and ssh management (#12) Co-authored-by: Inex Code Reviewed-on: https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/pulls/12 Co-authored-by: def Co-committed-by: def --- .pylintrc | 2 +- selfprivacy_api/app.py | 1 - .../graphql/common_types/__init__.py | 0 selfprivacy_api/graphql/common_types/user.py | 78 ++ .../graphql/mutations/ssh_mutations.py | 54 ++ .../graphql/mutations/ssh_utils.py | 74 ++ .../graphql/mutations/system_mutations.py | 2 +- .../graphql/mutations/users_mutations.py | 65 ++ .../graphql/mutations/users_utils.py | 111 +++ selfprivacy_api/graphql/queries/users.py | 38 + selfprivacy_api/graphql/schema.py | 19 +- selfprivacy_api/utils/__init__.py | 14 + tests/common.py | 4 + tests/test_graphql/_test_system.py | 8 +- tests/test_graphql/test_api.py | 1 - tests/test_graphql/test_api_recovery.py | 3 - tests/test_graphql/test_ssh.py | 353 +++++++++ tests/test_graphql/test_ssh/some_users.json | 71 ++ tests/test_graphql/test_users.py | 733 ++++++++++++++++++ tests/test_graphql/test_users/no_users.json | 54 ++ tests/test_graphql/test_users/one_user.json | 61 ++ tests/test_graphql/test_users/some_users.json | 71 ++ tests/test_graphql/test_users/undefined.json | 52 ++ 23 files changed, 1855 insertions(+), 14 deletions(-) create mode 100644 selfprivacy_api/graphql/common_types/__init__.py create mode 100644 selfprivacy_api/graphql/common_types/user.py create mode 100644 selfprivacy_api/graphql/mutations/ssh_mutations.py create mode 100644 selfprivacy_api/graphql/mutations/ssh_utils.py create mode 100644 selfprivacy_api/graphql/mutations/users_mutations.py create mode 100644 selfprivacy_api/graphql/mutations/users_utils.py create mode 100644 selfprivacy_api/graphql/queries/users.py create mode 100644 tests/test_graphql/test_ssh.py create mode 100644 tests/test_graphql/test_ssh/some_users.json create mode 100644 tests/test_graphql/test_users.py create mode 100644 tests/test_graphql/test_users/no_users.json create mode 100644 tests/test_graphql/test_users/one_user.json create mode 100644 tests/test_graphql/test_users/some_users.json create mode 100644 tests/test_graphql/test_users/undefined.json diff --git a/.pylintrc b/.pylintrc index 6a584c2..c6d73d8 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,2 +1,2 @@ [MASTER] -init-hook='import sys; sys.path.append("/path/to/root")' +init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))" diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 77281e8..15142f0 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -53,7 +53,6 @@ def create_app(test_config=None): pass elif request.path.startswith("/auth/recovery_token/use"): pass - # TODO: REMOVE THIS elif request.path.startswith("/graphql"): pass else: diff --git a/selfprivacy_api/graphql/common_types/__init__.py b/selfprivacy_api/graphql/common_types/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/selfprivacy_api/graphql/common_types/user.py b/selfprivacy_api/graphql/common_types/user.py new file mode 100644 index 0000000..8cc5f2c --- /dev/null +++ b/selfprivacy_api/graphql/common_types/user.py @@ -0,0 +1,78 @@ +import typing +from enum import Enum +import strawberry + +from selfprivacy_api.utils import ReadUserData +from selfprivacy_api.graphql.mutations.mutation_interface import ( + MutationReturnInterface, +) + + +@strawberry.enum +class UserType(Enum): + NORMAL = "NORMAL" + PRIMARY = "PRIMARY" + ROOT = "ROOT" + + +@strawberry.type +class User: + + user_type: UserType + username: str + # userHomeFolderspace: UserHomeFolderUsage + ssh_keys: typing.List[str] = strawberry.field(default_factory=list) + + +@strawberry.type +class UserMutationReturn(MutationReturnInterface): + """Return type for user mutation""" + + user: typing.Optional[User] + + +def ensure_ssh_and_users_fields_exist(data): + if "ssh" not in data: + data["ssh"] = [] + data["ssh"]["rootKeys"] = [] + + elif data["ssh"].get("rootKeys") is None: + data["ssh"]["rootKeys"] = [] + + if "sshKeys" not in data: + data["sshKeys"] = [] + + if "users" not in data: + data["users"] = [] + + +def get_user_by_username(username: str) -> typing.Optional[User]: + with ReadUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == "root": + return User( + user_type=UserType.ROOT, + username="root", + ssh_keys=data["ssh"]["rootKeys"], + ) + + if username == data["username"]: + return User( + user_type=UserType.PRIMARY, + username=username, + ssh_keys=data["sshKeys"], + ) + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + + return User( + user_type=UserType.NORMAL, + username=username, + ssh_keys=user["sshKeys"], + ) + + return None diff --git a/selfprivacy_api/graphql/mutations/ssh_mutations.py b/selfprivacy_api/graphql/mutations/ssh_mutations.py new file mode 100644 index 0000000..b30f474 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/ssh_mutations.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python3 +"""Users management module""" +# pylint: disable=too-few-public-methods + +import strawberry + +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.mutations.ssh_utils import ( + create_ssh_key, + remove_ssh_key, +) +from selfprivacy_api.graphql.common_types.user import ( + UserMutationReturn, + get_user_by_username, +) + + +@strawberry.input +class SshMutationInput: + """Input type for ssh mutation""" + + username: str + ssh_key: str + + +@strawberry.type +class SshMutations: + """Mutations ssh""" + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def add_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: + """Add a new ssh key""" + + success, message, code = create_ssh_key(ssh_input.username, ssh_input.ssh_key) + + return UserMutationReturn( + success=success, + message=message, + code=code, + user=get_user_by_username(ssh_input.username), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def remove_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: + """Remove ssh key from user""" + + success, message, code = remove_ssh_key(ssh_input.username, ssh_input.ssh_key) + + return UserMutationReturn( + success=success, + message=message, + code=code, + user=get_user_by_username(ssh_input.username), + ) diff --git a/selfprivacy_api/graphql/mutations/ssh_utils.py b/selfprivacy_api/graphql/mutations/ssh_utils.py new file mode 100644 index 0000000..3dbc152 --- /dev/null 
+++ b/selfprivacy_api/graphql/mutations/ssh_utils.py @@ -0,0 +1,74 @@ +from selfprivacy_api.graphql.common_types.user import ensure_ssh_and_users_fields_exist +from selfprivacy_api.utils import ( + WriteUserData, + validate_ssh_public_key, +) + + +def create_ssh_key(username: str, ssh_key: str) -> tuple[bool, str, int]: + """Create a new ssh key""" + + if not validate_ssh_public_key(ssh_key): + return ( + False, + "Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", + 400, + ) + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == data["username"]: + if ssh_key in data["sshKeys"]: + return False, "Key already exists", 409 + + data["sshKeys"].append(ssh_key) + return True, "New SSH key successfully written", 201 + + if username == "root": + if ssh_key in data["ssh"]["rootKeys"]: + return False, "Key already exists", 409 + + data["ssh"]["rootKeys"].append(ssh_key) + return True, "New SSH key successfully written", 201 + + for user in data["users"]: + if user["username"] == username: + if ssh_key in user["sshKeys"]: + return False, "Key already exists", 409 + + user["sshKeys"].append(ssh_key) + return True, "New SSH key successfully written", 201 + + return False, "User not found", 404 + + +def remove_ssh_key(username: str, ssh_key: str) -> tuple[bool, str, int]: + """Delete a ssh key""" + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == "root": + if ssh_key in data["ssh"]["rootKeys"]: + data["ssh"]["rootKeys"].remove(ssh_key) + return True, "SSH key deleted", 200 + + return False, "Key not found", 404 + + if username == data["username"]: + if ssh_key in data["sshKeys"]: + data["sshKeys"].remove(ssh_key) + return True, "SSH key deleted", 200 + + return False, "Key not found", 404 + + for user in data["users"]: + if user["username"] == username: + if ssh_key in user["sshKeys"]: + user["sshKeys"].remove(ssh_key) + return True, "SSH key deleted", 200 + + return False, "Key not found", 404 + + return False, "User not found", 404 diff --git a/selfprivacy_api/graphql/mutations/system_mutations.py b/selfprivacy_api/graphql/mutations/system_mutations.py index d080af3..517a697 100644 --- a/selfprivacy_api/graphql/mutations/system_mutations.py +++ b/selfprivacy_api/graphql/mutations/system_mutations.py @@ -7,7 +7,7 @@ from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( MutationReturnInterface, ) -from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils import WriteUserData @strawberry.type diff --git a/selfprivacy_api/graphql/mutations/users_mutations.py b/selfprivacy_api/graphql/mutations/users_mutations.py new file mode 100644 index 0000000..a284ff2 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/users_mutations.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 +"""Users management module""" +# pylint: disable=too-few-public-methods +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.common_types.user import ( + UserMutationReturn, + get_user_by_username, +) +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericMutationReturn, +) +from selfprivacy_api.graphql.mutations.users_utils import ( + create_user, + delete_user, + update_user, +) + + +@strawberry.input +class UserMutationInput: + """Input type for user mutation""" + + username: str + password: str + + +@strawberry.type +class UserMutations: + """Mutations change user settings""" + 
+ @strawberry.mutation(permission_classes=[IsAuthenticated]) + def create_user(self, user: UserMutationInput) -> UserMutationReturn: + + success, message, code = create_user(user.username, user.password) + + return UserMutationReturn( + success=success, + message=message, + code=code, + user=get_user_by_username(user.username), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def delete_user(self, username: str) -> GenericMutationReturn: + success, message, code = delete_user(username) + + return GenericMutationReturn( + success=success, + message=message, + code=code, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def update_user(self, user: UserMutationInput) -> UserMutationReturn: + """Update user mutation""" + + success, message, code = update_user(user.username, user.password) + + return UserMutationReturn( + success=success, + message=message, + code=code, + user=get_user_by_username(user.username), + ) diff --git a/selfprivacy_api/graphql/mutations/users_utils.py b/selfprivacy_api/graphql/mutations/users_utils.py new file mode 100644 index 0000000..f649b45 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/users_utils.py @@ -0,0 +1,111 @@ +import re +from selfprivacy_api.utils import ( + WriteUserData, + ReadUserData, + is_username_forbidden, +) +from selfprivacy_api.utils import hash_password + + +def ensure_ssh_and_users_fields_exist(data): + if "ssh" not in data: + data["ssh"] = [] + data["ssh"]["rootKeys"] = [] + + elif data["ssh"].get("rootKeys") is None: + data["ssh"]["rootKeys"] = [] + + if "sshKeys" not in data: + data["sshKeys"] = [] + + if "users" not in data: + data["users"] = [] + + +def create_user(username: str, password: str) -> tuple[bool, str, int]: + """Create a new user""" + + # Check if password is null or none + if password == "": + return False, "Password is null", 400 + + # Check if username is forbidden + if is_username_forbidden(username): + return False, "Username is forbidden", 409 + + # Check is username passes regex + if not re.match(r"^[a-z_][a-z0-9_]+$", username): + return False, "Username must be alphanumeric", 400 + + # Check if username less than 32 characters + if len(username) >= 32: + return False, "Username must be less than 32 characters", 400 + + with ReadUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + # Return 409 if user already exists + if data["username"] == username: + return False, "User already exists", 409 + + for data_user in data["users"]: + if data_user["username"] == username: + return False, "User already exists", 409 + + hashed_password = hash_password(password) + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + data["users"].append( + { + "username": username, + "hashedPassword": hashed_password, + "sshKeys": [], + } + ) + + return True, "User was successfully created!", 201 + + +def delete_user(username: str) -> tuple[bool, str, int]: + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == data["username"] or username == "root": + return False, "Cannot delete main or root user", 400 + + # Return 404 if user does not exist + for data_user in data["users"]: + if data_user["username"] == username: + data["users"].remove(data_user) + break + else: + return False, "User does not exist", 404 + + return True, "User was deleted", 200 + + +def update_user(username: str, password: str) -> tuple[bool, str, int]: + # Check if password is null or none + if password == "": + return False, "Password is null", 400 + 
+ hashed_password = hash_password(password) + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == data["username"]: + data["hashedMasterPassword"] = hashed_password + + # Return 404 if user does not exist + else: + for data_user in data["users"]: + if data_user["username"] == username: + data_user["hashedPassword"] = hashed_password + break + else: + return False, "User does not exist", 404 + + return True, "User was successfully updated", 200 diff --git a/selfprivacy_api/graphql/queries/users.py b/selfprivacy_api/graphql/queries/users.py new file mode 100644 index 0000000..fc18a84 --- /dev/null +++ b/selfprivacy_api/graphql/queries/users.py @@ -0,0 +1,38 @@ +"""Users""" +# pylint: disable=too-few-public-methods +import typing +import strawberry + +from selfprivacy_api.graphql.common_types.user import ( + User, + ensure_ssh_and_users_fields_exist, + get_user_by_username, +) +from selfprivacy_api.utils import ReadUserData +from selfprivacy_api.graphql import IsAuthenticated + + +def get_users() -> typing.List[User]: + """Get users""" + user_list = [] + with ReadUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + for user in data["users"]: + user_list.append(get_user_by_username(user["username"])) + + user_list.append(get_user_by_username(data["username"])) + + return user_list + + +@strawberry.type +class Users: + @strawberry.field(permission_classes=[IsAuthenticated]) + def get_user(self, username: str) -> typing.Optional[User]: + """Get users""" + return get_user_by_username(username) + + all_users: typing.List[User] = strawberry.field( + permission_classes=[IsAuthenticated], resolver=get_users + ) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index c4daac3..c2d6a10 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -1,9 +1,10 @@ """GraphQL API for SelfPrivacy.""" # pylint: disable=too-few-public-methods -import typing + import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations +from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations from selfprivacy_api.graphql.mutations.storage_mutation import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations @@ -11,6 +12,9 @@ from selfprivacy_api.graphql.queries.api_queries import Api from selfprivacy_api.graphql.queries.storage import Storage from selfprivacy_api.graphql.queries.system import System +from selfprivacy_api.graphql.mutations.users_mutations import UserMutations +from selfprivacy_api.graphql.queries.users import Users + @strawberry.type class Query: @@ -26,6 +30,11 @@ class Query: """API access status""" return Api() + @strawberry.field(permission_classes=[IsAuthenticated]) + def users(self) -> Users: + """Users queries""" + return Users() + @strawberry.field(permission_classes=[IsAuthenticated]) def storage(self) -> Storage: """Storage queries""" @@ -33,7 +42,13 @@ class Query: @strawberry.type -class Mutation(ApiMutations, SystemMutations, StorageMutations): +class Mutation( + ApiMutations, + SystemMutations, + UserMutations, + SshMutations, + StorageMutations, +): """Root schema for mutations""" pass diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 1adb189..8ab26d1 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -159,3 +159,17 @@ def get_dkim_key(domain): dkim = 
cat_process.communicate()[0] return str(dkim, "utf-8") return None + + +def hash_password(password): + hashing_command = ["mkpasswd", "-m", "sha-512", password] + password_hash_process_descriptor = subprocess.Popen( + hashing_command, + shell=False, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + hashed_password = password_hash_process_descriptor.communicate()[0] + hashed_password = hashed_password.decode("ascii") + hashed_password = hashed_password.rstrip() + return hashed_password diff --git a/tests/common.py b/tests/common.py index 01975e8..18e065c 100644 --- a/tests/common.py +++ b/tests/common.py @@ -20,5 +20,9 @@ def generate_system_query(query_array): return "query TestSystem {\n system {" + "\n".join(query_array) + "}\n}" +def generate_users_query(query_array): + return "query TestUsers {\n users {" + "\n".join(query_array) + "}\n}" + + def mnemonic_to_hex(mnemonic): return Mnemonic(language="english").to_entropy(mnemonic).hex() diff --git a/tests/test_graphql/_test_system.py b/tests/test_graphql/_test_system.py index d5cf6e6..476846a 100644 --- a/tests/test_graphql/_test_system.py +++ b/tests/test_graphql/_test_system.py @@ -1,12 +1,10 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -import json import os import pytest -import datetime -from tests.common import generate_system_query, read_json, write_json +from tests.common import generate_system_query, read_json @pytest.fixture @@ -56,7 +54,7 @@ class ProcessMock: self.args = args self.kwargs = kwargs - def communicate(): + def communicate(): # pylint: disable=no-method-argument return (b"", None) returncode = 0 @@ -65,7 +63,7 @@ class ProcessMock: class BrokenServiceMock(ProcessMock): """Mock subprocess.Popen for broken service""" - def communicate(): + def communicate(): # pylint: disable=no-method-argument return (b"Testing error", None) returncode = 3 diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py index 031e052..6343d8f 100644 --- a/tests/test_graphql/test_api.py +++ b/tests/test_graphql/test_api.py @@ -1,7 +1,6 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -import pytest from tests.common import generate_api_query from tests.test_graphql.test_api_devices import API_DEVICES_QUERY diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 2d1e16a..be0fdff 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -1,9 +1,6 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -import json -from time import strftime -import pytest import datetime from tests.common import generate_api_query, mnemonic_to_hex, read_json, write_json diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py new file mode 100644 index 0000000..7b48c83 --- /dev/null +++ b/tests/test_graphql/test_ssh.py @@ -0,0 +1,353 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +import pytest + +from tests.common import read_json + + +class ProcessMock: + """Mock subprocess.Popen""" + + def __init__(self, args, **kwargs): + self.args = args + self.kwargs = kwargs + + def communicate(): # pylint: disable=no-method-argument + return (b"NEW_HASHED", None) + + returncode = 0 + + +@pytest.fixture +def mock_subprocess_popen(mocker): + mock = mocker.patch("subprocess.Popen", 
autospec=True, return_value=ProcessMock) + return mock + + +@pytest.fixture +def some_users(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_users.json") + assert read_json(datadir / "some_users.json")["users"] == [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": ["ssh-rsa KEY user1@pc"], + }, + {"username": "user2", "hashedPassword": "HASHED_PASSWORD_2", "sshKeys": []}, + {"username": "user3", "hashedPassword": "HASHED_PASSWORD_3"}, + ] + return datadir + + +# TESTS ######################################################## + + +API_CREATE_SSH_KEY_MUTATION = """ +mutation addSshKey($sshInput: SshMutationInput!) { + addSshKey(sshInput: $sshInput) { + success + message + code + user { + username + sshKeys + } + } +} +""" + + +def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + + +def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["addSshKey"]["code"] == 201 + assert response.json["data"]["addSshKey"]["message"] is not None + assert response.json["data"]["addSshKey"]["success"] is True + + assert response.json["data"]["addSshKey"]["user"]["username"] == "user1" + assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + "ssh-rsa KEY user1@pc", + "ssh-rsa KEY test_key@pc", + ] + + +def test_graphql_add_root_ssh_key(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "root", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["addSshKey"]["code"] == 201 + assert response.json["data"]["addSshKey"]["message"] is not None + assert response.json["data"]["addSshKey"]["success"] is True + + assert response.json["data"]["addSshKey"]["user"]["username"] == "root" + assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + "ssh-ed25519 KEY test@pc", + "ssh-rsa KEY test_key@pc", + ] + + +def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "tester", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["addSshKey"]["code"] == 201 + assert response.json["data"]["addSshKey"]["message"] is not None + assert response.json["data"]["addSshKey"]["success"] is True + + assert response.json["data"]["addSshKey"]["user"]["username"] == "tester" + assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + "ssh-rsa KEY test@pc", + "ssh-rsa KEY test_key@pc", + ] + + +def 
test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "trust me, this is the ssh key", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["addSshKey"]["code"] == 400 + assert response.json["data"]["addSshKey"]["message"] is not None + assert response.json["data"]["addSshKey"]["success"] is False + + +def test_graphql_add_ssh_key_nonexistent_user( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user666", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["addSshKey"]["code"] == 404 + assert response.json["data"]["addSshKey"]["message"] is not None + assert response.json["data"]["addSshKey"]["success"] is False + + +API_REMOVE_SSH_KEY_MUTATION = """ +mutation removeSshKey($sshInput: SshMutationInput!) { + removeSshKey(sshInput: $sshInput) { + success + message + code + user { + username + sshKeys + } + } +} +""" + + +def test_graphql_remove_ssh_key_unauthorized(client, some_users, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + + +def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "ssh-rsa KEY user1@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["removeSshKey"]["code"] == 200 + assert response.json["data"]["removeSshKey"]["message"] is not None + assert response.json["data"]["removeSshKey"]["success"] is True + + assert response.json["data"]["removeSshKey"]["user"]["username"] == "user1" + assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + + +def test_graphql_remove_root_ssh_key( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "root", + "sshKey": "ssh-ed25519 KEY test@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["removeSshKey"]["code"] == 200 + assert response.json["data"]["removeSshKey"]["message"] is not None + assert response.json["data"]["removeSshKey"]["success"] is True + + assert response.json["data"]["removeSshKey"]["user"]["username"] == "root" + assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + + +def test_graphql_remove_main_ssh_key( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "tester", + "sshKey": "ssh-rsa KEY test@pc", + }, + 
}, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["removeSshKey"]["code"] == 200 + assert response.json["data"]["removeSshKey"]["message"] is not None + assert response.json["data"]["removeSshKey"]["success"] is True + + assert response.json["data"]["removeSshKey"]["user"]["username"] == "tester" + assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + + +def test_graphql_remove_nonexistent_ssh_key( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["removeSshKey"]["code"] == 404 + assert response.json["data"]["removeSshKey"]["message"] is not None + assert response.json["data"]["removeSshKey"]["success"] is False + + +def test_graphql_remove_ssh_key_nonexistent_user( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user666", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["removeSshKey"]["code"] == 404 + assert response.json["data"]["removeSshKey"]["message"] is not None + assert response.json["data"]["removeSshKey"]["success"] is False diff --git a/tests/test_graphql/test_ssh/some_users.json b/tests/test_graphql/test_ssh/some_users.json new file mode 100644 index 0000000..569253a --- /dev/null +++ b/tests/test_graphql/test_ssh/some_users.json @@ -0,0 +1,71 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": false + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": [ + "ssh-rsa KEY user1@pc" + ] + }, + { + "username": "user2", + "hashedPassword": "HASHED_PASSWORD_2", + "sshKeys": [ + ] + }, + { + "username": "user3", + "hashedPassword": "HASHED_PASSWORD_3" + } + ] +} \ No newline at end of file diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py new file mode 100644 index 0000000..afae1da --- /dev/null +++ b/tests/test_graphql/test_users.py @@ -0,0 +1,733 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +import pytest + +from tests.common import ( + generate_users_query, + read_json, +) + +invalid_usernames = [ + "messagebus", + "postfix", + "polkituser", + 
"dovecot2", + "dovenull", + "nginx", + "postgres", + "systemd-journal-gateway", + "prosody", + "systemd-network", + "systemd-resolve", + "systemd-timesync", + "opendkim", + "rspamd", + "sshd", + "selfprivacy-api", + "restic", + "redis", + "pleroma", + "ocserv", + "nextcloud", + "memcached", + "knot-resolver", + "gitea", + "bitwarden_rs", + "vaultwarden", + "acme", + "virtualMail", + "nixbld1", + "nixbld2", + "nixbld29", + "nobody", +] + + +## FIXTURES ################################################### + + +@pytest.fixture +def no_users(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_users.json") + assert read_json(datadir / "no_users.json")["users"] == [] + return datadir + + +@pytest.fixture +def one_user(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "one_user.json") + assert read_json(datadir / "one_user.json")["users"] == [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": ["ssh-rsa KEY user1@pc"], + } + ] + return datadir + + +@pytest.fixture +def some_users(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_users.json") + assert read_json(datadir / "some_users.json")["users"] == [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": ["ssh-rsa KEY user1@pc"], + }, + {"username": "user2", "hashedPassword": "HASHED_PASSWORD_2", "sshKeys": []}, + {"username": "user3", "hashedPassword": "HASHED_PASSWORD_3"}, + ] + return datadir + + +@pytest.fixture +def undefined_settings(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") + assert "users" not in read_json(datadir / "undefined.json") + return datadir + + +class ProcessMock: + """Mock subprocess.Popen""" + + def __init__(self, args, **kwargs): + self.args = args + self.kwargs = kwargs + + def communicate(): # pylint: disable=no-method-argument + return (b"NEW_HASHED", None) + + returncode = 0 + + +@pytest.fixture +def mock_subprocess_popen(mocker): + mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + return mock + + +## TESTS ###################################################### + +API_USERS_INFO = """ +allUsers { + username + sshKeys +} +""" + + +def test_graphql_get_users_unauthorized(client, some_users, mock_subprocess_popen): + """Test wrong auth""" + response = client.get( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + + +def test_graphql_get_some_users(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.get( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + assert len(response.json["data"]["users"]["allUsers"]) == 4 + assert response.json["data"]["users"]["allUsers"][0]["username"] == "user1" + assert response.json["data"]["users"]["allUsers"][0]["sshKeys"] == [ + "ssh-rsa KEY user1@pc" + ] + + assert response.json["data"]["users"]["allUsers"][1]["username"] == "user2" + assert response.json["data"]["users"]["allUsers"][1]["sshKeys"] == [] + + assert response.json["data"]["users"]["allUsers"][3]["username"] == "tester" + assert response.json["data"]["users"]["allUsers"][3]["sshKeys"] == [ + "ssh-rsa KEY test@pc" + ] + + +def test_graphql_get_no_users(authorized_client, no_users, mock_subprocess_popen): + 
response = authorized_client.get( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert len(response.json["data"]["users"]["allUsers"]) == 1 + assert response.json["data"]["users"]["allUsers"][0]["username"] == "tester" + assert response.json["data"]["users"]["allUsers"][0]["sshKeys"] == [ + "ssh-rsa KEY test@pc" + ] + + +API_GET_USERS = """ +query TestUsers($username: String!) { + users { + getUser(username: $username) { + sshKeys + username + } + } +} +""" + + +def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_popen): + response = client.get( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "user1", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + + +def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen): + + response = authorized_client.get( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "user1", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert len(response.json["data"]["users"]["getUser"]) == 2 + assert response.json["data"]["users"]["getUser"]["username"] == "user1" + assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + "ssh-rsa KEY user1@pc" + ] + + +def test_graphql_get_some_user(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.get( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "user2", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert len(response.json["data"]["users"]["getUser"]) == 2 + assert response.json["data"]["users"]["getUser"]["username"] == "user2" + assert response.json["data"]["users"]["getUser"]["sshKeys"] == [] + + +def test_graphql_get_root_user(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.get( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "root", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert len(response.json["data"]["users"]["getUser"]) == 2 + assert response.json["data"]["users"]["getUser"]["username"] == "root" + assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + "ssh-ed25519 KEY test@pc" + ] + + +def test_graphql_get_main_user(authorized_client, one_user, mock_subprocess_popen): + response = authorized_client.get( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "tester", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert len(response.json["data"]["users"]["getUser"]) == 2 + assert response.json["data"]["users"]["getUser"]["username"] == "tester" + assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + "ssh-rsa KEY test@pc" + ] + + +def test_graphql_get_nonexistent_user( + authorized_client, one_user, mock_subprocess_popen +): + response = authorized_client.get( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "tyler_durden", + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["users"]["getUser"] is None + + +API_CREATE_USERS_MUTATION = """ +mutation createUser($user: UserMutationInput!) 
{ + createUser(user: $user) { + success + message + code + user { + username + sshKeys + } + } +} +""" + + +def test_graphql_add_user_unauthorize(client, one_user, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "user2", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + + +def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "user2", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["createUser"]["message"] is not None + assert response.json["data"]["createUser"]["code"] == 201 + assert response.json["data"]["createUser"]["success"] is True + + assert response.json["data"]["createUser"]["user"]["username"] == "user2" + assert response.json["data"]["createUser"]["user"]["sshKeys"] == [] + + +def test_graphql_add_undefined_settings( + authorized_client, undefined_settings, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "user2", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["createUser"]["message"] is not None + assert response.json["data"]["createUser"]["code"] == 201 + assert response.json["data"]["createUser"]["success"] is True + + assert response.json["data"]["createUser"]["user"]["username"] == "user2" + assert response.json["data"]["createUser"]["user"]["sshKeys"] == [] + + +def test_graphql_add_without_password( + authorized_client, one_user, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "user2", + "password": "", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["createUser"]["message"] is not None + assert response.json["data"]["createUser"]["code"] == 400 + assert response.json["data"]["createUser"]["success"] is False + + assert response.json["data"]["createUser"]["user"] is None + + +def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "", + "password": "", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["createUser"]["message"] is not None + assert response.json["data"]["createUser"]["code"] == 400 + assert response.json["data"]["createUser"]["success"] is False + + assert response.json["data"]["createUser"]["user"] is None + + +@pytest.mark.parametrize("username", invalid_usernames) +def test_graphql_add_system_username( + authorized_client, one_user, mock_subprocess_popen, username +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": username, + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert 
response.json.get("data") is not None + + assert response.json["data"]["createUser"]["message"] is not None + assert response.json["data"]["createUser"]["code"] == 409 + assert response.json["data"]["createUser"]["success"] is False + + assert response.json["data"]["createUser"]["user"] is None + + +def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "user1", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["createUser"]["message"] is not None + assert response.json["data"]["createUser"]["code"] == 409 + assert response.json["data"]["createUser"]["success"] is False + + assert response.json["data"]["createUser"]["user"]["username"] == "user1" + assert ( + response.json["data"]["createUser"]["user"]["sshKeys"][0] + == "ssh-rsa KEY user1@pc" + ) + + +def test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "tester", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["createUser"]["message"] is not None + assert response.json["data"]["createUser"]["code"] == 409 + assert response.json["data"]["createUser"]["success"] is False + + assert response.json["data"]["createUser"]["user"]["username"] == "tester" + assert ( + response.json["data"]["createUser"]["user"]["sshKeys"][0] + == "ssh-rsa KEY test@pc" + ) + + +def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "a" * 32, + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["createUser"]["message"] is not None + assert response.json["data"]["createUser"]["code"] == 400 + assert response.json["data"]["createUser"]["success"] is False + + assert response.json["data"]["createUser"]["user"] is None + + +@pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "^-^"]) +def test_graphql_add_invalid_username( + authorized_client, one_user, mock_subprocess_popen, username +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": username, + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["createUser"]["message"] is not None + assert response.json["data"]["createUser"]["code"] == 400 + assert response.json["data"]["createUser"]["success"] is False + + assert response.json["data"]["createUser"]["user"] is None + + +API_DELETE_USER_MUTATION = """ +mutation deleteUser($username: String!) 
{ + deleteUser(username: $username) { + success + message + code + } +} +""" + + +def test_graphql_delete_user_unauthorized(client, some_users, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_DELETE_USER_MUTATION, + "variables": {"username": "user1"}, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + + +def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_DELETE_USER_MUTATION, + "variables": {"username": "user1"}, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["deleteUser"]["code"] == 200 + assert response.json["data"]["deleteUser"]["message"] is not None + assert response.json["data"]["deleteUser"]["success"] is True + + +@pytest.mark.parametrize("username", ["", "def"]) +def test_graphql_delete_nonexistent_users( + authorized_client, some_users, mock_subprocess_popen, username +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_DELETE_USER_MUTATION, + "variables": {"username": username}, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["deleteUser"]["code"] == 404 + assert response.json["data"]["deleteUser"]["message"] is not None + assert response.json["data"]["deleteUser"]["success"] is False + + +@pytest.mark.parametrize("username", invalid_usernames) +def test_graphql_delete_system_users( + authorized_client, some_users, mock_subprocess_popen, username +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_DELETE_USER_MUTATION, + "variables": {"username": username}, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert ( + response.json["data"]["deleteUser"]["code"] == 404 + or response.json["data"]["deleteUser"]["code"] == 400 + ) + assert response.json["data"]["deleteUser"]["message"] is not None + assert response.json["data"]["deleteUser"]["success"] is False + + +def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_DELETE_USER_MUTATION, + "variables": {"username": "tester"}, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["deleteUser"]["code"] == 400 + assert response.json["data"]["deleteUser"]["message"] is not None + assert response.json["data"]["deleteUser"]["success"] is False + + +API_UPDATE_USER_MUTATION = """ +mutation updateUser($user: UserMutationInput!) 
{ + updateUser(user: $user) { + success + message + code + user { + username + sshKeys + } + } +} +""" + + +def test_graphql_update_user_unauthorized(client, some_users, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_UPDATE_USER_MUTATION, + "variables": { + "user": { + "username": "user1", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is None + + +def test_graphql_update_user(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_UPDATE_USER_MUTATION, + "variables": { + "user": { + "username": "user1", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["updateUser"]["code"] == 200 + assert response.json["data"]["updateUser"]["message"] is not None + assert response.json["data"]["updateUser"]["success"] is True + + assert response.json["data"]["updateUser"]["user"]["username"] == "user1" + assert response.json["data"]["updateUser"]["user"]["sshKeys"] == [ + "ssh-rsa KEY user1@pc" + ] + assert mock_subprocess_popen.call_count == 1 + + +def test_graphql_update_nonexistent_user( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_UPDATE_USER_MUTATION, + "variables": { + "user": { + "username": "user666", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json.get("data") is not None + + assert response.json["data"]["updateUser"]["code"] == 404 + assert response.json["data"]["updateUser"]["message"] is not None + assert response.json["data"]["updateUser"]["success"] is False + + assert response.json["data"]["updateUser"]["user"] is None + assert mock_subprocess_popen.call_count == 1 diff --git a/tests/test_graphql/test_users/no_users.json b/tests/test_graphql/test_users/no_users.json new file mode 100644 index 0000000..e5efe86 --- /dev/null +++ b/tests/test_graphql/test_users/no_users.json @@ -0,0 +1,54 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": false + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [ + ] +} \ No newline at end of file diff --git a/tests/test_graphql/test_users/one_user.json b/tests/test_graphql/test_users/one_user.json new file mode 100644 index 0000000..5df2108 --- /dev/null +++ b/tests/test_graphql/test_users/one_user.json @@ -0,0 +1,61 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": false + }, + "cloudflare": { + 
"apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": [ + "ssh-rsa KEY user1@pc" + ] + } + ] +} \ No newline at end of file diff --git a/tests/test_graphql/test_users/some_users.json b/tests/test_graphql/test_users/some_users.json new file mode 100644 index 0000000..569253a --- /dev/null +++ b/tests/test_graphql/test_users/some_users.json @@ -0,0 +1,71 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": false + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": [ + "ssh-rsa KEY user1@pc" + ] + }, + { + "username": "user2", + "hashedPassword": "HASHED_PASSWORD_2", + "sshKeys": [ + ] + }, + { + "username": "user3", + "hashedPassword": "HASHED_PASSWORD_3" + } + ] +} \ No newline at end of file diff --git a/tests/test_graphql/test_users/undefined.json b/tests/test_graphql/test_users/undefined.json new file mode 100644 index 0000000..7b2cf8b --- /dev/null +++ b/tests/test_graphql/test_users/undefined.json @@ -0,0 +1,52 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": false + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} \ No newline at end of file From 206589d5ad85903c73ecb4f6f520bc4a50291a10 Mon Sep 17 00:00:00 2001 From: def Date: Mon, 1 Aug 2022 21:32:20 
+0200 Subject: [PATCH 38/39] add system nixos tasks --- .../graphql/mutations/system_mutations.py | 47 +++++++++++++++++++ ...os_tasks.py => test_system_nixos_tasks.py} | 6 +-- 2 files changed, 50 insertions(+), 3 deletions(-) rename tests/test_graphql/{_test_system_nixos_tasks.py => test_system_nixos_tasks.py} (98%) diff --git a/selfprivacy_api/graphql/mutations/system_mutations.py b/selfprivacy_api/graphql/mutations/system_mutations.py index 517a697..057c26f 100644 --- a/selfprivacy_api/graphql/mutations/system_mutations.py +++ b/selfprivacy_api/graphql/mutations/system_mutations.py @@ -1,10 +1,12 @@ """System management mutations""" # pylint: disable=too-few-public-methods +import subprocess import typing import pytz import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericMutationReturn, MutationReturnInterface, ) from selfprivacy_api.utils import WriteUserData @@ -84,3 +86,48 @@ class SystemMutations: enableAutoUpgrade=auto_upgrade, allowReboot=allow_reboot, ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def run_system_rebuild(self) -> GenericMutationReturn: + rebuild_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True + ) + rebuild_result.communicate()[0] + return GenericMutationReturn( + success=True, + message="Starting rebuild system", + code=200, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def run_system_rollback(self) -> GenericMutationReturn: + rollback_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True + ) + rollback_result.communicate()[0] + return GenericMutationReturn( + success=True, + message="Starting rebuild system", + code=200, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def run_system_upgrade(self) -> GenericMutationReturn: + upgrade_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True + ) + upgrade_result.communicate()[0] + return GenericMutationReturn( + success=True, + message="Starting rebuild system", + code=200, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def reboot_system(self) -> GenericMutationReturn: + subprocess.Popen(["reboot"], start_new_session=True) + return GenericMutationReturn( + success=True, + message="System reboot has started", + code=200, + ) diff --git a/tests/test_graphql/_test_system_nixos_tasks.py b/tests/test_graphql/test_system_nixos_tasks.py similarity index 98% rename from tests/test_graphql/_test_system_nixos_tasks.py rename to tests/test_graphql/test_system_nixos_tasks.py index 3778887..601c353 100644 --- a/tests/test_graphql/_test_system_nixos_tasks.py +++ b/tests/test_graphql/test_system_nixos_tasks.py @@ -53,7 +53,7 @@ def mock_subprocess_check_output(mocker): API_REBUILD_SYSTEM_MUTATION = """ -mutation rebuildSystem() { +mutation rebuildSystem { runSystemRebuild { success message @@ -98,7 +98,7 @@ def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): API_UPGRADE_SYSTEM_MUTATION = """ -mutation upgradeSystem() { +mutation upgradeSystem { runSystemUpgrade { success message @@ -143,7 +143,7 @@ def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): API_ROLLBACK_SYSTEM_MUTATION = """ -mutation rollbackSystem() { +mutation rollbackSystem { runSystemRollback { success message From 7935de0fe1f0f7e968646a9126d8adf5205e9561 Mon Sep 17 00:00:00 2001 From: Inex Code 
Date: Thu, 25 Aug 2022 20:03:56 +0300 Subject: [PATCH 39/39] Migrate to FastAPI (#13) Co-authored-by: inexcode Reviewed-on: https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/pulls/13 --- .gitignore | 2 + .pylintrc | 1 + .vscode/launch.json | 19 + api.nix | 64 +++ default.nix | 2 + pyproject.toml | 4 +- requirements.txt | 17 - .../{resources => actions}/__init__.py | 0 selfprivacy_api/actions/api_tokens.py | 116 +++++ selfprivacy_api/actions/ssh.py | 149 ++++++ selfprivacy_api/actions/system.py | 139 +++++ selfprivacy_api/actions/users.py | 219 ++++++++ selfprivacy_api/app.py | 134 ++--- selfprivacy_api/dependencies.py | 30 ++ selfprivacy_api/graphql/__init__.py | 13 +- selfprivacy_api/graphql/common_types/dns.py | 13 + selfprivacy_api/graphql/common_types/jobs.py | 49 ++ .../graphql/common_types/service.py | 146 ++++++ selfprivacy_api/graphql/common_types/user.py | 67 +-- .../graphql/mutations/api_mutations.py | 92 ++-- .../graphql/mutations/job_mutations.py | 27 + .../graphql/mutations/mutation_interface.py | 8 + .../graphql/mutations/services_mutations.py | 169 ++++++ .../graphql/mutations/ssh_mutations.py | 66 ++- .../graphql/mutations/ssh_utils.py | 74 --- ...orage_mutation.py => storage_mutations.py} | 42 +- .../graphql/mutations/system_mutations.py | 67 ++- .../graphql/mutations/users_mutations.py | 90 +++- .../graphql/mutations/users_utils.py | 111 ---- .../graphql/queries/api_queries.py | 48 +- selfprivacy_api/graphql/queries/jobs.py | 25 + selfprivacy_api/graphql/queries/providers.py | 2 - selfprivacy_api/graphql/queries/services.py | 18 + selfprivacy_api/graphql/queries/storage.py | 18 +- selfprivacy_api/graphql/queries/system.py | 99 ++-- selfprivacy_api/graphql/queries/users.py | 17 +- selfprivacy_api/graphql/schema.py | 45 +- selfprivacy_api/jobs/__init__.py | 183 ++++--- selfprivacy_api/jobs/migrate_to_binds.py | 291 +++++++++++ selfprivacy_api/jobs/test.py | 57 ++ .../resources/api_auth/__init__.py | 14 - .../resources/api_auth/app_tokens.py | 118 ----- .../resources/api_auth/new_device.py | 103 ---- .../resources/api_auth/recovery_token.py | 205 -------- selfprivacy_api/resources/common.py | 27 - .../resources/services/__init__.py | 19 - .../resources/services/bitwarden.py | 66 --- selfprivacy_api/resources/services/gitea.py | 66 --- .../resources/services/mailserver.py | 41 -- selfprivacy_api/resources/services/main.py | 84 --- .../resources/services/nextcloud.py | 66 --- selfprivacy_api/resources/services/ocserv.py | 66 --- selfprivacy_api/resources/services/pleroma.py | 66 --- selfprivacy_api/resources/services/restic.py | 241 --------- selfprivacy_api/resources/services/ssh.py | 407 --------------- selfprivacy_api/resources/system.py | 346 ------------- selfprivacy_api/resources/users.py | 162 ------ selfprivacy_api/rest/__init__.py | 0 selfprivacy_api/rest/api_auth.py | 127 +++++ selfprivacy_api/rest/services.py | 373 +++++++++++++ selfprivacy_api/rest/system.py | 105 ++++ selfprivacy_api/rest/users.py | 62 +++ selfprivacy_api/restic_controller/tasks.py | 4 +- selfprivacy_api/services/__init__.py | 67 +++ .../services/bitwarden/__init__.py | 174 +++++++ .../services/bitwarden/bitwarden.svg | 3 + selfprivacy_api/services/bitwarden/icon.py | 5 + .../services/generic_service_mover.py | 237 +++++++++ .../services/generic_size_counter.py | 16 + .../services/generic_status_getter.py | 60 +++ selfprivacy_api/services/gitea/__init__.py | 165 ++++++ selfprivacy_api/services/gitea/gitea.svg | 3 + selfprivacy_api/services/gitea/icon.py | 5 + 
selfprivacy_api/services/jitsi/__init__.py | 142 +++++ selfprivacy_api/services/jitsi/icon.py | 5 + .../services/mailserver/__init__.py | 179 +++++++ selfprivacy_api/services/mailserver/icon.py | 5 + .../services/mailserver/mailserver.svg | 3 + .../services/nextcloud/__init__.py | 145 ++++-- selfprivacy_api/services/nextcloud/icon.py | 12 + selfprivacy_api/services/ocserv/__init__.py | 121 +++++ selfprivacy_api/services/ocserv/icon.py | 5 + selfprivacy_api/services/ocserv/ocserv.svg | 3 + selfprivacy_api/services/pleroma/__init__.py | 157 ++++++ selfprivacy_api/services/pleroma/icon.py | 12 + selfprivacy_api/services/pleroma/pleroma.svg | 10 + selfprivacy_api/services/service.py | 175 ++++--- selfprivacy_api/task_registry.py | 4 + selfprivacy_api/utils/__init__.py | 14 + selfprivacy_api/utils/auth.py | 16 +- selfprivacy_api/utils/block_devices.py | 42 +- selfprivacy_api/utils/huey.py | 14 + selfprivacy_api/utils/network.py | 9 +- setup.py | 2 +- shell.nix | 11 +- tests/conftest.py | 83 ++- tests/data/jobs.json | 1 + tests/test_block_device_utils.py | 490 ++++++++++++++++++ tests/test_block_device_utils/no_devices.json | 54 ++ tests/test_block_device_utils/only_root.json | 59 +++ tests/test_block_device_utils/undefined.json | 52 ++ tests/test_common.py | 20 +- tests/test_graphql/test_api.py | 34 +- tests/test_graphql/test_api_devices.py | 192 +++---- tests/test_graphql/test_api_recovery.py | 278 +++++----- tests/test_graphql/test_api_version.py | 8 +- tests/test_graphql/test_ssh.py | 108 ++-- .../{_test_system.py => test_system.py} | 343 ++++++------ tests/test_graphql/test_system/turned_on.json | 7 +- tests/test_graphql/test_system_nixos_tasks.py | 40 +- tests/test_graphql/test_users.py | 242 ++++----- tests/test_jobs.py | 50 ++ tests/test_network_utils.py | 43 ++ tests/test_rest_endpoints/data/jobs.json | 1 + tests/test_rest_endpoints/data/tokens.json | 14 + .../services/data/tokens.json | 0 .../services/test_bitwarden.py | 0 .../test_bitwarden/enable_undefined.json | 0 .../services/test_bitwarden/turned_off.json | 0 .../services/test_bitwarden/turned_on.json | 0 .../services/test_bitwarden/undefined.json | 0 .../services/test_gitea.py | 0 .../services/test_gitea/enable_undefined.json | 0 .../services/test_gitea/turned_off.json | 0 .../services/test_gitea/turned_on.json | 0 .../services/test_gitea/undefined.json | 0 .../services/test_mailserver.py | 6 +- .../services/test_nextcloud.py | 0 .../test_nextcloud/enable_undefined.json | 0 .../services/test_nextcloud/turned_off.json | 0 .../services/test_nextcloud/turned_on.json | 0 .../services/test_nextcloud/undefined.json | 0 .../services/test_ocserv.py | 0 .../test_ocserv/enable_undefined.json | 0 .../services/test_ocserv/turned_off.json | 0 .../services/test_ocserv/turned_on.json | 0 .../services/test_ocserv/undefined.json | 0 .../services/test_pleroma.py | 0 .../test_pleroma/enable_undefined.json | 0 .../services/test_pleroma/turned_off.json | 0 .../services/test_pleroma/turned_on.json | 0 .../services/test_pleroma/undefined.json | 0 .../services/test_restic.py | 40 +- .../services/test_restic/no_values.json | 0 .../services/test_restic/some_values.json | 0 .../services/test_restic/undefined.json | 0 .../services/test_services.py | 91 ++-- .../services/test_ssh.py | 57 +- .../services/test_ssh/all_off.json | 0 .../test_ssh/root_and_admin_have_keys.json | 0 .../services/test_ssh/some_users.json | 0 .../services/test_ssh/turned_off.json | 0 .../services/test_ssh/turned_on.json | 0 .../services/test_ssh/undefined.json | 0 
.../services/test_ssh/undefined_values.json | 0 tests/{ => test_rest_endpoints}/test_auth.py | 104 ++-- .../{ => test_rest_endpoints}/test_system.py | 18 +- .../test_system/domain | 0 .../test_system/no_values.json | 0 .../test_system/turned_off.json | 0 .../test_system/turned_on.json | 0 .../test_system/undefined.json | 0 tests/{ => test_rest_endpoints}/test_users.py | 26 +- .../test_users/no_users.json | 0 .../test_users/one_user.json | 0 .../test_users/some_users.json | 0 .../test_users/undefined.json | 0 167 files changed, 6088 insertions(+), 3695 deletions(-) create mode 100644 .vscode/launch.json create mode 100644 api.nix create mode 100644 default.nix delete mode 100755 requirements.txt rename selfprivacy_api/{resources => actions}/__init__.py (100%) create mode 100644 selfprivacy_api/actions/api_tokens.py create mode 100644 selfprivacy_api/actions/ssh.py create mode 100644 selfprivacy_api/actions/system.py create mode 100644 selfprivacy_api/actions/users.py create mode 100644 selfprivacy_api/dependencies.py create mode 100644 selfprivacy_api/graphql/common_types/dns.py create mode 100644 selfprivacy_api/graphql/common_types/jobs.py create mode 100644 selfprivacy_api/graphql/common_types/service.py create mode 100644 selfprivacy_api/graphql/mutations/job_mutations.py create mode 100644 selfprivacy_api/graphql/mutations/services_mutations.py delete mode 100644 selfprivacy_api/graphql/mutations/ssh_utils.py rename selfprivacy_api/graphql/mutations/{storage_mutation.py => storage_mutations.py} (61%) delete mode 100644 selfprivacy_api/graphql/mutations/users_utils.py create mode 100644 selfprivacy_api/graphql/queries/jobs.py create mode 100644 selfprivacy_api/graphql/queries/services.py create mode 100644 selfprivacy_api/jobs/migrate_to_binds.py create mode 100644 selfprivacy_api/jobs/test.py delete mode 100644 selfprivacy_api/resources/api_auth/__init__.py delete mode 100644 selfprivacy_api/resources/api_auth/app_tokens.py delete mode 100644 selfprivacy_api/resources/api_auth/new_device.py delete mode 100644 selfprivacy_api/resources/api_auth/recovery_token.py delete mode 100644 selfprivacy_api/resources/common.py delete mode 100644 selfprivacy_api/resources/services/__init__.py delete mode 100644 selfprivacy_api/resources/services/bitwarden.py delete mode 100644 selfprivacy_api/resources/services/gitea.py delete mode 100644 selfprivacy_api/resources/services/mailserver.py delete mode 100644 selfprivacy_api/resources/services/main.py delete mode 100644 selfprivacy_api/resources/services/nextcloud.py delete mode 100644 selfprivacy_api/resources/services/ocserv.py delete mode 100644 selfprivacy_api/resources/services/pleroma.py delete mode 100644 selfprivacy_api/resources/services/restic.py delete mode 100644 selfprivacy_api/resources/services/ssh.py delete mode 100644 selfprivacy_api/resources/system.py delete mode 100644 selfprivacy_api/resources/users.py create mode 100644 selfprivacy_api/rest/__init__.py create mode 100644 selfprivacy_api/rest/api_auth.py create mode 100644 selfprivacy_api/rest/services.py create mode 100644 selfprivacy_api/rest/system.py create mode 100644 selfprivacy_api/rest/users.py create mode 100644 selfprivacy_api/services/bitwarden/__init__.py create mode 100644 selfprivacy_api/services/bitwarden/bitwarden.svg create mode 100644 selfprivacy_api/services/bitwarden/icon.py create mode 100644 selfprivacy_api/services/generic_service_mover.py create mode 100644 selfprivacy_api/services/generic_size_counter.py create mode 100644 
selfprivacy_api/services/generic_status_getter.py create mode 100644 selfprivacy_api/services/gitea/__init__.py create mode 100644 selfprivacy_api/services/gitea/gitea.svg create mode 100644 selfprivacy_api/services/gitea/icon.py create mode 100644 selfprivacy_api/services/jitsi/__init__.py create mode 100644 selfprivacy_api/services/jitsi/icon.py create mode 100644 selfprivacy_api/services/mailserver/__init__.py create mode 100644 selfprivacy_api/services/mailserver/icon.py create mode 100644 selfprivacy_api/services/mailserver/mailserver.svg create mode 100644 selfprivacy_api/services/nextcloud/icon.py create mode 100644 selfprivacy_api/services/ocserv/__init__.py create mode 100644 selfprivacy_api/services/ocserv/icon.py create mode 100644 selfprivacy_api/services/ocserv/ocserv.svg create mode 100644 selfprivacy_api/services/pleroma/__init__.py create mode 100644 selfprivacy_api/services/pleroma/icon.py create mode 100644 selfprivacy_api/services/pleroma/pleroma.svg create mode 100644 selfprivacy_api/task_registry.py create mode 100644 selfprivacy_api/utils/huey.py create mode 100644 tests/data/jobs.json create mode 100644 tests/test_block_device_utils.py create mode 100644 tests/test_block_device_utils/no_devices.json create mode 100644 tests/test_block_device_utils/only_root.json create mode 100644 tests/test_block_device_utils/undefined.json rename tests/test_graphql/{_test_system.py => test_system.py} (65%) create mode 100644 tests/test_jobs.py create mode 100644 tests/test_rest_endpoints/data/jobs.json create mode 100644 tests/test_rest_endpoints/data/tokens.json rename tests/{ => test_rest_endpoints}/services/data/tokens.json (100%) rename tests/{ => test_rest_endpoints}/services/test_bitwarden.py (100%) rename tests/{ => test_rest_endpoints}/services/test_bitwarden/enable_undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_bitwarden/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_bitwarden/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_bitwarden/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_gitea.py (100%) rename tests/{ => test_rest_endpoints}/services/test_gitea/enable_undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_gitea/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_gitea/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_gitea/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_mailserver.py (91%) rename tests/{ => test_rest_endpoints}/services/test_nextcloud.py (100%) rename tests/{ => test_rest_endpoints}/services/test_nextcloud/enable_undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_nextcloud/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_nextcloud/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_nextcloud/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ocserv.py (100%) rename tests/{ => test_rest_endpoints}/services/test_ocserv/enable_undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ocserv/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ocserv/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ocserv/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_pleroma.py (100%) rename tests/{ => test_rest_endpoints}/services/test_pleroma/enable_undefined.json (100%) 
rename tests/{ => test_rest_endpoints}/services/test_pleroma/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_pleroma/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_pleroma/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_restic.py (93%) rename tests/{ => test_rest_endpoints}/services/test_restic/no_values.json (100%) rename tests/{ => test_rest_endpoints}/services/test_restic/some_values.json (100%) rename tests/{ => test_rest_endpoints}/services/test_restic/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_services.py (70%) rename tests/{ => test_rest_endpoints}/services/test_ssh.py (91%) rename tests/{ => test_rest_endpoints}/services/test_ssh/all_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/root_and_admin_have_keys.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/some_users.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/undefined.json (100%) rename tests/{ => test_rest_endpoints}/services/test_ssh/undefined_values.json (100%) rename tests/{ => test_rest_endpoints}/test_auth.py (87%) rename tests/{ => test_rest_endpoints}/test_system.py (97%) rename tests/{ => test_rest_endpoints}/test_system/domain (100%) rename tests/{ => test_rest_endpoints}/test_system/no_values.json (100%) rename tests/{ => test_rest_endpoints}/test_system/turned_off.json (100%) rename tests/{ => test_rest_endpoints}/test_system/turned_on.json (100%) rename tests/{ => test_rest_endpoints}/test_system/undefined.json (100%) rename tests/{ => test_rest_endpoints}/test_users.py (93%) rename tests/{ => test_rest_endpoints}/test_users/no_users.json (100%) rename tests/{ => test_rest_endpoints}/test_users/one_user.json (100%) rename tests/{ => test_rest_endpoints}/test_users/some_users.json (100%) rename tests/{ => test_rest_endpoints}/test_users/undefined.json (100%) diff --git a/.gitignore b/.gitignore index 1264e45..7941396 100755 --- a/.gitignore +++ b/.gitignore @@ -145,3 +145,5 @@ dmypy.json cython_debug/ # End of https://www.toptal.com/developers/gitignore/api/flask + +*.db diff --git a/.pylintrc b/.pylintrc index c6d73d8..9135ea9 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,2 +1,3 @@ [MASTER] init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))" +extension-pkg-whitelist=pydantic diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..a691ce0 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,19 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. 
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Python: FastAPI", + "type": "python", + "request": "launch", + "module": "uvicorn", + "args": [ + "selfprivacy_api.app:app" + ], + "jinja": true, + "justMyCode": false + } + ] +} diff --git a/api.nix b/api.nix new file mode 100644 index 0000000..83bc695 --- /dev/null +++ b/api.nix @@ -0,0 +1,64 @@ +{ lib, python39Packages }: +with python39Packages; +buildPythonApplication { + pname = "selfprivacy-api"; + version = "2.0.0"; + + propagatedBuildInputs = [ + setuptools + portalocker + pytz + pytest + pytest-mock + pytest-datadir + huey + gevent + mnemonic + pydantic + typing-extensions + psutil + fastapi + uvicorn + (buildPythonPackage rec { + pname = "strawberry-graphql"; + version = "0.123.0"; + format = "pyproject"; + patches = [ + ./strawberry-graphql.patch + ]; + propagatedBuildInputs = [ + typing-extensions + python-multipart + python-dateutil + # flask + pydantic + pygments + poetry + # flask-cors + (buildPythonPackage rec { + pname = "graphql-core"; + version = "3.2.0"; + format = "setuptools"; + src = fetchPypi { + inherit pname version; + sha256 = "sha256-huKgvgCL/eGe94OI3opyWh2UKpGQykMcJKYIN5c4A84="; + }; + checkInputs = [ + pytest-asyncio + pytest-benchmark + pytestCheckHook + ]; + pythonImportsCheck = [ + "graphql" + ]; + }) + ]; + src = fetchPypi { + inherit pname version; + sha256 = "KsmZ5Xv8tUg6yBxieAEtvoKoRG60VS+iVGV0X6oCExo="; + }; + }) + ]; + + src = ./.; +} diff --git a/default.nix b/default.nix new file mode 100644 index 0000000..740c7ce --- /dev/null +++ b/default.nix @@ -0,0 +1,2 @@ +{ pkgs ? import {} }: +pkgs.callPackage ./api.nix {} diff --git a/pyproject.toml b/pyproject.toml index 1ffd18c..7f8d872 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,3 @@ [build-system] -requires = ["setuptools", "wheel", "portalocker", "flask-swagger", "flask-swagger-ui"] -build-backend = "setuptools.build_meta" \ No newline at end of file +requires = ["setuptools", "wheel", "portalocker"] +build-backend = "setuptools.build_meta" diff --git a/requirements.txt b/requirements.txt deleted file mode 100755 index 4e0e02e..0000000 --- a/requirements.txt +++ /dev/null @@ -1,17 +0,0 @@ -wheel -flask -flask_restful -flask_socketio -setuptools -portalocker -flask-swagger -flask-swagger-ui -pytz -huey -gevent -mnemonic - -pytest -coverage -pytest-mock -pytest-datadir diff --git a/selfprivacy_api/resources/__init__.py b/selfprivacy_api/actions/__init__.py similarity index 100% rename from selfprivacy_api/resources/__init__.py rename to selfprivacy_api/actions/__init__.py diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py new file mode 100644 index 0000000..61c695d --- /dev/null +++ b/selfprivacy_api/actions/api_tokens.py @@ -0,0 +1,116 @@ +"""App tokens actions""" +from datetime import datetime +from typing import Optional +from pydantic import BaseModel + + +from selfprivacy_api.utils.auth import ( + delete_token, + generate_recovery_token, + get_recovery_token_status, + get_tokens_info, + is_recovery_token_exists, + is_recovery_token_valid, + is_token_name_exists, + is_token_name_pair_valid, + refresh_token, + get_token_name, +) + + +class TokenInfoWithIsCaller(BaseModel): + """Token info""" + + name: str + date: datetime + is_caller: bool + + +def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCaller]: + """Get the tokens info""" + caller_name = get_token_name(caller_token) + 
tokens = get_tokens_info() + return [ + TokenInfoWithIsCaller( + name=token.name, + date=token.date, + is_caller=token.name == caller_name, + ) + for token in tokens + ] + + +class NotFoundException(Exception): + """Not found exception""" + + +class CannotDeleteCallerException(Exception): + """Cannot delete caller exception""" + + +def delete_api_token(caller_token: str, token_name: str) -> None: + """Delete the token""" + if is_token_name_pair_valid(token_name, caller_token): + raise CannotDeleteCallerException("Cannot delete caller's token") + if not is_token_name_exists(token_name): + raise NotFoundException("Token not found") + delete_token(token_name) + + +def refresh_api_token(caller_token: str) -> str: + """Refresh the token""" + new_token = refresh_token(caller_token) + if new_token is None: + raise NotFoundException("Token not found") + return new_token + + +class RecoveryTokenStatus(BaseModel): + """Recovery token status""" + + exists: bool + valid: bool + date: Optional[datetime] = None + expiration: Optional[datetime] = None + uses_left: Optional[int] = None + + +def get_api_recovery_token_status() -> RecoveryTokenStatus: + """Get the recovery token status""" + if not is_recovery_token_exists(): + return RecoveryTokenStatus(exists=False, valid=False) + status = get_recovery_token_status() + if status is None: + return RecoveryTokenStatus(exists=False, valid=False) + is_valid = is_recovery_token_valid() + return RecoveryTokenStatus( + exists=True, + valid=is_valid, + date=status["date"], + expiration=status["expiration"], + uses_left=status["uses_left"], + ) + + +class InvalidExpirationDate(Exception): + """Invalid expiration date exception""" + + +class InvalidUsesLeft(Exception): + """Invalid uses left exception""" + + +def get_new_api_recovery_key( + expiration_date: Optional[datetime] = None, uses_left: Optional[int] = None +) -> str: + """Get new recovery key""" + if expiration_date is not None: + current_time = datetime.now().timestamp() + if expiration_date.timestamp() < current_time: + raise InvalidExpirationDate("Expiration date is in the past") + if uses_left is not None: + if uses_left <= 0: + raise InvalidUsesLeft("Uses must be greater than 0") + + key = generate_recovery_token(expiration_date, uses_left) + return key diff --git a/selfprivacy_api/actions/ssh.py b/selfprivacy_api/actions/ssh.py new file mode 100644 index 0000000..3f79ff8 --- /dev/null +++ b/selfprivacy_api/actions/ssh.py @@ -0,0 +1,149 @@ +"""Actions to manage the SSH.""" +from typing import Optional +from pydantic import BaseModel +from selfprivacy_api.actions.users import ( + UserNotFound, + ensure_ssh_and_users_fields_exist, +) + +from selfprivacy_api.utils import WriteUserData, ReadUserData, validate_ssh_public_key + + +def enable_ssh(): + with WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} + data["ssh"]["enable"] = True + + +class UserdataSshSettings(BaseModel): + """Settings for the SSH.""" + + enable: bool = True + passwordAuthentication: bool = True + rootKeys: list[str] = [] + + +def get_ssh_settings() -> UserdataSshSettings: + with ReadUserData() as data: + if "ssh" not in data: + return UserdataSshSettings() + if "enable" not in data["ssh"]: + data["ssh"]["enable"] = True + if "passwordAuthentication" not in data["ssh"]: + data["ssh"]["passwordAuthentication"] = True + if "rootKeys" not in data["ssh"]: + data["ssh"]["rootKeys"] = [] + return UserdataSshSettings(**data["ssh"]) + + +def set_ssh_settings( + enable: Optional[bool] = None, password_authentication: 
Optional[bool] = None +) -> None: + with WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} + if enable is not None: + data["ssh"]["enable"] = enable + if password_authentication is not None: + data["ssh"]["passwordAuthentication"] = password_authentication + + +def add_root_ssh_key(public_key: str): + with WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} + if "rootKeys" not in data["ssh"]: + data["ssh"]["rootKeys"] = [] + # Return 409 if key already in array + for key in data["ssh"]["rootKeys"]: + if key == public_key: + raise KeyAlreadyExists() + data["ssh"]["rootKeys"].append(public_key) + + +class KeyAlreadyExists(Exception): + """Key already exists""" + + pass + + +class InvalidPublicKey(Exception): + """Invalid public key""" + + pass + + +def create_ssh_key(username: str, ssh_key: str): + """Create a new ssh key""" + + if not validate_ssh_public_key(ssh_key): + raise InvalidPublicKey() + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == data["username"]: + if ssh_key in data["sshKeys"]: + raise KeyAlreadyExists() + + data["sshKeys"].append(ssh_key) + return + + if username == "root": + if ssh_key in data["ssh"]["rootKeys"]: + raise KeyAlreadyExists() + + data["ssh"]["rootKeys"].append(ssh_key) + return + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + if ssh_key in user["sshKeys"]: + raise KeyAlreadyExists() + + user["sshKeys"].append(ssh_key) + return + + raise UserNotFound() + + +class KeyNotFound(Exception): + """Key not found""" + + pass + + +def remove_ssh_key(username: str, ssh_key: str): + """Delete a ssh key""" + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == "root": + if ssh_key in data["ssh"]["rootKeys"]: + data["ssh"]["rootKeys"].remove(ssh_key) + return + + raise KeyNotFound() + + if username == data["username"]: + if ssh_key in data["sshKeys"]: + data["sshKeys"].remove(ssh_key) + return + + raise KeyNotFound() + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + if ssh_key in user["sshKeys"]: + user["sshKeys"].remove(ssh_key) + return + + raise KeyNotFound() + + raise UserNotFound() diff --git a/selfprivacy_api/actions/system.py b/selfprivacy_api/actions/system.py new file mode 100644 index 0000000..853662f --- /dev/null +++ b/selfprivacy_api/actions/system.py @@ -0,0 +1,139 @@ +"""Actions to manage the system.""" +import os +import subprocess +import pytz +from typing import Optional +from pydantic import BaseModel + +from selfprivacy_api.utils import WriteUserData, ReadUserData + + +def get_timezone() -> str: + """Get the timezone of the server""" + with ReadUserData() as user_data: + if "timezone" in user_data: + return user_data["timezone"] + return "Europe/Uzhgorod" + + +class InvalidTimezone(Exception): + """Invalid timezone""" + + pass + + +def change_timezone(timezone: str) -> None: + """Change the timezone of the server""" + if timezone not in pytz.all_timezones: + raise InvalidTimezone(f"Invalid timezone: {timezone}") + with WriteUserData() as user_data: + user_data["timezone"] = timezone + + +class UserDataAutoUpgradeSettings(BaseModel): + """Settings for auto-upgrading user data""" + + enable: bool = True + allowReboot: bool = False + + +def get_auto_upgrade_settings() -> UserDataAutoUpgradeSettings: + """Get the auto-upgrade settings""" + with ReadUserData() as user_data: + if "autoUpgrade" in 
user_data: + return UserDataAutoUpgradeSettings(**user_data["autoUpgrade"]) + return UserDataAutoUpgradeSettings() + + +def set_auto_upgrade_settings( + enalbe: Optional[bool] = None, allowReboot: Optional[bool] = None +) -> None: + """Set the auto-upgrade settings""" + with WriteUserData() as user_data: + if "autoUpgrade" not in user_data: + user_data["autoUpgrade"] = {} + if enalbe is not None: + user_data["autoUpgrade"]["enable"] = enalbe + if allowReboot is not None: + user_data["autoUpgrade"]["allowReboot"] = allowReboot + + +def rebuild_system() -> int: + """Rebuild the system""" + rebuild_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True + ) + rebuild_result.communicate()[0] + return rebuild_result.returncode + + +def rollback_system() -> int: + """Rollback the system""" + rollback_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True + ) + rollback_result.communicate()[0] + return rollback_result.returncode + + +def upgrade_system() -> int: + """Upgrade the system""" + upgrade_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True + ) + upgrade_result.communicate()[0] + return upgrade_result.returncode + + +def reboot_system() -> None: + """Reboot the system""" + subprocess.Popen(["reboot"], start_new_session=True) + + +def get_system_version() -> str: + """Get system version""" + return subprocess.check_output(["uname", "-a"]).decode("utf-8").strip() + + +def get_python_version() -> str: + """Get Python version""" + return subprocess.check_output(["python", "-V"]).decode("utf-8").strip() + + +class SystemActionResult(BaseModel): + """System action result""" + + status: int + message: str + data: str + + +def pull_repository_changes() -> SystemActionResult: + """Pull repository changes""" + git_pull_command = ["git", "pull"] + + current_working_directory = os.getcwd() + os.chdir("/etc/nixos") + + git_pull_process_descriptor = subprocess.Popen( + git_pull_command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + shell=False, + ) + + data = git_pull_process_descriptor.communicate()[0].decode("utf-8") + + os.chdir(current_working_directory) + + if git_pull_process_descriptor.returncode == 0: + return SystemActionResult( + status=0, + message="Pulled repository changes", + data=data, + ) + return SystemActionResult( + status=git_pull_process_descriptor.returncode, + message="Failed to pull repository changes", + data=data, + ) diff --git a/selfprivacy_api/actions/users.py b/selfprivacy_api/actions/users.py new file mode 100644 index 0000000..bfc1756 --- /dev/null +++ b/selfprivacy_api/actions/users.py @@ -0,0 +1,219 @@ +"""Actions to manage the users.""" +import re +from typing import Optional +from pydantic import BaseModel +from enum import Enum +from selfprivacy_api.utils import ( + ReadUserData, + WriteUserData, + hash_password, + is_username_forbidden, +) + + +class UserDataUserOrigin(Enum): + """Origin of the user in the user data""" + + NORMAL = "NORMAL" + PRIMARY = "PRIMARY" + ROOT = "ROOT" + + +class UserDataUser(BaseModel): + """The user model from the userdata file""" + + username: str + ssh_keys: list[str] + origin: UserDataUserOrigin + + +def ensure_ssh_and_users_fields_exist(data): + if "ssh" not in data: + data["ssh"] = {} + data["ssh"]["rootKeys"] = [] + + elif data["ssh"].get("rootKeys") is None: + data["ssh"]["rootKeys"] = [] + + if "sshKeys" not in data: + data["sshKeys"] = [] + + if "users" not in data: + 
data["users"] = [] + + +def get_users( + exclude_primary: bool = False, + exclude_root: bool = False, +) -> list[UserDataUser]: + """Get the list of users""" + users = [] + with ReadUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + users = [ + UserDataUser( + username=user["username"], + ssh_keys=user.get("sshKeys", []), + origin=UserDataUserOrigin.NORMAL, + ) + for user in user_data["users"] + ] + if not exclude_primary: + users.append( + UserDataUser( + username=user_data["username"], + ssh_keys=user_data["sshKeys"], + origin=UserDataUserOrigin.PRIMARY, + ) + ) + if not exclude_root: + users.append( + UserDataUser( + username="root", + ssh_keys=user_data["ssh"]["rootKeys"], + origin=UserDataUserOrigin.ROOT, + ) + ) + return users + + +class UsernameForbidden(Exception): + """Attemted to create a user with a forbidden username""" + + pass + + +class UserAlreadyExists(Exception): + """Attemted to create a user that already exists""" + + pass + + +class UsernameNotAlphanumeric(Exception): + """Attemted to create a user with a non-alphanumeric username""" + + pass + + +class UsernameTooLong(Exception): + """Attemted to create a user with a too long username. Username must be less than 32 characters""" + + pass + + +class PasswordIsEmpty(Exception): + """Attemted to create a user with an empty password""" + + pass + + +def create_user(username: str, password: str): + if password == "": + raise PasswordIsEmpty("Password is empty") + + if is_username_forbidden(username): + raise UsernameForbidden("Username is forbidden") + + if not re.match(r"^[a-z_][a-z0-9_]+$", username): + raise UsernameNotAlphanumeric( + "Username must be alphanumeric and start with a letter" + ) + + if len(username) >= 32: + raise UsernameTooLong("Username must be less than 32 characters") + + with ReadUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + if username == user_data["username"]: + raise UserAlreadyExists("User already exists") + if username in [user["username"] for user in user_data["users"]]: + raise UserAlreadyExists("User already exists") + + hashed_password = hash_password(password) + + with WriteUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + + user_data["users"].append( + {"username": username, "sshKeys": [], "hashedPassword": hashed_password} + ) + + +class UserNotFound(Exception): + """Attemted to get a user that does not exist""" + + pass + + +class UserIsProtected(Exception): + """Attemted to delete a user that is protected""" + + pass + + +def delete_user(username: str): + with WriteUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + if username == user_data["username"] or username == "root": + raise UserIsProtected("Cannot delete main or root user") + + for data_user in user_data["users"]: + if data_user["username"] == username: + user_data["users"].remove(data_user) + break + else: + raise UserNotFound("User did not exist") + + +def update_user(username: str, password: str): + if password == "": + raise PasswordIsEmpty("Password is empty") + + hashed_password = hash_password(password) + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == data["username"]: + data["hashedMasterPassword"] = hashed_password + + # Return 404 if user does not exist + else: + for data_user in data["users"]: + if data_user["username"] == username: + data_user["hashedPassword"] = hashed_password + break + else: + raise UserNotFound("User does not exist") + + +def 
get_user_by_username(username: str) -> Optional[UserDataUser]: + with ReadUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == "root": + return UserDataUser( + origin=UserDataUserOrigin.ROOT, + username="root", + ssh_keys=data["ssh"]["rootKeys"], + ) + + if username == data["username"]: + return UserDataUser( + origin=UserDataUserOrigin.PRIMARY, + username=username, + ssh_keys=data["sshKeys"], + ) + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + + return UserDataUser( + origin=UserDataUserOrigin.NORMAL, + username=username, + ssh_keys=user["sshKeys"], + ) + + return None diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 15142f0..3436445 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -1,110 +1,56 @@ #!/usr/bin/env python3 """SelfPrivacy server management API""" -import os -from gevent import monkey +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from strawberry.fastapi import GraphQLRouter +import uvicorn -from flask import Flask, request, jsonify -from flask_restful import Api -from flask_swagger import swagger -from flask_swagger_ui import get_swaggerui_blueprint -from flask_cors import CORS - -from strawberry.flask.views import AsyncGraphQLView - -from selfprivacy_api.resources.users import User, Users -from selfprivacy_api.resources.common import ApiVersion -from selfprivacy_api.resources.system import api_system -from selfprivacy_api.resources.services import services as api_services -from selfprivacy_api.resources.api_auth import auth as api_auth - -from selfprivacy_api.restic_controller.tasks import huey, init_restic - -from selfprivacy_api.migrations import run_migrations - -from selfprivacy_api.utils.auth import is_token_valid - +from selfprivacy_api.dependencies import get_api_version from selfprivacy_api.graphql.schema import schema +from selfprivacy_api.migrations import run_migrations +from selfprivacy_api.restic_controller.tasks import init_restic -swagger_blueprint = get_swaggerui_blueprint( - "/api/docs", "/api/swagger.json", config={"app_name": "SelfPrivacy API"} +from selfprivacy_api.rest import ( + system, + users, + api_auth, + services, +) + +app = FastAPI() + +graphql_app = GraphQLRouter( + schema, +) + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], ) -def create_app(test_config=None): - """Initiate Flask app and bind routes""" - app = Flask(__name__) - api = Api(app) - CORS(app) +app.include_router(system.router) +app.include_router(users.router) +app.include_router(api_auth.router) +app.include_router(services.router) +app.include_router(graphql_app, prefix="/graphql") - if test_config is None: - app.config["ENABLE_SWAGGER"] = os.environ.get("ENABLE_SWAGGER", "0") - app.config["B2_BUCKET"] = os.environ.get("B2_BUCKET") - else: - app.config.update(test_config) - # Check bearer token - @app.before_request - def check_auth(): - # Exclude swagger-ui, /auth/new_device/authorize, /auth/recovery_token/use - if request.path.startswith("/api"): - pass - elif request.path.startswith("/auth/new_device/authorize"): - pass - elif request.path.startswith("/auth/recovery_token/use"): - pass - elif request.path.startswith("/graphql"): - pass - else: - auth = request.headers.get("Authorization") - if auth is None: - return jsonify({"error": "Missing Authorization header"}), 401 - # Strip Bearer from auth header - auth = 
auth.replace("Bearer ", "") - if not is_token_valid(auth): - return jsonify({"error": "Invalid token"}), 401 +@app.get("/api/version") +async def get_version(): + """Get the version of the server""" + return {"version": get_api_version()} - api.add_resource(ApiVersion, "/api/version") - api.add_resource(Users, "/users") - api.add_resource(User, "/users/") - app.register_blueprint(api_system) - app.register_blueprint(api_services) - app.register_blueprint(api_auth) - - @app.route("/api/swagger.json") - def spec(): - if app.config["ENABLE_SWAGGER"] == "1": - swag = swagger(app) - swag["info"]["version"] = "1.2.7" - swag["info"]["title"] = "SelfPrivacy API" - swag["info"]["description"] = "SelfPrivacy API" - swag["securityDefinitions"] = { - "bearerAuth": { - "type": "apiKey", - "name": "Authorization", - "in": "header", - } - } - swag["security"] = [{"bearerAuth": []}] - - return jsonify(swag) - return jsonify({}), 404 - - app.add_url_rule( - "/graphql", view_func=AsyncGraphQLView.as_view("graphql", schema=schema) - ) - - if app.config["ENABLE_SWAGGER"] == "1": - app.register_blueprint(swagger_blueprint, url_prefix="/api/docs") - - return app +@app.on_event("startup") +async def startup(): + run_migrations() + init_restic() if __name__ == "__main__": - monkey.patch_all() - created_app = create_app() - run_migrations() - huey.start() - init_restic() - created_app.run(port=5050, debug=False) + uvicorn.run("selfprivacy_api.app:app", host="0.0.0.0", port=5050, log_level="info") diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py new file mode 100644 index 0000000..109e2ce --- /dev/null +++ b/selfprivacy_api/dependencies.py @@ -0,0 +1,30 @@ +from fastapi import Depends, HTTPException, status +from fastapi.security import APIKeyHeader +from pydantic import BaseModel + +from selfprivacy_api.utils.auth import is_token_valid + + +class TokenHeader(BaseModel): + token: str + + +async def get_token_header( + token: str = Depends(APIKeyHeader(name="Authorization", auto_error=False)) +) -> TokenHeader: + if token is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Token not provided" + ) + else: + token = token.replace("Bearer ", "") + if not is_token_valid(token): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token" + ) + return TokenHeader(token=token) + + +def get_api_version() -> str: + """Get API version""" + return "2.0.0" diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index 5e332f3..7372197 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -3,7 +3,6 @@ import typing from strawberry.permission import BasePermission from strawberry.types import Info -from flask import request from selfprivacy_api.utils.auth import is_token_valid @@ -14,11 +13,9 @@ class IsAuthenticated(BasePermission): message = "You must be authenticated to access this resource." 
def has_permission(self, source: typing.Any, info: Info, **kwargs) -> bool: - auth = request.headers.get("Authorization") - if auth is None: + token = info.context["request"].headers.get("Authorization") + if token is None: + token = info.context["request"].query_params.get("token") + if token is None: return False - # Strip Bearer from auth header - auth = auth.replace("Bearer ", "") - if not is_token_valid(auth): - return False - return True + return is_token_valid(token.replace("Bearer ", "")) diff --git a/selfprivacy_api/graphql/common_types/dns.py b/selfprivacy_api/graphql/common_types/dns.py new file mode 100644 index 0000000..c9f8413 --- /dev/null +++ b/selfprivacy_api/graphql/common_types/dns.py @@ -0,0 +1,13 @@ +import typing +import strawberry + + +@strawberry.type +class DnsRecord: + """DNS record""" + + record_type: str + name: str + content: str + ttl: int + priority: typing.Optional[int] diff --git a/selfprivacy_api/graphql/common_types/jobs.py b/selfprivacy_api/graphql/common_types/jobs.py new file mode 100644 index 0000000..4b095c8 --- /dev/null +++ b/selfprivacy_api/graphql/common_types/jobs.py @@ -0,0 +1,49 @@ +"""Jobs status""" +# pylint: disable=too-few-public-methods +import datetime +import typing +import strawberry + +from selfprivacy_api.jobs import Job, Jobs + + +@strawberry.type +class ApiJob: + """Job type for GraphQL.""" + + uid: str + name: str + description: str + status: str + status_text: typing.Optional[str] + progress: typing.Optional[int] + created_at: datetime.datetime + updated_at: datetime.datetime + finished_at: typing.Optional[datetime.datetime] + error: typing.Optional[str] + result: typing.Optional[str] + + +def job_to_api_job(job: Job) -> ApiJob: + """Convert a Job from jobs controller to a GraphQL ApiJob.""" + return ApiJob( + uid=str(job.uid), + name=job.name, + description=job.description, + status=job.status.name, + status_text=job.status_text, + progress=job.progress, + created_at=job.created_at, + updated_at=job.updated_at, + finished_at=job.finished_at, + error=job.error, + result=job.result, + ) + + +def get_api_job_by_id(job_id: str) -> typing.Optional[ApiJob]: + """Get a job for GraphQL by its ID.""" + job = Jobs.get_instance().get_job(job_id) + if job is None: + return None + return job_to_api_job(job) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py new file mode 100644 index 0000000..c1246ca --- /dev/null +++ b/selfprivacy_api/graphql/common_types/service.py @@ -0,0 +1,146 @@ +from enum import Enum +import typing +import strawberry +from selfprivacy_api.graphql.common_types.dns import DnsRecord + +from selfprivacy_api.services import get_service_by_id, get_services_by_location +from selfprivacy_api.services import Service as ServiceInterface +from selfprivacy_api.utils.block_devices import BlockDevices + + +def get_usages(root: "StorageVolume") -> list["StorageUsageInterface"]: + """Get usages of a volume""" + return [ + ServiceStorageUsage( + service=service_to_graphql_service(service), + title=service.get_display_name(), + used_space=str(service.get_storage_usage()), + volume=get_volume_by_id(service.get_location()), + ) + for service in get_services_by_location(root.name) + ] + + +@strawberry.type +class StorageVolume: + """Stats and basic info about a volume or a system disk.""" + + total_space: str + free_space: str + used_space: str + root: bool + name: str + model: typing.Optional[str] + serial: typing.Optional[str] + type: str + + @strawberry.field + def 
usages(self) -> list["StorageUsageInterface"]: + """Get usages of a volume""" + return get_usages(self) + + +@strawberry.interface +class StorageUsageInterface: + used_space: str + volume: typing.Optional[StorageVolume] + title: str + + +@strawberry.type +class ServiceStorageUsage(StorageUsageInterface): + """Storage usage for a service""" + + service: typing.Optional["Service"] + + +@strawberry.enum +class ServiceStatusEnum(Enum): + ACTIVE = "ACTIVE" + RELOADING = "RELOADING" + INACTIVE = "INACTIVE" + FAILED = "FAILED" + ACTIVATING = "ACTIVATING" + DEACTIVATING = "DEACTIVATING" + OFF = "OFF" + + +def get_storage_usage(root: "Service") -> ServiceStorageUsage: + """Get storage usage for a service""" + service = get_service_by_id(root.id) + if service is None: + return ServiceStorageUsage( + service=service, + title="Not found", + used_space="0", + volume=get_volume_by_id("sda1"), + ) + return ServiceStorageUsage( + service=service_to_graphql_service(service), + title=service.get_display_name(), + used_space=str(service.get_storage_usage()), + volume=get_volume_by_id(service.get_location()), + ) + + +@strawberry.type +class Service: + id: str + display_name: str + description: str + svg_icon: str + is_movable: bool + is_required: bool + is_enabled: bool + status: ServiceStatusEnum + url: typing.Optional[str] + dns_records: typing.Optional[typing.List[DnsRecord]] + + @strawberry.field + def storage_usage(self) -> ServiceStorageUsage: + """Get storage usage for a service""" + return get_storage_usage(self) + + +def service_to_graphql_service(service: ServiceInterface) -> Service: + """Convert service to graphql service""" + return Service( + id=service.get_id(), + display_name=service.get_display_name(), + description=service.get_description(), + svg_icon=service.get_svg_icon(), + is_movable=service.is_movable(), + is_required=service.is_required(), + is_enabled=service.is_enabled(), + status=ServiceStatusEnum(service.get_status().value), + url=service.get_url(), + dns_records=[ + DnsRecord( + record_type=record.type, + name=record.name, + content=record.content, + ttl=record.ttl, + priority=record.priority, + ) + for record in service.get_dns_records() + ], + ) + + +def get_volume_by_id(volume_id: str) -> typing.Optional[StorageVolume]: + """Get volume by id""" + volume = BlockDevices().get_block_device(volume_id) + if volume is None: + return None + return StorageVolume( + total_space=str(volume.fssize) + if volume.fssize is not None + else str(volume.size), + free_space=str(volume.fsavail), + used_space=str(volume.fsused), + root=volume.name == "sda1", + name=volume.name, + model=volume.model, + serial=volume.serial, + type=volume.type, + ) diff --git a/selfprivacy_api/graphql/common_types/user.py b/selfprivacy_api/graphql/common_types/user.py index 8cc5f2c..26ad6f2 100644 --- a/selfprivacy_api/graphql/common_types/user.py +++ b/selfprivacy_api/graphql/common_types/user.py @@ -1,8 +1,8 @@ import typing from enum import Enum import strawberry +import selfprivacy_api.actions.users as users_actions -from selfprivacy_api.utils import ReadUserData from selfprivacy_api.graphql.mutations.mutation_interface import ( MutationReturnInterface, ) @@ -28,51 +28,30 @@ class User: class UserMutationReturn(MutationReturnInterface): """Return type for user mutation""" - user: typing.Optional[User] - - -def ensure_ssh_and_users_fields_exist(data): - if "ssh" not in data: - data["ssh"] = [] - data["ssh"]["rootKeys"] = [] - - elif data["ssh"].get("rootKeys") is None: - data["ssh"]["rootKeys"] = [] - - if 
"sshKeys" not in data: - data["sshKeys"] = [] - - if "users" not in data: - data["users"] = [] + user: typing.Optional[User] = None def get_user_by_username(username: str) -> typing.Optional[User]: - with ReadUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == "root": - return User( - user_type=UserType.ROOT, - username="root", - ssh_keys=data["ssh"]["rootKeys"], - ) - - if username == data["username"]: - return User( - user_type=UserType.PRIMARY, - username=username, - ssh_keys=data["sshKeys"], - ) - - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - - return User( - user_type=UserType.NORMAL, - username=username, - ssh_keys=user["sshKeys"], - ) + user = users_actions.get_user_by_username(username) + if user is None: return None + + return User( + user_type=UserType(user.origin.value), + username=user.username, + ssh_keys=user.ssh_keys, + ) + + +def get_users() -> typing.List[User]: + """Get users""" + users = users_actions.get_users(exclude_root=True) + return [ + User( + user_type=UserType(user.origin.value), + username=user.username, + ssh_keys=user.ssh_keys, + ) + for user in users + ] diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index e0d1057..c6727db 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -2,8 +2,16 @@ # pylint: disable=too-few-public-methods import datetime import typing -from flask import request import strawberry +from strawberry.types import Info +from selfprivacy_api.actions.api_tokens import ( + CannotDeleteCallerException, + InvalidExpirationDate, + InvalidUsesLeft, + NotFoundException, + delete_api_token, + get_new_api_recovery_key, +) from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, @@ -12,11 +20,7 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( from selfprivacy_api.utils.auth import ( delete_new_device_auth_token, - delete_token, - generate_recovery_token, get_new_device_auth_token, - is_token_name_exists, - is_token_name_pair_valid, refresh_token, use_mnemonic_recoverery_token, use_new_device_auth_token, @@ -64,27 +68,24 @@ class ApiMutations: self, limits: typing.Optional[RecoveryKeyLimitsInput] = None ) -> ApiKeyMutationReturn: """Generate recovery key""" - if limits is not None: - if limits.expiration_date is not None: - if limits.expiration_date < datetime.datetime.now(): - return ApiKeyMutationReturn( - success=False, - message="Expiration date must be in the future", - code=400, - key=None, - ) - if limits.uses is not None: - if limits.uses < 1: - return ApiKeyMutationReturn( - success=False, - message="Uses must be greater than 0", - code=400, - key=None, - ) - if limits is not None: - key = generate_recovery_token(limits.expiration_date, limits.uses) - else: - key = generate_recovery_token(None, None) + if limits is None: + limits = RecoveryKeyLimitsInput() + try: + key = get_new_api_recovery_key(limits.expiration_date, limits.uses) + except InvalidExpirationDate: + return ApiKeyMutationReturn( + success=False, + message="Expiration date must be in the future", + code=400, + key=None, + ) + except InvalidUsesLeft: + return ApiKeyMutationReturn( + success=False, + message="Uses must be greater than 0", + code=400, + key=None, + ) return ApiKeyMutationReturn( success=True, message="Recovery key generated", 
@@ -113,12 +114,12 @@ class ApiMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def refresh_device_api_token(self) -> DeviceApiTokenMutationReturn: + def refresh_device_api_token(self, info: Info) -> DeviceApiTokenMutationReturn: """Refresh device api token""" token = ( - request.headers.get("Authorization").split(" ")[1] - if request.headers.get("Authorization") is not None - else None + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") ) if token is None: return DeviceApiTokenMutationReturn( @@ -143,26 +144,33 @@ class ApiMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def delete_device_api_token(self, device: str) -> GenericMutationReturn: + def delete_device_api_token(self, device: str, info: Info) -> GenericMutationReturn: """Delete device api token""" self_token = ( - request.headers.get("Authorization").split(" ")[1] - if request.headers.get("Authorization") is not None - else None + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") ) - if self_token is not None and is_token_name_pair_valid(device, self_token): - return GenericMutationReturn( - success=False, - message="Cannot delete caller's token", - code=400, - ) - if not is_token_name_exists(device): + try: + delete_api_token(self_token, device) + except NotFoundException: return GenericMutationReturn( success=False, message="Token not found", code=404, ) - delete_token(device) + except CannotDeleteCallerException: + return GenericMutationReturn( + success=False, + message="Cannot delete caller token", + code=400, + ) + except Exception as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) return GenericMutationReturn( success=True, message="Token deleted", diff --git a/selfprivacy_api/graphql/mutations/job_mutations.py b/selfprivacy_api/graphql/mutations/job_mutations.py new file mode 100644 index 0000000..d3a3498 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/job_mutations.py @@ -0,0 +1,27 @@ +"""Manipulate jobs""" +# pylint: disable=too-few-public-methods +import strawberry + +from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn +from selfprivacy_api.jobs import Jobs + + +@strawberry.type +class JobMutations: + """Mutations related to jobs""" + + @strawberry.mutation + def remove_job(self, job_id: str) -> GenericMutationReturn: + """Remove a job from the queue""" + result = Jobs().remove_by_uuid(job_id) + if result: + return GenericMutationReturn( + success=True, + code=200, + message="Job removed", + ) + return GenericMutationReturn( + success=False, + code=404, + message="Job not found", + ) diff --git a/selfprivacy_api/graphql/mutations/mutation_interface.py b/selfprivacy_api/graphql/mutations/mutation_interface.py index 32146fc..33a6b02 100644 --- a/selfprivacy_api/graphql/mutations/mutation_interface.py +++ b/selfprivacy_api/graphql/mutations/mutation_interface.py @@ -1,4 +1,7 @@ import strawberry +import typing + +from selfprivacy_api.graphql.common_types.jobs import ApiJob @strawberry.interface @@ -11,3 +14,8 @@ class MutationReturnInterface: @strawberry.type class GenericMutationReturn(MutationReturnInterface): pass + + +@strawberry.type +class GenericJobButationReturn(MutationReturnInterface): + job: typing.Optional[ApiJob] = None diff --git a/selfprivacy_api/graphql/mutations/services_mutations.py b/selfprivacy_api/graphql/mutations/services_mutations.py new file mode 100644 index 0000000..38a0d7f --- /dev/null +++ 
b/selfprivacy_api/graphql/mutations/services_mutations.py @@ -0,0 +1,169 @@ +"""Services mutations""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.common_types.jobs import job_to_api_job + +from selfprivacy_api.graphql.common_types.service import ( + Service, + service_to_graphql_service, +) +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericJobButationReturn, + GenericMutationReturn, +) + +from selfprivacy_api.services import get_service_by_id +from selfprivacy_api.utils.block_devices import BlockDevices + + +@strawberry.type +class ServiceMutationReturn(GenericMutationReturn): + """Service mutation return type.""" + + service: typing.Optional[Service] = None + + +@strawberry.input +class MoveServiceInput: + """Move service input type.""" + + service_id: str + location: str + + +@strawberry.type +class ServiceJobMutationReturn(GenericJobButationReturn): + """Service job mutation return type.""" + + service: typing.Optional[Service] = None + + +@strawberry.type +class ServicesMutations: + """Services mutations.""" + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def enable_service(self, service_id: str) -> ServiceMutationReturn: + """Enable service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.enable() + return ServiceMutationReturn( + success=True, + message="Service enabled.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def disable_service(self, service_id: str) -> ServiceMutationReturn: + """Disable service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.disable() + return ServiceMutationReturn( + success=True, + message="Service disabled.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def stop_service(self, service_id: str) -> ServiceMutationReturn: + """Stop service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.stop() + return ServiceMutationReturn( + success=True, + message="Service stopped.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def start_service(self, service_id: str) -> ServiceMutationReturn: + """Start service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.start() + return ServiceMutationReturn( + success=True, + message="Service started.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def restart_service(self, service_id: str) -> ServiceMutationReturn: + """Restart service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.restart() + return ServiceMutationReturn( + success=True, + message="Service restarted.", + code=200, + 
service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def move_service(self, input: MoveServiceInput) -> ServiceJobMutationReturn: + """Move service.""" + service = get_service_by_id(input.service_id) + if service is None: + return ServiceJobMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + if not service.is_movable(): + return ServiceJobMutationReturn( + success=False, + message="Service is not movable.", + code=400, + service=service_to_graphql_service(service), + ) + volume = BlockDevices().get_block_device(input.location) + if volume is None: + return ServiceJobMutationReturn( + success=False, + message="Volume not found.", + code=404, + service=service_to_graphql_service(service), + ) + job = service.move_to_volume(volume) + return ServiceJobMutationReturn( + success=True, + message="Service moved.", + code=200, + service=service_to_graphql_service(service), + job=job_to_api_job(job), + ) diff --git a/selfprivacy_api/graphql/mutations/ssh_mutations.py b/selfprivacy_api/graphql/mutations/ssh_mutations.py index b30f474..60f81a8 100644 --- a/selfprivacy_api/graphql/mutations/ssh_mutations.py +++ b/selfprivacy_api/graphql/mutations/ssh_mutations.py @@ -3,9 +3,13 @@ # pylint: disable=too-few-public-methods import strawberry +from selfprivacy_api.actions.users import UserNotFound from selfprivacy_api.graphql import IsAuthenticated -from selfprivacy_api.graphql.mutations.ssh_utils import ( +from selfprivacy_api.actions.ssh import ( + InvalidPublicKey, + KeyAlreadyExists, + KeyNotFound, create_ssh_key, remove_ssh_key, ) @@ -31,12 +35,37 @@ class SshMutations: def add_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: """Add a new ssh key""" - success, message, code = create_ssh_key(ssh_input.username, ssh_input.ssh_key) + try: + create_ssh_key(ssh_input.username, ssh_input.ssh_key) + except KeyAlreadyExists: + return UserMutationReturn( + success=False, + message="Key already exists", + code=409, + ) + except InvalidPublicKey: + return UserMutationReturn( + success=False, + message="Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", + code=400, + ) + except UserNotFound: + return UserMutationReturn( + success=False, + message="User not found", + code=404, + ) + except Exception as e: + return UserMutationReturn( + success=False, + message=str(e), + code=500, + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="New SSH key successfully written", + code=201, user=get_user_by_username(ssh_input.username), ) @@ -44,11 +73,30 @@ class SshMutations: def remove_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: """Remove ssh key from user""" - success, message, code = remove_ssh_key(ssh_input.username, ssh_input.ssh_key) + try: + remove_ssh_key(ssh_input.username, ssh_input.ssh_key) + except KeyNotFound: + return UserMutationReturn( + success=False, + message="Key not found", + code=404, + ) + except UserNotFound: + return UserMutationReturn( + success=False, + message="User not found", + code=404, + ) + except Exception as e: + return UserMutationReturn( + success=False, + message=str(e), + code=500, + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="SSH key successfully removed", + code=200, user=get_user_by_username(ssh_input.username), ) diff --git a/selfprivacy_api/graphql/mutations/ssh_utils.py b/selfprivacy_api/graphql/mutations/ssh_utils.py deleted file mode 100644 index 3dbc152..0000000 --- a/selfprivacy_api/graphql/mutations/ssh_utils.py +++ /dev/null @@ -1,74 +0,0 @@ -from selfprivacy_api.graphql.common_types.user import ensure_ssh_and_users_fields_exist -from selfprivacy_api.utils import ( - WriteUserData, - validate_ssh_public_key, -) - - -def create_ssh_key(username: str, ssh_key: str) -> tuple[bool, str, int]: - """Create a new ssh key""" - - if not validate_ssh_public_key(ssh_key): - return ( - False, - "Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", - 400, - ) - - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == data["username"]: - if ssh_key in data["sshKeys"]: - return False, "Key already exists", 409 - - data["sshKeys"].append(ssh_key) - return True, "New SSH key successfully written", 201 - - if username == "root": - if ssh_key in data["ssh"]["rootKeys"]: - return False, "Key already exists", 409 - - data["ssh"]["rootKeys"].append(ssh_key) - return True, "New SSH key successfully written", 201 - - for user in data["users"]: - if user["username"] == username: - if ssh_key in user["sshKeys"]: - return False, "Key already exists", 409 - - user["sshKeys"].append(ssh_key) - return True, "New SSH key successfully written", 201 - - return False, "User not found", 404 - - -def remove_ssh_key(username: str, ssh_key: str) -> tuple[bool, str, int]: - """Delete a ssh key""" - - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == "root": - if ssh_key in data["ssh"]["rootKeys"]: - data["ssh"]["rootKeys"].remove(ssh_key) - return True, "SSH key deleted", 200 - - return False, "Key not found", 404 - - if username == data["username"]: - if ssh_key in data["sshKeys"]: - data["sshKeys"].remove(ssh_key) - return True, "SSH key deleted", 200 - - return False, "Key not found", 404 - - for user in data["users"]: - if user["username"] == username: - if ssh_key in user["sshKeys"]: - user["sshKeys"].remove(ssh_key) - return True, "SSH key deleted", 200 - - return False, "Key not found", 404 - - return False, "User not found", 404 diff --git a/selfprivacy_api/graphql/mutations/storage_mutation.py b/selfprivacy_api/graphql/mutations/storage_mutations.py similarity index 61% rename from selfprivacy_api/graphql/mutations/storage_mutation.py rename to selfprivacy_api/graphql/mutations/storage_mutations.py index ff69aea..1b6d74e 100644 --- a/selfprivacy_api/graphql/mutations/storage_mutation.py +++ b/selfprivacy_api/graphql/mutations/storage_mutations.py @@ -1,11 +1,28 @@ """Storage devices mutations""" -import typing import strawberry from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.common_types.jobs import job_to_api_job from selfprivacy_api.utils.block_devices import BlockDevices from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericJobButationReturn, GenericMutationReturn, ) +from selfprivacy_api.jobs.migrate_to_binds import ( + BindMigrationConfig, + is_bind_migrated, + start_bind_migration, +) + + +@strawberry.input +class MigrateToBindsInput: + """Migrate to binds input""" + + email_block_device: str + bitwarden_block_device: str + gitea_block_device: str + nextcloud_block_device: str + pleroma_block_device: str @strawberry.type @@ -60,3 +77,26 @@ class StorageMutations: return GenericMutationReturn( success=False, code=409, message="Volume not unmounted (already unmounted?)" ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def migrate_to_binds(self, input: MigrateToBindsInput) -> GenericJobButationReturn: + """Migrate to binds""" + if is_bind_migrated(): + return GenericJobButationReturn( + success=False, code=409, message="Already migrated to binds" + ) + job = start_bind_migration( + BindMigrationConfig( + email_block_device=input.email_block_device, + bitwarden_block_device=input.bitwarden_block_device, + gitea_block_device=input.gitea_block_device, + nextcloud_block_device=input.nextcloud_block_device, + pleroma_block_device=input.pleroma_block_device, + 
) + ) + return GenericJobButationReturn( + success=True, + code=200, + message="Migration to binds started, rebuild the system to apply changes", + job=job_to_api_job(job), + ) diff --git a/selfprivacy_api/graphql/mutations/system_mutations.py b/selfprivacy_api/graphql/mutations/system_mutations.py index 057c26f..daada17 100644 --- a/selfprivacy_api/graphql/mutations/system_mutations.py +++ b/selfprivacy_api/graphql/mutations/system_mutations.py @@ -1,15 +1,14 @@ """System management mutations""" # pylint: disable=too-few-public-methods -import subprocess import typing -import pytz import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, MutationReturnInterface, ) -from selfprivacy_api.utils import WriteUserData + +import selfprivacy_api.actions.system as system_actions @strawberry.type @@ -42,15 +41,15 @@ class SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def change_timezone(self, timezone: str) -> TimezoneMutationReturn: """Change the timezone of the server. Timezone is a tzdatabase name.""" - if timezone not in pytz.all_timezones: + try: + system_actions.change_timezone(timezone) + except system_actions.InvalidTimezone as e: return TimezoneMutationReturn( success=False, - message="Invalid timezone", + message=str(e), code=400, timezone=None, ) - with WriteUserData() as data: - data["timezone"] = timezone return TimezoneMutationReturn( success=True, message="Timezone changed", @@ -63,36 +62,23 @@ class SystemMutations: self, settings: AutoUpgradeSettingsInput ) -> AutoUpgradeSettingsMutationReturn: """Change auto upgrade settings of the server.""" - with WriteUserData() as data: - if "autoUpgrade" not in data: - data["autoUpgrade"] = {} - if "enable" not in data["autoUpgrade"]: - data["autoUpgrade"]["enable"] = True - if "allowReboot" not in data["autoUpgrade"]: - data["autoUpgrade"]["allowReboot"] = False + system_actions.set_auto_upgrade_settings( + settings.enableAutoUpgrade, settings.allowReboot + ) - if settings.enableAutoUpgrade is not None: - data["autoUpgrade"]["enable"] = settings.enableAutoUpgrade - if settings.allowReboot is not None: - data["autoUpgrade"]["allowReboot"] = settings.allowReboot - - auto_upgrade = data["autoUpgrade"]["enable"] - allow_reboot = data["autoUpgrade"]["allowReboot"] + new_settings = system_actions.get_auto_upgrade_settings() return AutoUpgradeSettingsMutationReturn( success=True, message="Auto-upgrade settings changed", code=200, - enableAutoUpgrade=auto_upgrade, - allowReboot=allow_reboot, + enableAutoUpgrade=new_settings.enable, + allowReboot=new_settings.allowReboot, ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_rebuild(self) -> GenericMutationReturn: - rebuild_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True - ) - rebuild_result.communicate()[0] + system_actions.rebuild_system() return GenericMutationReturn( success=True, message="Starting rebuild system", @@ -101,10 +87,7 @@ class SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_rollback(self) -> GenericMutationReturn: - rollback_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True - ) - rollback_result.communicate()[0] + system_actions.rollback_system() return GenericMutationReturn( success=True, message="Starting rebuild system", @@ -113,10 +96,7 @@ class SystemMutations: 
@strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_upgrade(self) -> GenericMutationReturn: - upgrade_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True - ) - upgrade_result.communicate()[0] + system_actions.upgrade_system() return GenericMutationReturn( success=True, message="Starting rebuild system", @@ -125,9 +105,24 @@ class SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def reboot_system(self) -> GenericMutationReturn: - subprocess.Popen(["reboot"], start_new_session=True) + system_actions.reboot_system() return GenericMutationReturn( success=True, message="System reboot has started", code=200, ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def pull_repository_changes(self) -> GenericMutationReturn: + result = system_actions.pull_repository_changes() + if result.status == 0: + return GenericMutationReturn( + success=True, + message="Repository changes pulled", + code=200, + ) + return GenericMutationReturn( + success=False, + message=f"Failed to pull repository changes:\n{result.data}", + code=500, + ) diff --git a/selfprivacy_api/graphql/mutations/users_mutations.py b/selfprivacy_api/graphql/mutations/users_mutations.py index a284ff2..27be1d7 100644 --- a/selfprivacy_api/graphql/mutations/users_mutations.py +++ b/selfprivacy_api/graphql/mutations/users_mutations.py @@ -10,11 +10,7 @@ from selfprivacy_api.graphql.common_types.user import ( from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, ) -from selfprivacy_api.graphql.mutations.users_utils import ( - create_user, - delete_user, - update_user, -) +import selfprivacy_api.actions.users as users_actions @strawberry.input @@ -31,35 +27,91 @@ class UserMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def create_user(self, user: UserMutationInput) -> UserMutationReturn: - - success, message, code = create_user(user.username, user.password) + try: + users_actions.create_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UsernameForbidden as e: + return UserMutationReturn( + success=False, + message=str(e), + code=409, + ) + except users_actions.UsernameNotAlphanumeric as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UsernameTooLong as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UserAlreadyExists as e: + return UserMutationReturn( + success=False, + message=str(e), + code=409, + user=get_user_by_username(user.username), + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="User created", + code=201, user=get_user_by_username(user.username), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def delete_user(self, username: str) -> GenericMutationReturn: - success, message, code = delete_user(username) + try: + users_actions.delete_user(username) + except users_actions.UserNotFound as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=404, + ) + except users_actions.UserIsProtected as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=400, + ) return GenericMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="User deleted", + code=200, ) 
@strawberry.mutation(permission_classes=[IsAuthenticated]) def update_user(self, user: UserMutationInput) -> UserMutationReturn: """Update user mutation""" - - success, message, code = update_user(user.username, user.password) + try: + users_actions.update_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UserNotFound as e: + return UserMutationReturn( + success=False, + message=str(e), + code=404, + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="User updated", + code=200, user=get_user_by_username(user.username), ) diff --git a/selfprivacy_api/graphql/mutations/users_utils.py b/selfprivacy_api/graphql/mutations/users_utils.py deleted file mode 100644 index f649b45..0000000 --- a/selfprivacy_api/graphql/mutations/users_utils.py +++ /dev/null @@ -1,111 +0,0 @@ -import re -from selfprivacy_api.utils import ( - WriteUserData, - ReadUserData, - is_username_forbidden, -) -from selfprivacy_api.utils import hash_password - - -def ensure_ssh_and_users_fields_exist(data): - if "ssh" not in data: - data["ssh"] = [] - data["ssh"]["rootKeys"] = [] - - elif data["ssh"].get("rootKeys") is None: - data["ssh"]["rootKeys"] = [] - - if "sshKeys" not in data: - data["sshKeys"] = [] - - if "users" not in data: - data["users"] = [] - - -def create_user(username: str, password: str) -> tuple[bool, str, int]: - """Create a new user""" - - # Check if password is null or none - if password == "": - return False, "Password is null", 400 - - # Check if username is forbidden - if is_username_forbidden(username): - return False, "Username is forbidden", 409 - - # Check is username passes regex - if not re.match(r"^[a-z_][a-z0-9_]+$", username): - return False, "Username must be alphanumeric", 400 - - # Check if username less than 32 characters - if len(username) >= 32: - return False, "Username must be less than 32 characters", 400 - - with ReadUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - # Return 409 if user already exists - if data["username"] == username: - return False, "User already exists", 409 - - for data_user in data["users"]: - if data_user["username"] == username: - return False, "User already exists", 409 - - hashed_password = hash_password(password) - - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - data["users"].append( - { - "username": username, - "hashedPassword": hashed_password, - "sshKeys": [], - } - ) - - return True, "User was successfully created!", 201 - - -def delete_user(username: str) -> tuple[bool, str, int]: - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == data["username"] or username == "root": - return False, "Cannot delete main or root user", 400 - - # Return 404 if user does not exist - for data_user in data["users"]: - if data_user["username"] == username: - data["users"].remove(data_user) - break - else: - return False, "User does not exist", 404 - - return True, "User was deleted", 200 - - -def update_user(username: str, password: str) -> tuple[bool, str, int]: - # Check if password is null or none - if password == "": - return False, "Password is null", 400 - - hashed_password = hash_password(password) - - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == data["username"]: - data["hashedMasterPassword"] = hashed_password - - # Return 404 if user does not exist 
- else: - for data_user in data["users"]: - if data_user["username"] == username: - data_user["hashedPassword"] = hashed_password - break - else: - return False, "User does not exist", 404 - - return True, "User was successfully updated", 200 diff --git a/selfprivacy_api/graphql/queries/api_queries.py b/selfprivacy_api/graphql/queries/api_queries.py index b2a81d2..7994a8f 100644 --- a/selfprivacy_api/graphql/queries/api_queries.py +++ b/selfprivacy_api/graphql/queries/api_queries.py @@ -2,26 +2,23 @@ # pylint: disable=too-few-public-methods import datetime import typing -from flask import request import strawberry +from strawberry.types import Info +from selfprivacy_api.actions.api_tokens import get_api_tokens_with_caller_flag from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.utils import parse_date +from selfprivacy_api.dependencies import get_api_version as get_api_version_dependency from selfprivacy_api.utils.auth import ( get_recovery_token_status, - get_tokens_info, is_recovery_token_exists, is_recovery_token_valid, - is_token_name_exists, - is_token_name_pair_valid, - refresh_token, - get_token_name, ) def get_api_version() -> str: """Get API version""" - return "1.2.7" + return get_api_version_dependency() @strawberry.type @@ -33,24 +30,6 @@ class ApiDevice: is_caller: bool -def get_devices() -> typing.List[ApiDevice]: - """Get list of devices""" - caller_name = get_token_name( - request.headers.get("Authorization").split(" ")[1] - if request.headers.get("Authorization") is not None - else None - ) - tokens = get_tokens_info() - return [ - ApiDevice( - name=token["name"], - creation_date=parse_date(token["date"]), - is_caller=token["name"] == caller_name, - ) - for token in tokens - ] - - @strawberry.type class ApiRecoveryKeyStatus: """Recovery key status""" @@ -97,9 +76,22 @@ class Api: """API access status""" version: str = strawberry.field(resolver=get_api_version) - devices: typing.List[ApiDevice] = strawberry.field( - resolver=get_devices, permission_classes=[IsAuthenticated] - ) + + @strawberry.field(permission_classes=[IsAuthenticated]) + def devices(self, info: Info) -> typing.List[ApiDevice]: + return [ + ApiDevice( + name=device.name, + creation_date=device.date, + is_caller=device.is_caller, + ) + for device in get_api_tokens_with_caller_flag( + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") + ) + ] + recovery_key: ApiRecoveryKeyStatus = strawberry.field( resolver=get_recovery_key_status, permission_classes=[IsAuthenticated] ) diff --git a/selfprivacy_api/graphql/queries/jobs.py b/selfprivacy_api/graphql/queries/jobs.py new file mode 100644 index 0000000..426c563 --- /dev/null +++ b/selfprivacy_api/graphql/queries/jobs.py @@ -0,0 +1,25 @@ +"""Jobs status""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from selfprivacy_api.graphql.common_types.jobs import ( + ApiJob, + get_api_job_by_id, + job_to_api_job, +) + +from selfprivacy_api.jobs import Jobs + + +@strawberry.type +class Job: + @strawberry.field + def get_jobs(self) -> typing.List[ApiJob]: + + Jobs.get_instance().get_jobs() + + return [job_to_api_job(job) for job in Jobs.get_instance().get_jobs()] + + @strawberry.field + def get_job(self, job_id: str) -> typing.Optional[ApiJob]: + return get_api_job_by_id(job_id) diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py index 774d465..6d0381e 100644 --- a/selfprivacy_api/graphql/queries/providers.py +++ 
b/selfprivacy_api/graphql/queries/providers.py @@ -1,7 +1,5 @@ """Enums representing different service providers.""" from enum import Enum -import datetime -import typing import strawberry diff --git a/selfprivacy_api/graphql/queries/services.py b/selfprivacy_api/graphql/queries/services.py new file mode 100644 index 0000000..5398f81 --- /dev/null +++ b/selfprivacy_api/graphql/queries/services.py @@ -0,0 +1,18 @@ +"""Services status""" +# pylint: disable=too-few-public-methods +import typing +import strawberry + +from selfprivacy_api.graphql.common_types.service import ( + Service, + service_to_graphql_service, +) +from selfprivacy_api.services import get_all_services + + +@strawberry.type +class Services: + @strawberry.field + def all_services(self) -> typing.List[Service]: + services = get_all_services() + return [service_to_graphql_service(service) for service in services] diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py index 6315b26..6800518 100644 --- a/selfprivacy_api/graphql/queries/storage.py +++ b/selfprivacy_api/graphql/queries/storage.py @@ -2,23 +2,13 @@ # pylint: disable=too-few-public-methods import typing import strawberry + +from selfprivacy_api.graphql.common_types.service import ( + StorageVolume, +) from selfprivacy_api.utils.block_devices import BlockDevices -@strawberry.type -class StorageVolume: - """Stats and basic info about a volume or a system disk.""" - - total_space: str - free_space: str - used_space: str - root: bool - name: str - model: typing.Optional[str] - serial: typing.Optional[str] - type: str - - @strawberry.type class Storage: """GraphQL queries to get storage information.""" diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index a235e4d..0e2a7ec 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -1,23 +1,18 @@ """Common system information and settings""" # pylint: disable=too-few-public-methods -import subprocess +import os import typing import strawberry +from selfprivacy_api.graphql.common_types.dns import DnsRecord from selfprivacy_api.graphql.queries.common import Alert, Severity from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvider +from selfprivacy_api.jobs import Jobs +from selfprivacy_api.jobs.migrate_to_binds import is_bind_migrated +from selfprivacy_api.services import get_all_required_dns_records from selfprivacy_api.utils import ReadUserData - - -@strawberry.type -class DnsRecord: - """DNS record""" - - recordType: str - name: str - content: str - ttl: int - priority: typing.Optional[int] +import selfprivacy_api.actions.system as system_actions +import selfprivacy_api.actions.ssh as ssh_actions @strawberry.type @@ -27,7 +22,20 @@ class SystemDomainInfo: domain: str hostname: str provider: DnsProvider - required_dns_records: typing.List[DnsRecord] + + @strawberry.field + def required_dns_records(self) -> typing.List[DnsRecord]: + """Collect all required DNS records for all services""" + return [ + DnsRecord( + record_type=record.type, + name=record.name, + content=record.content, + ttl=record.ttl, + priority=record.priority, + ) + for record in get_all_required_dns_records() + ] def get_system_domain_info() -> SystemDomainInfo: @@ -37,8 +45,6 @@ def get_system_domain_info() -> SystemDomainInfo: domain=user_data["domain"], hostname=user_data["hostname"], provider=DnsProvider.CLOUDFLARE, - # TODO: get ip somehow - required_dns_records=[], ) @@ -52,17 +58,11 
@@ class AutoUpgradeOptions: def get_auto_upgrade_options() -> AutoUpgradeOptions: """Get automatic upgrade options""" - with ReadUserData() as user_data: - if "autoUpgrade" not in user_data: - return AutoUpgradeOptions(enable=True, allow_reboot=False) - if "enable" not in user_data["autoUpgrade"]: - user_data["autoUpgrade"]["enable"] = True - if "allowReboot" not in user_data["autoUpgrade"]: - user_data["autoUpgrade"]["allowReboot"] = False - return AutoUpgradeOptions( - enable=user_data["autoUpgrade"]["enable"], - allow_reboot=user_data["autoUpgrade"]["allowReboot"], - ) + settings = system_actions.get_auto_upgrade_settings() + return AutoUpgradeOptions( + enable=settings.enable, + allow_reboot=settings.allowReboot, + ) @strawberry.type @@ -76,30 +76,17 @@ class SshSettings: def get_ssh_settings() -> SshSettings: """Get SSH settings""" - with ReadUserData() as user_data: - if "ssh" not in user_data: - return SshSettings( - enable=False, password_authentication=False, root_ssh_keys=[] - ) - if "enable" not in user_data["ssh"]: - user_data["ssh"]["enable"] = False - if "passwordAuthentication" not in user_data["ssh"]: - user_data["ssh"]["passwordAuthentication"] = False - if "rootKeys" not in user_data["ssh"]: - user_data["ssh"]["rootKeys"] = [] - return SshSettings( - enable=user_data["ssh"]["enable"], - password_authentication=user_data["ssh"]["passwordAuthentication"], - root_ssh_keys=user_data["ssh"]["rootKeys"], - ) + settings = ssh_actions.get_ssh_settings() + return SshSettings( + enable=settings.enable, + password_authentication=settings.passwordAuthentication, + root_ssh_keys=settings.rootKeys, + ) def get_system_timezone() -> str: """Get system timezone""" - with ReadUserData() as user_data: - if "timezone" not in user_data: - return "Europe/Uzhgorod" - return user_data["timezone"] + return system_actions.get_timezone() @strawberry.type @@ -115,12 +102,12 @@ class SystemSettings: def get_system_version() -> str: """Get system version""" - return subprocess.check_output(["uname", "-a"]).decode("utf-8").strip() + return system_actions.get_system_version() def get_python_version() -> str: """Get Python version""" - return subprocess.check_output(["python", "-V"]).decode("utf-8").strip() + return system_actions.get_python_version() @strawberry.type @@ -130,6 +117,11 @@ class SystemInfo: system_version: str = strawberry.field(resolver=get_system_version) python_version: str = strawberry.field(resolver=get_python_version) + @strawberry.field + def using_binds(self) -> bool: + """Check if the system is using BINDs""" + return is_bind_migrated() + @strawberry.type class SystemProviderInfo: @@ -162,4 +154,13 @@ class System: settings: SystemSettings = SystemSettings() info: SystemInfo = SystemInfo() provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info) - busy: bool = False + + @strawberry.field + def busy(self) -> bool: + """Check if the system is busy""" + return Jobs.is_busy() + + @strawberry.field + def working_directory(self) -> str: + """Get working directory""" + return os.getcwd() diff --git a/selfprivacy_api/graphql/queries/users.py b/selfprivacy_api/graphql/queries/users.py index fc18a84..d2c0555 100644 --- a/selfprivacy_api/graphql/queries/users.py +++ b/selfprivacy_api/graphql/queries/users.py @@ -5,27 +5,12 @@ import strawberry from selfprivacy_api.graphql.common_types.user import ( User, - ensure_ssh_and_users_fields_exist, get_user_by_username, + get_users, ) -from selfprivacy_api.utils import ReadUserData from selfprivacy_api.graphql import 
IsAuthenticated -def get_users() -> typing.List[User]: - """Get users""" - user_list = [] - with ReadUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - for user in data["users"]: - user_list.append(get_user_by_username(user["username"])) - - user_list.append(get_user_by_username(data["username"])) - - return user_list - - @strawberry.type class Users: @strawberry.field(permission_classes=[IsAuthenticated]) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index c2d6a10..dff9304 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -1,19 +1,27 @@ """GraphQL API for SelfPrivacy.""" # pylint: disable=too-few-public-methods +import asyncio +from typing import AsyncGenerator import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations +from selfprivacy_api.graphql.mutations.job_mutations import JobMutations +from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn +from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations -from selfprivacy_api.graphql.mutations.storage_mutation import StorageMutations +from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from selfprivacy_api.graphql.queries.api_queries import Api +from selfprivacy_api.graphql.queries.jobs import Job +from selfprivacy_api.graphql.queries.services import Services from selfprivacy_api.graphql.queries.storage import Storage from selfprivacy_api.graphql.queries.system import System from selfprivacy_api.graphql.mutations.users_mutations import UserMutations from selfprivacy_api.graphql.queries.users import Users +from selfprivacy_api.jobs.test import test_job @strawberry.type @@ -40,6 +48,16 @@ class Query: """Storage queries""" return Storage() + @strawberry.field(permission_classes=[IsAuthenticated]) + def jobs(self) -> Job: + """Jobs queries""" + return Job() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def services(self) -> Services: + """Services queries""" + return Services() + @strawberry.type class Mutation( @@ -48,10 +66,33 @@ class Mutation( UserMutations, SshMutations, StorageMutations, + ServicesMutations, + JobMutations, ): """Root schema for mutations""" + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def test_mutation(self) -> GenericMutationReturn: + """Test mutation""" + test_job() + return GenericMutationReturn( + success=True, + message="Test mutation", + code=200, + ) + pass -schema = strawberry.Schema(query=Query, mutation=Mutation) +@strawberry.type +class Subscription: + """Root schema for subscriptions""" + + @strawberry.subscription(permission_classes=[IsAuthenticated]) + async def count(self, target: int = 100) -> AsyncGenerator[int, None]: + for i in range(target): + yield i + await asyncio.sleep(0.5) + + +schema = strawberry.Schema(query=Query, mutation=Mutation, subscription=Subscription) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index a467583..09ac254 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -16,12 +16,18 @@ A job is a dictionary with the following keys: """ import typing import datetime +from uuid import UUID +import asyncio import json import os import time import uuid from enum import Enum 
+from pydantic import BaseModel + +from selfprivacy_api.utils import ReadUserData, UserDataFiles, WriteUserData + class JobStatus(Enum): """ @@ -34,65 +40,23 @@ class JobStatus(Enum): ERROR = "ERROR" -class Job: +class Job(BaseModel): """ Job class. """ - def __init__( - self, - name: str, - description: str, - status: JobStatus, - created_at: datetime.datetime, - updated_at: datetime.datetime, - finished_at: typing.Optional[datetime.datetime], - error: typing.Optional[str], - result: typing.Optional[str], - ): - self.id = str(uuid.uuid4()) - self.name = name - self.description = description - self.status = status - self.created_at = created_at - self.updated_at = updated_at - self.finished_at = finished_at - self.error = error - self.result = result - - def to_dict(self) -> dict: - """ - Convert the job to a dictionary. - """ - return { - "id": self.id, - "name": self.name, - "description": self.description, - "status": self.status, - "created_at": self.created_at, - "updated_at": self.updated_at, - "finished_at": self.finished_at, - "error": self.error, - "result": self.result, - } - - def to_json(self) -> str: - """ - Convert the job to a JSON string. - """ - return json.dumps(self.to_dict()) - - def __str__(self) -> str: - """ - Convert the job to a string. - """ - return self.to_json() - - def __repr__(self) -> str: - """ - Convert the job to a string. - """ - return self.to_json() + uid: UUID = uuid.uuid4() + type_id: str + name: str + description: str + status: JobStatus + status_text: typing.Optional[str] + progress: typing.Optional[int] + created_at: datetime.datetime + updated_at: datetime.datetime + finished_at: typing.Optional[datetime.datetime] + error: typing.Optional[str] + result: typing.Optional[str] class Jobs: @@ -109,6 +73,9 @@ class Jobs: """ if Jobs.__instance is None: Jobs() + if Jobs.__instance is None: + raise Exception("Couldn't init Jobs singleton!") + return Jobs.__instance return Jobs.__instance def __init__(self): @@ -119,41 +86,78 @@ class Jobs: raise Exception("This class is a singleton!") else: Jobs.__instance = self - self.jobs = [] + @staticmethod + def reset() -> None: + """ + Reset the jobs list. + """ + with WriteUserData(UserDataFiles.JOBS) as user_data: + user_data["jobs"] = [] + + @staticmethod def add( - self, name: str, description: str, status: JobStatus = JobStatus.CREATED + name: str, + type_id: str, + description: str, + status: JobStatus = JobStatus.CREATED, + status_text: str = "", + progress: int = 0, ) -> Job: """ Add a job to the jobs list. """ job = Job( name=name, + type_id=type_id, description=description, status=status, + status_text=status_text, + progress=progress, created_at=datetime.datetime.now(), updated_at=datetime.datetime.now(), finished_at=None, error=None, result=None, ) - self.jobs.append(job) + with WriteUserData(UserDataFiles.JOBS) as user_data: + try: + if "jobs" not in user_data: + user_data["jobs"] = [] + user_data["jobs"].append(json.loads(job.json())) + except json.decoder.JSONDecodeError: + user_data["jobs"] = [json.loads(job.json())] return job def remove(self, job: Job) -> None: """ Remove a job from the jobs list. """ - self.jobs.remove(job) + self.remove_by_uuid(str(job.uid)) + def remove_by_uuid(self, job_uuid: str) -> bool: + """ + Remove a job from the jobs list. 
+ """ + with WriteUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for i, j in enumerate(user_data["jobs"]): + if j["uid"] == job_uuid: + del user_data["jobs"][i] + return True + return False + + @staticmethod def update( - self, job: Job, - name: typing.Optional[str], - description: typing.Optional[str], status: JobStatus, - error: typing.Optional[str], - result: typing.Optional[str], + status_text: typing.Optional[str] = None, + progress: typing.Optional[int] = None, + name: typing.Optional[str] = None, + description: typing.Optional[str] = None, + error: typing.Optional[str] = None, + result: typing.Optional[str] = None, ) -> Job: """ Update a job in the jobs list. @@ -162,23 +166,62 @@ class Jobs: job.name = name if description is not None: job.description = description + if status_text is not None: + job.status_text = status_text + if progress is not None: + job.progress = progress job.status = status job.updated_at = datetime.datetime.now() job.error = error job.result = result + if status in (JobStatus.FINISHED, JobStatus.ERROR): + job.finished_at = datetime.datetime.now() + + with WriteUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for i, j in enumerate(user_data["jobs"]): + if j["uid"] == str(job.uid): + user_data["jobs"][i] = json.loads(job.json()) + break + return job - def get_job(self, id: str) -> typing.Optional[Job]: + @staticmethod + def get_job(uid: str) -> typing.Optional[Job]: """ Get a job from the jobs list. """ - for job in self.jobs: - if job.id == id: - return job + with ReadUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for job in user_data["jobs"]: + if job["uid"] == uid: + return Job(**job) return None - def get_jobs(self) -> list: + @staticmethod + def get_jobs() -> typing.List[Job]: """ Get the jobs list. """ - return self.jobs + with ReadUserData(UserDataFiles.JOBS) as user_data: + try: + if "jobs" not in user_data: + user_data["jobs"] = [] + return [Job(**job) for job in user_data["jobs"]] + except json.decoder.JSONDecodeError: + return [] + + @staticmethod + def is_busy() -> bool: + """ + Check if there is a job running. + """ + with ReadUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for job in user_data["jobs"]: + if job["status"] == JobStatus.RUNNING.value: + return True + return False diff --git a/selfprivacy_api/jobs/migrate_to_binds.py b/selfprivacy_api/jobs/migrate_to_binds.py new file mode 100644 index 0000000..346023d --- /dev/null +++ b/selfprivacy_api/jobs/migrate_to_binds.py @@ -0,0 +1,291 @@ +"""Function to perform migration of app data to binds.""" +import subprocess +import pathlib +import shutil + +from pydantic import BaseModel +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.utils.block_devices import BlockDevices + + +class BindMigrationConfig(BaseModel): + """Config for bind migration. + For each service provide block device name. 
+ """ + + email_block_device: str + bitwarden_block_device: str + gitea_block_device: str + nextcloud_block_device: str + pleroma_block_device: str + + +def is_bind_migrated() -> bool: + """Check if bind migration was performed.""" + with ReadUserData() as user_data: + return user_data.get("useBinds", False) + + +def activate_binds(config: BindMigrationConfig): + """Activate binds.""" + # Activate binds in userdata + with WriteUserData() as user_data: + if "email" not in user_data: + user_data["email"] = {} + user_data["email"]["location"] = config.email_block_device + if "bitwarden" not in user_data: + user_data["bitwarden"] = {} + user_data["bitwarden"]["location"] = config.bitwarden_block_device + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["location"] = config.gitea_block_device + if "nextcloud" not in user_data: + user_data["nextcloud"] = {} + user_data["nextcloud"]["location"] = config.nextcloud_block_device + if "pleroma" not in user_data: + user_data["pleroma"] = {} + user_data["pleroma"]["location"] = config.pleroma_block_device + + user_data["useBinds"] = True + + +def move_folder( + data_path: pathlib.Path, bind_path: pathlib.Path, user: str, group: str +): + """Move folder from data to bind.""" + if data_path.exists(): + shutil.move(str(data_path), str(bind_path)) + else: + return + + data_path.mkdir(mode=0o750, parents=True, exist_ok=True) + + shutil.chown(str(bind_path), user=user, group=group) + shutil.chown(str(data_path), user=user, group=group) + + subprocess.run(["mount", "--bind", str(bind_path), str(data_path)], check=True) + + subprocess.run(["chown", "-R", f"{user}:{group}", str(data_path)], check=True) + + +@huey.task() +def migrate_to_binds(config: BindMigrationConfig, job: Job): + """Migrate app data to binds.""" + + # Exit if migration is already done + if is_bind_migrated(): + Jobs.update( + job=job, + status=JobStatus.ERROR, + error="Migration already done.", + ) + return + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=0, + status_text="Checking if all volumes are available.", + ) + # Get block devices. + block_devices = BlockDevices().get_block_devices() + block_device_names = [device.name for device in block_devices] + + # Get all unique required block devices + required_block_devices = [] + for block_device_name in config.__dict__.values(): + if block_device_name not in required_block_devices: + required_block_devices.append(block_device_name) + + # Check if all block devices from config are present. + for block_device_name in required_block_devices: + if block_device_name not in block_device_names: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not found.", + ) + return + + # Make sure all required block devices are mounted. + # sda1 is the root partition and is always mounted. + for block_device_name in required_block_devices: + if block_device_name == "sda1": + continue + block_device = BlockDevices().get_block_device(block_device_name) + if block_device is None: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not found.", + ) + return + if f"/volumes/{block_device_name}" not in block_device.mountpoints: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not mounted.", + ) + return + + # Make sure /volumes/sda1 exists. 
+ pathlib.Path("/volumes/sda1").mkdir(parents=True, exist_ok=True) + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=5, + status_text="Activating binds in NixOS config.", + ) + + activate_binds(config) + + # Perform migration of Nextcloud. + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=10, + status_text="Migrating Nextcloud.", + ) + + Nextcloud().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/nextcloud"), + bind_path=pathlib.Path(f"/volumes/{config.nextcloud_block_device}/nextcloud"), + user="nextcloud", + group="nextcloud", + ) + + # Start Nextcloud + Nextcloud().start() + + # Perform migration of Bitwarden + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=28, + status_text="Migrating Bitwarden.", + ) + + Bitwarden().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/bitwarden"), + bind_path=pathlib.Path(f"/volumes/{config.bitwarden_block_device}/bitwarden"), + user="vaultwarden", + group="vaultwarden", + ) + + move_folder( + data_path=pathlib.Path("/var/lib/bitwarden_rs"), + bind_path=pathlib.Path( + f"/volumes/{config.bitwarden_block_device}/bitwarden_rs" + ), + user="vaultwarden", + group="vaultwarden", + ) + + # Start Bitwarden + Bitwarden().start() + + # Perform migration of Gitea + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=46, + status_text="Migrating Gitea.", + ) + + Gitea().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/gitea"), + bind_path=pathlib.Path(f"/volumes/{config.gitea_block_device}/gitea"), + user="gitea", + group="gitea", + ) + + Gitea().start() + + # Perform migration of Mail server + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=64, + status_text="Migrating Mail server.", + ) + + MailServer().stop() + + move_folder( + data_path=pathlib.Path("/var/vmail"), + bind_path=pathlib.Path(f"/volumes/{config.email_block_device}/vmail"), + user="virtualMail", + group="virtualMail", + ) + + move_folder( + data_path=pathlib.Path("/var/sieve"), + bind_path=pathlib.Path(f"/volumes/{config.email_block_device}/sieve"), + user="virtualMail", + group="virtualMail", + ) + + MailServer().start() + + # Perform migration of Pleroma + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=82, + status_text="Migrating Pleroma.", + ) + + Pleroma().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/pleroma"), + bind_path=pathlib.Path(f"/volumes/{config.pleroma_block_device}/pleroma"), + user="pleroma", + group="pleroma", + ) + + move_folder( + data_path=pathlib.Path("/var/lib/postgresql"), + bind_path=pathlib.Path(f"/volumes/{config.pleroma_block_device}/postgresql"), + user="postgres", + group="postgres", + ) + + Pleroma().start() + + Jobs.update( + job=job, + status=JobStatus.FINISHED, + progress=100, + status_text="Migration finished.", + result="Migration finished.", + ) + + +def start_bind_migration(config: BindMigrationConfig) -> Job: + """Start migration.""" + job = Jobs.add( + type_id="migrations.migrate_to_binds", + name="Migrate to binds", + description="Migration required to use the new disk space management.", + ) + migrate_to_binds(config, job) + return job diff --git a/selfprivacy_api/jobs/test.py b/selfprivacy_api/jobs/test.py new file mode 100644 index 0000000..9d93fb7 --- /dev/null +++ b/selfprivacy_api/jobs/test.py @@ -0,0 +1,57 @@ +import time +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.jobs import JobStatus, Jobs + + +@huey.task() +def test_job(): + job = Jobs.get_instance().add( + type_id="test", + name="Test 
job", + description="This is a test job.", + status=JobStatus.CREATED, + status_text="", + progress=0, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=5, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=10, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=15, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=20, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=25, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.FINISHED, + status_text="Job finished.", + progress=100, + ) diff --git a/selfprivacy_api/resources/api_auth/__init__.py b/selfprivacy_api/resources/api_auth/__init__.py deleted file mode 100644 index 9bd1703..0000000 --- a/selfprivacy_api/resources/api_auth/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python3 -"""API authentication module""" - -from flask import Blueprint -from flask_restful import Api - -auth = Blueprint("auth", __name__, url_prefix="/auth") -api = Api(auth) - -from . import ( - new_device, - recovery_token, - app_tokens, -) diff --git a/selfprivacy_api/resources/api_auth/app_tokens.py b/selfprivacy_api/resources/api_auth/app_tokens.py deleted file mode 100644 index 940c60a..0000000 --- a/selfprivacy_api/resources/api_auth/app_tokens.py +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env python3 -"""App tokens management module""" -from flask import request -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils.auth import ( - delete_token, - get_tokens_info, - is_token_name_exists, - is_token_name_pair_valid, - refresh_token, - get_token_name, -) - - -class Tokens(Resource): - """Token management class - GET returns the list of active devices. - DELETE invalidates token unless it is the last one or the caller uses this token. - POST refreshes the token of the caller. 
- """ - - def get(self): - """ - Get current device tokens - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: List of tokens - 400: - description: Bad request - """ - caller_name = get_token_name(request.headers.get("Authorization").split(" ")[1]) - tokens = get_tokens_info() - # Retrun a list of tokens and if it is the caller's token - # it will be marked with a flag - return [ - { - "name": token["name"], - "date": token["date"], - "is_caller": token["name"] == caller_name, - } - for token in tokens - ] - - def delete(self): - """ - Delete token - --- - tags: - - Tokens - security: - - bearerAuth: [] - parameters: - - in: body - name: token - required: true - description: Token's name to delete - schema: - type: object - properties: - token_name: - type: string - description: Token name to delete - required: true - responses: - 200: - description: Token deleted - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token_name", type=str, required=True, help="Token to delete" - ) - args = parser.parse_args() - token_name = args["token_name"] - if is_token_name_pair_valid( - token_name, request.headers.get("Authorization").split(" ")[1] - ): - return {"message": "Cannot delete caller's token"}, 400 - if not is_token_name_exists(token_name): - return {"message": "Token not found"}, 404 - delete_token(token_name) - return {"message": "Token deleted"}, 200 - - def post(self): - """ - Refresh token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: Token refreshed - 400: - description: Bad request - 404: - description: Token not found - """ - # Get token from header - token = request.headers.get("Authorization").split(" ")[1] - new_token = refresh_token(token) - if new_token is None: - return {"message": "Token not found"}, 404 - return {"token": new_token}, 200 - - -api.add_resource(Tokens, "/tokens") diff --git a/selfprivacy_api/resources/api_auth/new_device.py b/selfprivacy_api/resources/api_auth/new_device.py deleted file mode 100644 index 2c0bde1..0000000 --- a/selfprivacy_api/resources/api_auth/new_device.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python3 -"""New device auth module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils.auth import ( - get_new_device_auth_token, - use_new_device_auth_token, - delete_new_device_auth_token, -) - - -class NewDevice(Resource): - """New device auth class - POST returns a new token for the caller. - """ - - def post(self): - """ - Get new device token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: New device token - 400: - description: Bad request - """ - token = get_new_device_auth_token() - return {"token": token} - - def delete(self): - """ - Delete new device token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: New device token deleted - 400: - description: Bad request - """ - delete_new_device_auth_token() - return {"token": None} - - -class AuthorizeDevice(Resource): - """Authorize device class - POST authorizes the caller. 
- """ - - def post(self): - """ - Authorize device - --- - tags: - - Tokens - parameters: - - in: body - name: data - required: true - description: Who is authorizing - schema: - type: object - properties: - token: - type: string - description: Mnemonic token to authorize - device: - type: string - description: Device to authorize - responses: - 200: - description: Device authorized - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token", type=str, required=True, help="Mnemonic token to authorize" - ) - parser.add_argument( - "device", type=str, required=True, help="Device to authorize" - ) - args = parser.parse_args() - auth_token = args["token"] - device = args["device"] - token = use_new_device_auth_token(auth_token, device) - if token is None: - return {"message": "Token not found"}, 404 - return {"message": "Device authorized", "token": token}, 200 - - -api.add_resource(NewDevice, "/new_device") -api.add_resource(AuthorizeDevice, "/new_device/authorize") diff --git a/selfprivacy_api/resources/api_auth/recovery_token.py b/selfprivacy_api/resources/api_auth/recovery_token.py deleted file mode 100644 index 912a50b..0000000 --- a/selfprivacy_api/resources/api_auth/recovery_token.py +++ /dev/null @@ -1,205 +0,0 @@ -#!/usr/bin/env python3 -"""Recovery token module""" -from datetime import datetime -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils import parse_date -from selfprivacy_api.utils.auth import ( - is_recovery_token_exists, - is_recovery_token_valid, - get_recovery_token_status, - generate_recovery_token, - use_mnemonic_recoverery_token, -) - - -class RecoveryToken(Resource): - """Recovery token class - GET returns the status of the recovery token. - POST generates a new recovery token. 
- """ - - def get(self): - """ - Get recovery token status - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: Recovery token status - schema: - type: object - properties: - exists: - type: boolean - description: Recovery token exists - valid: - type: boolean - description: Recovery token is valid - date: - type: string - description: Recovery token date - expiration: - type: string - description: Recovery token expiration date - uses_left: - type: integer - description: Recovery token uses left - 400: - description: Bad request - """ - if not is_recovery_token_exists(): - return { - "exists": False, - "valid": False, - "date": None, - "expiration": None, - "uses_left": None, - } - status = get_recovery_token_status() - # check if status is None - if status is None: - return { - "exists": False, - "valid": False, - "date": None, - "expiration": None, - "uses_left": None, - } - - if not is_recovery_token_valid(): - return { - "exists": True, - "valid": False, - "date": status["date"], - "expiration": status["expiration"], - "uses_left": status["uses_left"], - } - return { - "exists": True, - "valid": True, - "date": status["date"], - "expiration": status["expiration"], - "uses_left": status["uses_left"], - } - - def post(self): - """ - Generate recovery token - --- - tags: - - Tokens - security: - - bearerAuth: [] - parameters: - - in: body - name: data - required: true - description: Token data - schema: - type: object - properties: - expiration: - type: string - description: Token expiration date - uses: - type: integer - description: Token uses - responses: - 200: - description: Recovery token generated - schema: - type: object - properties: - token: - type: string - description: Mnemonic recovery token - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument( - "expiration", type=str, required=False, help="Token expiration date" - ) - parser.add_argument("uses", type=int, required=False, help="Token uses") - args = parser.parse_args() - # Convert expiration date to datetime and return 400 if it is not valid - if args["expiration"]: - try: - expiration = parse_date(args["expiration"]) - # Retrun 400 if expiration date is in the past - if expiration < datetime.now(): - return {"message": "Expiration date cannot be in the past"}, 400 - except ValueError: - return { - "error": "Invalid expiration date. Use YYYY-MM-DDTHH:MM:SS.SSS" - }, 400 - else: - expiration = None - if args["uses"] is not None and args["uses"] < 1: - return {"message": "Uses must be greater than 0"}, 400 - # Generate recovery token - token = generate_recovery_token(expiration, args["uses"]) - return {"token": token} - - -class UseRecoveryToken(Resource): - """Use recovery token class - POST uses the recovery token. 
- """ - - def post(self): - """ - Use recovery token - --- - tags: - - Tokens - parameters: - - in: body - name: data - required: true - description: Token data - schema: - type: object - properties: - token: - type: string - description: Mnemonic recovery token - device: - type: string - description: Device to authorize - responses: - 200: - description: Recovery token used - schema: - type: object - properties: - token: - type: string - description: Device authorization token - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token", type=str, required=True, help="Mnemonic recovery token" - ) - parser.add_argument( - "device", type=str, required=True, help="Device to authorize" - ) - args = parser.parse_args() - # Use recovery token - token = use_mnemonic_recoverery_token(args["token"], args["device"]) - if token is None: - return {"error": "Token not found"}, 404 - return {"token": token} - - -api.add_resource(RecoveryToken, "/recovery_token") -api.add_resource(UseRecoveryToken, "/recovery_token/use") diff --git a/selfprivacy_api/resources/common.py b/selfprivacy_api/resources/common.py deleted file mode 100644 index f78aad6..0000000 --- a/selfprivacy_api/resources/common.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python3 -"""Unassigned views""" -from flask_restful import Resource -from selfprivacy_api.graphql.queries.api_queries import get_api_version - - -class ApiVersion(Resource): - """SelfPrivacy API version""" - - def get(self): - """Get API version - --- - tags: - - System - responses: - 200: - description: API version - schema: - type: object - properties: - version: - type: string - description: API version - 401: - description: Unauthorized - """ - return {"version": get_api_version()} diff --git a/selfprivacy_api/resources/services/__init__.py b/selfprivacy_api/resources/services/__init__.py deleted file mode 100644 index a7f1dbe..0000000 --- a/selfprivacy_api/resources/services/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -"""Services management module""" -from flask import Blueprint -from flask_restful import Api - -services = Blueprint("services", __name__, url_prefix="/services") -api = Api(services) - -from . 
import ( - bitwarden, - gitea, - mailserver, - main, - nextcloud, - ocserv, - pleroma, - restic, - ssh, -) diff --git a/selfprivacy_api/resources/services/bitwarden.py b/selfprivacy_api/resources/services/bitwarden.py deleted file mode 100644 index 412ba8a..0000000 --- a/selfprivacy_api/resources/services/bitwarden.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Bitwarden management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableBitwarden(Resource): - """Enable Bitwarden""" - - def post(self): - """ - Enable Bitwarden - --- - tags: - - Bitwarden - security: - - bearerAuth: [] - responses: - 200: - description: Bitwarden enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "bitwarden" not in data: - data["bitwarden"] = {} - data["bitwarden"]["enable"] = True - - return { - "status": 0, - "message": "Bitwarden enabled", - } - - -class DisableBitwarden(Resource): - """Disable Bitwarden""" - - def post(self): - """ - Disable Bitwarden - --- - tags: - - Bitwarden - security: - - bearerAuth: [] - responses: - 200: - description: Bitwarden disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "bitwarden" not in data: - data["bitwarden"] = {} - data["bitwarden"]["enable"] = False - - return { - "status": 0, - "message": "Bitwarden disabled", - } - - -api.add_resource(EnableBitwarden, "/bitwarden/enable") -api.add_resource(DisableBitwarden, "/bitwarden/disable") diff --git a/selfprivacy_api/resources/services/gitea.py b/selfprivacy_api/resources/services/gitea.py deleted file mode 100644 index bd4b8de..0000000 --- a/selfprivacy_api/resources/services/gitea.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Gitea management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableGitea(Resource): - """Enable Gitea""" - - def post(self): - """ - Enable Gitea - --- - tags: - - Gitea - security: - - bearerAuth: [] - responses: - 200: - description: Gitea enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "gitea" not in data: - data["gitea"] = {} - data["gitea"]["enable"] = True - - return { - "status": 0, - "message": "Gitea enabled", - } - - -class DisableGitea(Resource): - """Disable Gitea""" - - def post(self): - """ - Disable Gitea - --- - tags: - - Gitea - security: - - bearerAuth: [] - responses: - 200: - description: Gitea disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "gitea" not in data: - data["gitea"] = {} - data["gitea"]["enable"] = False - - return { - "status": 0, - "message": "Gitea disabled", - } - - -api.add_resource(EnableGitea, "/gitea/enable") -api.add_resource(DisableGitea, "/gitea/disable") diff --git a/selfprivacy_api/resources/services/mailserver.py b/selfprivacy_api/resources/services/mailserver.py deleted file mode 100644 index 01fa574..0000000 --- a/selfprivacy_api/resources/services/mailserver.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python3 -"""Mail server management module""" -import base64 -import subprocess -import os -from flask_restful import Resource - -from selfprivacy_api.resources.services import api - -from selfprivacy_api.utils import get_dkim_key, get_domain - - -class DKIMKey(Resource): - """Get DKIM key from file""" - - def get(self): - """ - Get DKIM key from file 
- --- - tags: - - Email - security: - - bearerAuth: [] - responses: - 200: - description: DKIM key encoded in base64 - 401: - description: Unauthorized - 404: - description: DKIM key not found - """ - domain = get_domain() - - dkim = get_dkim_key(domain) - if dkim is None: - return "DKIM file not found", 404 - dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") - return dkim - - -api.add_resource(DKIMKey, "/mailserver/dkim") diff --git a/selfprivacy_api/resources/services/main.py b/selfprivacy_api/resources/services/main.py deleted file mode 100644 index 8b6743c..0000000 --- a/selfprivacy_api/resources/services/main.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python3 -"""Services status module""" -import subprocess -from flask_restful import Resource - -from . import api - - -class ServiceStatus(Resource): - """Get service status""" - - def get(self): - """ - Get service status - --- - tags: - - Services - responses: - 200: - description: Service status - schema: - type: object - properties: - imap: - type: integer - description: Dovecot service status - smtp: - type: integer - description: Postfix service status - http: - type: integer - description: Nginx service status - bitwarden: - type: integer - description: Bitwarden service status - gitea: - type: integer - description: Gitea service status - nextcloud: - type: integer - description: Nextcloud service status - ocserv: - type: integer - description: OpenConnect VPN service status - pleroma: - type: integer - description: Pleroma service status - 401: - description: Unauthorized - """ - imap_service = subprocess.Popen(["systemctl", "status", "dovecot2.service"]) - imap_service.communicate()[0] - smtp_service = subprocess.Popen(["systemctl", "status", "postfix.service"]) - smtp_service.communicate()[0] - http_service = subprocess.Popen(["systemctl", "status", "nginx.service"]) - http_service.communicate()[0] - bitwarden_service = subprocess.Popen( - ["systemctl", "status", "vaultwarden.service"] - ) - bitwarden_service.communicate()[0] - gitea_service = subprocess.Popen(["systemctl", "status", "gitea.service"]) - gitea_service.communicate()[0] - nextcloud_service = subprocess.Popen( - ["systemctl", "status", "phpfpm-nextcloud.service"] - ) - nextcloud_service.communicate()[0] - ocserv_service = subprocess.Popen(["systemctl", "status", "ocserv.service"]) - ocserv_service.communicate()[0] - pleroma_service = subprocess.Popen(["systemctl", "status", "pleroma.service"]) - pleroma_service.communicate()[0] - - return { - "imap": imap_service.returncode, - "smtp": smtp_service.returncode, - "http": http_service.returncode, - "bitwarden": bitwarden_service.returncode, - "gitea": gitea_service.returncode, - "nextcloud": nextcloud_service.returncode, - "ocserv": ocserv_service.returncode, - "pleroma": pleroma_service.returncode, - } - - -api.add_resource(ServiceStatus, "/status") diff --git a/selfprivacy_api/resources/services/nextcloud.py b/selfprivacy_api/resources/services/nextcloud.py deleted file mode 100644 index 3aa9d06..0000000 --- a/selfprivacy_api/resources/services/nextcloud.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Nextcloud management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableNextcloud(Resource): - """Enable Nextcloud""" - - def post(self): - """ - Enable Nextcloud - --- - tags: - - Nextcloud - security: - - bearerAuth: [] - responses: - 200: - description: Nextcloud 
enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "nextcloud" not in data: - data["nextcloud"] = {} - data["nextcloud"]["enable"] = True - - return { - "status": 0, - "message": "Nextcloud enabled", - } - - -class DisableNextcloud(Resource): - """Disable Nextcloud""" - - def post(self): - """ - Disable Nextcloud - --- - tags: - - Nextcloud - security: - - bearerAuth: [] - responses: - 200: - description: Nextcloud disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "nextcloud" not in data: - data["nextcloud"] = {} - data["nextcloud"]["enable"] = False - - return { - "status": 0, - "message": "Nextcloud disabled", - } - - -api.add_resource(EnableNextcloud, "/nextcloud/enable") -api.add_resource(DisableNextcloud, "/nextcloud/disable") diff --git a/selfprivacy_api/resources/services/ocserv.py b/selfprivacy_api/resources/services/ocserv.py deleted file mode 100644 index 4dc83da..0000000 --- a/selfprivacy_api/resources/services/ocserv.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""OpenConnect VPN server management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableOcserv(Resource): - """Enable OpenConnect VPN server""" - - def post(self): - """ - Enable OCserv - --- - tags: - - OCserv - security: - - bearerAuth: [] - responses: - 200: - description: OCserv enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ocserv" not in data: - data["ocserv"] = {} - data["ocserv"]["enable"] = True - - return { - "status": 0, - "message": "OpenConnect VPN server enabled", - } - - -class DisableOcserv(Resource): - """Disable OpenConnect VPN server""" - - def post(self): - """ - Disable OCserv - --- - tags: - - OCserv - security: - - bearerAuth: [] - responses: - 200: - description: OCserv disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ocserv" not in data: - data["ocserv"] = {} - data["ocserv"]["enable"] = False - - return { - "status": 0, - "message": "OpenConnect VPN server disabled", - } - - -api.add_resource(EnableOcserv, "/ocserv/enable") -api.add_resource(DisableOcserv, "/ocserv/disable") diff --git a/selfprivacy_api/resources/services/pleroma.py b/selfprivacy_api/resources/services/pleroma.py deleted file mode 100644 index aaf08f0..0000000 --- a/selfprivacy_api/resources/services/pleroma.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Pleroma management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnablePleroma(Resource): - """Enable Pleroma""" - - def post(self): - """ - Enable Pleroma - --- - tags: - - Pleroma - security: - - bearerAuth: [] - responses: - 200: - description: Pleroma enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "pleroma" not in data: - data["pleroma"] = {} - data["pleroma"]["enable"] = True - - return { - "status": 0, - "message": "Pleroma enabled", - } - - -class DisablePleroma(Resource): - """Disable Pleroma""" - - def post(self): - """ - Disable Pleroma - --- - tags: - - Pleroma - security: - - bearerAuth: [] - responses: - 200: - description: Pleroma disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "pleroma" not in data: - data["pleroma"] = {} - data["pleroma"]["enable"] = False - - return { - "status": 0, - 
"message": "Pleroma disabled", - } - - -api.add_resource(EnablePleroma, "/pleroma/enable") -api.add_resource(DisablePleroma, "/pleroma/disable") diff --git a/selfprivacy_api/resources/services/restic.py b/selfprivacy_api/resources/services/restic.py deleted file mode 100644 index dd22c9a..0000000 --- a/selfprivacy_api/resources/services/restic.py +++ /dev/null @@ -1,241 +0,0 @@ -#!/usr/bin/env python3 -"""Backups management module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData -from selfprivacy_api.restic_controller import tasks as restic_tasks -from selfprivacy_api.restic_controller import ResticController, ResticStates - - -class ListAllBackups(Resource): - """List all restic backups""" - - def get(self): - """ - Get all restic backups - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: A list of snapshots - 400: - description: Bad request - 401: - description: Unauthorized - """ - - restic = ResticController() - return restic.snapshot_list - - -class AsyncCreateBackup(Resource): - """Create a new restic backup""" - - def put(self): - """ - Initiate a new restic backup - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: Backup creation has started - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: Backup already in progress - """ - restic = ResticController() - if restic.state is ResticStates.NO_KEY: - return {"error": "No key provided"}, 400 - if restic.state is ResticStates.INITIALIZING: - return {"error": "Backup is initializing"}, 400 - if restic.state is ResticStates.BACKING_UP: - return {"error": "Backup is already running"}, 409 - restic_tasks.start_backup() - return { - "status": 0, - "message": "Backup creation has started", - } - - -class CheckBackupStatus(Resource): - """Check current backup status""" - - def get(self): - """ - Get backup status - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: Backup status - 400: - description: Bad request - 401: - description: Unauthorized - """ - restic = ResticController() - - return { - "status": restic.state.name, - "progress": restic.progress, - "error_message": restic.error_message, - } - - -class ForceReloadSnapshots(Resource): - """Force reload snapshots""" - - def get(self): - """ - Force reload snapshots - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: Snapshots reloaded - 400: - description: Bad request - 401: - description: Unauthorized - """ - restic_tasks.load_snapshots() - return { - "status": 0, - "message": "Snapshots reload started", - } - - -class AsyncRestoreBackup(Resource): - """Trigger backup restoration process""" - - def put(self): - """ - Start backup restoration - --- - tags: - - Backups - security: - - bearerAuth: [] - parameters: - - in: body - required: true - name: backup - description: Backup to restore - schema: - type: object - required: - - backupId - properties: - backupId: - type: string - responses: - 200: - description: Backup restoration process started - 400: - description: Bad request - 401: - description: Unauthorized - """ - parser = reqparse.RequestParser() - parser.add_argument("backupId", type=str, required=True) - args = parser.parse_args() - - restic = ResticController() - if restic.state is ResticStates.NO_KEY: - return {"error": "No key provided"}, 400 - if restic.state is 
ResticStates.NOT_INITIALIZED: - return {"error": "Repository is not initialized"}, 400 - if restic.state is ResticStates.BACKING_UP: - return {"error": "Backup is already running"}, 409 - if restic.state is ResticStates.INITIALIZING: - return {"error": "Repository is initializing"}, 400 - if restic.state is ResticStates.RESTORING: - return {"error": "Restore is already running"}, 409 - for backup in restic.snapshot_list: - if backup["short_id"] == args["backupId"]: - restic_tasks.restore_from_backup(args["backupId"]) - return { - "status": 0, - "message": "Backup restoration procedure started", - } - - return {"error": "Backup not found"}, 404 - - -class BackblazeConfig(Resource): - """Backblaze config""" - - def put(self): - """ - Set the new key for backblaze - --- - tags: - - Backups - security: - - bearerAuth: [] - parameters: - - in: body - required: true - name: backblazeSettings - description: New Backblaze settings - schema: - type: object - required: - - accountId - - accountKey - - bucket - properties: - accountId: - type: string - accountKey: - type: string - bucket: - type: string - responses: - 200: - description: New Backblaze settings - 400: - description: Bad request - 401: - description: Unauthorized - """ - parser = reqparse.RequestParser() - parser.add_argument("accountId", type=str, required=True) - parser.add_argument("accountKey", type=str, required=True) - parser.add_argument("bucket", type=str, required=True) - args = parser.parse_args() - - with WriteUserData() as data: - if "backblaze" not in data: - data["backblaze"] = {} - data["backblaze"]["accountId"] = args["accountId"] - data["backblaze"]["accountKey"] = args["accountKey"] - data["backblaze"]["bucket"] = args["bucket"] - - restic_tasks.update_keys_from_userdata() - - return "New Backblaze settings saved" - - -api.add_resource(ListAllBackups, "/restic/backup/list") -api.add_resource(AsyncCreateBackup, "/restic/backup/create") -api.add_resource(CheckBackupStatus, "/restic/backup/status") -api.add_resource(AsyncRestoreBackup, "/restic/backup/restore") -api.add_resource(BackblazeConfig, "/restic/backblaze/config") -api.add_resource(ForceReloadSnapshots, "/restic/backup/reload") diff --git a/selfprivacy_api/resources/services/ssh.py b/selfprivacy_api/resources/services/ssh.py deleted file mode 100644 index 3ea5a1d..0000000 --- a/selfprivacy_api/resources/services/ssh.py +++ /dev/null @@ -1,407 +0,0 @@ -#!/usr/bin/env python3 -"""SSH management module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData, ReadUserData, validate_ssh_public_key - - -class EnableSSH(Resource): - """Enable SSH""" - - def post(self): - """ - Enable SSH - --- - tags: - - SSH - security: - - bearerAuth: [] - responses: - 200: - description: SSH enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - data["ssh"]["enable"] = True - - return { - "status": 0, - "message": "SSH enabled", - } - - -class SSHSettings(Resource): - """Enable/disable SSH""" - - def get(self): - """ - Get current SSH settings - --- - tags: - - SSH - security: - - bearerAuth: [] - responses: - 200: - description: SSH settings - 400: - description: Bad request - """ - with ReadUserData() as data: - if "ssh" not in data: - return {"enable": True, "passwordAuthentication": True} - if "enable" not in data["ssh"]: - data["ssh"]["enable"] = True - if "passwordAuthentication" not in data["ssh"]: - 
data["ssh"]["passwordAuthentication"] = True - return { - "enable": data["ssh"]["enable"], - "passwordAuthentication": data["ssh"]["passwordAuthentication"], - } - - def put(self): - """ - Change SSH settings - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - name: sshSettings - in: body - required: true - description: SSH settings - schema: - type: object - required: - - enable - - passwordAuthentication - properties: - enable: - type: boolean - passwordAuthentication: - type: boolean - responses: - 200: - description: New settings saved - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("enable", type=bool, required=False) - parser.add_argument("passwordAuthentication", type=bool, required=False) - args = parser.parse_args() - enable = args["enable"] - password_authentication = args["passwordAuthentication"] - - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - if enable is not None: - data["ssh"]["enable"] = enable - if password_authentication is not None: - data["ssh"]["passwordAuthentication"] = password_authentication - - return "SSH settings changed" - - -class WriteSSHKey(Resource): - """Write new SSH key""" - - def put(self): - """ - Add a SSH root key - --- - consumes: - - application/json - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - name: body - required: true - description: Public key to add - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - description: ssh-ed25519 public key. - responses: - 201: - description: Key added - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: Key already exists - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", type=str, required=True, help="Key cannot be blank!" - ) - args = parser.parse_args() - - public_key = args["public_key"] - - if not validate_ssh_public_key(public_key): - return { - "error": "Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported.", - }, 400 - - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - # Return 409 if key already in array - for key in data["ssh"]["rootKeys"]: - if key == public_key: - return { - "error": "Key already exists", - }, 409 - data["ssh"]["rootKeys"].append(public_key) - - return { - "status": 0, - "message": "New SSH key successfully written", - }, 201 - - -class SSHKeys(Resource): - """List SSH keys""" - - def get(self, username): - """ - List SSH keys - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: path - name: username - type: string - required: true - description: User to list keys for - responses: - 200: - description: SSH keys - 401: - description: Unauthorized - """ - with ReadUserData() as data: - if username == "root": - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - return data["ssh"]["rootKeys"] - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - return data["sshKeys"] - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - return user["sshKeys"] - return { - "error": "User not found", - }, 404 - - def post(self, username): - """ - Add SSH key to the user - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - required: true - name: public_key - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - - in: path - name: username - type: string - required: true - description: User to add keys for - responses: - 201: - description: SSH key added - 401: - description: Unauthorized - 404: - description: User not found - 409: - description: Key already exists - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", type=str, required=True, help="Key cannot be blank!" - ) - args = parser.parse_args() - - if username == "root": - return { - "error": "Use /ssh/key/send to add root keys", - }, 400 - - if not validate_ssh_public_key(args["public_key"]): - return { - "error": "Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported.", - }, 400 - - with WriteUserData() as data: - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - # Return 409 if key already in array - for key in data["sshKeys"]: - if key == args["public_key"]: - return { - "error": "Key already exists", - }, 409 - data["sshKeys"].append(args["public_key"]) - return { - "message": "New SSH key successfully written", - }, 201 - - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - # Return 409 if key already in array - for key in user["sshKeys"]: - if key == args["public_key"]: - return { - "error": "Key already exists", - }, 409 - user["sshKeys"].append(args["public_key"]) - return { - "message": "New SSH key successfully written", - }, 201 - return { - "error": "User not found", - }, 404 - - def delete(self, username): - """ - Delete SSH key - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - name: public_key - required: true - description: Key to delete - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - - in: path - name: username - type: string - required: true - description: User to delete keys for - responses: - 200: - description: SSH key deleted - 401: - description: Unauthorized - 404: - description: Key not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", type=str, required=True, help="Key cannot be blank!" - ) - args = parser.parse_args() - - with WriteUserData() as data: - if username == "root": - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - # Return 404 if key not in array - for key in data["ssh"]["rootKeys"]: - if key == args["public_key"]: - data["ssh"]["rootKeys"].remove(key) - # If rootKeys became zero length, delete it - if len(data["ssh"]["rootKeys"]) == 0: - del data["ssh"]["rootKeys"] - return { - "message": "SSH key deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - # Return 404 if key not in array - for key in data["sshKeys"]: - if key == args["public_key"]: - data["sshKeys"].remove(key) - return { - "message": "SSH key deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - # Return 404 if key not in array - for key in user["sshKeys"]: - if key == args["public_key"]: - user["sshKeys"].remove(key) - return { - "message": "SSH key successfully deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - return { - "error": "User not found", - }, 404 - - -api.add_resource(EnableSSH, "/ssh/enable") -api.add_resource(SSHSettings, "/ssh") - -api.add_resource(WriteSSHKey, "/ssh/key/send") -api.add_resource(SSHKeys, "/ssh/keys/") diff --git a/selfprivacy_api/resources/system.py b/selfprivacy_api/resources/system.py deleted file mode 100644 index 958616e..0000000 --- a/selfprivacy_api/resources/system.py +++ /dev/null @@ -1,346 +0,0 @@ -#!/usr/bin/env python3 -"""System management module""" -import os -import subprocess -import pytz -from flask import Blueprint -from flask_restful import Resource, Api, reqparse -from selfprivacy_api.graphql.queries.system import ( - get_python_version, - 
get_system_version, -) - -from selfprivacy_api.utils import WriteUserData, ReadUserData - -api_system = Blueprint("system", __name__, url_prefix="/system") -api = Api(api_system) - - -class Timezone(Resource): - """Change timezone of NixOS""" - - def get(self): - """ - Get current system timezone - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Timezone - 400: - description: Bad request - """ - with ReadUserData() as data: - if "timezone" not in data: - return "Europe/Uzhgorod" - return data["timezone"] - - def put(self): - """ - Change system timezone - --- - tags: - - System - security: - - bearerAuth: [] - parameters: - - name: timezone - in: body - required: true - description: Timezone to set - schema: - type: object - required: - - timezone - properties: - timezone: - type: string - responses: - 200: - description: Timezone changed - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("timezone", type=str, required=True) - timezone = parser.parse_args()["timezone"] - - # Check if timezone is a valid tzdata string - if timezone not in pytz.all_timezones: - return {"error": "Invalid timezone"}, 400 - - with WriteUserData() as data: - data["timezone"] = timezone - return "Timezone changed" - - -class AutoUpgrade(Resource): - """Enable/disable automatic upgrades and reboots""" - - def get(self): - """ - Get current system autoupgrade settings - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Auto-upgrade settings - 400: - description: Bad request - """ - with ReadUserData() as data: - if "autoUpgrade" not in data: - return {"enable": True, "allowReboot": False} - if "enable" not in data["autoUpgrade"]: - data["autoUpgrade"]["enable"] = True - if "allowReboot" not in data["autoUpgrade"]: - data["autoUpgrade"]["allowReboot"] = False - return data["autoUpgrade"] - - def put(self): - """ - Change system auto upgrade settings - --- - tags: - - System - security: - - bearerAuth: [] - parameters: - - name: autoUpgrade - in: body - required: true - description: Auto upgrade settings - schema: - type: object - required: - - enable - - allowReboot - properties: - enable: - type: boolean - allowReboot: - type: boolean - responses: - 200: - description: New settings saved - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("enable", type=bool, required=False) - parser.add_argument("allowReboot", type=bool, required=False) - args = parser.parse_args() - enable = args["enable"] - allow_reboot = args["allowReboot"] - - with WriteUserData() as data: - if "autoUpgrade" not in data: - data["autoUpgrade"] = {} - if enable is not None: - data["autoUpgrade"]["enable"] = enable - if allow_reboot is not None: - data["autoUpgrade"]["allowReboot"] = allow_reboot - return "Auto-upgrade settings changed" - - -class RebuildSystem(Resource): - """Rebuild NixOS""" - - def get(self): - """ - Rebuild NixOS with nixos-rebuild switch - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System rebuild has started - 401: - description: Unauthorized - """ - rebuild_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True - ) - rebuild_result.communicate()[0] - return rebuild_result.returncode - - -class RollbackSystem(Resource): - """Rollback NixOS""" - - def get(self): - """ - Rollback NixOS with nixos-rebuild switch --rollback - --- - tags: - - System - security: - - 
bearerAuth: [] - responses: - 200: - description: System rollback has started - 401: - description: Unauthorized - """ - rollback_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True - ) - rollback_result.communicate()[0] - return rollback_result.returncode - - -class UpgradeSystem(Resource): - """Upgrade NixOS""" - - def get(self): - """ - Upgrade NixOS with nixos-rebuild switch --upgrade - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System upgrade has started - 401: - description: Unauthorized - """ - upgrade_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True - ) - upgrade_result.communicate()[0] - return upgrade_result.returncode - - -class RebootSystem(Resource): - """Reboot the system""" - - def get(self): - """ - Reboot the system - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System reboot has started - 401: - description: Unauthorized - """ - subprocess.Popen(["reboot"], start_new_session=True) - return "System reboot has started" - - -class SystemVersion(Resource): - """Get system version from uname""" - - def get(self): - """ - Get system version from uname -a - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: OK - 401: - description: Unauthorized - """ - return { - "system_version": get_system_version(), - } - - -class PythonVersion(Resource): - """Get python version""" - - def get(self): - """ - Get python version used by this API - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: OK - 401: - description: Unauthorized - """ - return get_python_version() - - -class PullRepositoryChanges(Resource): - """Pull NixOS config repository changes""" - - def get(self): - """ - Pull Repository Changes - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Got update - 201: - description: Nothing to update - 401: - description: Unauthorized - 500: - description: Something went wrong - """ - - git_pull_command = ["git", "pull"] - - current_working_directory = os.getcwd() - os.chdir("/etc/nixos") - - git_pull_process_descriptor = subprocess.Popen( - git_pull_command, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - shell=False, - ) - - data = git_pull_process_descriptor.communicate()[0].decode("utf-8") - - os.chdir(current_working_directory) - - if git_pull_process_descriptor.returncode == 0: - return { - "status": 0, - "message": "Update completed successfully", - "data": data, - } - return { - "status": git_pull_process_descriptor.returncode, - "message": "Something went wrong", - "data": data, - }, 500 - - -api.add_resource(Timezone, "/configuration/timezone") -api.add_resource(AutoUpgrade, "/configuration/autoUpgrade") -api.add_resource(RebuildSystem, "/configuration/apply") -api.add_resource(RollbackSystem, "/configuration/rollback") -api.add_resource(UpgradeSystem, "/configuration/upgrade") -api.add_resource(RebootSystem, "/reboot") -api.add_resource(SystemVersion, "/version") -api.add_resource(PythonVersion, "/pythonVersion") -api.add_resource(PullRepositoryChanges, "/configuration/pull") diff --git a/selfprivacy_api/resources/users.py b/selfprivacy_api/resources/users.py deleted file mode 100644 index e114324..0000000 --- a/selfprivacy_api/resources/users.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/env python3 -"""Users management module""" -import subprocess -import re 
-from flask_restful import Resource, reqparse - -from selfprivacy_api.utils import WriteUserData, ReadUserData, is_username_forbidden - - -class Users(Resource): - """Users management""" - - def get(self): - """ - Get a list of users - --- - tags: - - Users - security: - - bearerAuth: [] - responses: - 200: - description: A list of users - 401: - description: Unauthorized - """ - parser = reqparse.RequestParser(bundle_errors=True) - parser.add_argument("withMainUser", type=bool, required=False) - args = parser.parse_args() - with_main_user = False if args["withMainUser"] is None else args["withMainUser"] - - with ReadUserData() as data: - users = [] - if with_main_user: - users.append(data["username"]) - if "users" in data: - for user in data["users"]: - users.append(user["username"]) - return users - - def post(self): - """ - Create a new user - --- - consumes: - - application/json - tags: - - Users - security: - - bearerAuth: [] - parameters: - - in: body - name: user - required: true - description: User to create - schema: - type: object - required: - - username - - password - properties: - username: - type: string - description: Unix username. Must be alphanumeric and less than 32 characters - password: - type: string - description: Unix password. - responses: - 201: - description: Created user - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: User already exists - """ - parser = reqparse.RequestParser(bundle_errors=True) - parser.add_argument("username", type=str, required=True) - parser.add_argument("password", type=str, required=True) - args = parser.parse_args() - hashing_command = ["mkpasswd", "-m", "sha-512", args["password"]] - password_hash_process_descriptor = subprocess.Popen( - hashing_command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - hashed_password = password_hash_process_descriptor.communicate()[0] - hashed_password = hashed_password.decode("ascii") - hashed_password = hashed_password.rstrip() - # Check if username is forbidden - if is_username_forbidden(args["username"]): - return {"message": "Username is forbidden"}, 409 - # Check is username passes regex - if not re.match(r"^[a-z_][a-z0-9_]+$", args["username"]): - return {"error": "username must be alphanumeric"}, 400 - # Check if username less than 32 characters - if len(args["username"]) >= 32: - return {"error": "username must be less than 32 characters"}, 400 - - with WriteUserData() as data: - if "users" not in data: - data["users"] = [] - - # Return 409 if user already exists - if data["username"] == args["username"]: - return {"error": "User already exists"}, 409 - - for user in data["users"]: - if user["username"] == args["username"]: - return {"error": "User already exists"}, 409 - - data["users"].append( - { - "username": args["username"], - "hashedPassword": hashed_password, - } - ) - - return {"result": 0, "username": args["username"]}, 201 - - -class User(Resource): - """Single user managment""" - - def delete(self, username): - """ - Delete a user - --- - tags: - - Users - security: - - bearerAuth: [] - parameters: - - in: path - name: username - required: true - description: User to delete - type: string - responses: - 200: - description: Deleted user - 400: - description: Bad request - 401: - description: Unauthorized - 404: - description: User not found - """ - with WriteUserData() as data: - if username == data["username"]: - return {"error": "Cannot delete root user"}, 400 - # Return 400 if user does not exist - for user in 
data["users"]: - if user["username"] == username: - data["users"].remove(user) - break - else: - return {"error": "User does not exist"}, 404 - - return {"result": 0, "username": username} diff --git a/selfprivacy_api/rest/__init__.py b/selfprivacy_api/rest/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/rest/api_auth.py b/selfprivacy_api/rest/api_auth.py new file mode 100644 index 0000000..f73056c --- /dev/null +++ b/selfprivacy_api/rest/api_auth.py @@ -0,0 +1,127 @@ +from datetime import datetime +from typing import Optional +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel +from selfprivacy_api.actions.api_tokens import ( + CannotDeleteCallerException, + InvalidExpirationDate, + InvalidUsesLeft, + NotFoundException, + delete_api_token, + get_api_recovery_token_status, + get_api_tokens_with_caller_flag, + get_new_api_recovery_key, + refresh_api_token, +) + +from selfprivacy_api.dependencies import TokenHeader, get_token_header + +from selfprivacy_api.utils.auth import ( + delete_new_device_auth_token, + get_new_device_auth_token, + use_mnemonic_recoverery_token, + use_new_device_auth_token, +) + +router = APIRouter( + prefix="/auth", + tags=["auth"], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/tokens") +async def rest_get_tokens(auth_token: TokenHeader = Depends(get_token_header)): + """Get the tokens info""" + return get_api_tokens_with_caller_flag(auth_token.token) + + +class DeleteTokenInput(BaseModel): + """Delete token input""" + + token_name: str + + +@router.delete("/tokens") +async def rest_delete_tokens( + token: DeleteTokenInput, auth_token: TokenHeader = Depends(get_token_header) +): + """Delete the tokens""" + try: + delete_api_token(auth_token.token, token.token_name) + except NotFoundException: + raise HTTPException(status_code=404, detail="Token not found") + except CannotDeleteCallerException: + raise HTTPException(status_code=400, detail="Cannot delete caller's token") + return {"message": "Token deleted"} + + +@router.post("/tokens") +async def rest_refresh_token(auth_token: TokenHeader = Depends(get_token_header)): + """Refresh the token""" + try: + new_token = refresh_api_token(auth_token.token) + except NotFoundException: + raise HTTPException(status_code=404, detail="Token not found") + return {"token": new_token} + + +@router.get("/recovery_token") +async def rest_get_recovery_token_status( + auth_token: TokenHeader = Depends(get_token_header), +): + return get_api_recovery_token_status() + + +class CreateRecoveryTokenInput(BaseModel): + expiration: Optional[datetime] = None + uses: Optional[int] = None + + +@router.post("/recovery_token") +async def rest_create_recovery_token( + limits: CreateRecoveryTokenInput = CreateRecoveryTokenInput(), + auth_token: TokenHeader = Depends(get_token_header), +): + try: + token = get_new_api_recovery_key(limits.expiration, limits.uses) + except InvalidExpirationDate as e: + raise HTTPException(status_code=400, detail=str(e)) + except InvalidUsesLeft as e: + raise HTTPException(status_code=400, detail=str(e)) + return {"token": token} + + +class UseTokenInput(BaseModel): + token: str + device: str + + +@router.post("/recovery_token/use") +async def rest_use_recovery_token(input: UseTokenInput): + token = use_mnemonic_recoverery_token(input.token, input.device) + if token is None: + raise HTTPException(status_code=404, detail="Token not found") + return {"token": token} + + +@router.post("/new_device") +async def 
rest_new_device(auth_token: TokenHeader = Depends(get_token_header)): + token = get_new_device_auth_token() + return {"token": token} + + +@router.delete("/new_device") +async def rest_delete_new_device_token( + auth_token: TokenHeader = Depends(get_token_header), +): + delete_new_device_auth_token() + return {"token": None} + + +@router.post("/new_device/authorize") +async def rest_new_device_authorize(input: UseTokenInput): + token = use_new_device_auth_token(input.token, input.device) + if token is None: + raise HTTPException(status_code=404, detail="Token not found") + return {"message": "Device authorized", "token": token} diff --git a/selfprivacy_api/rest/services.py b/selfprivacy_api/rest/services.py new file mode 100644 index 0000000..c9d5ff9 --- /dev/null +++ b/selfprivacy_api/rest/services.py @@ -0,0 +1,373 @@ +"""Basic services legacy api""" +import base64 +from typing import Optional +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel +from selfprivacy_api.actions.ssh import ( + InvalidPublicKey, + KeyAlreadyExists, + KeyNotFound, + create_ssh_key, + enable_ssh, + get_ssh_settings, + remove_ssh_key, + set_ssh_settings, +) +from selfprivacy_api.actions.users import UserNotFound, get_user_by_username + +from selfprivacy_api.dependencies import get_token_header +from selfprivacy_api.restic_controller import ResticController, ResticStates +from selfprivacy_api.restic_controller import tasks as restic_tasks +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.services.ocserv import Ocserv +from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.services.service import ServiceStatus +from selfprivacy_api.utils import WriteUserData, get_dkim_key, get_domain + +router = APIRouter( + prefix="/services", + tags=["services"], + dependencies=[Depends(get_token_header)], + responses={404: {"description": "Not found"}}, +) + + +def service_status_to_return_code(status: ServiceStatus): + """Converts service status object to return code for + compatibility with legacy api""" + if status == ServiceStatus.ACTIVE: + return 0 + elif status == ServiceStatus.FAILED: + return 1 + elif status == ServiceStatus.INACTIVE: + return 3 + elif status == ServiceStatus.OFF: + return 4 + else: + return 2 + + +@router.get("/status") +async def get_status(): + """Get the status of the services""" + mail_status = MailServer.get_status() + bitwarden_status = Bitwarden.get_status() + gitea_status = Gitea.get_status() + nextcloud_status = Nextcloud.get_status() + ocserv_stauts = Ocserv.get_status() + pleroma_status = Pleroma.get_status() + + return { + "imap": service_status_to_return_code(mail_status), + "smtp": service_status_to_return_code(mail_status), + "http": 0, + "bitwarden": service_status_to_return_code(bitwarden_status), + "gitea": service_status_to_return_code(gitea_status), + "nextcloud": service_status_to_return_code(nextcloud_status), + "ocserv": service_status_to_return_code(ocserv_stauts), + "pleroma": service_status_to_return_code(pleroma_status), + } + + +@router.post("/bitwarden/enable") +async def enable_bitwarden(): + """Enable Bitwarden""" + Bitwarden.enable() + return { + "status": 0, + "message": "Bitwarden enabled", + } + + +@router.post("/bitwarden/disable") +async def disable_bitwarden(): + """Disable Bitwarden""" + Bitwarden.disable() + return 
{ + "status": 0, + "message": "Bitwarden disabled", + } + + +@router.post("/gitea/enable") +async def enable_gitea(): + """Enable Gitea""" + Gitea.enable() + return { + "status": 0, + "message": "Gitea enabled", + } + + +@router.post("/gitea/disable") +async def disable_gitea(): + """Disable Gitea""" + Gitea.disable() + return { + "status": 0, + "message": "Gitea disabled", + } + + +@router.get("/mailserver/dkim") +async def get_mailserver_dkim(): + """Get the DKIM record for the mailserver""" + domain = get_domain() + + dkim = get_dkim_key(domain) + if dkim is None: + raise HTTPException(status_code=404, detail="DKIM record not found") + dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") + return dkim + + +@router.post("/nextcloud/enable") +async def enable_nextcloud(): + """Enable Nextcloud""" + Nextcloud.enable() + return { + "status": 0, + "message": "Nextcloud enabled", + } + + +@router.post("/nextcloud/disable") +async def disable_nextcloud(): + """Disable Nextcloud""" + Nextcloud.disable() + return { + "status": 0, + "message": "Nextcloud disabled", + } + + +@router.post("/ocserv/enable") +async def enable_ocserv(): + """Enable Ocserv""" + Ocserv.enable() + return { + "status": 0, + "message": "Ocserv enabled", + } + + +@router.post("/ocserv/disable") +async def disable_ocserv(): + """Disable Ocserv""" + Ocserv.disable() + return { + "status": 0, + "message": "Ocserv disabled", + } + + +@router.post("/pleroma/enable") +async def enable_pleroma(): + """Enable Pleroma""" + Pleroma.enable() + return { + "status": 0, + "message": "Pleroma enabled", + } + + +@router.post("/pleroma/disable") +async def disable_pleroma(): + """Disable Pleroma""" + Pleroma.disable() + return { + "status": 0, + "message": "Pleroma disabled", + } + + +@router.get("/restic/backup/list") +async def get_restic_backup_list(): + restic = ResticController() + return restic.snapshot_list + + +@router.put("/restic/backup/create") +async def create_restic_backup(): + restic = ResticController() + if restic.state is ResticStates.NO_KEY: + raise HTTPException(status_code=400, detail="Backup key not provided") + if restic.state is ResticStates.INITIALIZING: + raise HTTPException(status_code=400, detail="Backup is initializing") + if restic.state is ResticStates.BACKING_UP: + raise HTTPException(status_code=409, detail="Backup is already running") + restic_tasks.start_backup() + return { + "status": 0, + "message": "Backup creation has started", + } + + +@router.get("/restic/backup/status") +async def get_restic_backup_status(): + restic = ResticController() + + return { + "status": restic.state.name, + "progress": restic.progress, + "error_message": restic.error_message, + } + + +@router.get("/restic/backup/reload") +async def reload_restic_backup(): + restic_tasks.load_snapshots() + return { + "status": 0, + "message": "Snapshots reload started", + } + + +class BackupRestoreInput(BaseModel): + backupId: str + + +@router.put("/restic/backup/restore") +async def restore_restic_backup(backup: BackupRestoreInput): + restic = ResticController() + if restic.state is ResticStates.NO_KEY: + raise HTTPException(status_code=400, detail="Backup key not provided") + if restic.state is ResticStates.NOT_INITIALIZED: + raise HTTPException( + status_code=400, detail="Backups repository is not initialized" + ) + if restic.state is ResticStates.BACKING_UP: + raise HTTPException(status_code=409, detail="Backup is already running") + if restic.state is ResticStates.INITIALIZING: + raise HTTPException(status_code=400, 
detail="Repository is initializing") + if restic.state is ResticStates.RESTORING: + raise HTTPException(status_code=409, detail="Restore is already running") + + for backup_item in restic.snapshot_list: + if backup_item["short_id"] == backup.backupId: + restic_tasks.restore_from_backup(backup.backupId) + return { + "status": 0, + "message": "Backup restoration procedure started", + } + + raise HTTPException(status_code=404, detail="Backup not found") + + +class BackblazeConfigInput(BaseModel): + accountId: str + accountKey: str + bucket: str + + +@router.put("/restic/backblaze/config") +async def set_backblaze_config(backblaze_config: BackblazeConfigInput): + with WriteUserData() as data: + if "backblaze" not in data: + data["backblaze"] = {} + data["backblaze"]["accountId"] = backblaze_config.accountId + data["backblaze"]["accountKey"] = backblaze_config.accountKey + data["backblaze"]["bucket"] = backblaze_config.bucket + + restic_tasks.update_keys_from_userdata() + + return "New Backblaze settings saved" + + +@router.post("/ssh/enable") +async def rest_enable_ssh(): + """Enable SSH""" + enable_ssh() + return { + "status": 0, + "message": "SSH enabled", + } + + +@router.get("/ssh") +async def rest_get_ssh(): + """Get the SSH configuration""" + settings = get_ssh_settings() + return { + "enable": settings.enable, + "passwordAuthentication": settings.passwordAuthentication, + } + + +class SshConfigInput(BaseModel): + enable: Optional[bool] = None + passwordAuthentication: Optional[bool] = None + + +@router.put("/ssh") +async def rest_set_ssh(ssh_config: SshConfigInput): + """Set the SSH configuration""" + set_ssh_settings(ssh_config.enable, ssh_config.passwordAuthentication) + + return "SSH settings changed" + + +class SshKeyInput(BaseModel): + public_key: str + + +@router.put("/ssh/key/send", status_code=201) +async def rest_send_ssh_key(input: SshKeyInput): + """Send the SSH key""" + try: + create_ssh_key("root", input.public_key) + except KeyAlreadyExists as error: + raise HTTPException(status_code=409, detail="Key already exists") from error + except InvalidPublicKey as error: + raise HTTPException( + status_code=400, + detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", + ) from error + + return { + "status": 0, + "message": "SSH key sent", + } + + +@router.get("/ssh/keys/{username}") +async def rest_get_ssh_keys(username: str): + """Get the SSH keys for a user""" + user = get_user_by_username(username) + if user is None: + raise HTTPException(status_code=404, detail="User not found") + + return user.ssh_keys + + +@router.post("/ssh/keys/{username}", status_code=201) +async def rest_add_ssh_key(username: str, input: SshKeyInput): + try: + create_ssh_key(username, input.public_key) + except KeyAlreadyExists as error: + raise HTTPException(status_code=409, detail="Key already exists") from error + except InvalidPublicKey as error: + raise HTTPException( + status_code=400, + detail="Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", + ) from error + except UserNotFound as error: + raise HTTPException(status_code=404, detail="User not found") from error + + return { + "message": "New SSH key successfully written", + } + + +@router.delete("/ssh/keys/{username}") +async def rest_delete_ssh_key(username: str, input: SshKeyInput): + try: + remove_ssh_key(username, input.public_key) + except KeyNotFound as error: + raise HTTPException(status_code=404, detail="Key not found") from error + except UserNotFound as error: + raise HTTPException(status_code=404, detail="User not found") from error + return {"message": "SSH key deleted"} diff --git a/selfprivacy_api/rest/system.py b/selfprivacy_api/rest/system.py new file mode 100644 index 0000000..9933fb3 --- /dev/null +++ b/selfprivacy_api/rest/system.py @@ -0,0 +1,105 @@ +from typing import Optional +from fastapi import APIRouter, Body, Depends, HTTPException +from pydantic import BaseModel + +from selfprivacy_api.dependencies import get_token_header + +import selfprivacy_api.actions.system as system_actions + +router = APIRouter( + prefix="/system", + tags=["system"], + dependencies=[Depends(get_token_header)], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/configuration/timezone") +async def get_timezone(): + """Get the timezone of the server""" + return system_actions.get_timezone() + + +class ChangeTimezoneRequestBody(BaseModel): + """Change the timezone of the server""" + + timezone: str + + +@router.put("/configuration/timezone") +async def change_timezone(timezone: ChangeTimezoneRequestBody): + """Change the timezone of the server""" + try: + system_actions.change_timezone(timezone.timezone) + except system_actions.InvalidTimezone as e: + raise HTTPException(status_code=400, detail=str(e)) + return {"timezone": timezone.timezone} + + +@router.get("/configuration/autoUpgrade") +async def get_auto_upgrade_settings(): + """Get the auto-upgrade settings""" + return system_actions.get_auto_upgrade_settings().dict() + + +class AutoUpgradeSettings(BaseModel): + """Settings for auto-upgrading user data""" + + enable: Optional[bool] = None + allowReboot: Optional[bool] = None + + +@router.put("/configuration/autoUpgrade") +async def set_auto_upgrade_settings(settings: AutoUpgradeSettings): + """Set the auto-upgrade settings""" + system_actions.set_auto_upgrade_settings(settings.enable, settings.allowReboot) + return "Auto-upgrade settings changed" + + +@router.get("/configuration/apply") +async def apply_configuration(): + """Apply the configuration""" + return_code = system_actions.rebuild_system() + return return_code + + +@router.get("/configuration/rollback") +async def rollback_configuration(): + """Rollback the configuration""" + return_code = system_actions.rollback_system() + return return_code + + +@router.get("/configuration/upgrade") +async def upgrade_configuration(): + """Upgrade the configuration""" + return_code = system_actions.upgrade_system() + return return_code + + +@router.get("/reboot") +async def reboot_system(): + """Reboot the system""" + system_actions.reboot_system() + return "System reboot has started" + + +@router.get("/version") +async def get_system_version(): + """Get the system version""" + return {"system_version": system_actions.get_system_version()} + + +@router.get("/pythonVersion") +async def get_python_version(): + """Get the Python version""" + return system_actions.get_python_version() + + +@router.get("/configuration/pull") +async def pull_configuration(): + """Pull the 
configuration""" + action_result = system_actions.pull_repository_changes() + if action_result.status == 0: + return action_result.dict() + raise HTTPException(status_code=500, detail=action_result.dict()) diff --git a/selfprivacy_api/rest/users.py b/selfprivacy_api/rest/users.py new file mode 100644 index 0000000..ab4c6c9 --- /dev/null +++ b/selfprivacy_api/rest/users.py @@ -0,0 +1,62 @@ +"""Users management module""" +from typing import Optional +from fastapi import APIRouter, Body, Depends, HTTPException +from pydantic import BaseModel + +import selfprivacy_api.actions.users as users_actions + +from selfprivacy_api.dependencies import get_token_header + +router = APIRouter( + prefix="/users", + tags=["users"], + dependencies=[Depends(get_token_header)], + responses={404: {"description": "Not found"}}, +) + + +@router.get("") +async def get_users(withMainUser: bool = False): + """Get the list of users""" + users: list[users_actions.UserDataUser] = users_actions.get_users( + exclude_primary=not withMainUser, exclude_root=True + ) + + return [user.username for user in users] + + +class UserInput(BaseModel): + """User input""" + + username: str + password: str + + +@router.post("", status_code=201) +async def create_user(user: UserInput): + try: + users_actions.create_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + raise HTTPException(status_code=400, detail=str(e)) + except users_actions.UsernameForbidden as e: + raise HTTPException(status_code=409, detail=str(e)) + except users_actions.UsernameNotAlphanumeric as e: + raise HTTPException(status_code=400, detail=str(e)) + except users_actions.UsernameTooLong as e: + raise HTTPException(status_code=400, detail=str(e)) + except users_actions.UserAlreadyExists as e: + raise HTTPException(status_code=409, detail=str(e)) + + return {"result": 0, "username": user.username} + + +@router.delete("/{username}") +async def delete_user(username: str): + try: + users_actions.delete_user(username) + except users_actions.UserNotFound as e: + raise HTTPException(status_code=404, detail=str(e)) + except users_actions.UserIsProtected as e: + raise HTTPException(status_code=400, detail=str(e)) + + return {"result": 0, "username": username} diff --git a/selfprivacy_api/restic_controller/tasks.py b/selfprivacy_api/restic_controller/tasks.py index 4c610c4..f583d8b 100644 --- a/selfprivacy_api/restic_controller/tasks.py +++ b/selfprivacy_api/restic_controller/tasks.py @@ -1,10 +1,8 @@ """Tasks for the restic controller.""" from huey import crontab -from huey.contrib.mini import MiniHuey +from selfprivacy_api.utils.huey import huey from . 
import ResticController, ResticStates -huey = MiniHuey() - @huey.task() def init_restic(): diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py index e69de29..a688734 100644 --- a/selfprivacy_api/services/__init__.py +++ b/selfprivacy_api/services/__init__.py @@ -0,0 +1,67 @@ +"""Services module.""" + +import typing +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.jitsi import Jitsi +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.services.ocserv import Ocserv +from selfprivacy_api.services.service import Service, ServiceDnsRecord +import selfprivacy_api.utils.network as network_utils + +services: list[Service] = [ + Bitwarden(), + Gitea(), + MailServer(), + Nextcloud(), + Pleroma(), + Ocserv(), + Jitsi(), +] + + +def get_all_services() -> list[Service]: + return services + + +def get_service_by_id(service_id: str) -> typing.Optional[Service]: + for service in services: + if service.get_id() == service_id: + return service + return None + + +def get_enabled_services() -> list[Service]: + return [service for service in services if service.is_enabled()] + + +def get_disabled_services() -> list[Service]: + return [service for service in services if not service.is_enabled()] + + +def get_services_by_location(location: str) -> list[Service]: + return [service for service in services if service.get_location() == location] + + +def get_all_required_dns_records() -> list[ServiceDnsRecord]: + ip4 = network_utils.get_ip4() + ip6 = network_utils.get_ip6() + dns_records: list[ServiceDnsRecord] = [ + ServiceDnsRecord( + type="A", + name="api", + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="api", + content=ip6, + ttl=3600, + ), + ] + for service in get_enabled_services(): + dns_records += service.get_dns_records() + return dns_records diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py new file mode 100644 index 0000000..ea93de1 --- /dev/null +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -0,0 +1,174 @@ +"""Class representing Bitwarden service""" +import base64 +import subprocess +import typing + +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import huey +import selfprivacy_api.utils.network as network_utils +from selfprivacy_api.services.bitwarden.icon import BITWARDEN_ICON + + +class Bitwarden(Service): + """Class representing Bitwarden service.""" + + @staticmethod + def get_id() -> str: + """Return service id.""" + return "bitwarden" + + @staticmethod + def get_display_name() -> str: + """Return service display name.""" + return "Bitwarden" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "Bitwarden is a password manager." 
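# --- Editor's illustrative sketch (not part of the patch): the services
# registry above exposes get_all_required_dns_records(), which merges the
# "api" A/AAAA records with every enabled service's own records. A caller
# could flatten that result into zone-file style lines as below;
# format_zone_lines() is a hypothetical helper added here for illustration only.
from selfprivacy_api.services import get_all_required_dns_records

def format_zone_lines() -> list[str]:
    lines = []
    for record in get_all_required_dns_records():
        # Only MX records carry a priority in this patch; other types leave it None.
        priority = f" {record.priority}" if record.priority is not None else ""
        lines.append(f"{record.name} {record.ttl} IN {record.type}{priority} {record.content}")
    return lines
# --- end of editor's sketch ---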
+ + @staticmethod + def get_svg_icon() -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + return base64.b64encode(BITWARDEN_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://password.{domain}" + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("bitwarden", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + """ + Return Bitwarden status from systemd. + Use command return code to determine status. + + Return code 0 means service is running. + Return code 1 or 2 means service is in error stat. + Return code 3 means service is stopped. + Return code 4 means service is off. + """ + return get_service_status("vaultwarden.service") + + @staticmethod + def enable(): + """Enable Bitwarden service.""" + with WriteUserData() as user_data: + if "bitwarden" not in user_data: + user_data["bitwarden"] = {} + user_data["bitwarden"]["enable"] = True + + @staticmethod + def disable(): + """Disable Bitwarden service.""" + with WriteUserData() as user_data: + if "bitwarden" not in user_data: + user_data["bitwarden"] = {} + user_data["bitwarden"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "vaultwarden.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "vaultwarden.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "vaultwarden.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + storage_usage += get_storage_usage("/var/lib/bitwarden") + storage_usage += get_storage_usage("/var/lib/bitwarden_rs") + return storage_usage + + @staticmethod + def get_location() -> str: + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("bitwarden", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + """Return list of DNS records for Bitwarden service.""" + return [ + ServiceDnsRecord( + type="A", + name="password", + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="password", + content=network_utils.get_ip6(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + job = Jobs.get_instance().add( + type_id="services.bitwarden.move", + name="Move Bitwarden", + description=f"Moving Bitwarden data to {volume.name}", + ) + + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="bitwarden", + bind_location="/var/lib/bitwarden", + group="vaultwarden", + owner="vaultwarden", + ), + FolderMoveNames( + name="bitwarden_rs", + bind_location="/var/lib/bitwarden_rs", + group="vaultwarden", + owner="vaultwarden", + ), + ], + "bitwarden", + ) + + return job diff --git a/selfprivacy_api/services/bitwarden/bitwarden.svg b/selfprivacy_api/services/bitwarden/bitwarden.svg new file mode 100644 index 0000000..ced270c --- /dev/null +++ b/selfprivacy_api/services/bitwarden/bitwarden.svg @@ -0,0 +1,3 @@ + + + diff --git 
a/selfprivacy_api/services/bitwarden/icon.py b/selfprivacy_api/services/bitwarden/icon.py new file mode 100644 index 0000000..f9280e0 --- /dev/null +++ b/selfprivacy_api/services/bitwarden/icon.py @@ -0,0 +1,5 @@ +BITWARDEN_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py new file mode 100644 index 0000000..c7d7c3b --- /dev/null +++ b/selfprivacy_api/services/generic_service_mover.py @@ -0,0 +1,237 @@ +"""Generic handler for moving services""" + +import subprocess +import time +import pathlib +import shutil + +from pydantic import BaseModel +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.services.service import Service, ServiceStatus + + +class FolderMoveNames(BaseModel): + name: str + bind_location: str + owner: str + group: str + + +@huey.task() +def move_service( + service: Service, + volume: BlockDevice, + job: Job, + folder_names: list[FolderMoveNames], + userdata_location: str, +): + """Move a service to another volume.""" + job = Jobs.get_instance().update( + job=job, + status_text="Performing pre-move checks...", + status=JobStatus.RUNNING, + ) + service_name = service.get_display_name() + with ReadUserData() as user_data: + if not user_data.get("useBinds", False): + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Server is not using binds.", + ) + return + # Check if we are on the same volume + old_volume = service.get_location() + if old_volume == volume.name: + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error=f"{service_name} is already on this volume.", + ) + return + # Check if there is enough space on the new volume + if int(volume.fsavail) < service.get_storage_usage(): + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Not enough space on the new volume.", + ) + return + # Make sure the volume is mounted + if f"/volumes/{volume.name}" not in volume.mountpoints: + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Volume is not mounted.", + ) + return + # Make sure current actual directory exists and if its user and group are correct + for folder in folder_names: + if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").exists(): + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error=f"{service_name} is not found.", + ) + return + if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").is_dir(): + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error=f"{service_name} is not a directory.", + ) + return + if ( + not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").owner() + == folder.owner + ): + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error=f"{service_name} owner is not {folder.owner}.", + ) + return + + # Stop service + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text=f"Stopping {service_name}...", + progress=5, + ) + service.stop() + # Wait for the service to stop, check every second + # If it does not stop in 30 seconds, abort + for _ in range(30): + if service.get_status() not in ( + ServiceStatus.ACTIVATING, + ServiceStatus.DEACTIVATING, + ): + break + time.sleep(1) + else: + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error=f"{service_name} did not stop in 30 
seconds.", + ) + return + + # Unmount old volume + Jobs.get_instance().update( + job=job, + status_text="Unmounting old folder...", + status=JobStatus.RUNNING, + progress=10, + ) + for folder in folder_names: + try: + subprocess.run( + ["umount", folder.bind_location], + check=True, + ) + except subprocess.CalledProcessError: + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Unable to unmount old volume.", + ) + return + # Move data to new volume and set correct permissions + Jobs.get_instance().update( + job=job, + status_text="Moving data to new volume...", + status=JobStatus.RUNNING, + progress=20, + ) + current_progress = 20 + folder_percentage = 50 // len(folder_names) + for folder in folder_names: + shutil.move( + f"/volumes/{old_volume}/{folder.name}", + f"/volumes/{volume.name}/{folder.name}", + ) + Jobs.get_instance().update( + job=job, + status_text="Moving data to new volume...", + status=JobStatus.RUNNING, + progress=current_progress + folder_percentage, + ) + + Jobs.get_instance().update( + job=job, + status_text=f"Making sure {service_name} owns its files...", + status=JobStatus.RUNNING, + progress=70, + ) + for folder in folder_names: + try: + subprocess.run( + [ + "chown", + "-R", + f"{folder.owner}:f{folder.group}", + f"/volumes/{volume.name}/{folder.name}", + ], + check=True, + ) + except subprocess.CalledProcessError as error: + print(error.output) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + error=f"Unable to set ownership of new volume. {service_name} may not be able to access its files. Continuing anyway.", + ) + return + + # Mount new volume + Jobs.get_instance().update( + job=job, + status_text=f"Mounting {service_name} data...", + status=JobStatus.RUNNING, + progress=90, + ) + + for folder in folder_names: + try: + subprocess.run( + [ + "mount", + "--bind", + f"/volumes/{volume.name}/{folder.name}", + folder.bind_location, + ], + check=True, + ) + except subprocess.CalledProcessError as error: + print(error.output) + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Unable to mount new volume.", + ) + return + + # Update userdata + Jobs.get_instance().update( + job=job, + status_text="Finishing move...", + status=JobStatus.RUNNING, + progress=95, + ) + with WriteUserData() as user_data: + if userdata_location not in user_data: + user_data[userdata_location] = {} + user_data[userdata_location]["location"] = volume.name + # Start service + service.start() + Jobs.get_instance().update( + job=job, + status=JobStatus.FINISHED, + result=f"{service_name} moved successfully.", + status_text=f"Starting {service}...", + progress=100, + ) diff --git a/selfprivacy_api/services/generic_size_counter.py b/selfprivacy_api/services/generic_size_counter.py new file mode 100644 index 0000000..4a706fb --- /dev/null +++ b/selfprivacy_api/services/generic_size_counter.py @@ -0,0 +1,16 @@ +"""Generic size counter using pathlib""" +import pathlib + + +def get_storage_usage(path: str) -> int: + """ + Calculate the real storage usage of path and all subdirectories. + Calculate using pathlib. + Do not follow symlinks. 
+ """ + storage_usage = 0 + for iter_path in pathlib.Path(path).rglob("**/*"): + if iter_path.is_dir(): + continue + storage_usage += iter_path.stat().st_size + return storage_usage diff --git a/selfprivacy_api/services/generic_status_getter.py b/selfprivacy_api/services/generic_status_getter.py new file mode 100644 index 0000000..46720af --- /dev/null +++ b/selfprivacy_api/services/generic_status_getter.py @@ -0,0 +1,60 @@ +"""Generic service status fetcher using systemctl""" +import subprocess + +from selfprivacy_api.services.service import ServiceStatus + + +def get_service_status(service: str) -> ServiceStatus: + """ + Return service status from systemd. + Use systemctl show to get the status of a service. + Get ActiveState from the output. + """ + service_status = subprocess.check_output(["systemctl", "show", service]) + if b"LoadState=not-found" in service_status: + return ServiceStatus.OFF + if b"ActiveState=active" in service_status: + return ServiceStatus.ACTIVE + if b"ActiveState=inactive" in service_status: + return ServiceStatus.INACTIVE + if b"ActiveState=activating" in service_status: + return ServiceStatus.ACTIVATING + if b"ActiveState=deactivating" in service_status: + return ServiceStatus.DEACTIVATING + if b"ActiveState=failed" in service_status: + return ServiceStatus.FAILED + if b"ActiveState=reloading" in service_status: + return ServiceStatus.RELOADING + return ServiceStatus.OFF + + +def get_service_status_from_several_units(services: list[str]) -> ServiceStatus: + """ + Fetch all service statuses for all services and return the worst status. + Statuses from worst to best: + - OFF + - FAILED + - RELOADING + - ACTIVATING + - DEACTIVATING + - INACTIVE + - ACTIVE + """ + service_statuses = [] + for service in services: + service_statuses.append(get_service_status(service)) + if ServiceStatus.OFF in service_statuses: + return ServiceStatus.OFF + if ServiceStatus.FAILED in service_statuses: + return ServiceStatus.FAILED + if ServiceStatus.RELOADING in service_statuses: + return ServiceStatus.RELOADING + if ServiceStatus.ACTIVATING in service_statuses: + return ServiceStatus.ACTIVATING + if ServiceStatus.DEACTIVATING in service_statuses: + return ServiceStatus.DEACTIVATING + if ServiceStatus.INACTIVE in service_statuses: + return ServiceStatus.INACTIVE + if ServiceStatus.ACTIVE in service_statuses: + return ServiceStatus.ACTIVE + return ServiceStatus.OFF diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py new file mode 100644 index 0000000..d563164 --- /dev/null +++ b/selfprivacy_api/services/gitea/__init__.py @@ -0,0 +1,165 @@ +"""Class representing Bitwarden service""" +import base64 +import subprocess +import typing + +from selfprivacy_api.jobs import Job, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import huey +import selfprivacy_api.utils.network as network_utils +from selfprivacy_api.services.gitea.icon import GITEA_ICON + + +class Gitea(Service): + """Class representing Gitea service""" + + @staticmethod + def get_id() -> str: + """Return service id.""" + return 
"gitea" + + @staticmethod + def get_display_name() -> str: + """Return service display name.""" + return "Gitea" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "Gitea is a Git forge." + + @staticmethod + def get_svg_icon() -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + return base64.b64encode(GITEA_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://git.{domain}" + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("gitea", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + """ + Return Gitea status from systemd. + Use command return code to determine status. + Return code 0 means service is running. + Return code 1 or 2 means service is in error stat. + Return code 3 means service is stopped. + Return code 4 means service is off. + """ + return get_service_status("gitea.service") + + @staticmethod + def enable(): + """Enable Gitea service.""" + with WriteUserData() as user_data: + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["enable"] = True + + @staticmethod + def disable(): + """Disable Gitea service.""" + with WriteUserData() as user_data: + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "gitea.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "gitea.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "gitea.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + storage_usage += get_storage_usage("/var/lib/gitea") + return storage_usage + + @staticmethod + def get_location() -> str: + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("gitea", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + return [ + ServiceDnsRecord( + type="A", + name="git", + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="git", + content=network_utils.get_ip6(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + job = Jobs.get_instance().add( + type_id="services.gitea.move", + name="Move Gitea", + description=f"Moving Gitea data to {volume.name}", + ) + + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="gitea", + bind_location="/var/lib/gitea", + group="gitea", + owner="gitea", + ), + ], + "bitwarden", + ) + + return job diff --git a/selfprivacy_api/services/gitea/gitea.svg b/selfprivacy_api/services/gitea/gitea.svg new file mode 100644 index 0000000..9ba8a76 --- /dev/null +++ b/selfprivacy_api/services/gitea/gitea.svg @@ -0,0 +1,3 @@ + + + diff --git a/selfprivacy_api/services/gitea/icon.py b/selfprivacy_api/services/gitea/icon.py new file mode 100644 index 0000000..569f96a --- /dev/null +++ b/selfprivacy_api/services/gitea/icon.py @@ -0,0 +1,5 @@ 
+GITEA_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py new file mode 100644 index 0000000..6b3a973 --- /dev/null +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -0,0 +1,142 @@ +"""Class representing Jitsi service""" +import base64 +import subprocess +import typing + +from selfprivacy_api.jobs import Job, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import ( + get_service_status, + get_service_status_from_several_units, +) +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import huey +import selfprivacy_api.utils.network as network_utils +from selfprivacy_api.services.jitsi.icon import JITSI_ICON + + +class Jitsi(Service): + """Class representing Jitsi service""" + + @staticmethod + def get_id() -> str: + """Return service id.""" + return "jitsi" + + @staticmethod + def get_display_name() -> str: + """Return service display name.""" + return "Jitsi" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "Jitsi is a free and open-source video conferencing solution." + + @staticmethod + def get_svg_icon() -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + return base64.b64encode(JITSI_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://meet.{domain}" + + @staticmethod + def is_movable() -> bool: + return False + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("jitsi", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + return get_service_status_from_several_units( + ["jitsi-videobridge.service", "jicofo.service"] + ) + + @staticmethod + def enable(): + """Enable Jitsi service.""" + with WriteUserData() as user_data: + if "jitsi" not in user_data: + user_data["jitsi"] = {} + user_data["jitsi"]["enable"] = True + + @staticmethod + def disable(): + """Disable Gitea service.""" + with WriteUserData() as user_data: + if "jitsi" not in user_data: + user_data["jitsi"] = {} + user_data["jitsi"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "jitsi-videobridge.service"]) + subprocess.run(["systemctl", "stop", "jicofo.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "jitsi-videobridge.service"]) + subprocess.run(["systemctl", "start", "jicofo.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "jitsi-videobridge.service"]) + subprocess.run(["systemctl", "restart", "jicofo.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + storage_usage += get_storage_usage("/var/lib/jitsi-meet") + return storage_usage + + @staticmethod + def get_location() -> str: + return "sda1" + + 
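# --- Editor's illustrative sketch (not part of the patch): Jitsi's get_status()
# relies on get_service_status_from_several_units(), whose chain of membership
# checks encodes a fixed severity order (OFF worst, ACTIVE best). An equivalent,
# more compact formulation could rank the statuses explicitly; _SEVERITY and
# worst_status() below are assumptions written only for illustration.
from selfprivacy_api.services.service import ServiceStatus

_SEVERITY = [
    ServiceStatus.OFF,
    ServiceStatus.FAILED,
    ServiceStatus.RELOADING,
    ServiceStatus.ACTIVATING,
    ServiceStatus.DEACTIVATING,
    ServiceStatus.INACTIVE,
    ServiceStatus.ACTIVE,
]

def worst_status(statuses: list[ServiceStatus]) -> ServiceStatus:
    # The lowest index in _SEVERITY is the worst status; an empty input
    # degrades to OFF, matching the behaviour of the original helper.
    return min(statuses, key=_SEVERITY.index, default=ServiceStatus.OFF)
# --- end of editor's sketch ---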
@staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + ip4 = network_utils.get_ip4() + ip6 = network_utils.get_ip6() + return [ + ServiceDnsRecord( + type="A", + name="meet", + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="meet", + content=ip6, + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + raise NotImplementedError("jitsi service is not movable") diff --git a/selfprivacy_api/services/jitsi/icon.py b/selfprivacy_api/services/jitsi/icon.py new file mode 100644 index 0000000..08bcbb1 --- /dev/null +++ b/selfprivacy_api/services/jitsi/icon.py @@ -0,0 +1,5 @@ +JITSI_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py new file mode 100644 index 0000000..1a72f33 --- /dev/null +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -0,0 +1,179 @@ +"""Class representing Dovecot and Postfix services""" + +import base64 +import subprocess +import typing + +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import ( + get_service_status, + get_service_status_from_several_units, +) +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +import selfprivacy_api.utils as utils +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import huey +import selfprivacy_api.utils.network as network_utils +from selfprivacy_api.services.mailserver.icon import MAILSERVER_ICON + + +class MailServer(Service): + """Class representing mail service""" + + @staticmethod + def get_id() -> str: + return "mailserver" + + @staticmethod + def get_display_name() -> str: + return "Mail Server" + + @staticmethod + def get_description() -> str: + return "E-Mail for company and family." 
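# --- Editor's illustrative sketch (not part of the patch): non-movable services
# such as Jitsi raise NotImplementedError from move_to_volume(), so callers are
# expected to consult is_movable() first. request_move() below is a hypothetical
# guard written only to illustrate that contract.
from typing import Optional

from selfprivacy_api.jobs import Job
from selfprivacy_api.services.service import Service
from selfprivacy_api.utils.block_devices import BlockDevice

def request_move(service: Service, volume: BlockDevice) -> Optional[Job]:
    # Refuse early instead of letting move_to_volume() raise for this service.
    if not service.is_movable():
        return None
    return service.move_to_volume(volume)
# --- end of editor's sketch ---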
+ + @staticmethod + def get_svg_icon() -> str: + return base64.b64encode(MAILSERVER_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + return None + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return True + + @staticmethod + def is_enabled() -> bool: + return True + + @staticmethod + def get_status() -> ServiceStatus: + return get_service_status_from_several_units( + ["dovecot2.service", "postfix.service"] + ) + + @staticmethod + def enable(): + raise NotImplementedError("enable is not implemented for MailServer") + + @staticmethod + def disable(): + raise NotImplementedError("disable is not implemented for MailServer") + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "dovecot2.service"]) + subprocess.run(["systemctl", "stop", "postfix.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "dovecot2.service"]) + subprocess.run(["systemctl", "start", "postfix.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "dovecot2.service"]) + subprocess.run(["systemctl", "restart", "postfix.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + return get_storage_usage("/var/vmail") + + @staticmethod + def get_location() -> str: + with utils.ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("mailserver", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + domain = utils.get_domain() + dkim_record = utils.get_dkim_key(domain) + ip4 = network_utils.get_ip4() + ip6 = network_utils.get_ip6() + + if dkim_record is None: + return [] + + return [ + ServiceDnsRecord( + type="A", + name=domain, + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name=domain, + content=ip6, + ttl=3600, + ), + ServiceDnsRecord( + type="MX", name=domain, content=domain, ttl=3600, priority=10 + ), + ServiceDnsRecord( + type="TXT", name="_dmarc", content=f"v=DMARC1; p=none", ttl=18000 + ), + ServiceDnsRecord( + type="TXT", + name=domain, + content=f"v=spf1 a mx ip4:{ip4} -all", + ttl=18000, + ), + ServiceDnsRecord( + type="TXT", name="selector._domainkey", content=dkim_record, ttl=18000 + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + job = Jobs.get_instance().add( + type_id="services.mailserver.move", + name="Move Mail Server", + description=f"Moving mailserver data to {volume.name}", + ) + + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="vmail", + bind_location="/var/vmail", + group="virtualMail", + owner="virtualMail", + ), + FolderMoveNames( + name="sieve", + bind_location="/var/sieve", + group="virtualMail", + owner="virtualMail", + ), + ], + "mailserver", + ) + + return job diff --git a/selfprivacy_api/services/mailserver/icon.py b/selfprivacy_api/services/mailserver/icon.py new file mode 100644 index 0000000..a688ef3 --- /dev/null +++ b/selfprivacy_api/services/mailserver/icon.py @@ -0,0 +1,5 @@ +MAILSERVER_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/mailserver/mailserver.svg b/selfprivacy_api/services/mailserver/mailserver.svg new file mode 100644 index 0000000..d7d0ee2 --- /dev/null +++ 
b/selfprivacy_api/services/mailserver/mailserver.svg @@ -0,0 +1,3 @@ + + + diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 525f657..4057b49 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -1,36 +1,62 @@ """Class representing Nextcloud service.""" import base64 import subprocess -import psutil -from selfprivacy_api.services.service import Service, ServiceStatus -from selfprivacy_api.utils import ReadUserData, WriteUserData +import typing +from selfprivacy_api.jobs import Job, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain +from selfprivacy_api.utils.block_devices import BlockDevice +import selfprivacy_api.utils.network as network_utils +from selfprivacy_api.services.nextcloud.icon import NEXTCLOUD_ICON class Nextcloud(Service): """Class representing Nextcloud service.""" - def get_id(self) -> str: + @staticmethod + def get_id() -> str: """Return service id.""" return "nextcloud" - def get_display_name(self) -> str: + @staticmethod + def get_display_name() -> str: """Return service display name.""" return "Nextcloud" - def get_description(self) -> str: + @staticmethod + def get_description() -> str: """Return service description.""" return "Nextcloud is a cloud storage service that offers a web interface and a desktop client." - def get_svg_icon(self) -> str: + @staticmethod + def get_svg_icon() -> str: """Read SVG icon from file and return it as base64 encoded string.""" - with open("selfprivacy_api/services/nextcloud/nextcloud.svg", "rb") as f: - return base64.b64encode(f.read()).decode("utf-8") + return base64.b64encode(NEXTCLOUD_ICON.encode("utf-8")).decode("utf-8") - def is_enabled(self) -> bool: + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://cloud.{domain}" + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: with ReadUserData() as user_data: return user_data.get("nextcloud", {}).get("enable", False) - def get_status(self) -> ServiceStatus: + @staticmethod + def get_status() -> ServiceStatus: """ Return Nextcloud status from systemd. Use command return code to determine status. @@ -40,57 +66,106 @@ class Nextcloud(Service): Return code 3 means service is stopped. Return code 4 means service is off. 
""" - service_status = subprocess.Popen( - ["systemctl", "status", "phpfpm-nextcloud.service"] - ) - service_status.communicate()[0] - if service_status.returncode == 0: - return ServiceStatus.RUNNING - elif service_status.returncode == 1 or service_status.returncode == 2: - return ServiceStatus.ERROR - elif service_status.returncode == 3: - return ServiceStatus.STOPPED - elif service_status.returncode == 4: - return ServiceStatus.OFF - else: - return ServiceStatus.DEGRADED + return get_service_status("phpfpm-nextcloud.service") - def enable(self): + @staticmethod + def enable(): """Enable Nextcloud service.""" with WriteUserData() as user_data: if "nextcloud" not in user_data: user_data["nextcloud"] = {} user_data["nextcloud"]["enable"] = True - def disable(self): + @staticmethod + def disable(): """Disable Nextcloud service.""" with WriteUserData() as user_data: if "nextcloud" not in user_data: user_data["nextcloud"] = {} user_data["nextcloud"]["enable"] = False - def stop(self): + @staticmethod + def stop(): """Stop Nextcloud service.""" subprocess.Popen(["systemctl", "stop", "phpfpm-nextcloud.service"]) - def start(self): + @staticmethod + def start(): """Start Nextcloud service.""" subprocess.Popen(["systemctl", "start", "phpfpm-nextcloud.service"]) - def restart(self): + @staticmethod + def restart(): """Restart Nextcloud service.""" subprocess.Popen(["systemctl", "restart", "phpfpm-nextcloud.service"]) - def get_configuration(self) -> dict: + @staticmethod + def get_configuration() -> dict: """Return Nextcloud configuration.""" return {} - def set_configuration(self, config_items): + @staticmethod + def set_configuration(config_items): return super().set_configuration(config_items) - def get_logs(self): + @staticmethod + def get_logs(): """Return Nextcloud logs.""" return "" - def get_storage_usage(self): - return psutil.disk_usage("/var/lib/nextcloud").used + @staticmethod + def get_storage_usage() -> int: + """ + Calculate the real storage usage of /var/lib/nextcloud and all subdirectories. + Calculate using pathlib. + Do not follow symlinks. 
+ """ + return get_storage_usage("/var/lib/nextcloud") + + @staticmethod + def get_location() -> str: + """Get the name of disk where Nextcloud is installed.""" + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("nextcloud", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + return [ + ServiceDnsRecord( + type="A", + name="cloud", + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="cloud", + content=network_utils.get_ip6(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + job = Jobs.get_instance().add( + type_id="services.nextcloud.move", + name="Move Nextcloud", + description=f"Moving Nextcloud to volume {volume.name}", + ) + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="nextcloud", + bind_location="/var/lib/nextcloud", + owner="nextcloud", + group="nextcloud", + ), + ], + "nextcloud", + ) + return job diff --git a/selfprivacy_api/services/nextcloud/icon.py b/selfprivacy_api/services/nextcloud/icon.py new file mode 100644 index 0000000..d178640 --- /dev/null +++ b/selfprivacy_api/services/nextcloud/icon.py @@ -0,0 +1,12 @@ +NEXTCLOUD_ICON = """ + + + + + + + + + + +""" diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py new file mode 100644 index 0000000..dcfacaa --- /dev/null +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -0,0 +1,121 @@ +"""Class representing ocserv service.""" +import base64 +import subprocess +import typing +from selfprivacy_api.jobs import Job, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.services.ocserv.icon import OCSERV_ICON +import selfprivacy_api.utils.network as network_utils + + +class Ocserv(Service): + """Class representing ocserv service.""" + + @staticmethod + def get_id() -> str: + return "ocserv" + + @staticmethod + def get_display_name() -> str: + return "OpenConnect VPN" + + @staticmethod + def get_description() -> str: + return "OpenConnect VPN to connect your devices and access the internet." 
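# --- Editor's illustrative sketch (not part of the patch): every service in this
# patch repeats the same enable()/disable() pattern against userdata.json. A
# shared helper could factor it out; set_service_enabled() below is an
# assumption, not an API introduced anywhere in this series.
from selfprivacy_api.utils import WriteUserData

def set_service_enabled(userdata_key: str, enabled: bool) -> None:
    # Toggle the "enable" flag for a service section, creating it if missing.
    with WriteUserData() as user_data:
        if userdata_key not in user_data:
            user_data[userdata_key] = {}
        user_data[userdata_key]["enable"] = enabled
# --- end of editor's sketch ---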
+ + @staticmethod + def get_svg_icon() -> str: + return base64.b64encode(OCSERV_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + return None + + @staticmethod + def is_movable() -> bool: + return False + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("ocserv", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + return get_service_status("ocserv.service") + + @staticmethod + def enable(): + with WriteUserData() as user_data: + if "ocserv" not in user_data: + user_data["ocserv"] = {} + user_data["ocserv"]["enable"] = True + + @staticmethod + def disable(): + with WriteUserData() as user_data: + if "ocserv" not in user_data: + user_data["ocserv"] = {} + user_data["ocserv"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "ocserv.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "ocserv.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "ocserv.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_location() -> str: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + return [ + ServiceDnsRecord( + type="A", + name="vpn", + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="vpn", + content=network_utils.get_ip6(), + ttl=3600, + ), + ] + + @staticmethod + def get_storage_usage() -> int: + return 0 + + def move_to_volume(self, volume: BlockDevice) -> Job: + raise NotImplementedError("ocserv service is not movable") diff --git a/selfprivacy_api/services/ocserv/icon.py b/selfprivacy_api/services/ocserv/icon.py new file mode 100644 index 0000000..6585c5e --- /dev/null +++ b/selfprivacy_api/services/ocserv/icon.py @@ -0,0 +1,5 @@ +OCSERV_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/ocserv/ocserv.svg b/selfprivacy_api/services/ocserv/ocserv.svg new file mode 100644 index 0000000..288f743 --- /dev/null +++ b/selfprivacy_api/services/ocserv/ocserv.svg @@ -0,0 +1,3 @@ + + + diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py new file mode 100644 index 0000000..97c11f5 --- /dev/null +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -0,0 +1,157 @@ +"""Class representing Nextcloud service.""" +import base64 +import subprocess +import typing +from selfprivacy_api.jobs import Job, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain +from selfprivacy_api.utils.block_devices import BlockDevice +import selfprivacy_api.utils.network as network_utils +from selfprivacy_api.services.pleroma.icon import PLEROMA_ICON + + +class Pleroma(Service): + """Class representing Pleroma service.""" + + @staticmethod + def get_id() -> str: + return "pleroma" + + @staticmethod + def 
get_display_name() -> str: + return "Pleroma" + + @staticmethod + def get_description() -> str: + return "Pleroma is a microblogging service that offers a web interface and a desktop client." + + @staticmethod + def get_svg_icon() -> str: + return base64.b64encode(PLEROMA_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://social.{domain}" + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("pleroma", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + return get_service_status("pleroma.service") + + @staticmethod + def enable(): + with WriteUserData() as user_data: + if "pleroma" not in user_data: + user_data["pleroma"] = {} + user_data["pleroma"]["enable"] = True + + @staticmethod + def disable(): + with WriteUserData() as user_data: + if "pleroma" not in user_data: + user_data["pleroma"] = {} + user_data["pleroma"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "pleroma.service"]) + subprocess.run(["systemctl", "stop", "postgresql.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "pleroma.service"]) + subprocess.run(["systemctl", "start", "postgresql.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "pleroma.service"]) + subprocess.run(["systemctl", "restart", "postgresql.service"]) + + @staticmethod + def get_configuration(config_items): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + storage_usage += get_storage_usage("/var/lib/pleroma") + storage_usage += get_storage_usage("/var/lib/postgresql") + return storage_usage + + @staticmethod + def get_location() -> str: + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("pleroma", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + return [ + ServiceDnsRecord( + type="A", + name="social", + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="social", + content=network_utils.get_ip6(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + job = Jobs.get_instance().add( + type_id="services.pleroma.move", + name="Move Pleroma", + description=f"Moving Pleroma to volume {volume.name}", + ) + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="pleroma", + bind_location="/var/lib/pleroma", + owner="pleroma", + group="pleroma", + ), + FolderMoveNames( + name="postgresql", + bind_location="/var/lib/postgresql", + owner="postgres", + group="postgres", + ), + ], + "pleroma", + ) + return job diff --git a/selfprivacy_api/services/pleroma/icon.py b/selfprivacy_api/services/pleroma/icon.py new file mode 100644 index 0000000..c0c4d2b --- /dev/null +++ b/selfprivacy_api/services/pleroma/icon.py @@ -0,0 +1,12 @@ +PLEROMA_ICON = """ + + + + + + + + + + +""" diff --git a/selfprivacy_api/services/pleroma/pleroma.svg b/selfprivacy_api/services/pleroma/pleroma.svg new file mode 100644 index 0000000..f87c438 --- /dev/null +++ 
b/selfprivacy_api/services/pleroma/pleroma.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index a0e6ae6..515e28f 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -3,23 +3,30 @@ from abc import ABC, abstractmethod from enum import Enum import typing +from pydantic import BaseModel +from selfprivacy_api.jobs import Job + +from selfprivacy_api.utils.block_devices import BlockDevice + class ServiceStatus(Enum): """Enum for service status""" - RUNNING = "RUNNING" - DEGRADED = "DEGRADED" - ERROR = "ERROR" - STOPPED = "STOPPED" + ACTIVE = "ACTIVE" + RELOADING = "RELOADING" + INACTIVE = "INACTIVE" + FAILED = "FAILED" + ACTIVATING = "ACTIVATING" + DEACTIVATING = "DEACTIVATING" OFF = "OFF" -class ServiceDnsRecord: +class ServiceDnsRecord(BaseModel): type: str name: str content: str ttl: int - priority: typing.Optional[int] + priority: typing.Optional[int] = None class Service(ABC): @@ -28,66 +35,106 @@ class Service(ABC): can be installed, configured and used by a user. """ + @staticmethod @abstractmethod - def get_id(self) -> str: + def get_id() -> str: + pass + + @staticmethod + @abstractmethod + def get_display_name() -> str: + pass + + @staticmethod + @abstractmethod + def get_description() -> str: + pass + + @staticmethod + @abstractmethod + def get_svg_icon() -> str: + pass + + @staticmethod + @abstractmethod + def get_url() -> typing.Optional[str]: + pass + + @staticmethod + @abstractmethod + def is_movable() -> bool: + pass + + @staticmethod + @abstractmethod + def is_required() -> bool: + pass + + @staticmethod + @abstractmethod + def is_enabled() -> bool: + pass + + @staticmethod + @abstractmethod + def get_status() -> ServiceStatus: + pass + + @staticmethod + @abstractmethod + def enable(): + pass + + @staticmethod + @abstractmethod + def disable(): + pass + + @staticmethod + @abstractmethod + def stop(): + pass + + @staticmethod + @abstractmethod + def start(): + pass + + @staticmethod + @abstractmethod + def restart(): + pass + + @staticmethod + @abstractmethod + def get_configuration(): + pass + + @staticmethod + @abstractmethod + def set_configuration(config_items): + pass + + @staticmethod + @abstractmethod + def get_logs(): + pass + + @staticmethod + @abstractmethod + def get_storage_usage() -> int: + pass + + @staticmethod + @abstractmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + pass + + @staticmethod + @abstractmethod + def get_location() -> str: pass @abstractmethod - def get_display_name(self) -> str: - pass - - @abstractmethod - def get_description(self) -> str: - pass - - @abstractmethod - def get_svg_icon(self) -> str: - pass - - @abstractmethod - def is_enabled(self) -> bool: - pass - - @abstractmethod - def get_status(self) -> ServiceStatus: - pass - - @abstractmethod - def enable(self): - pass - - @abstractmethod - def disable(self): - pass - - @abstractmethod - def stop(self): - pass - - @abstractmethod - def start(self): - pass - - @abstractmethod - def restart(self): - pass - - @abstractmethod - def get_configuration(self): - pass - - @abstractmethod - def set_configuration(self, config_items): - pass - - @abstractmethod - def get_logs(self): - pass - - @abstractmethod - def get_storage_usage(self): - pass - - @abstractmethod - def get_dns_records(self) -> typing.List[ServiceDnsRecord]: + def move_to_volume(self, volume: BlockDevice) -> Job: pass diff --git a/selfprivacy_api/task_registry.py b/selfprivacy_api/task_registry.py 
new file mode 100644 index 0000000..82eaf06 --- /dev/null +++ b/selfprivacy_api/task_registry.py @@ -0,0 +1,4 @@ +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.jobs.test import test_job +from selfprivacy_api.restic_controller.tasks import * +from selfprivacy_api.services.generic_service_mover import move_service diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 8ab26d1..83213d7 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -10,6 +10,7 @@ import portalocker USERDATA_FILE = "/etc/nixos/userdata/userdata.json" TOKENS_FILE = "/etc/nixos/userdata/tokens.json" +JOBS_FILE = "/etc/nixos/userdata/jobs.json" DOMAIN_FILE = "/var/domain" @@ -18,6 +19,7 @@ class UserDataFiles(Enum): USERDATA = 0 TOKENS = 1 + JOBS = 2 def get_domain(): @@ -35,6 +37,12 @@ class WriteUserData(object): self.userdata_file = open(USERDATA_FILE, "r+", encoding="utf-8") elif file_type == UserDataFiles.TOKENS: self.userdata_file = open(TOKENS_FILE, "r+", encoding="utf-8") + elif file_type == UserDataFiles.JOBS: + # Make sure file exists + if not os.path.exists(JOBS_FILE): + with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: + jobs_file.write("{}") + self.userdata_file = open(JOBS_FILE, "r+", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_EX) @@ -60,6 +68,12 @@ class ReadUserData(object): self.userdata_file = open(USERDATA_FILE, "r", encoding="utf-8") elif file_type == UserDataFiles.TOKENS: self.userdata_file = open(TOKENS_FILE, "r", encoding="utf-8") + elif file_type == UserDataFiles.JOBS: + # Make sure file exists + if not os.path.exists(JOBS_FILE): + with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: + jobs_file.write("{}") + self.userdata_file = open(JOBS_FILE, "r", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_SH) diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index f512948..ecaf9af 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -5,6 +5,7 @@ from datetime import datetime, timedelta import re import typing +from pydantic import BaseModel from mnemonic import Mnemonic from . 
import ReadUserData, UserDataFiles, WriteUserData, parse_date @@ -87,7 +88,7 @@ def is_token_name_pair_valid(token_name, token): return False -def get_token_name(token): +def get_token_name(token: str) -> typing.Optional[str]: """Return the name of the token provided""" with ReadUserData(UserDataFiles.TOKENS) as tokens: for t in tokens["tokens"]: @@ -96,11 +97,22 @@ def get_token_name(token): return None +class BasicTokenInfo(BaseModel): + """Token info""" + + name: str + date: datetime + + def get_tokens_info(): """Get all tokens info without tokens themselves""" with ReadUserData(UserDataFiles.TOKENS) as tokens: return [ - {"name": token["name"], "date": token["date"]} for token in tokens["tokens"] + BasicTokenInfo( + name=t["name"], + date=parse_date(t["date"]), + ) + for t in tokens["tokens"] ] diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index e6adddc..9d96d52 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -16,13 +16,13 @@ def get_block_device(device_name): "-J", "-b", "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE, MODEL,SERIAL,TYPE", - device_name, + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + f"/dev/{device_name}", ] ) lsblk_output = lsblk_output.decode("utf-8") lsblk_output = json.loads(lsblk_output) - return lsblk_output["blockdevices"] + return lsblk_output["blockdevices"][0] def resize_block_device(block_device) -> bool: @@ -30,9 +30,11 @@ def resize_block_device(block_device) -> bool: Resize a block device. Return True if successful. """ resize_command = ["resize2fs", block_device] - resize_process = subprocess.Popen(resize_command, shell=False) - resize_process.communicate() - return resize_process.returncode == 0 + try: + subprocess.check_output(resize_command, shell=False) + except subprocess.CalledProcessError: + return False + return True class BlockDevice: @@ -43,14 +45,14 @@ class BlockDevice: def __init__(self, block_device): self.name = block_device["name"] self.path = block_device["path"] - self.fsavail = block_device["fsavail"] - self.fssize = block_device["fssize"] + self.fsavail = str(block_device["fsavail"]) + self.fssize = str(block_device["fssize"]) self.fstype = block_device["fstype"] - self.fsused = block_device["fsused"] - self.mountpoint = block_device["mountpoint"] + self.fsused = str(block_device["fsused"]) + self.mountpoints = block_device["mountpoints"] self.label = block_device["label"] self.uuid = block_device["uuid"] - self.size = block_device["size"] + self.size = str(block_device["size"]) self.model = block_device["model"] self.serial = block_device["serial"] self.type = block_device["type"] @@ -60,7 +62,7 @@ class BlockDevice: return self.name def __repr__(self): - return f"" + return f"" def __eq__(self, other): return self.name == other.name @@ -73,14 +75,14 @@ class BlockDevice: Update current data and return a dictionary of stats. 
""" device = get_block_device(self.name) - self.fsavail = device["fsavail"] - self.fssize = device["fssize"] + self.fsavail = str(device["fsavail"]) + self.fssize = str(device["fssize"]) self.fstype = device["fstype"] - self.fsused = device["fsused"] - self.mountpoint = device["mountpoint"] + self.fsused = str(device["fsused"]) + self.mountpoints = device["mountpoints"] self.label = device["label"] self.uuid = device["uuid"] - self.size = device["size"] + self.size = str(device["size"]) self.model = device["model"] self.serial = device["serial"] self.type = device["type"] @@ -92,7 +94,7 @@ class BlockDevice: "fssize": self.fssize, "fstype": self.fstype, "fsused": self.fsused, - "mountpoint": self.mountpoint, + "mountpoints": self.mountpoints, "label": self.label, "uuid": self.uuid, "size": self.size, @@ -170,7 +172,7 @@ class BlockDevices: "-J", "-b", "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", ] ) lsblk_output = lsblk_output.decode("utf-8") @@ -219,6 +221,6 @@ class BlockDevices: """ block_devices = [] for block_device in self.block_devices: - if block_device.mountpoint == mountpoint: + if mountpoint in block_device.mountpoints: block_devices.append(block_device) return block_devices diff --git a/selfprivacy_api/utils/huey.py b/selfprivacy_api/utils/huey.py new file mode 100644 index 0000000..034f7ba --- /dev/null +++ b/selfprivacy_api/utils/huey.py @@ -0,0 +1,14 @@ +"""MiniHuey singleton.""" +import os +from huey import SqliteHuey + +HUEY_DATABASE = "/etc/nixos/userdata/tasks.db" + +# Singleton instance containing the huey database. + +test_mode = os.environ.get("TEST_MODE") + +huey = SqliteHuey( + HUEY_DATABASE, + immediate=test_mode == "true", +) diff --git a/selfprivacy_api/utils/network.py b/selfprivacy_api/utils/network.py index 5081f0e..c1b8a2b 100644 --- a/selfprivacy_api/utils/network.py +++ b/selfprivacy_api/utils/network.py @@ -2,9 +2,10 @@ """Network utils""" import subprocess import re +from typing import Optional -def get_ip4(): +def get_ip4() -> str: """Get IPv4 address""" try: ip4 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( @@ -13,10 +14,10 @@ def get_ip4(): ip4 = re.search(r"inet (\d+\.\d+\.\d+\.\d+)\/\d+", ip4) except subprocess.CalledProcessError: ip4 = None - return ip4.group(1) if ip4 else None + return ip4.group(1) if ip4 else "" -def get_ip6(): +def get_ip6() -> str: """Get IPv6 address""" try: ip6 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( @@ -25,4 +26,4 @@ def get_ip6(): ip6 = re.search(r"inet6 (\S+)\/\d+", ip6) except subprocess.CalledProcessError: ip6 = None - return ip6.group(1) if ip6 else None + return ip6.group(1) if ip6 else "" diff --git a/setup.py b/setup.py index 5619621..eabc165 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="1.2.7", + version="2.0.0", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", diff --git a/shell.nix b/shell.nix index e754a11..0ccb99d 100644 --- a/shell.nix +++ b/shell.nix @@ -1,12 +1,8 @@ { pkgs ? 
import { } }: let sp-python = pkgs.python39.withPackages (p: with p; [ - flask - flask-restful setuptools portalocker - flask-swagger - flask-swagger-ui pytz pytest pytest-mock @@ -18,9 +14,10 @@ let pylint pydantic typing-extensions - flask-cors psutil black + fastapi + uvicorn (buildPythonPackage rec { pname = "strawberry-graphql"; version = "0.123.0"; @@ -32,11 +29,11 @@ let typing-extensions python-multipart python-dateutil - flask + # flask pydantic pygments poetry - flask-cors + # flask-cors (buildPythonPackage rec { pname = "graphql-core"; version = "3.2.0"; diff --git a/tests/conftest.py b/tests/conftest.py index fb31456..ea7a66a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,9 +1,13 @@ """Tests configuration.""" # pylint: disable=redefined-outer-name # pylint: disable=unused-argument +import os import pytest -from flask import testing -from selfprivacy_api.app import create_app +from fastapi.testclient import TestClient + + +def pytest_generate_tests(metafunc): + os.environ["TEST_MODE"] = "true" @pytest.fixture @@ -16,66 +20,43 @@ def tokens_file(mocker, shared_datadir): @pytest.fixture -def app(): - """Flask application.""" - app = create_app( - { - "ENABLE_SWAGGER": "1", - } +def jobs_file(mocker, shared_datadir): + """Mock tokens file.""" + mock = mocker.patch("selfprivacy_api.utils.JOBS_FILE", shared_datadir / "jobs.json") + return mock + + +@pytest.fixture +def huey_database(mocker, shared_datadir): + """Mock huey database.""" + mock = mocker.patch( + "selfprivacy_api.utils.huey.HUEY_DATABASE", shared_datadir / "huey.db" ) - - yield app + return mock @pytest.fixture -def client(app, tokens_file): - """Flask unauthorized test client.""" - return app.test_client() +def client(tokens_file, huey_database, jobs_file): + from selfprivacy_api.app import app - -class AuthorizedClient(testing.FlaskClient): - """Flask authorized test client.""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.token = "TEST_TOKEN" - - def open(self, *args, **kwargs): - if "headers" not in kwargs: - kwargs["headers"] = {} - kwargs["headers"]["Authorization"] = f"Bearer {self.token}" - return super().open(*args, **kwargs) - - -class WrongAuthClient(testing.FlaskClient): - """Flask client with wrong token""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.token = "WRONG_TOKEN" - - def open(self, *args, **kwargs): - if "headers" not in kwargs: - kwargs["headers"] = {} - kwargs["headers"]["Authorization"] = f"Bearer {self.token}" - return super().open(*args, **kwargs) + return TestClient(app) @pytest.fixture -def authorized_client(app, tokens_file): +def authorized_client(tokens_file, huey_database, jobs_file): """Authorized test client fixture.""" - app.test_client_class = AuthorizedClient - return app.test_client() + from selfprivacy_api.app import app + + client = TestClient(app) + client.headers.update({"Authorization": "Bearer TEST_TOKEN"}) + return client @pytest.fixture -def wrong_auth_client(app, tokens_file): +def wrong_auth_client(tokens_file, huey_database, jobs_file): """Wrong token test client fixture.""" - app.test_client_class = WrongAuthClient - return app.test_client() + from selfprivacy_api.app import app - -@pytest.fixture -def runner(app, tokens_file): - """Flask test runner.""" - return app.test_cli_runner() + client = TestClient(app) + client.headers.update({"Authorization": "Bearer WRONG_TOKEN"}) + return client diff --git a/tests/data/jobs.json b/tests/data/jobs.json new file mode 100644 index 
0000000..0967ef4 --- /dev/null +++ b/tests/data/jobs.json @@ -0,0 +1 @@ +{} diff --git a/tests/test_block_device_utils.py b/tests/test_block_device_utils.py new file mode 100644 index 0000000..2676e6c --- /dev/null +++ b/tests/test_block_device_utils.py @@ -0,0 +1,490 @@ +#!/usr/bin/env python3 +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import json +import subprocess +import pytest + +from selfprivacy_api.utils.block_devices import ( + BlockDevice, + BlockDevices, + get_block_device, + resize_block_device, +) +from tests.common import read_json + +SINGLE_LSBLK_OUTPUT = b""" +{ + "blockdevices": [ + { + "name": "sda1", + "path": "/dev/sda1", + "fsavail": "4614107136", + "fssize": "19814920192", + "fstype": "ext4", + "fsused": "14345314304", + "mountpoints": [ + "/nix/store", "/" + ], + "label": null, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": 20210236928, + "model": null, + "serial": null, + "type": "part" + } + ] +} +""" + + +@pytest.fixture +def lsblk_singular_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=SINGLE_LSBLK_OUTPUT + ) + return mock + + +@pytest.fixture +def failed_check_output_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", + autospec=True, + side_effect=subprocess.CalledProcessError( + returncode=1, cmd=["some", "command"] + ), + ) + return mock + + +@pytest.fixture +def only_root_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "only_root.json") + assert read_json(datadir / "only_root.json")["volumes"][0]["device"] == "/dev/sda1" + assert ( + read_json(datadir / "only_root.json")["volumes"][0]["mountPoint"] + == "/volumes/sda1" + ) + assert read_json(datadir / "only_root.json")["volumes"][0]["filesystem"] == "ext4" + return datadir + + +@pytest.fixture +def no_devices_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_devices.json") + assert read_json(datadir / "no_devices.json")["volumes"] == [] + return datadir + + +@pytest.fixture +def undefined_devices_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") + assert "volumes" not in read_json(datadir / "undefined.json") + return datadir + + +def test_create_block_device_object(lsblk_singular_mock, authorized_client): + output = get_block_device("sda1") + assert lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "/dev/sda1", + ] + assert output == json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0] + + +def test_resize_block_device(lsblk_singular_mock, authorized_client): + result = resize_block_device("sdb") + assert result is True + assert lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "resize2fs", + "sdb", + ] + + +def test_resize_block_device_failed(failed_check_output_mock, authorized_client): + result = resize_block_device("sdb") + assert result is False + assert failed_check_output_mock.call_count == 1 + assert failed_check_output_mock.call_args[0][0] == [ + "resize2fs", + "sdb", + ] + + +VOLUME_LSBLK_OUTPUT = b""" +{ + "blockdevices": [ + { + "name": "sdb", + "path": "/dev/sdb", + "fsavail": "11888545792", + "fssize": "12573614080", + "fstype": "ext4", + "fsused": "24047616", + "mountpoints": [ + 
"/volumes/sdb" + ], + "label": null, + "uuid": "fa9d0026-ee23-4047-b8b1-297ae16fa751", + "size": 12884901888, + "model": "Volume", + "serial": "21378102", + "type": "disk" + } + ] +} +""" + + +def test_create_block_device(lsblk_singular_mock, authorized_client): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + + assert block_device.name == "sdb" + assert block_device.path == "/dev/sdb" + assert block_device.fsavail == "11888545792" + assert block_device.fssize == "12573614080" + assert block_device.fstype == "ext4" + assert block_device.fsused == "24047616" + assert block_device.mountpoints == ["/volumes/sdb"] + assert block_device.label is None + assert block_device.uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" + assert block_device.size == "12884901888" + assert block_device.model == "Volume" + assert block_device.serial == "21378102" + assert block_device.type == "disk" + assert block_device.locked is False + assert str(block_device) == "sdb" + assert ( + repr(block_device) + == "" + ) + assert hash(block_device) == hash("sdb") + + +def test_block_devices_equal(lsblk_singular_mock, authorized_client): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + block_device2 = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + + assert block_device == block_device2 + + +@pytest.fixture +def resize_block_mock(mocker): + mock = mocker.patch( + "selfprivacy_api.utils.block_devices.resize_block_device", + autospec=True, + return_value=True, + ) + return mock + + +def test_call_resize_from_block_device( + lsblk_singular_mock, resize_block_mock, authorized_client +): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + block_device.resize() + assert resize_block_mock.call_count == 1 + assert resize_block_mock.call_args[0][0] == "/dev/sdb" + assert lsblk_singular_mock.call_count == 0 + + +def test_get_stats_from_block_device(lsblk_singular_mock, authorized_client): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + stats = block_device.stats() + assert stats == { + "name": "sda1", + "path": "/dev/sda1", + "fsavail": "4614107136", + "fssize": "19814920192", + "fstype": "ext4", + "fsused": "14345314304", + "mountpoints": ["/nix/store", "/"], + "label": None, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": "20210236928", + "model": None, + "serial": None, + "type": "part", + } + assert lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "/dev/sda1", + ] + + +def test_mount_block_device( + lsblk_singular_mock, only_root_in_userdata, authorized_client +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.mount() + assert result is False + volume = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + result = volume.mount() + assert result is True + assert ( + read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["device"] + == "/dev/sdb" + ) + assert ( + read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["mountPoint"] + == "/volumes/sdb" + ) + assert ( + read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["fsType"] + == "ext4" + ) + + +def test_mount_block_device_when_undefined( + lsblk_singular_mock, undefined_devices_in_userdata, authorized_client +): + block_device = 
BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.mount() + assert result is True + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "device" + ] + == "/dev/sda1" + ) + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "mountPoint" + ] + == "/volumes/sda1" + ) + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "fsType" + ] + == "ext4" + ) + + +def test_unmount_block_device( + lsblk_singular_mock, only_root_in_userdata, authorized_client +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.unmount() + assert result is True + volume = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + result = volume.unmount() + assert result is False + assert len(read_json(only_root_in_userdata / "only_root.json")["volumes"]) == 0 + + +def test_unmount_block_device_when_undefined( + lsblk_singular_mock, undefined_devices_in_userdata, authorized_client +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.unmount() + assert result is False + assert ( + len(read_json(undefined_devices_in_userdata / "undefined.json")["volumes"]) == 0 + ) + + +FULL_LSBLK_OUTPUT = b""" +{ + "blockdevices": [ + { + "name": "sda", + "path": "/dev/sda", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 20480786432, + "model": "QEMU HARDDISK", + "serial": "drive-scsi0-0-0-0", + "type": "disk", + "children": [ + { + "name": "sda1", + "path": "/dev/sda1", + "fsavail": "4605702144", + "fssize": "19814920192", + "fstype": "ext4", + "fsused": "14353719296", + "mountpoints": [ + "/nix/store", "/" + ], + "label": null, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": 20210236928, + "model": null, + "serial": null, + "type": "part" + },{ + "name": "sda14", + "path": "/dev/sda14", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 1048576, + "model": null, + "serial": null, + "type": "part" + },{ + "name": "sda15", + "path": "/dev/sda15", + "fsavail": null, + "fssize": null, + "fstype": "vfat", + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": "6B29-5BA7", + "size": 268435456, + "model": null, + "serial": null, + "type": "part" + } + ] + },{ + "name": "sdb", + "path": "/dev/sdb", + "fsavail": "11888545792", + "fssize": "12573614080", + "fstype": "ext4", + "fsused": "24047616", + "mountpoints": [ + "/volumes/sdb" + ], + "label": null, + "uuid": "fa9d0026-ee23-4047-b8b1-297ae16fa751", + "size": 12884901888, + "model": "Volume", + "serial": "21378102", + "type": "disk" + },{ + "name": "sr0", + "path": "/dev/sr0", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 1073741312, + "model": "QEMU DVD-ROM", + "serial": "QM00003", + "type": "rom" + } + ] +} +""" + + +@pytest.fixture +def lsblk_full_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=FULL_LSBLK_OUTPUT + ) + return mock + + +def test_get_block_devices(lsblk_full_mock, authorized_client): + block_devices = BlockDevices().get_block_devices() + assert len(block_devices) == 2 + assert block_devices[0].name == "sda1" + assert block_devices[0].path == 
"/dev/sda1" + assert block_devices[0].fsavail == "4605702144" + assert block_devices[0].fssize == "19814920192" + assert block_devices[0].fstype == "ext4" + assert block_devices[0].fsused == "14353719296" + assert block_devices[0].mountpoints == ["/nix/store", "/"] + assert block_devices[0].label is None + assert block_devices[0].uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_devices[0].size == "20210236928" + assert block_devices[0].model is None + assert block_devices[0].serial is None + assert block_devices[0].type == "part" + assert block_devices[1].name == "sdb" + assert block_devices[1].path == "/dev/sdb" + assert block_devices[1].fsavail == "11888545792" + assert block_devices[1].fssize == "12573614080" + assert block_devices[1].fstype == "ext4" + assert block_devices[1].fsused == "24047616" + assert block_devices[1].mountpoints == ["/volumes/sdb"] + assert block_devices[1].label is None + assert block_devices[1].uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" + assert block_devices[1].size == "12884901888" + assert block_devices[1].model == "Volume" + assert block_devices[1].serial == "21378102" + assert block_devices[1].type == "disk" + + +def test_get_block_device(lsblk_full_mock, authorized_client): + block_device = BlockDevices().get_block_device("sda1") + assert block_device is not None + assert block_device.name == "sda1" + assert block_device.path == "/dev/sda1" + assert block_device.fsavail == "4605702144" + assert block_device.fssize == "19814920192" + assert block_device.fstype == "ext4" + assert block_device.fsused == "14353719296" + assert block_device.mountpoints == ["/nix/store", "/"] + assert block_device.label is None + assert block_device.uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_device.size == "20210236928" + assert block_device.model is None + assert block_device.serial is None + assert block_device.type == "part" + + +def test_get_nonexistent_block_device(lsblk_full_mock, authorized_client): + block_device = BlockDevices().get_block_device("sda2") + assert block_device is None + + +def test_get_block_devices_by_mountpoint(lsblk_full_mock, authorized_client): + block_devices = BlockDevices().get_block_devices_by_mountpoint("/nix/store") + assert len(block_devices) == 1 + assert block_devices[0].name == "sda1" + assert block_devices[0].path == "/dev/sda1" + assert block_devices[0].fsavail == "4605702144" + assert block_devices[0].fssize == "19814920192" + assert block_devices[0].fstype == "ext4" + assert block_devices[0].fsused == "14353719296" + assert block_devices[0].mountpoints == ["/nix/store", "/"] + assert block_devices[0].label is None + assert block_devices[0].uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_devices[0].size == "20210236928" + assert block_devices[0].model is None + assert block_devices[0].serial is None + assert block_devices[0].type == "part" + + +def test_get_block_devices_by_mountpoint_no_match(lsblk_full_mock, authorized_client): + block_devices = BlockDevices().get_block_devices_by_mountpoint("/foo") + assert len(block_devices) == 0 diff --git a/tests/test_block_device_utils/no_devices.json b/tests/test_block_device_utils/no_devices.json new file mode 100644 index 0000000..97300ca --- /dev/null +++ b/tests/test_block_device_utils/no_devices.json @@ -0,0 +1,54 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": 
"TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "volumes": [ + ] +} diff --git a/tests/test_block_device_utils/only_root.json b/tests/test_block_device_utils/only_root.json new file mode 100644 index 0000000..0f8ec0d --- /dev/null +++ b/tests/test_block_device_utils/only_root.json @@ -0,0 +1,59 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "volumes": [ + { + "device": "/dev/sda1", + "mountPoint": "/volumes/sda1", + "filesystem": "ext4" + } + ] +} diff --git a/tests/test_block_device_utils/undefined.json b/tests/test_block_device_utils/undefined.json new file mode 100644 index 0000000..eb660cc --- /dev/null +++ b/tests/test_block_device_utils/undefined.json @@ -0,0 +1,52 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} diff --git a/tests/test_common.py b/tests/test_common.py index db60d84..e5d3f62 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,6 +1,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument import json +import os import pytest from selfprivacy_api.utils import WriteUserData, ReadUserData @@ -9,19 +10,13 @@ from selfprivacy_api.utils import WriteUserData, ReadUserData def test_get_api_version(authorized_client): response = authorized_client.get("/api/version") assert 
response.status_code == 200 - assert "version" in response.get_json() + assert "version" in response.json() def test_get_api_version_unauthorized(client): response = client.get("/api/version") assert response.status_code == 200 - assert "version" in response.get_json() - - -def test_get_swagger_json(authorized_client): - response = authorized_client.get("/api/swagger.json") - assert response.status_code == 200 - assert "swagger" in response.get_json() + assert "version" in response.json() def test_read_invalid_user_data(): @@ -34,3 +29,12 @@ def test_write_invalid_user_data(): with pytest.raises(ValueError): with WriteUserData("invalid") as user_data: pass + + +@pytest.fixture +def test_mode(): + return os.environ.get("TEST_MODE") + + +def test_the_test_mode(test_mode): + assert test_mode == "true" diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py index 6343d8f..16c7c4d 100644 --- a/tests/test_graphql/test_api.py +++ b/tests/test_graphql/test_api.py @@ -24,7 +24,7 @@ TOKENS_FILE_CONTETS = { def test_graphql_get_entire_api_data(authorized_client, tokens_file): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_api_query( @@ -33,25 +33,25 @@ def test_graphql_get_entire_api_data(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert "version" in response.get_json()["data"]["api"] - assert response.json["data"]["api"]["devices"] is not None - assert len(response.json["data"]["api"]["devices"]) == 2 + assert response.json().get("data") is not None + assert "version" in response.json()["data"]["api"] + assert response.json()["data"]["api"]["devices"] is not None + assert len(response.json()["data"]["api"]["devices"]) == 2 assert ( - response.json["data"]["api"]["devices"][0]["creationDate"] + response.json()["data"]["api"]["devices"][0]["creationDate"] == "2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][0]["isCaller"] is True - assert response.json["data"]["api"]["devices"][0]["name"] == "test_token" + assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True + assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token" assert ( - response.json["data"]["api"]["devices"][1]["creationDate"] + response.json()["data"]["api"]["devices"][1]["creationDate"] == "2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][1]["isCaller"] is False - assert response.json["data"]["api"]["devices"][1]["name"] == "test_token2" - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is False - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False + assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + 
assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 627d06a..d8dc974 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -31,35 +31,35 @@ devices { def test_graphql_tokens_info(authorized_client, tokens_file): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_DEVICES_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["devices"] is not None - assert len(response.json["data"]["api"]["devices"]) == 2 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["devices"] is not None + assert len(response.json()["data"]["api"]["devices"]) == 2 assert ( - response.json["data"]["api"]["devices"][0]["creationDate"] + response.json()["data"]["api"]["devices"][0]["creationDate"] == "2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][0]["isCaller"] is True - assert response.json["data"]["api"]["devices"][0]["name"] == "test_token" + assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True + assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token" assert ( - response.json["data"]["api"]["devices"][1]["creationDate"] + response.json()["data"]["api"]["devices"][1]["creationDate"] == "2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][1]["isCaller"] is False - assert response.json["data"]["api"]["devices"][1]["name"] == "test_token2" + assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False + assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" def test_graphql_tokens_info_unauthorized(client, tokens_file): - response = client.get( + response = client.post( "/graphql", json={"query": generate_api_query([API_DEVICES_QUERY])}, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None DELETE_TOKEN_MUTATION = """ @@ -84,7 +84,7 @@ def test_graphql_delete_token_unauthorized(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_delete_token(authorized_client, tokens_file): @@ -98,10 +98,10 @@ def test_graphql_delete_token(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["deleteDeviceApiToken"]["success"] is True - assert response.json["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json["data"]["deleteDeviceApiToken"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is True + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 200 assert read_json(tokens_file) == { "tokens": [ { @@ -124,10 +124,10 @@ def test_graphql_delete_self_token(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["deleteDeviceApiToken"]["success"] is False - assert response.json["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json["data"]["deleteDeviceApiToken"]["code"] == 400 + 
assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 400 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -142,10 +142,10 @@ def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["deleteDeviceApiToken"]["success"] is False - assert response.json["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json["data"]["deleteDeviceApiToken"]["code"] == 404 + assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 404 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -167,7 +167,7 @@ def test_graphql_refresh_token_unauthorized(client, tokens_file): json={"query": REFRESH_TOKEN_MUTATION}, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_refresh_token(authorized_client, tokens_file): @@ -176,12 +176,12 @@ def test_graphql_refresh_token(authorized_client, tokens_file): json={"query": REFRESH_TOKEN_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["refreshDeviceApiToken"]["success"] is True - assert response.json["data"]["refreshDeviceApiToken"]["message"] is not None - assert response.json["data"]["refreshDeviceApiToken"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["refreshDeviceApiToken"]["success"] is True + assert response.json()["data"]["refreshDeviceApiToken"]["message"] is not None + assert response.json()["data"]["refreshDeviceApiToken"]["code"] == 200 assert read_json(tokens_file)["tokens"][0] == { - "token": response.json["data"]["refreshDeviceApiToken"]["token"], + "token": response.json()["data"]["refreshDeviceApiToken"]["token"], "name": "test_token", "date": "2022-01-14 08:31:10.789314", } @@ -205,7 +205,7 @@ def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): @@ -214,14 +214,16 @@ def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + 
) token = ( Mnemonic(language="english") - .to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == token @@ -249,7 +251,7 @@ def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): @@ -258,14 +260,16 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + ) token = ( Mnemonic(language="english") - .to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == token @@ -274,10 +278,10 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): json={"query": INVALIDATE_NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["invalidateNewDeviceApiKey"]["success"] is True - assert response.json["data"]["invalidateNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["invalidateNewDeviceApiKey"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["invalidateNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["invalidateNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["invalidateNewDeviceApiKey"]["code"] == 200 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -299,11 +303,11 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - mnemonic_key = response.json["data"]["getNewDeviceApiKey"]["key"] + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key @@ -320,11 +324,13 
@@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is True - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 - token = response.json["data"]["authorizeWithNewDeviceApiKey"]["token"] + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] assert read_json(tokens_file)["tokens"][2]["token"] == token assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" @@ -343,10 +349,12 @@ def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -356,11 +364,11 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - mnemonic_key = response.json["data"]["getNewDeviceApiKey"]["key"] + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key @@ -377,13 +385,15 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is True - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert 
response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 assert ( read_json(tokens_file)["tokens"][2]["token"] - == response.json["data"]["authorizeWithNewDeviceApiKey"]["token"] + == response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] ) assert read_json(tokens_file)["tokens"][2]["name"] == "new_token" @@ -400,10 +410,12 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file)["tokens"].__len__() == 3 @@ -415,14 +427,16 @@ def test_graphql_get_and_authorize_key_after_12_minutes( json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + ) key = ( Mnemonic(language="english") - .to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == key @@ -446,10 +460,12 @@ def test_graphql_get_and_authorize_key_after_12_minutes( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 def test_graphql_authorize_without_token(client, tokens_file): @@ -465,4 +481,4 @@ def test_graphql_authorize_without_token(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index be0fdff..c5e229e 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -37,22 +37,22 @@ def test_graphql_recovery_key_status_unauthorized(client, tokens_file): 
json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_file): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is False - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None API_RECOVERY_KEY_GENERATE_MUTATION = """ @@ -86,18 +86,19 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None assert ( - response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + == 18 ) assert read_json(tokens_file)["recovery_token"] is not None time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None - key = response.json["data"]["getNewRecoveryApiKey"]["key"] + key = response.json()["data"]["getNewRecoveryApiKey"]["key"] assert ( datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) @@ -105,20 +106,20 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): ) # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"][ + assert response.json().get("data") is not None + assert 
response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"][ "creationDate" ] == time_generated.replace("Z", "") - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None # Try to use token response = client.post( @@ -134,13 +135,13 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] ) assert read_json(tokens_file)["tokens"][2]["name"] == "new_test_token" @@ -159,13 +160,13 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] ) assert read_json(tokens_file)["tokens"][3]["name"] == "new_test_token2" @@ -188,17 +189,18 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert 
response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None assert ( - response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + == 18 ) assert read_json(tokens_file)["recovery_token"] is not None - key = response.json["data"]["getNewRecoveryApiKey"]["key"] + key = response.json()["data"]["getNewRecoveryApiKey"]["key"] assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str assert read_json(tokens_file)["recovery_token"]["token"] == mnemonic_to_hex(key) @@ -211,23 +213,23 @@ def test_graphql_generate_recovery_key_with_expiration_date( ) # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"][ + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"][ "creationDate" ] == time_generated.replace("Z", "") assert ( - response.json["data"]["api"]["recoveryKey"]["expirationDate"] + response.json()["data"]["api"]["recoveryKey"]["expirationDate"] == expiration_date_str ) - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None # Try to use token response = authorized_client.post( @@ -243,13 +245,13 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] ) @@ -267,13 +269,13 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + 
assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] ) @@ -296,30 +298,32 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is False - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 404 - assert response.json["data"]["useRecoveryApiKey"]["token"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is False + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is None assert read_json(tokens_file)["tokens"] == new_data["tokens"] # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False assert ( - response.json["data"]["api"]["recoveryKey"]["expirationDate"] + response.json()["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + ) + assert ( + response.json()["data"]["api"]["recoveryKey"]["expirationDate"] == new_data["recovery_token"]["expiration"] ) - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None def test_graphql_generate_recovery_key_with_expiration_in_the_past( @@ -340,11 +344,11 @@ def test_graphql_generate_recovery_key_with_expiration_in_the_past( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None assert "recovery_token" not in read_json(tokens_file) @@ -366,7 +370,7 @@ def test_graphql_generate_recovery_key_with_invalid_time_format( }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is 
None assert "recovery_token" not in read_json(tokens_file) @@ -388,31 +392,31 @@ def test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None - mnemonic_key = response.json["data"]["getNewRecoveryApiKey"]["key"] + mnemonic_key = response.json()["data"]["getNewRecoveryApiKey"]["key"] key = mnemonic_to_hex(mnemonic_key) assert read_json(tokens_file)["recovery_token"]["token"] == key assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 2 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 2 # Try to use token response = authorized_client.post( @@ -428,25 +432,25 @@ def test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert 
response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 1 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 1 # Try to use token response = authorized_client.post( @@ -462,25 +466,25 @@ def test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 0 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 0 # Try to use token response = authorized_client.post( @@ -496,11 +500,11 @@ def test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is False - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 404 - assert response.json["data"]["useRecoveryApiKey"]["token"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is False + assert 
response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is None def test_graphql_generate_recovery_key_with_negative_uses( @@ -519,11 +523,11 @@ def test_graphql_generate_recovery_key_with_negative_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): @@ -540,8 +544,8 @@ def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None diff --git a/tests/test_graphql/test_api_version.py b/tests/test_graphql/test_api_version.py index 8f76035..64bcc36 100644 --- a/tests/test_graphql/test_api_version.py +++ b/tests/test_graphql/test_api_version.py @@ -8,18 +8,18 @@ API_VERSION_QUERY = "version" def test_graphql_get_api_version(authorized_client): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_VERSION_QUERY])}, ) assert response.status_code == 200 - assert "version" in response.get_json()["data"]["api"] + assert "version" in response.json()["data"]["api"] def test_graphql_api_version_unauthorized(client): - response = client.get( + response = client.post( "/graphql", json={"query": generate_api_query([API_VERSION_QUERY])}, ) assert response.status_code == 200 - assert "version" in response.get_json()["data"]["api"] + assert "version" in response.json()["data"]["api"] diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 7b48c83..4831692 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -71,7 +71,7 @@ def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_popen): @@ -88,14 +88,14 @@ def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_pope }, ) assert response.status_code == 200 - 
assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 201 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is True - assert response.json["data"]["addSshKey"]["user"]["username"] == "user1" - assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["addSshKey"]["user"]["username"] == "user1" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-rsa KEY user1@pc", "ssh-rsa KEY test_key@pc", ] @@ -115,14 +115,14 @@ def test_graphql_add_root_ssh_key(authorized_client, some_users, mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 201 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is True - assert response.json["data"]["addSshKey"]["user"]["username"] == "root" - assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["addSshKey"]["user"]["username"] == "root" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-ed25519 KEY test@pc", "ssh-rsa KEY test_key@pc", ] @@ -142,14 +142,14 @@ def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 201 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is True - assert response.json["data"]["addSshKey"]["user"]["username"] == "tester" - assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["addSshKey"]["user"]["username"] == "tester" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-rsa KEY test@pc", "ssh-rsa KEY test_key@pc", ] @@ -169,11 +169,11 @@ def test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 400 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is False + assert response.json()["data"]["addSshKey"]["code"] == 400 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is False def test_graphql_add_ssh_key_nonexistent_user( @@ -192,11 +192,11 @@ def test_graphql_add_ssh_key_nonexistent_user( }, ) assert response.status_code == 200 - assert 
response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 404 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is False + assert response.json()["data"]["addSshKey"]["code"] == 404 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is False API_REMOVE_SSH_KEY_MUTATION = """ @@ -228,7 +228,7 @@ def test_graphql_remove_ssh_key_unauthorized(client, some_users, mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_popen): @@ -245,14 +245,14 @@ def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_p }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 200 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True - assert response.json["data"]["removeSshKey"]["user"]["username"] == "user1" - assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert response.json()["data"]["removeSshKey"]["user"]["username"] == "user1" + assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_root_ssh_key( @@ -271,14 +271,14 @@ def test_graphql_remove_root_ssh_key( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 200 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True - assert response.json["data"]["removeSshKey"]["user"]["username"] == "root" - assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert response.json()["data"]["removeSshKey"]["user"]["username"] == "root" + assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_main_ssh_key( @@ -297,14 +297,14 @@ def test_graphql_remove_main_ssh_key( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 200 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True - assert response.json["data"]["removeSshKey"]["user"]["username"] == "tester" - assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert 
response.json()["data"]["removeSshKey"]["user"]["username"] == "tester" + assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_nonexistent_ssh_key( @@ -323,11 +323,11 @@ def test_graphql_remove_nonexistent_ssh_key( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 404 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is False + assert response.json()["data"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is False def test_graphql_remove_ssh_key_nonexistent_user( @@ -346,8 +346,8 @@ def test_graphql_remove_ssh_key_nonexistent_user( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 404 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is False + assert response.json()["data"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is False diff --git a/tests/test_graphql/_test_system.py b/tests/test_graphql/test_system.py similarity index 65% rename from tests/test_graphql/_test_system.py rename to tests/test_graphql/test_system.py index 476846a..a021a16 100644 --- a/tests/test_graphql/_test_system.py +++ b/tests/test_graphql/test_system.py @@ -124,6 +124,7 @@ def mock_dkim_key(mocker): autospec=True, return_value="I am a DKIM key", ) + return mock API_PYTHON_VERSION_INFO = """ @@ -137,27 +138,27 @@ def test_graphql_get_python_version_wrong_auth( wrong_auth_client, mock_subprocess_check_output ): """Test wrong auth""" - response = wrong_auth_client.get( + response = wrong_auth_client.post( "/graphql", json={ "query": generate_system_query([API_PYTHON_VERSION_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_python_version(authorized_client, mock_subprocess_check_output): """Test get python version""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_PYTHON_VERSION_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["system"]["info"]["pythonVersion"] == "Testing Linux" + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["info"]["pythonVersion"] == "Testing Linux" assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"] @@ -173,7 +174,7 @@ def test_graphql_get_system_version_unauthorized( wrong_auth_client, mock_subprocess_check_output ): """Test wrong auth""" - response = wrong_auth_client.get( + response = wrong_auth_client.post( "/graphql", json={ "query": generate_system_query([API_SYSTEM_VERSION_INFO]), @@ -181,14 +182,14 @@ def test_graphql_get_system_version_unauthorized( ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert 
mock_subprocess_check_output.call_count == 0 def test_graphql_get_system_version(authorized_client, mock_subprocess_check_output): """Test get system version""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_SYSTEM_VERSION_INFO]), @@ -196,9 +197,9 @@ def test_graphql_get_system_version(authorized_client, mock_subprocess_check_out ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["system"]["info"]["systemVersion"] == "Testing Linux" + assert response.json()["data"]["system"]["info"]["systemVersion"] == "Testing Linux" assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] @@ -209,7 +210,7 @@ domainInfo { hostname provider requiredDnsRecords { - type + recordType name content ttl @@ -219,14 +220,16 @@ domainInfo { """ -def dns_record(type="A", name="test.tld", content=None, ttl=3600, priority=None): +def dns_record( + record_type="A", name="test-domain.tld", content=None, ttl=3600, priority=None +): if content is None: - if type == "A": + if record_type == "A": content = "157.90.247.192" - elif type == "AAAA": + elif record_type == "AAAA": content = "fe80::9400:ff:fef1:34ae" return { - "type": type, + "recordType": record_type, "name": name, "content": content, "ttl": ttl, @@ -237,7 +240,7 @@ def dns_record(type="A", name="test.tld", content=None, ttl=3600, priority=None) def is_dns_record_in_array(records, dns_record) -> bool: for record in records: if ( - record["type"] == dns_record["type"] + record["recordType"] == dns_record["recordType"] and record["name"] == dns_record["name"] and record["content"] == dns_record["content"] and record["ttl"] == dns_record["ttl"] @@ -248,66 +251,73 @@ def is_dns_record_in_array(records, dns_record) -> bool: def test_graphql_get_domain( - authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on + authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on, mock_dkim_key ): """Test get domain""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_GET_DOMAIN_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["system"]["domainInfo"]["domain"] == "test.tld" - assert response.json["data"]["system"]["domainInfo"]["hostname"] == "test-instance" - assert response.json["data"]["system"]["domainInfo"]["provider"] == "HETZNER" - dns_records = response.json["data"]["system"]["domainInfo"]["requiredDnsRecords"] + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["domainInfo"]["domain"] == "test.tld" + assert ( + response.json()["data"]["system"]["domainInfo"]["hostname"] == "test-instance" + ) + assert response.json()["data"]["system"]["domainInfo"]["provider"] == "CLOUDFLARE" + dns_records = response.json()["data"]["system"]["domainInfo"]["requiredDnsRecords"] assert is_dns_record_in_array(dns_records, dns_record()) - assert is_dns_record_in_array(dns_records, dns_record(type="AAAA")) - assert is_dns_record_in_array(dns_records, dns_record(name="api.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(record_type="AAAA")) + assert is_dns_record_in_array(dns_records, dns_record(name="api")) assert is_dns_record_in_array( - dns_records, dns_record(name="api.test.tld", type="AAAA") + 
dns_records, dns_record(name="api", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="cloud.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="cloud")) assert is_dns_record_in_array( - dns_records, dns_record(name="cloud.test.tld", type="AAAA") + dns_records, dns_record(name="cloud", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="git.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="git")) assert is_dns_record_in_array( - dns_records, dns_record(name="git.test.tld", type="AAAA") + dns_records, dns_record(name="git", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="meet.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="meet")) assert is_dns_record_in_array( - dns_records, dns_record(name="meet.test.tld", type="AAAA") + dns_records, dns_record(name="meet", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="password.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="password")) assert is_dns_record_in_array( - dns_records, dns_record(name="password.test.tld", type="AAAA") + dns_records, dns_record(name="password", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="social.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="social")) assert is_dns_record_in_array( - dns_records, dns_record(name="social.test.tld", type="AAAA") + dns_records, dns_record(name="social", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="vpn.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="vpn")) assert is_dns_record_in_array( - dns_records, dns_record(name="vpn.test.tld", type="AAAA") - ) - assert is_dns_record_in_array( - dns_records, - dns_record(name="test.tld", type="MX", content="test.tld", priority=10), + dns_records, dns_record(name="vpn", record_type="AAAA") ) assert is_dns_record_in_array( dns_records, dns_record( - name="_dmarc.test.tld", type="TXT", content="v=DMARC1; p=none", ttl=18000 + name="test-domain.tld", + record_type="MX", + content="test-domain.tld", + priority=10, ), ) assert is_dns_record_in_array( dns_records, dns_record( - name="test.tld", - type="TXT", + name="_dmarc", record_type="TXT", content="v=DMARC1; p=none", ttl=18000 + ), + ) + assert is_dns_record_in_array( + dns_records, + dns_record( + name="test-domain.tld", + record_type="TXT", content="v=spf1 a mx ip4:157.90.247.192 -all", ttl=18000, ), @@ -315,8 +325,8 @@ def test_graphql_get_domain( assert is_dns_record_in_array( dns_records, dns_record( - name="selector._domainkey.test.tld", - type="TXT", + name="selector._domainkey", + record_type="TXT", content="I am a DKIM key", ttl=18000, ), @@ -332,40 +342,42 @@ settings { def test_graphql_get_timezone_unauthorized(client, turned_on): """Test get timezone without auth""" - response = client.get( + response = client.post( "/graphql", json={ "query": generate_system_query([API_GET_TIMEZONE]), }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_timezone(authorized_client, turned_on): """Test get timezone""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_GET_TIMEZONE]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert 
response.json["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): """Test get timezone when none is defined in config""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_GET_TIMEZONE]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["system"]["settings"]["timezone"] == "Europe/Uzhgorod" + assert response.json().get("data") is not None + assert ( + response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Uzhgorod" + ) API_CHANGE_TIMEZONE_MUTATION = """ @@ -392,7 +404,7 @@ def test_graphql_change_timezone_unauthorized(client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_change_timezone(authorized_client, turned_on): @@ -407,11 +419,11 @@ def test_graphql_change_timezone(authorized_client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is True - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 200 - assert response.json["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is True + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 200 + assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Helsinki" @@ -427,11 +439,11 @@ def test_graphql_change_timezone_on_undefined(authorized_client, undefined_confi }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is True - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 200 - assert response.json["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is True + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 200 + assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" assert ( read_json(undefined_config / "undefined.json")["timezone"] == "Europe/Helsinki" ) @@ -449,11 +461,11 @@ def test_graphql_change_timezone_without_timezone(authorized_client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is False - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 400 - assert response.json["data"]["changeTimezone"]["timezone"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is False + assert 
response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["changeTimezone"]["timezone"] is None assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -469,18 +481,18 @@ def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is False - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 400 - assert response.json["data"]["changeTimezone"]["timezone"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is False + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["changeTimezone"]["timezone"] is None assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" API_GET_AUTO_UPGRADE_SETTINGS_QUERY = """ settings { autoUpgrade { - enableAutoUpgrade + enable allowReboot } } @@ -489,72 +501,90 @@ settings { def test_graphql_get_auto_upgrade_unauthorized(client, turned_on): """Test get auto upgrade settings without auth""" - response = client.get( + response = client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_auto_upgrade(authorized_client, turned_on): """Test get auto upgrade settings""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is True + assert response.json().get("data") is not None + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True + ) + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is True + ) def test_graphql_get_auto_upgrade_on_undefined(authorized_client, undefined_config): """Test get auto upgrade settings when none is defined in config""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert response.json().get("data") is not None + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True + ) + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is False + ) def test_graphql_get_auto_upgrade_without_vlaues(authorized_client, no_values): """Test get auto upgrade settings without values""" 
- response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert response.json().get("data") is not None + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True + ) + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is False + ) def test_graphql_get_auto_upgrade_turned_off(authorized_client, turned_off): """Test get auto upgrade settings when turned off""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None assert ( - response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is False + response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is False + ) + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is False ) - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False API_CHANGE_AUTO_UPGRADE_SETTINGS = """ @@ -585,7 +615,7 @@ def test_graphql_change_auto_upgrade_unauthorized(client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_change_auto_upgrade(authorized_client, turned_on): @@ -603,14 +633,15 @@ def test_graphql_change_auto_upgrade(authorized_client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["enable"] is False assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["allowReboot"] is True @@ -630,14 +661,15 @@ def test_graphql_change_auto_upgrade_on_undefined(authorized_client, undefined_c }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert 
response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert ( read_json(undefined_config / "undefined.json")["autoUpgrade"]["enable"] is False ) @@ -662,14 +694,15 @@ def test_graphql_change_auto_upgrade_without_vlaues(authorized_client, no_values }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is True ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(no_values / "no_values.json")["autoUpgrade"]["enable"] is True assert read_json(no_values / "no_values.json")["autoUpgrade"]["allowReboot"] is True @@ -689,14 +722,15 @@ def test_graphql_change_auto_upgrade_turned_off(authorized_client, turned_off): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is True ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True @@ -717,14 +751,15 @@ def test_grphql_change_auto_upgrade_without_enable(authorized_client, turned_off }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert 
response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True @@ -747,14 +782,15 @@ def test_graphql_change_auto_upgrade_without_allow_reboot( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is True ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False @@ -773,14 +809,15 @@ def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False @@ -788,7 +825,7 @@ def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ API_PULL_SYSTEM_CONFIGURATION_MUTATION = """ -mutation testPullSystemConfiguration() { +mutation 
testPullSystemConfiguration { pullRepositoryChanges { success message @@ -807,7 +844,7 @@ def test_graphql_pull_system_configuration_unauthorized(client, mock_subprocess_ ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -823,10 +860,10 @@ def test_graphql_pull_system_configuration( ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["pullRepositoryChanges"]["success"] is True - assert response.json["data"]["pullRepositoryChanges"]["message"] is not None - assert response.json["data"]["pullRepositoryChanges"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["pullRepositoryChanges"]["success"] is True + assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None + assert response.json()["data"]["pullRepositoryChanges"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == ["git", "pull"] @@ -848,10 +885,10 @@ def test_graphql_pull_system_broken_repo( ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["pullRepositoryChanges"]["success"] is False - assert response.json["data"]["pullRepositoryChanges"]["message"] is not None - assert response.json["data"]["pullRepositoryChanges"]["code"] == 500 + assert response.json().get("data") is not None + assert response.json()["data"]["pullRepositoryChanges"]["success"] is False + assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None + assert response.json()["data"]["pullRepositoryChanges"]["code"] == 500 assert mock_broken_service.call_count == 1 assert mock_os_chdir.call_count == 2 diff --git a/tests/test_graphql/test_system/turned_on.json b/tests/test_graphql/test_system/turned_on.json index 337e47f..821875b 100644 --- a/tests/test_graphql/test_system/turned_on.json +++ b/tests/test_graphql/test_system/turned_on.json @@ -33,7 +33,7 @@ }, "username": "tester", "gitea": { - "enable": false + "enable": true }, "ocserv": { "enable": true @@ -41,6 +41,9 @@ "pleroma": { "enable": true }, + "jitsi": { + "enable": true + }, "autoUpgrade": { "enable": true, "allowReboot": true @@ -49,4 +52,4 @@ "sshKeys": [ "ssh-rsa KEY test@pc" ] -} \ No newline at end of file +} diff --git a/tests/test_graphql/test_system_nixos_tasks.py b/tests/test_graphql/test_system_nixos_tasks.py index 601c353..3e823b6 100644 --- a/tests/test_graphql/test_system_nixos_tasks.py +++ b/tests/test_graphql/test_system_nixos_tasks.py @@ -72,7 +72,7 @@ def test_graphql_system_rebuild_unauthorized(client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -85,10 +85,10 @@ def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["runSystemRebuild"]["success"] is True - assert response.json["data"]["runSystemRebuild"]["message"] is not None - assert response.json["data"]["runSystemRebuild"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["runSystemRebuild"]["success"] is True + assert response.json()["data"]["runSystemRebuild"]["message"] is not None + assert 
response.json()["data"]["runSystemRebuild"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -117,7 +117,7 @@ def test_graphql_system_upgrade_unauthorized(client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -130,10 +130,10 @@ def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["runSystemUpgrade"]["success"] is True - assert response.json["data"]["runSystemUpgrade"]["message"] is not None - assert response.json["data"]["runSystemUpgrade"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["runSystemUpgrade"]["success"] is True + assert response.json()["data"]["runSystemUpgrade"]["message"] is not None + assert response.json()["data"]["runSystemUpgrade"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -162,7 +162,7 @@ def test_graphql_system_rollback_unauthorized(client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -175,10 +175,10 @@ def test_graphql_system_rollback(authorized_client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["runSystemRollback"]["success"] is True - assert response.json["data"]["runSystemRollback"]["message"] is not None - assert response.json["data"]["runSystemRollback"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["runSystemRollback"]["success"] is True + assert response.json()["data"]["runSystemRollback"]["message"] is not None + assert response.json()["data"]["runSystemRollback"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -207,7 +207,7 @@ def test_graphql_reboot_system_unauthorized(client, mock_subprocess_popen): ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -221,11 +221,11 @@ def test_graphql_reboot_system(authorized_client, mock_subprocess_popen): ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["rebootSystem"]["success"] is True - assert response.json["data"]["rebootSystem"]["message"] is not None - assert response.json["data"]["rebootSystem"]["code"] == 200 + assert response.json()["data"]["rebootSystem"]["success"] is True + assert response.json()["data"]["rebootSystem"]["message"] is not None + assert response.json()["data"]["rebootSystem"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == ["reboot"] diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index afae1da..c36dcb2 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -119,53 +119,53 @@ allUsers { def test_graphql_get_users_unauthorized(client, some_users, 
mock_subprocess_popen): """Test wrong auth""" - response = client.get( + response = client.post( "/graphql", json={ "query": generate_users_query([API_USERS_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_some_users(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_users_query([API_USERS_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert len(response.json["data"]["users"]["allUsers"]) == 4 - assert response.json["data"]["users"]["allUsers"][0]["username"] == "user1" - assert response.json["data"]["users"]["allUsers"][0]["sshKeys"] == [ + assert response.json().get("data") is not None + assert len(response.json()["data"]["users"]["allUsers"]) == 4 + assert response.json()["data"]["users"]["allUsers"][0]["username"] == "user1" + assert response.json()["data"]["users"]["allUsers"][0]["sshKeys"] == [ "ssh-rsa KEY user1@pc" ] - assert response.json["data"]["users"]["allUsers"][1]["username"] == "user2" - assert response.json["data"]["users"]["allUsers"][1]["sshKeys"] == [] + assert response.json()["data"]["users"]["allUsers"][1]["username"] == "user2" + assert response.json()["data"]["users"]["allUsers"][1]["sshKeys"] == [] - assert response.json["data"]["users"]["allUsers"][3]["username"] == "tester" - assert response.json["data"]["users"]["allUsers"][3]["sshKeys"] == [ + assert response.json()["data"]["users"]["allUsers"][3]["username"] == "tester" + assert response.json()["data"]["users"]["allUsers"][3]["sshKeys"] == [ "ssh-rsa KEY test@pc" ] def test_graphql_get_no_users(authorized_client, no_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_users_query([API_USERS_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["allUsers"]) == 1 - assert response.json["data"]["users"]["allUsers"][0]["username"] == "tester" - assert response.json["data"]["users"]["allUsers"][0]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["allUsers"]) == 1 + assert response.json()["data"]["users"]["allUsers"][0]["username"] == "tester" + assert response.json()["data"]["users"]["allUsers"][0]["sshKeys"] == [ "ssh-rsa KEY test@pc" ] @@ -183,7 +183,7 @@ query TestUsers($username: String!) 
{ def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_popen): - response = client.get( + response = client.post( "/graphql", json={ "query": API_GET_USERS, @@ -193,12 +193,12 @@ def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_pop }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -208,17 +208,17 @@ def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert response.json["data"]["users"]["getUser"]["username"] == "user1" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "user1" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ "ssh-rsa KEY user1@pc" ] def test_graphql_get_some_user(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -228,15 +228,15 @@ def test_graphql_get_some_user(authorized_client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert response.json["data"]["users"]["getUser"]["username"] == "user2" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [] + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "user2" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [] def test_graphql_get_root_user(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -246,17 +246,17 @@ def test_graphql_get_root_user(authorized_client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert response.json["data"]["users"]["getUser"]["username"] == "root" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "root" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ "ssh-ed25519 KEY test@pc" ] def test_graphql_get_main_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -266,11 +266,11 @@ def test_graphql_get_main_user(authorized_client, one_user, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert 
response.json["data"]["users"]["getUser"]["username"] == "tester" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "tester" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ "ssh-rsa KEY test@pc" ] @@ -278,7 +278,7 @@ def test_graphql_get_main_user(authorized_client, one_user, mock_subprocess_pope def test_graphql_get_nonexistent_user( authorized_client, one_user, mock_subprocess_popen ): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -288,9 +288,9 @@ def test_graphql_get_nonexistent_user( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["users"]["getUser"] is None + assert response.json()["data"]["users"]["getUser"] is None API_CREATE_USERS_MUTATION = """ @@ -322,7 +322,7 @@ def test_graphql_add_user_unauthorize(client, one_user, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): @@ -339,14 +339,14 @@ def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 201 - assert response.json["data"]["createUser"]["success"] is True + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 201 + assert response.json()["data"]["createUser"]["success"] is True - assert response.json["data"]["createUser"]["user"]["username"] == "user2" - assert response.json["data"]["createUser"]["user"]["sshKeys"] == [] + assert response.json()["data"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] def test_graphql_add_undefined_settings( @@ -365,14 +365,14 @@ def test_graphql_add_undefined_settings( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 201 - assert response.json["data"]["createUser"]["success"] is True + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 201 + assert response.json()["data"]["createUser"]["success"] is True - assert response.json["data"]["createUser"]["user"]["username"] == "user2" - assert response.json["data"]["createUser"]["user"]["sshKeys"] == [] + assert response.json()["data"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] def test_graphql_add_without_password( @@ -391,13 +391,13 @@ def test_graphql_add_without_password( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert 
response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_popen): @@ -414,13 +414,13 @@ def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_p }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None @pytest.mark.parametrize("username", invalid_usernames) @@ -440,13 +440,13 @@ def test_graphql_add_system_username( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 409 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 409 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_popen): @@ -463,15 +463,15 @@ def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 409 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 409 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"]["username"] == "user1" + assert response.json()["data"]["createUser"]["user"]["username"] == "user1" assert ( - response.json["data"]["createUser"]["user"]["sshKeys"][0] + response.json()["data"]["createUser"]["user"]["sshKeys"][0] == "ssh-rsa KEY user1@pc" ) @@ -490,15 +490,15 @@ def test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 409 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert 
response.json()["data"]["createUser"]["code"] == 409 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"]["username"] == "tester" + assert response.json()["data"]["createUser"]["user"]["username"] == "tester" assert ( - response.json["data"]["createUser"]["user"]["sshKeys"][0] + response.json()["data"]["createUser"]["user"]["sshKeys"][0] == "ssh-rsa KEY test@pc" ) @@ -517,13 +517,13 @@ def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None @pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "^-^"]) @@ -543,13 +543,13 @@ def test_graphql_add_invalid_username( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None API_DELETE_USER_MUTATION = """ @@ -572,7 +572,7 @@ def test_graphql_delete_user_unauthorized(client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_popen): @@ -584,11 +584,11 @@ def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["deleteUser"]["code"] == 200 - assert response.json["data"]["deleteUser"]["message"] is not None - assert response.json["data"]["deleteUser"]["success"] is True + assert response.json()["data"]["deleteUser"]["code"] == 200 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is True @pytest.mark.parametrize("username", ["", "def"]) @@ -603,11 +603,11 @@ def test_graphql_delete_nonexistent_users( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["deleteUser"]["code"] == 404 - assert response.json["data"]["deleteUser"]["message"] is not None - assert response.json["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["deleteUser"]["code"] == 404 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert 
response.json()["data"]["deleteUser"]["success"] is False @pytest.mark.parametrize("username", invalid_usernames) @@ -622,14 +622,14 @@ def test_graphql_delete_system_users( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None assert ( - response.json["data"]["deleteUser"]["code"] == 404 - or response.json["data"]["deleteUser"]["code"] == 400 + response.json()["data"]["deleteUser"]["code"] == 404 + or response.json()["data"]["deleteUser"]["code"] == 400 ) - assert response.json["data"]["deleteUser"]["message"] is not None - assert response.json["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is False def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess_popen): @@ -641,11 +641,11 @@ def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["deleteUser"]["code"] == 400 - assert response.json["data"]["deleteUser"]["message"] is not None - assert response.json["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["deleteUser"]["code"] == 400 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is False API_UPDATE_USER_MUTATION = """ @@ -677,7 +677,7 @@ def test_graphql_update_user_unauthorized(client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_update_user(authorized_client, some_users, mock_subprocess_popen): @@ -694,14 +694,14 @@ def test_graphql_update_user(authorized_client, some_users, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["updateUser"]["code"] == 200 - assert response.json["data"]["updateUser"]["message"] is not None - assert response.json["data"]["updateUser"]["success"] is True + assert response.json()["data"]["updateUser"]["code"] == 200 + assert response.json()["data"]["updateUser"]["message"] is not None + assert response.json()["data"]["updateUser"]["success"] is True - assert response.json["data"]["updateUser"]["user"]["username"] == "user1" - assert response.json["data"]["updateUser"]["user"]["sshKeys"] == [ + assert response.json()["data"]["updateUser"]["user"]["username"] == "user1" + assert response.json()["data"]["updateUser"]["user"]["sshKeys"] == [ "ssh-rsa KEY user1@pc" ] assert mock_subprocess_popen.call_count == 1 @@ -723,11 +723,11 @@ def test_graphql_update_nonexistent_user( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["updateUser"]["code"] == 404 - assert response.json["data"]["updateUser"]["message"] is not None - assert response.json["data"]["updateUser"]["success"] is False + assert response.json()["data"]["updateUser"]["code"] == 404 + assert response.json()["data"]["updateUser"]["message"] is not None + assert response.json()["data"]["updateUser"]["success"] is False - assert response.json["data"]["updateUser"]["user"] is None + assert 
response.json()["data"]["updateUser"]["user"] is None assert mock_subprocess_popen.call_count == 1 diff --git a/tests/test_jobs.py b/tests/test_jobs.py new file mode 100644 index 0000000..87f1386 --- /dev/null +++ b/tests/test_jobs.py @@ -0,0 +1,50 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +import json +import pytest + +from selfprivacy_api.utils import WriteUserData, ReadUserData +from selfprivacy_api.jobs import Jobs, JobStatus + + +def test_jobs(authorized_client, jobs_file, shared_datadir): + jobs = Jobs() + assert jobs.get_jobs() == [] + + test_job = jobs.add( + type_id="test", + name="Test job", + description="This is a test job.", + status=JobStatus.CREATED, + status_text="Status text", + progress=0, + ) + + assert jobs.get_jobs() == [test_job] + + jobs.update( + job=test_job, + status=JobStatus.RUNNING, + status_text="Status text", + progress=50, + ) + + assert jobs.get_jobs() == [test_job] + + +@pytest.fixture +def mock_subprocess_run(mocker): + mock = mocker.patch("subprocess.run", autospec=True) + return mock + + +@pytest.fixture +def mock_shutil_move(mocker): + mock = mocker.patch("shutil.move", autospec=True) + return mock + + +@pytest.fixture +def mock_shutil_chown(mocker): + mock = mocker.patch("shutil.chown", autospec=True) + return mock diff --git a/tests/test_network_utils.py b/tests/test_network_utils.py index a7c1511..0662584 100644 --- a/tests/test_network_utils.py +++ b/tests/test_network_utils.py @@ -2,6 +2,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=missing-function-docstring +import subprocess import pytest from selfprivacy_api.utils.network import get_ip4, get_ip6 @@ -30,6 +31,28 @@ def ip_process_mock(mocker): return mock +@pytest.fixture +def failed_ip_process_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", + autospec=True, + return_value=FAILED_OUTPUT_STRING, + ) + return mock + + +@pytest.fixture +def failed_subprocess_call(mocker): + mock = mocker.patch( + "subprocess.check_output", + autospec=True, + side_effect=subprocess.CalledProcessError( + returncode=1, cmd=["ip", "addr", "show", "dev", "eth0"] + ), + ) + return mock + + def test_get_ip4(ip_process_mock): """Test get IPv4 address""" ip4 = get_ip4() @@ -40,3 +63,23 @@ def test_get_ip6(ip_process_mock): """Test get IPv6 address""" ip6 = get_ip6() assert ip6 == "fe80::9400:ff:fef1:34ae" + + +def test_failed_get_ip4(failed_ip_process_mock): + ip4 = get_ip4() + assert ip4 is "" + + +def test_failed_get_ip6(failed_ip_process_mock): + ip6 = get_ip6() + assert ip6 is "" + + +def test_failed_subprocess_get_ip4(failed_subprocess_call): + ip4 = get_ip4() + assert ip4 is "" + + +def test_failed_subprocess_get_ip6(failed_subprocess_call): + ip6 = get_ip6() + assert ip6 is "" diff --git a/tests/test_rest_endpoints/data/jobs.json b/tests/test_rest_endpoints/data/jobs.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tests/test_rest_endpoints/data/jobs.json @@ -0,0 +1 @@ +{} diff --git a/tests/test_rest_endpoints/data/tokens.json b/tests/test_rest_endpoints/data/tokens.json new file mode 100644 index 0000000..9be9d02 --- /dev/null +++ b/tests/test_rest_endpoints/data/tokens.json @@ -0,0 +1,14 @@ +{ + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": "2022-01-14 08:31:10.789314" + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314" + } + ] +} \ No newline at end of file diff --git a/tests/services/data/tokens.json 
b/tests/test_rest_endpoints/services/data/tokens.json similarity index 100% rename from tests/services/data/tokens.json rename to tests/test_rest_endpoints/services/data/tokens.json diff --git a/tests/services/test_bitwarden.py b/tests/test_rest_endpoints/services/test_bitwarden.py similarity index 100% rename from tests/services/test_bitwarden.py rename to tests/test_rest_endpoints/services/test_bitwarden.py diff --git a/tests/services/test_bitwarden/enable_undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json similarity index 100% rename from tests/services/test_bitwarden/enable_undefined.json rename to tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json diff --git a/tests/services/test_bitwarden/turned_off.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json similarity index 100% rename from tests/services/test_bitwarden/turned_off.json rename to tests/test_rest_endpoints/services/test_bitwarden/turned_off.json diff --git a/tests/services/test_bitwarden/turned_on.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json similarity index 100% rename from tests/services/test_bitwarden/turned_on.json rename to tests/test_rest_endpoints/services/test_bitwarden/turned_on.json diff --git a/tests/services/test_bitwarden/undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json similarity index 100% rename from tests/services/test_bitwarden/undefined.json rename to tests/test_rest_endpoints/services/test_bitwarden/undefined.json diff --git a/tests/services/test_gitea.py b/tests/test_rest_endpoints/services/test_gitea.py similarity index 100% rename from tests/services/test_gitea.py rename to tests/test_rest_endpoints/services/test_gitea.py diff --git a/tests/services/test_gitea/enable_undefined.json b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json similarity index 100% rename from tests/services/test_gitea/enable_undefined.json rename to tests/test_rest_endpoints/services/test_gitea/enable_undefined.json diff --git a/tests/services/test_gitea/turned_off.json b/tests/test_rest_endpoints/services/test_gitea/turned_off.json similarity index 100% rename from tests/services/test_gitea/turned_off.json rename to tests/test_rest_endpoints/services/test_gitea/turned_off.json diff --git a/tests/services/test_gitea/turned_on.json b/tests/test_rest_endpoints/services/test_gitea/turned_on.json similarity index 100% rename from tests/services/test_gitea/turned_on.json rename to tests/test_rest_endpoints/services/test_gitea/turned_on.json diff --git a/tests/services/test_gitea/undefined.json b/tests/test_rest_endpoints/services/test_gitea/undefined.json similarity index 100% rename from tests/services/test_gitea/undefined.json rename to tests/test_rest_endpoints/services/test_gitea/undefined.json diff --git a/tests/services/test_mailserver.py b/tests/test_rest_endpoints/services/test_mailserver.py similarity index 91% rename from tests/services/test_mailserver.py rename to tests/test_rest_endpoints/services/test_mailserver.py index a9e5f12..36cf615 100644 --- a/tests/services/test_mailserver.py +++ b/tests/test_rest_endpoints/services/test_mailserver.py @@ -25,7 +25,7 @@ class NoFileMock(ProcessMock): def mock_subproccess_popen(mocker): mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) mocker.patch( - "selfprivacy_api.resources.services.mailserver.get_domain", + "selfprivacy_api.rest.services.get_domain", autospec=True, return_value="example.com", 
) @@ -37,7 +37,7 @@ def mock_subproccess_popen(mocker): def mock_no_file(mocker): mock = mocker.patch("subprocess.Popen", autospec=True, return_value=NoFileMock) mocker.patch( - "selfprivacy_api.resources.services.mailserver.get_domain", + "selfprivacy_api.rest.services.get_domain", autospec=True, return_value="example.com", ) @@ -67,7 +67,7 @@ def test_dkim_key(authorized_client, mock_subproccess_popen): """Test DKIM key""" response = authorized_client.get("/services/mailserver/dkim") assert response.status_code == 200 - assert base64.b64decode(response.data) == b"I am a DKIM key" + assert base64.b64decode(response.text) == b"I am a DKIM key" assert mock_subproccess_popen.call_args[0][0] == [ "cat", "/var/dkim/example.com.selector.txt", diff --git a/tests/services/test_nextcloud.py b/tests/test_rest_endpoints/services/test_nextcloud.py similarity index 100% rename from tests/services/test_nextcloud.py rename to tests/test_rest_endpoints/services/test_nextcloud.py diff --git a/tests/services/test_nextcloud/enable_undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json similarity index 100% rename from tests/services/test_nextcloud/enable_undefined.json rename to tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json diff --git a/tests/services/test_nextcloud/turned_off.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json similarity index 100% rename from tests/services/test_nextcloud/turned_off.json rename to tests/test_rest_endpoints/services/test_nextcloud/turned_off.json diff --git a/tests/services/test_nextcloud/turned_on.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json similarity index 100% rename from tests/services/test_nextcloud/turned_on.json rename to tests/test_rest_endpoints/services/test_nextcloud/turned_on.json diff --git a/tests/services/test_nextcloud/undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json similarity index 100% rename from tests/services/test_nextcloud/undefined.json rename to tests/test_rest_endpoints/services/test_nextcloud/undefined.json diff --git a/tests/services/test_ocserv.py b/tests/test_rest_endpoints/services/test_ocserv.py similarity index 100% rename from tests/services/test_ocserv.py rename to tests/test_rest_endpoints/services/test_ocserv.py diff --git a/tests/services/test_ocserv/enable_undefined.json b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json similarity index 100% rename from tests/services/test_ocserv/enable_undefined.json rename to tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json diff --git a/tests/services/test_ocserv/turned_off.json b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json similarity index 100% rename from tests/services/test_ocserv/turned_off.json rename to tests/test_rest_endpoints/services/test_ocserv/turned_off.json diff --git a/tests/services/test_ocserv/turned_on.json b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json similarity index 100% rename from tests/services/test_ocserv/turned_on.json rename to tests/test_rest_endpoints/services/test_ocserv/turned_on.json diff --git a/tests/services/test_ocserv/undefined.json b/tests/test_rest_endpoints/services/test_ocserv/undefined.json similarity index 100% rename from tests/services/test_ocserv/undefined.json rename to tests/test_rest_endpoints/services/test_ocserv/undefined.json diff --git a/tests/services/test_pleroma.py b/tests/test_rest_endpoints/services/test_pleroma.py similarity 
index 100% rename from tests/services/test_pleroma.py rename to tests/test_rest_endpoints/services/test_pleroma.py diff --git a/tests/services/test_pleroma/enable_undefined.json b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json similarity index 100% rename from tests/services/test_pleroma/enable_undefined.json rename to tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json diff --git a/tests/services/test_pleroma/turned_off.json b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json similarity index 100% rename from tests/services/test_pleroma/turned_off.json rename to tests/test_rest_endpoints/services/test_pleroma/turned_off.json diff --git a/tests/services/test_pleroma/turned_on.json b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json similarity index 100% rename from tests/services/test_pleroma/turned_on.json rename to tests/test_rest_endpoints/services/test_pleroma/turned_on.json diff --git a/tests/services/test_pleroma/undefined.json b/tests/test_rest_endpoints/services/test_pleroma/undefined.json similarity index 100% rename from tests/services/test_pleroma/undefined.json rename to tests/test_rest_endpoints/services/test_pleroma/undefined.json diff --git a/tests/services/test_restic.py b/tests/test_rest_endpoints/services/test_restic.py similarity index 93% rename from tests/services/test_restic.py rename to tests/test_rest_endpoints/services/test_restic.py index 913362f..9502be5 100644 --- a/tests/services/test_restic.py +++ b/tests/test_rest_endpoints/services/test_restic.py @@ -43,7 +43,7 @@ class ResticControllerMock: @pytest.fixture def mock_restic_controller(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerMock, ) @@ -60,7 +60,7 @@ class ResticControllerMockNoKey: @pytest.fixture def mock_restic_controller_no_key(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerMockNoKey, ) @@ -77,7 +77,7 @@ class ResticControllerNotInitialized: @pytest.fixture def mock_restic_controller_not_initialized(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerNotInitialized, ) @@ -94,7 +94,7 @@ class ResticControllerInitializing: @pytest.fixture def mock_restic_controller_initializing(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerInitializing, ) @@ -111,7 +111,7 @@ class ResticControllerBackingUp: @pytest.fixture def mock_restic_controller_backing_up(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerBackingUp, ) @@ -128,7 +128,7 @@ class ResticControllerError: @pytest.fixture def mock_restic_controller_error(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerError, ) @@ -145,7 +145,7 @@ class ResticControllerRestoring: @pytest.fixture def mock_restic_controller_restoring(mocker): mock = mocker.patch( - 
"selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerRestoring, ) @@ -154,9 +154,7 @@ def mock_restic_controller_restoring(mocker): @pytest.fixture def mock_restic_tasks(mocker): - mock = mocker.patch( - "selfprivacy_api.resources.services.restic.restic_tasks", autospec=True - ) + mock = mocker.patch("selfprivacy_api.rest.services.restic_tasks", autospec=True) return mock @@ -197,7 +195,7 @@ def test_get_snapshots_unauthorized(client, mock_restic_controller, mock_restic_ def test_get_snapshots(authorized_client, mock_restic_controller, mock_restic_tasks): response = authorized_client.get("/services/restic/backup/list") assert response.status_code == 200 - assert response.get_json() == MOCKED_SNAPSHOTS + assert response.json() == MOCKED_SNAPSHOTS def test_create_backup_unauthorized(client, mock_restic_controller, mock_restic_tasks): @@ -247,7 +245,7 @@ def test_check_backup_status( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "INITIALIZED", "progress": 0, "error_message": None, @@ -259,7 +257,7 @@ def test_check_backup_status_no_key( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "NO_KEY", "progress": 0, "error_message": None, @@ -271,7 +269,7 @@ def test_check_backup_status_not_initialized( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "NOT_INITIALIZED", "progress": 0, "error_message": None, @@ -283,7 +281,7 @@ def test_check_backup_status_initializing( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "INITIALIZING", "progress": 0, "error_message": None, @@ -295,7 +293,7 @@ def test_check_backup_status_backing_up( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "BACKING_UP", "progress": 0.42, "error_message": None, @@ -307,7 +305,7 @@ def test_check_backup_status_error( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "ERROR", "progress": 0, "error_message": "Error message", @@ -319,7 +317,7 @@ def test_check_backup_status_restoring( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "RESTORING", "progress": 0, "error_message": None, @@ -346,7 +344,7 @@ def test_backup_restore_without_backup_id( authorized_client, mock_restic_controller, mock_restic_tasks ): response = authorized_client.put("/services/restic/backup/restore", json={}) - assert response.status_code == 400 + assert response.status_code == 422 assert mock_restic_tasks.restore_from_backup.call_count == 0 @@ -440,7 +438,7 @@ def test_set_backblaze_config_without_arguments( authorized_client, mock_restic_controller, mock_restic_tasks, some_settings ): response = authorized_client.put("/services/restic/backblaze/config") - 
assert response.status_code == 400 + assert response.status_code == 422 assert mock_restic_tasks.update_keys_from_userdata.call_count == 0 @@ -451,7 +449,7 @@ def test_set_backblaze_config_without_all_values( "/services/restic/backblaze/config", json={"accountId": "123", "applicationKey": "456"}, ) - assert response.status_code == 400 + assert response.status_code == 422 assert mock_restic_tasks.update_keys_from_userdata.call_count == 0 diff --git a/tests/services/test_restic/no_values.json b/tests/test_rest_endpoints/services/test_restic/no_values.json similarity index 100% rename from tests/services/test_restic/no_values.json rename to tests/test_rest_endpoints/services/test_restic/no_values.json diff --git a/tests/services/test_restic/some_values.json b/tests/test_rest_endpoints/services/test_restic/some_values.json similarity index 100% rename from tests/services/test_restic/some_values.json rename to tests/test_rest_endpoints/services/test_restic/some_values.json diff --git a/tests/services/test_restic/undefined.json b/tests/test_rest_endpoints/services/test_restic/undefined.json similarity index 100% rename from tests/services/test_restic/undefined.json rename to tests/test_rest_endpoints/services/test_restic/undefined.json diff --git a/tests/services/test_services.py b/tests/test_rest_endpoints/services/test_services.py similarity index 70% rename from tests/services/test_services.py rename to tests/test_rest_endpoints/services/test_services.py index aed48fb..1108e8c 100644 --- a/tests/services/test_services.py +++ b/tests/test_rest_endpoints/services/test_services.py @@ -9,76 +9,81 @@ def read_json(file_path): def call_args_asserts(mocked_object): - assert mocked_object.call_count == 8 + assert mocked_object.call_count == 7 assert mocked_object.call_args_list[0][0][0] == [ "systemctl", - "status", + "show", "dovecot2.service", ] assert mocked_object.call_args_list[1][0][0] == [ "systemctl", - "status", + "show", "postfix.service", ] assert mocked_object.call_args_list[2][0][0] == [ "systemctl", - "status", - "nginx.service", + "show", + "vaultwarden.service", ] assert mocked_object.call_args_list[3][0][0] == [ "systemctl", - "status", - "vaultwarden.service", + "show", + "gitea.service", ] assert mocked_object.call_args_list[4][0][0] == [ "systemctl", - "status", - "gitea.service", + "show", + "phpfpm-nextcloud.service", ] assert mocked_object.call_args_list[5][0][0] == [ "systemctl", - "status", - "phpfpm-nextcloud.service", + "show", + "ocserv.service", ] assert mocked_object.call_args_list[6][0][0] == [ "systemctl", - "status", - "ocserv.service", - ] - assert mocked_object.call_args_list[7][0][0] == [ - "systemctl", - "status", + "show", "pleroma.service", ] -class ProcessMock: - """Mock subprocess.Popen""" +SUCCESSFUL_STATUS = b""" +Type=oneshot +ExitType=main +Restart=no +NotifyAccess=none +RestartUSec=100ms +LoadState=loaded +ActiveState=active +FreezerState=running +SubState=exited +""" - def __init__(self, args, **kwargs): - self.args = args - self.kwargs = kwargs - - def communicate(): - return (b"", None) - - returncode = 0 - - -class BrokenServiceMock(ProcessMock): - returncode = 3 +FAILED_STATUS = b""" +Type=oneshot +ExitType=main +Restart=no +NotifyAccess=none +RestartUSec=100ms +LoadState=loaded +ActiveState=failed +FreezerState=running +SubState=exited +""" @pytest.fixture def mock_subproccess_popen(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + mock = mocker.patch( + "subprocess.check_output", autospec=True, 
return_value=SUCCESSFUL_STATUS + ) return mock @pytest.fixture def mock_broken_service(mocker): mock = mocker.patch( - "subprocess.Popen", autospec=True, return_value=BrokenServiceMock + "subprocess.check_output", autospec=True, return_value=FAILED_STATUS ) return mock @@ -104,7 +109,7 @@ def test_illegal_methods(authorized_client, mock_subproccess_popen): def test_dkim_key(authorized_client, mock_subproccess_popen): response = authorized_client.get("/services/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "imap": 0, "smtp": 0, "http": 0, @@ -120,14 +125,14 @@ def test_dkim_key(authorized_client, mock_subproccess_popen): def test_no_dkim_key(authorized_client, mock_broken_service): response = authorized_client.get("/services/status") assert response.status_code == 200 - assert response.get_json() == { - "imap": 3, - "smtp": 3, - "http": 3, - "bitwarden": 3, - "gitea": 3, - "nextcloud": 3, - "ocserv": 3, - "pleroma": 3, + assert response.json() == { + "imap": 1, + "smtp": 1, + "http": 0, + "bitwarden": 1, + "gitea": 1, + "nextcloud": 1, + "ocserv": 1, + "pleroma": 1, } call_args_asserts(mock_broken_service) diff --git a/tests/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py similarity index 91% rename from tests/services/test_ssh.py rename to tests/test_rest_endpoints/services/test_ssh.py index 5975811..a17bdab 100644 --- a/tests/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,14 +95,18 @@ def some_users(mocker, datadir): ## TEST 401 ###################################################### -@pytest.mark.parametrize( - "endpoint", ["ssh", "ssh/enable", "ssh/key/send", "ssh/keys/user"] -) +@pytest.mark.parametrize("endpoint", ["ssh/enable", "ssh/keys/user"]) def test_unauthorized(client, ssh_off, endpoint): response = client.post(f"/services/{endpoint}") assert response.status_code == 401 +@pytest.mark.parametrize("endpoint", ["ssh", "ssh/key/send"]) +def test_unauthorized_put(client, ssh_off, endpoint): + response = client.put(f"/services/{endpoint}") + assert response.status_code == 401 + + ## TEST ENABLE ###################################################### @@ -133,31 +137,31 @@ def test_legacy_enable_when_enabled(authorized_client, ssh_on): def test_get_current_settings_ssh_off(authorized_client, ssh_off): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": False, "passwordAuthentication": True} + assert response.json() == {"enable": False, "passwordAuthentication": True} def test_get_current_settings_ssh_on(authorized_client, ssh_on): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} def test_get_current_settings_all_off(authorized_client, all_off): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": False, "passwordAuthentication": False} + assert response.json() == {"enable": False, "passwordAuthentication": False} def test_get_current_settings_undefined(authorized_client, undefined_settings): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} def 
test_get_current_settings_mostly_undefined(authorized_client, undefined_values): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} ## PUT ON /ssh ###################################################### @@ -275,29 +279,22 @@ def test_add_invalid_root_key(authorized_client, ssh_on): ## /ssh/keys/{user} ###################################################### -def test_add_root_key_via_wrong_endpoint(authorized_client, ssh_on): - response = authorized_client.post( - "/services/ssh/keys/root", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 400 - - def test_get_root_key(authorized_client, root_and_admin_have_keys): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == ["ssh-ed25519 KEY test@pc"] + assert response.json() == ["ssh-ed25519 KEY test@pc"] def test_get_root_key_when_none(authorized_client, ssh_on): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_get_root_key_on_undefined(authorized_client, undefined_settings): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_delete_root_key(authorized_client, root_and_admin_have_keys): @@ -310,6 +307,10 @@ def test_delete_root_key(authorized_client, root_and_admin_have_keys): not in read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ "ssh" ] + or read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ + "rootKeys" + ] + == [] ) @@ -330,19 +331,19 @@ def test_delete_root_key_on_undefined(authorized_client, undefined_settings): "/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"} ) assert response.status_code == 404 - assert read_json(undefined_settings / "undefined.json")["ssh"]["rootKeys"] == [] + assert "ssh" not in read_json(undefined_settings / "undefined.json") def test_get_admin_key(authorized_client, root_and_admin_have_keys): response = authorized_client.get("/services/ssh/keys/tester") assert response.status_code == 200 - assert response.json == ["ssh-rsa KEY test@pc"] + assert response.json() == ["ssh-rsa KEY test@pc"] def test_get_admin_key_when_none(authorized_client, ssh_on): response = authorized_client.get("/services/ssh/keys/tester") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_delete_admin_key(authorized_client, root_and_admin_have_keys): @@ -371,7 +372,7 @@ def test_delete_admin_key_on_undefined(authorized_client, undefined_settings): "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} ) assert response.status_code == 404 - assert read_json(undefined_settings / "undefined.json")["sshKeys"] == [] + assert "sshKeys" not in read_json(undefined_settings / "undefined.json") def test_add_admin_key(authorized_client, ssh_on): @@ -418,9 +419,9 @@ def test_get_user_key(authorized_client, some_users, user): response = authorized_client.get(f"/services/ssh/keys/user{user}") assert response.status_code == 200 if user == 1: - assert response.json == ["ssh-rsa KEY user1@pc"] + assert response.json() == ["ssh-rsa KEY user1@pc"] else: - assert response.json == [] + assert response.json() == [] def 
test_get_keys_of_nonexistent_user(authorized_client, some_users): @@ -483,7 +484,13 @@ def test_delete_nonexistent_user_key(authorized_client, some_users, user): f"/services/ssh/keys/user{user}", json={"public_key": "ssh-rsa KEY user1@pc"} ) assert response.status_code == 404 - assert read_json(some_users / "some_users.json")["users"][user - 1]["sshKeys"] == [] + if user == 2: + assert ( + read_json(some_users / "some_users.json")["users"][user - 1]["sshKeys"] + == [] + ) + if user == 3: + "sshKeys" not in read_json(some_users / "some_users.json")["users"][user - 1] def test_add_keys_of_nonexistent_user(authorized_client, some_users): diff --git a/tests/services/test_ssh/all_off.json b/tests/test_rest_endpoints/services/test_ssh/all_off.json similarity index 100% rename from tests/services/test_ssh/all_off.json rename to tests/test_rest_endpoints/services/test_ssh/all_off.json diff --git a/tests/services/test_ssh/root_and_admin_have_keys.json b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json similarity index 100% rename from tests/services/test_ssh/root_and_admin_have_keys.json rename to tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json diff --git a/tests/services/test_ssh/some_users.json b/tests/test_rest_endpoints/services/test_ssh/some_users.json similarity index 100% rename from tests/services/test_ssh/some_users.json rename to tests/test_rest_endpoints/services/test_ssh/some_users.json diff --git a/tests/services/test_ssh/turned_off.json b/tests/test_rest_endpoints/services/test_ssh/turned_off.json similarity index 100% rename from tests/services/test_ssh/turned_off.json rename to tests/test_rest_endpoints/services/test_ssh/turned_off.json diff --git a/tests/services/test_ssh/turned_on.json b/tests/test_rest_endpoints/services/test_ssh/turned_on.json similarity index 100% rename from tests/services/test_ssh/turned_on.json rename to tests/test_rest_endpoints/services/test_ssh/turned_on.json diff --git a/tests/services/test_ssh/undefined.json b/tests/test_rest_endpoints/services/test_ssh/undefined.json similarity index 100% rename from tests/services/test_ssh/undefined.json rename to tests/test_rest_endpoints/services/test_ssh/undefined.json diff --git a/tests/services/test_ssh/undefined_values.json b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json similarity index 100% rename from tests/services/test_ssh/undefined_values.json rename to tests/test_rest_endpoints/services/test_ssh/undefined_values.json diff --git a/tests/test_auth.py b/tests/test_rest_endpoints/test_auth.py similarity index 87% rename from tests/test_auth.py rename to tests/test_rest_endpoints/test_auth.py index d209c9c..1083be5 100644 --- a/tests/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -2,12 +2,10 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring import datetime -import json -import re import pytest from mnemonic import Mnemonic -from .common import read_json, write_json +from tests.common import read_json, write_json TOKENS_FILE_CONTETS = { @@ -36,11 +34,11 @@ DATE_FORMATS = [ def test_get_tokens_info(authorized_client, tokens_file): response = authorized_client.get("/auth/tokens") assert response.status_code == 200 - assert response.json == [ - {"name": "test_token", "date": "2022-01-14 08:31:10.789314", "is_caller": True}, + assert response.json() == [ + {"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True}, { "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", 
+ "date": "2022-01-14T08:31:10.789314", "is_caller": False, }, ] @@ -98,7 +96,7 @@ def test_refresh_token_unauthorized(client, tokens_file): def test_refresh_token(authorized_client, tokens_file): response = authorized_client.post("/auth/tokens") assert response.status_code == 200 - new_token = response.json["token"] + new_token = response.json()["token"] assert read_json(tokens_file)["tokens"][0]["token"] == new_token @@ -106,7 +104,7 @@ def test_refresh_token(authorized_client, tokens_file): def test_get_new_device_auth_token_unauthorized(client, tokens_file): - response = client.get("/auth/new_device") + response = client.post("/auth/new_device") assert response.status_code == 401 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -114,19 +112,19 @@ def test_get_new_device_auth_token_unauthorized(client, tokens_file): def test_get_new_device_auth_token(authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token def test_get_and_delete_new_device_token(authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = authorized_client.delete( - "/auth/new_device", json={"token": response.json["token"]} + "/auth/new_device", json={"token": response.json()["token"]} ) assert response.status_code == 200 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -141,15 +139,15 @@ def test_delete_token_unauthenticated(client, tokens_file): def test_get_and_authorize_new_device(client, authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json["token"] + assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" @@ -165,19 +163,19 @@ def test_authorize_new_device_with_invalid_token(client, tokens_file): def test_get_and_authorize_used_token(client, authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token 
response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json["token"] + assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 404 @@ -187,8 +185,8 @@ def test_get_and_authorize_token_after_12_minutes( ): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token file_data = read_json(tokens_file) @@ -199,7 +197,7 @@ def test_get_and_authorize_token_after_12_minutes( response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 404 @@ -209,7 +207,7 @@ def test_authorize_without_token(client, tokens_file): "/auth/new_device/authorize", json={"device": "new_device"}, ) - assert response.status_code == 400 + assert response.status_code == 422 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -245,7 +243,7 @@ def test_get_recovery_token_status_unauthorized(client, tokens_file): def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": False, "valid": False, "date": None, @@ -259,8 +257,8 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): # Generate token without expiration and uses_left response = authorized_client.post("/auth/recovery_token") assert response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert read_json(tokens_file)["recovery_token"]["token"] == token @@ -274,9 +272,9 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): ) # Try to get token status - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -290,7 +288,7 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): json={"token": mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" @@ -300,7 +298,7 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): json={"token": 
mnemonic_token, "device": "recovery_device2"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" @@ -318,8 +316,8 @@ def test_generate_recovery_token_with_expiration_date( json={"expiration": expiration_date_str}, ) assert response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert read_json(tokens_file)["recovery_token"]["token"] == token assert datetime.datetime.strptime( @@ -336,9 +334,9 @@ def test_generate_recovery_token_with_expiration_date( ) # Try to get token status - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -352,7 +350,7 @@ def test_generate_recovery_token_with_expiration_date( json={"token": mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" @@ -362,7 +360,7 @@ def test_generate_recovery_token_with_expiration_date( json={"token": mnemonic_token, "device": "recovery_device2"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" @@ -381,9 +379,9 @@ def test_generate_recovery_token_with_expiration_date( assert read_json(tokens_file)["tokens"] == new_data["tokens"] # Get the status of the token - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": False, "date": time_generated, @@ -397,7 +395,7 @@ def test_generate_recovery_token_with_expiration_in_the_past( authorized_client, tokens_file, timeformat ): # Server must return 400 if expiration date is in the past - expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) + expiration_date = datetime.datetime.utcnow() - datetime.timedelta(minutes=5) expiration_date_str = expiration_date.strftime(timeformat) response = authorized_client.post( "/auth/recovery_token", @@ -416,7 +414,7 @@ def test_generate_recovery_token_with_invalid_time_format( "/auth/recovery_token", json={"expiration": expiration_date}, ) - assert response.status_code == 400 + assert response.status_code == 422 assert "recovery_token" not in read_json(tokens_file) @@ -429,8 +427,8 @@ def test_generate_recovery_token_with_limited_uses( json={"uses": 2}, ) assert response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert 
read_json(tokens_file)["recovery_token"]["token"] == token assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 @@ -445,9 +443,9 @@ def test_generate_recovery_token_with_limited_uses( ) # Try to get token status - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -461,16 +459,16 @@ def test_generate_recovery_token_with_limited_uses( json={"token": mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" assert read_json(tokens_file)["recovery_token"]["uses_left"] == 1 # Get the status of the token - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -484,14 +482,14 @@ def test_generate_recovery_token_with_limited_uses( json={"token": mnemonic_token, "device": "recovery_device2"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" # Get the status of the token - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": False, "date": time_generated, diff --git a/tests/test_system.py b/tests/test_rest_endpoints/test_system.py similarity index 97% rename from tests/test_system.py rename to tests/test_rest_endpoints/test_system.py index b9c8649..90c1499 100644 --- a/tests/test_system.py +++ b/tests/test_rest_endpoints/test_system.py @@ -123,13 +123,13 @@ def test_get_timezone_unauthorized(client, turned_on): def test_get_timezone(authorized_client, turned_on): response = authorized_client.get("/system/configuration/timezone") assert response.status_code == 200 - assert response.get_json() == "Europe/Moscow" + assert response.json() == "Europe/Moscow" def test_get_timezone_on_undefined(authorized_client, undefined_config): response = authorized_client.get("/system/configuration/timezone") assert response.status_code == 200 - assert response.get_json() == "Europe/Uzhgorod" + assert response.json() == "Europe/Uzhgorod" def test_put_timezone_unauthorized(client, turned_on): @@ -159,7 +159,7 @@ def test_put_timezone_on_undefined(authorized_client, undefined_config): def test_put_timezone_without_timezone(authorized_client, turned_on): response = authorized_client.put("/system/configuration/timezone", json={}) - assert response.status_code == 400 + assert response.status_code == 422 assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -182,7 +182,7 @@ def test_get_auto_upgrade_unauthorized(client, turned_on): def test_get_auto_upgrade(authorized_client, turned_on): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert 
response.json() == { "enable": True, "allowReboot": True, } @@ -191,7 +191,7 @@ def test_get_auto_upgrade(authorized_client, turned_on): def test_get_auto_upgrade_on_undefined(authorized_client, undefined_config): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": True, "allowReboot": False, } @@ -200,7 +200,7 @@ def test_get_auto_upgrade_on_undefined(authorized_client, undefined_config): def test_get_auto_upgrade_without_values(authorized_client, no_values): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": True, "allowReboot": False, } @@ -209,7 +209,7 @@ def test_get_auto_upgrade_without_values(authorized_client, no_values): def test_get_auto_upgrade_turned_off(authorized_client, turned_off): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": False, "allowReboot": False, } @@ -357,7 +357,7 @@ def test_get_system_version_unauthorized(client, mock_subprocess_check_output): def test_get_system_version(authorized_client, mock_subprocess_check_output): response = authorized_client.get("/system/version") assert response.status_code == 200 - assert response.get_json() == {"system_version": "Testing Linux"} + assert response.json() == {"system_version": "Testing Linux"} assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] @@ -384,7 +384,7 @@ def test_get_python_version_unauthorized(client, mock_subprocess_check_output): def test_get_python_version(authorized_client, mock_subprocess_check_output): response = authorized_client.get("/system/pythonVersion") assert response.status_code == 200 - assert response.get_json() == "Testing Linux" + assert response.json() == "Testing Linux" assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"] diff --git a/tests/test_system/domain b/tests/test_rest_endpoints/test_system/domain similarity index 100% rename from tests/test_system/domain rename to tests/test_rest_endpoints/test_system/domain diff --git a/tests/test_system/no_values.json b/tests/test_rest_endpoints/test_system/no_values.json similarity index 100% rename from tests/test_system/no_values.json rename to tests/test_rest_endpoints/test_system/no_values.json diff --git a/tests/test_system/turned_off.json b/tests/test_rest_endpoints/test_system/turned_off.json similarity index 100% rename from tests/test_system/turned_off.json rename to tests/test_rest_endpoints/test_system/turned_off.json diff --git a/tests/test_system/turned_on.json b/tests/test_rest_endpoints/test_system/turned_on.json similarity index 100% rename from tests/test_system/turned_on.json rename to tests/test_rest_endpoints/test_system/turned_on.json diff --git a/tests/test_system/undefined.json b/tests/test_rest_endpoints/test_system/undefined.json similarity index 100% rename from tests/test_system/undefined.json rename to tests/test_rest_endpoints/test_system/undefined.json diff --git a/tests/test_users.py b/tests/test_rest_endpoints/test_users.py similarity index 93% rename from tests/test_users.py rename to tests/test_rest_endpoints/test_users.py index 9374ef2..ebb3eff 100644 --- a/tests/test_users.py +++ 
@@ -121,31 +121,31 @@ def test_get_users_unauthorized(client, some_users, mock_subprocess_popen):
 def test_get_some_users(authorized_client, some_users, mock_subprocess_popen):
     response = authorized_client.get("/users")
     assert response.status_code == 200
-    assert response.json == ["user1", "user2", "user3"]
+    assert response.json() == ["user1", "user2", "user3"]


 def test_get_one_user(authorized_client, one_user, mock_subprocess_popen):
     response = authorized_client.get("/users")
     assert response.status_code == 200
-    assert response.json == ["user1"]
+    assert response.json() == ["user1"]


 def test_get_one_user_with_main(authorized_client, one_user, mock_subprocess_popen):
     response = authorized_client.get("/users?withMainUser=true")
     assert response.status_code == 200
-    assert response.json == ["tester", "user1"]
+    assert sorted(response.json()) == sorted(["tester", "user1"])


 def test_get_no_users(authorized_client, no_users, mock_subprocess_popen):
     response = authorized_client.get("/users")
     assert response.status_code == 200
-    assert response.json == []
+    assert response.json() == []


 def test_get_no_users_with_main(authorized_client, no_users, mock_subprocess_popen):
     response = authorized_client.get("/users?withMainUser=true")
     assert response.status_code == 200
-    assert response.json == ["tester"]
+    assert response.json() == ["tester"]


 def test_get_undefined_users(
@@ -153,7 +153,7 @@
 ):
     response = authorized_client.get("/users")
     assert response.status_code == 200
-    assert response.json == []
+    assert response.json() == []


 def test_post_users_unauthorized(client, some_users, mock_subprocess_popen):
@@ -174,6 +174,7 @@ def test_post_one_user(authorized_client, one_user, mock_subprocess_popen):
         },
         {
             "username": "user4",
+            "sshKeys": [],
             "hashedPassword": "NEW_HASHED",
         },
     ]
@@ -181,19 +182,19 @@ def test_post_one_user(authorized_client, one_user, mock_subprocess_popen):
 def test_post_without_username(authorized_client, one_user, mock_subprocess_popen):
     response = authorized_client.post("/users", json={"password": "password"})
-    assert response.status_code == 400
+    assert response.status_code == 422


 def test_post_without_password(authorized_client, one_user, mock_subprocess_popen):
     response = authorized_client.post("/users", json={"username": "user4"})
-    assert response.status_code == 400
+    assert response.status_code == 422


 def test_post_without_username_and_password(
     authorized_client, one_user, mock_subprocess_popen
 ):
     response = authorized_client.post("/users", json={})
-    assert response.status_code == 400
+    assert response.status_code == 422


 @pytest.mark.parametrize("username", invalid_usernames)
@@ -226,7 +227,7 @@ def test_post_user_to_undefined_users(
     )
     assert response.status_code == 201
     assert read_json(undefined_settings / "undefined.json")["users"] == [
-        {"username": "user4", "hashedPassword": "NEW_HASHED"}
+        {"username": "user4", "sshKeys": [], "hashedPassword": "NEW_HASHED"}
     ]
@@ -279,11 +280,6 @@ def test_delete_main_user(authorized_client, some_users, mock_subprocess_popen):
     assert response.status_code == 400


-def test_delete_without_argument(authorized_client, some_users, mock_subprocess_popen):
-    response = authorized_client.delete("/users/")
-    assert response.status_code == 404
-
-
 def test_delete_just_delete(authorized_client, some_users, mock_subprocess_popen):
     response = authorized_client.delete("/users")
     assert response.status_code == 405
diff --git a/tests/test_users/no_users.json b/tests/test_rest_endpoints/test_users/no_users.json
similarity index 100%
rename from tests/test_users/no_users.json
rename to tests/test_rest_endpoints/test_users/no_users.json
diff --git a/tests/test_users/one_user.json b/tests/test_rest_endpoints/test_users/one_user.json
similarity index 100%
rename from tests/test_users/one_user.json
rename to tests/test_rest_endpoints/test_users/one_user.json
diff --git a/tests/test_users/some_users.json b/tests/test_rest_endpoints/test_users/some_users.json
similarity index 100%
rename from tests/test_users/some_users.json
rename to tests/test_rest_endpoints/test_users/some_users.json
diff --git a/tests/test_users/undefined.json b/tests/test_rest_endpoints/test_users/undefined.json
similarity index 100%
rename from tests/test_users/undefined.json
rename to tests/test_rest_endpoints/test_users/undefined.json
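
Note: the recurring edits in the test hunks above are consistent with switching the suite to FastAPI's TestClient: the parsed body is returned by the response.json() method (Flask's test client exposed a response.json property / response.get_json()), and a request body that fails validation is rejected with HTTP 422 rather than the 400 the Flask handlers returned. A minimal sketch of that pattern, assuming a hypothetical /echo endpoint that is not part of the selfprivacy_api routes:

    # Sketch only: illustrates the TestClient conventions relied on by the tests above.
    from fastapi import FastAPI
    from fastapi.testclient import TestClient
    from pydantic import BaseModel

    app = FastAPI()


    class Echo(BaseModel):
        message: str


    @app.post("/echo")
    def echo(body: Echo):
        # The body is validated against the pydantic model before this handler runs.
        return {"message": body.message}


    client = TestClient(app)


    def test_echo():
        response = client.post("/echo", json={"message": "hi"})
        assert response.status_code == 200
        assert response.json() == {"message": "hi"}  # .json() is a method, not a property


    def test_echo_missing_field():
        # A payload that fails pydantic validation surfaces as 422, not 400.
        response = client.post("/echo", json={})
        assert response.status_code == 422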